From ed908d04722cd363ac1fdf0916058b576e91527c Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 19 Jul 2024 15:10:28 +0800 Subject: [PATCH 001/103] =?UTF-8?q?feat=20=20=E6=B5=8B=E8=AF=95=E6=89=B9?= =?UTF-8?q?=E9=87=8F=E4=B8=8A=E4=BC=A0=E5=9B=BE=E7=89=87=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 1 + app/api/api_design.py | 17 +++--- app/core/config.py | 4 +- app/service/design/items/clothing.py | 5 +- .../design/items/pipelines/keypoints.py | 3 +- app/service/design/items/pipelines/loading.py | 2 + .../design/items/pipelines/painting.py | 6 +- app/service/design/items/pipelines/scale.py | 3 +- .../design/items/pipelines/segmentation.py | 34 ++++++++++- app/service/design/items/pipelines/split.py | 4 +- app/service/design/service.py | 54 +++++++++++++++-- app/service/design/utils/upload_image.py | 60 +++++++++++-------- app/service/utils/decorator.py | 15 ++++- 13 files changed, 160 insertions(+), 48 deletions(-) diff --git a/.gitignore b/.gitignore index 1bf82fb..87a4934 100644 --- a/.gitignore +++ b/.gitignore @@ -120,6 +120,7 @@ dmypy.json #runtime produce test +seg_cache logs seg_result/ seg_result diff --git a/app/api/api_design.py b/app/api/api_design.py index 5ce6096..d4537c1 100644 --- a/app/api/api_design.py +++ b/app/api/api_design.py @@ -136,13 +136,16 @@ def design(request_data: DesignModel): "process_id": "89" } """ - try: - logger.info(f"design request item is : @@@@@@:{json.dumps(request_data.dict())}") - data = generate(request_data=request_data) - logger.info(f"design response @@@@@@:{json.dumps(data)}") - except Exception as e: - logger.warning(f"design Run Exception @@@@@@:{e}") - raise HTTPException(status_code=404, detail=str(e)) + logger.info(f"design request item is : @@@@@@:{json.dumps(request_data.dict())}") + data = generate(request_data=request_data) + logger.info(f"design response @@@@@@:{json.dumps(data)}") + # try: + # logger.info(f"design request item is : 
@@@@@@:{json.dumps(request_data.dict())}") + # data = generate(request_data=request_data) + # logger.info(f"design response @@@@@@:{json.dumps(data)}") + # except Exception as e: + # logger.warning(f"design Run Exception @@@@@@:{e}") + # raise HTTPException(status_code=404, detail=str(e)) return ResponseModel(data=data) diff --git a/app/core/config.py b/app/core/config.py index 8b7e7e8..0e32724 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -24,11 +24,11 @@ DEBUG = False if DEBUG: LOGS_PATH = "logs/" CATEGORY_PATH = "service/attribute/config/descriptor/category/category_dis.csv" - # FACE_CLASSIFIER = "service/generate_image/utils/haarcascade_frontalface_alt.xml" + SEG_CACHE_PATH = "../seg_cache/" else: LOGS_PATH = "app/logs/" CATEGORY_PATH = "app/service/attribute/config/descriptor/category/category_dis.csv" - # FACE_CLASSIFIER = 'app/service/generate_image/utils/haarcascade_frontalface_alt.xml' + SEG_CACHE_PATH = "/seg_cache/" # RABBITMQ_ENV = "" # 生产环境 RABBITMQ_ENV = "-dev" # 开发环境 diff --git a/app/service/design/items/clothing.py b/app/service/design/items/clothing.py index f9f9561..7dd845b 100644 --- a/app/service/design/items/clothing.py +++ b/app/service/design/items/clothing.py @@ -37,7 +37,8 @@ class Clothing(object): resize_scale=self.result["resize_scale"], mask=cv2.resize(self.result['mask'], self.result["front_image"].size), gradient_string=self.result['gradient_string'] if 'gradient_string' in self.result.keys() else "", - pattern_image_url=self.result['pattern_image_url'] + pattern_image_url=self.result['pattern_image_url'], + pattern_image=self.result['pattern_image'] ) layer.insert(front_layer) @@ -54,7 +55,7 @@ class Clothing(object): resize_scale=self.result["resize_scale"], mask=cv2.resize(self.result['mask'], self.result["front_image"].size), gradient_string=self.result['gradient_string'] if 'gradient_string' in self.result.keys() else "", - pattern_image_url=self.result['pattern_image_url'] + 
pattern_image_url=self.result['pattern_image_url'], ) layer.insert(back_layer) diff --git a/app/service/design/items/pipelines/keypoints.py b/app/service/design/items/pipelines/keypoints.py index 1f53ced..1a264d6 100644 --- a/app/service/design/items/pipelines/keypoints.py +++ b/app/service/design/items/pipelines/keypoints.py @@ -5,6 +5,7 @@ import numpy as np from pymilvus import MilvusClient from app.core.config import * +from app.service.utils.decorator import RunTime, ClassCallRunTime from ..builder import PIPELINES from ...utils.design_ensemble import get_keypoint_result @@ -27,7 +28,7 @@ class KeypointDetection(object): # self.client.close() # print(f"client close time : {time.time() - start_time}") - # @ RunTime + @ ClassCallRunTime def __call__(self, result): # logging.info("KeypointDetection run ") if result['name'] in ['blouse', 'skirt', 'dress', 'outwear', 'trousers', 'tops', 'bottoms']: # 查询是否有数据 且类别相同 相同则直接读 不同则推理后更新 diff --git a/app/service/design/items/pipelines/loading.py b/app/service/design/items/pipelines/loading.py index d792646..ad5aec2 100644 --- a/app/service/design/items/pipelines/loading.py +++ b/app/service/design/items/pipelines/loading.py @@ -1,5 +1,6 @@ import cv2 +from app.service.utils.decorator import RunTime, ClassCallRunTime from app.service.utils.oss_client import oss_get_image from ..builder import PIPELINES @@ -12,6 +13,7 @@ class LoadImageFromFile(object): self.print_dict = print_dict # self.minio_client = Minio(f"{MINIO_URL}", access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + @ClassCallRunTime def __call__(self, result): result['image'], result['pre_mask'] = self.read_image(self.path) result['gray'] = cv2.cvtColor(result['image'], cv2.COLOR_BGR2GRAY) diff --git a/app/service/design/items/pipelines/painting.py b/app/service/design/items/pipelines/painting.py index 0fd2897..8e4e524 100644 --- a/app/service/design/items/pipelines/painting.py +++ b/app/service/design/items/pipelines/painting.py @@ -4,6 +4,7 
@@ import cv2 import numpy as np from PIL import Image +from app.service.utils.decorator import RunTime, ClassCallRunTime from app.service.utils.oss_client import oss_get_image from ..builder import PIPELINES @@ -13,7 +14,7 @@ class Painting(object): def __init__(self, painting_flag=True): self.painting_flag = painting_flag - # @ RunTime + @ClassCallRunTime def __call__(self, result): if result['name'] not in ['hairstyle', 'earring'] and self.painting_flag and result['color'] != 'none': dim_image_h, dim_image_w = result['image'].shape[0:2] @@ -86,7 +87,7 @@ class PrintPainting(object): def __init__(self, print_flag=True): self.print_flag = print_flag - # @ RunTime + @ClassCallRunTime def __call__(self, result): single_print = result['print']['single'] overall_print = result['print']['overall'] @@ -236,7 +237,6 @@ class PrintPainting(object): print_background = cv2.cvtColor(np.array(source_image_pil), cv2.COLOR_RGBA2BGR) mask_background = cv2.cvtColor(np.array(source_image_pil_mask), cv2.COLOR_RGBA2BGR) - print(1) else: mask = self.get_mask_inv(image) mask = np.expand_dims(mask, axis=2) diff --git a/app/service/design/items/pipelines/scale.py b/app/service/design/items/pipelines/scale.py index d101530..43604cb 100644 --- a/app/service/design/items/pipelines/scale.py +++ b/app/service/design/items/pipelines/scale.py @@ -2,6 +2,7 @@ import math import cv2 +from app.service.utils.decorator import ClassCallRunTime from ..builder import PIPELINES @@ -10,7 +11,7 @@ class Scaling(object): def __init__(self): pass - # @ RunTime + @ClassCallRunTime def __call__(self, result): if result['keypoint'] in ['waistband', 'shoulder', 'head_point']: # milvus_db_keypoint_cache diff --git a/app/service/design/items/pipelines/segmentation.py b/app/service/design/items/pipelines/segmentation.py index d9f8ac0..4e6e0d0 100644 --- a/app/service/design/items/pipelines/segmentation.py +++ b/app/service/design/items/pipelines/segmentation.py @@ -1,3 +1,9 @@ +import os + +import numpy as np + 
+from app.core.config import SEG_CACHE_PATH +from app.service.utils.decorator import ClassCallRunTime from ..builder import PIPELINES from ...utils.design_ensemble import get_seg_result @@ -9,6 +15,32 @@ class Segmentation(object): self.device = device self.debug = debug + @ClassCallRunTime def __call__(self, result): - result['seg_result'] = get_seg_result(result["image_id"], result['image']) + _, seg_result = self.load_seg_result(result["image_id"]) + if not _: + result['seg_result'] = get_seg_result(result["image_id"], result['image']) + self.save_seg_result(result['seg_result'][0], result['image_id']) return result + + @staticmethod + def save_seg_result(seg_result, image_id): + file_path = f"{SEG_CACHE_PATH}{image_id}.npy" + try: + np.save(file_path, seg_result) + print("保存成功", os.path.abspath(file_path)) + except Exception as e: + print(f"保存失败: {e}") + + @staticmethod + def load_seg_result(image_id): + file_path = f"{SEG_CACHE_PATH}{image_id}.npy" + try: + seg_result = np.load(file_path) + return True, seg_result + except FileNotFoundError: + print("文件不存在") + return False, None + except Exception as e: + print(f"加载失败: {e}") + return False, None diff --git a/app/service/design/items/pipelines/split.py b/app/service/design/items/pipelines/split.py index efa20e4..dd9becb 100644 --- a/app/service/design/items/pipelines/split.py +++ b/app/service/design/items/pipelines/split.py @@ -5,6 +5,7 @@ import numpy as np from PIL import Image from cv2 import cvtColor, COLOR_BGR2RGBA +from app.service.utils.decorator import ClassCallRunTime from app.service.utils.generate_uuid import generate_uuid from ..builder import PIPELINES from ...utils.conversion_image import rgb_to_rgba @@ -17,6 +18,7 @@ class Split(object): Split image into front and back layer according to the segmentation result """ + @ClassCallRunTime # KNet def __call__(self, result): try: @@ -66,7 +68,7 @@ class Split(object): # 创建中间图层 result_pattern_image_rgba = rgb_to_rgba(result['pattern_image'], 
result['mask']) result_pattern_image_pil = Image.fromarray(cvtColor(result_pattern_image_rgba, COLOR_BGR2RGBA)) - _, result['pattern_image_url'], _ = upload_png_mask(result_pattern_image_pil, f'{generate_uuid()}') + result['pattern'], result['pattern_image_url'], _ = upload_png_mask(result_pattern_image_pil, f'{generate_uuid()}') return result except Exception as e: logging.warning(f"split runtime exception : {e} image_id : {result['image_id']}") diff --git a/app/service/design/service.py b/app/service/design/service.py index 54cb45b..3d71326 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -1,4 +1,8 @@ import concurrent.futures +import io + +import cv2 +from PIL import Image from app.core.config import PRIORITY_DICT from app.service.design.core.layer import Layer @@ -6,6 +10,7 @@ from app.service.design.items import build_item from app.service.design.utils.redis_utils import Redis from app.service.design.utils.synthesis_item import synthesis, synthesis_single from app.service.utils.decorator import RunTime +from app.service.utils.oss_client import oss_upload_image def process_item(item, layers): @@ -43,6 +48,7 @@ def final_progress(process_id): @RunTime def generate(request_data): return_response = {} + return_png_mask = [] request_data = request_data.dict() assert "process_id" in request_data.keys(), "Need process_id parameters" @@ -55,14 +61,15 @@ def generate(request_data): # 获取处理结果 for future in concurrent.futures.as_completed(futures): obj = futures[future] - - result = future.result() - return_response[obj] = result + return_response[obj] = future.result()[0] + return_png_mask.extend(future.result()[1]) final_progress(process_id) + upload_results = process_images(return_png_mask) return return_response def process_object(cfg, process_id, total): + uploaded_images = [] basic_info = cfg.get('basic') items_response = { 'layers': [] @@ -83,6 +90,15 @@ def process_object(cfg, process_id, total): layers = sorted(layers.layer, 
key=lambda s: s.get("priority", float('inf'))) else: layers = sorted(layers.layer, key=lambda x: PRIORITY_DICT.get(x['name'], float('inf'))) + # 上传所有图片 + for layer in layers: + if 'image' in layer.keys() and layer['image'] is not None: + uploaded_images.append({'image_obj': layer['image'], 'image_url': layer['image_url']}) + if 'pattern_image' in layer.keys() and layer['pattern_image'] is not None: + uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url']}) + if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: + uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url']}) + # 合成 items_response['synthesis_url'] = synthesis(layers, body_size) @@ -131,4 +147,34 @@ def process_object(cfg, process_id, total): items_response['synthesis_url'] = synthesis_single(item.result['front_image'], item.result['back_image']) break update_progress(process_id, total) - return items_response + return items_response, uploaded_images + + +@RunTime +def process_images(images): + with concurrent.futures.ThreadPoolExecutor() as executor: + results = list(executor.map(upload_images, images)) + # results = [] + # for image in images: + # results.append(upload_images(image)) + return results + + +@RunTime +def upload_images(image_obj): + bucket_name = image_obj['image_url'].split("/", 1)[0] + object_name = image_obj['image_url'].split("/", 1)[1] + if isinstance(image_obj['image_obj'], Image.Image): + image_data = io.BytesIO() + image_obj['image_obj'].save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + req = oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) + return image_obj['image_url'] + else: + mask_inverted = cv2.bitwise_not(image_obj['image_obj']) + # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 + rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) + rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] + req = 
oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=cv2.imencode('.png', rgba_image)[1]) + return image_obj['image_url'] diff --git a/app/service/design/utils/upload_image.py b/app/service/design/utils/upload_image.py index 3571816..610c188 100644 --- a/app/service/design/utils/upload_image.py +++ b/app/service/design/utils/upload_image.py @@ -13,33 +13,43 @@ import logging import cv2 from app.core.config import * +from app.service.utils.decorator import RunTime from app.service.utils.oss_client import oss_upload_image # @RunTime -def upload_png_mask(front_image, object_name, mask=None): - try: - mask_url = None - if mask is not None: - mask_inverted = cv2.bitwise_not(mask) - # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 - rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) - rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] - # image_bytes = io.BytesIO() - # image_bytes.write(cv2.imencode('.png', rgba_image)[1].tobytes()) - # image_bytes.seek(0) - # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), content_type='image/png').object_name}" - # oss upload #################### - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) - mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" +# def upload_png_mask(front_image, object_name, mask=None): +# try: +# mask_url = None +# if mask is not None: +# mask_inverted = cv2.bitwise_not(mask) +# # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 +# rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) +# rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] +# # image_bytes = io.BytesIO() +# # image_bytes.write(cv2.imencode('.png', rgba_image)[1].tobytes()) +# # image_bytes.seek(0) +# # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), 
content_type='image/png').object_name}" +# # oss upload #################### +# req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) +# mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" +# +# image_data = io.BytesIO() +# front_image.save(image_data, format='PNG') +# image_data.seek(0) +# image_bytes = image_data.read() +# # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" +# req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) +# image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" +# return front_image, image_url, mask_url +# except Exception as e: +# logging.warning(f"upload_png_mask runtime exception : {e}") - image_data = io.BytesIO() - front_image.save(image_data, format='PNG') - image_data.seek(0) - image_bytes = image_data.read() - # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) - image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" - return front_image, image_url, mask_url - except Exception as e: - logging.warning(f"upload_png_mask runtime exception : {e}") + +@RunTime +def upload_png_mask(front_image, object_name, mask=None): + mask_url = None + if mask is not None: + mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" + image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" + return front_image, image_url, mask_url diff --git a/app/service/utils/decorator.py b/app/service/utils/decorator.py index 294b54b..fcf8666 100644 --- a/app/service/utils/decorator.py +++ 
b/app/service/utils/decorator.py @@ -1,5 +1,5 @@ -import time import logging +import time def RunTime(func): @@ -12,3 +12,16 @@ def RunTime(func): return res return wrapper + + +def ClassCallRunTime(func): + def wrapper(*args, **kwargs): + start_time = time.time() + result = func(*args, **kwargs) + end_time = time.time() + execution_time = end_time - start_time + class_name = args[0].__class__.__name__ # 获取类名 + print(f"class name: {class_name} , run time is : {execution_time} s") + return result + + return wrapper From 33e7d884cf200937ae2f64d0901824c8c097d7c7 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 19 Jul 2024 15:17:54 +0800 Subject: [PATCH 002/103] =?UTF-8?q?feat=20=20=E6=B5=8B=E8=AF=95=E6=89=B9?= =?UTF-8?q?=E9=87=8F=E4=B8=8A=E4=BC=A0=E5=9B=BE=E7=89=87=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/items/pipelines/keypoints.py | 2 +- app/service/design/items/pipelines/loading.py | 2 +- app/service/design/items/pipelines/painting.py | 4 ++-- app/service/design/items/pipelines/scale.py | 2 +- app/service/design/items/pipelines/segmentation.py | 2 +- app/service/design/items/pipelines/split.py | 2 +- app/service/design/service.py | 2 +- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/app/service/design/items/pipelines/keypoints.py b/app/service/design/items/pipelines/keypoints.py index 1a264d6..fded7de 100644 --- a/app/service/design/items/pipelines/keypoints.py +++ b/app/service/design/items/pipelines/keypoints.py @@ -28,7 +28,7 @@ class KeypointDetection(object): # self.client.close() # print(f"client close time : {time.time() - start_time}") - @ ClassCallRunTime + # @ClassCallRunTime def __call__(self, result): # logging.info("KeypointDetection run ") if result['name'] in ['blouse', 'skirt', 'dress', 'outwear', 'trousers', 'tops', 'bottoms']: # 查询是否有数据 且类别相同 相同则直接读 不同则推理后更新 diff --git a/app/service/design/items/pipelines/loading.py 
b/app/service/design/items/pipelines/loading.py index ad5aec2..f0f4188 100644 --- a/app/service/design/items/pipelines/loading.py +++ b/app/service/design/items/pipelines/loading.py @@ -13,7 +13,7 @@ class LoadImageFromFile(object): self.print_dict = print_dict # self.minio_client = Minio(f"{MINIO_URL}", access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) - @ClassCallRunTime + # @ClassCallRunTime def __call__(self, result): result['image'], result['pre_mask'] = self.read_image(self.path) result['gray'] = cv2.cvtColor(result['image'], cv2.COLOR_BGR2GRAY) diff --git a/app/service/design/items/pipelines/painting.py b/app/service/design/items/pipelines/painting.py index 8e4e524..07c6a84 100644 --- a/app/service/design/items/pipelines/painting.py +++ b/app/service/design/items/pipelines/painting.py @@ -14,7 +14,7 @@ class Painting(object): def __init__(self, painting_flag=True): self.painting_flag = painting_flag - @ClassCallRunTime + # @ClassCallRunTime def __call__(self, result): if result['name'] not in ['hairstyle', 'earring'] and self.painting_flag and result['color'] != 'none': dim_image_h, dim_image_w = result['image'].shape[0:2] @@ -87,7 +87,7 @@ class PrintPainting(object): def __init__(self, print_flag=True): self.print_flag = print_flag - @ClassCallRunTime + # @ClassCallRunTime def __call__(self, result): single_print = result['print']['single'] overall_print = result['print']['overall'] diff --git a/app/service/design/items/pipelines/scale.py b/app/service/design/items/pipelines/scale.py index 43604cb..edd98c9 100644 --- a/app/service/design/items/pipelines/scale.py +++ b/app/service/design/items/pipelines/scale.py @@ -11,7 +11,7 @@ class Scaling(object): def __init__(self): pass - @ClassCallRunTime + # @ClassCallRunTime def __call__(self, result): if result['keypoint'] in ['waistband', 'shoulder', 'head_point']: # milvus_db_keypoint_cache diff --git a/app/service/design/items/pipelines/segmentation.py 
b/app/service/design/items/pipelines/segmentation.py index 4e6e0d0..7a1773c 100644 --- a/app/service/design/items/pipelines/segmentation.py +++ b/app/service/design/items/pipelines/segmentation.py @@ -15,7 +15,7 @@ class Segmentation(object): self.device = device self.debug = debug - @ClassCallRunTime + # @ClassCallRunTime def __call__(self, result): _, seg_result = self.load_seg_result(result["image_id"]) if not _: diff --git a/app/service/design/items/pipelines/split.py b/app/service/design/items/pipelines/split.py index dd9becb..aab0f8d 100644 --- a/app/service/design/items/pipelines/split.py +++ b/app/service/design/items/pipelines/split.py @@ -18,7 +18,7 @@ class Split(object): Split image into front and back layer according to the segmentation result """ - @ClassCallRunTime + # @ClassCallRunTime # KNet def __call__(self, result): try: diff --git a/app/service/design/service.py b/app/service/design/service.py index 3d71326..e5cfac7 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -160,7 +160,7 @@ def process_images(images): return results -@RunTime +# @RunTime def upload_images(image_obj): bucket_name = image_obj['image_url'].split("/", 1)[0] object_name = image_obj['image_url'].split("/", 1)[1] From 6d68ba1add9131aaf7e8bf3f6c0ffcc5c86b6311 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 19 Jul 2024 15:30:21 +0800 Subject: [PATCH 003/103] =?UTF-8?q?feat=20=20=E6=B5=8B=E8=AF=95=E6=89=B9?= =?UTF-8?q?=E9=87=8F=E4=B8=8A=E4=BC=A0=E5=9B=BE=E7=89=87=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/items/pipelines/split.py | 2 +- app/service/design/service.py | 9 ++++----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/app/service/design/items/pipelines/split.py b/app/service/design/items/pipelines/split.py index aab0f8d..031d644 100644 --- a/app/service/design/items/pipelines/split.py +++ b/app/service/design/items/pipelines/split.py @@ 
-68,7 +68,7 @@ class Split(object): # 创建中间图层 result_pattern_image_rgba = rgb_to_rgba(result['pattern_image'], result['mask']) result_pattern_image_pil = Image.fromarray(cvtColor(result_pattern_image_rgba, COLOR_BGR2RGBA)) - result['pattern'], result['pattern_image_url'], _ = upload_png_mask(result_pattern_image_pil, f'{generate_uuid()}') + result['pattern_image'], result['pattern_image_url'], _ = upload_png_mask(result_pattern_image_pil, f'{generate_uuid()}') return result except Exception as e: logging.warning(f"split runtime exception : {e} image_id : {result['image_id']}") diff --git a/app/service/design/service.py b/app/service/design/service.py index e5cfac7..03096ca 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -2,7 +2,6 @@ import concurrent.futures import io import cv2 -from PIL import Image from app.core.config import PRIORITY_DICT from app.service.design.core.layer import Layer @@ -93,11 +92,11 @@ def process_object(cfg, process_id, total): # 上传所有图片 for layer in layers: if 'image' in layer.keys() and layer['image'] is not None: - uploaded_images.append({'image_obj': layer['image'], 'image_url': layer['image_url']}) + uploaded_images.append({'image_obj': layer['image'], 'image_url': layer['image_url'], 'image_type': 'image'}) if 'pattern_image' in layer.keys() and layer['pattern_image'] is not None: - uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url']}) + uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url'], 'image_type': 'pattern_image'}) if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: - uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url']}) + uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) # 合成 items_response['synthesis_url'] = synthesis(layers, body_size) @@ -164,7 +163,7 @@ def 
process_images(images): def upload_images(image_obj): bucket_name = image_obj['image_url'].split("/", 1)[0] object_name = image_obj['image_url'].split("/", 1)[1] - if isinstance(image_obj['image_obj'], Image.Image): + if image_obj['image_type'] == 'image' or image_obj['image_type'] == 'pattern_image': image_data = io.BytesIO() image_obj['image_obj'].save(image_data, format='PNG') image_data.seek(0) From b75d1e5574162c7947a1f9541ff045708b3f0ed8 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 22 Jul 2024 11:09:06 +0800 Subject: [PATCH 004/103] =?UTF-8?q?feat=20=20=E5=A4=84=E7=90=86=E5=A4=9A?= =?UTF-8?q?=E5=B1=82sketch=20design=E5=87=BA=E7=8E=B0=E7=9A=84=E5=86=85?= =?UTF-8?q?=E6=90=AD=E8=B6=8A=E7=95=8C=E9=97=AE=E9=A2=98=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/utils/synthesis_item.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/app/service/design/utils/synthesis_item.py b/app/service/design/utils/synthesis_item.py index 7bedbe6..1b0c64f 100644 --- a/app/service/design/utils/synthesis_item.py +++ b/app/service/design/utils/synthesis_item.py @@ -65,8 +65,12 @@ def synthesis(data, size): try: all_mask_shape = (size[1], size[0]) - top_outer_mask = np.zeros(all_mask_shape, dtype=np.uint8) - bottom_outer_mask = np.zeros(all_mask_shape, dtype=np.uint8) + body_mask = None + for d in data: + if d['name'] == 'body': + body_mask = d['image'].split()[3] + top_outer_mask = np.array(body_mask) + bottom_outer_mask = np.array(body_mask) top = True bottom = True @@ -101,10 +105,10 @@ def synthesis(data, size): if layer['name'] != "body": test_image = Image.new('RGBA', size, (0, 0, 0, 0)) test_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) - # mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) - # mask_alpha = Image.fromarray(mask_data) - # cropped_image = Image.composite(test_image, Image.new("RGBA", 
test_image.size, (255, 255, 255, 0)), mask_alpha) - base_image.paste(test_image, (0, 0), test_image) + mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) + mask_alpha = Image.fromarray(mask_data) + cropped_image = Image.composite(test_image, Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) + base_image.paste(test_image, (0, 0), cropped_image) else: base_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) From 87fcd5e9e9c993f4896df9d6441cac1552b3e72b Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 22 Jul 2024 15:20:46 +0800 Subject: [PATCH 005/103] =?UTF-8?q?feat=20=20=E5=A4=84=E7=90=86=E5=A4=9A?= =?UTF-8?q?=E5=B1=82sketch=20design=E5=87=BA=E7=8E=B0=E7=9A=84=E5=86=85?= =?UTF-8?q?=E6=90=AD=E8=B6=8A=E7=95=8C=E9=97=AE=E9=A2=98=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/service.py | 2 +- app/service/design/utils/synthesis_item.py | 22 ++++++++++++++++------ 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index 03096ca..0200249 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -99,7 +99,7 @@ def process_object(cfg, process_id, total): uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) # 合成 - items_response['synthesis_url'] = synthesis(layers, body_size) + items_response['synthesis_url'] = synthesis(layers, body_size, basic_info) for lay in layers: items_response['layers'].append({ diff --git a/app/service/design/utils/synthesis_item.py b/app/service/design/utils/synthesis_item.py index 1b0c64f..d560f37 100644 --- a/app/service/design/utils/synthesis_item.py +++ b/app/service/design/utils/synthesis_item.py @@ -59,7 +59,7 @@ def positioning(all_mask_shape, mask_shape, offset): # @RunTime -def synthesis(data, size): +def synthesis(data, size, basic_info): # 创建底图 
base_image = Image.new('RGBA', size, (0, 0, 0, 0)) try: @@ -68,9 +68,13 @@ def synthesis(data, size): body_mask = None for d in data: if d['name'] == 'body': - body_mask = d['image'].split()[3] - top_outer_mask = np.array(body_mask) - bottom_outer_mask = np.array(body_mask) + body_mask = np.array(d['image'].split()[3]) + left_shoulder = basic_info['body_point_test']['shoulder_left'] + right_shoulder = basic_info['body_point_test']['shoulder_right'] + body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 + _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) + top_outer_mask = np.array(binary_body_mask) + bottom_outer_mask = np.array(binary_body_mask) top = True bottom = True @@ -85,7 +89,10 @@ def synthesis(data, size): all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) # 将叠加区域赋值为相应的像素值 - top_outer_mask[all_y_start:all_y_end, all_x_start:all_x_end] = data[i]['mask'][mask_y_start:mask_y_end, mask_x_start:mask_x_end] + _, sketch_mask = cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) + background = np.zeros_like(top_outer_mask) + background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] + top_outer_mask = background + top_outer_mask elif bottom and data[i]['name'] in ["trousers_front", "skirt_front", "bottoms_front"]: bottom = False mask_shape = data[i]['mask'].shape @@ -94,7 +101,10 @@ def synthesis(data, size): all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) # 将叠加区域赋值为相应的像素值 - 
bottom_outer_mask[all_y_start:all_y_end, all_x_start:all_x_end] = data[i]['mask'][mask_y_start:mask_y_end, mask_x_start:mask_x_end] + _, sketch_mask = cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) + background = np.zeros_like(top_outer_mask) + background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] + bottom_outer_mask = background + bottom_outer_mask elif bottom is False and top is False: break From 16c05c0f5b36cf0096805b60e5caf12166ba1635 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 22 Jul 2024 15:41:07 +0800 Subject: [PATCH 006/103] =?UTF-8?q?feat=20=20=E5=A4=84=E7=90=86=E5=A4=9A?= =?UTF-8?q?=E5=B1=82sketch=20design=E5=87=BA=E7=8E=B0=E7=9A=84=E5=86=85?= =?UTF-8?q?=E6=90=AD=E8=B6=8A=E7=95=8C=E9=97=AE=E9=A2=98=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/items/pipelines/segmentation.py | 2 +- app/service/design/items/pipelines/split.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/app/service/design/items/pipelines/segmentation.py b/app/service/design/items/pipelines/segmentation.py index 7a1773c..8782e75 100644 --- a/app/service/design/items/pipelines/segmentation.py +++ b/app/service/design/items/pipelines/segmentation.py @@ -3,7 +3,6 @@ import os import numpy as np from app.core.config import SEG_CACHE_PATH -from app.service.utils.decorator import ClassCallRunTime from ..builder import PIPELINES from ...utils.design_ensemble import get_seg_result @@ -18,6 +17,7 @@ class Segmentation(object): # @ClassCallRunTime def __call__(self, result): _, seg_result = self.load_seg_result(result["image_id"]) + result['seg_result'] = seg_result if not _: result['seg_result'] = get_seg_result(result["image_id"], result['image']) self.save_seg_result(result['seg_result'][0], result['image_id']) diff --git a/app/service/design/items/pipelines/split.py 
b/app/service/design/items/pipelines/split.py index 031d644..f3da4e7 100644 --- a/app/service/design/items/pipelines/split.py +++ b/app/service/design/items/pipelines/split.py @@ -28,9 +28,9 @@ class Split(object): result['front_mask'] = result['mask'].copy() result['back_mask'] = np.zeros_like(result['mask']) else: - temp_front = result['seg_result'] == 1 + temp_front = result['seg_result'] == 1.0 result['front_mask'] = (result['mask'] * (temp_front + 0).astype(np.uint8)) - temp_back = result['seg_result'] == 2 + temp_back = result['seg_result'] == 2.0 result['back_mask'] = (result['mask'] * (temp_back + 0).astype(np.uint8)) if result['name'] in ('outwear', 'dress', 'blouse', 'skirt', 'trousers', 'tops', 'bottoms'): From ff971d50fc28463d534a4a7c742054c062dc9909 Mon Sep 17 00:00:00 2001 From: xupei Date: Mon, 22 Jul 2024 15:54:11 +0800 Subject: [PATCH 007/103] =?UTF-8?q?=E7=BF=BB=E8=AF=91--=E6=B7=BB=E5=8A=A0?= =?UTF-8?q?=E8=BF=9E=E6=8E=A5=E5=A4=B1=E8=B4=A5=E6=97=B6=E7=9A=84=E9=87=8D?= =?UTF-8?q?=E8=AF=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/core/config.py | 2 ++ .../chat_robot/script/service/CallQWen.py | 10 +++++----- .../chatgpt_for_translation.py | 15 ++++++--------- requirements.txt | Bin 1600 -> 1792 bytes 4 files changed, 13 insertions(+), 14 deletions(-) diff --git a/app/core/config.py b/app/core/config.py index 0e32724..2b713fd 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -175,3 +175,5 @@ PRIORITY_DICT = { 'bag_back': -98, 'earring_back': -99, } + +QWEN_API_KEY = "sk-a6bdf594e1f54a4aa3e9d4d48f8c661f" diff --git a/app/service/chat_robot/script/service/CallQWen.py b/app/service/chat_robot/script/service/CallQWen.py index 414e4ba..f8e6bd5 100644 --- a/app/service/chat_robot/script/service/CallQWen.py +++ b/app/service/chat_robot/script/service/CallQWen.py @@ -2,6 +2,8 @@ import json from typing import Dict, Any from dashscope import Generation +from retry import retry +from 
urllib3.exceptions import NewConnectionError from app.core.config import * from app.service.chat_robot.script.callbacks.qwen_callback_handler import QWenCallbackHandler @@ -107,8 +109,7 @@ qwen = QWenCallbackHandler() def search_from_internet(message): response = Generation.call( model='qwen-turbo', - # api_key='sk-7658298c6b99443c98184a5e634fe6ab', - api_key='sk-a6bdf594e1f54a4aa3e9d4d48f8c661f', + api_key=QWEN_API_KEY, messages=message, tools=tools, # seed=random.randint(1, 10000), # 设置随机数种子seed,如果没有设置,则随机数种子默认为1234 @@ -126,12 +127,11 @@ def get_table_info(table_names): def query_database(sql_string): return CustomDatabase.run(db, sql_string) - +@retry(exceptions=NewConnectionError, tries=3, delay=1) def get_response(messages): response = Generation.call( model='qwen-max', - # api_key='sk-7658298c6b99443c98184a5e634fe6ab', - api_key='sk-a6bdf594e1f54a4aa3e9d4d48f8c661f', + api_key=QWEN_API_KEY, messages=messages, tools=tools, # seed=random.randint(1, 10000), # 设置随机数种子seed,如果没有设置,则随机数种子默认为1234 diff --git a/app/service/prompt_generation/chatgpt_for_translation.py b/app/service/prompt_generation/chatgpt_for_translation.py index d158c0f..fcf8ec5 100644 --- a/app/service/prompt_generation/chatgpt_for_translation.py +++ b/app/service/prompt_generation/chatgpt_for_translation.py @@ -1,13 +1,11 @@ import logging from dashscope import Generation -# from langchain.chains import LLMChain -from langchain_community.chat_models import QianfanChatEndpoint, ChatTongyi -# from langchain.chat_models import ChatOpenAI -from langchain_core.prompts import SystemMessagePromptTemplate, HumanMessagePromptTemplate, ChatPromptTemplate -from langchain_core.runnables import RunnableSequence +from requests import RequestException +from retry import retry + +from app.core.config import QWEN_API_KEY -from app.core.config import OPENAI_MODEL, OPENAI_API_KEY # os.environ["http_proxy"] = "http://127.0.0.1:7890" # os.environ["https_proxy"] = "http://127.0.0.1:7890" @@ -44,12 +42,11 @@ def 
translate_to_en(text): return assistant_output.content - +@retry(exceptions=RequestException, tries=3, delay=1) def get_response(messages): response = Generation.call( model='qwen-max', - # api_key='sk-7658298c6b99443c98184a5e634fe6ab', - api_key='sk-a6bdf594e1f54a4aa3e9d4d48f8c661f', + api_key= QWEN_API_KEY, messages=messages, # seed=random.randint(1, 10000), # 设置随机数种子seed,如果没有设置,则随机数种子默认为1234 result_format='message', # 将输出设置为message形式 diff --git a/requirements.txt b/requirements.txt index 51bd04179946642602d11bd7adf1292c16e18038..50dbfa2e7269f5e4d9c2102d3370e314a44227da 100644 GIT binary patch delta 209 zcmX@W)4;c(k9G1S)^+u~3|tIJ4EYQtKy1uV$6(7~$e;&gnE*)(1|tTLJV>sHA(m~pi8U%6x From b31e5cdadab658d396fa42e567ea4821f42c4ce3 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 22 Jul 2024 16:15:59 +0800 Subject: [PATCH 008/103] =?UTF-8?q?feat=20=20=E5=A4=84=E7=90=86=E5=A4=9A?= =?UTF-8?q?=E5=B1=82sketch=20design=E5=87=BA=E7=8E=B0=E7=9A=84=E5=86=85?= =?UTF-8?q?=E6=90=AD=E8=B6=8A=E7=95=8C=E9=97=AE=E9=A2=98=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index 0200249..9e29cc0 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -27,7 +27,7 @@ def update_progress(process_id, total): if int(progress) <= 100: r.write(key=process_id, value=int(progress) + int(100 / total)) else: - r.write(key=process_id, value=100) + r.write(key=process_id, value=99) return progress elif total == 1: r.write(key=process_id, value=100) @@ -62,8 +62,8 @@ def generate(request_data): obj = futures[future] return_response[obj] = future.result()[0] return_png_mask.extend(future.result()[1]) - final_progress(process_id) upload_results = process_images(return_png_mask) + final_progress(process_id) return return_response From 
492829ce7e2471aa52f63c57bb9d08214736a4f6 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 22 Jul 2024 16:20:41 +0800 Subject: [PATCH 009/103] =?UTF-8?q?feat=20=20=E5=A4=84=E7=90=86=E5=A4=9A?= =?UTF-8?q?=E5=B1=82sketch=20design=E5=87=BA=E7=8E=B0=E7=9A=84=E5=86=85?= =?UTF-8?q?=E6=90=AD=E8=B6=8A=E7=95=8C=E9=97=AE=E9=A2=98=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- requirements.txt | Bin 1792 -> 1832 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/requirements.txt b/requirements.txt index 50dbfa2e7269f5e4d9c2102d3370e314a44227da..1bfec2b331173f5c059f68262d4358586f8f04d1 100644 GIT binary patch delta 67 zcmZqRTfw)Xj#apbA(f$op@^Z9!4?P&81xt{CzrCGW>lVR&!z|@ Date: Tue, 23 Jul 2024 17:44:56 +0800 Subject: [PATCH 010/103] =?UTF-8?q?=E7=BF=BB=E8=AF=91=E6=8E=A5=E5=8F=A3?= =?UTF-8?q?=E4=BC=98=E5=8C=96?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_prompt_generation.py | 2 +- .../chatgpt_for_translation.py | 25 +++++++++++++------ 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/app/api/api_prompt_generation.py b/app/api/api_prompt_generation.py index c227b07..59e5779 100644 --- a/app/api/api_prompt_generation.py +++ b/app/api/api_prompt_generation.py @@ -26,7 +26,7 @@ def prompt_generation(request_data: PromptGenerationImageModel): """ try: logger.info(f"prompt_generation request item is : @@@@@@:{request_data}") - data = translate_to_en(request_data.text) + data = translate_to_en("[" + request_data.text + "]") logger.info(f"prompt_generation response @@@@@@:{data}") except Exception as e: logger.warning(f"prompt_generation Run Exception @@@@@@:{e}") diff --git a/app/service/prompt_generation/chatgpt_for_translation.py b/app/service/prompt_generation/chatgpt_for_translation.py index fcf8ec5..193bcfc 100644 --- a/app/service/prompt_generation/chatgpt_for_translation.py +++ 
b/app/service/prompt_generation/chatgpt_for_translation.py @@ -25,27 +25,38 @@ def translate_to_en(text): output the input text exactly as it is without any modifications or additions. If there are grammatical errors, correct them and then output the sentence.""" ) + + prefix = ( + """ + Translate everything within the brackets [] into English. + Never translate or modify any English input. + The input must be fully translated into coherent English sentences. + Never present the translation results in the format + "The translation of \"Material suave\" into English would be \"Smooth material.\"". Instead, directly output "Smooth material". + """ + ) messages = [ - { - "content": template, # 系统message - "role": "system" - }, + # { + # Translate the entire text and ensure the output is a complete and coherent sentence in English. + # "content": template, # 系统message + # "role": "system" + # }, { # "content": input('请输入:'), # 用户message - "content": text, # 用户message + "content": prefix + text, # 用户message "role": "user" } ] first_response = get_response(messages) assistant_output = first_response.output.choices[0].message - print("translate result : {}".format(assistant_output)) + print("input : {}, translate result : {}".format(text, assistant_output.content)) return assistant_output.content @retry(exceptions=RequestException, tries=3, delay=1) def get_response(messages): response = Generation.call( - model='qwen-max', + model='qwen-turbo', api_key= QWEN_API_KEY, messages=messages, # seed=random.randint(1, 10000), # 设置随机数种子seed,如果没有设置,则随机数种子默认为1234 From dbd75a0d84b384e5dc730729dd909483a64f6784 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 24 Jul 2024 15:18:49 +0800 Subject: [PATCH 011/103] =?UTF-8?q?feat=20fix=20=20=20=E5=9B=BE=E7=89=87?= =?UTF-8?q?=E6=A0=BC=E5=BC=8F=E7=BB=9F=E4=B8=80=EF=BC=8C=E5=8F=AF=E8=83=BD?= =?UTF-8?q?=E4=B8=BAuint16=EF=BC=8C=E6=96=B0=E5=A2=9E=E5=88=A4=E6=96=AD?= =?UTF-8?q?=E8=BD=AC=E7=BD=AE=E4=B8=BAuint8?= MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/utils/oss_client.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/app/service/utils/oss_client.py b/app/service/utils/oss_client.py index c2bb82c..370cd7c 100644 --- a/app/service/utils/oss_client.py +++ b/app/service/utils/oss_client.py @@ -28,6 +28,8 @@ def oss_get_image(bucket, object_name, data_type): image_bytes = image_data.read() image_array = np.frombuffer(image_bytes, np.uint8) # 转成8位无符号整型 image_object = cv2.imdecode(image_array, cv2.IMREAD_UNCHANGED) + if image_object.dtype == np.uint16: + image_object = (image_object / 256).astype('uint8') else: data_bytes = BytesIO(image_data.read()) image_object = Image.open(data_bytes) @@ -63,8 +65,8 @@ if __name__ == '__main__': # url = "aida-users/87/print/956614a2-7e75-4fbe-9ed0-c1831e37a2c9-4-87.png" # url = "aida-users/89/single_logo/123-89.png" # url = "aida-users/89/product_image/string-89.png" - url = "aida-results/result_c6520ce7-33a1-11ef-a8d3-b0dcefbff887.png" - read_type = "PIL" + url = "aida-collection-element/12148/Sketchboard/95ea577b-305b-4a62-b30a-39c0dd3ddb3f.png" + read_type = "cv2" if read_type == "cv2": img = oss_get_image(bucket=url.split('/')[0], object_name=url[url.find('/') + 1:], data_type=read_type) cv2.imshow("", img) From b7e9131cf7556326f2e2d93e863f5013aa35513a Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 24 Jul 2024 15:20:18 +0800 Subject: [PATCH 012/103] =?UTF-8?q?feat=20fix=20=20design=20=E6=81=A2?= =?UTF-8?q?=E5=A4=8Dseg=E6=A8=A1=E5=9E=8B=E6=8E=A8=E7=90=86=EF=BC=8C?= =?UTF-8?q?=E5=B9=B6=E4=BF=9D=E5=AD=98=E6=9C=AC=E5=9C=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_pre_processing/service.py | 80 ++++++++------------ 1 file changed, 33 insertions(+), 47 deletions(-) diff --git a/app/service/design_pre_processing/service.py b/app/service/design_pre_processing/service.py index f69c3ee..b6d868d 
100644 --- a/app/service/design_pre_processing/service.py +++ b/app/service/design_pre_processing/service.py @@ -5,11 +5,12 @@ import cv2 import numpy as np import torch import tritonclient.grpc as grpcclient +from pymilvus import MilvusClient from urllib3.exceptions import ResponseError from app.core.config import * from app.schemas.pre_processing import DesignPreProcessingModel -from app.service.design.utils.design_ensemble import get_keypoint_result +from app.service.design.utils.design_ensemble import get_keypoint_result, get_seg_result from app.service.utils.oss_client import oss_get_image, oss_upload_image @@ -124,9 +125,9 @@ class DesignPreprocessing: bucket_name = item['image_url'].split("/", 1)[0] object_name = item['image_url'].split("/", 1)[1] oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) - print(f"Object '{item['image_url'].split('/', 1)[1]}' overwritten successfully.") + logging.info(f"Object '{item['image_url'].split('/', 1)[1]}' overwritten successfully.") except ResponseError as err: - print(f"Error: {err}") + logging.warning(f"Error: {err}") return image_list # @ RunTime @@ -138,6 +139,12 @@ class DesignPreprocessing: sketch['site'] = 'up' if image_category in ['blouse', 'outwear', 'dress', 'tops'] else 'down' # 推理得到keypoint sketch['keypoint_result'] = self.keypoint_cache(sketch) + if sketch['site'] == 'up': + _, seg_cache = self.load_seg_result(sketch['image_id']) + if not _: + # 推理获得seg 结果 + seg_result = get_seg_result(sketch["image_id"], sketch['image_obj'])[0] + self.save_seg_result(seg_result, sketch['image_id']) if IF_DEBUG_SHOW: debug_show_image = sketch['obj'].copy() @@ -236,58 +243,37 @@ class DesignPreprocessing: return image_list @staticmethod - def select_seg_result(image_id, image_obj): + def load_seg_result(image_id): + file_path = f"{SEG_CACHE_PATH}{image_id}.npy" try: - # 如果shape不匹配 返回false - result = np.load(f"seg_result/{image_id}.npy").astype(np.int64) - if result.shape[1] == 
image_obj.shape[0] and result.shape[2] == image_obj.shape[1]: - return result - else: - return False - except FileNotFoundError as e: - logging.warning(f"{image_id} Image segmentation results cache file does not exist : {e}") - return False + seg_result = np.load(file_path) + return True, seg_result + except FileNotFoundError: + logging.info("文件不存在") + return False, None + except Exception as e: + logging.warning(f"加载失败: {e}") + return False, None @staticmethod - def search_seg_result(image_id, ori_shape): + def save_seg_result(seg_result, image_id): + file_path = f"{SEG_CACHE_PATH}{image_id}.npy" try: - # connections.connect(alias=MILVUS_ALIAS, host=MILVUS_DB_HOST, port=MILVUS_PORT) - # collection = Collection(MILVUS_TABLE_SEG) # Get an existing collection. - # collection.load() - # start_time = time.time() - # res = collection.query( - # expr=f"seg_id == {image_id}", - # offset=0, - # limit=10, - # output_fields=["seg_cache"], - # ) - # logging.info(f"search seg cache time : {time.time() - start_time}") - - # if len(res): - # vector = np.reshape(res[0]['seg_cache'] + res[1]['seg_cache'], (224, 224)) - # array_2d_exact = F.interpolate(torch.tensor(vector).unsqueeze(0).unsqueeze(0), size=ori_shape, mode='bilinear', align_corners=False) - # array_2d_exact = array_2d_exact.squeeze().numpy() - # return array_2d_exact - # else: - return False + np.save(file_path, seg_result) + logging.info(f"保存成功,{os.path.abspath(file_path)}") except Exception as e: - logging.warning(f"{image_id} Image segmentation results cache file does not exist : {e}") - return False + logging.warning(f"保存失败: {e}") def keypoint_cache(self, sketch): try: - # connections.connect(alias=MILVUS_ALIAS, host=MILVUS_DB_HOST, port=MILVUS_PORT) - # collection = Collection(MILVUS_TABLE_KEYPOINT) # Get an existing collection. 
- # collection.load() - start_time = time.time() - # res = collection.query( - # expr=f"keypoint_id == {sketch['image_id']}", - # offset=0, - # limit=1, - # output_fields=["keypoint_cache", "keypoint_site"], - # ) - res = [] - logging.info(f"search keypoint time : {time.time() - start_time}") + client = MilvusClient(uri=MILVUS_URL, token=MILVUS_TOKEN, db_name=MILVUS_ALIAS) + keypoint_id = sketch['image_id'] + res = client.query( + collection_name=MILVUS_TABLE_KEYPOINT, + # ids=[keypoint_id], + filter=f"keypoint_id == {keypoint_id}", + output_fields=['keypoint_vector', 'keypoint_site'] + ) if len(res) == 0: # 没有结果 直接推理拿结果 并保存 keypoint_infer_result = self.infer_keypoint_result(sketch) From 8363ec9ab3ce9b010e5ea9c07573e4daeafd4007 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 24 Jul 2024 15:21:06 +0800 Subject: [PATCH 013/103] =?UTF-8?q?feat=20fix=20=20design=20pipeline=20?= =?UTF-8?q?=E5=89=94=E9=99=A4=E8=BE=B9=E7=BC=98=E6=A3=80=E6=B5=8B=E4=BB=BB?= =?UTF-8?q?=E5=8A=A1=EF=BC=8C=E7=9B=B4=E6=8E=A5=E7=94=A8=E5=88=86=E5=89=B2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/items/bottom.py | 1 + .../design/items/pipelines/segmentation.py | 16 ++++++++----- app/service/design/items/pipelines/split.py | 23 ++----------------- app/service/design/items/top.py | 4 ++-- 4 files changed, 15 insertions(+), 29 deletions(-) diff --git a/app/service/design/items/bottom.py b/app/service/design/items/bottom.py index eb575fb..e01ec02 100644 --- a/app/service/design/items/bottom.py +++ b/app/service/design/items/bottom.py @@ -10,6 +10,7 @@ class Bottom(Clothing): dict(type='LoadImageFromFile', path=kwargs['path'], color=kwargs['color'], print_dict=kwargs['print']), dict(type='KeypointDetection'), dict(type='ContourDetection'), + # dict(type='Segmentation'), dict(type='Painting', painting_flag=True), dict(type='PrintPainting', print_flag=True), dict(type='Scaling'), diff --git 
a/app/service/design/items/pipelines/segmentation.py b/app/service/design/items/pipelines/segmentation.py index 8782e75..2966ee7 100644 --- a/app/service/design/items/pipelines/segmentation.py +++ b/app/service/design/items/pipelines/segmentation.py @@ -9,18 +9,22 @@ from ...utils.design_ensemble import get_seg_result @PIPELINES.register_module() class Segmentation(object): - def __init__(self, device='cpu', show=False, debug=None): - self.show = show - self.device = device - self.debug = debug # @ClassCallRunTime def __call__(self, result): + # 本地查询seg 缓存是否存在 _, seg_result = self.load_seg_result(result["image_id"]) result['seg_result'] = seg_result if not _: - result['seg_result'] = get_seg_result(result["image_id"], result['image']) - self.save_seg_result(result['seg_result'][0], result['image_id']) + # 推理获得seg 结果 + seg_result = get_seg_result(result["image_id"], result['image'])[0] + self.save_seg_result(seg_result, result['image_id']) + # 处理前片后片 + temp_front = seg_result == 1.0 + result['front_mask'] = (255 * (temp_front + 0).astype(np.uint8)) + temp_back = seg_result == 2.0 + result['back_mask'] = (255 * (temp_back + 0).astype(np.uint8)) + result['mask'] = result['front_mask'] + result['back_mask'] return result @staticmethod diff --git a/app/service/design/items/pipelines/split.py b/app/service/design/items/pipelines/split.py index f3da4e7..5b7f1bc 100644 --- a/app/service/design/items/pipelines/split.py +++ b/app/service/design/items/pipelines/split.py @@ -22,29 +22,10 @@ class Split(object): # KNet def __call__(self, result): try: - if 'mask' not in result.keys(): - raise KeyError(f'Cannot find mask in result dict, please check ContourDetection is included in process pipelines.') - if 'seg_result' not in result.keys(): # 没过seg模型 - result['front_mask'] = result['mask'].copy() - result['back_mask'] = np.zeros_like(result['mask']) - else: - temp_front = result['seg_result'] == 1.0 - result['front_mask'] = (result['mask'] * (temp_front + 0).astype(np.uint8)) - 
temp_back = result['seg_result'] == 2.0 - result['back_mask'] = (result['mask'] * (temp_back + 0).astype(np.uint8)) if result['name'] in ('outwear', 'dress', 'blouse', 'skirt', 'trousers', 'tops', 'bottoms'): - if len(result['front_mask'].shape) > 2: - front_mask = result['front_mask'][0] - else: - front_mask = result['front_mask'] - - if len(result['back_mask'].shape) > 2: - back_mask = result['back_mask'][0] - else: - back_mask = result['back_mask'] - - # rgba_image = rgb_to_rgba((result['final_image'].shape[0], result['final_image'].shape[1]), result['final_image'], front_mask + back_mask) + front_mask = result['front_mask'] + back_mask = result['back_mask'] rgba_image = rgb_to_rgba(result['final_image'], front_mask + back_mask) new_size = (int(rgba_image.shape[1] * result["scale"] * result["resize_scale"][0]), int(rgba_image.shape[0] * result["scale"] * result["resize_scale"][1])) rgba_image = cv2.resize(rgba_image, new_size) diff --git a/app/service/design/items/top.py b/app/service/design/items/top.py index 135328f..fc0d2a5 100644 --- a/app/service/design/items/top.py +++ b/app/service/design/items/top.py @@ -9,8 +9,8 @@ class Top(Clothing): pipeline = [ dict(type='LoadImageFromFile', path=kwargs['path'], color=kwargs['color'], print_dict=kwargs['print']), dict(type='KeypointDetection'), - dict(type='ContourDetection'), - dict(type='Segmentation', device='cpu', show=False, debug=kwargs['debug']), + # dict(type='ContourDetection'), + dict(type='Segmentation'), dict(type='Painting', painting_flag=True), dict(type='PrintPainting', print_flag=True), # dict(type='ImageShow', key=['image', 'mask', 'seg_visualize', 'pattern_image']), From 20e5ead9c585d8266ec1adc270e768f2415117a0 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 25 Jul 2024 09:47:27 +0800 Subject: [PATCH 014/103] =?UTF-8?q?feat=20fix=20=20design=20pipeline=20=20?= =?UTF-8?q?contour=20detection=20=E6=96=B0=E5=A2=9E=E5=89=8D=E5=90=8E?= =?UTF-8?q?=E7=89=87?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/items/pipelines/contour_detection.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/service/design/items/pipelines/contour_detection.py b/app/service/design/items/pipelines/contour_detection.py index 018dbca..487d2d6 100644 --- a/app/service/design/items/pipelines/contour_detection.py +++ b/app/service/design/items/pipelines/contour_detection.py @@ -43,7 +43,8 @@ class ContourDetection(object): result['mask'] = Mask else: result['mask'] = cv2.bitwise_and(Mask, result['pre_mask']) - + result['front_mask'] = result['mask'] + result['back_mask'] = result['mask'] return result @staticmethod From 795411f96cf032206b894c64c0abf22dff53c3d4 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 25 Jul 2024 10:01:25 +0800 Subject: [PATCH 015/103] =?UTF-8?q?feat=20fix=20=20design=20pipeline=20=20?= =?UTF-8?q?contour=20detection=20=E6=96=B0=E5=A2=9E=E5=89=8D=E5=90=8E?= =?UTF-8?q?=E7=89=87?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/service.py | 16 +++--- app/service/design/utils/upload_image.py | 68 ++++++++++++------------ 2 files changed, 42 insertions(+), 42 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index 9e29cc0..b8c5a8c 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -62,7 +62,7 @@ def generate(request_data): obj = futures[future] return_response[obj] = future.result()[0] return_png_mask.extend(future.result()[1]) - upload_results = process_images(return_png_mask) + # upload_results = process_images(return_png_mask) final_progress(process_id) return return_response @@ -90,13 +90,13 @@ def process_object(cfg, process_id, total): else: layers = sorted(layers.layer, key=lambda x: PRIORITY_DICT.get(x['name'], float('inf'))) # 上传所有图片 - for layer in layers: - if 'image' in layer.keys() and layer['image'] is not None: - 
uploaded_images.append({'image_obj': layer['image'], 'image_url': layer['image_url'], 'image_type': 'image'}) - if 'pattern_image' in layer.keys() and layer['pattern_image'] is not None: - uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url'], 'image_type': 'pattern_image'}) - if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: - uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) + # for layer in layers: + # if 'image' in layer.keys() and layer['image'] is not None: + # uploaded_images.append({'image_obj': layer['image'], 'image_url': layer['image_url'], 'image_type': 'image'}) + # if 'pattern_image' in layer.keys() and layer['pattern_image'] is not None: + # uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url'], 'image_type': 'pattern_image'}) + # if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: + # uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) # 合成 items_response['synthesis_url'] = synthesis(layers, body_size, basic_info) diff --git a/app/service/design/utils/upload_image.py b/app/service/design/utils/upload_image.py index 610c188..e48dabd 100644 --- a/app/service/design/utils/upload_image.py +++ b/app/service/design/utils/upload_image.py @@ -17,39 +17,39 @@ from app.service.utils.decorator import RunTime from app.service.utils.oss_client import oss_upload_image -# @RunTime -# def upload_png_mask(front_image, object_name, mask=None): -# try: -# mask_url = None -# if mask is not None: -# mask_inverted = cv2.bitwise_not(mask) -# # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 -# rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) -# rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] -# # image_bytes = io.BytesIO() -# # image_bytes.write(cv2.imencode('.png', rgba_image)[1].tobytes()) -# 
# image_bytes.seek(0) -# # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), content_type='image/png').object_name}" -# # oss upload #################### -# req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) -# mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" -# -# image_data = io.BytesIO() -# front_image.save(image_data, format='PNG') -# image_data.seek(0) -# image_bytes = image_data.read() -# # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" -# req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) -# image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" -# return front_image, image_url, mask_url -# except Exception as e: -# logging.warning(f"upload_png_mask runtime exception : {e}") - - @RunTime def upload_png_mask(front_image, object_name, mask=None): - mask_url = None - if mask is not None: - mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" - image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" - return front_image, image_url, mask_url + try: + mask_url = None + if mask is not None: + mask_inverted = cv2.bitwise_not(mask) + # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 + rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) + rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] + # image_bytes = io.BytesIO() + # image_bytes.write(cv2.imencode('.png', rgba_image)[1].tobytes()) + # image_bytes.seek(0) + # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), content_type='image/png').object_name}" + # oss upload #################### + req = 
oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) + mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" + + image_data = io.BytesIO() + front_image.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) + image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" + return front_image, image_url, mask_url + except Exception as e: + logging.warning(f"upload_png_mask runtime exception : {e}") + + +# @RunTime +# def upload_png_mask(front_image, object_name, mask=None): +# mask_url = None +# if mask is not None: +# mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" +# image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" +# return front_image, image_url, mask_url From 8dd6fc924c14ef9f68f8e7e4c5fe556b063c3645 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 25 Jul 2024 10:33:25 +0800 Subject: [PATCH 016/103] =?UTF-8?q?feat=20fix=20=20design=20load=20image?= =?UTF-8?q?=20=E5=88=A4=E6=96=AD=E5=9B=BE=E7=89=87size=20=E5=A6=82?= =?UTF-8?q?=E6=9E=9C=E5=B0=8F=E4=BA=8E50=20=E5=88=99resize=20=E4=B8=80?= =?UTF-8?q?=E5=80=8D=20=EF=BC=8C=E5=90=A6=E5=88=99=E4=B8=8D=E8=83=BD?= =?UTF-8?q?=E6=8E=A8=E7=90=86?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/items/pipelines/loading.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/app/service/design/items/pipelines/loading.py b/app/service/design/items/pipelines/loading.py index f0f4188..04dc4d8 100644 --- a/app/service/design/items/pipelines/loading.py +++ 
b/app/service/design/items/pipelines/loading.py @@ -1,6 +1,5 @@ import cv2 -from app.service.utils.decorator import RunTime, ClassCallRunTime from app.service.utils.oss_client import oss_get_image from ..builder import PIPELINES @@ -47,15 +46,18 @@ class LoadImageFromFile(object): @staticmethod def read_image(image_path): image_mask = None - # file = self.minio_client.get_object(image_path.split("/", 1)[0], image_path.split("/", 1)[1]).data - # image = cv2.imdecode(np.frombuffer(file, np.uint8), 1) - image = oss_get_image(bucket=image_path.split("/", 1)[0], object_name=image_path.split("/", 1)[1], data_type="cv2") if len(image.shape) == 2: image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB) if image.shape[2] == 4: # 如果是四通道 mask image_mask = image[:, :, 3] image = image[:, :, :3] + + if image.shape[:2] <= (50, 50): + # 计算新尺寸 + new_size = (image.shape[1] * 2, image.shape[0] * 2) + # 调整大小 + image = cv2.resize(image, new_size, interpolation=cv2.INTER_LINEAR) return image, image_mask From 9093eaf674d9e874e8b7760bd39500ffab4f8d41 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 25 Jul 2024 10:57:59 +0800 Subject: [PATCH 017/103] =?UTF-8?q?feat=20fix=20=20=20design=20=E5=A4=9A?= =?UTF-8?q?=E8=BF=9B=E7=A8=8B=E5=A4=84=E7=90=86?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/service.py | 5 +- app/service/design/utils/upload_image.py | 68 ++++++++++++------------ 2 files changed, 37 insertions(+), 36 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index b8c5a8c..d7f1899 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -1,5 +1,6 @@ import concurrent.futures import io +from concurrent.futures import ProcessPoolExecutor import cv2 @@ -62,7 +63,7 @@ def generate(request_data): obj = futures[future] return_response[obj] = future.result()[0] return_png_mask.extend(future.result()[1]) - # upload_results = process_images(return_png_mask) + 
upload_results = process_images(return_png_mask) final_progress(process_id) return return_response @@ -151,7 +152,7 @@ def process_object(cfg, process_id, total): @RunTime def process_images(images): - with concurrent.futures.ThreadPoolExecutor() as executor: + with ProcessPoolExecutor() as executor: results = list(executor.map(upload_images, images)) # results = [] # for image in images: diff --git a/app/service/design/utils/upload_image.py b/app/service/design/utils/upload_image.py index e48dabd..610c188 100644 --- a/app/service/design/utils/upload_image.py +++ b/app/service/design/utils/upload_image.py @@ -17,39 +17,39 @@ from app.service.utils.decorator import RunTime from app.service.utils.oss_client import oss_upload_image -@RunTime -def upload_png_mask(front_image, object_name, mask=None): - try: - mask_url = None - if mask is not None: - mask_inverted = cv2.bitwise_not(mask) - # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 - rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) - rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] - # image_bytes = io.BytesIO() - # image_bytes.write(cv2.imencode('.png', rgba_image)[1].tobytes()) - # image_bytes.seek(0) - # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), content_type='image/png').object_name}" - # oss upload #################### - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) - mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" - - image_data = io.BytesIO() - front_image.save(image_data, format='PNG') - image_data.seek(0) - image_bytes = image_data.read() - # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" - req = oss_upload_image(bucket=AIDA_CLOTHING, 
object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) - image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" - return front_image, image_url, mask_url - except Exception as e: - logging.warning(f"upload_png_mask runtime exception : {e}") - - # @RunTime # def upload_png_mask(front_image, object_name, mask=None): -# mask_url = None -# if mask is not None: -# mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" -# image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" -# return front_image, image_url, mask_url +# try: +# mask_url = None +# if mask is not None: +# mask_inverted = cv2.bitwise_not(mask) +# # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 +# rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) +# rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] +# # image_bytes = io.BytesIO() +# # image_bytes.write(cv2.imencode('.png', rgba_image)[1].tobytes()) +# # image_bytes.seek(0) +# # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), content_type='image/png').object_name}" +# # oss upload #################### +# req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) +# mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" +# +# image_data = io.BytesIO() +# front_image.save(image_data, format='PNG') +# image_data.seek(0) +# image_bytes = image_data.read() +# # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" +# req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) +# image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" +# return front_image, image_url, mask_url +# except Exception as e: +# logging.warning(f"upload_png_mask runtime exception : {e}") + + 
+@RunTime +def upload_png_mask(front_image, object_name, mask=None): + mask_url = None + if mask is not None: + mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" + image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" + return front_image, image_url, mask_url From 9a1b4152317381ff76561b89bbbc005ca592fbf9 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 25 Jul 2024 11:00:09 +0800 Subject: [PATCH 018/103] =?UTF-8?q?feat=20fix=20=20=20design=20=E5=A4=9A?= =?UTF-8?q?=E8=BF=9B=E7=A8=8B=E5=A4=84=E7=90=86?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/utils/decorator.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/app/service/utils/decorator.py b/app/service/utils/decorator.py index fcf8666..3e86182 100644 --- a/app/service/utils/decorator.py +++ b/app/service/utils/decorator.py @@ -7,8 +7,9 @@ def RunTime(func): t1 = time.time() res = func(*args, **kwargs) t2 = time.time() - if t2 - t1 > 0.05: - logging.info(f"function:【{func.__name__}】,runtime:【{str(t2 - t1)}】s") + # if t2 - t1 > 0.05: + # logging.info(f"function:【{func.__name__}】,runtime:【{str(t2 - t1)}】s") + logging.info(f"function:【{func.__name__}】,runtime:【{str(t2 - t1)}】s") return res return wrapper From 6e007b36cf42b3ec04e57b32e4edb8cc97f872ed Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 25 Jul 2024 11:21:27 +0800 Subject: [PATCH 019/103] feat fix --- app/service/design/service.py | 17 ++++++++--------- app/service/design/utils/upload_image.py | 2 +- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index d7f1899..9e29cc0 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -1,6 +1,5 @@ import concurrent.futures import io -from concurrent.futures import ProcessPoolExecutor import cv2 @@ -91,13 +90,13 @@ def process_object(cfg, process_id, total): else: layers = sorted(layers.layer, key=lambda 
x: PRIORITY_DICT.get(x['name'], float('inf'))) # 上传所有图片 - # for layer in layers: - # if 'image' in layer.keys() and layer['image'] is not None: - # uploaded_images.append({'image_obj': layer['image'], 'image_url': layer['image_url'], 'image_type': 'image'}) - # if 'pattern_image' in layer.keys() and layer['pattern_image'] is not None: - # uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url'], 'image_type': 'pattern_image'}) - # if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: - # uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) + for layer in layers: + if 'image' in layer.keys() and layer['image'] is not None: + uploaded_images.append({'image_obj': layer['image'], 'image_url': layer['image_url'], 'image_type': 'image'}) + if 'pattern_image' in layer.keys() and layer['pattern_image'] is not None: + uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url'], 'image_type': 'pattern_image'}) + if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: + uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) # 合成 items_response['synthesis_url'] = synthesis(layers, body_size, basic_info) @@ -152,7 +151,7 @@ def process_object(cfg, process_id, total): @RunTime def process_images(images): - with ProcessPoolExecutor() as executor: + with concurrent.futures.ThreadPoolExecutor() as executor: results = list(executor.map(upload_images, images)) # results = [] # for image in images: diff --git a/app/service/design/utils/upload_image.py b/app/service/design/utils/upload_image.py index 610c188..20036e1 100644 --- a/app/service/design/utils/upload_image.py +++ b/app/service/design/utils/upload_image.py @@ -46,7 +46,7 @@ from app.service.utils.oss_client import oss_upload_image # logging.warning(f"upload_png_mask 
runtime exception : {e}") -@RunTime +# @RunTime def upload_png_mask(front_image, object_name, mask=None): mask_url = None if mask is not None: From b9b31d26271eaa38dcb4d9a78f2007da068fd7ab Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 25 Jul 2024 11:52:31 +0800 Subject: [PATCH 020/103] feat fix --- app/service/design/service.py | 16 +++--- app/service/design/utils/upload_image.py | 64 ++++++++++++------------ 2 files changed, 40 insertions(+), 40 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index 9e29cc0..b8c5a8c 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -62,7 +62,7 @@ def generate(request_data): obj = futures[future] return_response[obj] = future.result()[0] return_png_mask.extend(future.result()[1]) - upload_results = process_images(return_png_mask) + # upload_results = process_images(return_png_mask) final_progress(process_id) return return_response @@ -90,13 +90,13 @@ def process_object(cfg, process_id, total): else: layers = sorted(layers.layer, key=lambda x: PRIORITY_DICT.get(x['name'], float('inf'))) # 上传所有图片 - for layer in layers: - if 'image' in layer.keys() and layer['image'] is not None: - uploaded_images.append({'image_obj': layer['image'], 'image_url': layer['image_url'], 'image_type': 'image'}) - if 'pattern_image' in layer.keys() and layer['pattern_image'] is not None: - uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url'], 'image_type': 'pattern_image'}) - if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: - uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) + # for layer in layers: + # if 'image' in layer.keys() and layer['image'] is not None: + # uploaded_images.append({'image_obj': layer['image'], 'image_url': layer['image_url'], 'image_type': 'image'}) + # if 'pattern_image' in layer.keys() and 
layer['pattern_image'] is not None: + # uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url'], 'image_type': 'pattern_image'}) + # if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: + # uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) # 合成 items_response['synthesis_url'] = synthesis(layers, body_size, basic_info) diff --git a/app/service/design/utils/upload_image.py b/app/service/design/utils/upload_image.py index 20036e1..9039ce7 100644 --- a/app/service/design/utils/upload_image.py +++ b/app/service/design/utils/upload_image.py @@ -18,38 +18,38 @@ from app.service.utils.oss_client import oss_upload_image # @RunTime -# def upload_png_mask(front_image, object_name, mask=None): -# try: -# mask_url = None -# if mask is not None: -# mask_inverted = cv2.bitwise_not(mask) -# # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 -# rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) -# rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] -# # image_bytes = io.BytesIO() -# # image_bytes.write(cv2.imencode('.png', rgba_image)[1].tobytes()) -# # image_bytes.seek(0) -# # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), content_type='image/png').object_name}" -# # oss upload #################### -# req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) -# mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" -# -# image_data = io.BytesIO() -# front_image.save(image_data, format='PNG') -# image_data.seek(0) -# image_bytes = image_data.read() -# # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" -# req = 
oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) -# image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" -# return front_image, image_url, mask_url -# except Exception as e: -# logging.warning(f"upload_png_mask runtime exception : {e}") +def upload_png_mask(front_image, object_name, mask=None): + try: + mask_url = None + if mask is not None: + mask_inverted = cv2.bitwise_not(mask) + # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 + rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) + rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] + # image_bytes = io.BytesIO() + # image_bytes.write(cv2.imencode('.png', rgba_image)[1].tobytes()) + # image_bytes.seek(0) + # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), content_type='image/png').object_name}" + # oss upload #################### + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) + mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" + + image_data = io.BytesIO() + front_image.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) + image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" + return front_image, image_url, mask_url + except Exception as e: + logging.warning(f"upload_png_mask runtime exception : {e}") # @RunTime -def upload_png_mask(front_image, object_name, mask=None): - mask_url = None - if mask is not None: - mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" - image_url = 
f"{AIDA_CLOTHING}/image/image_{object_name}.png" - return front_image, image_url, mask_url +# def upload_png_mask(front_image, object_name, mask=None): +# mask_url = None +# if mask is not None: +# mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" +# image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" +# return front_image, image_url, mask_url From 58bc68ff5e1b94f07c6a6e0728bf802f653e3625 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 1 Aug 2024 10:08:25 +0800 Subject: [PATCH 021/103] =?UTF-8?q?feat=20=20=20design=20=E9=A2=84?= =?UTF-8?q?=E5=A4=84=E7=90=86=E6=96=B0=E5=A2=9Epadding=20=E5=A4=84?= =?UTF-8?q?=E7=90=86=20=E4=BD=BF=E5=9B=BE=E7=89=87=E4=B8=8D=E8=A6=81?= =?UTF-8?q?=E5=AE=8C=E5=85=A8=E8=B4=B4=E8=BE=B9=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_pre_processing/service.py | 34 +++++++++++++------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/app/service/design_pre_processing/service.py b/app/service/design_pre_processing/service.py index b6d868d..bd6f81c 100644 --- a/app/service/design_pre_processing/service.py +++ b/app/service/design_pre_processing/service.py @@ -60,6 +60,7 @@ class DesignPreprocessing: def bounding_box(self, image_list): for item in image_list: image = item['image_obj'] + height, width = image.shape[:2] # 使用Canny边缘检测来检测物体的轮廓 edges = cv2.Canny(image, 50, 150) # 查找轮廓 @@ -83,16 +84,25 @@ class DesignPreprocessing: if len(contours) > 0: cropped_image = image[y_min:y_max, x_min:x_max] item['obj'] = cropped_image # 新shape图像 - # 取消直接覆盖,新增size判断 - # try: - # # 覆盖到minio - # image_bytes = cv2.imencode(".jpg", cropped_image)[1].tobytes() - # self.minio_client.put_object(item['image_url'].split("/", 1)[0], item['image_url'].split("/", 1)[1], io.BytesIO(image_bytes), len(image_bytes), content_type="image/jpeg", ) - # print(f"Object '{item['image_url'].split('/', 1)[1]}' overwritten successfully.") - # except ResponseError as err: - # 
print(f"Error: {err}") else: item['obj'] = image + + padding_top = max(20 - y_min, 0) + padding_bottom = max(20 - (height - y_max), 0) + padding_left = max(20 - x_min, 0) + padding_right = max(20 - (width - x_max), 0) + + # 添加padding + padded_image = cv2.copyMakeBorder( + image, + padding_top, + padding_bottom, + padding_left, + padding_right, + cv2.BORDER_CONSTANT, + value=(255, 255, 255) # 你可以选择填充颜色,例如黑色 + ) + item['obj'] = padded_image return image_list def super_resolution(self, image_list): @@ -334,7 +344,7 @@ class DesignPreprocessing: ] try: # connections.connect(alias=MILVUS_ALIAS, host=MILVUS_DB_HOST, port=MILVUS_PORT) - start_time = time.time() + # start_time = time.time() # collection = Collection(MILVUS_TABLE_KEYPOINT) # Get an existing collection. # mr = collection.upsert(data) # logging.info(f"save keypoint time : {time.time() - start_time}") @@ -348,9 +358,9 @@ if __name__ == '__main__': data = { "sketches": [ { - "image_category": "dress", - "image_id": "107903", - "image_url": "aida-sys-image/images/female/dress/0628000000.jpg" + "image_category": "blouse", + "image_id": "123123123", + "image_url": "test/0628000198.jpg" } ] } From 6b236459537955a84dbde5ecd6b37c22ca48f81d Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 1 Aug 2024 10:12:13 +0800 Subject: [PATCH 022/103] =?UTF-8?q?feat=20=20=20design=20=E9=A2=84?= =?UTF-8?q?=E5=A4=84=E7=90=86=E6=96=B0=E5=A2=9Epadding=20=E5=A4=84?= =?UTF-8?q?=E7=90=86=20=E4=BD=BF=E5=9B=BE=E7=89=87=E4=B8=8D=E8=A6=81?= =?UTF-8?q?=E5=AE=8C=E5=85=A8=E8=B4=B4=E8=BE=B9=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_pre_processing/service.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/app/service/design_pre_processing/service.py b/app/service/design_pre_processing/service.py index bd6f81c..1b4f33c 100644 --- a/app/service/design_pre_processing/service.py +++ b/app/service/design_pre_processing/service.py @@ -142,6 +142,7 @@ class 
DesignPreprocessing: # @ RunTime def infer_image(self, image_list): + seg_result = None for sketch in image_list: # 小写 image_category = sketch['image_category'].lower() @@ -166,6 +167,7 @@ class DesignPreprocessing: points_list.append((int(i[1]), int(i[0]))) for point in points_list: cv2.circle(debug_show_image, point, point_size, point_color, thickness) + cv2.imshow("seg_result", seg_result) cv2.imshow("", debug_show_image) cv2.waitKey(0) # # 关键点在上部则推理seg From 4444c00949f78f8f8cde34867d1ed485abfa91bb Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 1 Aug 2024 16:58:49 +0800 Subject: [PATCH 023/103] =?UTF-8?q?feat=20=20=20=E7=BB=93=E6=9E=9C?= =?UTF-8?q?=E5=9B=BE=E5=AE=BD=E5=BA=A6=E8=87=AA=E9=80=82=E5=BA=94=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/service.py | 20 ++++++++++++++++++-- app/service/design/utils/synthesis_item.py | 16 ++++++++++------ 2 files changed, 28 insertions(+), 8 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index b8c5a8c..e492089 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -97,9 +97,9 @@ def process_object(cfg, process_id, total): # uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url'], 'image_type': 'pattern_image'}) # if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: # uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) - + layers, new_size = update_base_size_priority(layers, body_size) # 合成 - items_response['synthesis_url'] = synthesis(layers, body_size, basic_info) + items_response['synthesis_url'] = synthesis(layers, new_size, basic_info) for lay in layers: items_response['layers'].append({ @@ -177,3 +177,19 @@ def upload_images(image_obj): rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] req = oss_upload_image(bucket=bucket_name, 
object_name=object_name, image_bytes=cv2.imencode('.png', rgba_image)[1]) return image_obj['image_url'] + + +def update_base_size_priority(layers, size): + # 计算新图片的宽度和高度 + max_x = max([layer["position"][1] + layer["image"].size[1] for layer in layers]) + min_x = min([layer["position"][1] for layer in layers]) + max_y = max([layer["position"][0] + layer["image"].size[0] for layer in layers]) + min_y = min([layer["position"][0] for layer in layers]) + new_width = max(size[0], max_x - min_x) + new_height = max(size[1], max_y - min_y) + + # 更新图片的坐标 + for layer in layers: + updated_coords = [layer["position"][1] - min_x, layer["position"][0] - min_y] + layer['position'] = updated_coords + return layers, (new_width, new_height) diff --git a/app/service/design/utils/synthesis_item.py b/app/service/design/utils/synthesis_item.py index d560f37..c8be2c7 100644 --- a/app/service/design/utils/synthesis_item.py +++ b/app/service/design/utils/synthesis_item.py @@ -63,14 +63,18 @@ def synthesis(data, size, basic_info): # 创建底图 base_image = Image.new('RGBA', size, (0, 0, 0, 0)) try: - all_mask_shape = (size[1], size[0]) body_mask = None for d in data: if d['name'] == 'body': - body_mask = np.array(d['image'].split()[3]) - left_shoulder = basic_info['body_point_test']['shoulder_left'] - right_shoulder = basic_info['body_point_test']['shoulder_right'] + # 创建一个新的宽高透明图像, 把模特贴上去获取mask + transparent_image = Image.new("RGBA", size, (0, 0, 0, 0)) + transparent_image.paste(d['image'], d['position'], d['image']) + body_mask = np.array(transparent_image.split()[3]) + + # 根据新的坐标获取新的肩点 + left_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_left'], d['position'])] + right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], d['position'])] body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) top_outer_mask = 
np.array(binary_body_mask) @@ -114,13 +118,13 @@ def synthesis(data, size, basic_info): if layer['image'] is not None: if layer['name'] != "body": test_image = Image.new('RGBA', size, (0, 0, 0, 0)) - test_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) + test_image.paste(layer['image'], layer['position'], layer['image']) mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) mask_alpha = Image.fromarray(mask_data) cropped_image = Image.composite(test_image, Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) base_image.paste(test_image, (0, 0), cropped_image) else: - base_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) + base_image.paste(layer['image'], layer['position'], layer['image']) result_image = base_image From c6348a7979354a5102f571861eb89c7cf6fd158e Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 1 Aug 2024 17:03:58 +0800 Subject: [PATCH 024/103] =?UTF-8?q?Revert=20"feat=20=20=20=E7=BB=93?= =?UTF-8?q?=E6=9E=9C=E5=9B=BE=E5=AE=BD=E5=BA=A6=E8=87=AA=E9=80=82=E5=BA=94?= =?UTF-8?q?"?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit 4444c00949f78f8f8cde34867d1ed485abfa91bb. 
--- app/service/design/service.py | 20 ++------------------ app/service/design/utils/synthesis_item.py | 16 ++++++---------- 2 files changed, 8 insertions(+), 28 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index e492089..b8c5a8c 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -97,9 +97,9 @@ def process_object(cfg, process_id, total): # uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url'], 'image_type': 'pattern_image'}) # if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: # uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) - layers, new_size = update_base_size_priority(layers, body_size) + # 合成 - items_response['synthesis_url'] = synthesis(layers, new_size, basic_info) + items_response['synthesis_url'] = synthesis(layers, body_size, basic_info) for lay in layers: items_response['layers'].append({ @@ -177,19 +177,3 @@ def upload_images(image_obj): rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] req = oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=cv2.imencode('.png', rgba_image)[1]) return image_obj['image_url'] - - -def update_base_size_priority(layers, size): - # 计算新图片的宽度和高度 - max_x = max([layer["position"][1] + layer["image"].size[1] for layer in layers]) - min_x = min([layer["position"][1] for layer in layers]) - max_y = max([layer["position"][0] + layer["image"].size[0] for layer in layers]) - min_y = min([layer["position"][0] for layer in layers]) - new_width = max(size[0], max_x - min_x) - new_height = max(size[1], max_y - min_y) - - # 更新图片的坐标 - for layer in layers: - updated_coords = [layer["position"][1] - min_x, layer["position"][0] - min_y] - layer['position'] = updated_coords - return layers, (new_width, new_height) diff --git a/app/service/design/utils/synthesis_item.py 
b/app/service/design/utils/synthesis_item.py index c8be2c7..d560f37 100644 --- a/app/service/design/utils/synthesis_item.py +++ b/app/service/design/utils/synthesis_item.py @@ -63,18 +63,14 @@ def synthesis(data, size, basic_info): # 创建底图 base_image = Image.new('RGBA', size, (0, 0, 0, 0)) try: + all_mask_shape = (size[1], size[0]) body_mask = None for d in data: if d['name'] == 'body': - # 创建一个新的宽高透明图像, 把模特贴上去获取mask - transparent_image = Image.new("RGBA", size, (0, 0, 0, 0)) - transparent_image.paste(d['image'], d['position'], d['image']) - body_mask = np.array(transparent_image.split()[3]) - - # 根据新的坐标获取新的肩点 - left_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_left'], d['position'])] - right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], d['position'])] + body_mask = np.array(d['image'].split()[3]) + left_shoulder = basic_info['body_point_test']['shoulder_left'] + right_shoulder = basic_info['body_point_test']['shoulder_right'] body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) top_outer_mask = np.array(binary_body_mask) @@ -118,13 +114,13 @@ def synthesis(data, size, basic_info): if layer['image'] is not None: if layer['name'] != "body": test_image = Image.new('RGBA', size, (0, 0, 0, 0)) - test_image.paste(layer['image'], layer['position'], layer['image']) + test_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) mask_alpha = Image.fromarray(mask_data) cropped_image = Image.composite(test_image, Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) base_image.paste(test_image, (0, 0), cropped_image) else: - base_image.paste(layer['image'], layer['position'], layer['image']) + base_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) 
result_image = base_image From a86841c206fccc0eb128c7d577ff660ef2873099 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 1 Aug 2024 17:27:18 +0800 Subject: [PATCH 025/103] =?UTF-8?q?feat=20=20=20=E7=BB=93=E6=9E=9C?= =?UTF-8?q?=E5=9B=BE=E5=AE=BD=E5=BA=A6=E8=87=AA=E9=80=82=E5=BA=94=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/service.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index e492089..211b485 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -183,13 +183,11 @@ def update_base_size_priority(layers, size): # 计算新图片的宽度和高度 max_x = max([layer["position"][1] + layer["image"].size[1] for layer in layers]) min_x = min([layer["position"][1] for layer in layers]) - max_y = max([layer["position"][0] + layer["image"].size[0] for layer in layers]) - min_y = min([layer["position"][0] for layer in layers]) new_width = max(size[0], max_x - min_x) - new_height = max(size[1], max_y - min_y) + new_height = size[1] # 更新图片的坐标 for layer in layers: - updated_coords = [layer["position"][1] - min_x, layer["position"][0] - min_y] + updated_coords = [layer["position"][1] - min_x, layer["position"][0]] layer['position'] = updated_coords return layers, (new_width, new_height) From a8f76cb65e05a52ad7a54571d485bf13be20367e Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 1 Aug 2024 17:36:15 +0800 Subject: [PATCH 026/103] =?UTF-8?q?Revert=20"feat=20=20=20=E7=BB=93?= =?UTF-8?q?=E6=9E=9C=E5=9B=BE=E5=AE=BD=E5=BA=A6=E8=87=AA=E9=80=82=E5=BA=94?= =?UTF-8?q?"?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit 4444c00949f78f8f8cde34867d1ed485abfa91bb. 
--- app/service/design/service.py | 20 ++------------------ app/service/design/utils/synthesis_item.py | 16 ++++++---------- 2 files changed, 8 insertions(+), 28 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index e492089..b8c5a8c 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -97,9 +97,9 @@ def process_object(cfg, process_id, total): # uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url'], 'image_type': 'pattern_image'}) # if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: # uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) - layers, new_size = update_base_size_priority(layers, body_size) + # 合成 - items_response['synthesis_url'] = synthesis(layers, new_size, basic_info) + items_response['synthesis_url'] = synthesis(layers, body_size, basic_info) for lay in layers: items_response['layers'].append({ @@ -177,19 +177,3 @@ def upload_images(image_obj): rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] req = oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=cv2.imencode('.png', rgba_image)[1]) return image_obj['image_url'] - - -def update_base_size_priority(layers, size): - # 计算新图片的宽度和高度 - max_x = max([layer["position"][1] + layer["image"].size[1] for layer in layers]) - min_x = min([layer["position"][1] for layer in layers]) - max_y = max([layer["position"][0] + layer["image"].size[0] for layer in layers]) - min_y = min([layer["position"][0] for layer in layers]) - new_width = max(size[0], max_x - min_x) - new_height = max(size[1], max_y - min_y) - - # 更新图片的坐标 - for layer in layers: - updated_coords = [layer["position"][1] - min_x, layer["position"][0] - min_y] - layer['position'] = updated_coords - return layers, (new_width, new_height) diff --git a/app/service/design/utils/synthesis_item.py 
b/app/service/design/utils/synthesis_item.py index c8be2c7..d560f37 100644 --- a/app/service/design/utils/synthesis_item.py +++ b/app/service/design/utils/synthesis_item.py @@ -63,18 +63,14 @@ def synthesis(data, size, basic_info): # 创建底图 base_image = Image.new('RGBA', size, (0, 0, 0, 0)) try: + all_mask_shape = (size[1], size[0]) body_mask = None for d in data: if d['name'] == 'body': - # 创建一个新的宽高透明图像, 把模特贴上去获取mask - transparent_image = Image.new("RGBA", size, (0, 0, 0, 0)) - transparent_image.paste(d['image'], d['position'], d['image']) - body_mask = np.array(transparent_image.split()[3]) - - # 根据新的坐标获取新的肩点 - left_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_left'], d['position'])] - right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], d['position'])] + body_mask = np.array(d['image'].split()[3]) + left_shoulder = basic_info['body_point_test']['shoulder_left'] + right_shoulder = basic_info['body_point_test']['shoulder_right'] body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) top_outer_mask = np.array(binary_body_mask) @@ -118,13 +114,13 @@ def synthesis(data, size, basic_info): if layer['image'] is not None: if layer['name'] != "body": test_image = Image.new('RGBA', size, (0, 0, 0, 0)) - test_image.paste(layer['image'], layer['position'], layer['image']) + test_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) mask_alpha = Image.fromarray(mask_data) cropped_image = Image.composite(test_image, Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) base_image.paste(test_image, (0, 0), cropped_image) else: - base_image.paste(layer['image'], layer['position'], layer['image']) + base_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) 
result_image = base_image From b75dd7161a3851414cc9f841c84c714c667de509 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 1 Aug 2024 16:58:49 +0800 Subject: [PATCH 027/103] =?UTF-8?q?feat=20=20=20=E7=BB=93=E6=9E=9C?= =?UTF-8?q?=E5=9B=BE=E5=AE=BD=E5=BA=A6=E8=87=AA=E9=80=82=E5=BA=94=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/service.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index b8c5a8c..211b485 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -97,9 +97,9 @@ def process_object(cfg, process_id, total): # uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url'], 'image_type': 'pattern_image'}) # if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: # uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) - + layers, new_size = update_base_size_priority(layers, body_size) # 合成 - items_response['synthesis_url'] = synthesis(layers, body_size, basic_info) + items_response['synthesis_url'] = synthesis(layers, new_size, basic_info) for lay in layers: items_response['layers'].append({ @@ -177,3 +177,17 @@ def upload_images(image_obj): rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] req = oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=cv2.imencode('.png', rgba_image)[1]) return image_obj['image_url'] + + +def update_base_size_priority(layers, size): + # 计算新图片的宽度和高度 + max_x = max([layer["position"][1] + layer["image"].size[1] for layer in layers]) + min_x = min([layer["position"][1] for layer in layers]) + new_width = max(size[0], max_x - min_x) + new_height = size[1] + + # 更新图片的坐标 + for layer in layers: + updated_coords = [layer["position"][1] - min_x, layer["position"][0]] + layer['position'] 
= updated_coords + return layers, (new_width, new_height) From 529bb023931b9c461d911f9d071f1a708611596d Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 1 Aug 2024 17:46:26 +0800 Subject: [PATCH 028/103] =?UTF-8?q?feat=20=20=20=E7=BB=93=E6=9E=9C?= =?UTF-8?q?=E5=9B=BE=E5=AE=BD=E5=BA=A6=E8=87=AA=E9=80=82=E5=BA=94=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/service.py | 2 +- app/service/design/utils/synthesis_item.py | 16 ++++++++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index 211b485..37fdbde 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -181,7 +181,7 @@ def upload_images(image_obj): def update_base_size_priority(layers, size): # 计算新图片的宽度和高度 - max_x = max([layer["position"][1] + layer["image"].size[1] for layer in layers]) + max_x = max([layer["position"][1] + layer["image"].size[0] for layer in layers]) min_x = min([layer["position"][1] for layer in layers]) new_width = max(size[0], max_x - min_x) new_height = size[1] diff --git a/app/service/design/utils/synthesis_item.py b/app/service/design/utils/synthesis_item.py index d560f37..c8be2c7 100644 --- a/app/service/design/utils/synthesis_item.py +++ b/app/service/design/utils/synthesis_item.py @@ -63,14 +63,18 @@ def synthesis(data, size, basic_info): # 创建底图 base_image = Image.new('RGBA', size, (0, 0, 0, 0)) try: - all_mask_shape = (size[1], size[0]) body_mask = None for d in data: if d['name'] == 'body': - body_mask = np.array(d['image'].split()[3]) - left_shoulder = basic_info['body_point_test']['shoulder_left'] - right_shoulder = basic_info['body_point_test']['shoulder_right'] + # 创建一个新的宽高透明图像, 把模特贴上去获取mask + transparent_image = Image.new("RGBA", size, (0, 0, 0, 0)) + transparent_image.paste(d['image'], d['position'], d['image']) + body_mask = np.array(transparent_image.split()[3]) + + # 根据新的坐标获取新的肩点 + 
left_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_left'], d['position'])] + right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], d['position'])] body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) top_outer_mask = np.array(binary_body_mask) @@ -114,13 +118,13 @@ def synthesis(data, size, basic_info): if layer['image'] is not None: if layer['name'] != "body": test_image = Image.new('RGBA', size, (0, 0, 0, 0)) - test_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) + test_image.paste(layer['image'], layer['position'], layer['image']) mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) mask_alpha = Image.fromarray(mask_data) cropped_image = Image.composite(test_image, Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) base_image.paste(test_image, (0, 0), cropped_image) else: - base_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) + base_image.paste(layer['image'], layer['position'], layer['image']) result_image = base_image From 1bb1e1d649d518aa82cf2f5cc1c154173d5749bb Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 1 Aug 2024 17:53:22 +0800 Subject: [PATCH 029/103] =?UTF-8?q?Revert=20"feat=20=20=20=E7=BB=93?= =?UTF-8?q?=E6=9E=9C=E5=9B=BE=E5=AE=BD=E5=BA=A6=E8=87=AA=E9=80=82=E5=BA=94?= =?UTF-8?q?=20fix"?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit 4444c009 --- app/service/design/service.py | 18 ++---------------- app/service/design/utils/synthesis_item.py | 16 ++++++---------- 2 files changed, 8 insertions(+), 26 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index 37fdbde..b8c5a8c 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -97,9 
+97,9 @@ def process_object(cfg, process_id, total): # uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url'], 'image_type': 'pattern_image'}) # if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: # uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) - layers, new_size = update_base_size_priority(layers, body_size) + # 合成 - items_response['synthesis_url'] = synthesis(layers, new_size, basic_info) + items_response['synthesis_url'] = synthesis(layers, body_size, basic_info) for lay in layers: items_response['layers'].append({ @@ -177,17 +177,3 @@ def upload_images(image_obj): rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] req = oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=cv2.imencode('.png', rgba_image)[1]) return image_obj['image_url'] - - -def update_base_size_priority(layers, size): - # 计算新图片的宽度和高度 - max_x = max([layer["position"][1] + layer["image"].size[0] for layer in layers]) - min_x = min([layer["position"][1] for layer in layers]) - new_width = max(size[0], max_x - min_x) - new_height = size[1] - - # 更新图片的坐标 - for layer in layers: - updated_coords = [layer["position"][1] - min_x, layer["position"][0]] - layer['position'] = updated_coords - return layers, (new_width, new_height) diff --git a/app/service/design/utils/synthesis_item.py b/app/service/design/utils/synthesis_item.py index c8be2c7..d560f37 100644 --- a/app/service/design/utils/synthesis_item.py +++ b/app/service/design/utils/synthesis_item.py @@ -63,18 +63,14 @@ def synthesis(data, size, basic_info): # 创建底图 base_image = Image.new('RGBA', size, (0, 0, 0, 0)) try: + all_mask_shape = (size[1], size[0]) body_mask = None for d in data: if d['name'] == 'body': - # 创建一个新的宽高透明图像, 把模特贴上去获取mask - transparent_image = Image.new("RGBA", size, (0, 0, 0, 0)) - transparent_image.paste(d['image'], d['position'], d['image']) - body_mask = 
np.array(transparent_image.split()[3]) - - # 根据新的坐标获取新的肩点 - left_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_left'], d['position'])] - right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], d['position'])] + body_mask = np.array(d['image'].split()[3]) + left_shoulder = basic_info['body_point_test']['shoulder_left'] + right_shoulder = basic_info['body_point_test']['shoulder_right'] body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) top_outer_mask = np.array(binary_body_mask) @@ -118,13 +114,13 @@ def synthesis(data, size, basic_info): if layer['image'] is not None: if layer['name'] != "body": test_image = Image.new('RGBA', size, (0, 0, 0, 0)) - test_image.paste(layer['image'], layer['position'], layer['image']) + test_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) mask_alpha = Image.fromarray(mask_data) cropped_image = Image.composite(test_image, Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) base_image.paste(test_image, (0, 0), cropped_image) else: - base_image.paste(layer['image'], layer['position'], layer['image']) + base_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) result_image = base_image From 7254f3ae311a2ab92b2c4b5bb403a330e9bccf42 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 2 Aug 2024 11:17:07 +0800 Subject: [PATCH 030/103] =?UTF-8?q?feat=20=20=20=E7=BB=93=E6=9E=9C?= =?UTF-8?q?=E5=9B=BE=E5=AE=BD=E5=BA=A6=E8=87=AA=E9=80=82=E5=BA=94=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/service.py | 20 ++++++++++++++++++-- app/service/design/utils/synthesis_item.py | 14 +++++++++----- 2 files changed, 27 insertions(+), 7 deletions(-) 
diff --git a/app/service/design/service.py b/app/service/design/service.py index b8c5a8c..4846a2d 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -97,9 +97,9 @@ def process_object(cfg, process_id, total): # uploaded_images.append({'image_obj': layer['pattern_image'], 'image_url': layer['pattern_image_url'], 'image_type': 'pattern_image'}) # if 'mask' in layer.keys() and layer['mask'] is not None and layer['mask_url'] is not None: # uploaded_images.append({'image_obj': layer['mask'], 'image_url': layer['mask_url'], 'image_type': 'mask'}) - + layers, new_size = update_base_size_priority(layers, body_size) # 合成 - items_response['synthesis_url'] = synthesis(layers, body_size, basic_info) + items_response['synthesis_url'] = synthesis(layers, new_size, basic_info) for lay in layers: items_response['layers'].append({ @@ -177,3 +177,19 @@ def upload_images(image_obj): rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] req = oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=cv2.imencode('.png', rgba_image)[1]) return image_obj['image_url'] + + +def update_base_size_priority(layers, size): + # 计算透明背景图片的宽度 + min_x = min(info['position'][1] for info in layers) + x_list = [] + for info in layers: + if info['image'] is not None: + x_list.append(info['position'][1] + info['image'].width) + max_x = max(x_list) + new_width = max_x - min_x + new_height = 700 + # 更新坐标 + for info in layers: + info['position'] = (info['position'][0], info['position'][1] - min_x) + return layers, (new_width, new_height) diff --git a/app/service/design/utils/synthesis_item.py b/app/service/design/utils/synthesis_item.py index d560f37..0af8134 100644 --- a/app/service/design/utils/synthesis_item.py +++ b/app/service/design/utils/synthesis_item.py @@ -63,14 +63,18 @@ def synthesis(data, size, basic_info): # 创建底图 base_image = Image.new('RGBA', size, (0, 0, 0, 0)) try: - all_mask_shape = (size[1], size[0]) body_mask = None for d in data: if d['name'] 
== 'body': - body_mask = np.array(d['image'].split()[3]) - left_shoulder = basic_info['body_point_test']['shoulder_left'] - right_shoulder = basic_info['body_point_test']['shoulder_right'] + # 创建一个新的宽高透明图像, 把模特贴上去获取mask + transparent_image = Image.new("RGBA", size, (0, 0, 0, 0)) + transparent_image.paste(d['image'], (d['position'][1], d['position'][0]), d['image']) # 此处可变数组会被paste篡改值,所以使用下标获取position + body_mask = np.array(transparent_image.split()[3]) + + # 根据新的坐标获取新的肩点 + left_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_left'], [d['position'][1], d['position'][0]])] + right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], [d['position'][1], d['position'][0]])] body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) top_outer_mask = np.array(binary_body_mask) @@ -118,7 +122,7 @@ def synthesis(data, size, basic_info): mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) mask_alpha = Image.fromarray(mask_data) cropped_image = Image.composite(test_image, Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) - base_image.paste(test_image, (0, 0), cropped_image) + base_image.paste(test_image, (0, 0), cropped_image) # test_image 已经按照坐标贴到最大宽值的图片上 坐着这里坐标为00 else: base_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) From 3a0e175e61f78077df1750573e6c6f681ddc360b Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 2 Aug 2024 11:35:31 +0800 Subject: [PATCH 031/103] =?UTF-8?q?feat=20=20=20=E7=BB=93=E6=9E=9C?= =?UTF-8?q?=E5=9B=BE=E5=AE=BD=E5=BA=A6=E8=87=AA=E9=80=82=E5=BA=94=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/utils/synthesis_item.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/service/design/utils/synthesis_item.py 
b/app/service/design/utils/synthesis_item.py index 0af8134..59bb1de 100644 --- a/app/service/design/utils/synthesis_item.py +++ b/app/service/design/utils/synthesis_item.py @@ -97,7 +97,7 @@ def synthesis(data, size, basic_info): background = np.zeros_like(top_outer_mask) background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] top_outer_mask = background + top_outer_mask - elif bottom and data[i]['name'] in ["trousers_front", "skirt_front", "bottoms_front"]: + elif bottom and data[i]['name'] in ["trousers_front", "skirt_front", "bottoms_front", "dress_front"]: bottom = False mask_shape = data[i]['mask'].shape y_offset, x_offset = data[i]['position'] From 281f8126362cc8bd00bb76dc79b8100630e22c66 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 2 Aug 2024 17:12:34 +0800 Subject: [PATCH 032/103] =?UTF-8?q?feat=20=20=20=E7=BB=93=E6=9E=9C?= =?UTF-8?q?=E5=9B=BE=E5=AE=BD=E5=BA=A6=E8=87=AA=E9=80=82=E5=BA=94=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/service.py | 2 +- app/service/design/utils/synthesis_item.py | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index 4846a2d..798b6f4 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -191,5 +191,5 @@ def update_base_size_priority(layers, size): new_height = 700 # 更新坐标 for info in layers: - info['position'] = (info['position'][0], info['position'][1] - min_x) + info['adaptive_position'] = (info['position'][0], info['position'][1] - min_x) return layers, (new_width, new_height) diff --git a/app/service/design/utils/synthesis_item.py b/app/service/design/utils/synthesis_item.py index 59bb1de..03df2d9 100644 --- a/app/service/design/utils/synthesis_item.py +++ b/app/service/design/utils/synthesis_item.py @@ -69,12 +69,12 @@ def synthesis(data, size, 
basic_info): if d['name'] == 'body': # 创建一个新的宽高透明图像, 把模特贴上去获取mask transparent_image = Image.new("RGBA", size, (0, 0, 0, 0)) - transparent_image.paste(d['image'], (d['position'][1], d['position'][0]), d['image']) # 此处可变数组会被paste篡改值,所以使用下标获取position + transparent_image.paste(d['image'], (d['adaptive_position'][1], d['adaptive_position'][0]), d['image']) # 此处可变数组会被paste篡改值,所以使用下标获取position body_mask = np.array(transparent_image.split()[3]) # 根据新的坐标获取新的肩点 - left_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_left'], [d['position'][1], d['position'][0]])] - right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], [d['position'][1], d['position'][0]])] + left_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_left'], [d['adaptive_position'][1], d['adaptive_position'][0]])] + right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], [d['adaptive_position'][1], d['adaptive_position'][0]])] body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) top_outer_mask = np.array(binary_body_mask) @@ -88,7 +88,7 @@ def synthesis(data, size, basic_info): if top and data[i]['name'] in ["blouse_front", "outwear_front", "dress_front", "tops_front"]: top = False mask_shape = data[i]['mask'].shape - y_offset, x_offset = data[i]['position'] + y_offset, x_offset = data[i]['adaptive_position'] # 初始化叠加区域的起始和结束位置 all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) @@ -100,7 +100,7 @@ def synthesis(data, size, basic_info): elif bottom and data[i]['name'] in ["trousers_front", "skirt_front", "bottoms_front", "dress_front"]: bottom = False 
mask_shape = data[i]['mask'].shape - y_offset, x_offset = data[i]['position'] + y_offset, x_offset = data[i]['adaptive_position'] # 初始化叠加区域的起始和结束位置 all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) @@ -118,13 +118,13 @@ def synthesis(data, size, basic_info): if layer['image'] is not None: if layer['name'] != "body": test_image = Image.new('RGBA', size, (0, 0, 0, 0)) - test_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) + test_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) mask_alpha = Image.fromarray(mask_data) cropped_image = Image.composite(test_image, Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) base_image.paste(test_image, (0, 0), cropped_image) # test_image 已经按照坐标贴到最大宽值的图片上 坐着这里坐标为00 else: - base_image.paste(layer['image'], (layer['position'][1], layer['position'][0]), layer['image']) + base_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) result_image = base_image From ef78528e4716764112fa348a487df08d7908aac8 Mon Sep 17 00:00:00 2001 From: zchen Date: Fri, 9 Aug 2024 15:42:43 +0800 Subject: [PATCH 033/103] =?UTF-8?q?feat=20fix=20design=20=E9=A2=84?= =?UTF-8?q?=E5=A4=84=E7=90=86seg=E7=BB=93=E6=9E=9C=E9=80=BB=E8=BE=91?= =?UTF-8?q?=E4=BF=AE=E5=A4=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_pre_processing/service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/service/design_pre_processing/service.py b/app/service/design_pre_processing/service.py index 1b4f33c..4cbff8f 100644 --- 
a/app/service/design_pre_processing/service.py +++ b/app/service/design_pre_processing/service.py @@ -151,10 +151,10 @@ class DesignPreprocessing: # 推理得到keypoint sketch['keypoint_result'] = self.keypoint_cache(sketch) if sketch['site'] == 'up': - _, seg_cache = self.load_seg_result(sketch['image_id']) + _, seg_cache = self.load_seg_result(sketch['obj']) if not _: # 推理获得seg 结果 - seg_result = get_seg_result(sketch["image_id"], sketch['image_obj'])[0] + seg_result = get_seg_result(sketch["image_id"], sketch['obj'])[0] self.save_seg_result(seg_result, sketch['image_id']) if IF_DEBUG_SHOW: From d11beb0107ee91110afef82a13313ea5a9bc0f84 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 14 Aug 2024 16:45:34 +0800 Subject: [PATCH 034/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 2 + app/api/api_image2sketch.py | 36 + app/api/api_route.py | 13 +- app/schemas/image2sketch.py | 7 + app/service/design_pre_processing/service.py | 4 +- .../testC/20180422151845_stEe4.jpeg | Bin 0 -> 385002 bytes app/service/image2sketch/infer.py | 89 +++ app/service/image2sketch/models/__init__.py | 49 ++ app/service/image2sketch/models/base_model.py | 230 ++++++ app/service/image2sketch/models/layer.py | 354 +++++++++ app/service/image2sketch/models/networks.py | 734 ++++++++++++++++++ app/service/image2sketch/models/perceptual.py | 86 ++ .../image2sketch/models/template_model.py | 82 ++ app/service/image2sketch/models/test_model.py | 45 ++ .../image2sketch/models/triplet_model.py | 68 ++ .../image2sketch/models/unpaired_model.py | 144 ++++ app/service/image2sketch/opt.py | 45 ++ app/service/image2sketch/server.py | 79 ++ app/service/image2sketch/util/__init__.py | 1 + app/service/image2sketch/util/get_data.py | 110 +++ app/service/image2sketch/util/html.py | 86 ++ app/service/image2sketch/util/image_pool.py | 54 ++ 
app/service/image2sketch/util/util.py | 103 +++ app/service/image2sketch/util/visualizer.py | 223 ++++++ download_checkpoints.py | 45 ++ 25 files changed, 2681 insertions(+), 8 deletions(-) create mode 100644 app/api/api_image2sketch.py create mode 100644 app/schemas/image2sketch.py create mode 100644 app/service/image2sketch/datasets/ref_unpair/testC/20180422151845_stEe4.jpeg create mode 100644 app/service/image2sketch/infer.py create mode 100644 app/service/image2sketch/models/__init__.py create mode 100644 app/service/image2sketch/models/base_model.py create mode 100644 app/service/image2sketch/models/layer.py create mode 100644 app/service/image2sketch/models/networks.py create mode 100644 app/service/image2sketch/models/perceptual.py create mode 100644 app/service/image2sketch/models/template_model.py create mode 100644 app/service/image2sketch/models/test_model.py create mode 100644 app/service/image2sketch/models/triplet_model.py create mode 100644 app/service/image2sketch/models/unpaired_model.py create mode 100644 app/service/image2sketch/opt.py create mode 100644 app/service/image2sketch/server.py create mode 100644 app/service/image2sketch/util/__init__.py create mode 100644 app/service/image2sketch/util/get_data.py create mode 100644 app/service/image2sketch/util/html.py create mode 100644 app/service/image2sketch/util/image_pool.py create mode 100644 app/service/image2sketch/util/util.py create mode 100644 app/service/image2sketch/util/visualizer.py create mode 100644 download_checkpoints.py diff --git a/.gitignore b/.gitignore index 87a4934..fe14af2 100644 --- a/.gitignore +++ b/.gitignore @@ -136,3 +136,5 @@ app/logs/* *.log *.jpg /qodana.yaml +.pth +.pytorch \ No newline at end of file diff --git a/app/api/api_image2sketch.py b/app/api/api_image2sketch.py new file mode 100644 index 0000000..98d94ee --- /dev/null +++ b/app/api/api_image2sketch.py @@ -0,0 +1,36 @@ +import json +import logging + +from fastapi import APIRouter, HTTPException + +from 
app.schemas.image2sketch import Image2SketchModel +from app.schemas.response_template import ResponseModel +from app.service.image2sketch.server import Image2SketchServer + +router = APIRouter() +logger = logging.getLogger() + + +@router.post("/image2sketch") +def image2sketch(request_item: Image2SketchModel): + """ + 创建一个具有以下参数的请求体: + - **sr_image_url**: 超分图片的minio或s3 url地址 + - **sr_xn**: 超分的倍数,只接受2或4 + - **sr_tasks_id**: 任务id 用于取消超分任务和获取超分结果 + + 示例参数: + { + "image_url": "test/real_Top_971fe3085a69f31f3e66c225eabb0eea.jpg_Img.jpg", + "sketch_bucket": "test", + "sketch_name": "12341556-89.jpg" + } + """ + # try: + logger.info(f"image2sketch request item is : @@@@@@:{json.dumps(request_item.dict())}") + service = Image2SketchServer(request_item) + sketch_url = service.get_result() + # except Exception as e: + # logger.warning(f"image2sketch Run Exception @@@@@@:{e}") + # raise HTTPException(status_code=404, detail=str(e)) + return ResponseModel(data=sketch_url) diff --git a/app/api/api_route.py b/app/api/api_route.py index c2bd2d2..8bcbe44 100644 --- a/app/api/api_route.py +++ b/app/api/api_route.py @@ -1,14 +1,14 @@ from fastapi import APIRouter -from app.api import api_test -from app.api import api_super_resolution -from app.api import api_generate_image from app.api import api_attribute_retrieve -from app.api import api_design from app.api import api_chat_robot -from app.api import api_prompt_generation +from app.api import api_design from app.api import api_design_pre_processing - +from app.api import api_generate_image +from app.api import api_image2sketch +from app.api import api_prompt_generation +from app.api import api_super_resolution +from app.api import api_test router = APIRouter() @@ -20,3 +20,4 @@ router.include_router(api_design.router, tags=['design'], prefix="/api") router.include_router(api_chat_robot.router, tags=['chat_robot'], prefix="/api") router.include_router(api_prompt_generation.router, tags=['prompt_generation'], prefix="/api") 
router.include_router(api_design_pre_processing.router, tags=['design_pre_processing'], prefix="/api") +router.include_router(api_image2sketch.router, tags=['api_image2sketch'], prefix="/api") diff --git a/app/schemas/image2sketch.py b/app/schemas/image2sketch.py new file mode 100644 index 0000000..a124739 --- /dev/null +++ b/app/schemas/image2sketch.py @@ -0,0 +1,7 @@ +from pydantic import BaseModel + + +class Image2SketchModel(BaseModel): + image_url: str + sketch_bucket: str + sketch_name: str diff --git a/app/service/design_pre_processing/service.py b/app/service/design_pre_processing/service.py index 1b4f33c..4cbff8f 100644 --- a/app/service/design_pre_processing/service.py +++ b/app/service/design_pre_processing/service.py @@ -151,10 +151,10 @@ class DesignPreprocessing: # 推理得到keypoint sketch['keypoint_result'] = self.keypoint_cache(sketch) if sketch['site'] == 'up': - _, seg_cache = self.load_seg_result(sketch['image_id']) + _, seg_cache = self.load_seg_result(sketch['obj']) if not _: # 推理获得seg 结果 - seg_result = get_seg_result(sketch["image_id"], sketch['image_obj'])[0] + seg_result = get_seg_result(sketch["image_id"], sketch['obj'])[0] self.save_seg_result(seg_result, sketch['image_id']) if IF_DEBUG_SHOW: diff --git a/app/service/image2sketch/datasets/ref_unpair/testC/20180422151845_stEe4.jpeg b/app/service/image2sketch/datasets/ref_unpair/testC/20180422151845_stEe4.jpeg new file mode 100644 index 0000000000000000000000000000000000000000..0347322b0f484ed75bbcca27230ff2cdd87a275a GIT binary patch literal 385002 zcmb??XF!v|)@BlVZ=oZ-gd)94fC$p1cR{Lj5CoCXg9M~^L5frX1(YrzE%Z)6lrFs~ zE%XkH_wL>A-o5*Ce`b=*J9EyQGv|5E^G4}vtCA2h5CQ-I5_Ppl`TziyH~;`Tz{kBC zA+Akw0RUJ5>W>~8!Y#Ld;WY%B7)fH<=L7a4Bd1_d+^|?hF75hr?Z3ax z)5`;B-ADfS7uK%4x(El?k}nS||9!UqJ$c?IiT`-WKZo7!;}DLz?ffw6jrr%Z|9vSS z@jBf9y`+C1{#*nDW`2JE?GOC_UijUw5XBS!wVymlWgQp-tEn?ExhM2L+x*v@grh$g z{%!aF8vdFF!g2cg>QCv)|HDSW%qLj?J&c;r&jSokXg~F7{!fekV@{ZX>%Zpyw_Qy= 
zAq3}5&wMJ2r~Q}x|L0dYPA(+>$1VReY_1u4=N?&~pZovqg*)eb{(t%O|DSW1pg8lF z^QT|mtOrenZ9U!drwTddBV+>Kd6EfSv?cQ&cKM0`AqE604_I#ZSyI|A%Gxroel1-8 zT2;6`RG9UVgP$YF&Jg{oK|fTrEkaA%hCPj^9O1D3%Az`IyW03 zH;xB4cYo=HO0w)zu!uDFYdsg0IsRe5!*V^w@++7q66h(vZYS?>(4TwNPle)veT0pa zIvi|%H9)Lyjs(U1yGJ=RLJ{vmF6%?Gu72m<{9d)XS+io0_B(A_kZW8`*5qzGlx-6` zzkB8et_vXkG@$KN_Js84Y~grbVKYqrBap@Yd{;?+RVmZA$I&v|0&g*J%_Mkj^;Lb- zuT>6(0}jTi^F^|o#eSaPWgcz$-4*#=x;|cn_aoXh`G}_o2bQif^Zy7JHzFL6E!+x0 zhw8e-p|ub`Pf<8e!If_YPzhOrQC&osGYr`+1{W%SY@pzCfMbQh*$G%&Q&Komnw6Vb zuG?p_WVUhsyPsJANg_ymjd|gEGUVFia4I|Spg)C52U-=%gkEe12Kx(*d>w7L@RPm^ zCvWMO6(uzPerBb~2?2EdLFa>QSA+et3pXeI=Nq{TUN+5evnwF^-xII^<<32E&Xqim zMW4muS_6{c`trY16t>1uz^K|ox=<7MO`#c3}M18j}3LkMOlY1z$6JkljOvl=D z+cApFILCSopr21e<~Uy_+-xR131GRbVwnvd$8+DL8eNgkT{tS1lGq!ST{s_^_!5Aq zK;LYmbD-NcWLZqUb1$g;WN=(u>d=LEY3m8ge?-TtSP%+W(Vw+&wcf8x`}9Y%C;}J7 z`e7@kbu)%r>f2ZxL!x?-=mHV&vNzqBDb?76$Ox?e}rI`$Tq zM$~}rY-I+dD2w`!M3MyyMFs$c5$!~lnAbE|2Y58taq2{L#8ZC8SI5y zK>7oH6WT9)0y}xF$qoXWZ;mT+vwi3NNcsTylxNp6j1eUqdqA$caLk==7E&BTkXtfd zisJx_ccwWn@VY)M!oJ{G`pcutKPUi5$jB4ogvA$%x@wphQMez!>#(^91@Ff&Vkm=O z)nxrDzcyZSWNalT%MZYDA`RUD%BfbX9*{>o~Jz;;48a2+_`gEV%AexawTZbp7JY8hxs!kQx)b zWxg=efwp*4f}b(I*Q;=|f1mh%^P2GQr|5ZszX$(+j2Bk|P!4}<)Q3;b@zoN9*|I$S z)-OB1AMX&P3Td-+2#g_>XW3$6E6m%jOr|E(0p9Y0=}{G(EkM1sIfHk{zZ zzscGIuudDYPn+fQGe#mU`O&_$^YEwT;8R9d*X5&7)&;Bw^({N@S=1RFMWxA@WPM}F zXWz>h8FfP0-Yp#`Vs8C%PyJFD=N56@b*3FpI9XF3g#?IPy1i%q4&X!*lP&oyeF8o? 
z+PKSyWQ>t|Uot|KoE?p(k8(?X8&&ul@th=?-amd|Aggg5zmT8JNSL{1V!!%!%e&{Y-9+1dxh3PX2^`MX|^3{nnz}k-w%lP zjYwj%c@k7q**Rqel(;iS$QUsPEJoyGPfwQj)FS=0J6Sna!@?n4>!5>nmDWDs9!K2tniWu#t-AwKotfb%bb;`-*b z`&m5PYZfUVvi_*03hZOlZze5sa?v^OQwBphHuNn5_q$1i8?B;@^PN&@qP2}Ao@^a_ z?5R%$K^ZH8kIQ)O;!R=hBX*ztr2W`A7hGqs7o4&$^(*&2PeVPEn(fDS=|4e0Cg|Jk-37t0+ zTyvV`7Zukt6%sJ4+7DNyHzf@8;i0b+KLptbG;<^3UZwCCYQ4au5}xX3>wl3biNcFr z&68UQkVMC~HZI7LV^2Z1%+M|($5O5PaXbtA_ZG(Hu|7)1_592&K-uV_g1w#-?GX!= zCHIVYRO#%9bOW|E1K{Jtpo@`!!AMZEa3#Ld+tb3m14n>JTX$I(VEI9Fs46X)%vO0# zrYa%Xbgc;U#5d_u^nIAOp<`>$)4LGe7Qg|J0z!WpNaG26_j9G!!hNj67E1uSG!Qx4 zA5?_g(AZ^w1sqS`G`OY7^6U-LVgS1cS-om`TS5-&7`phd0)GbK0qSWq{NX7Y)NBjS zg`&k_y7asy}UgUkm%@#@xm z{XG*MK{V>tEpEi~kD>`BP1tpCo>t#%fABrM_2mI$+diX~NFHk?T!UtPpPW0*po5pX zLmq++6@=l2M;4mZue88l%wS8Wn(WBqZ1XH-EF3>w_z7if21f&grSYYx7&3LsnF6EuIxEY!WhPgl z8O^hwNOgS%!>2O z3g@@&boc3qAxF&_fZk?LG3Ke$<_)=F@4-a?Wbq!9BYAHD6CowO_eIi3152@sYkcf=`H2I0#2qK>o-2aPqH_lB|w2d zfG33|&zsFpIzcXn+zNm65+1x#(OMg4YH`=U10lnHaSf14Fit_NppSv?zBvDt>p2v<76Gr-B1c|viTJc0j{kabw7i$3Rz>#ajs57O zS0RW8Ff3-`n_id0SW7r!bxpQ zLa!hnF1aOIZ25||QCeZ2N@1+IMX{mv#I^0KpJOe(Co9-+j5Pl<3_Q(x#BW5=PrNXD zXxTEx=_fj)Ap!`(1XjS|$N{-UAeoa2z0AY@>2aY&<#!o@8MAZilEXxwg%W~~3K@m` zhU}OPjL@%KrxTd9fSF?-N{M1XP3ob{sMSPe8tIOQ#C2k%_(|CmEX-=?onHN^2R&ou0-FVx4J#H zS|}yz86SHS@y+-qCTOchvg5RFD|p^^o(9I_6)J#AKkDZw1BGS6MyfhAgBS6F7wO3P&a3Zj1yHvLz-kEjj2V?f?-#;g z#QB`qtbm@-7(Tn}b&s3%s+6ok{N8WcGDQMd+}PTR8(p}-+mV2{uTWTtxBTWO&mtD$6pi&cN@*MBepCGz*a0f#D2QAFJwHqQmBI#eu= z3TP!6s-r=ZiR`&JF9d=FNOa#%_~5f|x71h|Txnr1Fhix!?2mWkoO$eTQa>XYD}I^vH2* zx+QJiG||sLIRz*heO3_mru8rnE0C@~9S}=PrXk>f&B2wS*;XN_Z2jT8mVhI+MF)ZZ z_ea0ep8XLPuNp(XzeMsy*0rL8cD=X7UAbha%7R7*PQPk>(iKb7U7|%4bsn2K1L-YA zUdB4ERw8VZFL|lK_s)f*X-q=~h6vMb;z$~H+PxK6CenPokAIrfi%CJAE25Dfp0&8m zUKxeNfb4h^vBckUJ0LE;Ra|~cyArH_1pj2xG3W*Ve7i%!+G9offq1W14F04K%clFl z3t1_5luza!sKE8z(TMMGh&lF|!1STq)V?2$!rpA;X$XF>!~Sa?x!>w>q-4REuq-LH z1}5G0#|(i5Ud}yVDt|S60SrGMvWqK#2&aIsnfNWBBG@xTR%ac>hWebmnMjrxrc*hx 
z=uyBDbe`%fxCn0i>6aNu@Ta>i0d_F5`f2d_TFT!;>;Y2~#Ie7+!>ztV)Vi<4*Zhd4 zn)iMbWC#ls#g6UPj%&N-J1A<8(wYtvEY-j|9=c~h1W@H+n7=@nCQArP%G&=Vi)N2S z8KRM_q1voRI%}8VgRs_UWBvjEb4Q(p6(g>?DhB8mIb_p^dNz`}hN%FX!Jih1~m7 zV8nw=Kvqi`^>idhnFThWozi0{`2JNVc{uXZZ#?DGlHfljPmJyvKdKsExcIyfIm<19Ap17(@xmv=a1craPa zvzS)|DAJDyfJ59A?T|T*s+-fp&tKI%@bn(T{;_vQbguUAk1Vc~1`teZ!Occqf(+r4 zyk}6s=3Nbjm9lB8U76n>QWz5IPPgxiwg)foEWKx=odAN?f!X$<28w^cM1CDqIipa` zg{$F(xmYNi7_HzzLSG$U<+fuxvmbvKCi?_mmhQYP@%7`WjZ?1)ud z{&jiYk5ANUobvUlV&S|}@BxXM1W{hI)K-(LpcDKO!buc8FKAI7a)1hRb+F4u0F*y-ZeO6|GS0=U`tW^+Cm z$AqYqWy{$w5L^cJYg&FM-Nb&Eim)`X%nT-cGn^p>OE)3y$fli<+>wID_nxQ*uc~I! z{<%4A3)#9n4cQiJoFhMw4wf$L3#{JlVShBG_$8Vol%*3D7)r4AzE7TuhF~fcWzP-6?>) zMH#dA>0zv_9|830%c)lC1=syUcm&oCBroyZx>RG1Y2;B&kJP9}!Doa*nYD?O`$q~F zg$fV;63nx?7T`k$*06~HAWNrAbjKqK1|nGTaE)oOdQ)nQ{E(Y~?w`(If!dqX1~~_* zUw3)!lEZlYjRyR<1d14@0fa~MrYbo&l7 ztjbBWD}X7`NIo-QZB=^G{^xu~AD5puvD?XgOuHwS^Fq#hTqn`zMuTwkRXoud?7T?BCm%2jr1-{c05-+Lp|s@wopSk$ z0Bvz8;I23b$oO$cmN<<~E*3{4SIPDfU%`xaI>nhu@(26J?HTwIqa*dFCn9#>Y<2ngy%|=M`!S!u%!1weowZ!ZXY+ zi52P_huxn@JC1nLAB1j?!OeE~^CGV*6Z~H4b6P9p{Nmig>%&xvecdnEObIR>QgZ;> z_H-VK3bZ_*LnOL}r*iR-=jyVJJsds=I+~72dLz|a7t4JX?by@`7?!75O#Af&;V{Tu z{w<)Eca%S+nMUD`*&D~k4cF-w?T9amdp*li+`z)-V+z*U#a7N()0&TTfnxvA`~1B7 z;%sKJ_0_Vc-2QSjcjZ(8b(veUN6>tK@VdOEm^UDHb=4zZaCwi{FqP993x%z}`d}@V zI6q9F+pEAdnu^_y>DrGTR{si`P*9|X9DR@rd_VhZ_tg%zr`FQMl`Jwq?H48Wm_NRAuOEp2+F0&S)^;EfGx$}XYus>cT zP%ydto^E6#&r2T1gl*M~U)UO|`I5mJU%I@wa=O*{jmkkYdiy`}!W$ibuCBr7R+uNt ztHIcZP$)$}1#{+T_L@ny9BY9_j_8<2@L?9?Ci#S7JEkBJAL~atCmTlBH( zunf43KWop{iUqN;iS@mA)g!;~$jvsQn(#hw5^t4YsieFNT)3RC2j$75#g_yvmF5*5&?gmv}yjY9@?{jbS8gui=UHor%Q!yb8IvnD`qjhpEAYs=Di z8zvE*m3WVkyQqSUcZ%zK9+8cJjvBF~5Dr`aGS72p)OwTn`jRO=T=D&AF{<);_0UE0 zCT2)vk$awE&G54Xjd6agKn`z|TM%$mn@`7{eIR^c_#q!P2@(D(6T6*gyv>akSu>D} zStdHY4OwM+SzejSu+Yf;rmA{Izf|zsX2D4o{GxU4RInn3VRlzP4^RB8sRhmWJK$%B z;h84H;Uw|869OQf;_xy-dhCChNuOEzS?9RJwf4jB1Q?&KF4?1*f(*%-$>vDAc5mC# zffJ5)0J^=2nuQbLJW52X+h>IfD^yp3Ilavt-dS_-Rc4Y*f8jeH` 
zX*8a&md%6xE-D_LY){3*h^q1nL9VvXG6U++N8{1jM*s(Whhhp=^`VwGBdw8jW@l{5 z8o6@m$x2m8N{h9g(etN43#Sq|?iDuTg*?TA0)*vJpc80t$;*}&_@McGHINb)$5KxP zCm`XJi%{MUsbO_iDE9{)?DJXS(qxLe5ojtL+h8I;UyFUcG-_+Tn6yu9-4pz_-mCYn zkMV#j?WtxkL7W4ITpi)a^(o|?tXLpT27dN$eoS}MVZ3j1p`VrCGkEMiz)!l>fN+K` z{8K*q;p+GB+@0#}r{f2fel<8w#@gRqDj~wTAS8H@QYDvA{2jYyLzgd)6qPU%n~gR0 z4JtXyb1z__=~{>Ni0hqHo!4~fq@vX?jVaK#$Sj9*j^eUKF;LxrMq@RcRTD1{nhQ4NoeK@uY9Fk2VbmW$<6O-VzLf$Vsw?z6Gg5ZQNaSUct zjGKdjF0AVt!MhuoIH|{m4Y*to*Uxw&>msN{R)8W4-(*Gab-FJq;w&djAI{OqV zompH0amNj$Q|4=ejOcNn{ zg>%-S(Nt^BifLB2CUU~uLl3Q`Tg^uk_vMR7+F^f$8g7mKzF~mp_0Cw4(cm%N?2h~5%?R1a*ex)_-Ux|xP? z;h58Qe1|-Z1@W=xPl|6H3Vu#v7<{gWnlavGa>7v3aMw}_C#}4?V?xC%007|X)XFp5 zFeEKUIIT`(TdV2gXOwmkO8(({8kLTZHSxu60L_@?J}Y&ntUvZ=;mwSBuLGBZ^vVH3 zxTSCb3n78n)AaQI8v`6IC%QyCi}}Om;1d7@@o*$Dn5%gt2iJlQC$wPo04RQ-8NU1n z%kwKKj7qrBlOT+NVWs!4&?Bae>EMJP=G{l7o3Ic>_{bCTRFl4YRpjb&ZbM!R&KWNq zEgNH$YM3S-qW;~&#|d3*&}L#C5dOfACG`c3%!DY&%|r zP==nO4s0(=a)bSA7!vxaq&KT34x>Ti5NoT}9d{4c9)`SYbsc~G6r2f37*?O|Oz0~w zEPPBn&+1#k$Z4e|suLO^DZrwP25*Urt;WX{2_gTnn9LWf=P%(A!oq0qGS`pC@^QMt z%;_ufvS0dzdSrGErlhZoKZMRnqVc>|e_|xppc$k>TcAjlz+T+mV+07kWy(N{{jwV-PcR-H;fscuM<;) z;0n~0p3`L!$_rGbMw=mUgOba*UMi^@4d55$uqS%+T#(n6%j5xtH`}{_+U@~{sDaAm z@503Q0VQp3zT0i5K4cRGiyGSxB)DjHxK7Ypg^bpgZtQU5l>6kpK6o7U3dPMelZnTK z01`^9M6}%FK>8wZ)cX9ML4GjeBFmE06^j8RxNOi`ogW*=s}$;AN?Q7a3lae<6lf%^ zXL}@THNtHx?j`C?@*i+ew89<*N9SLF^=ik=@%^#9CBv1rkEHdWL`N^k37YV@n=HJt zzd|H9hvjOF&QBX&t$*%KU52G#h(J(i*-I~uvB-PhvGqF7U&NR*4H7(X@;Kq^sV8#3 z>$r!Tm5aCu;Mm#}I#H4SrEjySV?_B>9T-+*`1V~iG3-^?_F^?j*$2(e7wEKMo zHD~pTPrmmQ1VjKz@FdFzz}0Zqt}~+HyB=3$>G5T;E!-B&OarP?^Sf2H3 zI4(S%<0Dmh(HgRYS9i3XlKScuegDq+?pkYLxcMA`IB?IEf$5}zlSzDx04DE zS#1MIpvV(Q?O7VgZzMl||8RpP7^_|S@zZFJH>grN9wtq%5I ze8e?uUt8OO7#1D#j~=Hb@wp6qprAAASZTAAFXn`m;~tDFbv(Fa6N^iUBiWHqHU(V_ z0SHC=F6AkX!Q;Gpj5;*;No6Dzmtub&#=cMI&T+C^xp(k>A>4xF3IWy5@`=K_5`!FQWC{z)CS%h zdHhDjw(=z6Gdd`feha&v`d^&>e)HPRs`2rw( z{Uf0^7ld`e;|s{sBOrk(qRN1VV#cQna+-SzkRP4$J)aHls3jE?<$2ZAM{^en+b>n1E2>EaIe-o+2%KT 
z!PvEFbnq)#bFDKAkJaz0Ijn8K`-G&6Ktw|8G7mglek<%%?$s)f)I6FB+cTEnwgb;E z8Z0%+!M~uier%uY(g2Y{U zUdSze)X&pdT_14Fw_T#|bn<=c(CL{kAHp%PY1u4U1tlDo-fKQ&IA6`EY*meP6Vt+S zf?sxx63vpU&oeratv}0-tX=W03Vww6h7hGey-WIv8iz{ z(Vd`6p>w9rW%}Q$9!$|mB#s$z3b4m3NOz{o=em7k2c!UrS>4M?9 z%RVA+G_nJ@i`ox&=mS)ta`xRUXf3NV8yeP$FI8R9?>k-nj6dhvWZ!6!9Ge_oR7@%+`178Gs)xZQ5&5F)UxGRlL}q9@*^EW`rP4&?|b(K#c_W0TtP zReo%7xlG&EaBq&c%Vk@d zhp+FHT*EacK?`s~@(wzdbx1AFVfT?Q8?g)V{tC%v3l`YI_&$vnO@MWnHSwA0%P#}X z8{C0>c|EsCcCL^tZx?^ExJsgYI)S>X2EKlQ-E94bA5X1uNiMQe%+luIZjf6rHjQ3X zB^WC~$ulOX`zZ)&>JN6uO-`jphVh0k;a zkZ&ptFG)xc6Nut>mhk6aOMOpzFaL$H|Cm&t;}l!)~k+8`~9?B zoB7X&kUH4*r;?lpQUtPV(Vxvi;2Nl)g~1jEofC!cYfoUb8ufE9z#LorNdc{rE=yX* zH8SG*VPHlg9H5 z-ShJfwE$dydE4?;P^IPgYz-48* z!Mu@T-h5JlY#d%09{l{S(C@CdQYf=0@dv8_kl*35n+=Y(Dz}ZH6ejx^;2J`#%$YCW zvZgIhu@pU)>ovf8KR5JP*v2+Bp-S;9%cG8;{yHT58KYclat5LSv)FhKsu}nK{O^RH z-$KPTGN0M>-(nZi?fuow-bZq7l~vrl^^A9}I@nBeQ3ms9wEEc@ah9(wlcR3*e}G8F zC&Y;w&T$-$5)|3}jNv97t$-p(8Bozr>b9AyL!6Vei58v@mo|wg*b1hxS{!Kt;g6S! zE4OQ!W}ZSunk5vUK%8>a`|UoxFTVN`RKqle_e>CiOf}j&si+$+Q?2S^ELYx|`=*1X06W+<`-V_g3DCc|~meJ~G#wZ8kiM zzBxF;l*4IN9OHhsjY{LP14jHk=Sfzg>L4DNzb(%t`%MJT0kg{+ynAS3D-ngWy4Ecr zT_t8enYP|<#&p02D4&1(H`AxUOdq|kGxrIvLI%Z@DX_Yn{0Q-rm?*!xM;}{qOVGR+ zz%9%6#>1y{B*OQD5p_fv#V%6w$Paf&631lzOlST90HzvsL1^TQSN)BNzCIE1Hl2e2uqSjLK$3gm-10a1x1kzJrUe))?> zGHQL+R?fUXLm`}q$VQbAcLQDCGDRcG8=P$CUOyQGShH-yAkZ|$Ooi7PY&n2awEUV# z&}lM2P`5mgAN4?!x9=M!N4P%QKuLz8x+G5i$c>76)~LLs(~Se5!a_N*`QB*W!W_Lm z8B^dEoHCm_s=mZ(E430c0;vsJi;Ip zr5$yiVp7N=7Ryu)iAAR7;nD-vhd)3V)a;}DJ?G~+^!Je;_Wc2Xp5^e<-`?=_%Tc8y z(31Pz+a<6SW)+Ghx8^fA;ROT&c-O}Z>ViMR7`Q5n17YhK6E+7v0OdT`Oq_K9)W#rE2l#W@v8)BM!rblDd@5u)8_S-3v*xu|^^F@gQWGCo7?>l#DA$ zjA*;eU_rA_7}J=|?kK=UY2IiTR|&W)%Q*3Cuk45#wV(g zZibBHQ9Zswm@fNg=hG~z1*$G=D1aP5#-x|`S7syW0S-@ZyIRxA7v7j?^`p_z=EZsq z!G=}Py1FW&0W{G(qg;=}jU+Dzydj=oo{fi`^-fRDr*4=yueF0oJaGbl%Z`3j2zq@O zx%M=DC?B>L4zz&OIq48Q^4*F&z)v()J!z03pYJO`S9|PnTTDLiz90T>Sr8gW+0U3& zy*T0Q)qcQV`<4_6vuQ+EuRvwJV|TDC(~pNXhAv33=9!gc%l6 
zm$>(8dd9-{)mo!ptW<4Azi9~3lh;?UJSuu8HVO<<3=ajNX{zl#4%xBTz{lAE_JVZc z+Y$j2OpA7iPT_=}!=seBR9U;{)QT?>f=GTmB3;y(0C`l_VbcvgSk+(X2QGOc@GEW5 zMCP4Rh(|}=DF};eC7$XBTzhvp4t4+trFB)wt+l&XC~L+&_Za@(hZu7nM~WFRX{|O= z!a4~|$o*}w{!*X;8Y|K88-1rF`YmR7J?l?M&WFv{o!OM}mgEq6*KK92j5>ai{<(dB zB+`RBmFbVewz^u;<4S>QOFd)WyW8{OLsOdzY)1RAI=9E?g9wIG131`$JbmZw=+~R( z6fH}q{)@rXpqYesYh1uZ*lzg( zTr6-x60NxoSrS0tEV7rp$!q&d+RjXv5Hw6Yu+gSqJ>!*KN1c8t#Jm#{vJ-i}Q1TJW znh^ZyM8!;$hA^U1lEx1G_{)=v33S;O)^5S}d2as0rjBWdHdLdf8=f-RQ+#rt6<62B z9S;imxIS>Z|J77;0^n~D>(@W0seYQ}vx$Dv-%6$<@yeB{a*TW4_I+_5(l@%;6^iE1 zPnD74jfND-ASQjj)b}8%e`m>?S_QRsK0I^_;#NqO>PSrT+S(V$nb%7{cr#I#Xasw61DPiAf)WQaQy^gwSOkxZiEb~p1_ z#jYS>YyqspTi8&8%9S!g)2|nNdFV{~L@k~HB`*}M7~k&QX)r_(i4W}vJAMzd{ak3M z62%3OcuS%ED@q8;Zg()aW-kd6t)n?>14 zAk})QLK!D+Om~_Ze$9%tZH~Y+iPN9y7M7efGl}IO{C%?(moRY|dkOtdA$P7nqt`kArKy~(2x9J!Z=ZICz` zJWgi(BP7jCy&;uq0KBC2kusu2o6y(FYUEws`p!2@2a$OnZGKMIA9?j}wB<8u^A86^ zPq)Onal6v6K3u3bj!B8^fQgpEQ|WTAB|nQkK@5?PnvU>3XXVlO?Xm^iyr(QJWJe{h z`Vdn+v8t(sB_W5Yk>o`Q9FZOt2G#@;ZRA^^3wCgy*C_4ir@9lep>r$Q4|6-noSa+B zi7rX3F>OA;N2cb%Sg+s~qjENlz%?ahA%MiWILtR%zU$C7MAxvTJPE-t2@(aW|1Q}J z6}cov^cL>BTt#`sJL;M8=AG#(%8_Xh?6iody3f;DQ{3n5IkY`RuwPD|(P3juFY&Eu z3nWg+2ZCQ5U!_8^+=em~=7mwzA?kgQ2Q%KWB2KC|84GmJ*5BWPg%}a-((D4n_`5{C zn>wW^GfiUt0@mfw)28#Lv*j4`RHShIC;^JfzvQW_lKj1#b|uKm__=nfRU%+t0nyPf zf84I}`#E;v-8`X|mOOdtvYTw*0E^pxub`6`U1B+%%JsbUhr0A5sc-H&@U`@xZnnj7 z@XcR`CqC12pl44FEO|h<#c~>eeg2T3--k*L5rN`D0Kpxo1`&Uw5q#F%a1NQiSoV%& zwMYU3z$4SICm<)=q_}RfVY6%agHh9Rv5N$`6q{d13}0C;_My7Q*+*K|7f_WY?onPB zzCC~#I1>NmK1Sv+96)n9LOF1erC+WD5F3tBo5B)Xk9obqU~FwkuMzt-G^4hzoJTZI zJDf{HD5gZ>QPk*H+VHNL&q;oH4r*5J2Cw|I$I_B{`r+j~n7ql(Vmb81<8c03N55j& zOy_#!cOkq1f9oCXc-3`M7V8#L&!TeuGmuIrUdiVS_;hp0I~aIZzI^DaXErSO0zt+j zh1USIwV)N^O5B?6pr?l+oey5F1|V>Cw%%^oPaao$PX{=2ZWK}^P$qilaE5S3zi|2^ zKOnJ|{V*yvzb`AR1AjwVv{3Vs1qX_1RHW;6tD!g8}BR5}`hE+QwWA;oO!-P=U{APH+V$r0cOwN#u%xCddu`<(T#)*0cz zm*sYOT20Xe!kdXK#9lMrLZiC@C<;@@XX<_I965zVebOtRz7G%XB2QcHgeW)8ruV@( 
zX$3HW4lr!!ZFr0keP=O1tuO7J7R6@D%H(}WIzv}7ttwl(=l$j=yokIc;ta|WY&KU~ zB6eNXF;-%HDw?NkM5!^H)q*KddL3n?a699g$w$Bv_K(P=g|6}C6k(lyTzvF{t@2{G z1od@^b;ElpeS;T|J*lR!D!Iv8Q^AE4n=u6#sP z+3(Yzp$<>2yC*SMY`Bgx7(LF@u;k9BV0k+J;ao^~E{vh^TX z2v$Q{YMiAUiUZ!Hc8$#};Fe7HJWwE_bxpi`Uo%4D{zS37hmASDRwHYyDcGz6A^~;< zywQh|V+kS&i9DcKNXc$}WqQRhFDhkkfxM{EILAENj#Qo{GAL}hQEI;yUaQ$a|FG|1 z>W#$_o4pc{PFM-3q@Y1u6hr&C|2srf?*hS~$ncbNA8VFZWPtMivu8Q9{MK> zL|r$;TEXCijswzk)J=Z&V5ilmU$2>}rZ6{8}0J&#YUlzwy2~ed^^@ zJ_*hrZ7b&qTIyCGBV;o|(E_^oJFwczHbSB+>%?%OLdDxx{2Era)e3s!f;)&!tlZ1@O9c_uReXHe#OLwcXba^_WaI<+gWF;4~ti z|LtC~W2DacfDUy_Ce-B(YNp~QkchJ~m>qyI(IS@<>mzVUh+-2*0!?nb&9Fgm3h86AQM zg31O014gGbh;*rRDDWjlcd3MkNQZ!oMnVq1b6)3v_{9A@@9TZty3xH;Yd^_~eCpTm zr;P_U`G^H14b^%VP*0JL#2ejV;^LYt`&~ycEdP?gFEpZM&Jb1!g_!uMt z7nXLL&ZioF()j1*Q(~_o*|+g)xGysEc}u|IfA~w)y4kEK){q8!;l)VBZAts#WICSs>HpXu5B6nT7J_h+2wCs z)m2Kn;=w}pQo4Ip6iZ$p{Ou7X`7%gYob(BmQ4>Ad|6+%)&3TDj6AgBBeTM776xcaj z=3iUb?@{E&bZ*Pu{=IK!hD3h|Q#1{$fr>cP_@_iH9hy z8oB|ayWw7wwqz)8obB*Mgj}j0PTO%*`#k ziF*`b-7C7O{tKXXVzhAoLQ;di-l==bunU)^cxXXWkuS4`^Pl>>YXH-nekgd-clCv` zm{BvCM#IT@215BHc`%;y$NVMG}Kp* zm@(nD`mEzu+bk8U{tfsR`1~dXs zgj{u}3{4<^@Yj_dvAU^v%KH%@GAJLd7|gl62GPI{0qGFDdm6tc80zw}ZzJl0m5sf~ z0kb)eM_x`2$TZ*EC79gbUm>~qr-PrGT9BT&zbwW7(4~f^F3FHyCLO`#1Qaf`CE}F% zdl{K}IPhaFBz50IWsCIz#_c2zBG}3E(~1ncN2Qj|2FLPRM`76*JCjxtMN$=>gZgx- z8S+#Y)TSGd*ZRgkulZLEMP}J}SmE9qFLxLOEj`da9P&R8 zP0)N-R9bqgDv9{Adix2HU4`kJv)-T)g?GsF;=rMf zL7EB9QGKq$Xr30T$kN23VtjkOs12{>TsJCaSXE~;$&@aXJEg){a#c!rEdzDpShwbs z;o8iKXBs`a7drU;vt&4ab-DLso( z?PcDfTF#7Qe_(XvEWDPvqT6(Mgy!BkSrW^_2@YpB5n0XD-H!hNr!vl`&M(Yw6zgTG z{f8!oM;hR0WCWRKFOZSqAeAjW!=lEKK_)8MHathUJ7TeK%Aq=lk@r1KIHenS2IkH} z{abVdpJ#0dLO$~gKxaPn<)sAsxMM{XMhD-;S>ubcOLD`C$E6ocuR~e@T2IpGX%W!= z=N0L6_1oUrP!mU8(o+t_5nY`#C;-i4vrJT>E#PVqUmnvaqu*oK$RyOwoXX+IqNi04 zphT$Ui@lD}CT4WX_oIymuEL)iF<8a_39b38-jmC1#|oAzXJ&pC4u~*rvku{eMV$iz z1cLMHA9{0TzD84KDe29QAVHcEvnxeWw&v@>VWi#{{c{)Q(@5V+WpvBnhL0+ zY7bV9Oz_Y;2adSjcM2~97Y182ub-OzZ~hUGG`Zy`H7JJrJ{b>xF7Kdv%;(srrqNcq z=CVl+?Dd+x 
zU{BxGE9=UEJ*rKeSh^kxVq%Bb?Rc?2>MpO(S;)A6LK*#xjvQs*njo}0q4ZW$lIvlP z(O{mUC`U|fhDbm;mHd#4sH0Z*Mq})KZUE3UoYeAea~|Xgwf}_?19nztQWEb6P>;LJ z_1zD`0LEgDv`C)SV@|+V2*mq~!s&lqCUQ01`BDBk@s~N$!PfKrYfnWE9OOx`^*E-s z(5@zTe(#uaej_pqQaJ~LpUu80P$Bld#byo9Q zw<>b-T#1U&CZdk$27pmB1rim211c5rO6WS06Gh;ZHg2`17UBpUL^h^Cd=RLE&M>|GqVo zLp`<57PE+;ZQ_>WNhZ_XQ)jWH01bj8xA9i=M8F*Js3H9oSwIpN{48s_&3cf8odX{O zDaNeU31wM3kksWjKis{kpXAUTU>)V$EI`X-hd?cR#jjB8XcPjVL8rmqQ+VlfLd8s- z0Wqc<$+u=H=fzIuIa_3O^H9NC?Py}M1^SDWLW&>T)b)~tlhEm~@=0a0xz_ylpt-Z* z3mN(?Uu~ELRwZR5YnF~OoC^sni1uKLoTiYH`dnSRl^rN{;XT!!ez$s!45oDHPW)~b zgL;JiZ@UO(!P((@~t+3_>cEZoZ5_#y|l^U+`*H|^l4fRGNK)2SgKO`&0x#$#FrCep+Az| z5csO;OQgVzXN5aPBsIju43K{d#AEq1t#f}NX;Bpu4C^V^47JE$91-EeLw|uIJh)DB z(l(!~wA&C0tI16K1J4;5d?Y={-9d)!x2s9W?m8`}FaTmNIq>2GIci;*e3M_HVsR<~ zyk)PTwXF^ce~xbnJYS(+c|TRu4JY2gls4>u^Pr!=ta{A6i+1_xfzMO*@gUqii+_)EHlzX083v>2m#F7IAck{r$Cx5~cmAW0Dmq798y%FHqq#!EewcHJu%~xg|E?$0%UWQnJ z=Qy^bkGDgMY=<+>0=ut&*4ZFxYBF9ThOQO~+#y91qg1U6>E|`sN)-I1_LGFRvMeq; zBtF-7eSGPW4ar^ocIYXau{i~!2i{%x7d8A<=hOeQH$gnEX^V2L)L!q~79;U~qe)ZM za$I)?_ z`6o3X{God39;yNrd1{AnO|%HbOGUG3Pq@c9=>(K;sFD;64KVn{)ioU!#7+ z@*kz_twOMsTw;gFiizpT^C6ab&mrQ}NQr}smx5y<&x&6<)*QuKy9iXJ!RYrMIpF=9 zL1qTzZ&#`ng<4-+(oGk!B^uBS9eH4x4)uY_x2FS=C>AsWY^=-3iBJmWJ?rl;^K9Jc8F^qyrfGu@>5jYP#afg7szws)d4u9so7jX&L2RpV}5)(CX0$35l_f zFjxb+`2X9p9Ir^y4CBA)rFhNDY&JknL44-qggS?YHYTGDXDUIXU2TUc}hy* z#(s2<2XQHie^2Gw-V-Mhu@Kc_!hzwo^Hjc_V}F(n7% zcDo@!-R_i#-~N0`VfbtN=jBa&5_*#X1C>&K!qbM|B^-=Eqq*#Dpq_BY?bVMI|F+5d z0o12dHfqwTb&gM+B#PRRF3;u86xUG)8tVN?4Y1hRT91>!DQd1!O|lJ;tV z17gSxe?~>ytA~G;ZUcg*JnHs`P9%LoPNR9c`9AYiV@=u4rN6~*)sWMs00iw1E= z4%O6)cY>;<W+oCc)sue}SxEB6RjPsB?9`cB6oF6I*tI!crr>z_{_k43 zv`jo~g@l`%8-;u|3+rN4z;EWss3XE8Y$^F5DQ$V%**W6-XePD_ha!*0a>l9robU4V z17fpu=Z5N_$2gI7>6k>X4#^TA&cexc@DVRi<^K|D6|o@fwFDb*dXCkPI2Bp@3&{R@ z#N!~{1ew2m(w}4JFTyc<`2h@HVO#*l;ygo0(GphVrsojEfAM-qmyJ^qdo#3qwtLf{ z|AxSHus46((r@8~4BK(Ks?|TrRgqi*92rlZk3zs9MfqfW?f3b>+XwLx8h-~*2Sm(z zqZ9_(d0|O#Nf|v=6|F)N1q53G5uLLhAv!%?>BRQy2V86qOQ3GzTd8QZkmb8H;8jF$3V{ 
zt+5+jek~@xo*^$eBb4|dDY(p;IVDA2%Qxa(jMfjBa0bn76C$8hI6)x_WwPe4Hd@UA zHyn0E`;fADn%%(MrY`OA9-A;u3uew|nf#fPzk9nVvuRq+TOfJD**_IjhCG|S7(DN` zO8uFBCPmKCREWCf|CI4uPv9eJF1d^Ny?GIKai4;*ApFqyx4>tZ-nk}~v(20mUTVX( zy+`%pQ3tcwSWc1B^>E(t>l*x@!+H6gE_#`L{_v8ZAT%m}dd!C*R&vv8UBPT#Ua#h_mNj!(Dcn!9; z`Ho>sb@`$lBo$m~|I-GtU`>jlrX9rREYH}~LxT3wOf%J<&DHep5|MGLvMV| zs}*ogciEI^TNq6f zWingb&XbrDqf0WkVl)5=5rxOhSE=OqGbW7}gXLVrDmxIc7ue`u{NlQ&+TH=bgDu_y zY*$r}2+v$tBC60T@f=`{H%<))V+2Gvo0BM0F2 z1p0#_+n;5ADo-AOhem$1Ij=EsIp|co;yA?XF5=%)se48GcH!hob!I4X!p{HcRXn;6 zH3p65c}I^*h+fXag>_)I-+i8E=4BQ|84f1NoOj|OgS!qG>yYUvnMA%R2`VI+g4a;~ zh$6nsky=0*MZoXxb4gQ_!k=W25i<;jP0!F>#XX{@(*P6bWRaRF30~)6IN#UBH{p}#csl#8RPsy0>R(2A56BEknb|7;#E`s zt48C~s?x8drxKE)vx7t2Yq)OGpsWd-sxi4yU00fpg3(x8zVF!s5~~!#xqUYna}Af= zGWNIwnF?n1`Iz5sk2CP*UU`UkI>h30h>|BtXdL0`5C_gThF%*g8;o*IDZcn3H{Rwa z2FjOs%3=&_CRjMdas@DKHmcC`kupR{vdfZwl;*yR>CN*WH^C3Wxsdm=%nUD9ZW!mo zQZDW_U(WEpw?zA-|E)Tm4Xu$+pKkUK9mga7zD#MuvW*K2n5mLG#Uk;+SYkDj$_zyTq-?iQkAzTO-hkXf9ij1fhq{BtId*=CX z!&6@!B><$Q$nSE7)^U%52haM%>NQW8rq`AJq6B0jhPWHpJR7tp9u{oNt36^+O|$xP zF%trR^J$vu5iXNij!{@MRY39)Q)lKT4I z?pJ;wl2WE6hYA&tj>icNHz z%O42-W>;iJoiuA~NZd2rtAiapRwSo7BH!*VqFB}YTU-^tDnkU(Iw0NTYz3;El(2;w zy(~r*4Z^mK&9h$7Jvy!Q1IjG!2|)-v32_zvsnRqGu?dyz#`x77qqQB(#R(8bI>VDZ z8Hs14j0Cy8fE*Jq%qmoc;Qa@Z35VIOwCwETm=v5Rq1fSU9&?xLvyNu{9(-j$E*j{) zkUtXAOS>Nf361qv7JVvsGkAlC>xhIn0D401|16gU;{6I|Pk-Tw&cT+|OdkCEEjjR( z2vRk}wrA36j(tGD=%d<6dxtcS z{spHgoI>khlj#D+D&}bP@xb-c*QYxuh9KLlO%zZP5+cu)_{h^G>^GkuU(c_bTwg7c z4v+&h>j@iUFti5!NNW8kPz4_U89=gOE0_+H3T>JSCwJ^wp0crqQ`U(DiDNzKt9t6D z1&h5y{y36*Z6(7bWishz3dD63(PWXS^_Ee{w6TEudcF)7`sD*e6#Yr94SlcEW?5#S z`}YSI-ShW60MpOkwIx_f6ZTO&tqIPDJ#W%RBVHJjOFh7#$s^Qhy6Ayp$5Z}OUfT8B zD~)bgO}H64`2y+5*-^SfQ;WbDtI1`d=Gu83?l5e_X6%WZf%lyTq0KPpZ@2MdGK}4h z3R(LScQ1>huClK0?hC4@H`aidOEO2u)0LTThl%vx$aFZZq4x+(c;o+NXY=tv| zR7k#{P3AZ@qcUIbJQ`VY6LBuL^vaAN*3lFw*)V~evLN+?3m6hYekG@BjdJkNb^@M^ zvb(t&G(^6f_$$-w&AM+hT6B9K8`FEHygKR4-bxbXfMFA-1xR^)BhSXB;o>dl{5X=& zzdu$bP2APuN; 
zIU)xL5E3tYALRXqe0wA^wdw>!+|=3^Y2SaN3giRuV}}{~?W!M6oseT6;98cCj5P7k z4jp{U{`1jRxiJvv@LyRZ#rJq^322)vN1#03c>ac%ho-d#$x64<&Jy8ry>7_;F;ln6 zBqUcie3?#x*`^PNeBeFDiU*sl^^dp&sl!P(J-uUH1~d5-e=l zZG(;$YnH`AJijI4{OB*@p#y9XuJz0q%d&$d$dRqW@u(g`Q;a{Z5PxD3jn-iiQe1{oVxY zlg$BZyg9lqPbz@?Aj$YCg7(6!OJL`mp4FYaL+Of<;2p4+4E??+b3P+v!Z7h4U-*JU_>UL-J5o^Kj3@aXNkAjOvL;_yz3^UOqkDukv>#r_i`^#qi>RHkcZ8bvFw;H@#`wFRlBFDM6$?pLZp#s(y zbs3<}mr4{vKKXh)IZRp+0#3s6;h|a1?bSHDU9?Pf0|rRa&CX0LKazmCK&-6{k&DtP z&d%|t;M4T%_IrWSBJzxr_y<3N-Z!Cf9~Rjp|GCx9IIz;sX$KHw)Guhp8CjGoS1{>p zm~afE%&k45V1SQJYYhUn!mDX71YTj<|7If}equA`aO^a5QbdA5eC-5zUAo zL7Y5sAJQ0PGO(lVVFd1^|aYJhA=?7bhew=$T55g%0)IErm9N=;#M<-{80-@*pV4{_x&mIf& zYH_Ge9BZ*VGWaPziDq7l0j-MAN-qvUO*tsi;ta{a&=sW=3dH z`s}sb3(;O~y(;_FNx&l6UoTddf4(0$j-NglD;{p0BNk=th*9PiXreF3;c$NkMRoh6 zY5o(d5Fh!RBV=4ENGZeyl(!sV{&9$AAbr2!s+I{)Gk!XGMT5OIu^c@2_*$nfs+Rnv zKu;d>08~))==;{VnOeB@_>3hbIZwLSnF@iFlVm4ot;^g&kCmFhz(Aqiv3tQGqaZ)l z=dR&j<8MO zSKi}{Sn^E@2RDYsN)t$T7!P2SCpfQ`Xu~$Ptod4whxNjn3_+eaKQ!8uO2a!c2K^?sfRT!uz zmS-}2>pRhgIHFwtOtl#c)I1_~duVq}PN&8lPKwaESjU3|k&2bS^%xB3cWOavZTVV51Yu?Ru3H=1x z25I6v7c&CGDz|EQQau+KraZd>ua1+O#!Lyd!@! 
zVb{s_-=T|@_($e~tl#Dgh=NFQXXgz5ECxD{#BQTRm}?sI<@yZaBk>RerdxryUJg2z z<4*=2l0!?kSn?~gdU526z}ziJ;!b-OqeUH$=H3`N|_a9djYB1(M|_7O`736r;&b_4yzX*6gz!5{3_USaHSDm@j`!GmM~tHN9u2I zLP5G1zrXHfPN(}}l^S6V4h>-FP=OO`F0+>KpV%(+L4qDkA`|mvsEm0`V0+WHyjcK6?)@a$ zuN~{_kl+xY#553|$;AB{{3r?mB=vNvJ)&l{b@^6%yW^(p&zp1?ym*}$P5y9E%>7#0 zg2bNcDUbzFDrSluwh@bmT_(=$_#$X}Jq;iC4LdWx=VhSE`Fm!-PU*K@^PDO-j&ejG zvBMxYXf?cR{h`D&^k9%Xy6xWcy#zD)N0gUn!v=t}Rw8TZkNVtPaVGzLy3Zos9;q{m zRzRu9KL=4-tMC4=tN1-Wh(}!3}A-dvt)W?ztrwhIc zHtF)k*JUq6i{oW53Ix96J#i;lCR=&Z@6^V@o4(cTI-?P5>rNH|`6a zH;?KgU7PbIY9F&8wCL=>DFKdA(C=$(VdySV0xwU0h$pSkE|h)kZ*>Pu3PKZ4vgx_p z6CKkImDyPZN}-7s?TpB2GtN&oMF}2IYxrnpl@1;xu4Ta z-NJ{IVQY`T|1u;~8Y_mX%l8YB3O0uas{YYme12)D5-{#tHBK-l2B!Oy!RoK{6|w_V zk5~)BGfBPE7Q=&X16dGyChoBkd;%CJX2eL_E`>Js%@Uow`cdtUDBOiS(FkUxRHSQ% zl*faXE3ZCAhCc^You{K*ssNKWgzz^Pg4D;2V)Z+|9hN4`*E&hlbT!k&#{SXZ$efn6 zRM`{ARIJ~_b%JlxtP|DmFoYF3!~ivTsET z25`>DkQ}^>BVDX!PM)|Nr>e;V7sT%*Cjx}!~2;N63+Pk zQmprDhyi*Zg2w*ayEcw&El}d^9$1XYb#l%@tDpsny_VO{hZdT5L&x7u_nf9J#0xwU z^8IfwJ%Kgn<1~>T?;fjhUz!Xo-%t2q^Am&(kY|!6qWY!qe;HXQK152$V6DSFm!v#9 zWc}KvFJQ$(P+%aQ1zsvh#i7@LJW6m_!dD@P|KjO@^E42mP%Lvhccv-2eI|m^I*sz^ zL+nm1K^58LsqVLb{1vZt>iF0EX*%-wiOP?m>OF`=<`Mtw?RTkFjy^xhUwHAoipTiPr28E--K|*=p7?LwYPVCcC^~qI`@m@d-2Dt_WXjR zYgXuWlIeIr(%q;wXG#T&?T-`lL5^Y%AZ0;Rx5T8)&=x0bE>u@qyD*6go!+$SCc2j* zCK{4YJnB${?#v>1UJfKSZ+5zhXIzUOiPNMP4pG~NQeCa_QQ2&bNRes|E zP-^gZQ!uPg0T%|B0%sqgc)PFxE)xJ-n#&q0RspHcLm^bg{nzvWkPg7+l=|XK0qsG` zpiVZ0zkzeS4NxjtlO_DJ(Sa=RAaQz-#)tXBNVJ7{VaIihIW3%LW?K*Us6J-(lcF25wq0>BiZYj^vr_ zxRQQqNT|Lw{KE(1eN#o&(t(?wLwOHVFr3}hn}5;6ZFNIM>%v0rYXwn~j^Ann5bO>m8Zwub}e`}A8gcXNM%C_Lz@X8j+Rp1lMRAPGWE zr*5gDr-0)K&qklAY&y+_zx+B3)=XOKFI5%e8*(&xzh&e4=q<#;F$qA_i?oT}tojQPE9Iwke-GFl`_eUDE0-m=KU zhR#q|M-8asiEN?tO6u3f)KJ!K;Y<__eYSkv z-)?x6kpD<>&V}}GeQftb&i01yl@Q0}XL=uBG8jFA5$?EB+3jTn-MrDx1V*Tl;d3*D z$q~Bn3?;IJwfyR23iwbxFufwbVcq+ae;VN`D*sgFhq_>g$*1t(Ha=v`4P@%il!LBZ zV28-x)p+%n^8`@~9fBcwlv^@79#jHRn;ljAd_ZuQWQB 
zvQ1s>ki(KH$I#s$_C3e@nsIZ(!Y63=x%}Yqr0q3&I4|SalW#<#9boY0_mhxbi}b=jKM#LT|i)c1F`L#al^?uMXD~EG)%`@L%6a!(J1WdtDP!9kII*nAY3+L4a@2M z$J(;;osMz>h9vAOYrWC#_mA6?k`f|L5AU~0ec}((WV1fCKs6pleYNq1aE(F+W?HYO zsiqigG z=@RX8f^{NYMs1al|9C5B&F}?|{wN_d2H*I}@1<8a4Mlx8a;$uzlrq)~l7&CY2wNe!#5*@bqNzz&=0)1y)arj|(zqw&%9rxz_Rl&no93?QkufxhmVrwM?ncq^3FM*dTE$A?bM5pB+M2Jd6dy1f#9rqbgr?Z&iyk-61Nk&ieaN5Qxk6b z_j~cK9FFqz;;BvqJx@vq;LBhHR_^<|HYmh0o4$kb{fT}`K+?~uCY6qWZRlcSElPp zK*T?M32%0{DO)R%?9^t&t*CqiDav=KW0ut9iAS!$Nfji#>(M8_T$8L zKLWoHv-rJ9t3_vqP#PQeZf!o@jZ_krpU-nWYraS_2}WCfQ0FOpCf9c5TsT%ZLs@@2 ztgYNAfJJJeD7JlxPWvQl*p@N*aHAu`oUc&0i^!2*+~si*&L;jPsX^IQ=3kv(x{+=xMer z(8nxXDEPlA-^PopzNeMpBgow)dNl3dw@XSs{pv0TgEci!=V*l0alBO`Q8sxm9#HQ7 z;3-*97Qv^C4*)&f5i$~AwmDcUP0Twbb_$p)e^niLV-$vd;yu-|D;WxEJvHGW7YW^d zPd!R^=spPpMV~S)Z0t4?z0&<9XRvCE-V%G?%{7(0nHj~JZb2y_{t0S@PFQuel=1Bl zP_Cq*j3!w*eehhYpr5J9++_hORnhmGI)O4;L$?O(l<=LMe^2Io);nOhYECi4*lchfQgfiYo#XJezKKheOKo02X61{PTvZMxx z_sTIeYU6t;da%Cic}FcN2ij|JYT7b(z~t%P32j3zWp0d%gM;iZd=Jl*e{|K=|Tr&OZb|`#ZJ)y_)gH z9Tsa}lI#s`Ywj(-6$X4SULumMGpLH$)83$}Y4W57e>8viyEOM|T!;(oEqz}JOE?}g z(^2hX>l`I1;j-ey&f#bfOXZy^lV3;zsz-* ze?Zb;0nNg$;EUIDOA z+Nhef_9BFmoC|uTzTg~HQV-w|Zpok~N4tFp)C_jVI6XiM{HMao-aB=11y$8; zHO4-!R9Sm$Q%hW8FH9%qGcy=O=foNw`DshggOFNqiy=K(V)wt7%+Y}xGFGxH#Kza` z;ydJc5QZ?w!+1^(;eyBcGUZ=eGBow5H3wi1N#&V>JjdHU5kJ0;q0yDE4?JKRwV^^U z)v4gW5}xf9;U__y3my{JC0^UtIV_u@s;u0vQL>h!tUQsMbMjC%{jE%&Iih(}L4lbJ zqUO{9lcV43ffneR5^$oj8j^ot$M+w9-WK?9yr<6gGvW1_6Xj88`Bl^(x$MYFH%mm8 zObw4-s~RW+g^0*l8Suq%vFdibCGS1##0icHmqOeEt4&5j><$1oUDY0oHp0Jy&q$ zRJQMLIRWT^>)vfl$ah75?&^DZq0{x9_W|E8vcnb?>)ivsYl|N{#0ITdm6!`O`5Mak za_kMB%P|JT8%~cPaioH?lO^fXC=L7!KF*a0^K_g{viXHv9YR_R)z#kp_eDYSRmeC> zi5!eH?2x=gt``G^tGSLBZ^VsDK6*0*_(OnjJYkz)*LKs9Ta<0wqSzgIQG6rLeU+h; zex{=|2B%cu7kArdjPS9^o~x0@|F*$+!|3VuTB2*?!#T`P^z>u!sJfZABNZeT2g2{~ zbNRH?pF>iSd_+r}10bX2rR*rnD5Wrr+N3N^Wtg2ir#xE{eWc+)}=!aWbZK>{^XRr>-*iifY%BF)oi zI7>w#&{d;L)zP0?tH5d;4@W?C~Ba1&y#3{-7Ff&uOSHpda)u#$uHAD5?FG$VVqG^ 
zwGGz=Oiv|>ez?9HEfqHRar;X0Qx`Vr%+>yf9bBc|+V>BKl{N0Hu)`@45C5zXXlz10 z|E3fYIHke0$I#jA5|*k9hbr82;S{8)oBfDyub%Y$8CO$Hh6G#gFeH5J`dBgaCHFPSd_#SEbVMZOhXa#>q_$)C zLD84Z2C4*f-Ye8%&4)+#Wt@QcLp~~+U&#`Tti=!6Y3m0Q1nyw*Uq@%#=Wg=UeROLH zmuMGf@qVK1M?Z8BuELR@yKK^{PkY~N2_(6af$?ti(mYa2&C^`~WPkBI_q>mlPkU@j z%1m=QPc1^J$GXnOy+`$3MR7U;j!t2zPKi%OeDRK}Uy5Qs)N#7s{-(-}d3OBdw6Al+ z>`)1M>~j0OZd+X882_(fa@?x}+1DD@nVsWIz|^w=2Tr@Q329HaB_K;D{j;0VywGx) zJ~LVooYWw!Pc||;;>k$oA^J@6vX0YXOoM#r&m-)8K}X!U5Q|&U*6sK>sPPIruV-* zWyM;H(U7OU3|D;6WFV}92YSj$sqUdAws)pYn*q|;F2*+HSkSZyn%gs|3 zhzR!8<5OCZs}CT`qmME&tV?Sgyz?}(%1t&UuT@{GkhOK{nSg}TU_Z7qHF9-l;dHJ( zh~R^kM-4!Y$Lnnnj(BE*dMYJ}@%=+4`Iw%$0dR_!^!FXN*2od`wgIsqntq$_xt^cN-h*v8f;V_4;Q_nBouJ z=|l>>bFUc#!^OiV2Kh9>BN_f%F!>=&X&af2fd{_5+*7^p=%ZvU?D(Lw{esr1?Zu06 z;pf*vZn{c%RZ}ysHS4{zk`F=d>7tpU{*JCIu5F1ill8BdS=>JDd@0UjHdkfo0Hl|R zdT%Gr|E$DN{1UVnpnAd6VM@ytWECA9d7N^0nWBPxVI|?tvP$_#Ozs33?(&S%6^%zZ zl*;fBf$>X5@fEl>bf87&(!2m`RTfTZclEI!BmAU`V0-R5dKi)ni zVFYSQ5pC6EucS5Fdq;PbOhl+1v;Zh~!x+SS*gpXmPVkn#TlGc*Zd>@3L6Jxzm9u*w z+iib=ex6uI;J^ODeY{YBaVGP~6-!O`c21-)ug&v!E9k)I3ph5#2mJ1lkIi0Un-!6^ z-n|~$i=fkH5hjuRvlP!`+HExw0WHMa% zr#~y6%6MR&4mXx`KeskIPZ>vvDmmW3KaY}1G+A;ZEg!io7@Vk;CR@Ro*15*yKh*6n zN&hu{&!F%lk`vp*v56-Avm&X=bVXQbBa}pq4^R9qmqb>$Lf_nAmC*T_(zDOxxGfl0 zEWaA~I{Ew(l=918?aMom)S&m%0)1Y&H|O#}1s(yOf|37yA-rvu3M&-)MLBU{vsdXc zS`{ZZV%Z$t`#rrMT# z0pG;u@Ts*5Hl&08#Ol!k|9LUl(N%;9V@rl*hw!if5p)q}!#yIx&O!70J7*o|Ps$VX zA?2~C@h1v0%U`=>(f!a}uBqH@_D&k>PFG;}j=Db_C~}>-^So$^$}gs?WC_jv)93d0 zGa^4b#jEVTYCH3C+a05!(R075yK~>W^P$E2UB%o_-i64F&9z4-)&Du5e$xiVn4a_Z z{^>-PnikmY`Q8A0@7FjHR_d=LxID&RmS(0q#4qr0?}yG9M!zvg)07yJ-!8+e1G((8 z0VczbS!XGwt@(+aszj{d4}Fnez-(OnV^~2w3m=$bV;B#Qv1kLaeUy zw+_?aT4$IGUsX_ricz_2Kue0TJd5j_`I;wND@6&yaet?|m|vVUSG-8uep@J#*0g_H zb+Z_}%Ini1Kbp@7TajG1AW>uUpTvQLPwex+O1)&C|DY75>MC}8Z(x)FO~9owE2lJs z!cEmUGtij$UzQE^rqc?)1Ov?s_h9(*{{xCZb-$#DrO!D$uBTKf9?>Gtqd@d=2cm!! 
z%0hZqM9(;+k^wbFE2T`icn%{*S&1Z48WfUD4&x-ULZR9GNO4e1qBWv6jA2fb><b!O%YI&w(gAVirK;_v29WyKQZdP%Psp(2V9Ke=?0LcIKU;p)w$iGwA0YDjnfB1)g z_@O%iF+TxLyyiaO%{tSu^s{xJt#g4xS&7>59(8ysyWqK;Zw^Q0gfemTJMd{Nwg1PP z_&BHFO0>Jo13sejm1cQ{?2gj0%7FS_=P^!@>`EQS0`_=cC4z`M2ae4k0|EiA_bl?H zSpfT|X+fLHjl&wCcaB`$)~rQ)O4(ik0(REE5%-~`Zfd(dNv%w?YWhs+Sfu4wKs~Kqp^w+ zdI!zv0Q_d}l<()Em+cqCJFYig&XCVtZ!Ozf$bqagpT7lh@hy4@h^j`RWBk6wW9DJ} z=u*ar4(1qf{(WwoXm9;yEAw{{=9=F6b71X%(-`;N_rCg@QGoKIOw`vBJm85ii@=mt z6(N?vptLv}l(Mw12nZ$5!BNZ4@c_a^>L_IqCrW`cL|KT&a%wm&3>IY}t720#fJ1>Q z4G?V>4Rb$7NT!BDV)#TCMOY{h&Wn3R)679?+IKZBFuuC9QD#NSC>_9xGGfR%PmF+w z_AZkB%~;|+-7i3a$|Yr7fJnS6ayUC66`AGiFq~w8!!HwAD|3-@pm43jXUNGK@WuI% zo!wm*paqPKh4VxXjR*J;j1aK}Y{*qzgJ!f{R9}Xd&Y)Kqd)MmHcI)7|-c1(>wwNOj zUa1qgott()yOG5-vHo7mrdBrkSo?zQ>c zn$yXGOl4s>oyx3SBf4&*ylC}N*SRt$fIG(tpU{+x{HuXfKN3z^#_o zD{psx4V%d^k`V&jT~`^GaaylrdMA&@iB~FX<3Pd+WEx-E7gu?0nGOya?7)fQ6bO{g z^4pp5s|~)j6!?TSI4K;t+R^6((n&Ht=uI!l{0M-07YFJn!#L#{&l*44%RwCsV%6OhV zK|xX?l!o#8p=>BVyNWhE-ih?pmZ+}R(D#o3AL-;O!n6aHxw2!sJ{anv<KC-9* zyndsxG581U<@FJYuGeS8K;KLP-0!P0Bc@HPJsiC8~Soy1m@63@PQ+v z?6ZstTURDy9!%_g?3#C?mFL&mxy%Y1Dhq4g9Fd~~MacmIVru`nj% zF&}(s?8@HlC4?2`-rQA|b8gVX8qtktOV_NlgAZT;yl5`Q<_B-9`nctg_g-}2(8zKs zhZGRN$JVV39VeKxYdzjsnn$$n`y86bsP@h7YVV9q!=kVLPch5ZP#hEzFu{lsT8e=Z z5@Df;MIY4db_PUcCcYD00+=YH4lAQ z5JowLZ0sB|21SV(h3PZONsoT-=};|EIEr3mrtWNv5JfA?^zKIT%>>~KPPeiNV1d!) zc#tFAa{&0xdIC!EyFF=uBtS6BBz7ee3_0*6n(TT&p^e96zVaCIB5?6<|MqV`vY8A# zV9A*QTmU1_C-+6ojTL|@((YO0>_7hFKYoA|MqO3~SUHR0TNZTC`H2kE8G;i)C16G; z(I@2GnCPKfK>F%3uhD7r0;1_PfX>|L4PcIbqHEkkhgHT?g9GbV3uF;yb^O3BkW2@O z8eb`$x;pmtI%5`57hvLa(Ai4M@DK-;eg^u0TkD7~1h|wR*8Pb-1h6>+%4lRbIMs9| z9tcEQCr%B10v_q+U4{m2=-_|)r+@kp+=`DlG58Q&lm-V}J6YcM_>+UJbdE!D<(!Y! 
z6W@@R*?n!znps;87TN&Xvs)Y58he!l94rpG%oIN4ObEJ>5psh^JkR`UFN|(&Huo&o z)CRl%qqXOFKeXJ)TqMZ7j3&?iQa- zXQ57|l?n>Hcs{3~vQ*!>$GbX*%37ig$6jzoFf~Oecp4sQF13|5R=Vy=aKw9*;VBK4skXn&y>zwUsdeO}!$b5JM-sMrm$u)h z(cT%F`{?y8ucmA(5uiL6T%d>(P=tkZAv!C%$9OaD{>j#KfK)x;14Big2r~uBn7Wt3 zrLY)OP7?sKMq7$Z&rFRmQZ^Jukwi+P2w>5r4(Q_uQ6R=6iYHx9ftoMlE*(!vW&|i> z4v%qWh-Zu2Z^<0*N&hqo;7er|0FuZaUgy97DWbHqBOkc{cAS|YLda;6siLE6=EJkw zU!!guU}Io`CHHlLIA=zw1oFN47m6O8NJnlhWl%E76UfgG6^``f{w?%@b_@bt7( zXvE1+Z&(ARdT5K61^jA|!!c#|*kB;MGit7Y`03cR?98hI{ z)>-*t!I^kyI+$b)-}=sVv#s_O8NgctK^z%$g&zcjW|``oOL_p!uh1br>fA8v{m=jW z&p*cMZ~P;`ihnrnbT>J&XE%9&qtZ9Oib4~;ppYp%(Nl+d0Zb{FIZ;9;R%Ki2-PS#zOf!nRxBh|IOLtJ^fFn0RCsl+moPt#YSv= zDY{$$0l!z01?-DplF3Tab|8<;RMIJ8?->lPj0-v8?6j9oK!V(8M@sfw)23VRb*+*m z*8%k8sl#~b5YER0UY!voitEr#KsIo(1M%cl=>{hS0G5?)FHI$PKp>FH*`Q?;#wzXZHyAgl5;P7|klJvsm_*bhM2{vIW4_}RMp%-GiP zTL)d#j0caAC7G@q8#dH7^u6L#=L|&m#;n0m4L<}fJ+sCR_@1u9^Rh2xhjtExHG&zeM>28GW_shH z>o}0;h~@=hcNwn9KUe18^_(SmL#7-r;deB|v)+gH0xp8WWpo6Q@b1jEKO^hE`@QcB z&F{X#PtK2E0saJ#k`px$`Jn`BvD{`!*H+6<2~nbylE~dr4Q|S=x?~Eaax;Q_7H!Gc zQ)ueH0jGNFx7m?0_G9cdEuioiZ;=q=XN+>VQuqv&xiY9ebG>nT22e!tz01y?WU%+` zPf-9*ax7#U@v=QGB8&joELW&(6gUNZTz^zTg4fB6Qc%yv?=mF-UhS0w{N4|Mi|E(d zmkv1E(?+3_D%0mi~BlC)tbSR(1*m)&jcd zW{wn~Ci=^{p(~853=5iZh`e()E~s5BdY~VDfu^1fMA3(Be&=L(ZhJlE9+O*Hps5Sd z6`-vhGKY;L;WMBIuy!U!n}@G-jXGj9&jf;$2;bY&#a-j$PUPS zp{bH&0FZ7pMowiLz^#k*LpwUN%(|=(yCMUi;Y5yY^9j5g`wC{GW%frf%l<3f^^{23 zux*^y$Gx|r>6~_@a58=NhRSAkCK;RL9djRO&m@f@`bl#ykr&e6A+I-vsLk;>+KtoHN}5uYTVi1cmTfx(!RGQ)~bx9 zYuG$IhyP?$WpwBS&tykAz1Hl^KJT1I`awoSnV0#JQ@o6~=mX=CwczA%+;7?1>5QU( z8KisU0qyWFhX(D9NlCpdO*a0D<1+biCu?Xf&@1DqbX}<*{uC4-V;pIi2an({yk(CT zc|o7bWbbj-o-!`K;LUG`<`-PF&m1yiN3o0OQi@6i2;;7Hl)zF}jG5m+Kt1jmw6iE# z*&0eJuydxET^5C~REV@~!U0FP}rM~n7C1UBh`4&w{JR{L*!mBrq5`=Z)T_HYyl%{KsV!A{I)v)bJFG3-0)+2vw;2%uxwvb z*(}d97XKH-YP~t&hq4z5J zG)`GNpUsxfIsWL%VIj*h6>JSh#q(_zC)dW0-`bZ(w%IVho&B>rcOsXR;xaVJzO|oSj{xjazF(_Tm?GFn|0egH?$rfRD$mg%WQL z;Zg9=IPtCQ8a*+~#I9P592%vv=;xXAhG4AqM(6nxKIyZGSMbDanlb}0uCp!z0M?Rj 
zBxg#|=qi~zC1%!}9N+;NVAq?!K*RfB@7t^Tc4)40{WrWWV@L6d(f|umql9ZM9~0G} zd_|The#*r)l`~K}m4S&q?0Va;sp(xbc2z8tEd|bqQY1D7b|yh>G|R4VOejf_5z*{94dB$~=}JRg$7lkt zBALL}u51$TccQx}tzZU+0^kE&fojG!XNn^MpaHP}MbYX?ugZ|PUXa6BJ88WLFVamQ zWDKq~7tcO&yxM02nAztet3x(rjApq_nOwi!Ltg=@y25cnWS~1dvEYVli+tx~_)X^L zIXuRll#w&ogy@co}Xa&dt zurf1kRHkFgbhKv(a0h(7lm4Y+MYv_Uj1{i{dSz+Q-X0qfdd^1suFCp9x?cdjYW~og zaF)u7v<`qYCkwvjd+0<4$$;tO5r$RmB?|EeL{kEQ{K&pFumW9@>NeGN|l@Az2>o@^3( zjozMbuClu_TWqdOsV=4XdUt53@!5;R36lLiI)i7$4O6!2;7T91udn?$)@I#jSP-VQ zCWF?APQVj%kH4)Cn$M1fIo*}EdKP+WErwi0mluP(KX7h~kMvtWMDph1Q5j zk*qqhh-grvQt}<7wQeA;v4Ji~l&2c;vsRuA1clGoi%OXfB~GzYe#!{0v}}CE%1;G6 zLC@7^pQXdEKIeVi*P}qdn_=d3$dUk}|^ z%yfFT(x;pn?-Z>iPu}IZm6TP2TFYX9#kh3L0wRs8O|QG4LXqm-CV0>GZa_-}lT3>C z(m`~Mmct_col@RLYVr%TkXwNjpi?v$0H>GIZ8-!}_auY9tJ`AS@R9Q6y>+odUwf76 zC2*wyYx0HKIEo{$j(mVij*XJ{dy0~ph-^FY<^vuRvp+xVQ-!9H+; z?e*bc`Qfzq8{N!>PPOmLJ@`g;!`i(9sO1o#2Y#z*MU868YLvmjt3ES6&KzFl5aT^_ z<~Uee&M!XUhzQQ1v63i2-uwz~#W&ebPBmL--zkU0^U0LC+m|X>Sb%Rjq0_Nx&lZQu z*v!jVtp}&f_cfUCe1BU@Ws$fuS6tiH|sOFa;TwY4404d}#ih8cwY?+tzaz;Q4x%>GMbQc4$7I=D*AvXDU|Y z0|27H>)c}^l!Pd%&R-OQ5(oxy_LujZ4kM$~izv>HJ-3{hKt-Uz^C%@HA|g2ybthNP zgS6%|iw-GGF@ANdXX#t_FwRo(KoTWFv6~yEZxifU_ndndJJ2*8m?H2DSq%z0#dytX z?cPU$nzOOy#1#>LM|tjDp|5#;PEDtZb8K{*0^os9vIaolpMU+=fBo?ruqbl70`xXC zt0UJigY%AeI$xsA(ypu9R3O^38DUuj86jZOxX2A74n!)2k@2l@$1bA~Z%=2Z@+&VJOo`CeKET2%`ei+9S5m?s{zw(aYxywKp&-|^KK-}3O&B;IxJf}h= z-~F6drGTC(lCM0AEOHL~uw6>r%)@WruN4J#_~M+IA08kZn*PWjW$Vd3n@g6-9UkBW zdLQ0I7c{8>k1?PxesrB|j33?cI3PI*=tc+86X@wjKt_N957AqmFHk|Rv>r9`FlN~c z^suj|v#!=`uX)zF4Q{X}$k^cpw8h8P$yi#;nVFe;u*&|W2Rz%n%c=?X&?lT6W9A&u zg&bGo$7`@hOM@gRUXq}>F-X(2C^8C z=wTfgXVGNJ56#Ub2ZuslGdPoOjEX3ss9ur8K6N8r(%07{WufT|7TIhnh8SVvKRdtH1FoO>51Q5J#Y zX>WoC5;89FfcLu(tMOSjrWao^9 zuCvbGpN<0zjn{nXU1MZJfOtTY!^XaF;yMS({p8AbXoO#sf7MFbcN}fk;Z?yqYl2S3 z#71%|>_hclk$6tw)xcgQo~j|prDrH{s?iM)Y&|+N4Xw#On~NW0S=W**K;GD_g?X7T z`C=#7$ay&L%z7Dv@!+q5Wq6SDWDR6uIaKJ3E&+J9jLqh};9-so2SzrAEh5KkmCke= zNjd_L<2RW|*9e-nmy46ao*P4ZW6RLAUUZ5McbfOswl`tn``s 
z>k)Z<^*wKg=0y|@Oc2=GJz}hWFc#)J+T8l0VZ7`yfqBNPvo2QPV}2<^Q6d`_%i!$l zpsUHHXelm{o65Fot2+1Bq`)bmN=MpMsLoV+TBc+=Gq*rF!=*+V&=D!o&E~AkfihN- zq0X3MVl0dcIFqKn+Uab9mFC)kYGqw0e-TkN!4x7IW-u(QvE<~`;&tcn#fwFZ03Kte zI0J{2I-{jt9vA^utw$SYe>La(WsLtDJg7CsBe#Z9?EnBE07*naQ~_Q*0I-l(Wj=U~ z0adah1G(;tD*&KO)K8#pf`v({3@5%7MV6tV3)+{G1JnIEKxYQmI0QmuT>vD&z_W~7 zDF;9!NFhL?)Rlf9t8_xGP=SUve#h5EUwb~i04$)**||5bV3B0BXOU0(fb-Sj0O-*4^-Rl9<-^WBV8@>4?rrF1pw2J>BbpZrrUs7y1@0aKqB6PICQQcip|l! zx49gh0a0rqiVr}u5k9B4w05SGUGF>NZ8JLk&xx=n(Z+UJ0pomCr%3d|cbr{+E0y&8 zvXALbG_O6j-~N#gjxl)=C3hb>1Gu$gPOf~8?&t|vXY0^jnWOLVr=Sy?$q@v)$&&1u z033`Uv&9Z^y3Xv7G57w?eB;#M4;2yCpR)uc8<$`Pd&lvT8R9T-JPTmWl1lQWbWG+) zpw2VVWL6*~r)34(3rCNz=k1ds@AF(Kc*uBj`s)107I8AjsM2vXz>D+{zUm-tv^8&X z%}L^9;xA5-XURS_r;&_($$wjx^$h7yqus;jIjgS%5mlnjL+^2Cv1 zSms$BJ0fO;MMOFrkkPY-XPwRhEw1MbF`E9a6Af^QXDZ(@W_(ii1fXD8DL23I3`5{w z&T?P@y;sHR{|pZLmC=y~XJEXu!~fhXD$Teu&_IYam`e2wc)pwg-w%9>0@JC*!vKmk zC$o$wJ_E8ujqM!)mS;(d^{zW0-G<+1oA#r@c^CPvJ@4wkxG!Eh8cBF!Mne77+JR{$M{0-yjy&q~kkS)l2_?1Em_r888F z0sZ{6_ouxGJ|p|>5e#5<-cFmi{hy8m@?=+{LApgWAIQ>J01%gHFkiY=Rsr~>6Z|c6 zWBi5OCFn}`a&GC6%8$JdAoWi6LW$2UdvOna1N^d60K9T6AQx{O z1-Sy9*(LfAEo2Nh@SL(TinBtX5+$Ft4nFg44hf2SmBGjKr>mvn-ASKk12@iRAp?O>?bndSj>* znnRr(Yj29cB~F|=BUdAdcGe{E__sj2J#xOdzb82hpcMiVIG`N_RPE{ z4ULggr<9BSV7uwR_E7nM@1+j~D6Ip2gb5n^3<6x`$2C~t|Hf}F1!+0nf|mja*3Vec zS8&08zTN&_&!|JA@-t(l_ttFySI7A~UiWrr9$ZySnqeYT1Py4coTG>!;UL5ertc{o zk&mx_(tP_zC>UT`G-*b6%%2mYRD&Y-{gsXWDMPelJScjOg=Ya*qIY#56DcCN482W_ z6c+_ZIjhMf^Z*QH3q&z4^~XO-#ZtcNaTx$g@C>A(gY>$TJtYWW7`qZY5tUu*=2E^Q zPu83>LwR#x{KO-ay!SIe0D&lxf51oOa}+Qo=Er>TlQEdDANQ42$;p`poPlzGSs?dP zjFhOT9B0ShXxq5k7;Mui!!%oqo`KK@#p!+92T>p=H@UOM_yf35Dk>|1#~4SzmwuWX z_3;bm=S)r-e(&*n^5MN1zoN20vQiV{lT9Q8_*pi9-tAyc0EA2^-B6C|Yzu%^#Ciu7 zX9UyVH8v59qOa*_yuY?{zjv7mc+qe42H6vxFX&ZSis$;?x}+l}2rA12Feb}wDh7ni zy;43wj9Icur=%kUE6@ae(9$~uP6UG5jE#OA4vwxLnFU!7jtVEleROjj^#F6@meny| zx>;}w-*|SL%h?Iertv9F>mcJ-mAY0jAi$=~>#T+ElnwJlS2`N7r&sF&IL{Hvaf%P7 
zKD2)JV{vNebeRO%fXdI8tY<>m33LW90|O^IW@1=(>QC~;d{<6U_QHtl1tx_Q)B7iRP$HDZv1`( z5YWxI*`ER<))em>D~Ci7##uw;lbo{2o=wKr&Z%U8ymM8usH~noRmK!P3UE0IEx$|#)bn$VN-gY2|(d-9z^DcS>_rXcJ(rQTz7ea1c3dGG-!23;;SZT==+>B=oK`-FH`%cxjydLSKa zuLq|N_@v*-yzCXa0FQ0F*QVAy+Y_T36$i<9it4*p@QE%lC+)wVImc^)#F;%|BhdPh zhAq~Nv#A<^uIHegIYo2*tta~FY-UX6S_v3O#(F3d%b}4G#IL)vcg|!b9;UsVopcz_ZPV9x20xZe^?5hvJ@@X@qsW z-}A^P8P=4f!z9TYy@U4)xNto1AKX#tnQ?I7@EG9VzMZwR89C!z)om;rTQJdDxMl_c zjhF0uZ%qIMGV$pvw+f_-(cBn}1>fQaGOrX~Fo7(GQPzMFnI=y#52x8ZIg8)fHoqO3 zGtDu+pMaHN72PDb6bR!g-AkY`VYQW<-j=%qO{B5GPE zt}Tj-hRMtTMM@UTb8VM=%c%k2DKuFEbjg9Txvx$%()S`++JORK1fS9qMXEy%14%hB zuz&pHAAbNgrE8)+YuQLyh2Gs*TdjG zlo8(#f0_dLh^%!`pNt0pN&>A1n@rh6T!|5ebnA`W@s7*9l|LBXaP1+el)sz)>J?RU_)Qe0^I2% zkzdcBj!QCMW~IF(KKIXiyyJ`ouMPFB^m-{<^Y7URo2uq=^W2; zZJ^lNl4*{Oakj}{rUF=^SCqzZa0E2~b>s56_Zq+NWzm5!x|bfaK7M2zxSKBwWhm(JlVW^>{0r$ssovPd~h{L*Zph|n~3K22+{r4LKcgj zRnljyXrWR7sJ~SveTyFHaPL%BW)F&BAfRvV)(DNwgQdh6KwtPeriduAtLql<%Ic5`F zQwaV~2~qA8En~_M5oC@Y^wGdc#;NPXmM~+TG^53G7#A;0RzD zpA&|MZ;BdiI4}$d-auE1lj6xqF^(d&x5^G)p+sk-XVPMp zw-ntHS;p(4mF7;dGMF6k_DfxLU@wT-`;r$@AaI?df-lJsc>+EeXO2Q;Kd!g#^pZ%W zeFT+j&~?_GQ;wgno^f)|bbt%;i`M`zpoI=l21mZjIsnY{ZKX`f3qT<1t7L|LXg^q` zR#(PS_LWW(NvHETHD^Jm^caT_t$;4NfnFdt0kZ%TT|k!HM}Ew&y?{A4Y>3Y|8gzwg zt)(@p?9{W{J6O9~IvBvCo9WXWo7(6G`f@Tjxs??bm@#hSpue<|MQdw9pR!-db!Fy@ zoCB?ZnEf;E6KJx@n-0zvT3-&BjK3(qKv^BMu9Tc*gXvjz*t_rwoyrbt0ARiF5XaX0 z*@v9bE4tG=0Cm*>Xi_%io~8wL2Gw|mEwdkjO#1Au0-E@;B>+`Y3b4d=^PWNT#Zj2G{b3+s%h@Q-((yZij)XyP?G zqP@Z$-fL~hhJ94d;Bmdof{YfPNG3e1Mh*q((r0+SMkRIZ#0Q*1bGL5M(epiL24lh< z1@Y*(?6Sata!K~u`+P?q(uHIcU*je4e)EQ}@ z5YRAA??WT&Z;eH$04~=RrAj&1qdyQc6=HwCLPBLG}@IcKy(AD*HA6=ERl|xe}IQo?S@c*v! 
z&LYZ{%>Y~e?sWF{5O|NZ7oFzRv|$@4^{>otZJ;?}VMc0uVLC)Qpj&W(HU z5)euk;|b3b`~Xxr%jgbV33`}oW4Der>-XiOX%W9HCtaAXpX z&O@6GA8rM@X1OGXp3UTp%BE`%41lw-98z{t`P)u!&nyjxTvkHB7VXHtlBhB&KsNiv zS?H%sF!^B<*$uLwngXx0A7lz1!e{QqN3yN>eeQvoYh+wA*r5>uy+jX~n`=9Jr#M7s$T5TxWo%9U}FQQDOUpwFxW-w8q+&?z%}N(d@vkTDeD zp_Bl9drcSt5fH#6CGVMx7emRIaZ)I123WMNY?+8NkVa8csFZ;+wNAI*(Kb+Jy-NF1 z8Y>?F0X9|wN}`dJC(r@Vql>Xt;!rC{j zHMn>d;GT`?tAAg_4_`9a3?%)4R|{r*qWW`l zaBE+EJlW&O(+P|^IqJ~84D1yrd^#|UFc~FR-qWFwou>lC%LV~Vf*e4FNNy!$Wf#&t ze6`3pr-4obvH+u^!j+YpL$-pxLQmP- zGB3Ti&VoRlzialfW|+J;I|G2XcVRX!$jQQk-Us~UOrhD_gGb++gXlDU>~A_12nVcy zLFGsGSm7l))|yx|whOSO3q6xgsI9CnV|2WLpE2VVI#hE3*`1?3js;YV3ojc3dE$hN z%-81DeDQbzHBJuwR3(IYR0dUPGC+zB(G*>bjqU>41uOsOfBxqOznj}VPH{Zu{cIUM zsj>ktlR2BvMgL_q$e|#iu39#9uV=&bEb@V8%+(mn6z=4$;Dhob>ql0|0>?}V8Yhpl zgN{J&QET$DG;A?i3)<2DFaRAOu!na$BzJZ4lOGN}Czs9kcjxoahnzfe;(I@JeqC*E ze2iY!Q(y*9)3MeI@1db}J*sxd!I{Sqic6+WX(N(^ujI)k!=yKH3`6KPn&wlpXq50DatQnf=%wQ@JFAO4L zNBLK)7*M8AR?s{d)N3-tn6AAcl&**nAm3qcKZ_~Mfs&EJ)WCp&qHN7yHffi7Kgw)n zm^c6;cg~NfTTGp{M1LqG0jANP`1{|By=3<1TJVF4D#i;n;YAOih3 zVw{3D6qlWlL3W+bJr`gU^bl1~mtF-Lt6WZyV4s|zg(&Hi@k(%Xk3w&Ni~a+j65E_k*UZdvP76I-6O@8$c%_n{qTcj6 zIX33j$6h6ebkfVlhR#5hQZDO9S9>=|n5EzoddZ}j zJ6TdvYJY`wXVYbhIBGIQyRx(7RS=g`2JfIZxk3*ma^&Gwa~RL4O+0$zA=w=c3+K}P z*0lXF_-pPZLWi2`n1^wgKOQs}H1vMY%f`BASMe|d6$Jxk04Etr=k*+GNxyXNoWI#S zW#ur9F;qVJYTRZl#>K%_X`ljtGWqc@&-d5J2?a(~ODlz`Ry>1tCE|I{B44@IoJCSZK}5nt zbr@BKff9UV<07R*d8_S3BfwWwANT@{C^W`Klwb6L0%ri$&771AV@jwU;&)|h0LI8%%ZIxvo$E>2!ivo%2yz)2pucmXj5otge|V-q1?Rxuz@iKTK5ReZBYP5}wRd%ZA$_PZ3c7hW z8K=9fxizL|I^zPk7pM`@t%IQ`JrGx>B36zsWRO97rsyyMJT|=S=PmgvK!~^SIR^=E;Ge25 z(BEg)Gk)=II*nYhFPt5ErXUQARC#}Ex8kLEz5O@nh@U;%Iu}@Nj7NAYIqEv&%#<-Xb3g)wUd6z;pKwWOQgXhhJbq@D25?v7Y%?xFRc1h#C}Ilu zUWdk<7!fIFrFD!OMbEhVj&c=w@td(q(Y7CF($w{6MVS&>P9?=?Js44nhajV^`_09( zQ-qAm8VNiT%E<>>U7Cvw9fQmemJH(V#QlI2*@T_`W$+GFKE2kYuc7;Lrn@ z{4SZAXaMlbXyW5K0g+=J!sri%#P3HsI^A+)lwklmjJ`}N5DI+c$mD1&W(|g+7jyB${G_y_Z0Hkq< z9D5fXqx_b<*1DLk#CJduCqeYldw`;%+xAx2tX#zO6}9a-jFmcZV1Pr>c5|g;{C1t6 
zvNwRVD6%Lxoku6pQEk>==ans?CtQ;w6G(X!C}ndhHJj^dOn!18=r-5I8+0tb;9Rr; zzW|ZI6FPHhWbbNzU_ChR0%GQCpGG|28gjUGO62HqSf)&rEt18-(`cbA4Nn8mXswis zBjtXyH9maf`2vpChXY?j8TJd1#W&W=JS%G?F9Klh?GRzks`aMV@S;H5J%?c9aT!{r zj~sQu1NMk+kUt5Lz_?!aEBiN{-U0~dE6`$1tgFDE z`Fp3e<&5syH{%PuBVb`|(NK9B2NZoBH(@=lG~0*=bC8UQeiDc*DD3%yn=%mgYSC%3 zAl9GmkO{Uvy>sfSUFo)W=FE3(;|QA&cZ?Mq(DdeU2do<0AivE_87Pb*Q! zQ)EI5aW>Z8t6jh*eSy~a1+Tzz#z_~^N#CIa?d{ME)DVcJ2yC*#=xT-uGRB>0)_Z_z zOk32CLUBJMtwxjLQ1dA2|1+{Qgoz?oA4~BArkom)GGKwTQKyo#sAmeGXsS69N(!)( z=gr+`lqRqR&?pTnoxU1z&m^F_YZ-?~MO{$>0R%aP1@y>#wek6$vr|07vw$r%tiUKE zNZGa#zShb`fL7=0s-ycAoRzaN#JkgYq&(|dqbvs>0~x^1cW`Rzs0XO{8)(5ZoFB}5$PTm0QUGoCZ zC6@pUJ*X)M=gi-=15V}uLlIcl7g!)8KnOj87CHas1Bi;W)3fg7z|6BtyrbHDfRs^g zle_;Lhp_`Z=%FMQ-5)*EV~w&`?gA{!q672B0EE#6XoF_JGrM3M99S}E4_g4u(t> zhmZ4N{nw89mE9qcz&d$^hy13E_LRmW5!C z@Td9dHfF3!$k6FlYv}ZE`#oeY=vre}CP?=Ep5@^5g7E{vo!JQi2C{`e1c6`C(B+EF z`dQb!9hyH;9y4G95f;u2f#zUH*HUbq7xG9cQ6Rb6XH1(wP;km6Ttm>SN4{GB{R+mR z)Pai*-;!Ab?=1G88CJx|`NfV(w0|CF{ymK%BD5Y2cLnm7Ub@-mq@x3w)fQ($-Qw9}ny^CxB zF~BEz1S|nN`p5=jr8WR*S%CPP9Ej`zBxKiK2Y{1v51a!$?%{+2+Uc7ay%a>SmbL26 zDKTIFDv#jUklT6KUL{lvzWD%^-fN!&X9_?e*UEb;jcZR&G^UR`&&7NIze%76i; zGW}>nXV!+=xH%HatN?!dM-gzE-~a$107*naR78x?gbftE2YNXo_!@rz>dI#Xm5c!> z7x+`Ag^#Uq2NQBclrAaFu?}b~Q_7A3fAl!sz;k|TtbniZExnFTXaGPeDWd<4?@DJs zYt=bO-iIfR(=$~*$YSdnCYwv1(L#nw^+Q>j_+z)mo?jF{+X}1#!8$Vqc58%{IVI2jfw}a$g26BZJ|96j3he?MGTgUJ;sGQ3r;^{uJq6 zse6Jzu^6MzICi%-ND?>-nxSG?{2vV&m)cmHJEO!%b_i5`=Yb(k1O-fq))$^Kqu3~H z2A6}x;ZzQ{cKqm_z(}1*iliF5=#uADv%ZVe7zam&0cF$}L<*p8Z1-e(o>`M$<=zyh z^O&9oG7A6o)u0%%x zYtd(pyt6F;dw~HwSwKkWHYm*1$ku_37RZhyj|)!~jB( zKF=eQb+?)T**H2grP7`{HQ|q(0{7E1oPx?hXP}{t>u4#;Xndl?t}mhtNEjoXL4V3B zb1u$0%B6o>hq4&-6(^7E%US^GzN_Zmn9vLGMI%6v69drGMf4?oWsb&J)(cH5CtG#S znH^cj`iNeLx4L(}6PT}DahA`zC#S2uOL!X%l#MArqOa-F%2{L_=ymJZnPF@wec#$z zFBy4!$#DUy(Mt50-ryMGQ<)3n!KWN3PL1riEF=D(r>Ez%)rpbbRwgaGUTbIbG*aiXIPYejKCU7fbdnMaq&0!)<4&3M2GAZP2 z*C6N?f1)i}hI8CYUeQ_!ojq69NJAfbl)XN(wKJo6MIL5Z*(;rzEAM{QHReE{qa8Vs 
z{bO5?4h^1;T-{sgd(N~U8P46=Hm@4*&wS3?q50FPv4o$ZW_TGYdnG8{8Ik{ph3ecC zQC*Q#3K3X2Gt93V3t^#Dc>ffZu3aLN$~XvvC>sGd%FtAHWG?|FN?B8!<^}xt-6nqu zlHsGo85PD7fOD@1Kj#i;qwokTkijS!qs{*6X&EeQ#JE`-O3L@1%h@p(pL>UliaB*A z%-VHq8j9L;ixi0_i)x9?0B%5$zsWc-%IjX4SJD4heohWenXR0fBDlY+Q-j|)`pQMj znNve&h?)gvE2mOoQv`N)*J<4WM|=-J7Z{NFAv?4DhfWaT1)zOSKg?3G>CDuO1BepQ zwvl-5BO}+q5F691aB#%b7z{#d_g{^Gt6_MOm>k52K|CTM9YcaDrbUgiP&FFIg8r>^p z!C&kK$DLD$PdGI)Dzg=@_gPmu7R{|28*3c_G(2uyI5l*5dk4^1`KCa@YLAPS=h(Cf zy*)EziDPX2Dy4Icbu}LAXw3A#|8rDj5b!D)lm!8}$w`~!SKC~9rUNm3 zWnnmi0&1!Y1WC!4U;$bx2@`d{+Bl>+o0D;)si22-vh%nj=2Vp|$^xgT>K8ALWJk#M+`?^ULOv$GY^9e`|KDh4uRW zR|E!LEqnJqkDLnLa4yMAM_bh3;8wsIf0BpxK3a$0)v5XR4h?3nbBdHTr7C(*MDyOw z`Xhg3>%|rv7y}-bUOxdWr8xO-kArTZ# z3_%MxWnlcyP!=sQCr$%}Rts0oNHyF=e_T)a2(##&F#!{l8}Q*?;K}z46wt$gk--7_ zL?0=A&!H@9Wjmuw#+NhGK~@yMIWwR|LB1j&d3Nnzaeu%EjnNSZ;hYK(h~Czj<##JT zYqKU$0_^#zdlcY;hjb>wmjDjF1rYG;EX~5-z`y9O@9`-9rho7t!;SZVsM?P+>Xjm` z`T@U_XXEltS!Q5tf-q}ltei2QTPL59asT%>ndZ`azfe%1aUKu!iq#QC0P0Li^_8uRNw zOth^`7EhNcpj!aczx>O;{B!1!Lx+awO!rj=;u+bEuMDQjPFaI^Y6k`@ck&(n;^5#} zP7SA+u4OwqPtA4ss690PpZ>INMdd~P0kZa5ae#a;pw^i%ctS=}may`uoi3h^O`F2m z6Zf~*jGgD4kPmW*Uph~vvwt}2*5BS44mrn*t?`>I)zvDS79Yt90sHI^ombNs`cPm8 z?`vnxu_p`B&^=^GWyZZ3?be0taM}P~GHEXMbdhVHYeqmG;Rtjjm%7oV3pu>>4Vs~yAgij8t7lEk zz~z+3kH%K7u&*=l#iq*m%Up7ea$wLYA_|F8 zEIJ)XaqUzfDSyT=!@~Gbz!WE1Q)0CZtu~)w@&jCmVgX7(61s^PQ-Er6fsM|A;4t}) z!$pugvw$*f6QyzH%!pQ*htdWB5Qs|A|4NGQmAo<6x;ozpDgzO%bL-29>xw11C$lDU zd20jeM~ry~%mUD_cwbR3APJvxo<%;Dl9($eMv0{V+Y^AV8D-}L;JG5#XvVnI)k(Cw zXyrU(p*?72Vvg#qJG;Pd9fk1v4(O99&Ieg!)QpcD`t65KpbyAd9jPj*DlkFrD(fUG z0EdV$JtpI1?SZL+Lv!QnTm!Oiyk%w>Xk(&(&|ud|(Ad@YGw${tib|hFD~X*o{3M)bMA%W+@TNN z;S9>i;)NVyJO^;K4`T9at#7S0JL8^O^`j%clSQz;z_a%P)XI@O_h=LQT-&LSYFtr( z4Y2oY89DD}@2WnqW^`6%W^4ui2PEws@eIyM`&`LnWyFk&G?%9{fp{eEdl~S@m{cN;x z<-S+R*|X0v{>p&y2_B?p?`hUFGc|aeZ2xXf&9`=FN)?MfP;yeA6cT}=WIqegQo00P zHpDxDfX`|UkV7Nd#+jq2)M=uLI(MaaM}6lh2a1#-pxkE<^;rUwgA<4VL;x6u1o#t8 z3cU5Kz;aP2%9^1Qf$|Vx%@AZ}jD@0AJ_4{5 
zefsXsOhygpv=->Qc4@)~=-gR647m~oK?2_?gDAUpZ&dGILol7JfK7Wcd}rN{PPo5! zt($jX32&M+kZm(P1B_=yc+rl->>YTXu4!LdPQS5nD2$i$LtdsMS!5co?>6ST-urag z0tRI@kLK#RoG}>+KY)s7*8D_?3_!$SlGhH_O_l=)If*_82I(#hImoMfWqIn}IDm&0amb8Ulv{Ae zT-?`Mp`En?km(|TG`;aF0i|2=Wv+ONZ3IB+XuKV*fnotInMeUDJd3aJH-Nu8V}-qN z9a;kP{;z}x-}xNx2m-XX#P>FmTNh5H^|)%6ce;RG@JvpM_lR;U7j3VPakbA!=RNPt z31TC?zkROwhO@=tM;lq48baWKx}BLPx!~Z>eGl}9vPw=5e&j&i(95M|#{ zQ`sV&m2MK`C1>c2E&>Ypklb+|+wXT(Z|A(fGO%^8eR}5lGF&jKHLH=r)%{E}Q-eo1 zHJtc)_%=U6|pBvj4*xz)&M?# z11$6m9-I5Q0?Eb!Y;j5)Ldah>tm;_KW~G zKw_KR(SSpv^wv9ejY=jsFg|k!Mnw-j(>(lmCMOR-4T#bOKoWpet7r40Ysez{+zKdI zTl^&uAqW(3J}bS*!8Ja61a;;D(12MERQo2p+t`%53ATtF%O*Hr(>!Y|BKRXXA`{Vi z{!XQ-WC#D@pZG5s&*`?7U#bNNhtVUf1=UzH;VtyPBG`8VeMZ{4V*qH-b6rQc^FiuStIG-AocrH3~pzv<( z*l&Si&*aD|X`|C&B)_e}z4)Y3T_v_~4*8R{Zoe*?a`MnofCXLYP&y&oTsOX%t$x;a z{0jG52eMJ42Rf`k(^b8*IyHF0y@F2o{hq+kSESj8I5Zi%)#&XAkg@%2@K|<63R%jN z;D|_CP{4O~+W3rxBj^A&1xk?b^}Zh+V-Ahz3kSykMJ_2C%I>Ul&|GUusZxt|Jt=M- zQ#frREsO|2Fw4}gC~JmLT{LA4_;uDt4%Au_6^KcYjVoFMM4Ty^nVgwlrBvk6^}hE$ z^sXEbZLOny4ggK9h#43LoZQsxfzj0L0bc>Fz#)+HQKgjDxAH+H9-J8KZ0yEb`&fMK zZxOyag&8~J+`(fEXC+Kmjb;&2&bDqc+MUaK027P`&@BUsX0xNzTo2C>)di;LB;y1Y zJEs7hjoULgW}I-@47`uVBH6x^otc2|XwALPdK$B}a*xO>nH0eTTAUB1i`!9mz?|96 ztdXT6L+zUM$;_lLr+&<#O;`2Z{QO?IefIj+ud`^% zVpT5X|CKm#2Cb|01kCH?rR;Pk?=wIW|M*O00G`9+f{g3~-o_)M_%`@^zqQhmzXLIA zI&+2o(b)XVO_qie_V<7P_dm*?=sR}BxiNHEdptawOmJ%OiLA5npo4Na&PojiT#J9$ z7rIEPoGj-qtC|DSUM{jG&^PCF&st{Z=swP3XT2GN463=3J#tFs&Kd*sZaRo=60l%@ z+w*g))7Ea>=&M>nXF^U@>tU@0Qppj_VZV}fp8KZOVAE$~#(UP`qpodx0-ItD+72)s znlm|=P7Rr5tL$H5s|28~>eG*!<1^2Fi9<6Zl|@N0eH*eH;B;^h-6Akxn7B=JE0dlr z&ZCnrJHy+hK!L4V9(v}{Cdo(NcXl7TiLe6Kl-X59bN1Y`?{jEG&?$8WNT$jdL{{$s zphpJS=ZuH6GGhVQ0h6;F=1e(F$BAuu zI?oonXMEB3?QgJFzyw*%dFfn^%9N5DYrt>;JfeCUPH+;)8pq~dBY1VJ-pLUVNoUAK z*%?pSPsTmo06Z81>u!Fcm+jMX9loecsskct``O)i+^=Lt)K_y6;{af6X6Ag0I*QiH zo|ym6akfUCWzqRZjJ|nrJd6eXJXc8{C&>m~>+Bt5OeZuMgR2|lz0;gUiRo<52jm1F z$m>yOr%3}cWmB|u=197qb7DN6?U{55Q01o<@7}EphqH9GT+3V;Uwb#Kx2)GuN$k2# 
zIYNRe95DRL?%)G}v{F{|buBs;F$S^#S!;}cT*JQ6kNB$2j>b1jqNZ&$p0fX4mz)5f4=ES|-4cwq+BZrKCtCmR!Q+CK^42`tK9$RgNd zqsffelMyj*5qZIvD{YuPvo`70*SzWTtDV^zH=e>X#^afsDr2ehnxHa1>zuhYKzfD~ zuF&UKzNYO7XfL3`0a-MEe$^CzN>o@zY;y4a%d2RvSXkK zHvtk&k!ocS8AygcrH+{crgiS=a5p{L*Ag$x%s963>1wp}jKIcW@qht}nrAvU?0u9I zwHueQ1UP{T4vnZ_9aGLWL7vU^?0fGQc|r@qVc(3}R)*ryZYJwHtvLhaxd4IRMXRoq ziLB4VXUZF-T!E;$K`~>zf&iZv!I%E01OQARg#!2;Gc#>mMhkKZh~YJ~xC)Z+2gihi z;CXc}D#{9IGvsSMbKe)$0}S!jqooRGeC&JgY`=`(#*5!<0A@660Zyj?*Nkzey|;;% z17=@92Q=a}8AZVke6}m&@?J(-nV9$4)8YSsgF1Ym#&_N+3%c7|VEmO70D{H<>;r4S zRQn7#37j0jQ3;R)fHJeyJiNE{9Tr|eR6ol556Je9CZMwHoxRZ#>2WFbz`DC-~~1gUjtS4O$eyD z7Y)%q@KyU>dQX)SD;vMg2R4xnEF?w;lmCMVo)L+7lJRcQIr#rDIx+MV4n)v zXRdXf-~M0SG$mJ?MTVSFXS6yD3z3U7l;ydXu+@80oD`q31y0QhY#aNn=bv5Udq2Ax zN8beqT|-HTesC~ETGb0bntHxU6Jrpq)G3G}6-|;JSKlgHGWWM!edf-g5rG0`DSII4 z-rCRkOo|naDB~1=C1*wR=1Jy7K1JR*K|m4kXRc>wS3ElBuW+Avx9_FRw*lu#pIc)s z8r$jaj33!1k7zAo>YAdDYbi~8mDI!o1=i~$voz+s?wtafjH7@`;29VL8gu;Gh))gx zUH2$;5t((5Xsv&9KM2|t`4?s6zyJyiwK>;DoO1&-DHE%`?arxWT!1E>h{&Z(4}$~{ z0lFMBK)L#Ia{$6b>j1KO-po}V%=yeH(&eJWj3ZrOZLMAUcIvV+%l5L>^0u-%*U4A` zoc8p{jG(J^C<}4#o~3HY0snvbmw)+x!1vaC(UtrIVL%=J1zgQ7hsGRh3c%6ugPv@} zJof_q(G0&>tN0mMLqqp^mV3w@&}{9knQTx1md-*m4maHofOn1;npyMo|J54#t80GG zbpfC0&=xo%BmSOyxj5X`nxktzN0I!pG}Z>~@Vu-DyNpgbGzBXH>1DZOXwa8aBP&W4 z&%NpZ-vP~A++a)N*>0%d+(dWidrXqdEHW{b+ zD@i-+Mnxv*8IF()GkJQ&iS9Xr#)VfoBQP8K$R3;PQRZl8$2&JpDKQy>8+K(+JN_~j zazGby1eJVRZ~8)|1Sj^rhxJ@C$qL)h(G_LF>Edi0z7;S+SAn>Bw&9wwagTMS_mwSM zBaH_hX;5)K`j5ThZigl%RH|7dPrA4|VoJICbjBaCD5g%dE{aO|07Di-w8eD@?Qb9h zfvbNHgn5T7LZ|)Cb49LZZ@foDi68>mqJ|bvv}~r>GV-?~e`nYDJ|}P2{<)3roqTM* zT1bl`inPkobhg1q88ly66x26|uaxRN)>9tXbgJ;9aro^~IohBcuO_?bkEUfBIfw#Y zcsU!Btwu|qyOx}ww>cOyUeM8wqXOUHea;ck&(R4Bz$ezLHs_va?qv^^Gbi_gA6Il; zxJc%LEP0;2bTh*K=z3-of!q#1CQBU0K$BI)(hy$u*n|LrXIF-5$jpo1=_P&r~W&uaXZo;Obd7 zUxluh9hwA=FpH9jXml`^NsA8pX@ejI!AZcFrDSKh3q|5z3zCu|bY*COCqR^PqF7w- z8HAA%6!Ef9weF=@DUVwnax5GJLUB=cHiwFY?GFA6l-*javugrZx9S*ECUYN+Id%TS z)wX<(G8KAjRY?(xr0V<5X)Hstwp#^^txX4DG5UtqFd29 
z^vj46I-BAc6=PhZnB7~~DZkfi~;v9SoXhRpK$_0-uO#GSGMgQ1~oGIh~uMsA1O>siI%#8)Y+{ zv0=UpuK5Ey46v+-pFjNJ4?lDkXQpT+z)F5ayF6D$l0!t6%pX8H0;=?!wUc>~&Gc^3 zW|3sU4sxS}4>%XqlohJM#v>)1lP>AU_WQ^vnX@0D(f2?a`N2cz<61h}4;qR1s*ML& z7*w>tJIW-?Wj*cvOopsa<)Q$u=p>ze^*jt??x4`tfNkPjqMv7(6DNYBI1d6&KG4;3 z{LJqKNC0vAuMNzu5kyf2r~v`Hqym7ABzxom4{=BZX3S4n6B!jO_|O0R&mVRStpa*q zsh-_&%(4via= zHG~(e6^GSzolDl)O6Fs2tf9~F7``?qzwN21q~7litHldFuNKbiksyY7b8N2I@5V>p z*1?ZcN48(lc`7=iEvH6sMR3VK<&N{<-7|Tl^No=_(|c?=`WzjeTo97ouxCeb0sRGM zI5YUZJ&N9qzE?}4&&K+@exFl5WuUp@A-s;xGA``1b*JCpCF{L*+;eY1y}I||ZF|4) z@keicc-1;xa%d7*6IEwt{{lg^sf>pX#4u7kLJGk@#3{S_6(2+CPT}p+7zwR$js2h2cj&DE}An?0G5o)RBEdO7btPB?nE5QHdEq< zT7**mlq8|7laBQbP`gf~U*yxis}66QP7yi+?i8pJGoZ_TGKLIm`(}P;rP*GR($fH& zb@DmdF}k-pSH-8UCzAj+BaY6%nqY?YW^8@O!KE`KVL0} z({WCzm$_jCfO(*k?!#B2$9@>}_JDNoUrsXq1Lz}4p3V#hhHjGGx5vT$IXrj9L*~TkKy&wd zKOMkXpo7>ApsHpK1#pgnRna&gZH077<*908R!!q?3X zEy)Wyva2=BIol`nYT3B;yqucK5Zi!WgAja9Pjf)c5e<|a@3IxiAiacch#IUG@CRh(TQ5YL-i9fVwYya74)_CSGxv&CeMGhcZHvK>l}Wfb2v2iaI#5s3!NvZ zyDMo#e|8E^!-Fzt*6G$+MPG?7mmC^~ixVT_0?aS~j2-1fXn{q>xxF*9>;!>k?ez}m z)?Ss8XptzEO0ue1c5f%*VtU5I#z4xS5n=S*M@WrN6t2^SGs+efKw#K_0~?ih!0s%v zd$x$p^B#cVos42-h;d!p7_mxEXT|o8Loa*ESsH3-gVF=^Td3Hbk@Xr{% z*T#0(LYCGV7OXhpBja-~TZeCC^E_YHf{tYi*;$VJEBDgoh@!DE%UlZV(k1tlw*`C! 
z3Ity14Oyu&pZK}W=JcU`Gjt%m2Gh`K0^&0>6ISs1iu}D&4$}Lt(4ns!*CWpgLlPa$1c6OuJ8C9H9_%^cn7Gy^0<|5_1d8T*l_MqG<^U3(CotO2P=d66BkW)~MGKWSqngIxW zWJoc$`d5(|!hjbs;235Y0%a@(bP1I3U)V!_;9~6fMB<7k}{=e+Eiu z9`MLG-~ocD6o)|pc+ACK8-|IKR!PA}((;u;`-q|~f+ph0=|E556QE*D$PQZJ&)HyO zH|`>m)(yC}F2IWCku!WUWxwm!rUS-+zeF6}H|y%JLH3qYYOWg-2DI{SwvMUjCQ;0|Dw*(M8s5TFeJqcM;J2-qJ}J-_oe zfF99%0i|a%Gdq0?SaUer%su7Ie3E5lW^4f+?9_1?(>lSS8D62!I`@UHAme!EZ2#PJ zW`Ik!Ms`f844Wewe&ql{&jH@-OOP3EgTHZ#e3J)&ZU(jJhO@3~o=YA$YixyxCWn*E zd7dmg`7%~Kqg;>yaJH6chA-J%K^nC7ei_W!{Nl`+H4hkxQ{&7X_<-YA892Qr zGt0)%1$f$A>-sj!3Bz-*=J7LH_`Yf)eDyQle`So1T(dehHJq_lbPNnHFtnBnx{sW3 zs+AQAKnX_7?HrGc=QHj(b!aSh^`wM=kt28vfS+o6Yp-|=lre_p_&;aUf_U~CaNbREmhJYR$+0Rn$uV55&Y11Aiww_8ndUshAbd{yS;4`Y( z+#Ac$dZJa%>vVA2@JraxMq~;&>@;_LmJ_s=m${eY0yLTfg902{$4){fxRnhUv*(*DBeXt%f1~a)t!JU~QE(UD+`E3jTiP83DJMv7uXJCeV}fL0^y=pnwh% z01`OCvpV_6e&Tz&#&sMSfXQconDqd*84p=RKT5SYOENw7`k|GqGCr>B<1>7WM}QC@ zqc+vnkRh`#sOAl8NmmZDc_|}uKRsuB=ut%5ecnMf$sv6z+Rh0&15QQCl`I)kZAfLW zWj{DMSI*BsUtrT(ct740h|vtjI&ew@;pUa|mQ$1SmK>Xp_giz%v+f)wrFX{3NmZgj zt^hq51AK!v)>Tl(I^zey7tweB*I0tiBaH*Jt^1sfgh z5eq2N9e9T=H72Fq_=T>h;RHQ_w*+YgWnbNK3Z3z?aVP~-YOYl7p7*2|!(8kyY{2f~ zyIKh2alB5az(lgOJ52MegMF`#&x!K8rZ@QPicbCPvEDnk6)%S^tQET=14`%6p@P%a zead9jG9yqO%8wqh993%+JLj?br2*uzSSII1|B@K=%?|L zD|1Bm=<50-gOIZh&;lExwRq?(TK|zefA!f@*O&)7ip~RGKrDKSbdpu`kx5X-VKX?H z0?Y(5+y_vjWd@}4w&)u41eEA6de?L9zmOT_49HT{rS6L1J({2S(0u?m=c?BD_yzDV zHzfzV$Z$a};81|boPlu>bO4af=**)Uh@6K)*oAMpb3&4ejBJ?sMoCR6d ztEFqkN}qBx$rIXfxMYdwe`{2^GfN|n?*%;RwpKe+jH+Zh^RRRE?=IA=$ zUA9Zv9{O@tJ-_A}0&mvTz3nlz_MCou8brU75d~qa3(2X3mOx%umMC5B@|$bjAmO zxc!{y#F?h&&4r_mFY!z^9xusm)~dMorz<%=0&3)!oK-pr_|N@rfjwP2;R~53&P;pD zj%d_g663XQ)LXXkgoEkh& zZ(ApJDW~1FXu=M_{MK$Bk?{!L|EPKWzktjCu1I`&YX*TcK!K|jc0>bGk!hRKK z-vU_H0r*4zBO~6Xb`dwF1|rxL5)h@Nj1onAG#5P+X%hkCAib(nP#Hn%w<}k)?mkDi zqS?1L;7>lzA@B}LN0i(eB!Bm$#H-QvEbCO=^E@=Ni1aNbo&&n7iZ&ju`(=)v760IE za}!kH7@tM!U%}U}e1>@dteg~zUL>Awu{Ht$fCwY*v~yqw?a?admIDq9D@_xXrIUcM 
zx&`7>Alb7-e66FhEm;OYcgn_2CtaDVIRGEZfxJ)Fnn42?WraCvvarUrf{%HQ7a#*@ znWy{xO>Y4Qj2?XgbP6tUP;y4*xd>}Ck<(|s_7HfUY&9o@F6YqD$w2Xyg9o!;bbvJz z*rF@wch8jhVFxnyosW|E$1Nd4;V4ZI1kWceMFF>5*PVaM`0Qo=w+nI9< zC=0p>Sa^QALrJLYQJc?K01+6a13U9(HVv>Y=tu|ETDhQ50My*79yrn~tIVH)y{z|>A>#oaR_R)LMgDu3997F4b_A;6UQ#^zHlyM^${>Go^RVge-2Jgstc)n~= z-S)^O=gB(CjIqIGW<1C9ta;9laj`x2w7I@RW7o~?lMc0$hCu*(<6^5k+xx5syh4v! zPkNdR(TnUDdBk5Gc6ihrVh82U)~f(89mc8g46-siht~bAz}5Op|1a2OU1Vehjq#m; zf$Sy60o^%E*`KqqUD0FDTw|_ut1<7YKh|f5*`5g`Ju}`{+&i5bdZ$BD1y%%&>eSep z!ecNN3~l||%l1{v#4lxNFuN45$pY9NK4xKmX6svvLm?HF>dX*4*y1on7?&p^nn=bpg{&uMds-vft2dYy$e49L-YX<-q}{@?>(L| zPei}6rtSsul>GtE0VX`n*|IM5qjH4V;Ra6uh~!EX7N9flx`qL1?fEbcyf0b~)CiK8 zk9z_5wTv?7U(`@A!kjqrwJs*}B8f6Kl^mTp_|u^=4&$v600*oy&62yhUkII}ah=Dk z5B(x2W}J8!IJLK_Jx$&Th@ze6_CDWpb~?eH-oy)aG;4!4Uf~pbX62hxca-f9U{wN& zS98P)y41*l-3jEIm$kR9{sq`~`nq7o>U8$5EAmzwXBjKI@xQbQbJ%s zAn2&8Ut>cfZojz$D1!kD1KZ3H>#JTFXJ{ zYYj{Uqa1mWA8@93KXX3wU6&l1QuY{|LqkYRbe;(PvnD|339%o*0>}{kr3@W}25o!p>3{;@8M0|WY5C6$lL z@e1ZR0P5fEOLC&OC_2ytw1_|gDcUMi{&)qTQE~*>l7|{&0BQ6T{_&d*uoLuARqylDZ}$@W8IY<6)XXS$fk@Ga0jed3g);UJw@;Bqw;Jz3Lpgt z(A0CS9Uw@*;T<$ZKbZr1!uQsjo}AIpO3BibMeD0DaJ{wle~tj4o0H}@p0z&iFUV78 zw^a}B0zZ9L5b>7oY^>?!T72U%B~wdOYMbaIMz z=EmkLpF5mGULyV z`O*9Da%czwX2-all>0MVNe0*$R!#sQE{aFlh}?)e5|H-3h$7T|i6GwUWV1#x9wD&t z1z=Z&os+XVIKYy8UKwycIDlPaCV6=#)D4KN!IA_UQ0D(8pNTV@fH||og zIcAg+xuuHXXkU&p~mM$G8wWWbCfK_jt?oxGBPt-pp1sIB`c>4Mp;}@ zhsIZW{3vyQYpoecJO*R}N%)KmvD<7MURNqirO-CpJBjWF&DhZ=-Yc@?d{JZk7%9q;{Q&2 z2lQnx>>a@)I_*`$3Xm}*fWCDA8hj_?LuUb9G9YA$j3{BEzc^5w9sI>1QkvyE&%+ClNxo1~eZS3B;i@3H&Cx^%X{asbW+-tOxEi~bT=c2symy*2o1Sy#x&BU%n;TnzmYfK`t)0(|6|IfIGdaaFYiN^G z$FZuVeP=pF5AXMG&P5pqG^2~H|GGCbd(WoTT*Ej3VfWD=c-FX#8$Y09XCRdg5ZqvU ztruDIo}*?I!dRQSy_utN z;d598ui5xdmORfps`Q|**nD~>$Ir9e&ygqVGFR5b`;BXNp!X|u_wE@56Q^@i@i`zGt61?fr@y`p)`k6R)Ym+7;L}0zrZ}@bz!u(9{$FLt}E_Iwkc; zl>6C5az7z99)c{rsSJWANiZ1(5rfV`NKxk4%qYV>(fL=5dv$VRsvI1Oj^ZWoBE2H? 
zISatF43iQSz~Li2WprhTYp+WY&69)!2qLt=2Ia>|^h4k&O$JLUpHgIaiWU|v%ZR$( zXGI&W4}(kzQ+_(40A;R6Cq}r<#zmU~AB{nz6BzS7#Rx>q*2F2W49`{PptWb93BPC~ z#|iN84D&W$V6^#V1Q^5`KomXQl`_tE0J;FL{S}mHr)?KmyW-3?*VQ>+9i7jX0lWl4+ptD-SYf&X4-||6}h?aAetyEsNrhQ5u~Fp$TYkRag($Dih@m-6Qhq>itR} zkxjU>F3MQ_nzj#KC8X~)`^9Z7Gf(?(lPpbMLUxT3!1 zPTtISf}=Y*m>C-23Ej#naf;?$Hf1t$06Vph%qz_>53*=3)&Py#1C!IxCDR;T`T$r) z_l{tuOW7^sE>q&Una!fxfn5C5UWO^7IW_Ig2=KS(L?%K&#Jc;;JOE9yCd%L81YKTD z7aavA_3sb?U)Gb3L!qoUuO80&7F zJ~K}?!tdG7bZ<^wWtnAmI-muw3jp8+&JQOXpK0IEc}JNx*kH0JfaM+Q#|8u5-Ecok zm%T%7$e}DPhuHoed{;Ksns!-g6(Z5kmCC?iQLTCVcC^a@F=sSJ7ym;`&$F}kx^+#3X8>SwAV?wzQMp{1 zDm#emR)R38mHiuwLmkv?^m2U`*toz@1@Vq4*tNi*3ssF zI#4iX9uR=HRLRHy!h1WH8Gm#9PujZ8&=gU%Iaze98-EEThJO>~elozLXE9#NTS^qj zupoq;@^Csy8_TDn{T)+;GeVG~R^4;X4ThpjfdM}m768KL#d)X0w}5H;9MrLM4j94= z7vbpSFyPv8vR#l_G>qZV#@vjW@tJFo5ua;ZnIGZfENiQ;vTBS|r4wkuNKu;jf+GNQ za0En+=CQxf^)Md&(mD1km>;KSJ+isAo58B|DIh09 z!x2G4;F;b;XC*-9PYw!r`8^v00Q%zOIcW}^&08sr`O-xI9eGX{qqns%Z`mNh2x~1k zBT!RjnH*OZ-9ABM!4E~l3m6uF@w2QxAc$w!Kr}T*eO6)}X!efvr8D(y{OF5^(Nhqn z607t9dg4{Q)#-@s#j{?%%!_kWN!Y5>a;E5LfEvAI&dEKx0*LWUHx=G^gdi+oK;Cor#%{z8S-C+ zKp8oXl@jtUgAQCqGvk#(L~r-nn1kQ~8VYt8H{Ov2nR`O#dW9RNEM#N7&#}P=If1gC zbPGO0D>^~2)U%v8Iz<2IUbRIPF8cCqZOw_3+JwNhTsjz#1e8nbe~Y&R z&WzBnIDtlg48F2OfDT~()ls>4ur;o<#Jc*u14i1n& z%36w20?ojRh@^M)NmkL?9y#)Z4*|2lVp$*b0mzj)=+oUK^dHAv`#^wq^s7(2Q3g!* zg?{iqU&)ic7%&DJn4w2%i>%VC?u`Lr$g!v?Fo=Iep2-%z2H-i|Wtr&U?s3qUb@o0- z0!Z-_K(PM|c;c9FWMu(3Y{td$0Cc(=2H-|7dd0p6QRG<$8`zaaX+Iu-U3rdq1F(K& zmZty!AOJ~3K~(6#Ru~^3&dKavn1E~I9H$bW_R^sd-2^al z_&84O&oJN0E3I4E4m1FieP%8E-7&p6SY_@zpa1{?NYT}ro3pw5)}Me^`!etyhY=0% z1Nze?-Ot8uwtt}c8=L9}PArGZT4>MjZ~(sd4jD-owWr13m7KM~-Dh++o;P-5kf8yd zTYL2Jd_l5oK{nCY0_hW&tkKz&$6Zq1*N>n$9l)-s z@-Pp<40Ll4k#_!*<;Uh~%{YRimToz2@5PLu7wM{e3+SRK(4&1N=8qOLa`_hU zmW2U0(UaGnSFTOQ0O&z~fQ~^k768e(051TeQ=qf~*i=5UGp?RlpCx!eqAZCtwur9r z_r8)DSx!;gPw93iJLZXpZFsdO0q|lp?l!XD zwfp3AJkChbCw|7$Z5jr0(5}Npl0hKV_yHk20XQv>`HYY=8jgrkZ)68Xo~a3=p0qliJ6J zj1|L;->f^p2BZSVGG^9CgjlJK^${$h`vDTb!*06hA@EIq~SkQ{z 
zRH7&H3P2gpx{t(Kk`2xjL(h5eJflAE1gXTQv3kB^qKy-Mj7NE!F?Dho-4GbXAIWff zM|;-Hdf_8{g@3x3k*@W=b)%mhlF~i}d@zF()*o=RcGkqWI28C-#>#hx5Cpu?R-c?@ zJcM4_v0mM;gf}_T#)p2|@Kt$OYhQ$ztl?Q1K=L9oZyt1kb)>(I3%|{>mz*pzm)=X~ zMt9GY$tf6Uyk)G~LuRaj=DF^kg)Q(RS}Ci-x7NJ7%;u)?bgRB(d)i-xw_JwHneD)d z_CihPLw{tViw^M-`X#ev=i1A}F}Huqy5s=jtsT8-jL~on<^+1fS;mO((ZZaJ6L0DV zE#Qo@6!HGdR{0h=9Lpdx*-+O_w8Le8ygd~o;P+t0&~$#VKQ84(KoR|6~{2 zvm3@IQ)FJSSjpM%d@AXdLu0Z$?l$z98+rESy z5cif*zV#j@!9W5kfh5LA&Gp>#F_lkI77-&fz+{x0`C>*!iXp{t^JK55^axmU8qgaP z&wB%`2k>B%FQdcIQCPqbLt0%qCzx_JSCKx?eF~OK!He$N=c7N4ljvg?u2Dd13B}R6 zCG=-&I-?sI1WLY4<;>8`h!qe5WMp^&h#ZiswMfo5GTsp-(T??Y7Q6_as9%xzPMGsK zGn%FqBcm!}u1DRwfxB10>Pk zSji}TS469g;%JNCJ28uH%t`2u9rB`mkyeHqfB~2|-ei_6krffpWCdU((;}Dpw#JOT z+I_NUeYDHa0y%znr;29+owBH8LFuK;qM!-jPd4oR@ss0X12X4J#syvIdGpbptP=j< zP@6Mnhf||uOxY43Z9HA3dBlV0*?tW3U@$o-qMgPq`$&iDPk^8(e{JQ^usN~{-ZOUk zQ&}H6Y1i7KOB?j{t38ecJqbAZ;#`;?kj{xLyS?&!MqXw{P=}0lH_y~#=?1*eSSxV@ zfazjcI^X!IBYV9UKLz+FaPQy?G^NUC1#bp&T?>|n9bl6TXT+s z^)@z{7J8ihi3WjmItPz&F!Tku3tZVVVSM-#z_X4V#iIN3FrFElD*G=8pf9$igDB{5 zvS_~629M$k@|A-`w|3%I^q$#pjy}Bzn3@AUYJEMUZ!$e;jovv#^Z`)KNvqVEqi3v~ zlOvm=Eq%($uu0^ATzii*Gqa*OVCf(-4>-%l86(@>I<9wnl||?L(bZ&IJ4&k^tT3Iw zJAFSf#k75^y-Od)-yH$~d(kP@t;0g5gJG1nIqnA2_>TMmuJFp|xNB0P+kV{sYy z^)7+kj(4BKXatKv)<1$19)j!}aXWtUr@h|YG4DK=vZbIgoWC)8(ZVC8lwu(qXhHyi zj1&#!5Ex@ni^5ICx-&Mr{ejPFdkdW+g3X1H)Bjeo9oQbXe&ci^dmq~ zcIVFVHx6Kz!z5Bi(No;MC{)o*Uz`962Oy}#?USseaVx74fsq+#U%^jA!J=ybi}AN! 
zz`##mAZwmv2KZ1;A<|8D(47u3j`$NjtuZH~$d&t!$P_RsvxDz|#N<~*9FH^3p0j^N z=7Z5^jDhdUPDH&KcXGk_E6wD57>oa*y>|eA0KAd_z@6T+{vGeyrGdsSg9E&CT#S=p z*0y$mAz&1MqIdjHKW&VbZS{Y^8L*U<;NZxp0K)iaE-WGl#u?1A!TbflB?gLm){z23=l zK%Y6v_OKhC#}j2l$aIAWwc{}4zd@s1jQ6R$tad;D70Ivb}`F_q!WmMgprM z9{U}ZKpQ%hoS+>HK_-tbkIts**Pa~fM22LKcaCkp!#m+ie9ZztbUmSteoH{f!9-lK6&2!I^C?O(e7N@ zTjRYB58yyh62Ly?;~AizLdlu91MU-WYD*S^VUabMo;e`_ka+EY1tA6gq$w%?Hf(km zi}3^)b|XkfB-~^3Ilie0L=g;pbQVqOk@ED0F1E{%`bpcrhyX(P_>T&@CVSX zKO@P(##`hC2(RoAKLM=_JwV=xg`N>e09Y7(YcK0VhRKjp4#5J^Yh%_PBhOh9S@Z>1 zauD>v7?BlY!Lwwq`)Yg+e3}zrnd78=*#sp0C>!}Il1WEKDhLKQB{9y+nQO^jugaAikAUO zV^oru{LmTx=RI^SIA`7Gy(g8OagwZ&tS|?#BeKaB`UzgPzsXMrEm*hi+R?tRX)^_e%qDU0-g;DR~J(#aSH`Wv&kdIlh7$LJDs zwSHv*@en5gc=n!P1${OH8U>*6flOfs69~9<0Kr|5@W?JTujyQJWI8F2PNF06WqeEi z@F;ytuAd%aqt8lAe+n;~Yffl70e_d(GB0!Rn`nJN)FaopyIbG8I7-&c~IC8Uu7R~U5FIuLcE+oD%&i@6rdI z2m~s%A)oUw0`1V@_%7L?cj-7hV0_x+tab-njz{wCy-Eo2EqZf8@pc>cIV1o(yFhnw z=E#6=W9(inGRU5wIX>a+>qoE*-RXD%9^eN61biy<%KivC>O)`gcze9?nTR~z0F=le z*|47amH{;fI^CQE4y>nrROv-)P~>~=PYhE`XR&p%HZqvTR4F@rL9Vn1U@I${`;S(P zk!`d#oD-mzV`8jF&J4QXJ#;j0d;lz)2cDZY>rPjP6`EuFa5!Z4rIeYC3N`?iO*l(w$&VW`lmcfB7^nvH}Ez5IuC(o5V z3hWCS;VEs=;l_eabRb9F8dxvRnBWwiC4&N)bfF;c>U{0k!JO^gp!H6h>ofH+%P`3) zIZIa-)b;MXm(kxH(F_aFd328V1$FG-r6a;hZ^rl2?`%6XDO?eG3K4OXBYm3Uq*NG~ z^*)2sw^fXceA--7@C@+Qu z4>5{oC0pK6_F--vr73cXFQuMWCG7bl1Gq*A^|?AZjI2@%P9&wd9y#qDpi+d0%rIOc z_vVmd0-PB(AQQOWNy{6vc16D#NOJu&rT%7J&6k7WG$ENp88$%t+bQ{3W(Lf3!k9=o zqXop_)47KN9Yp1f4_F680WQyQ;u&m?25`?w@Wntfz)CC7_c-!x)L6DUB0->=H|0z_{B#U<{(@9B2B$ z+|YvK0H6YMGG}FVwZ*B!ALdThfK_uqH=wM23M(MG+={=Im?1891=1V-_oyw2Y{!!;rp_K z^c8xd50HyCf*Qt2ejIFozr0swyV5^SwJ+mm!;DFm58yIxJZanjH9qy#fi%fZnJ@j< zKEA9(4Q~Q%>ygaU!R~$%YlN=_m&!246L_xjJM&k{W=?pZ!yC-~?x2_}y&8D!$^bT4 z1`b_h*~$HmoqwXc^+~@8ya_Vk2~Pjq)6$+#`UPENdhvPzHgeo1bD3l7$-zqx7!Mwn zo#b$kOZ?Ba;p-<^g0K;})fb(rUH!ug)(F3G*wIm-L_dDFmNJv{AR9}b=s zkuzQZ0|yN)7&`w8keSm22^68IrYI31XE+%{P6e=|&i3gMjztyc=0NLZT-pTyIW*`t zPZG?S09u@Upcn9D$TPs?Qe;=ePILu*W*Yy9U$x6na4bZoM9c0jZT&{Hsl5@R^qpSy 
z?KAFk*31bwW4!EL0k9}|&dS`^mD5XB)YJo7YTN^`?X{_fANUnG0CLD$&a^UF&XS0p z6P)NU@&HgN#jtkx2Qcd%6ufBdMT-G_01v=oVD-I60!&#v_{n zK#Dqci6h{K-<4YdTHSBsJv;;0a>mSS1|t9gQ993X)EG9jFn(jr(5((lB@XDKKaQO! zKWC0Z)4qY;BOAVxNjk$?&{=GR;0lm$jAbhB^vVP^`nUg+PDB&(%&u@0{0`jX3%n_E z&jIqBvO5kI{Z`4{*#RfXvV#OTmD(~^&J%|Qxbs!UdzR{OG-c+jd(IGkwRfoO`c>I* zQFL?{yx~A{Nbos6@VU}C*#%%4-~@cFJ6@>|wAP04;BR!}2>QQ(CVFv-fIRa6Rs}`q zEdA*l9|BF?$7BqZw9%om8feWK*S@2%(aC%52Wfu{eg?Dvc5_g^L=MOne(@anvPO7R zd!ExCo@eXubJ6K!UjKN_^MY_~P(QlsOM5x@#cJNN;-qGNv zK7ZuU7zBo7a69=5@S1hEl=D&a`7Y9sQnw%!3_(%)fT4NA9mD(ootZsWn2rrl-rYS# zaJtCsPT95IHoyZ^oSLIaL}wTfYv)~nN7}Y%;94fJH=FXNBkCPE! zbKeU9BNB{8vQOQSuqvF&?I>tqTa;eZzKi-OV*noTboBO)Lx~QuP9pyT77Xv%#y@jG z`;MTTr5!sZ6Rl>YH95>^%3-c-<0)^PIcwHi^9ZBC6o zIRVE~-l1cR z+vaHWP}+pYJd0NJ5HJobcleFxfr@#j)}4%{gV0bvoeoN$;AH_3x(q!4ZTunoL^e8( z8n56FJdDPIRcvyXFXBnOMF-$}yu&HLE3%qEzRX3N?UkpKA@js1)1l#{0^$0izsWzE z(`&}3Eo<*H@3AfZu9T4u#ETqV{Aym^MOQYK{8{JdZ9am0@pPwbu~%o;QM7-A+(-6Q_hp0iO~zGa_yR)dmv3LO(E$O1iq@0kUl0sG;{}c^ zXP^u=IS`zXb?FFb)d=>%l0$3N#hdt{KsFu_XcJ(P9lSdY6V2&Tx><&s%o-!!5PT76 zB9E1=;q_Ufh3{q6tsnh{Kk>NWfQ$uQ4EG9tyxptxlTM9!nm<149xu91hML|=4y}`C z*LwmzOE$={>?;~dUd#(!pYrFgdj7FP)8POXloDf*F$sp~BoIpU=v0uqNm=B)^B7vR z=FjNNOn`ewmrp)V(N)?W83kYD5|-FiMKCv>}#a z07S}atQnEjSO;*T0sf!_0e{99_@;F9Q3+X5xOrk+O0LL7k-s+0QE+SI(>p*V=YSGq z6d6hm6+lJ_i*V0P$z>cXBQow)d9@~t0wu0r|D!}XXpAdIWS+V;c>s+?QACME{247W zC`yWcfER;ioRo0}?W*p-G8ykNPFGG)<&5TPjI|q2-vO>U7mAm`K}Wn{ydrRSj`kCo zXdj1hS@-Ew&#@MrqTBGN@=W6p-SY*kbl4yScD4&?x%oZ1#~@4gi!ANUs_uz451 zl3{Y`i~a`Ej6s{|#6ib@`Xg(`N&ljUwL&A~_8CJ5v;dScmBvZFx}yhQ^}Y0wYAB@)Z&Gf&Qg z@zQxfcK82?5bKW~xEed!rZ+j2K(lo+e(x(&G-mx+GyH*n%#&k^Z;aEJI??dWarfz& zjwDaNaZKnIR{3`09mXY(Kw61 z3p{XqWHkkT^ob7iC%LKQZVgD`YtM6Xt-UdI5o%z#{VZ&r=giY{ z(P!((reMuku+5mPQ@Tbwc-GvtrA>U$zQ%ObELqit@sfLh-gpEQ$Ov5`NWoUo73f;# z>37Ol#0LdT{iNq%GJ0s1ozdy*UR&}&H=3W{Igqm2-uvkzr$3d2#|{l*&8R))!Fw~& zPOz8+-M!BuX9mLnRg}S>H8T@^esXizWQ+iHX=w&$IyF}joYotI14tMg=%l=kHUc*{ z%A%47>1R2gH@7Yx<|1M%BF+Jsk&EWev;`p2q?Cu8u9)JT}IOy$Bs=N_TaRfNl=xIMxCuA|I 
z06m^TKm15{%2)uA-X$~o@(g3DEtwRKNE@YjJyCxknjDE{19`O_ zZv$73NkwP05;asv^)iGK?j;mw71S2_Rfu zpf_bOheq(lI66L5pGupI*?52>;DzkiF9JjYoa-Ki^jW26bfGz-vF8C5fD33={z+dc z-J^qmGPV9!r|}igiRPR~nKVFK``*P<_KTs7s405mOR~qgx9-LvsDW>G?$kO10?9}G z56lCH_{*2RIWzPFosPzUV;RT}2f&+X&;b+ZicWZd9YC*2)9{_r9Q+L!as=6P{GshK zG#vzh&KwQ=fq#vWgJNyT2|D?v>+R39UU-*$DO;l_D=)$?Wo3%IqXGH?y6y9%SBwK+ znhUHUn{afxqxaj3svmn`I+!ROPaoqCV-g%7V}b)5vi9fw3VT1D8ep8yE>@cW03ZNK zL_t)|X5YzA!KZJ5Z#;#6@slr^c5;Wdogjv1&;#AAz4a3uBnO;QhYQGhpcfvH@xfF0 zr;ADHH*!xVlp2yBbC~;;I)zjlWRBy+Y4(n_?bJH&&=>T)^)ep)OlK1fJwrEi7aX06 ze%cjm!Ur$_N4?@0-I67U$@(U36rb8|$-{S6bJCNZu11kXZQL%gz3n@BeA$ z52$G|F_|b2r6Y={CL5!*kD-lfl-->m|8H_;B6DE&0ogx(bSgUrT`e^Q4m@9_6B!AH zMI@RdLvjhqVq*y2z#;cM7g5_C42%Ou9PpRme z!RN#nkM=23WAVIxbB+*B)khX^VpKdta3{_z6S+C!Vt=?U@mck;MWo0BvB54;p)iiXmqwQe9GHY@MmklsROuW-7;JZ zvTvZ5QS%I5?R1}x2lfoPlCkit>;;g@=y6(rG)9)e_Kly-xjTjMsIoun_c0XWu#KWX9*67$`V`=g2=DW9_tU+?+wQ6wCm!9h}ffL31Pj_&QWK-yGI@TC`77q!W z;CFOIBYa6O31E2#O?`EU0GiXcjoUkwiMG)@x^jw?m!bh(d34I6cJy!kWVY})KAHU6 zAptTKXaNfW(vw%NN+CZs>`Ma$aPT4h!u|r^cl#orjw9L#cCta7bn-9gL+0_b^|wYH zNQ5TV9DmU>^n#!u-G%qboP9U+8f-$h(YLaA>=>sL54CR!z0qG5RDc6-pgjy=OkEOL zhSPg^NSz38(+XbVHO2@J6Pvu0*~ z<-$%QJDE0RS47MjikJWjm607qa43Qzt@h#gnk))Kpw|(t-*cD=Vb#JF7?$MN#F@^ zP*TE4yE^^sTfu=8xe_Br%Gv`m_PzjmjH2~4KH$eQ{ubO})Voks_Jg4Wd6^Ss}&)$ESn=|lUJI5eChI<&I3 zfawe-0q6E_aD0H}>167ER+R|Em@5bFlM~FI>d+$x7QJL?Dhb;;Am(YmcmnX}OyDbg z&5>Xu@DCfTB4a(GT0bxVy};pT=h<{TBRfNW3S`kO0xy+ThcP=qfP6WCpu29!ud;M= zCr9(n82Z5eKzf*bDO<#oiPCE-W{kg z9U6QrD7TwY|EaO>w)@JV(Rf)JX+}{ff?oZ<&k1^w_N$uu6oN_O3e!piW7Q2Jd2gyEmj_PK5|yF*ckXi+|tjP;tgRZqGGIC7+NaJt-*!7uyZ znWAe6ozIM&VQMog!y@8T=||)AQ@e~Pr=T>lHt;CW-$@+|m#?$*dz*SWZ+LROgGV$S z&r-^OZ=kfut+{9es5&Yg^Lb?wfCWG*qQ${9ZpNqmHEr&rT#VOy-B+UaTU1jtjx*p( zK)_lu#OUNFT5-yZ^JsrcN{mqf=(Ty~s4!xp%A&Cxud_QycI7KtpclGw4wN#91XH9o z)OKw0T?yK5pGVJ?4KRPk6gc5@OSzZ91O$!7pLBX2(bR&%axI;QN* zQHJAaP4GI$O=(KynCt0)y_cg#hXBI>uTwI~7CU1OvNmL)P00RFuav2=F6QKYV3&^L zxG8n1O!jK*hK}+XV1KpmEnQ6C(Ff#Ff1H3E 
zfU}4(9#>|?nE*tsGv4MXT6c5jFt%Ss`{_vi8b6xp$K14+PT~amMmxaP7yXB)I3hry zwe!WH5q!b-vQcbEmo)poK9p~13!P-b^=({egy*!~!2on7e#eJ7;^e{_de;Ap%^K2& zPDca4IjYuvMxfCXExQPpZk0I|jpyK&ZSMq3fRAjKAtyJV9)9zQ9_!t*5$sgKz=Cq$ zDm#lO=(-N-F@FxSy<&oqbYg+ZGF9HATj?kJa_J#~7r`|An2qurhsGMwl`~_%I+L=? zK4+UaK-%%Y0=4PYa4P-IIbnC%%=N@xyv+vDU-(RUu=S^B1Rsov+)PaIWdGo%RG z=1cJdAdD-J(CId9XfJaEEP3|q$nKOFLm+w!uxC_`GH;B$lZAljPmWp+oRJG_S*a>e zsJse~0YXZS7@SH6R;QUR=`fP3eKJ>agNE%D1A^Mf%NY#wl zNTo4kMn(jU>1M_r$U|ei7MSLwk#(6F{4T;wpO=l0b?`qt0t~i)23;6<^tOI75S%^z zDGdR5)w6T7MgKVk1wM9lyYY&S<2BCq)d@%Gk{o1X0&>xg?BiK$OlNi9i7{4EBvV7b zpsRPSBiROM(0At&Y44IlYe%29DO&J|Yys`YhYrSp#`pwJ=#w5*>UegFns<$tGeVC6 z^4=pSc-MTC#&tCCO6Qz*Yk@9w2k@!i_7Qb2lfKryh3V~?844Vc84gJM6X`NCNiI4l zgAQv992YVVKNxfODfwL%5?%1OxpMwF?f8v+7@P5%uQjHh$xz3z<8S{*f6w7nW8tKr zlkzb%*O!vQvWE17IoM-iJ_1JMh>Xha2dF(qUaYC-$eHmK+}Y*tCwBblv+;_+6CJ{) zu=C&E^TFogYuR5i!p6V?mAJJ}c9z{48(qUGkO`%e+OIbEzEwFOlLIGmK--^JW~KuK z+6%+hD+#kEbH3WbW3UTeVYBsHc4ob254}6I#>sjzHP+vJI=F=_s?;dsx*Mo@x|W|l zKXF#_8Xh^SPO0RKeu61Jy>If<@$bI>%Av_%Fw`C4OsFXx%z^1FItDt66lsHUv0;$G z!*~?SJZpb<68|mFVSYxRFa;WZMeSvqIoJDA7;FGV%Aet&^h--pNbQekb0uXBGJr6{HG%zE#3IL~pI2*GGzS1Qmd>p&14E&rE#q5hoZmZk7#}jOc#YCM z+W0y9_dJ89jM98XOjppZ9}Wrd0T7A=DQlVK2+a)*7}HL+suV!PRy%lJS{@yh|A+`H z0}Q-p9FNLNS{q{~gTNGd(yoXrW9H~r#)Wf(CS6Q7Gc_3wbCp%dIXwzUm>0ur+ImOacVWh6P3z?x_qr{*Zb#Gznd0PP~EXg5!vGnSdjTgR3Yt6YN7vPO(3 zKH?l&BXSEU0y0kL0k#2bK%rxctuw$RD?%Su2aZ?K+ZuJWc$;*M9bjTSW#(j0tRejh zXv&6|BS&7Ffu`1sv!cukNYyr{$(qnLjJo-Bzec=7_5mB?CG)@?xzdleIA(Z;eCx-U zI8x>=;!7T6q}pJLwrECf94XCV04kM=>64sT!zXpv)&!^pCS-+ypPh2GBgY0vm8nr0 zW3JYNKB4pcUdbmLB&&oj_|UwSv&`d+ulPFMLDpn;tuYy`&VTON@&CZNv2omees)J# zfe=9`{MANp?HIo{tT{g8h#i$_8lU-_JHBJ5$rSxVH&|DHTYvU>y-07$Ec#gY%7qKq z84o_k3+5lNof&g_N|_fMf}hNtP9fvo;pF%X*u~FyR6mu)d0yZ61K+jx#~A!=j%d*S z8FTAmM$U}v4>?jU%%RIRl0mjYzihPi#}D*!89pEnPn(|#6~Q0;E=X~<@9<6+{?zB> zoHI@Sjgy_avwu&Ei9Ot+)PI8In z^d9*nOXN&Yg`9Ix+V`5?v%Z4xbh|7zn#hF1L7da@=hTyURZvs#&KU7!QGxM-T-F$Vf_gPf+VDYEQA7YPE=IZfte+{Oqj`5llF?V)&#+dLU44usTl5$*|cSBFM~ 
z0vH4^rP9~Pa?vj`AR0ued9Ko=N$bF})80fL8D<86kp(=_m62pfDPjMkJlBZuS-pIY z06J0V8~_n*071$e2%vBQ9_3NS1`ILe$=fs>z{J%(8|K3RCd1L0I^?r8 zQsnO(Q%((_4SaE)0El@ijd7bJz0;j7=mD(Ass#S=0eV~ed2di-s~wR{k##^xM6iv- zfQ_IF8jAS(76B$BN=+J*|0`hte$d$32KM#A=*F|g%RuXc4nBpZ}PzrpKwxvd_Y~lPxR&L*i?!}E|eT~`7gdQUotO< zfv4$3^rOSb%x(?2J^@|tbW)*p^8XH2F&`k4uELW58rq=~P<1AIY$m?2FUxp3;Gy=- zk8Spo{hcQ_MO)(_FZ8`&2j1ksI7S;itUreepVFg6%Z;y$7uh85W%|vF{>r&YANhZB z2lVD(8AC@qdr!HYe#o3`qX;`^u>CHaQM%Fpm4?w-;%>d@UhY8$FVnNG9cjK{q7{um+@Fj>%8mmernH;WaP|_`G0zy4xl## zB6orUyW_0v4o%T&26xtjcGfnn9a=lBl>Bin$>I!pWcxWw_Jdkm!41w#_tfOfkXyD- z#)MoM(+otm2arQUhGe@rHRP33Evu|TDBKVq(`oj&(eXJnKErFW;hY-%D2a;?ChkoS z)!y%ry<0J&F*dGf&QT<1e_DqI^NJ#401?ELAteK>VKn73z{Qo&Af8Ac@TQKHR}T!% z&0D(y_22YdQFI1Xgor?@L$6fjr|K_frr=1a$TV0pid(eAcsk{#&9=ZBuxIT(2h0F` z3?ReI;4n-89WX$^MU=bvjox0)nTTtf4 z16)$tqVTdbmDKGjK=d*uatZ`U=>ylmf_Yn0eNf(vhOu@$vT^#1!2>2bO86{=4mjHk z&N##~WyEq)jvVR6SczFBgDW3Qrv~ui$N*}Lj#8B}AD$D96%YYH7$h`SrUa1EGsZ<9 z>5HzC{o9`7TRl5;6uONrLt~DZH>=> zkIw@rXbtSXDScQS8??h0c$Z!S=FJ&^XS_M0=wvvA3%;AGpR74w|qg92Gz5xOqpR_5>xY zhw2iJQU?R!1M4e@?!XX^Eg9s1&}Ve6b=6O0ZP{J6K_G|zN;c5}kJG=}@e`Ia#%xm# zO!9y?rUU*{c<(7DM&pie4zHjI`q2-6x{@;sLNO6)o~p$I7H!lORl-~p1qDtSh~r*>>_upm z(TG6FGR!(#W5)251!bb98Axf|84?jJMlt{h5S2M$m;j>Y$slxF@6kr@Hoj{cPl$v_ z6XQ9lP(0^7iV*0Tr6Z!~07{Bg8;l@_X1;r-P>OPkF5~5leBgW15?wfizzAR~S~u_Y znNc1JyA!fxU>HFTVH;&R6buhzUL~TGor+65%#)CmXftT@)VAm=EWbaYi565j<`#ML~12a}2D#5-ZP$PT~RkYef2i zV0{S!c?L}ZF>@y;%8~#>V-+m$oP9ak5g}$&lu7E7;RbqvI5cB~v`yxKD*Bm2W6jAj zIbpy#PTiMNB%izkr)_5Ll1m0XXB5wL8J@XvXgGJqVXkxmc{KJ;0JIk9$cf@i0)xgR z>NpBmQb*mPhAfJ_gQL-WUD02cjWE_hGL=I+IoXoGGXK91%r zpP5cN{f}p@1F%Hr3J4uVc+;<(0N@D-S2nr=uhwqTccp7iBAMlo0%_J%zZ_cG3Yit_ z&Drx4zXJLKA^2<_S9=tY&_{>6M04X{bMY)^9gYws7sMo^fULHRNj4e1l{CttAen9=Z?Fu0=kN*c;ia;`bHA)HkO6!$ zgD+txc7ra(^PGTb|IhSW|DPD-N*DelpM)8zD)Bd)3YVi>8K~c_obre!w|i(zu5;EI z6oSpLiuh8jm6|Z@qLLjeN`L}6(|M@0gR-X(0EPtNR}d`b{1tmwjtCy}03sAQ(AA!X zc@FwdGVt0qVI`CzETYpX>`9}_nE){gjI!u>ZA!xzW6%x3=ErCe2#U|QHj3Jd^fFd+ 
zugEN^(hfy~chChuFiuWE_kZ||0izH((12c9#3DVFoaNBSxLh3yV0{2f85i&9?<%5| zQfqTInu+2uigP`?F&kYdZAO*+0MI#*914!2?7>miW0xl;a6>b6?Nm2^bJzeW0Lo`d z`SeNAa~xy@0P8Cd$HQ8tfSWPrufH4A9rsqWy4d z359?J8qnFC9Z_-bkqv!t4BIOM2%0AlE9&WYG}Q+EjgR$(r@U)!#%n%!8xUEKO2!L- z^xa8Ncm$uym{=?P`X)nj1nP~o`;SCrIj^GfqVT{zC#*v%c7sjY>_R^41$3>K8Rpbj zBlK6_k4LDKg5G*%E^}X!aI1GZtn}2`}MU&snSX zPj(0ZTjO{7iazJ4(X*8o86Q3)=YaWiW^xkQ8Nmg#kZDH;x&)5VXD4U<3JkMr2RvsT zU<^8qj^v!n@SRFp!6%Jzx5vZ)8jH2|d8xv`;5`XSX@s zc!;dbvUIX0;Ld5~#8~_2!0y5_`d8+Aq+<$X9oaGZ8*j-- zo1=V&VJ;e@9mYpS8vG9OqwuvMaz+pbg)M0s=?Y-QEP5p-M6tN)CwU&`B1>`pcxy ztH#rj^*$#n3?*5VHDX}!hRAh1j7RZ1dUHVWl>d>HGBUIL2<^4SP#F*X418->1_^L8 zCPp5Av~LIvI;o01Krip;+c?dY!-Ic}*=I^ZIE)?HYMnY!O9b7rs9X?j>FmDz z-dLLpJtsIy{$xJNX5v3QM2?I>R-auo#x7*6eL$BSa=y`#4rPn*UXG1*v926$7c0}x z9o0;R$Q&N#K;d=o9nqCcu|Xp7oC30IUdlU_WYRtJl+Qce{3|}s8KAH5pRsfNWR)s& ze~O(NH$5s40xO_9C-2GrwmE-%c4Z5!5qV-0$&+AKn4^jSbA?IBh_=cqkVk)4L4G77TEa7hX4=W#ZUC1U=f~L_XOda|5QMIm+#OVUL%`y1+4equ)qD% zp()xVg2k{93ZAV9%G@Mb-5tdtf+20^i_)5>5@EWI!vz{BGg*PRHJt(HnI7C-uwU_9 zwTXmgRs22!K7a{81<)2HoLQ~6(UYfhGZqR%`4OWb%0cP-$|=aH0mwLF(zPPYG9wJ# zyu+rb7QxNI=>oJ2UCuHkfVTiihTZd`Fd}w+S;Z4r7!-)NUJNXPJw! 
zib^wLvQSq?5?6Cf!R>CMtUg}3i4DQ#z^N%}WnAVe9d7?jcXTkYXagkpAKIG>-ZMzoDXP^JOx{V#{J(m;+0XQe$y zfC_!!d7u$b#!DO_posDIoXxVvZGQGM8N0dZS9^GY%^=V1Taax~9>?gnmXjCI4sUUc z%;lRhQKbOj(@A}FgngYH5l*JL=RD4`$s;)Q6faq zP}oP2Cj!Knib4|_NNY?eDNjV;dGO;J0-l&Na1sH z$D>_%=(E82k<#_Ks4oZ2I7P@sH`=2DI13I?oE$V`@IM9zxTCOr11tc0C(cdf_l6&$ z$K0>dnxGxp7&jonNExpZiuylOU;uoRMe*Do0X{^D04 z*#G^naxt_=SB@6_NzOSP_#Keu^CkUHC z9z9!moc7300g^6Uk$&%VCYb<6o4&OU_9OY*8rw(J zX_V=b0y_aPe|Hfj5F_&k7&|24%AqOfGk=}|X*!DyR^nuApOk-;%~aZmukdhXW|j8> zfOJB72dxAwL}>w6Ye`>R9a@rpNzW(et*I!vpb~x4$&hWD7F^>n7)NqQR`9^o+sQE* zX|Ieml=;Dv+ShO3SM`W73sTK__&mN`*C2g?7WAEftzaGp1`W|cDJ0zhEF0(DP3%93 zH)h-DC(t81A>-&|O}2>xcBk8~##ok3;0Uk7db_6pzMZ>1t+TJ3G`&*jc ztucFGFDSi^Zr=*9{=|4|^VFeXs0af_;Ejux^L{xqN2jo)m>F<{06G9e%F279au!Q^ zmSI91Da20%A>cGYQBlV`qQz7{?J0MZCgr+DqKzRsiym1EA;z zj9c%il${O*j4IW{FDoxw2OyO(3e@fJMdLc+zvy3CA0uUs1thM@TG{~3zzb63w3st+ zXRSB`Qt!Uo*HlI~2k1&x=h*d!KY<-|5V1yA?Eo4wV|PbR*Vp7lbH`&0A>GzVM(7L_ z*k}rXlLtS686cKy({&t`Kv|$&<^q85JSWNNRU8t!6zJuE09V~<13>zpQWkB`o3b<3 z$~HL(^c?_azQ!Pm%?P8f|DzNB2EdeB8i#e1dEl&+Z755|=`kiiOa|UMGXC8f&zS-g z%&{^#^F%9Po0G?xv?kWsbAIoH!+?Qx;z*&HQZKp>NCs5rX+)lD@0)dDO9Xnl=jo`- z=}zCz&#k=x0=|`5>l8CUm`%b59CWlZH}j9K(*bFme%z$v34?-@zPrY zM=~X!lz!yU&`DiPiQjx#51;$i2WP@j-J;g`#u_Lq^n9DGkB)|(GM4dJa1fNTUjLe22N!d1V-mRz1AWdP=Jig;Txcm>{&aq!-12XS8`QVNILc`OFS`# z-|01ShgNV6K9{v+zm3!X=0Omb!Cu~So&{NRv z?y~CLw$Gl6kNoUNbxtd1P4>k)<1xBKP^*0~+M{o*i#6^Jt*Ux*u$8Lecfpf&-`nok z8za5ishI*MbbHq%kQd|cG}x(6j4%8Crkr$loIB5%OLu_D+6#8yoqW2}?oU5+=+Kyq zRHb@#kvS1VN?-Kh?gBmU&iynZCL(Zv5++h}FPb4rF6#bP5MrJ{QA(}9uYevyfuVeT zvdMJEc{&4=Eo>X|IeEZ<=q@8#eLsUTBUnGd5IHmeC}2Y2DFf+vQ_2EJVobW-n(z@? 
zN<~SS4U?k01Xjrsx-wKLjrrS1MoHIWbY2?WGW%^A0(5Y7v@P6Z|bQ%&`Do z&|M!K0UMh+6S5yNtT{SBuv47SsAE6Nb}&|I&N&T$DjA|R= z^xFIlJXIpp8l#=Qte-tA-v8@g|N2K@D}cOaKj( ziucJKeQ2#Xv*^)?>a%1y`3kcyfTE8?5uH&GQp5shuKbO11!_E(jDc z1+2|aZ7u_HRwngk;a3pP0El!DG?BQ!{N*oyC<}BY^k^oF<9`5;pWVVtQ2X@M$yh}FD_8WHv6@qR7vd-U;D2NeZ+Eu|KnJknAv^~>;%9v0dH*v$ z8AG~Isbhy6pbZ&AKP42LDUQ9iWTgtr5V@$wW_CH+-JX?TB>iMz+zA%@b_sP?Tsx7U)1=-x?R)$J3m%N=4Da z`fzrY4&KSn)%eL{Wh>;7lTGL1f%cx=1sB)$(UhE6BaTwzl}W37l0$}1tuH%G9+b?K z3G{#EOB^=sRbExLYU*}6ja|W0_{iVNFUc;s!f)OCF(c9iR{$vQ%Yf<+9m+cIQc1)s!9f*xbqI3|xL=p0$y_@A&lgrgUN@o`5m%WTl1pcw!B5)QMf} zDxT_Sbh-#%z@&J-gAH~7``tW^z05zGO2(A>k!cuZ%APoC~|L5xm*QpzOK zhcPV}$L8CDK-n-l8R0X~Y<`RxAqPZ6j-|ifME_>w<_Soj3lO~-Kr~ZRjQJTZ83js- zV2H{)_2s87@IpUjJJQG;C`N?P6*VRN02pB7C!uGkD3Ho*x)%lz0raeg|JhheVQ9}g z^McJghDKvqKBYu{D#tq8e0diYV0dIrIBueiM~WD&IpX-k_yKi590;Rmc26hCsp*s* zKth^cbQBF4=k}>+6DT4lHZB4Zlq*{0KxvQs*nh!D0|b3_kW85W zol!O>#$9I5*DO1$tgI8C@G%~fseKcD0^;ZphFRvPW1A}>Fct=j(F2&yj?4`315o&u z;irqpA>)F+0yv!d_CvK<*nSUSh%>^$33xdLies8i5fv>1jOWmiA&|kc_M9T)2bjH= zK}IutU>@4Q2l$@6n4dO{sj@LNFOcIWLr-tu4}IdDvXIsZf1xAYj(5muAQ^927yZzW zo+H~pJbhHgfWswlL7wSM^s~mYEq-T|{T+R)Z|~H%_SI;6W=L24X&f9b;EOW^h`l{L zX61Ktv6n#G0vpyFKo!09lk;SK$t5u4S!+Q4jXQ@DKLgwyya3d4ima;$_IfwdQ@OO3 zctz=zJx11ob0@fj{@Ul{?Vb=C&^13!EdC`YfH}v`+&YlL9DrTCpghf3fOVN_wy2|! 
z$>UiTz95se?eGHtlD;x#4#2##V*(;_!WN01<9E-QD;$Dv=)JpgSbSqYFCAelm4)rn zYmSFJpc(y*7W9MlFa~YQ5OaLlyWN9ip72AbcUqImGtV%GXYq}!1A95|8S5fYywkp+ zr{h2a0d9e`CzsAX>8A{bV3T>kLL3!xXVv0 z$r8JSpV;`B*?BV7vuE*qw#qYfjeUr0h-|zu-8l5?^ zJC4>J7W)Q$kJ|V*ho%!%fP4yp@m5p(^hmyx8ATwfK^XuVfU&foD8}1zv!jV%0EXo* zgH+8V@P+v)2cT8d=?=gh^&4>A$<&-JBBLpDAOhN68qCvCzZe`1jZXyasdKr*Jaaz; z#hkJ(f>SLq8vDJIk^mu0-zAwqKsEjVlSm+t;M@O2qm^Dgi7o;*6ek*pmP>)&l|voP zxu~Zx;b&{UX1%Nhr-I@~13arPcY@9xdU|F?3(<{(58#mnidd8yu%vvunU~WBC{nQA z;lO&fy&}e!(gRX~T+Wg5E9F9OBRW7R@E}@cL;2l3G||9#M75=+fp7AZGRxSPdKVZm zAI9*>DLd8;T>x^xnQY(zJlpN@cnk3KH{G=wY|nBAjF*uEj?hXrgrQSjBiKR?jn6o( z3nRmrGUDWsv*%slnrz@rM_bZ6z`7uib}0S!4Yn7>`U9u>_bg*)%?eUzufUA?a=rl) zveJoiV-*;v3=(!HF zu*Xdq2wHFw@C}e;Z%xh<-ebc8N;B(M6gFFjp4N@yEBcBqW&Ym;ysRS{qA|Xb9qPad zv?2R+NDl7Vp#tca6K3w5X1rLo&)QpGf6ssoUQ=QT#Lz{6Ai2WVc+xv_Ki`q#*r{Wc zZwh{hP@^v}c@{`46Dz<%e((+ZM_%w>cXyrHUGqdE0SX{e2C>cScNk&L!`jjt95HqS zt=KPn;tDn;Up@!GW%$0ahiW=hbiGVB{)C^7std9?f@j8pci@~(0438Z9O$0AWBJdp zj<)God!YgJvr30g2$A$%8;{yudzz|qe#`Cny>uO?w$2(@kIE(`j z!UJZkfDB5CqNX(OPF~WNuZ+~E-YA1Z5mN9rEOyD5J}O%Rr~n2u>vs1yYjP!g0F=#` zB9|$_xlhHr7(B&^mK}P)xNUbT2ad8MQJfy+Ns^fhqeS-bXc3( z_dbIri}f~It6z!|cyF_Qb^eqI-q5}>8qT~(y7@dYf-8-uV}o9m4y8bSPKj}1C}I4k zPqfi5@Je<7Y2Rogy9mfyFOen&{;gA^f5s2cKpW2EZpN|z2xBXq&r!5a=*$Q)9_C|S zfFckKq;z!ePT5(8p#uw)0~%)=Q}v?{YbLF4pNNPyKJ%{6@ESUyzqyffPBxHF2LR$O zyF^#O2c3a2>q74QpCKg=+65$m3eRwY07UQThl3&`g^fR63C!3^1hwvjzSJyvk&w(b374PmHs+(UolwEWj&3UB{{G1K-UPomR)m z=Xj@#hW^PwduJ*Y%Q2XM5YKb==@@hX;(4SuDgvf)Q|m!3;*w5#$1jg{Syx2wx2 zjSt{t>$F{oB0KGC%F7W=+d~4s2)fbrobVm{UVAw+Y!~O3{EMu&kF>HI4i8;~m#n#9 z3jlp}V8RnVKzp(v(4ucn{@K9;oy3N}l_}CK953{z%h2E0-dx;_9)c6Hk90$2zejsf zpUiJQYu)gNDhb(kdc@k{wN6l-PWP0@*-mm!zY83(t$4;-p%HoT(>LdQ>YY!5({l{j zEj)tX-ebN9ZOx)A8y@fyz%Y=acm*7zi7|sAx`)zp1o%((jvR z|HYwczkFTv3dKg&IJJN!+W0~X05I@k?M8m9U$5va@oF%0z9JHN`Xf&JPRu+>p zVzaurGkV6sL2$8|YzrB}dmW`*=^tkU5cE8wX%B?`V%h@&7&mj_beIQu=%PM6NiNH@ z0s5lwXomjU)xLG=G^P&DFjr0;ox^zq4ml-dBCMTva?S$GfhKER{XWNxo-&6nhbyyV 
zj7O(nc^?e{FPX`XYL=NmC!m*pF&^`y+eC5k0gz61IWy)*{{aeY1i%Eu0iVFFY!2WG zWXmd&QO}E#3nD025^dK{L61sS&3R^oD-G^A)we;673h$Epp&zO*U`d0H@}m4{6i<8 zDZ1_6`BDJSx|ALJ)HZZ5f?$P;2|x%>poJ<2>(4q@WFbL~5va*sCC`Hfce zAt%qC6SmtLab~P99w6)JX1&@EH=UX?TXY!SCkGre<&OA7(BLYlF{9hoi97@1_}*9h zAdYmL`Qc?Yo~_V7XG4}|_ekaG%!o{*FP){|4xTX%a)G}3AxFjs=X}d%=%W%zP6+xt z4UtS7?K@*n?Ukcr;J8ka)sA)KAa=s+)5t%XkR2Hs`pV%tcSn;y!D;LT9mNTyD>-lQ zP?u=VLu7X3s(=)^riaK&WuTqbTEJ^M@;hzs%&Tf3Hqh?^a!m<% z1`_D8umsUUeKRGyJ0{G**eanTJd_Z{BXTM=$zU6!(v7oH0g(xr3}9T8GhzG0tDiO9 zl$Cb%Bh3d0P|lPO@K7075&3U06oy3uilpOAcS{U2jtSW`h&i+C{|F*O&0#BAxUQjR z7;b#%yG#Ip35-;7bC;1@+cl<6;d4x~Xf-7V#3>a4Y&mzL*#IYg*a6OFqu~h zn2Z3boQ3w-by5y}!~tYbe8#8%+`zWezhoDINj&HsYr}bA*nvxZ8y~sElRy^$+{RLC zMt;3#j7ql5LGVO|SU=vC1*#;oy&B#zj{*Z_oXXyC-~b$az{%mDY0qAuz)Be!Z7`Ar z3OE@`TxCAVdPl|^gL!k}q9Z+I8~~y97V)Gz@sM}@512zg4mFuJHl>8HRb=MrxS zWC8YM9KWz%co)!?wGbq*$8rMp4jQq3z~`=GHbFEHh~M>tuL=NJSNvIj?O$ll1-bmh zJ00*K>dr|P)V#7?<}S!006<*7NybukhD@82&&&hgyt$-S z8}tlb6ny*S%8ett%+BEt2jj5G^n5gLpUe}P$|+Sg*=6B^SM*XxGtVH(j(k6PW;!$i zBy@jx(L4l%tSMdWG*vt&kgALi55WYoG}gR~-(8UM$@rc;81 zJ`wgghMAcIrfk4{bBWR#`J3Rq{xbIZM9b9?>bd5UBCAwTL|z$;NZ8qZNO$}K474d2 zNVDF)oJ<8EGET}Y2&}sGE|7am-@Eao_&79_8-?qOQnSA1&cTvBGB(Q3=|F*L02A*3 zv;qY_13uQ0M0_L~B)Ur0Ifs5{{3xnQLf5-~uErnl=uC;L-jKCyW7(bW$`#6JtepJOkYb`d&!NcV}L0+*fW#KGgh8Z-XHXY7dadj{P&hj>dr)sP#bK7jFgZ;dh2fgE78 zP^S2esJC`Sf$@Vm({*T~q>F9hkd>WIpW)@*Y;c*0N?GnsM7+}tc*8p30plp31?&rc z;2Szy#N1~4%2TYb&t=6ps~u&&8+a?o!XD`x9^g=Pk{@T3Ea6{s^xbjd>B)4vpL79W zhA-$XCEs^@{>VHYw>IY3UM)I~jCb+tEEm$Z@d{AdKV#j<+mmXBHbMiNbkzjbv&>9p zhTP$8I6!tco3k8^n0u)8e)S9Byl=mFcI4W0qQt=F4`fv|9Am*INd@a&rO4dF_hc;JcovVb1+ww7G;BvFpFKTj;~V2SYU7^{ z%}g-^Hvl6ea<%DiXHeQD1lpot84S^+ObyMo001BWNklKMTYEx}o-KohIm-Z~te$2-?)r}gXO7Htd_2Rrm^T`6I@{B=b5F|ISSn`# z02m*Jl@p0SfjH|ApeuI+#!3^fcR3>Ha=H*)!T7TAt9Fa?3=K4UI| zRQN=n%8aw^bPfzd*Kv5r1Q{p`3){h%Fd_MFU*9*5lwQXf7Tdvk=F}k>bh&Yd#h&oe z8+z`%^Dhof8}|_n<5N1HdR~D?V2MEhB&vTsA|T>==Np2u8ip=dYU8h{X&c&#tTD_Y 
ziI`0-)E@g&juv^qHceelk>)CM6Ag${Hr<{*Nz#?vYD2v2ln4G}V}y?650GYLfIv8FC^LV0vcp}JpNOUdh{jL`p?i@y z(&QD7uv7RLzmdHH4M#S!%r!Y?3+NV(nKn3Y_{cN(rYsRXh$dv3+>&8V-cEMUdXq!h zTI0jBGLCb9QhR3Dq)O4u9W9P7bpF&XmRTV$^!m}gJ$QqB1I}pLCi)fp;s^KJ(ZSX> zr=+aY-B@!D=(2A)GdZI4A?LZhKWsZtzm_xY@b8ghA?OFMksG?{N%`d+U9LXYZ%0QL z+~`D6&WfOpb*SVv*{&Q{nXVE)!OTyS3rCJjK>)nJTVZ63Y=J5kPBQy@bo}}!bbI^m zzc@4^Vw4Br6ruP=xmij{gpDC2bZ4NyNxTJyS40TJQVizRad>kPJrjuLEKq6~i-I6v z>%Ap}g8~;B2DT_q5!JJr!lQYmux#MdSI5xi?Pkc#&F?WhAz?g~s1OFx_6cBj8AAUI z4KNKP0latXv)lIWbH>my&_EAi0fPNbAple&%GQ9PWnh#G9+g$?^cgRRdRSXZ66j@A z)$IEl$P#q}gm;b?Umez`Jlv@~GAjTAUXvXXCB3JJN_D=W0W}I9pNitq~r{ZfzI7MRPnC}qWyOGpTag)hQnAo0gqmx1c8Rtp*f2h zWx!-9MA+ykMnHKP0|5|m?(i-k13ZyG^Z|l_9$VpZM+N{(2jNjEgU@{+k4hP zme%;y(*xT2;?($J1b_j2D#C9K%^#?r$K>jdliX%_eS03zLMKiR&{!D{ngP+;pfhDA z@hV^{Lau!>LH>;yxG;}08*S1y2Z0ZC^1OMVoqik+V9xrmhV-DaCFNq>#$HXk`EgE+ zgQI8+z+M}I(L!4c8@`Ws=_NFgHRSABJG^U;pbQWPK~@Nlb24RO(bW5ZByfta07h%c zS*!%*OwOlcd<1@cZax4nngHs`&h#1IS7sc5(+--V6DJ6*RtKTLl(xthaK<_N<_S>Q zfaZu+=|{XjPdt>_;(&Pe4!o_pD*f308T^)$!jT~Z_=aOiF+xUJ|_SWfH_DG|npHE8h zaz6C=?SOL`WR4;k^TmOsGdqRMde1`$rhMNyI$;cB&-snkckjMgx8Bh&`KV9*)1&Yf zEK{IBkcoaHPiRrr5f)I*(20QG%+Sm|wZ^;h?-_>ie`915={fq~?Nu6|(Dvxve{*OE z1P1!n#{C=`85$mUsm`~jroAPA$cpHMO;$i0Wsk`?28@Emqy#7@%G&cl9y&097*C3v z;j6TaAar^VM+7jTB%XMy(Ffq7JUe0RD1Db9^OKNtgSLn_U`GMmDejei(M7~ksgia8 zzh6)4}H= zw&v-7z&FDU5Hp(gD~J|RwmT8c3>>hbd=Ky>BeF=GB>-I8#_pTrpavYj0$)cZnrBMX zyRs_Elcc!G9^L?wl0$2VC-fykZa)uRviGP{)fh{903FCbAk{^E=1k6;7afJh-Vxvs zEmQuoUh3y_V1lEf{G|1uQ^+Yt639{>N5|M3V%>m4fu6D{85BB+6M*mODF7J|1d!uL zItd`GJdLweMxcmmM?C|c=mf-YoH!OvoI-1J#^+t|8Bj+&*r5r6)w zeHd^786@ZP$Y1p7bVs^^PQNw5EOS-Ue^t}GqnPfVY3vj+ zFvj@iGy>$-!ZVfK0GvP!B@M8lv*-OExN5WOlhZ$nKr&jCB(MlP0#nkv04T#KDlIa+ zUSgJVx|-`$o_93qbKn=yHZI@+=*0^_ov|qe;(P&zJ_9yIyNli!7chnY)bFoHBbzsH z350mI+uHRHkSbwAV}|qSSkN;b@~(0%a!v8#DPsdv${Ki=459T=$(heP&CA@2x>kY) zc$ybQ$|pl$m`ipm#(wl2Vo z%z?uXW`y3l(VhC_$k~(BZQka^aN=8Q1^97R$Q=DD`aJhHXp4cvKi0tW)|(D87tR2H zM&8Mf5=CRR-W~0!PwfDa=mw+*{_wT6@db?Gb$tPe93FJ{E+Cu3;UEBZL3^DRHM4l$ 
zZx52_IRL9qYXgMRQ$UxJR!#$l2;fB*`b(djVdJ-U0yPeNKzno*>?os%ZuIkdG3Ty~ zd5!~MJ&Y4i%0^n-yHeYtyy$(jmq)w!OZl$M@2(6z8T*+n2oPEaARZ6Ng3Wtt&LYv% z&Z55n1#qP;Yv`*oy8xB>3;ZZO?CvFC({t7qO|2Q*MsD#M+3%KdJZ25x4C{UtNNXPr znX=|=0Vj=|>0i5*x0(l@;*0@U97?5{R}}}|$(iY{8P3W)UCUU>Ob1BN&jM=M>7U5V zw9aTqr?YwR%RCUkxZo3Wr6ymM&nw*k002EvR%^oPlXE%itrXK)L(Vd z0ML3U042)N5#a`Kl%nJ`_^dJ#MoaX&liwI+iWb1Ie)xgR0(IoLqnFVXeUy3u=ww#_ zfuW-ptg)brY(@GhJ}O|L_Pjf8X4D%EI2%Ad=L#5{`|~&?fQNvNsBx!k8D9sl;1g{! zXgR~3J{F+r$XiYo+MuJhtuY55*z_#=%T_5#^F{CGH2K?neCZSYI9TLTFaxMJE;31< z`2suvPO@6$JDvk30T4g~t$j{jy^j~|f6=z}#_vq@-*|Jp~x8OEpdoAs*PHUJ2Su@5q;_=%1@+k3D&Gwk?uW@-~JlYN;( z*g(XY>wP6wDFS@<00CGxpBWJ#;Gm zweGWG;9J4;NegYtND7+Lad?`JupW4eE#RaTobxWb!Ip7ozP*R$Y@Ns)nMG6bO)khK zoJWs2LE(%Wllm_5q!%L!9OBV6cNP&BrB4rGj93?%7>>>V(U&UZQ* zp9^R@G`C&IER5zTTTkYC!8;@AYqx2EDii=>9<2=`8yR-8Dya2b#uVV@#M zoPfCk-t>~u^<2CTSOZyPA7Icfrb0#DfMwYyvf%xAucZt~a@gco1gKUVgxR3Me92dz9ykpNi7fn=T=muct9&#YN zEpbEV$^d5L$h2qBP4jC*05k-Q*-c}>7djy@5iD72N4D_4Lr%S4U`rSA!rZ+dAQgaX zfn?vDhD_h;~mj>p;_2K$Bc}K3PFe za=@m4vUadQr}i)7d6LYybJQoF7u2AofDylE=J_s|sZxRGDpwsGtI|^jKf2<3Nhxy4 zb_0%fUL=p~w4jPS(mOiGHwk#M)#L>q*e~+PHni?gEP-X>#1bpZq*u2IssAXx_mZuL2sj zmCfg4_%Z>tak0^2gz!eK#>U1zBTQ5MSj34j^;9i}0eF8-0HT9+nU}R@h`edUxHDlEJ!-3H!86Lpfu0cxf=+=Tm z&W+P5Gye+U$~T2 z2FQr7TH*u?wU3c4d~A-gaU46{rtjnf80v65K!`0c2E5}$jGt3uqgva6hQI<)<{osl z-h%9Lrn_}+wSV}`{HFuLBaW7=3RF7W*Z6_JHW?su9XJahdbbu-I>AP%$he1G&=3DF znDIA0YN0hw{}({1KC}fWr$mOJmg4um7C z0H(3CX??K+=uFSq&ekk?r*WDWnvgYP<}}?S^DL=>o&pa+2M62zfGi%eTfPM|?)S{A z1P3_-2E7kJ@=QL-dZZKIwA1GjUlWA?b}-Yp8VmZPIoUdCk;OywrYq8nVSHV_>-*+ zz8DvJ%p_*r0aGsUL~@zmhff5Ve6#s^S5=-T$-yV{d3x;(I^hSr*B(a3;4hek4DuNO zHrvXFwA&>=!gjYip(KMZNe8|P9m%MU3<)6ie;%s-bi7x#Vfs*fA*bYzZ_>%Y&q$QP zJLbSHsu~$5orE(xyK%QE0QqMN=MJ1`_eu4z#45W8bFk}dx8TbhqLsuIpNqC@lJXn$ zd-{yO1T>5jg8~SAt3tz(1DuG#z+5ffnumAmLZlIZ7l=`%E)j; zj0%uLR%$6>sK|i~6CmO~Ekm4=fTWEW<`$GwE>;<-%}O93b2Hns%HT4Xbix<_IG~B_ z0DgcY*=A(tfp(u}x4b)JB4a~_+pI#Hl;DZuS9K83$a4Fea|2ufL0LCrRY^EmkA-If 
zV$URxPu9*A(6ozT88h=JOHKC4242x;RhD(*0oTjGl1a3cbZ7xQV@+4+nXFN3C9eCqFB*_x+L`P1w3Ri1MKY6=>F6@0LE5FOE4vaSO&J+FDz3RL1ph8(Za zv>hw?E6-v(>9(W{-IS1M^94Ksic}iNHCwuN806n1Tgbm4$$Q8qTSc$PfcHxz0Q&-P zHobLa(ZTqENji?_|up1G!0w01PROZ0roL-(T%yQ7jwcRE~_p7MhLG2hoY zIR#nfhzDyB+L{OF^Js6!<2pP#@Rn`-6Dl>~6n00zfscGlI|uT;beEmt6SPUQb!}(P zj*uJjiQjErl=$IB#41)=zs^-6ZeIDO*bRBnON~&3U z8_+yGT<_{-RUrt8FWH~FDnLri7!fw*=p7^1jFi78m?TUD0no#o4uNY|lwHu$qO{h~ zT&<1BVN|gJC^D3?AKEDZq?B(3xd1DJN&tXmMyX6IB?R7mt`$ZwkTOya{u< zw+tCNb0Sx#0MB3spfbPO0N>fIB|5a#H6tUN0Dv$eE#^gQ3TnJ7h*p{K>fMeALOJ;xpAgCS6(73gF zvTndfElfU3{=AC}3+_1R*4m+InYI?Q0^O=Doi_t`;Xj(UeJ~mepvWVg0ty9Sey{Y; z=>unMhH4bq0peQ>Ouo@7+bb9Y1nk15qC*B%Nc_)yRip8^9n&OG*d)3!*NvEuaiIgf z_F1$DbhU#N8|E`SWIrV<$b)eMV)&q9;QbN`S^!I;kXts!cUv(7fwcw|M0pOoN*>w) zuWi{s323f>%c&Fo^t#@UekTFU?tOJ0-~)TprYxT6Gr`pkGe1u5jHgz#dCj{Z@hP9Pt!x&XCV9ahO40(u{7t)>wbl5se*g3WeR$+T= z`a_=C9q+OX!Nxe>I&^a;I^qpKCXm4g^4#V;e)n04FFe6}cC27_tML9zB;_@KX|buqTYb{_-CxH*`i)mA~evN~(TyC$^_Lv)#7YKsztylYR7akV;Y< zuV^K7@4c64EE2x+u;kFUne*fZ{BQYxJaQYb3mxSk^tO$eyZMgIVejM$Tn z0A>)wKr$>8lQHzns!9cn=1x&L$xbH+gcu*n=XXwnQF2}Fg|z{;RjT>7b*`TjOjfVc zbY0(O41uauF4od7ASe*=e;NGf0GO%>0LR7zc*=mACxZf@amt==-W_!Lt)M1jqU{M# zs7JapN6eK0=8S=Qs?Dlj001BWNklszaHNMlaveV{84gm=|Rt6hL2MUbQ_+6vAq&y~!_I@^ zoavTl;jb(vdlA@CO+X)ZoedDkp&QUmCj=~l+IHFFobi}W*{DDuNpCsiZzS+`z~!kk ze&_lCp!t=3KGEBq9jF^1yFn&98J;~9)ccI=vN>qTev>IW%Kk~P7zbb|Dd`%rn%|Rn zAd+I5OtwlRQl3u4$2d~8c*08uhcR{)t%310O){gd!B*E$nA z>cBiylGrn?h*fX?iD0Ir8{4I7&Zfamwa`{ILWh&}Jm^oi?W#yWN(@zfqc;UL^s@wE zT^rvF;;s?^`8|M{p28C5$L^KHstRR{KGzbh9Tz*DMr+tf(8TufGi^Y@F0$RmF7c(R{%Qv?G(!hTUN)3owvO;q zo}r}{V;G!`69c|MJ6uFjS*9KTn8Za*?DI){$m?tOW{b;4WWlt0?E)IlXA~Jj zastE$awdHmhj~Wd&a4p#`SJ`h1enqtfs)@P4mbtB%N(hWW>^boWVisRcG;SlF7GBo z^u`?Uy={cs5|&KV{(;Z*wxHZQ0(*2OO?)TE zoD^`?8XngO7=S!_1+-}aqeI4ARc#hn|R9qus7@xaESIQ#*!BEoEG!$m;pA% z*jtFsM;Hg&s%xQhVEBxxyf6WK>ly5z;Fb=0hik~0WCtD5f+P^1XR>)d-SBR9)#exU zrvzG6A{8V!h%fLyviX~BW>Y>&a7ye5MEngm;I+zvv7H7S=07X(vpopE@uQ#yjeOPO 
z^{PVvia>S0<{f$7b;6h%-mx(fLU^zG#0JsNb({7Ac}YfggMRaq>>;_3Fk=_ZtKB>c zAlL{ptnH4Sjd9FrPb5x{}<4>u`L^cMr{Q^!zZP;cfgW+ z7%v$D4)8RfY2uixN&DdBU1w%s@@~1#*co)zSv~V& zL>WZ1W%M~N-#()_zRNZ+4&;X7a)jo}DEW2`BgJ9(Y2M~lmSU}D)e~#H4!i8k8f`(8 zjPvUtIAvELlA~JXwycox_71eregHVj=5`-hqNI!(8B%H`i-5Mj86$xwkO)wDrhv-- zWh?;iPY=~ATLq-!6aLih)(%gyoq_`fuT!~!yMo*kgI3Z3xG=Y;0gZ72!YV7WIP<{F zGOy$sy~t^M@2@+dq&un?Du)6Cj)~j?ATsm-6FH<0oGr(MPcpUs-@%&#ycy86yB?q= z+dt2c$#8c_Sgl*KeL#ELd6N|mm&}n#`sEs*8wb5t)i758(C3|;PUdCgI7@*8AR+){ zll)J18EEHtIa;k%0s=G;bkG}XW;{dtPAv;6g=Dnq2YRzHk~!o7KR6u@+U5fEk}i^C zfkXCzPRn$9k3>w)(7f7psvVL9b!-#6LC4Sqm;@FD3Z3}w|AG`_X3veq`;3F$1Fy2g zf`1uU6+GiGRse(D5aaJ(K1nzro3g&Z8eX?mICR1elw>Sl zLawS@)NWWpkDLfcoVizNwzIBpN`k)&mJU0t2`&G^cOZ^!`N2A zDjVJH9XFD2(su+t-|~-%xQcQyLTN}Pw4c{wSNg{IBEvP06F8k z-LmdpNZ@3_IB!CEvJ1itXgE!RcT%AVm=IoB3_z990!r4?pXa!Zu^qVpaPRL_+8q0d zW5{50xQqoF0wVpXq9tN)}fbrp#;G`YnrUCXm1}>T> zYvdeHIO6(>(VT-6a0yV+9VqfnU=66oQ^prBeKA6Gi1T?8R9xwi@o{RME749?tv6us z1qhtCv2lEKrh_q0TFy#3&{1+w2L5#GMf%g$qQJP|uwV|Y=fYY>k!|=AFQ-Br#usdJJ9PpbypdXth2(oS|9ThZ@ zHN0qRYL3SL@xeRjUwQ_pk#Rg|cOmjiF9Za5W-fR}*9F3WyPe3`EMVR@d+f`&>9Nc+ z{o%anhU7(OPmv4Gnk@lz$ve45b9^>-`fncSL8n`vz6x0(N?eKJTncJb6#o z;2rzW7V~@r8a!b|x{v2UK)F6TzN)8fw|FnK0`94q&d65 zcCq6=V=G?;5>I(LuQz5kx#W54kntLRrEij@{6tA2*Ivn=`vrvjiGcUXd4{j%VT|l7 zj3Y_bme}N`>b`_8+U5KIh&41Og~$fqLh06GoPwK`FlB!kJkK}{Xb4nekR>|p%k#c= zEW%KXZ5NcCQVzbP})e(HP9H!v;BCbjBl9GsZU~IM-jj3djTV#*bI+koUwW86U6; zu&azR#z0m(3E{Vl8@jC3kwBEH2L75K-g0mNcz|8dK+eq7bwE=)rvbGQPq9~Pzk8mOV4jD5xYiq=sp_MDuyne&ldY>d>l4?Qh^~Owe$hE zY+LIC(T;wxY32%ql65iwxO)$ssEQr9)-H$^l0gD6vMI4ej@(DjTPxB|K9XhtF7WtC zR@QvT4Nxx_#s|+`1Dn$X*&SdaBLSn zURg)OPq8QK4rA#$y~S_IH#DoOfZc6ZLb5=w1^VO)&r~c0HDrhX1ndQ?bW9K|0ATm| zeKJpW+kl5XagT-dWSjr0jhUQ~HI+`CDdhhum*gSEZYPCR;kL$ zE|q+!-FW9u*(+6SdJI=x?V6SUm#Fd{w$AguwXJhWWSD|2m89e&@QzQ%-#L$HbOq+q z1G32W8>^}xTdu2v{jF<6;>0`2ig!pZwSf%#&{{q?qmGDKWxUhLlh3PAvE_m;I>*-A z=)v!75L(yn*!#P0*FnS%bd(6+UjXvy*h~8E&ZnFSCp4yyKNSF;(Dgq!^8X5z|GQ0~ z=oK+Bo6vJc3D@aEyAClr6w5PCdhFcOY3UgLB&aH&P;w@yl`Q90zEe3WLu-fO=vvFd 
zDF6>;3}lQ?4y&x)O~#Ty5l)OIsOGOtO1XS4a7!ovLKzW22yrv1W*`F?FoZq>2+c!Q2xthnq8rD} z`Eeu#J^*an)z)s-V*j~mz?E~zu$qH$1DwWI25rivYrTu?Fu=xUp6kwIt5GCl0Hd}o z4x@H4;{#4)zwwok6^L4V2Mh|Tyn};aGz3E&zsiE3k|P3WKG_O80AOyMZ<%j@dnc#M zIBQ4pH>1-cYqUwVU#)?2O^fl#(k^rNO7GoIM)1a1*o@8%0Tj?}-AQQWcZmqJ_L*nM zkm-%4dz~v{9-KHmktD%ynODHs9M~IF%XS9JDqP`h|vq9%G_= zY^lHwt?6WoVgX>Fg~n!{d10JhIjlSJ5AQvxY@qdvunYmyJVDZw)R!viI!K1NcYege3Jm; zJJ?^c>%D<@cOmifjObdw48K~l;*095vouJjps@HbiJ z}&5J!1(5fb|7xcYA(zr{eP5KwK>=IHlP(~MD>?y2Z7gxH7 zzw};Gkp6iOf7)gw)ja`=dPts-tozg3QZ^i#REFfn1cXd2r}U^f51ef zFT>;cvYoQXfa2t>&)T0KAOu3phf$+cWB?C6mxCkRWfS}l?a{7-@O)<8EsEZi#S4;N zIS!dXff|D&lhcW{_22G5K&==IuXZvS!>hGm6q4iMI2lU@us{T865uetoHS?InJsu! zMjGhi3{_@Y6aj=mw;PsWxfUJ958S#a~v3FJ2)lpCwKn#js8_aT4d^)cB7-a zvVe3-YXcs#d3Xi9c)oWEa`1}YdTtd2cEA|XgKeVg-rvq;=E$M5L4pLbN;U;8=7Y9^ zHgsa=jK%Y{8nYK*=p|`JKZ6J?3X~# z-)M*)TIp(6dQzdBfCZ>0n`GLv@zcAEk8HF5t$7TH_-RbuNgm9pof_ZSvFyaAtk=+E z^46vmD*pVK46@{0?Y&ih1gC;v{BJiwc2rU!{_A$y z2oXaNVqHZ!0p`1SKD z*nIN7Pp|z;K;vPxaZn@`qAxOxRoi*!nr?Bmaf(8tfU{w|w zUC3&nz08iE#tMLOxEU~6yIo(Y`vE$EEx^j~6}%d=cj>n!Cjc{}gN7;sou$K}l9vvB zWH=cuEl>>qZq3L$ii=NpBV*=$qpVkj?g!SbHeCc@s&1s8g zfXsb@WIO>1foM*2J+SVI*Ypv1VJztjC)dtLbH^t7Rr{puvCrBj*8AzMmLu8!C%gaP zp8yP)!&9;i$jQ)oz5oDF7C^EwXi7FX4Laz4`l>>LwmxUCWK?}7SWsaQ2ymwKw00`? 
z#F~;$YS$X&UFL=^=41@Ou52@#OyB2Wg|cx4PxRUStw#XrRAlI>FM3Ue+f|H>Q+<&* zF&A{D-{}_6DR2TR_3x88K)sDT=Hh>WiO<*|HkHl*E_Bg&$(;Wg3tM8&KrlHiATyRW zwgCLeZl`H;`mSf|jLA>(*1>WuE=MnbgkF2MF`@?=R@ua3^F?ph;vK-~44&PgoG1EP zU=GxmR54HEGJj(*ZsRj9{1uF$gD)Vot+Ly60l%NFi8&eTdJVY{;7Z!ClVpm#(@F2| z0Mfvn`SB^{MCR}bZv+SI44EVIRSb8#RCy=)^iF6)5xn0xHQkl^OJ9Q(E9(o0;SV8&#k)Tg>i!(mV8hdJp?xfxK+H_Pc3}0o_NpNAbgVyh-QNNl z01|Kn-Z&>9Ue^6|Yu`E0vOb&|BA*6FMYJ*$oDD;-TJ&n6lY3rA_EWu)qx zc3oLNnPKk`vA3;ptx_GD$)R(0vU;rp*g1uITaA_DZd=HdV%KidVtzo{ea0o=Fn`L= z`Eu^Y2#9ztqs{R$5bmd(#>WA60jGzT)QX^@RZb4~!z71;V)&?4~Tgn=OdmE$1WoF;t%u*i|W8LeqBKT+h`>deRQ z#!s&5{Uxj9&zEc`y7}8rfD!%h(tI?W+F&?dN`J6qp|ydeRLLNwRL7rc+Pw3 zKp8O$vgsV-2@uc2ZC%eP3jBdv)d7xx6Xf{u&VA;{ZaqEenRD@M&){sl&%7iH$d*7t zrNHwG;sh;dYrcU634~fz>8RgbW6Y8fbgUL$a@GmabeWt1W%%Yk`hzxj2K)jjKsOx| zBxy@CMvjXvk}++oWLt$m){#EbTfFvsV8m9{_EG4@n|)28v&`OU3^ZDO0WsXn-X0YTg2ac3%S+ z@Y6Rr!$&+vC$`Kz_(OKu{$5ah((e3;T``x|9HAfoAhBaSz^?>ffr9b5o(&-*Ew;u( ze#m@13*E^LzbT+t4}&(|TF+EOC5gy>hiZBrzLHV00{GT0P4*-`(1}d&2V@IKXOoRd zpk&hULZefQpyqvpdlOU{~q)sPu%B##040uXcMC+LFv*Z~15 zEQY4+Z56RyOaAJEXWKLXr&`0_(ItUNb_Sp4+E`-{h>|1gBiR>Nt^{dk4dM5<1Ielj zuwL6A!$)+EY)HDSn6iLIz~jD80~*hxQ<9=|!rbY)=aW;d(|j2^p0=08n;o`dH~0$p zmn~iosolwBLGi3akt24By=}b}oZLaspRCh+S1(TP`&&R$FkxY!br~F^>`Z5LJO!w2 zjmw!U|D76VMCO1ApPqRNqvfh?MAvlz#?1gCzMmbS$KcK0J_^nN0$}YDMF}}|LO>YG zJ|SRj9Ns5GDMGC`agAsLoQ!=Liy3WSwcBt60HszL85Is(bj_G~zsv&QEaS*f6H-8e zQUD!*s4@AQ!nO;L0LMG^bZf7Z{S?#y-+~_Z3SO1zfl=cps2mYR26iY~!3;3NFcj?g zy()~K<~jxan+9rTddLKR-+Mw|08-4st*SWTwc*)0Q(dlg%;?`oU?Dow>#duLR>{#OE1LG*zA9yveYD zWPQK6)}lox=mJ^vEV?3tDG+CH@#ATWEP4XcY@R9|S_%%+N6r|X*`Ijno&dD*(sy&D zzX6XHIIGOi6?76bo3lmAGG2nS+BZ2qy5f0wF?KeP%$rl7scM4x zv^yhu&=cdL=XeRcSEbp3y&V7uptDVYGTE2>V28>;vxn|AU%EmM$&B~=AG!wYjHMkb zKiz%p4hWR^V8c59!%sX1{>kNRQ8Gqj=1bApw`Z8U;Dw$Cu<;iU@S1+2$4ri*0lm;> zivQ-?#w`9OKav81SKyeBvrA`dBL#+n*$!HkO!2?KGh5Cc8?Wo!|0f4C$x(+DlhbwA z#vMQN2Q%PK*2q1bBy%0?ORo7sv_4tqSr)%_HFzRx&%cskzK>58+^oqmw$A${U3H7N zj^6Fo$}11g0{{RZ07*naRNTqxlh140Yu8?5V?!l@1-X(!?P@qXX3(D8lf_R1nt}pJ 
zQFuaOjqZXPz<~nT^nf#=s6a<2W&`eM!%(zcHiZ@3 z0T&bqP2I;Jc_-)N9h9Skr+{H#i87k=3~UH@*|&h(UC?$aE3d9YQw|MJ%vk`2hxiM8 zcD4;CET9BT1TJVJAm)?-ck`502dK<5IdYHEw7?P|Bnup`45{nrL1&QQr_7G_3PuQ> z-0K@{Inh92#^f8p%$k|&@3LojiX-9X-5{^% z)d|#dVQm2;eKU>#8eS9(p?_O(uMXB2IU>~vKof{&$jb`jrFTnGRNZ-!K`KKH>{v7! z00tz;5#8s!*^@F<=qF$iV5txRfVHENFU}OeQ&~V)It0`iE8X>MdP811>xRyfYimJD z!psaT8u?r+4PEdo$r}I<$ZwMhI%2G7Rokucw0?vg_AGkSmg{KlOJIm6YCz*RG|ad_qbp65C?gWTe!Z)hlPayb z3$*c|>}&S~Qp%>UpvB*8z2t!LRULU}UC)W_^*-Yz$7~n+vk9F_&(^hTXMr=j%BN&2 z(4X##6cWjaRp1nlI3?QorU)b2~i_%g{xw##$T zF1aAr#zBVoJD=BP_D)v(gr1*XLyq_#$umK~JNz%enTapI(`!Kppo*Sktg6b1k4k3S zq=voV8`($CMqjeS#`*FN_JY38glORV)1+s*vul^)m&uMKRfqGk$Ly9MqTTN1z4 zM!;8x&C_v4C%6OtLKlf>dLVg2-`FcjA~ufSk}#8i^gp(l4Q01yuxcCysCA=AUS-GO zJ$4w@(VmYNa7sbs36If$4-wSy1$=vLi%+XapX$QX`?moN0uoq(h>C{^?skkid6|1< z1!SrTbO-aCYz1u`1vSQ^wSf@@DrD$5Bw5J0{juz-OpNRGS94xnV@`xfCf@(G1p&Pb zAaKB$5punughE-8QgCb>E@6`y3bb80n4Pn`%Bs{WtIwV@E}&s_&6NWoxc(3DP)2~p zmvMB0IGQLY10e1JGF!tTSVJShoOyVL;HM1-JRd!LnIGYm`Ien6pg|AHf^TJDXO--! 
z!k@^_3Klq~+BG;(vcYk3g0=MFiE9LE0v?|+D&}rZAmD&RzjZ6`SkjXI{79u#@sGMbOen8BIdrzr?~;e z=4<|B?L-iUM#fpeDN< zq?T1qiXGDW0ies;(a#QtGe>m5H;!;-4_(71dxo_)t>5CHjRXJO<8Q%-aj`Mvp(=Ol z5y?s|W@td3-9xupY+OK&*5-g`_@TYcb5+YahbK96FIuR0b-oRU8&C(D*&FZoKfFQ@ zWA)X0$cjV`2XCGdNkFkLK-YWQ61l+GGqsHY4A$J3H+s}6B|xd-9FV7@vYE!q2AH4s zsya$;JZZPChh6f>oXLVfmOgl2t+*4U%-_uoZ|RE!0Q${f;slV~$@}i}YzMsurd8nZ zqjQszgQ_#=>vulgoY0N_u#aR3y~(ukk_R@K&hXpqXqL@(Jz1h(bOp`YgaAEjBg6}Q z2J}4}|ABKn<}dke*Ynd=g6J;$BtwqQDoAY(a0SkO+Lzt&$C%kWIE5|XquxE>Jl{|N z;n@zwx3Cnyy;!?`U%Mxp%S`tG#EZIv^Bo zF=yA|y|s;XvaA)@Y=XcUemq%=xh5pMMVdR_0IJ(z1-nu??=|J4D4PI60 zENK)S!d__XedNAP7T_9lHU@T$o}6y(WiIdln=AQ5?{y2+dGJ&A=Ux7<_iY23fC^`g z0eXrVXTrahjPC}5Aq?SAGSqL)445Q3CFeP$kpn)rfgpkNm>7frKD^&_8>S_Iy!GvB0ff3MWNf3c=84Xdo zHXK2g2%CWUT;$Ba2~M;)iN;-5g{FXpgOMTR0MLq(Gc+<;szCmx1dNU6`O>l@fb%)< zK;h8V{Tx!QLK(mO}`}s1j=Nqma$}{U4_~;Mkdy{yi-yEaJB9Ou-{c@fLA;rHltR*{vV1=Ps|$4pO7vq|Q_Cdh=cOLPE01J*kn(;Ng7(_J zJM5tMvJY(zQOlMJtn4p6piAaq{Op0?K`>?xwT}e4%NVjve#bL_h;Dc`TP3^Ku5W=I zSzwhX$r8M$!!{A{F3#5YeVG%x%l@E|;Dj6j80@)3gZquSfUf|H?6FgPl>hNrc-00b z=xaX4iJs;laB#gbYstei0NNMZX`%Y*E}QvHv?eF?$$VQpeOF=0*Rf;Tq9v>W7pE>x!x^l zgbuKV_oJC40ePU~%vk zIDP^*tO+{r=Id2s&=G$HFsd{Xw#n^P09WF)fc%b@@F<;#26O|SBb(&j7yH*%(Voxe zN$jA%_wX-tr-XFvsaN>Y=iVo=AQ5A}WZIW!@hLWklBAa`k_>9g@2h9l1=G2V^uT@a zb?xH?`~P&%f5nr_fW{3R7C;sK0Qtd#)+3)(B!W@w5o#su6lLfrOU;mSxMXwgslHXeE7#CoWvkP=HAc9i{ ziStEs022@ajs?qrv@akPjq!#I1DXXe4CXwL7D(?rfNwaV6DR1J^aM@nb)H*cUOgur zB~Jo=v}mi(yMVoW>5w+hdZ_)M{C2oxTW-@aYg2$qAOyW77RcSM1%z=1X4xP7nYP1x z=?@SpkZlWM_q1i~-T0#kheTG`2KRG@WXW^UlKi2i_mKgC3Yn8_GiQL4^UyEeeL%e` z0UHXuxAi5x^A5VG#gSt|JHT0x%{k#gfgxE}F(SLQ2GM6hI(h)T=x8jQr2xTr%>lhz z>wu?PZ;YRV#p~A6x!3>M6TI<0^J<-3mA#S*p2;S#6C5htLUTG}KK@1%EgwKA{sI*A z!I(V{z-5!!S=rFm&(uN**h@muZ?dF{NC(+CcHezy;u=*t`YZ|K9mXL;DOe$c0rG&A zdEhI02%3?j)*AW@&*(`>u6D>YUb@W2k~`yVog80W;!J=>P6c^nZ?^Q| zM;p{=9iI@^!tgWdQI2{h&LoB{{X1^OP>}=tK_hd{D73aBMVIyC zY->%hlao?@=d_>$uuX{>qP7d>NChzrLVMo`hy}Wgmuf?63a$>( zdxst;_mr)g!OflHttCu$RM61cqUnt8f+2GUQaBHCMTY=H^5<_f13>5-=jmHD18^aa 
zyOOp!2nGb0z>JE?+*jY{ffGLg=Cb=hNeepf=5KEJ40yR#GDM{VFrrI9lh4@)wBZ!! zN0nQkldU0>915AFSHQgX2DU{&LoNaiXzu^OmSEU3y^qcT=wzE-l5N+vcE>z9Fz;rM z$RPU42zE9V@CJDLAAoP{t|i}ipd!r<7#q3=7LA2tl-VQ`;0X(ENGA%GG;c-xX1|*-I-aOd5+zC(7Cob6=?Iq6LbW41v_+?6DJR< zQ^qZjB)^_voPxvI?h;U`B~37dKIE;lQOuY9v;8pMo$j7j7QEyefW==LbHN-rWsk|6 zF|=zQKjSleDm!|zX#2!A z2u%e0bV;DOu2b^7Dy1E)3uu$+5|*w>=E;<2v%~a~{c)ZD6});69U#-}JXs^J%jfj9jY+=g8;wcu@`Jb7@3R+TG%mm zqRm~%$lR2QO>7K)lAYSa>pTb-ua&1A-NQr1&c4CHs(NUF4)lRfl5|qFr<3H|{K+;v zSui6Aq#y7tTjkk0l=!KR$)NvqT+*AJ^|!gUGp8|5+I~Zx?&NKL{xYCRNH`IITjm54 z5vJ_)T^Zdo2nEBU#L197J@~ExCv0WcVmRh;s2q_fl+!e3&V#TJR+$?{7h?n|Kx_wC zTHnK|bAlMVT17995)J8*zDvYqCLCIr=WongWdlu`U`!JIr_m83ui z=mR?12q7RuDFFqv(sCn{4H#L!pbf8JJlP~Kz<4b!XzpIm1Ns0=Ktte*HUMu6?zAQf zE(JY68gS0I0rP0@`=k%|iPBzOn|?esCr03cV%3O2AH zb+&BQPSZPDa4q+TV_v|$H*5ZIzG-lChNk~v$ehem<}V7hLH+6`UTHtdo}##^g% zhj)68@!=022%Mu;i@5Xq^aP+oKVx8z*dfUV^d`?_#eF_A2J+=v@1*yzgZKF^ORx1D z)^Ja2TxVO=JN6upj1^8I6V}wu!%5$H=I)-_gxOv?H)(ul@4q#MPo9HLD?VuL6kB2a zmq4%`+}ads7hI8BxX^kvy!PzY38O{Tbn;#>+`2t-!k)9MY>)r5E3RXM*bR1WCcWnv z(&_Nb8f={gG&2Yz(`+SsNQc-xx+~el#_;dPBe-YZ;9&EnM{I*Rv-NcclyG6YvYp0l zp6=n3=)qiT=ox5e?$PSE0^8ewhTxQ;spp+BA~1TQPkMXn(dQHxCj@Vi*gFa|8CcL& zD-aNm`0ajzAsJbvbihXyM+OAbFxgx0Lt1NpmQ70lLB*i=-Aqs*7j7Pk--E4=b>n5g9dH=%yDX~F=xuz zL5(e}euqDw7^~BJ%S@P$G3HD-9K7OWD6wFV+?t<&k%O2@hL&w3OO{n4(SeLpbQyEb zh2zF6?R!24q&Q2T;S=K~Fy_R)7hnW@*Auo+=lA5gXhBY7CD8+D%9!4Pc@B-?%6q1u32iwW_JHhNt=DO*W*`%7TbG13 z?II_Dm-$x#L2p5lpxr$J*xH!xXb`>8f}v-lJWGWM*zqOM0qV^gU4VEX*BXgh^jf>b z0Scx8Y@Y!tXd!s0g(TbRofE|So2-%Lw#PLl?-Vo`2i;e(B5&;kNe3jJ(AqP|HT^bz zGU{{BB>!kezR`)k2r>ZD)|VI$-9Sr$4Lt)U=|hgq9O)CF2ei7D?02A_Ae7_tnaX!` zW5@81?s#t5%>ZQ;S=YNxV#&L$rl^qsdkUxlil{SmQS*`Yh`SH0cGz(Gx~#1 zz6CWU!`OWKG25i3Ou4_7v4SD;O8?Om|7G4~jRjh55@7y#?Own8LZ1n$?qs*lh2}DC zN4}EI(NT#-i90e$E(#W^GR&mN1YG^!Sm`C%lHhF1ZuBOL=uKzn4*v99yk%qXQUby^ z*>)}9-sTkELtgkpiKfmDt990WDxv_HFLoX<@9<4@BXfdCV>3>{x3Q?Y(jgeZb$$8Y z+8X6=@(&<-R$J-PBgwH^9_f?&RVL~6C#_p0mcjw->Nk?6t1l^_;mgS9Pj3tU#Fwps 
z$_!q7hTiNEe$tncTI2z~BRf80lWhPoTV{9TV7siH(?*Vuw5wv* z+90|_kI16&RrL*LtQcyi&nLmlZv5G;l6>S<#Z*AknQ~;6u5}ziYs_8CR=0MHJ&=H4 zEMp_go{yt`yXpSL>Qv2g=s9qvW^F<9j7fM*&R{WX^>X@xOJGfP`;lSS3z@ z1Lg}9zNx65&*3ou`a&7jjJz=sKn`3sh=USQ`W^EaS#35H0pSU6hs+UjOqG3R02x_h zC%oPzDn0G1zG8y$0T!}SDqkXd&QI_JFfou6&AV)$U1^U3GtjbQzz!j_D^rV@%Vcpd zKzb3k_W}68Bp~E-&Kej6`T;=nt2FwKG5W207#T7N3Aj4OUABjX65gl5jJnZw?NT58Ae8v9I1GbV?E$xOp7LZ(kb+C z2qQV;1a^z}%T`JzxQ>I$AhviJon@Q=tSV%(_J9arPv++~%Xh%AXSD;*@=pYYs?1w0WXAp0>c% zGXx~omyiLpl*sUZHbCpF00)0Ohs?H`mw`pWi2?sOS>h}*A^SQmVB8j zJrihkb{5%Z&)PU<>K{4>0t%d_!=~%z=bh$ZeTX@;DYZv>*G$NHo^g=})d&3dcZVRe z>k?A}T((;9SOtI`@~o)?@ARgxfGD4h1}cl_EJ)}7$Of4d@C$CxXFX?b+NoM{3a-&a z<<7InDm~N6>pA2V{XHKYtD0wj$Rl6du7LvX4y<-hRXe^9Amh8)mpTswG%DrhL_fTf zuT&NE?%F<^r-k)w0)6AhjY&Xb%;>Odsh+wRK=s^$R{CEhNKkI!x$DTEP73;Pb*#fH z`%gy%o&1adY1PMX`HmUT0G#OiNg(#Eaixd+2wOsS=^YshTe5-lNFs)f1*qAjw&%tJ zJoY!as7k-voTttnT^leMUlIl+17wRl(p9oT7LCjJJf981Yu9)_8(T+9?PNb)nLr!k zY2A$$GV(+I+ohBJr7IHIbiik9CYkja-J%b4h(9wg->YAa2jQ-EV>4I!;kkScyNpf+ zfcb)2-cE`j@55Xc=yVF_C$a@vfJ&$PP#lY?wGg&z%$A9enqtpg`|z_fGAO#8bX>#H2v8XAKmozBc#@%-+f3sJkO1U#)`TpLtR~=wZnbdzsnwb2 zTsFK6EMsDRXr|pq3!dzn?6549`EVF`4J0#Q1mU60sj^3AqSIXwx! zul!Bx3k6Rdut!emo_FwjbVk*oo$2_Gc0`(A zQ58rc%Dc%Y9c1IlNt=w2DLPSQY*(GiKGb?tl@G8sKlX<&X9EQCPkrgSFN_y?IC$e#R z&2d0eP-Bp-sp$k;%7-B9dYS+ygC>wr;gB)mU}b2ZDEzAlRiR<%Fc9$UkUNa-bCno` z1!Osvb`%4~FoH8OPL7o0!d%zOSTmRmEU?Gn0jl0J7n`0EeZ4LKQ+p3#;6%NbQWH9c zZnn6&hT(3Zu}ruzwt$lJ;00Q0;7A@xF%;q-IcRa|+ z0m^KFfJ7puAU9q0PGF3KqpQHPwijRi7BCB{RLOv8a)}?n9NLgKAeV73Tkqc5d%Z_c zVjkqSL4*&ol z07*naR24LO51x633|@gtXHo%)_<_d2rE5LQIutgD>;aVKAYjICG8vHMO9TUWEfBxyD{mXiNOo1}_PN$T!91M=#=RRo*j+#Z*bvMLlswyfPB+@Ql3}uo-|Qa0 z;r(c4zGSd%tR(>i8*E+MYu7TFK1K^a$)xMaq-U`ud=mZ$B+ZvC6nMe`Y#_R|=1ZU? 
zaB95@dXf+85XlNqEs-?4HoOyUU<`BWtSQ&?b*k!kYaHZ?eFuiEw`By)yN;l6VJ*7+3A@IYkYUemXS~|B$YRwP_8D(pbuoNOmnZkgcgqint~bv)XqJe zp$L~@!{ADLCd+UEft(D326)+di?io|>zn-)I3u7+i;4-MZb4xI)(mKjn}dnN#xeO@lngQ$WM{ zP)2gepsA>7i(rfdoqz(KmNhUQ*UQ4nPB5N;iRX1N}hym{Y&*7q(ci^eHl4U^J zbJ!sFnVTdEn#er6k6xn*x*7wU2T%jhoN^$vimthIk~jTFA2!UFY%Kr`Y`1Aa>j&J& zCe~(U{G4U&8QzV@WN&Uk${q`j>4>&V^za=1f}O!9^I(_!L@Tl+_%Uz6wJ`z70G@ZE z1%0B+Xhpt_$9&r+SPPYRnVvV;! zLKAcr)ObGnwU$!S!94kbD%{Z`8%P%Ub+YDn*O7NL1%`mh+FJaN?6vuTpJWJd3LAL0 zAQr%3E66dMJ#AETl36B4syOCBo&nyr=@!rc!Dub0lmH39;+gIQKGa<34FAx9uKd4X z*}FWq-JjY`l#JqA8yvX4Yy2P03-H;ys?UNoHj5meY@3hIRYV13)&#YI#|*wt<>3{^ z5H!FHt&OugAf3uieEN=e?`Qwm1_=uK#FoG_0#k<7U%S^&VVqa=T&pvk2i3OC zaH09J?`Ud_ojXL9*~ze8zVVZJ>;j!IT7gaS=l=q)ZwEDx1DZf8Ml$gFayc3xqAd+6 zVF3-$-3~o6E~*Y3yG-}DcJS~n&j#RgbTe40S5l_ln3c!prT{85#>~l>7bgIHK!U$T zfEgJr8ypcK=Meoa)5h`26n!(G$-$yAMdIYGJ>aN-0`KoUnNAhwsI+Hr2DR|GA05!1 z)AzY7429-wflq;1JDM@P?gunvPR;!r0ZpaJ6CnAHAuI13$t2ip}0YY~2YfPMljELt8<^h$tb>lm8P45_fG7Jci zRaq7?SoSIzqGQ@C7@xt|Gqsbus1gzLd_R+kwec>0G0|+@0z#Ayyz-5u$$A$reS|>SE zbTBXXqJQAZcmX%S1Tgjtp9`Eg68sSm2`Ib|@blaj)S@w3={M)_fO7O?cZ^%W&o-Ek zU`WtpE>-9RWM#g+ukDYmM?-(S!voiQkA#x(84r1OuVkJ9}3m|fg zd-7XTnRZS4#vUPYh)Q#+!-gANo3PX}K1J-g+8G5}Pgg=Yf9=79FVv_O}AMOX7K z@gYl3hiY3fUbfg5U)ceVbQ$ji&;_Wwe*azIZJbp@$Q7GooPr9~dG;Q?W_u_3^o_1$ zj(v2Gv9p7Mkrp-6F*FG9)8Tf@G#)%9iz+@a!}K=E|?26HwgQBm&yiC%+0b%u_H!*Ua-L*0kUwIaCFbENIK- zZ*-Zw>(jaqugNf-re~eIMRv&@8Hq1+N1_7{_J#{?Bjkz;lUX z*TGm4O=Q@+*!tEiwi7LYz<(^?YV{YCxOgK z&{beV|L|8(?SJ2{)VvkY1g;1S#SqkBstlj1MoveDLxwBh%n+#9Xb0k0MA7w-KP70< zX~bldWO5kkwff^)hRf&rZGmp<5ol2DL<98P-RJ*wpK}D<$dX`!Tmol|t7?~C^44~p1Qf;v zXwzf*1)z}0DkKH0KyEw8#e4dShv=kg%nku9ev)xCtF7}DxB!IAfh-DI0dEe``^Y*458<-hvbh8%ScL?7!%#3*X^<+f#P?qSjHnMfiaxJE(SKjA2#?|6_;0vf}(QRuf05pIM{Qwv8 z2jmzV(1UI=`1DY4?Wb|D19(s?34RqwuqOoz0H!etu39H!Z2W}4OFJTfB?v?-^fVVT z3s{xuCC~Ij@(iFLEBGNGqtE0Ezl|NBTJ7)5ei~1U;`u!l0pmks!J}u*^%v$9=*k9X zAMlc0YlkPkj@|=uooVJ;yhXd#OL>0l4U83^@YMZ&=Qr`ieEbi+N<85q86+F3Cw#L& 
z-3Bi0WSDHDk;D*x?K6Rl@%o83aD-NJG?z${c<8JRHkiGZoM@}+a6_`!dczi<*J9VY z7&gUs2a5WfY`fpQ@k|1YT)Rg_fW2TBPY=ZOEC~j|m!u`R>P(WSxabZXo2LpF+X_2) z-;*_lch7(K^Y9M4+PYxL4KhSF@t%(%N9~%$U)iFWeDE3UD!G7VI*^qO6Y!G3Dgtfb z;+?R8Dz9tUKr|`YK?bMK`Zh1#rPahfB{0bln`omh<0MNROCsq=KKZB)N}n5#d^(Pk z`_nr=>5{|{yKLNeHj5!A&%3(j?SRGu8UA*{(ZWTblnh&}%2-fT08oX15hsL7dxVOi zd9^vf4lGydZ6T>~Fp!=HXy^-;Enc_FWf-l2VeEm>c3R`O2{VUN6@!E2AUu;{2l@di z&o9W~w7qNBcJwYo=6q8&0Z9Gif-IRF4ueCHT_d1nwWh-5uv}xloo)}vP+Ie5AUJjv zsZQVq{s{XC_}o=sa=bghJgWnhb6wvKS@lIS-1vKLXWCqH2EZBbY=9$4l?MR~B~NJr1o~J765DCO`(8gYP-SS7Y#g zl{%TO)(BX-03ebDMc^Er8W9(#@Eoo7_K*X~P9`T!u z0%`?Rfc`wpuf$TDB#;Zhu(cxWv-i`5GSFScd3s-X zk?mzi1Qr6;cF*JQe3Mbh1-^}5qPN6_gpA*7i}ZeW3T^2bIhNe>49}!Dbdpci@jwUZ zGKPF_!LTjdc7QKQV1Ocnv<4E!y72l#M)?W{8a`3jbc;AP}Eczwc{$nX+C zfCnQP6xrBXVQQDDqzSAVr)rFGVy?9@3<*Ub> z=NDs8wyiO`hLh;*g#wtIhU*D=3;PQ0>Y;T%W8@iasq5Jpo&s%-5@-OD0B^ENkr+fY zMLV=$*g5qwn{6twZgP<`X!kfYT?5+6z-7gaz0&R#4GZpe>uj1k8EwHUxh$B8w$tdA zMRbqstpvh}a|K9M3{(Z^M_J9as$ooMOE&2WV=2pp&h$^PjlKd#bXQ3NG)}fM^=$Xb zA_8eDJOY*4)&MNvrk%&;*wC>7^#*i}eEXb^YoQY$Ym;b)!7^ER;o0bi=WDghb!@6i ziMesgDFKx8l7J>a6>k70!Jc^r&IG^KGoniaw5*_p|?@;l{X13;t{0a2<4P*9y zv<1fLf%ZlA$Gp80?^RFHPV%L7PUO456bO?;+T9i>2b+sd5`k^9#1!KCC@l@K0n^Oc|2o%9J!?ygNn*%=8yzL@>d2F=c} z8?ZKf9#;8@K>5`;3uNdh`-ms(4gF~Cr19h9r|0&46ws6*B!o)mGPH=rs0s+mS};EC zb#8JLpW2 zlxVd=QCLos0{|8&L8aK!2!0JZlZS-6&sWe%Ug}$BuzUdtExN1qF0kH?RkDN~l;s+L zj6pJg^qRfr{&pXy2dK4{f|G8c9Du{&nZHURBfp;8GaX=6FF+Y!(Y`o$PcdKP@?C3; zx$QEfs{xpS;iO;t$7}Z?n+{Vfh-*hX2BH>~0!s6g{r7+KYD?BvAjfm)Cd01{Q-+xX z!I$W6F2-707I0&Y5IG~GKpqF_I+=Ys=68m6>f;q%q9G6m;Ia*6K|P1=l>sNOv+C!W zjHb_3ae!)LA_suI>jB%lj3hqOIoWc$BG?c_;;HA5-_C0CHzN)3vp?uhNBu3>V*>@v z<|UaRxWPB}5}@pmNrs%B7@ucos}S%AM#_+@q^{f1doG%AX2wSzlNow%9KPJ=-RR++ z97=uv#xHnNm9DY@6xUt{TnK;!W5zB}27Coz zAzA~v0>}E++i^nk1?{_^cpPyBf!Tv=nU{rHqnTDnTz*# z&WvZOrUL4xL=OQedgVaC*)j8qg3-q|wUNoocZ4qAk6Z!Pz zxhMYnWNtrkP03+?n*L~S=BwCPexY^c9o_L$f%4Vd_+H6GU+@LYgb(}-du=@IFWp9S zf&DJ|a7w$60~#+%2q`824(PW9D4?j_N#N3gLX+gs0ZmS_oqMkO+c`DhLcoM51X<5E 
zg>1V)W8jb|DB!^8GkkOH!dz%iF%ZwQv~2htIN>mi$@pt0GA@Ec8Dw8Dfq03)JouO%_;&QpEv{WiJI}0EEnjwKNpFgO%3o#>u#6&{?Hn21K4sz|~jt<@dECc)AA?u-irSVKB_-Vgw0U1L})_k>nIKyoo_4@j3R;L8!GWJzL zPqQB0SxXlnLw5i!I>AVyO+hChTQ!t1MQ8dZ%bgPdl9c+<8=p9fx#kGn=9xd{f&a!p zKgmc7-|J`gH^8!jtH~pF06<5F&R_vJ*&^~@mi)@zkV{4vQ19@@Kytx2;7*Th1b{Ah zlB|<|IxCnK5P3KKWmjBpe&kI{6`gJ?YjkECWtQ=btZA13bm;--8&IAtjs-pf1agmV z^vM_mFrFpA6>zX=zVHlx*h*uegB{q3SDug7_zYMGRI45Vy4vu7YFTwQ7~sHnfu!dF zsq891iO2YWcJ2$TspJ4t#@d-tey@d?43HOqL!yCB(Rsm=v9~=b{mq^UCXAhb0eEX0 zWHt1-*0+|5_(DZBe?h`U~$8An<+0jxXc` zPjoN%F8IaUwZ#(+=~z2Rk}2a8RJ8E`ohWejKQ_yK-tC#>uj2)*_v;|sH6Y_Z$el{C z&JMaIc=aw-8oZtDzn%{i!wjDUG*u1AG8;!0j9(Iresn-HIT4hQ%MQ4{1CO4~7SIt1 z0`~N@vUB2_ch^**L34P_yYJeO-+kA+_tjyc0>l1E6p$;rRpp;9SszSi<{F@@^iF)Bs0_NkeU2Z{Kzr3KRWrc? zz7YDgpgJJA}1U;^v*Wz0MlRf~t z0y+r+hKR9}WSE>xTBY$` z{1kK;6Y${~o(Tv6=56h}3u;^oaN@IfwpDkTUrrUUl40)*F87$5cazE5a#RWEpWs|# z0nb$1wD)KcBBLBE+5>ZBm%XDy?51~eRDKE^TK7?I=smz#8;`$%*4jV(P3~lu@v>?lpu>R!;-2FfXe}AwZ{tQ=vMN|$kG$J; zc!{R?H#7KbLJPs$6oK8;$|eA4ry#+j_vpjtZ#oZyOq#m@j%||=sLF{S?m<)h7qFTW zx&U#l3!yjd%vdsOwm7vO)w{@Q+a8la^hvHtc#x?TptT#OcM0U875mD6<6Aqpt=VdP zcK@st*A2nm`_49@x4+p;^dN(tO@34k(UEUw4|W>`c}^Sh&>=P*041Y*`MNPqo8m~O z3LKx<=@{ zf(c$E-m=xOxBt=G+T+(Y`kx*ze(?i*8UAS>r1!=Ij;LyG+O2i@fJ#R95=U_muU>$ri*EPyoj&1%BdjRR@BM-sU|s&EDyM97(NH zepWehKZOJ^TENV4m_z&a%kcV^O(S1`6Q|^TZA~g02@I2gxqYgPFaEyDTGb{&>0Fy( zT^Hv+F%-2jpbH)WBNkBmo7|zR?A^MpY3*O`cORo5E9pL9M9_NLPJs6GhW_d&W>B<} z;T>=YI0)v;@K^cs9yAm%2Yljb0gXk8vf6FbfH$?O;7JRQz2Ci6A@I97`dQW%FU<=G zY^No9TH9TLF_7tZ2@|~XyLZX-a$G=A>w4DKLe&u&K;r|dn`ae8S#f~1O*hPmof1?? 
zxB&acBN(NBKJV~RZ7Hq+VpM_5Th_qFySwt>V9KsC&|vG0jfFU zT2k@AoXww%w*Cb#Tq9ud8M}(6=)gYG9rQ6i`b7Q$!>#EMz_7`55g4z6f<% zL6|^O0K>VNqjgs1FDqJmq30Nbb~S4N*{8ND=Tm@Uv=?~Wl@M>TVdy3CB#1N)ff`;L zBVWXMcH+Hp6{L}^GVXX@MF5@o0pOr>!30MFMdL&R$vbmvhoypZ!50}sTXSmz6ZVK5 z)^$LqJ2;jO3b?Gp;D01o$hGIKyDhcJ4cQ>WbPONywVeRX)pee0PJEWYszv>1Mt{&n zB2x0f82MrLRC}vy((9_F**3H$Pk4`(XvXj2JHAR-;sHHCGl4G|Lpx*kY;s~_0^_!p z2wkqurz11wCYiv-NkRfZwZ~4dRwV*pueE$_ISb@YTWt=;ud*j-vTn-U@IiZf37ysi zvAbl$xV#hJ$miT;?#dUSomLykCjq6o%xeD=A98xFIhQCvk23ea4Rj~YxT>lcE1r{M zNk&Nkwt{b>BLaAS1YTqFIvT;V-Mb6W3QBu_{)LURF@QCnY#j$v(NzEdAOJ~3K~&lF zy=!YerS)%Jm#o9saGo7O(Sl40_R*)-!r$hzvEiAZRC0lS@!Jw!?R<#8>=!)ldbr~> zDez+fjRzAx*;*Mz87xK;F}0kmW#0lIMifwD%w%e2ufEB((3lYhFn}jtlv0M2QgW({ zYKNv(S-8r+0^GnK0D|ej1c$@;GVZd{#_exP!6~@MI|z*|@i)#DN%1&#z=E^>=Kk{u zx&Vwr1ggx3Q`2+Iam?18wf8hSt_CMZAV>i$D7`Nw33NNf7*ocEgLJPz zZaPbHG!5RYu%#dr14seVWTVN2xdE#b#CvP2@jmn9RMA*Tmcyn*oHZHZlw^E;y(-T# zpn_fs2h4DCvLLlKKFxB~;v$oeW1ZgQTFPrwiR(3(DSC_9X1=*Ldg${3x4*Dd35X`VS-os`HNUI1Gc$otmrfm_=y-ORBcL_>M5&*THDKlR} zE&$ZdX5^ILDr1jdTC*iW&{M$HHqHeW=B1-yCe3#3VuD*i6yL~)N`6Sv&=EExei^?x zp$}S>Okhvw8e53oKo7slb^sLu&lXL0&Q&^MJmkVy=}&aa4Yh@|dLnw2M92z5~W?UIK<7CkFyywuW8a!q0dVN6~1_Gi$z-}AnllK&WbZr5RGCl`O z!38q@X3S)f@X43Y*2Vh-vBUS!I_2Rw0)~t-kP4ilB`4t<(5@XJXI?f0P~{*1XP^Sl z)@%M2dH;7YdJDW|J^=u92T5(3(4(9k{f1y2PP?qL{YqqTlz z2u>E3nh$yb+>E(7%li61It3m)3%%$G8n(%YYv+a%>0FKzZGb3Mo?3^z4{&0Hz1OpW zCxD+po=J_Bj^vT783RDA0)pqYY-B&&XY3pvV+ru%sj;9R`K&F6`~Y#C{ZMdDCg{HB zYw4m7+MCF}#g}+5kP5{5NsjS??g4vjjLbgX%0%O7b7%*h){Pi1xx#Zg21t=${D1+KMOp%0KDXkgo2 zC#V#B7!#oBXL3dF(A{&;S<4`q_6&BMY~z8-C?CP*a%^~s{v4hEnI|1+A->OM+hvsj zpuUCYo(p6HE1qHQGU&!i&I0|@SC9?aTF8H50bw$57l@b_zQcUvW84 z8Z+BhaKk^bSMip-3Cbk#TEou&p0q)JgJO;zPy7YW0>pzJK06%L^zD2{qj5K;?g48EM9uqXWt~5`qNGm}d**I6#Kg7rF$f7%@hef^a4rB06!*j5=jY(HK4aM^ikK zQJGuz);>h2DPQ}>uN9)5K}s=N^F+bD$5@T4Ehrf!APZmsCO9jAh5=+00TmU&##QE2 z%L6`lxY@4C$C#M6Y8|JEri|=6{m=E9YD9Yr5Jw9Ft%5hog%Mf{Y~|kP(4Q z?Nl;hc7NiaI-}xMW{sTJOARD(&H^#xHP(_MWWu%NSMY(qc-{_d=t^eDzQB-bkQ`rxhko2Rkio%gV>9aL<7>=rxTfs|w)|JL31 
zN|LZY0!N^kESSSQBp6s>!vyK*)oxg;teF$LM&9`nGEX;JZ&{FG{&=o}#?A!7jR!vk zYIxYLg!omP7@7zaRL*7dea1eMFZM(*gC4KIlyP;?uNFIeB*)}gAmZ7A5MY}uvX|y& ztOX@>U*N^(8FvAO0FsPIe$f|v=)5I7_Jy_`SlosfV5DF_^Ey+BA9dhH4lL$dqG|W@9g)wQ4T%BXJ2Htcl~sbfR4?deUWs&+F&KXJU3;cH+9>5 zvwdkz#-d9+XCEX^$PSyQlFN1o#(b$}R#7>{Wj)(7wRcO()IwQ@1kCyFmey6qYlA+) zf?bo3%P$nn@I4ZEFn}-bpnJaAYgmWg!rWw2vIFm6p4u*}Wx8TgPI`|C4twxRLBe7KSm)^$NW_ z4?Y9`MGVb+R;h2hM?gbrDJzox;2~j=eM(c-{wn&vs|4oH0c*`cr0*X0xV!%f3g$t6 zs!C+cVQ_rD111g&IdlzhW7M*BK)9Dza)xaj=0FhxnG!!!PD%*$lQ+dIyH&sRJlU#o z=OAkHn3?@r*V_2J?&E1Av~`xP^I1-kVN?l3@aRI-E(gf2l-yf~$*pXiK5KeOSNvW8fymPN4y`CdCom#w&qgFW)_FP@w&Xd= z_x`S$8Xh1B2%P8m8v-~hlvNDrr(lPE(Ko??xeF!)4Fbz52yn2B8}db7e&=6RD7uDd zt{Lr)ZI*4chHTSw;j*>$EOJT~>3;#71dO%hJNVu<2N!VkLd*_6s1>sNz!$`xuJ{{!7yR(s^ugMx;*qhn@tyu5 zTVy48A}hbITH{&lUeaO`PS+z_$Ufo%TMOpMQp+MA-r)w5+#_-PzF-SZ!gez7Eb?Ph z)~hxz>q-`6T1MZqe5q`~ce2)UXPy$GY)jxxSBz&JM+DzJ!q?iPOGH$ufqi^rW17LK zakD|Xz-P!*f3wy6ihGcyfLjHvU=OAj3|L3@XN_SLZa`n*o1m}urEdl_>zK*fIN1f= zRE01Wub-f&WQQ|aLq51Yv{PqVuhkctUl^%E##Zb-NsrCb3B2&6r1{-Isd>a<;Na2L zhslNdnQp@`)pK@e-hNjC-rV_-P6LcTc}Azi3ew1oZsB=s5&!lx?l4al{pkRD_1)|R z4!{QXDWL#g$iD3zYzU`#(*^bQfQCcqZOHn;EeeuUyTVQ}Op+4GY4NJW!;a+;gfrH zEdwAYL>p0}90-YLP#*telk@LE%Cy@(lNB`feaum#(0|LhR)puVnJ`9oFCQrns)AQ!O8qYz}$+{MzK-lwDp6H={ zRc!$Gcg|DN0>(*_APRy6`mI%o4%Xu8`JA+Xk)vg|2q1lBqh#*0?N_pHy;3}37)JMJ0vG|?w-aaP%PtIaBm$V^JGnj$%t&* z%Vy5jhqIMk_jmK=gkh3^pA5D`!rs< z&z{+`wNOAK)&)z&fxC@}eK^VcaU7=IfcYl3F`0nZP1s0eK%n?`}o$$IN`Ptl#6Rg-d1F(WH*O&v_mV|+m)~~7={V2i0 zw@D<>0~ik5$VZj$ZCm^mm&vBc8Wyp|l9jNGuG1Oo45wh9N*9cp)evjI-&n`?R-3yy zzz7`A*-=3 zx?1Lbwh}VztJjy1lmB~8T|Wi!9f#jh{A<$qYJPA3Zhgs|AAqU8>__fEFtV~{1!_;9 zvkhpDBHOc@`%D!9eZc-NbF5WBfN_?AXp$5d0CBJ!u1bN0Wq7^O)`S623*=`2A@HHV zO6ZhC5C!1oM&&!gY3`BCsf<4r&DQ$%&5TDScLqtZfwhy(Y9BB^go@#tBM=7=j9IYY z^8f^?00116^<_+DmOH(tmaZB6O(#FIpu06NmNu1gc8E`J0B4X|mVisZZyh)anNvW^ zU;zR0E#s0yoX)$Ao5&IJ2C&-59`WH65q1BULActaSvI8;Jj@d~A_koO+Sh$o|H!@` z+4OM&&a7fwRWx?bEJKBW%V3g|Ju&2l1RzReBU5Z`5iuDbYl3LEpKW(4U%=`m5h>wFMT;^j?j~ 
zE8G=8SXUJc{*k<3y0$^2UNXv>!~0tM)`U(n>Ar}f$}D^5Pd#5n#`DP2Gqr}aH`2KL z-E-);e-00@HuT5y*a|;pOo9`q0Z-5uYwG z#}^=c^o5=or}ab7JBlAM)KW#)-Mb_MY=ZO1qTtVWI?5g-Bg`v*km~Jh|)3{+cdC*_?qjU7&CkU|n(@DuEHi3K4C0J*@*|9Z(;be=eSg)`w zeGkVZCCNjwq=b#+E`66k>ZQCh@Q1Z@k1w%4pS+NA#fAbIvc?&3D!ir_PP?8?lfQYB zGyU$A!-)Re8{6;lyKyNziVS;UaVNdPM|wi9`Fq@)PO#}Ce}0FzJh{f0B{3v@@Hdrn zfochYiBnJR`|r>FYYF3X>Ktb;t-M1vC_myOYcA7mA+#BEipy1CQO3IyU^rjKPVm6K zUhK$4j*U_R4dd>mbrcz>c{bpm3h+cgJ7@Le^LMWew7wZIk(;tIqRKHNMr5KL47hVb zfE1Y3g5xusCh!I(h!5vU7dU4MCo{$fkS(&c&a>5N5}YHwS>=lSFhFH^MlT4?Q6X{0 z!Ke}G_E#AbaME^$$RH51V7;|`Iz-PyRJ8#h)MOw-XZ~f{jayb2A?66_aXp+z<>nj) zxgpEI&sYUBNEqi2EIPsMjvYld*6iAmk>9;%_qE;#1i(fn$PsWWxpNQ#LUyk`iF{PG z1b5`8ge{0g#?Ar}pvh@#50T+RgxwoCbRTPGUrw!EpXz<~(}5Az!96{PzI1qqb)sj8 zB|x>V=0b1Gr@dUTgu|<~ZhcwdBocGIc3JT30lpch@0^y(BI2z7n=Sd`r2QU_3DDV~ z@0>U+ws0glWznawZH>O{!)F3~e{MqsTso1#Ni!OW>d2)VQSGT5H4)n!+)Sh3YJI>2Q9 z4W95v_NSD+-zAO;d~^mF6Jm@62>4~Ck=MCt+x6zVZWK=+W_H%P!zZ#QN9#$~{2ob$ zN!F6h6}b64>?OB(K#k|Y@?J(MU=-xIr{DQoZM0{Jq*`W?J7a)u+jb6i959o zIqJPwbcTM>VeNyG8!)JVhW#MH?hT)eo1M`&`e*)R&pz23+;A;B;5+#dxX6$2N$|6Z z#kBX@`K*uo!XtARG}B4;Xk14}{?li^n@zUw4af3TyM38#_jMo7sfE-$YBj7CoG$w; zS>gxgVx8f!YH}Cb>;djf=1L zw<Cmd z(p$JDAS>DLGbOiuuC=XdVO<+$}Ry%APNX%)s9X~d7_yAZP(@m0J{tv z`7<2yBRAjm&->yKWtkWP#{mQwoD7?_0dTID6;t66)G+wkoo3ndEP$D$labXk*~U~l z$6$M(NluR*(h1~vwskRLhKU3)Qo4X-324?e_iXbqH?0M-e25F6mdz~SFfL9^3l>rc z{L9dv%`L)aGe3~!1Ocx0CV&jM+-J8R=U3?#82~SPSU6N*X8z{cTgO$#I4o=6db;6S za!06@_ksLbOP$YghDfm>hLd!E_Ka}&JQCk2a!-4c&*aq}BuNqERj?9?C3|0;Oavdu zg5#ot^ha=Oosci&hdmhQ(TR6EEISH3r~XSA&}Rx%j-_gd09i(o({mlW zL*9%{Uo;X}%N;(o`J4pcYBl}wNe2<6;-UJoaLTdrrY@=-N zp(_7TKx58y6A6;JX3zYK&s9;Wl1Gl_*2#Hm@US}ZT1m+)93UgDodQ=_1B?7UA}hmc zt?8^WcO*MPFK~r>u!`=$!2*(9khKP$f<`0)z9Rg3@;l|PWJ=jy>uc@#RY8LBnYWfL zf1?l9v;+=P=u04kKnl1e!VuMLiM+ahdy&k&eYUgWC+L&hbYJAPcDjzSr(gUWe}`|V z29Xs`LO$BN!g=c_K(J?q9lEY|SaNeO@-`>G=l2Q_3mA-t{kl)DlPJ+P@hqDlGk%3_ z(ouW`#!P+{0dAe>ttyK3hf}r2`n1RG2uH)Ipj>ExRWAnKJZ@;4}a2Y;mOBE=o zR+B6J@ddxjme~HiOj_}dDJxOoYxB0Ak<9Xp1oM5B; 
z+b8ey`?t(yg~r2yEFcH6kuiebITgSL*bRs?1zbRZLzYQlAiEQgCfJv$=`;fdDT-Bk z1~{A}(4pv^GaiXX9QDi_s}e5aO;Es%L)2>vIL-p9H;eSUf2I;W(yn|-Ikg4=bwj zD2!3Y0m&26MKL&#wJNIPciF| z1bR)zbh%);PJ|T~SEABB22~4l>$E&1**{-oOs2@4=djyekZQfzt%L)cI`XwU`^?9) zaRIlSwknEeA^qggek*hGr3!8A*<4i}W9wA| zRnN#k64&}kE>yKlS0kJ_47;~pK0~(!&gRjYgBn z_WafESq0QsBEsxhg-2BmM)uAeWAP04rqdDtDt77o8{2z=-^M2qjq}kJ^N|RHZyioV z@2#68g5SX$-6y^T+U$q!-~@gzfFNJIk)E29Lv6^IF2E^kf*W-rD;XFEo#wx^e(H?y_;vagL@ede&ct>sQS*2nnNv`pL8fx6>qO7%qUrqE|f$m*r~b<=LN6U9WTTE z$d@hm^2?t*wwhny>*${(4SeVn&9^eywwzU}2AFWH?9;obzCODtCey^H6y?YB>XNNSxru zb##kN5Ce0meI~-m&;ek**H@rU?w-N%atc6!)9i>vPSU-!+W=>QR3{M4D%UPE^H!%O z5n(6R8Su%hA^{QsND3k%m@K=8@B&I?yBFZi7wessqTNIGm%~)iLtZ>X1r^xpnQa5$ zl}zWn*Npd(Ok782kq3BzJg=wnq*vzX=Q63*ihhub-woj>c0%JsSU^D$iM&O)8MSFI7v0|qyb$|f*bE=4|`$!DHOToenP?JUN z46a44IXc0N`$-V7JA|N94~>Pc!G(^sH`d;LCCl1A2l~K)t$SjunPMmNuck z>Df0Q*dvISY>>6xB`B(BbEpU|Vr-5Q8^-B#vmFAlimV{O^xF6ZzmsU6Rdr>URf{=v zZBJ~0-OYVQGWZ?IO>SfkM_`sJdPED!?}+L%B0wJOzC!@u-#`BGkH6mSbJkj0U<=dW z96RPS_~5DpaIO6{wSvI`B;J^N3*9jMd6F#HAkVORXHG4bK-wu&SXMsa3EyxHTt}e&T;hc-#)!;*&MP?>m(k&Yrz{>&|ok zBcO>8AiE-80BOqMlg#ZA}CgzE@_gKbHL0Ol8l$0>A(+Cr0*ch-5jlC04ry=Tvd3-pTo1T$d&=z2G3;6(0!nxQi&W;2 zoOK^xnafCkb+LxX1?NC6KF>ysxAxh$xMoh|iyTD);g{+Oy{ih&b`UmM(iudV7rZ4C z>m@m0Q>k&$Ay~~B$r{sJ^F_FPd8RBfU5A%GBfxJnevYQV(RWU=lZNPtsu4T!#ZIf_ z`0k5tmFV!h`Fnb(8c{$qkJa}qx=1hSo;5{;jTy}=1?9nzYOm?LcA8`v^lz^>^vyr+ZNmHCrVr#12waDiRI;H!PL z69eddCrXkB?4*m`)k0B1m$zsW<;BnWnY0j}p-EBf5axjo1K-9t;8 z=ifOXsAK`2AxQz(@rL#+_1>%5W_yRA-@PXMnOy|4OUAs`WXH4t*OCVv$6s)6Yu_HR zPb)R|@UM8NttDTrW24}B*$0X5*;3m6H8!v~VSmtnsm zUuRj?nO)|HI7Ax;k>7c2D5uA<%fc~q$6<2T070ge@;*7-;I0Dw>g$f2_8bR8v}V8v zAz)A){|cNrO=}0(kXI48H3D1$jxn2Z_N$5yeE>`=@C3L`m@AXy?~l@au8awvc?Kfs|y&EE=Y$ky20Z@$@l z+L!wy4jl^&%n@bJ@>%4?vk-QHzIy|8tz^ccios@(0w9c7CX+jk%kw*_%QIzBt*d*p zPn9&8x3b<>2X&NKF%JQ-bvD-7(&HYTz%~1uYg6Im*a0$7aIgl7*-zk(4EPKJBX}nd zauul2RWkepaLbB$uJvP2h^gP3kFk3`?2;fV5X%{rP_kAk3I1P}-Pk$PGF@6L;5M0+ zT<|AZ?&Xv@qKdT&# 
zfg_)K=Xa_b?vISA3>6raS%oES?w(0P>!-CF$%5JR`cA^m_{<4u5M&5=@9w!NFk{Q^ z;q%tZ`qB?r>i_oO^}^q(2L%j$XFGzL-ivW$kE?%mf1E_XE`Wdu1%T$JtyJLJ>mkSy zW>`~LBv6=#B{U~8)@Fo&ItdT%(oeq4&pu-=?FploDjkBOe5-Lul$pD#{_aFRV>DhB zS>v}4L^T0kOMYoZA{%~|uDKT<#GcqM!sqiij`6w17>tkZ&YG=u>eviF1Ix|5-BFS;rNF2i1bxpA|l5&^YJ8kYbZ zG4Xumb9>SJog?Y3_OCPSIb-obH|P0soEQZ+#s9V%lTY2>Joc0otMMh85U z^+OivZ|~aKRfcA=fPM=w*aefr+39luKqPOrzIcA8O1Vx|Z66axE^MxMzOcE7Rwq4b z^WneX&#r~k^XRRXR`_Xb2&#;+OuV3B_j2GnNv4SNl$kY@Sn0)zY>~|1U>m$Ue5I4O z+6#f`nveg}?Xy#|CZRx7WL+hMV9wn=H|Z!@!U?Uq;QkxcQX1SB{@EUcSOJt%0$Vh0l>8Tt@!sn`_`}`+%%P za<5B)9LzIYO3dpQYdm=kPC4cqSU)+)7*){PFYdrUaWmnTi$;+D18GJ-io4nSevEPho zP418%*-5NfA6rMh-f}i4S3e+NP}cP&v(izhogCC>*6h@sNmI(IJ+r5_x5UaObEb?G z8KtQ8;8J2j>DXKVprk6>L8<&bMf|&z>5dFOy(R+4A?qKNxsY9<92~s16pD((s1OMB zWDf*>t^urNDj2-$RCFSWz4NDWx>x%SkP7|zz0eN0BZPEA)?aIZbpY12i6OYavaIA; z-}jV_XLb^iIU#W}hBkfHa@IRkR^r-eN3vcB3sTUAOoW#V%l>yQ=Op7N(_a5@o0uJ`(*_^;^&A6kR#vll!H|`_Dm%L<2Ib{x;OI?DWS^aiwI1F}imz9+a^J^1bsR@?h-#nLDRron`v1tF=dD%m)U*PDIVz=m7ns zo9@X@$h5b>!%#tJax-^pCoqS<=34;WM$|TPH#WaF2c%gt!1%1YwShOTqw`gyU;tgA zo9&^Iz4jRnv?}3TuNmyB%3GIO8DI{ZshWbE$ugRUIU^?RVVkW-*1Gl{dO~-N6GjNm zedb$Le&e?`DjR|tYlo2feg3aK5sfj->EHq#3xd_w<*dq)f0P`M&@neUQ0Djy&k7u2 zEd3D>k`=5Hgz1)$RdsNT!2hast+p1}JS$RX7&C)LKUW<<@aZ}G&bX!DPV|5)Ff`57|nXRW+OVkbl6TJeObL zpW`U7)4cc}xM!@rCnT&-9_-qhvN3u>raB>vgWgMI(2q`nb&a4;kYG%n5mwYD$aW>v ze2yJWonLXTq|B%IXU?4I1VTFXt$i|geDIYGnST}Z$??sj zstozXnO#G!5`lC~6{RGEYuQ;IHI zTge85p*?UMJo_UXHY(V#UgQZEWF=+Y$rE{EQG8t8w^8x6Oks(mSN0 zeN;$%B-9wSWcnMd3h#ZMQ#BsJ?5;hmP3_1v`M@b8)O?JaU3s?8x)<`w#^Hnv?&>tz zECSSv9|c(KvtanFLUU(rcX%y{;CaRe6J?5L3ueS4e^oZU)BD)94D#GhdNh_c)T>mq zZ>Bw0XNPiBiRXvu3Hi|tc;>mtB73p%xx+WE_QmBh;PRcy%+6PKk21;TT>v=krOZD< z#@F@IU@b|t^3gr|L!R^pp1{Ae!;Ge!=~(J?T z?94dmBwOm`zp!5AhMkdnRZnwwJrdrV;n`O<;eLER997L`lg2F|@;Cki*0q;}9Qejg zjBea)+DHU-oVM zk|n(#jNPl8SeIJQt~xC4;$q2Im3+w^dkL?C*sFV=1~fC^=x8^jgfSw^Wr&cqHniT& zwy%oq{~RPE1NioMFg^qndGLi`a7f57GFvvbpoX*KG%cKkFlJfDyV;;CVSW1dHfjSm 
zbEu^IR`D4#a#7#B@*Qv_Hw4i7Ft$!nQ2}7I^Zh_&5rDj)RpwUqBB!_8q`w9>ZCXuV z$X+`PW8v(8hhRxI4_Nm8fSp4<%lwq#l|3OlSp(0YBLJG7c+W;w3BF(m$d-o$U!fh^LcB@##F~TPMJ*r z5rj;&fOBaqT8WS&fL;$UvZx(Jwg`~t^yl8TbgrX31%k%vb64}6c0f+-lD&31UfD_4 z70|$k`7Ws)PH;@UL!gcE91Sw!Z?#brMAw4iyTFB(*r!Rw}QYfgta zt+@}UfIzhYzUl4u)$d)a`dIbytqo^00tDpH`g5?e&9Qq&1glK@J!}(@aYh1?_6fMJ zxe1U|GT4qeSO@q@N63RCqw{1~VuPH`nQkIUbPLYVrH&}3YsS%aesCx?n8S>OkJA$^7y=Fg8> zFM2_j`3roBtodX2!d-gla;=ACDzRV<$&^goi(cJ5Jm;yLzIi=)nS(&m{vvvB9R#`g z75YVn_<)3~JFs$d!Stcm?Bbb4y>hz+}w|z8#iz}%T53WKng&utDar9U@WqL9eZBj zBYT7p%7B)o6#!Tx0S%Dlgg86j>*?n3w0O|#j)}JJ=7ih=XwG2=K0C+H_Ou$Rz*had zvuB9B5I_0{1ZBT^w*nb9PuW~+UO=&HBXBKUww82|-Pg~~;npHiiy4AtF4h#`X7jRg zixypWx|Qn**DH5%dmLo1=APzvO66y@R6-;m9}q zWJku;ex{=YVnnY41n396>7}N-&s8BM4}pv^A^GeGp7&G9A-D_p2BSJ_2Lm zI$sS7VH7{jepOQ8R4*QNKe~{wm#{#hVWjcV8JJ-WRDzxCANR@GZQvrp67qA zFvFLio_+HL?Tc&c1f9SQ=(fEx{8sy+$P}Kc*b9t(mfYDRjLxTh5~#n?^CzF_&>&pQ zUQ5`*9@YTG925h{gKW%Api7Rhu-3CX+_3SrKax+$Px%|)BO$^*wD;Y4@88*+FDo>Y zv{D`e<9HD#0AbP;3E^T?h*o4YhXyQa9dSPyIRYbKhN=gZ<1txEj&M_`HYb;bay^4a z@PGhNr*woM0P^`1s)%z2Gl7pW%<}XTrFeU-XEPKTLu81fTk87PDdt< zBQ#H>jO;ic&jWfC5>UwqkrM-7-_l-|lB4AGJErvQX-@UhcQml92!bHnT~=ylJ9EAW z7ehn3I%ab^kb=U_nb)3W?%FA2pXm{B@%MSmYO*V%k2I=CxZkYw)g#?;%K*&2B&~dm zSOC4Ab~N=NIdW9yXB^g~<5mmwBl_C2I9H^yvbG>qrb5t*^vI;=SPLKmBiSrEfZWLl zT4Mno`}0p`vaJ2t=1ucvSJs9@*LrqzWM}g=2exa|pfS%kmG>;;<%Fxs*%xvISoKnq z8An#zUTb1J##Uv3{UThPdM}Sf6jV772hUMyOunjJ$m4u3mATZ8w@Z4^7q}#W1N2pk zkQ*|lcgD=#zOlc;yvzrt(m4dweYJ$awbrzKEMb^2mRPD%P50-^sHB&ljaWE0k9t#;2q%N%wXJ4qj6$%)Aj(pT%(v7PL|+96WAYF>Lt z*dzR8Kl9W-&yRE|w>QEV$c66p61rW&W{$`DvpvKa`RWjqql8SsOb3Jrgkc8^>M#-dL@o#@ zpMvBH7+}=hBbeXF?9FG$S@M-0sXF5u{E>j7J!QR|mY#RY6<#9=BDupJ={K24Xqaa& zem)9p3g%bd0Y};o%xA+PiD3I0>Am0HnO|b zE+?1hXWYbE3bv8f4voO|_*MEh_b)#ovq$R}=9|CD6kY6xTib6Dj}ez4jXIr8)TARe?HV}CV9_k#BlwPfn#;r11?3u{6D zC4hfPg=P(EC=XuY!jypH83@onbB|` zw!;Yk2{tFAjjYS089$C=8D!b^-d&~2%i$q{ zK%A`0Zj&|Aa-{2JN9i&9>D?EeO9sYjOdVJ$2|Iw_9bN&a+oYpui=4U-=NI9a`!C_4%&dTzy$OD-mFooy?yu#D 
zZCFPH9`4X#l?G#ya1-1i!{ph?n%`(S>zOc%ZVDRsIdbtgKEjtdSueIuMsy7}!-L(s zbXI)|C)tK}S@?FeH*WPS2se^E_gQAwwc<*8SVQY{bG}vZv!dgQM`V@Or+YVhs`|W*Q3{6F}MY&PNEeJzEu; zP8tKu-|ZW#6_?#emLdFfS{H=L%FrSJ03ZNKL_t(wL-GSz-{JMN$s_ypd%+Q3VNQbJ zP77pDaGnkca$xvbmGAD_<;(E_@{o|MN<#+pzf&#Qeq1HOP7m3tuTBN^H{&h9Afw)Y z#eYk-!#`X@vO;iLMcUf%5qtu|-K#3xgU+x+av*#ARE@D9>diKbyK;JZo%z%C=__H2 zIl~dyM$YyI!jbjBr};c`_vM}fLwJCvO9r1|kabvFGs&IKvzuOw>)CLscLMSO?VEZl z)7urAX|R#KN|+IK0BrFI4xwuUugTSV<7Wm+aR{9;%2EKwj_I3~on4De8}R@ZkODw{ z=e`7@LecTk35J70s4b{_A~lR?ZjihiSUgp-DZBbt&G9U2zCNQ(TnLB&r5Bx&E236L z9kC=2*?V(S4MMOfq=4_}#gkffkOd`pC&XQ4ozsD%N|&{FPaut4$QA}9?a?qt<3lo4 zO=MdEv&>a{vR21n-ZF$pk4zHch8*}Er@=UEl;$`(Dwd;?m5^l;DEUWM1V6HN#)DkQ zM%Q{&h0k?uSaV&Zd{@096WM10kY|!705BK#i8#B?JQz4a4Jdl!JAHL+hj7S{YWXNL z9{Dww$Q}FPfH-5%sHIMTL&pS^k+>Bo=#`+L6AhJ?t(`ILf;V=?#_6FkBF5}!eN(w{ zawdomXTS+0kChk}Pz&(LLp!Dl9jC!b!-d+ProOQybLO>()D~1d9R&gj>LqqV1pwJOcYLM57;zU2^^O?D8#gHs!@KT$(F^0+HJlk|72_(?rg?Z9dEyMyATmz4=F72)Le16J0dA79@G}3>( z#2De@E|~pEeZIPPt#@pcoJy`$nU`4Z9XxQg{YGaTG#szmiLdzi=tb4WxZ{QG&zAfU zz{7kAOFj+%gTeN))vo@DUOf(I)=Webh^Wsh#9Rb}%-DcNLI)@vA-kURaui??s^ZkOKt8SY^=7(|iybgpn*d zf|7G&q>&xK2Q*|UeW%Ds)4G8(LW!)4o`Lz<#!=%zI;=H^7x>LNWdwi(cpy$ln@msd z267*cmm@VMWJLsDPwFn1H4oq;W6e=3XLEE0pa`*D?v&deLWi~)jEl^dggqAXW4&_ z#g|}fp0Z~BRd}?KR6Yghv;QqU6h-+olwI=#mK;7ohst{qsT1H2hIi&)@`u=5p z5IQn2SMwz28PL0z=be!NpEXzW)-qyjvQyz6y+%6e?Jm1|g$p@cI?Uk+z64@q#y;pG znQ^q%qSNr`b?+5m|8&B91T65ZDvtn1Kn@qIzj?4XxC)9rE zi3h>3WCEgSu4K<|_~K8jNiTf6I>qa1t;0m)Ab+j0^Q6!iD1JgBv0@Yrhh_4uH^UR@8$*@wb+mw zy@LUQRXzhgn_HDIbAwT^M1b$}(vsJj5( zk+Z6-gq|S20GAy3S2*C=5$oRfX8df0{#b|I$)0!Rg-3O;kX9=DD#*+4*@FZK|H$9; zqT8o|6Io4Nt$no9P~n_loj-wV>@$DGrpcltNdC+H!kd0a=HYsBhed)vgqmLxy#2JMxF1AHAHBN1bK;xt>9)}b0oL|oyhPV z0r#Au()!2=$5GN<<_6Qd}?DhA$J2_?n-w zBUJ!Rg~E5&)#Hwwm=B!0Q`W(E(=ul6UYL8nWMAz1l0%57+D0sdNGlsGFkzR z>llITCS|3_1vCt>ca4yZ5^!x1N39(`BimBd$9i&F09};=;nOx}95S>j37iKfrHo8A zz5UoT0XF#|@rW+aRNW&d_ws*Ey*96|QE`&tUQf89dx9hD0+1VvHLK+;ATbvK9x`BD z9XH)Rn_4>rndB#6D?4lK))O%UFg`;T2%qtiF-L0*vP=G%OJpNwSrw``DGR^^1mwy2 
z3#3%+{GBsyFBaVZXx4NdzddcHAX(7qXB8i^t)*`bOqBe4Karrcw{){pL}7PQq5HPk z6$V&K;~_g^6!anQ$O5NEMq27-K)*WLnRuud*>m8DmJhN(wv7+YAc35P`&ehszjHcI z7$zVB_SVto1ii-ZImizA(=&P{FeeAkXag-R@Mt_pKV4|gCf!%%gnw|YmxHp4H~U2Li)(AIAd4;2 z_oEYod^QX~czs47O}2DC!o9W+7i{nsWYzKQyFDNLNvA5pB~=GLqnFzi%%{)YgMaVz zK!N=FvgDaMHc(ikxOiw#q|?EFcL2U%-w$B842G@AVgRMxGh5 zh_uvB;2GNA7-_9<9BL<>`8!9Zf`QyI^xDEIvy+?l7~>Og8&huvr{Dh00nw4FK-M2{ z2~Inrms7Iozm1gi1;Ep(0#XhikQ)5Xf=dAg zJQ75}83C@p!vMHsF0$kZyhJ1cSY>djxEKt8_#O#=t1SY!ybN$UDj$x?mh+ zD$!%Vm_SLuz(;r{yJth@LGIcmt1cet`w?-QJ^gIfI>8ywJIi*HKe@0AI3xJvtDLYz zAKS32owSTH8M#KbnVjLzdH_u1huugh84LZPcVy*f<8=Ryn_siJRaEJjHP{`o{izT9 z^z&6{$j@Bq72Qu~+`r)6wIvS81D=pMo9VcH>kiA2Z$1ZJlQCaGH`cBO_jDa@LElvY z-3z9`Il%&Q?Vi?5aE*Y{JtUi53Y6H1FZSiLa31-$CLIK{OA_6cpS9#qI|))^jqUO+ zY|Iy5M^~S|aQSEq`G#KdiyOdH^XmBgf~Hy=$uZlnf?N_M{cQgTT=wN2Rc`zZezOgH z$hxUmXyfC5@D^?B9n;JP_)<6w%S*D-8NL9YnkU)amG@6xH!+(&^W&1R)-KF{TV1ja zI3JtG4X<9o&^)V3tC-VMT+KMOf~uBUD*?He^nRkpJ@4xQ%{Le9%Bh|OQbo~-NvHRS z3^_~RDLaq=N+NdP0C*5b#7jU>_A>JO&AILFWz6lB0WzZ4%F4B4)i3Wl4j0HFQydXM zq`(#%ch=F>a`8=NsR!5}zO z=NeFb8k^o`Z-mfC4*)s)UTBFgzI;*C0PgI0xZNa6lHw=G=4$01z7vje~J- zdLV1$9!Mm@==sQy0E~{vz|j+qhE53_kQG16BKO%G=*-skQZd4K@8JJN2^xEP(gO#v%wZ(9? 
z7ZD{z-3g(Mmz*RO1&dlUjfH>XL)n{gluY@?JC1(a zfQH~J1>Y6=F2He*%FFen`VQa(3;>Ms*S2MGfDXsZkx+2|IE)tV6Qf=gGo$RZFC>!j zQUcBs!7C#Uyjt`Wa{`@tA{RiElQxffgv}9wRqa{#(72c9wkdS(=Qx_H=ej4tD5J#j zQc@KJ(YPLdW9>aTdej9pND7iCQ-biyTp=J=4iQk1i4v-Hsnr6JM^fmB=X#!X6*vK^ zPB9_NHo&_U&{d^M*APTNpl?us6s%##G&K@wi zM-HU&eD9kQ9JZmnb}EGUZkES05o(S|_IF2o$R0WHEV@Y#$Q7|SHU#dd{W067hjfWv zxxV%hwr1{tSO%25wKg8TrzqQGHv)uOyCVMXMLz`bWQS~5RjmDu?92zgn14s!nhPMG z4$!!j+ig-sSY=78D9r2Fnsu!Oa%L|Pa{*g>1B_W( ztR=gk=l*q4C5I_!5}XL^%*mJ$FIY-eut|j#Sz|u}8{@Suh`vCE9?O`*2W_NPn~b}F z#u~$_6*D?!UAq}l-|2HcTic)tohpsF$f_gqf^LKgaU%!or@c^e02zfLT5ibI8d{?# zN7~OIf{vR%f_rwNPwlk=a3mao@H_G`R+uYsA|QmlWI^_@PjE^9U_`c;-*69!8+L%W z3f7PedSb5doUd!|(hmO%Xe40hSuX~DB9kWs?d`QC-jG-NMP?;8*?_>-{!a7eo8Sz& z(N_WIdou@-FKTEHY2!ygD1B?@a7wML!|n*?mu!n5fBo30(bmifQ3{{Qno z|1mZ9nr&xqIO&DQF963$$vrgI_WQu*hy)i366dVC+&WbGB{O>w*@wX04@GKceawCKGN|6nD z%RbSa0-A_d{lzoGRI(uA9Ebxv_n_Z`H$=dEa-2^#6V_{Q?Kz^Ojoh3y-D^*Q@eAUN ztwSPq`>JX=pf~Q3{;;P`tFyNLFR((Akr;A5dLNE^vU`pXNnf+BbX}k+h#(UVac1?Z z8X-Q2lIwxK@w?vrdS4dZk}ZTW9Iturf;YO^K`N__v>BA+=csFsKo&SGL4)7bZrQr~ zok|Val(9}f+>?&`TkV;(anQknJ=uSs^~J8}p`g3$C>)T5=CF`=1X`c~&y36S&D}HD z0qjw2aGgpG45SlnIA>c3y79^G&XdQ?tNm+im|RyI_j|#=#0UM*=0`5D!dyMe8qjfT z2M1J=Ya61E#v)i+_e-3)P&xpM;iLJgdbWxDti4u?r^*2`EPHP~$wxp#UMe)mnqZwQ zVUomvWB{z~G$H>tU(ZrCAZK=M3~-B$u~mPo(r|V}bpcJdK#srq6$p1J3*91rfm-`X z=@%Wc&eor;(@$+V5@sqcvpu$FnRos>pIh5n-zUj+eQjrK-u?M6_|aSFwUJw6wj)51 zIP*DU64=u>_JbD)fcP%+Xz#2&uk#>*yK?Ee_D1m05-JiT$UdK=YDq`V4hXt>UWr=C z48BW+8IkRDK{n2g>9)2nxKt}IpUGb7t4iw8d;7viW7i@|mN4Af!zUF@_94OI_c>2R)VvCij}nDPd=JloVdN|M z+DV*rke+lR<+K<44`v$|yo!6ehOhMXjpOkDD*~Fgi|TW|w3B1Du!N0Fmvt$qDJbYW zAu|jFh|w_!PDn-vh%j^xQI^m>+Z=pntQbC>JN%sU|j2Bg`C8`z|QO zQR~W7BF_OHf*Ri=Cde|vu?uL-U$p}u)P}Ho$=94`MqMVXzFPziiB;J^GCY$b23kPM ze35-XZ8KPFvI7UtK(G-hq(HW^BdR%QB!zQFIs~1F9f#3N@@C`@;A#m%XjemOJ_Tt4 zL-%FC*39|C-%*7bUJpEZSAwFxo%KU-{>m8}5Z@>Wg@i_#o zwl`#&WAl4JPkyeIJ)u*;8F+9WSDCdrCQiP+W*n((7ah{VRF7-@%XjqM{K!zC>C3%# zo2zRF;y~G`zVTYC+EBYY6rezKem9>sN6tW%PB{6FZTPN&xO+Db++hE%YyZL2%XFP= 
z1Q3F8tz5Mn7Ch1+dTSlYlua0yHH?7R`=qj>#g47*PI61mlR!<`8;|?41Ns?0&_jfl zZ6N04M-LG@{{(iP8@|-K;yHAkL<#XMUR_tzF80Ebns>kZWGrG9_c zv0;Q}+ORnYR>+er&|&x|se<@*%)2?lWf|mNh}%ZpGR+btJ_n=Mm*85H_N^J0goSlO z{^_Q9)*3{{Y#cG5Coq+%)$z#23uv^e(OYx7s<7X| zi=Vh=Cf>|Z6^l-1Pra{4RR+evR+vjJ>;AHt`#rKpbC{S~+nY+f0Fa#Mkg5$m>SfzH z7_4LMl~sYP8C_soTB3X*Vkx&x~}**6oprf+Nv z0d_xnMtA8No5KUllTG+Ld&gPYD@V6g0q2^mJf!wpNnt?>d5{x6L&haF&8?RylcDGK z-m{Xp#!W|dNywkj(Qn?0{5+fz{ z@|S!lKZ5H?VA20sVT*HrGbsFRTh2aNWDc40u&@LO%mLQg6CBu4Y}pVXLLm@XeYe1f za}a$Z2o}@dIh0z^7@gmBAi)H^32Yg+awx*&Z;Z|t;w0nFn30H%4n({dcck1suF8XR zPBK5TckaoE8SYU)V;tlr69m*LsqFKeD86R`EQG-v$;jtB?$%sn!8*7ga<`I>b=|Fr zF##$WDnxf~EN2wf&NG2#8~hkEK%PKvegND!RwG{l(tQ~u2k5)Ng98DM?g2oJ-FRe1 zkv3$?*tAypjs&d-Fs#oprcU^xryUXNegH&3WgP)}hY%pHbcpkzqn()_$RN>5`Tl=Y ziCE=a;D`u$Hpk16(nX{l(K{nyJ9~9c^3paVD<)g71is#fthK>tVX(-HNxB2*rNS-;H0-FdGOfnZvhwdT}u*90O?_R)J1%^$KwLlq3MKa)`-?bsw z|5X449)32@+PUgQrzdkmb!}B-i;xTQB~EfwODanv9_}Q>imQI_h%*KXyllwV8bmkAx$55)7Mr8%;TJ3r z#0fwO=8hb>d6JPpK_Dz}pgUSf;5eeqmm#vU`u;DI4cn1qcINZ!2}a4j(@lQOIDAQ* zkU{rqE$`Ua3}}AWLjhKtoyn?xc8xVI+pMA}aYk-*kG<>?LUY}(bf8V`_(Uy^umGkD zinZcY9b|U}QDp2NJoZ^%TCIC;3cKTHROuu(Gb+z2*3_c?axcUmmrG`;otqe^`*-DTqlv}=7HMoTohC%+;n zEJzbrs)E8t>kUtgtG%RWdqY0qr$6~wa)ZyXOn1jo<@64h+;Oq?nn~L76_O~{&$Fy& zysD%OjHSmCR2>*~)QR&Ax%_bf4TaV&MDPruEW^yU%+?BvLg_h!-u=+VGGLZ7;}~+{ zwTy5YdT)_-#20uKb><*E7lC8QoFii8i-WNlPB0@&;s2CC>kR{xd66l)BK${0(pWmW z8i7G%1ZKMzYtG1>;maJ8pUnJ~^X;?7-o6^LAS3@5p(9m#=z)=dhx1iF<_vdLpy?#6 ziF+XT0BP=h>Dgt~wTD;_Ag$fRGi6;o2eD#E#uV_MIipCHHM6FkVGJA~BL;$gMl7r` zzy^xi(l`gi&K!@*_3r83#-NIUgn0(ShosOwW9?KVe+LMGKaxzBj1%e6jwk3NFJH2K z_6wZ#w@%%0KLAgs*$jQ{G&vQB`j?+nXx!hNtTzBO7S3jU$zKFXW(^>!99Tbu3#sT- zuRCOG7pN7S7N8*s+COB=Bi!jiYkk&Iny!dBsa!JV;|+(?()8$NM5?1L3R?XR;2#^ZTCzkNA$kG--h|L73CmOPNL7Myel0ew|rk>nB7mrd=s zY2=EInxFfd6Fszk^b4*Ch!AV{ngN`9kf*Ay02+az1M>teYt+e!NV@+cdB~hfykvmj zP!eZ7$&P%(6epno03ZNKL_t&}JM6vb^gy<14#vv1jZ4rgn6Zx9+RPsw*HRI#UlG~# zuub3sCP5k*RaH83;MNI_u^pLaRm*z(vq8iQwyFTZB!6e8Fq;o#PvoJx!^iZ#8A%v_ 
zn~9y~b7x*NpgB5m?i=Lf+xM7(%=8^_K`_jJ@a5(NgY!|az!$$k=Du4)etm8j-^tE1 zt)ET|b|^6Rd4AqrC-x`#Bmp-w+>N!smW}df^q?d3`EtKg@mo)iydyj72;&9Wk`3nR zKo7iQm)XB_&&eeUGJKaH6)doG|JYY8W*t~jb-OA!u0VGMY;feL+n`PW^RPZzn>$

S}S_r&W=188z+o>Fm}Y|C^NDnkCCYE-DYM$1H=(;@-u%9 zReyIcY%?GK$f*|!)&hul^uoez+p%4RoYb$=sTi`_zBqAh#*uuRQpQ!`;v97N69Kl6S~dT zJVy{@-C>3x`EJs2m7%#C6Tf0SbV5ax{e7};#&g(`O+Tfx9j>F>NR z41=sa3TY6O`2jB90Hua5D%&z44#{@K#nqXyG zIA49-6!ge20lhXDS!~9l6$S}u^SFCCZ48)o^t0?>Eg}@YmXhA9P=8=#$ae>wgT=5TpOUt2$2wtwYFFIq$Q6$m(t zg55}d$nHioW^hy>;hqKmoH1f9&@=CEJJEvjf`?hnF^4jI5xCkk;FAsWY)~b;-hWjC z*eaW;<*SqL3Shz>*#+>~9hld{)U3YlDoFE58EQaESGQjS?BPQn4kU;VWP(;@+V$Fh3ug zU*)^`&i2RAP2=g5MObsSPx5LV;V!?y=c;BPs-KWgWAE+osviQRf^lQfHNgh+>1$gr zTokmy0JbOCXdm5?ewA!7e$V0y1$?d-3|c>YjxUv5PG+7ZIQM(&B7rAx#M!L&Q>%V~G= zQ1{@F1y{=6oRds1@G}l$)nMR=R9^{Es08{^OgS|26IR`kZLu3$9 zIx6X}U}5lWdgLgq75N3s1xDmk zMoOt%dmzV#+yNxc3vu88%w?YJG&817nIlU!(3;Dj%W%&*t^i^DND>Ev93iN5)0zXL zj^b8XyPI8JpFvInu1>b=nCjUtUh4xAsEx=xtt;JN%V!5i9D$y7_v|XFNCf?&Pl&MW zq%k<91?awUSI-nY!6xgC#0aeEd`Dx4aZ~2V z3fUn}Fr$s9wb+FX)?Kp8zJrc~UW1F)0tO@haCkc7CzTBM668uy8KcVNZePz_s{+gU zyZi_;T}J^uX18pE?!uJb8xuJ!TdU2c&Deq}NshC3jNQ?lhz(q#tGl4{&V7EDYpPVJ za3R5XK_{6SV{MwSg#Xo2EXb_;08Z0e*a`EG5{0!pTbthD#>Z*Pv2O5|ebH6PC2P!< z=(e?oDb`R0n}4ol@J%bOd3jFnccEX#!ahE&wp9&H_xN7e+L7nv1K&$juB~0>Zd}?l zwJLYi?@TCUD`A1}e2jn{ZfI?89^~E0uWWb*HRK>^DA@o%;GSUCn0&pJ?XT8t^Y8s} zbogoV}AIF6_>Rtt5VgV@;^XhjO*zqj3j648vhujX9{ExT*SOLI#;IdGp^0DbgEa&zFI5pY$MUUXPfbpDhaOy1b&A&lc($4uQra=z6xmIA5v)?S{dL^{qSpLa0WH> zSGF5A8E*k>*;Uup&o3zBi01~@FcQJjx~Y8vamUd&fv<@k9Bp)aX234D6Q2flvc24@NDv{}0icWbWGp)l>yB%P&$I0n*9EVb~ zd-fLa&i^MD#EY{>h>ZK_*{cD~z@`HPBB97nNM?~KRe!b9*yo}$!+Be;!jOS4RS|WlBJ2KpQ`9FO|_{fW1 zn==P!9}->O9qD`qwCRB{u_O0(0-AYgN9kaS-s3axRTUV9amsX|{eQpeq#d%N3j&GS zOnWQ3`H&MEA{)-DBjwG5Zu%YkrQhc2AH9ZY?PEL&3<_xIx9Xgv3c^Pp>EThkiOTkZo?%S7W`60% zj(o8>2?V5&d=SjubM>@+Zt80Rjo?$T&Ub!V_4$OveEL~yFSt+@L}uv;az7IrlMk4G z>*3k$-=uqI-2^2bVGUh}Nl2gP3&Cxt zc%g(Q*}_EfuLWLm6L<5R+UD^bZP)ZjmkC??q;>I)4F8yb1|io!$Z;U2K*53t78ofq zN%%4#y~SQ8ki!IAB6bEes{&J?6VL#Vd)HEC!8w1~cw_)^M=CiRi@lz(mNFL1=;sGs zO87pv9!u#yOZMa3Gc5OTEC9vb=_9a%Ka zPRBdTlwE<3=k-1znH}@ilF%uBKA$~k)A4(Iy{w(JAWP(_)AL+!jjxW-jeN}N2)nf& 
z9p)kU5U?TT=E}+M4hy)NV{4zjyT3pa31NqGU(IyRbeN5s4}C!h*jjI|etI11opG+$ zG-eoP9ejtWf(75nxmH2;Pj|^tUH+70~F%h7)v<%#Pmq z(&l}Y-YUmsttT!-z*XRkQSxY38w-q(0rpAnv~!&u?S3X>_tWCSekGOY`Q7TwU5x3v zvn;Z4Aj5nh?2tr3^V~z7kD7{pXh3V5>K8xuqrL))w z4uuo|aI8qQV8-VezF?-~TxSGMk=Oql0}%F{dQ1@_QE~ws2d>RYW*kAVVQU^M8?ba9 zyU)wK8h>LXR|J>*$cU2%4oFQwOIfKiC+m7alB~6jjX)ET|D<1cm$4`VkEkP`= z9yqtb5`iGMT|h&ooFXShkJtfXhT;Em0?qC#>pvf zTh7vp9EwaanH%p69LR}7Vw3uTy^M>~W*@aT0afeJNnv*?7;Dh$vvd)8^&MWo0&^rs z4*uu_v^&t%+83mxTh>HRlrPk3#T?F%6fV<~I<6aC!Aij1*ydgy1to-C#y*jT+3 zkZwB6L7+*8RUgd@@#yp?K>&N9XPg?{^S7&NXVp16Z{HCoN*0n4XD`!ZN7l?bw&$k( zaC4pDCtU2MnJ~Au67!e2^>15oa+eed6pb(it&yM_+JZ`C!V z@>9Y}Ui_G(i=bv6Y5jN1@2yA2=tE#Q)#SFkU< zgnbF1_&HL*kkY(d=(i)7_F2vucpnsCx zFL@w1V7q*89T>(`f~g8u?UQhve}*X?j${AlOe|i_$$B8%PO1|Wk)!U0+0x!i$8A~{ zNQSX0L3IQe6C37h?)aChalQGwxfIL_d?iNWlxwA`;KQ6$_PzGt(<74IvsTFDM9=vn zNxE9%Cs!s*|BSDHZ|45VXa0x^4Iv_ZvSNrE!{?j|7G&=L2N0;8h{AfmiE=r`0mfRA zIL3L}$ZUf`0y%rm7RZ&6sWq%N7+EokKi^>@^R65!=wYOcoZ&H=f|>U4>`JS1k%1!O zrLtnaRZ1u!qF7MlGk^tnXr+*SSYMFnGl;b|4?rdWDl^HL88;B{yLo8B9M7FzQ_rzZ zWMlk@PJQ)P^F34WNml)AT{&7nZC*&RIq%5$3_3cpl@W6`NTqqn;Ar@U%_|@- z%i{islwea^8oAUDjKCW^M+Mw{7nt`#z;t*zSAmI46P>4<0y4n?2MDP3g|~;R{ZjOv zE+Ti<*O$-G8Dp{Uh#cLI{;cozxB{N!J6kmAQt#FwUyh`HUpDCP95&*`-UOloX+gV0 z#0*OB=-qCN=0*q0tX4I%2IeWy6U==Q)J*41w`2wd%xp?f!xpP16a;saH;gbg#L>Fa z2SFF-aaHZOT9ftf>;+Z|4Cub`30hzZa&O!)g)VgR69*^5Y>aTs`f#{aW_>o>zFH$X z!WOhDkh@kIB-uKvHXjAi?1-)+VzRJ~)<1gZLYtpu6z4$$^o@++g(@v9RmJ$^-3``| z!*6r7An@C#4Lw<7y55Oo@N0L-!jt=1FTMw!uvKgHN#*v*vw!m1k{kjEiJ*=%S0Tg; zbT_PfJ!{d1$9G!4BB^v1@$SXP0vc_By?k(Jns9eYMb^#5Wlw{DF*pcj?@GA3wWh!QH zp1w$wdk$Zun%?P;)4wedHBzmXSx? 
z{VYokz?6E8!F&W{Knl^8rPJcEYcr?~1c3s`vTg!YB$_S)Y$SwUcg%37hb5bcpM5Nx znifpKU2Qr~9UYnWj+2=f=ooS976TH$fS-17g$&HJxb%=+cmoE5>^+HN#i{}#^T*n?c zg$Xc=T#*XxEN~0ik@VqA=?#7AT{Q@p>~$NIVVm`-!hE%tV3Hsh%o!v8syvPYm4ND{ zn!DD8GTo{mT5APcYoH$+ir`NSF z#@;H4)%i}jiTlRf5%${6+RyfhZ-}SxL$xE57d=(Ygr~4=eT~G^Iez>4KcYf|%mE#S zgA6*k!xy0=O7k5PIdNGJ&WFY0^%I*%FLM5g_Yewc&cQ!9@QG$#PqRr3TX*0Q5=S5Tvi&flj;^iH)6;48nA1H0(G zA?DZ~ull|@O}4<9tMG6F0L<8wv)AKu?|_lduJSSG917=WjJ^Dny&?z#3V{{JB;ynz zv)1g2tmyuyL5<)_byu*^{+tdkuoi+Da+@cW-I@1#P1QGq#O8ScjuzP7vy(k0uWWMa zBAc#dHB!1eCbrLVh}M?=@0@d4K*Y+N>6unVa#gWF%;~vtdM-i*8)fSR-ehKNT;I{b z1s`)C-I=_?V|Hu*8aq?n5L{@J>fK)Jo+B-ZT5)^{I(My@1vK>9Mp<|-So5z9!emPK ztiyZ>^P4rM$Exsx2KO_rPuh3hjA8vuEkKA4QZAVLbdLm^tbOm3jQ%&~_lY0|huh zoI&MeTNL{^M7@F&gkOt;3<46xiF0zujX@Wv5-{*o|rV-lt z-h`*5{ZqxfB0-H8`Ih0;Cg6AGC*#zS(X-rL#bB1G1I0DKOQ)^LgZ&&cYM&T9X(h7wDRHFT~V)DA*2Nv7g1gwVH8Wf*JZ?EGo=M0C@;J zkRLkZi@w1znS+#=>@W5NI`XIFW#etstTjJ8VbgtCQ$pvtU6$ zPGA4|&wu{g*6F)Shrs5|J`cLe7WfzbfW6^Ba0RD8pzl=U$*GoA7!DWcg2V(Y;frUN zOTH#Q2_q&>lCy-JHfeu2VolhXc(Y8OKY)tP5B0D+( zXvw^?^*j#P{Cig5n{M<@9ThpCZxeI-sX9q489faG3$74IB#+bS4b`$&1#7L#JSw$J zto9&8xudkrOWOoT+WS_t7|nC)$(~bV7peylTe?eMwe=wNS}v|O8_uA9{!IQHCI{GB z$e61}37D)4yG3GU={R+tuXVsR#w<%^-o9GrH-TjKievz0`qK%1?#E`dht*mo(7W4; zL8q*Nd$>Q{F&B0xupqqm^4TdLsbhSdkRCc2tG(8PA~q?kQk{qvVO?kbN@wM33l@y$ymafec6g zNhRbC9^SboTtJjn+f?H|8CUmH@lw5Ht8dz#?u`9cUBfn4te%-^m<0!sUb-R(pz}{o zi*vvBtw_-D%K}WC1?CD)R5;izQqPyrDZ#kk`%YK2Aot1y)f9Wao)T|s2v6a@kRU)W-81qOW4C;REXp})pQ{{&_E;hE4(uT&Aq>yD3pGOlSGGpJD! 
za@Y`A&4U--V(61Gon8A!1vCJozI?|9&SQ!#2BVXqLWTttf&~Cw;rz@=%Nn4O1qG%8D1l#r+)oBKt4zrT*a#$$6O|-n9g#x3d@m!k zW>o-!H3z_mvwK@_PNFQF@14-$yY=l@U+5iw;C*ufij91B*KKVT$>0LAkC@>b3S~q%c%$%6N30MTKwdr`i z4cYpLRV!2uUBjtL0B{JLi~}oSZUpt$Nwf z$?Q|+oW9X{86m->%%=4g6wH%_tXUNS_wRViGk~oMXRX(LYd)`i0%-d#n37q&Y9aD$ zPSwE%NJ1~ywT8xvKsW*u(WgsnisN;y@mNbTLD0hmZCIT`#vc71CXt~^n{}jX@Pti} zjh`a}#?+C}#!n`6Uslf>xqY4k7l5gB8WS0pTlicUOxAG^#o?F7D=b z`RwB6(~+Bjp8hs2t@8r*doNPCp zNJq1cWv}Ti!E!+<0eqOCecn%W+B%1HR)Jo@R>2HEu*A&L9If{|>9aoTmvCr`vQrEI z6D6=JZ49gX25>KJtYk>!10d~SFi}bi*0~|T2mvFQz$Gwk;V?I)bhI}kMFxOlMu3}P zp)@AbwFjuqU$Y*x1)~r}?SVxCc@!ykAG#>pVf00D7_+_TT!spGCG->;CxGJsz)%|I zO_@`;=DvdVDL`k1Y0DX`{LYv>ha(3NQ=pty3Y2mIu&r|&c{{J8RJJjr8%L;(_GoPU z=;xW%+K+q92?*>TP*~(S-mo@}4{&9KlQ~Qv$9(=n?;r=UiVP4Xp63acke}G5p0Ur^8s+bjluddYMti< zFi_s+R>K5y#?NG+{bm7nXX1Pu7qwvD0&(sOPjrL?aGXw~@8|^%2|1!O zosoi{$T{8bTK3I%co@(ZNKlHbG>k44AZ4eH$~y~m`dvnmPOo#7U=>-QgJ12JB-ic{ z)Q~wSm`A>incg;*^}Lk5b;hHxjjSel<v|Z7J{2GoI3WXMp{%ZZ zy1&knbiTbxbRrvSKI|2JWUTBNTkpOqK*DUkN1urop6ZtzP-W0MjTr|$Mt9ew<=q|5 zUeVhe;cQMK{-y8mDITo(LqX*ybHAe(ehG&LaG(STsYNbQU#c^Kk#b~U0XGB}nV>ir zBllr`3q@EdkIJ?%DDcF%V`NMuIz{LL(_0Hy*DX;DW0LvEnVHI`0~n2|j%D+C#xJuV zg1`_{AbTy8Clk02K%yuJ*xKP{Mhaa^NpkLhS*29Q60qVF0PX-g!0cX8Yf+ibH}Q@l z!~x6ZG4}&h<7(Y1%RzhBS`W__mG*!4i6k?evP{NNbQnF+b(X^gVvHLw#up3`fWeWa zw7tU^W(HxFiOpKy)@dta)*cLV@VCgB2p-3z6gVC*7UK%Mw?>}Hsq&j5DC35vG9rMy zdjMcN+>;XOek5@RJzAFj{xZ-13Ookbd1W;^1#4BXM zyYZuFtPQ|;pYii-4ieh>8x8$;1qzw_jLp6jFPgPl~>^h&&7uvJ3tiJ zmz4pY$u}VFdZl%o#ySrg2l{(A1B9m5@u+0smD04P)(3!LNXUAnHd|*^O-Hqim1kK0 zTDcn|ex_r5Zf!Y>Xv|RgH?tf$j70{g(eAtHNC5<89?$@9dWU!AywPU>1V;i-SQiEy zEg2^FaZ1tD_cK%CUh9L;$v>ye__v~|Mf2QaTy$5ZycubwJe+e*6lWWlvPZzC=k=NC z1-#lmo4t1I-pRq}tQ^;)t<2NxWcI}123BR?+k@2e+gEqTD8KWb92x-((fz>h-9z_0 zk7FpCO4pp-JpRr({fK8-YdTE;D81d!2#?lBK|sVw!`X#d1A^IhQq&j-Lc zH*{K?+1I^O=u21Qaal0>lkH@G*jTy_=3tlT4)&$}jNa>SYYuPF{YOFXv%KtCaEuar zHXk3!5~4L*0>i4L@o#ItPU2DRQ?)Qpzw9kS&LXkYON>1VPV%IAVsPha5M?y$zgbQ>3`at7mx! 
z(5%FUvp_ldac=;Tu!_oXl#0Luk^XK2vis1_v&(2u@O5z`06s@oY5agFWkx{)DLJM^ z{M&%(di9%1WI7}?$JKnT3xx}a`fc1my?^abni;T}AzHyrPR<^ki|kZdQ)e88gTbK` z{o^E4k|~GY(_RNomOTo!Wu%ab0vW64;!O&fvY=QQig}F3fCIMnR#?l6X(s3zrB|XwI{7ofjhu85W}$|JF-}!qi4>iiLbVJ z1ikCbC$eV!@C%w-BeKI7w7K5gqUTm7X?3Km55NMva6V;e8FDo7GXn=emLGg3`-%^N zDm1JXzWr#%K(1uEIYe!q#lL%5k{tWi-TzyAd`g$n*PLF_=~oe2at44YaiB{%mun09 zz$6_;Cg=}z6-_tJS(kr@C#Ei_Mjyb%XY9k)p{YlD`jg+6c?VwYZ%}r8wv=aUT-Jb2 z17-vWzxw`}E3}Wd@D-a{8+iaZzDj=y$l*N>HF+dYKs38XrwZ5reH_d>C>jgh3?yo9 zqy$*dhCcQTyf4tomT;`tQvg<5djK1*XpfzB!ruj)$hZ5*1?Pj#sr+7G79hsYoC#&0 z0Q?##^u770+7Mu4V+7aWAMZmCdO3VCb^ehC*PDjnXcUZUfK?&-jNzG^0 zz*K=Kwv_&)qhxo&UvvrFXI*BwH{Dofroa(iI7@>gKJ58;fS$BAN?Yko_DSZIPJ}a8 zop|)@z3&Uy7G$m(fK5jinKbXF2V5(tN z?2HA+)Vg^9jtCFD0lFd*00lobNt3YJ|}dQzc9W8W9{;Q`AX>$PQU;t zocmAa!Wd(lez!^$2nuJ$?=u_aCcpt^0YgzlMvXuM{RC9Xn{bxiCFlf(&;uRlW8R`R zl&(mSduOACqQC$*Wo#ZkD;;ZX(8<^-RqrQw6fmV>UTuWN`~f510bzg>`Vu04iwZFy z_9DzkM9vI_%Yi^s-+Pzev)js%Q1ty7z~j9WyrQB17nw(o_C2i6?fISmlL48fIc2GU z2u24FinO-b+IzeczX8e|8qSFy&O628`L3hv+-IGvvrJ9dC5E=zf8*e6`N1Q2m(k+T zbQVc#d_>ED?2NSSIf~ITdB7VS7~qNVE<*|^7&ik-wnRwrrAQ%gY^;@OwL$dETCQ_3 z4ocj==nqKXshpZA3wyNK;t|ir7v{(~05LU3ux4a$9yppKvh|Lh-ySUdU$-!HAX6Du zyhVqIE_VQ19URFC{o^^-Yil2lAdM1QbQZX1@5|O%F^x5lFrC=+vbE)?0)Rjwu&q3I zD?)3H=^_p-UY;#X(O|Z3w-*Q>aJm?Mb3S_j;t@~#-ur4_O;3p~)1yDzev&dYvMC&z zD|-{R=zyQ-db&gD&Y7~#GxYo_*JLB zP)#S$xiATttyGXst^=lL_#Q5)Lm&N}EiPa}-i){Xr20>~YCi-ghwXS$R>RjrxnDfiJ8GUw#fT-XIP$j;(BKU?MVTY8p5!+|5Kvm4pj49yY$uzwvDK);}s zQfd_nt5Z`D%35|lVP|D=a>xKZC7>n%WNou^)6Rr}j>dNOd(VSU?Yj~PLo4_bma7B} zUp{I2F~<^iE88rX#s1Nycncn|Zg|I7l)MX2b(BDrJn#el;7GsA!P}$RFX7NwXpANj z$;h?ApD>i2;jGmA-GW;ff~z(XV=_uQkBDkBf<-q)V_b`|wJ6omfib7lIY%OLKAU@X zDmAOMDM1Z9WYkxOhOki@Tbug`mhaj~eMQKPTf~)P28h)rlHgHhm{XL90|)p}nwV5M zia8lKP)R8pBcK3Oi75_mq2}-#aV$qV(KT8A@ef0Jlhz_fa?^OXk3M zP}asIOHl^M{pMw^gqva|K;DyZol&f370Vgm>Y6sHPe8i`Osz%bSA9PfBRb(ht!IIf zl<4GxO8P|Pfng2+1IuCe8x2!>3?buX&0KE{Wg}$;0F*KVo?oK@w62|J^|3R%wX0x$ z_q%UjU2!NfD0ssdIpyY8)|Fzlzk_1;oeT{yY3z&(8LlJOnY!{<>q(aJl;?2*l{?`> 
z4z+zvTTKF@%hsG%=O&|RPJoIvmYo7V)+plM^WuBstNp08W2ng~9aqr8=Q+57K;*xy zNlgXXPlop5AYq_JIN)G6Zbfladtt zVBYkzvg@mx(Vx!eN8e9=Ip6q|{NP;`1AwN03f)|}RyM#IxK?Q;TS|Uv3{SeOmN=WrX|QLY^?N(SJ=s)9a2qhH3M0c?OT z(fl^tTM*!b<3bS-m|EV6GH`Mz3*do(i~R8>WndUaK#wA^D42&sAkzc^AhF22+&zMaBVw6BOCZ&)lzV(#XqScsVReom}6EhKDAeX>({)p(W`KX-- zri~MAjG0n22lN26C@zt3iq<|FYGfMB4LGUM2fao|8V^3f zTfh;z0&Swct}(}>084T(Yq8N(*$O~L-T`jeH1w-9c6KW)Fhd49(9Kvq(>$z;XW&`E z4*CioR4(@}9T3fBsgeiAUErXyimUwpsq4#vaRTucuv1{|XwA09WbOhX){V}+(_QOR zbN%#l<6(S+7wJV&asXE_h&`fv=v zh6)539;zpPsOx?9ak9a5to?E_6vj^{ zaX5^hZmsHM2D!>ioXt>hGJs(30g#L2-{H|MJ}B7N-d(ms@P|`lj4&VF*Itklk{`)7AoQ*3Oyl;8EkqPIx~&hYvV}a4MTb7rn}uUl|A5;YZIie>fRt z6x4PM~6J6MhU85wQb!h6l1I@pTLnFhY#(hShEIdzIB$XnU zcCQ47u`U{6VF)+o;1F?U01OLRBpX;Ng2^k#u$ad?0G#@p`@b4fsafg$%3$W6o&;=0 zH8=~D?L*~54dpR{a!BL3XL%PO2 zlo;kN!du(mSL2#d>lxWcV@`~@b6$A=oD)hzG>8Mm!DKu|h0sy9#^!KJotfyUhiIZTra;dexORtjc)~bVBe3TbT5mxGWlBJIomNCg&0P?oE^2qdY-NMSP+hnv z6^BC$YEdYWF%dD}lYM$fV8Xp*jFHT_t_$CE&ivgRXRUXcAbM-dsd10@biN9CF?Y@< z`33fYdC#OjtfzYb{yHMfV2SVPK{|;6LR$vk=Ny+B0Jyhy-{y1GVKfF~l{HWv$)OQA z@j3k?%VMwC%y|0_FjPvJLuq}`|hT8WU(QnPp;ZQljR z?#PpS(i=*ftZAiucfpQzs9Xte*OcXMgMqvA`Vse`C7H(WIklV`c8JZQS4E!5Kd|hx zDiH7qCtEhhHU7QYQ&3=v-eq%TP+$+46*dv=VGFvU4uaFBrc?aJgS8u`%K=N7H9CP~ z1PlY0#>1iE{NfMqCZ}YMO@NCu>VTgL8hS2g%I`8N=1e!AZHi&s0wypNy{+W!ot^VM zKYNEBW&P<+dV@^ln88M~LoU8XYr2H|%q*Jq1@7TSYXE!D1Fm5Q+5>wg;PZ-~3wBLC zYd+RSl>+C&dK!=YPuA_7t<+~Q%6sWy*+bbGWoBroi3^;DH#ws+!FU*s#vdx6@E0sD zc;&}91>D%FCyjen+Wj&P4MocA;MGz>35OMw9;o&&&OOir7B_NQ>%C}*@V2FeV5IBCWrB2DOl2Q+SDdt=#iYErnK z;hE;s-VchFKvMXCwYAg_p5pYG@;5Y;)}%BzJ~pIL7XSLM|N8A-$_0>lr!b$P@AtT- zHIzl8>>0SNI@zw{&{$7~6Ca`z`f;wE;+z9Y@uHnI!+UchW)UWizX%i=IBMtllooj8 ztg~*e6CmMlI>`H#QqnmhwYAYt*P*Ae&|%)q zp)^n9uO+^DkN+>bvwjskgt8!}2yz@mAv z7b+WSsoq&zKiYY_L!YbnO#MV3(4FK{(9hrOk01x#JTt0f1Te1rd9QQk)I056#P7g9 z*(WPyJv#3U-B)Mke}TOJF3H3T^nAyGYGq}@lP6myrMO_;2Wi9 z0?ll&pdO4(zrfrwmiWrGvY6=2slh+=r|ak@4kRr7YCFam9-7zuG7b#}MLdAL)~A#W z&zTWtBva-HkVC_fA>6u=*)WGOF(_vQc(!1eoC5I<##A|n|8s^Y1B`@$fGZ0P5Qr2O 
zLD||jGZ&$V4^Zjenewb0%{b~Pa|J{Ys#+qV1Mp|;7)$yZa{)6H7)QZe0gKYvK(6mF zFNH|yiAZxaY7bk9jd4+O^MES~WFE2z#E1lSwhaeH^s2K+>MBA=dGYOcvdP)CvSYpj zDn(@}T2TS_DY=mf?eNnpbh;`y-}Cw+tsEM2<5YJN^InF=I*FR$`C2L~mGq1v^fP+3 z);m`M#@L{_bp@W^*?s0I4U1GMi>OQs&{C2I;Nwqnxi(Nhv$N$vMfUBJx2; zinc#>Xz-%!lx+FCPbL2Z9X|EUbZ3oilu+7V>i>WmdgI}FM%s}hvpO^dresG2?m7?> zua)gRW8;if5K&2zfB~E%0}J>HcvaCrN0BMP6V(^w-a50xt|N2aSGHf%0{t9Dy3>!6$VyWEKgTax$S9CwHnGls1trW+CV{h(ZZtpip-TmMWEbdD zm`Q*`z(v5^+WWi)D^DGo_-xHI`<#x4?PNaS0rp^?H~2I=w6|V(TtG*+Nw&p_Fgb}KrZDQ(Dhvo zjpzqYTP>jPrO{<7EJRKX0096HatkRcCo;(K0i1xu94ZQ|4S!dd8FP9*2ASS9VKqJu z&7G37Qo5oMo=;g3c3BSs47>!qs>LO^1c*act4YoT=cKM5>h_E78($?BlwO-63AN`_ z_M))WV**ALB``+OwuumUrcfyf(NBt(fOZg|&8XI=vLYobo{QerNrsRx001BWNklqPcMLnz`7fK3?!VfIo1fNR!b0% zBB0lJ$zNwC1$@0%gq>c4bAVFW`2q!O#&o)J6wZ!g4&OV=+T0;?LXq^EtNN;?R|q=ROIr z>;*@xmvyC=I8}nW-brV_yD`AzSu|lotXX?&%@aSiYyHpRo;lU^d^08o!Bv0 z8|_u!wBP0lw)kZn8U_xNQUcOg7PtP`%0sFXt)A5NJ{Nrk)&YDqi3sZlSf;FPViN5p z1Q;JE^lZiyAh&VPLV1?O1-PdII4kUgk%7l7m6lb)0s#0v;W+!u6iEAe06xwHVP%W~ zfq6Cy0ReK*MmDC@GSDOHJv-V^7Afe!6oyvjGB*q6#JGlFcDR?%DHJJi&*!8N8ix*g z50ECxWdo)4QJc(IP!z@@8UwtmAr-wMGdi~k)t9bxK)0* zIyCqZ4bi|eDqT8zc7%H`63HR3rmSIb@JT#Uu} zHDuJg$tOKSugZAS+vFK2kToxO;~C0AIMwu+@!)g6&8rSjpLAxd1sfoHL=KFV@m(8p z+~MC(-4neh5Jc&GrS#Og5BMtGrejp=UuFF8*Of9xw4zh9^ZtfSastc&ekwrXbAeO(gbjre zz0Y+8l8vqB#uw&GreGqvoW7|N#l7T$qtCgala)SuUP0}p6wIN)1b+C=t{w% zt4`tGd(7MXW|=Yl%l65@)cv%*>1>hjU=&qA@Uq~Cb>VcO1KwkI3UaCZ$k|~Rt|S81 zasN^djR-OZWq>WRG6saB80G>;NT2g$ff0b2k+gt7bSJbkx|kP}Vq_qkVPp6?FG|#0 zgP|BYfa8vFwcrE~7^6T+e{vGM6NqZ_@l|g>kJ@Vm{JC0y_15o z8Bx^9nA)FIx09K64uoNT#=cAs26dhH*igaVk>94ML_i{EVkzvuJNwey5z2KRu;>d;hjhL=5qQIerMd+?Tf zL{jj(w6e^HePi!ZmQQJDe2ledA|R|>3Y|ov$urqhqJ)RY)I6VR>)m*e5@xu_$#iUL zGmPIvpYSO~UjqQ|u!n`Q6!7q!NGhj+KCu>_=Xy$gwh1NAWC74w_kJY103^qa9B~Ll zzd99~5oye2-uxd>FcyE;;gD=FGGubLX`VIk?xQQ{8@yIF>+a@FbOoq8)2nm4fB0`OooB}N z>Kb~WL!I%RpqvQur0bn=mnjB71d)Cf{rZ&N0pLopYIUp>2%mPm0sBXH+LO|L-q~0K 
zuhq1J%n0rYWU&9_lRVPl9}==Q2Q0@!m?KHgRJKqrzd?6s1Ga3kbSuZ?Sn|<{2(|JGE6wZScIX~{BXZus_Zjw=0IqQPqDMDZpsH9vmy2v@9mo0F;biI-q08|N&^$_ilHB&NZ z9ni*lsl7)diiGneN&s*Y3{esa#k#Kt6TZW6{itWuwibW3mtw7voem8{R7WaG)psI$ z9GbHk8guntYao+p4er!9e}ZNi5rGSGECQ+)x>nPry(xP9gBHmYXMt=xsN0-!o)z&!3`Xv`0-$viz` z{>EW$#!YY9zv%r8BYh+znA2=+WNt)mWuF*p^hXQw-5!OfhI@}kYtaj+SSJzdqSZfZ z_BX&m7mDcP3yo@Ic%J~B92(#QKb{SYd_w+z#%Bhi(TRQ#kdr|qn;Zyy3xxWq=?c9- zCj9_+>@93Twnel#(w>Fal(*Ay^f5it{#5tKNP9=1Qf8hG0N$;a%q+g;%&@I6QrEB9 z;hOc3JNjOA0Nb5YQ)805S((-jz1k#Bo;loC+n*OiP4Cs{q=U#?6Lz8Qt;(GIQ*MS% zwXr{v=NaHKhdF<1UWqW884sOB&sl4FnBJ?Ag6ogo7oEuO%rbeeH4;3N@n$oWBi1F3 zvjBh9vi^yzY&otzg8@0)0%n}Ux}Ubk3N7%DBmq4SH{v}umM+qvR%RYu*keH}*v4F8 zQBMDzfXSD4Xb?B&gaWBHQcXL-18gwuni{7J5RtHpcU4FM(!7ErPlqFIC! z*jAgZzL(Gv3UrnEq9iEt*b@N&J%*>2NAm%5sL(a)89^u>q1Y}b- z5|5Zm0Gks8Jl8phBCf2UvM}@ww2>Rimr?XSGKr_?1Wt9&H3zclbMk8}3|d(-*VQ43 z92ErceTRX1F4-_A^5dSmCvtq0xf+*pEqaRnGd{p3zR1{mmU*QRS^a@G>!`|U} z_@&dx?>IDpV8D)UU}%-lp|SPSSb?n4L7WE8&php#v6FoR#+9T1RSfv-wDc+2Mt4CQ zd_=!-4jAU2xzwx(sq#K$)WD6%u`ZQsJM6Ra)^+%{R>k(5{z?wb1gq_Ny9?Npakh)z zB;yXZ6m$`s5QO05&=Hkl(*^z}r|8O|ca6XzAdKIAE>PS42u_NUYckT=Yp$s)7ktzC zAM9~m5X%bX{E)@|ZtgWEX15<{_=Xk@OVOs+@Gcxbiio6T-2-1I2KGC}dDc>2Xw&|^g2$+lD6OPK2C<+Y`C|fNcF?w!JLqMjdknNS2(RGe1XVH418$*V7tr;4UlPgYMSs(nsK@vrIva$Fbw8S$&ni54$fW{Sd z;lUfm%qXD&qfUH4;>d}aYqB~t z@ofP?qoW4!8fYg31u92ztu&zu=Lo=)#jS@WA^AMfC+`Hb`; zV5D3}(59@_-E&;tJ;tr)We?FC|Ivj21bLwU@3j7(kzyGGj?&tR?t9GbQ|~CS$IjyM z&Tq2Ldrthi|7{&x0VsOYbAV2KPDij;beMl+ldfPdy&Hz1OJptuTsojzc81R5)VDta zkJ24}8;@tvA7qJcu}<)b;Gn=x<>KDgei7H3cO|i9E3&PCw|CHE1%JuGm7v((x}|Fb z3OH?dgJpt7g4ysGyiy=(mW^3Q|LAM9W)o|YL5I3ez(vrN-9S&^8@BN|9O#EGF7V5# z5NMTAbFK9YYYBpInlxJhg_I@cRQiGGXHEQF#cQD{ z8vqD{)+Uj_5}rF<&Ke70;y3`r%DO~e36ysNSCoNwV6Mu*I6V{qW~7KkQ$?o& zO3htnmg!x*HJ?M*3wDXL)>`yz8~#s>=hZW(PMqr}8qH~>53S#mbELcjj{|D-?bH1@ z-3T)QOJR6go(MN0&UZnJ-*FO}ixz4W|bqyJY`;aQ!HQYN~+c4d>wPE;9ErY1cH z^N>$LHGxVPVxF~gW<3MB$!@jW?Xr7AHW2I%xtB`TsMoC*uq zMqy45Mgk-~TlW~(RL*f0G`tdiLL$1tW5pmA95}bQm6Kcr72b!D2rh6?_acfGP?DLU 
z6hv}dXJfrNp&j864Uh^{0|^LXJW3f8Q_L9B*eDcq?EH%koD&&xuWK=}pH8pla1vHZ z%^n>QhYlb+O8r|uj#I5DfiFUDpH8PBOKEms))}SyvtE<(M>h_NDE8{87xA)@TiG6m z&RU7+i!fR1%K5CxnN5nlmqEn~^}7F2PNFrhjk@_T82%}-vF>EWm?y9RP#EAkwBaxR zD9APP(|rM$ErSzZrm)Fukw0Vdem`^*I#=rEJ~S`l#-RZc$$OwCz{I$^4)4-Sew4$5q z#)$W734ccio|SEDK$ocvM{fWQ(0$T<=j^+mURR52;L{qS6TJv*Jpnn<#2%)A_Pbic zKArQE`*XNEpG}~Ry~mHhRpmwjd;Cyvg6#*y*l9KuFOx&Ei_h6lc2$&KW)=VV@eF!} zPIhfFZ>=gh^V_?Xnu^5NTtvpu_^bg?>|Sz0pLKXAyDMwJCU>q~4$PKK4LFX|tUvkO@bKj<=Yf3@KZ9g0Src0m#Vn7p5`p=cg)FgKYQdIFcVoUAWfHgUTbp1zta&EXsMz_ z>7V%){GqejJ1+Rckv*H)I2yzHeYk)FMYrr7CDGW~Wx;Ff1i#2C!L-I{k9)@+kO6b7 zQyN)=9R z?O+j~!octhtoQJ+*T~FO##pcKoEO5^hF1$r!9M9}jsXcX#fnKNa11X3Dnla5AUg$+ zDNT?iaxFvd-S!;#K~oCD^F#=gAjtxVil8Bdn-avtBCnKlhjXE?k~H^=S|}Cc$jJ0` ziun=KMzebBbGDRV0qzV0!RFyJNW6RL;4{MTbnQNQ|8y+y;Fbc(p#j9u6F_7z7(qxFV zgTrK=01Ri<9OvPQSA#4w!jE<{Aar#Pjq%aX#!CmIk+}g}WPP3$)#hNjdpb09N_!Tc zo}+Sx7mXQEv~NdX2UwxoIE!Z@glFFIscX{lHtgdq`|ap+vWwnN8hW6MK%t-^+f@15 zj|yzOi-vfTv#lIjh9B7SQxGEE+R5?^FZX_dt@eq_ z{>jc%HjS^~FF4lR?qt!fO9kdZMRbFB0C zYPOK!19r(brO27#MB_bjS#2|=-d-2M2E10wT9L5;X~5ygS&0T2l2?Er2WBgZ_sTiP zr$t()e)1grjd$@ZFzWrHi0Y(uTNJ^nwi{i@D83LeCR6U48!FELnhX{Br#od(taX{< zGa0?fJLX}%oSVaeHcrk#dxN44`q91ZqigfJ_t87DP}cOF2gdC=sPh0qI;rkz>v)aD3Kx`EJSud&))&&?-d}WNbf%@ubh% zKbMTo48vL3-=lGkj=2NO0qOQp?FBl*0O&=o)^axA(=|XXKx=+$qno33HzpYxI)>is z{JFixCjzPhpuQLNm&sIlV@_>sKML|q+c2|Q0=TME=yZV)bWz4wr3CQL4$m{cj^;PN z#|!AMGDe^ozs@Wve(-+Ilg1ENj%c_tj__UGD>*iTP_Hs}1@K@9dI(S2AA@EbN_1&U zLym6_qCl4G&zvv#r5zf~Bw|8gGv*kF5h}_@v0K>ox4aTq&jK(oO=T;TLPCkib+dTt zSR64=DNgUE@QO+^s&~(#@lFg+@hTCMuB04Aw+RUF$Egv`_g)SNrEC)-utZ2XFO)E) z!HMxav_dD@EBCFSj^cL>CsP!RVz(YjSwxl@-K%?$ya!NXh>TH6lH*$YO9~0iIJ>fU z3^4`3i8KDM7*!0c$aGZ?zwTw}8d(&XDKf#ar&O&W8UU0>k+s#KL1O@h(V+AxpeLJ) zkNDsEirR2mlv^o@WcaKp<0w*ENf=r%5atH70mjObIBpE6A9J4@FwYwJWZ(5iWn4uu zx8}C@zGMR+nw_)EvBPw&VX{V6$t9yq*>hBF(sn2;mdFMN??Eqz!3iLMnM|Q0xr~XM(0CNRgI5ce1lV`ni&QG3IKopO&tuldgkJfzl z+*7j3dI8b)#mK(cGpfqL^Y9ZJ*ZCmin+z0Lp8$E)Gi)q8V-F5FpACK1^4Jx`pVf0` 
zX&7M6K^A~GYPwU$$F*b?4Lkf*^8%6kt2U*`KOLitjQ$g>n{B22O?Q-CrDtSbWn}19 zjtslOVZ0jfT!UThtubfo+g=$C7oBE{f-F!09dy4^b|yOm^uuDt4XaITJ9Rgw*ZRyr z@!54gdtK#zO7>*zIB~BkVT=zJlo2Hh)`b&VvnUSQtp4bF0dbhd^#ZkZ+1qkFzq~^u zf+u|iXcfHxU=c?dl?_N8Zgdo74ZvVbKPj>Po*9{UD~pwh5*EKX0~lDO^hrBD3j9i1%JbwzPr+)@5;Iug9tlWp{v*wL6TP`HCLYh>1&OjqbrMr570`MQZ}3(0Bi&z z?(Tb;rKj|NB_>zP5I;?aE6+zW;7}Jh!MYA{o}dh$(jP$RD_$sLa)CZIEn?GWsTv$&8wlG}m%WwWzqCWsDYJwE(~AD7pDb&`n3u*nt9Xwht)EmF zd!R?1sYT=fJZZ;AAz=y|`Y03`4vq_95e33jl!4D|+66!=uS+r2Noht`Q^>D?YPHHr z**GAa8=qY%gGjln9~NaH(AHgDamK+ldF})b=(E2HP%$40;p)c2D@O8D*P0jS1W@4= zTDvtm7U%%RfLWP0bZP!%KonA~a(ifwoO}S8p|EbUVw4ugumd39ArI!(0kZaVb+|3# zgEzgy+-j7;2;fUflu{H;r{I(<)OMA@bx$?fc*6hj#LB~;TC=0)&Vay-Hu;>RQ_EC3 ztsehA6O>@AWl%UgfOz}WI1VDi{`m~;fFRjdQQ}z^u(ftMG_L2gJ^{hzKlc~m#Wn=b zQoMK@xFD~+v!(!|a!OtC-ku>lqV=oqCukJat1uf3rc7>x3ciS zGks4V*r!PEw=wx_SxxUMSRxn#*gt77@b0-kc^*0A0E+OF89Z9XrSqleN`W)g9`p&m zm_T&P2?4}vi7c4LS$D6pb$Uy%1#cA`e5FT?N2ajiVf$)aJcCi9mmc*YtAFA<~O~;b_lu_BoYwg;LtUI zHmtx_u!FFn@8BsQdh1-bo^LHV2yBtL@jJ}`|x>C@!xn(#R4F(kr zY^-x{CnUcrBl5gDh&e=6HUPN96YpxWfY0zGV&tl9=qRH{#_<5d zAO&5t4i8X-K5K*R%qFgp!aKbDj1}gv=oE+@oe3@EZ9Gr%f>IzBBrH2E^(X z$ypKnKyz!@A3x2kHR|~{fR;12^miS+A{%a&!Kla1Qni;F|d~GRO48KAb6mx zDJQwos#ndb<{DM%*MZtw*_p0)Ej-NmRn{MN`-C2wwgUb3_BgJ#jXgqpdt;Qb3cj$_ z^cEe~9-um4x}QDc1k+`l&%H#&mvv|m6O%~eVtU>kpu^aG&wku@mBZQl2)k}#wGfp~ zJhLxo@A(ObT3o^?6VriPgqOe(eg_d!>Hs1jVc*Pbr+jw&6Sx7^wP_Yvw^nD;)}B#R zha<+T&zUJofgkWejtn68m%sewxAwLK#`|kS`?Gf}+M;hQ%0-dQW3446_~z&oZ*(Y& zQ~OV4i+%td=b4}hI#41VCWi-R)Q|xJsyR_gCm2#4RGvo4Ga7hJ^oem~FglbHfUvfB zQv?r?aB^?}nR6p`?}u@vAk8zHw7GN|{G5kb4}Y{b<|?1xxxP$I2E6P=n~<$hN}Ulj zPv4Or>m`%L8MNjM7`;fo7#W6`9HXo6(UW6iFNDm^*5Rz2#X8~T(A+IsC_^)iyt%9y zsY=f1pK9~V(3EZP`zhb8>w_jTUNW~zPy}1(MDO`(I-*Py=L`*Xy(1%lgR(Q3f%N4W zptw4woTZ=TJgPf#ws#MmLZ*^GJWH0D|N7 z&LB{0GlLxFzt`z3c`AtIezdeVM%IZ;;O9EvTEEWMY44|XP`X^ui*qMSvu3Hjv_n%S zW~RUq1E>>8M#v|vDz6IFyRWANIA<6cr~@u6;?>jMSHsjk8o-7kDLUagjN`q82r%+a z>2*p>q;_tqzhV^29uPDNh9dV*W=+=MRpSPNgPGfR!@&|^#>}4Kdmg-!0RUMx0WczD 
zl&0?;HfByDWuL0UfA{*Y&%@6^h4ML$ytH!yA4oTUkq*B_X_QA$n4ASsFcB#VF>pxn zGAgdK_R3Z$nAr+a_Qb|bbmiD`O3!4~_UKWz&1Ol)4Vc9f?fvL12H@LTF*g1InGBQ9 zEB)wPmp0_)bmyoXmDF9~;ZLq_&X#w~a)lgqpZmK;C0$<$1TZc;!*Jq#1|2=EC)p`O z3aGhP27*lEp~}!a+dF_ZWlEfw&NH~{&=j3rYyBrJU4sY6_v}__ZRsGooKbF5^}9+a zfLNP^m1zO14Y^Rw`gIf>BHv({D6O1Y@WrV_=wC3F+alD)Ih`A^Es zyPh5RpTRft^)LPk%DhFQ_Wj=S&fzPTg%&x{@3 z-OJiWx2aF@33)O;!ASg!-mr!M47;SM$=s7uyK;5_?+D7zPM+Uv=l`3Q^k@b@(f3>F zu0R%G3yAZ;fv=eBYTfXUoUUat59e%HcqIeo(mtBIWgye51`yZwn<@9wtb`jK-3R>I zRFBStaCT`~74S+s_Y-89Cd`i!WoOjJi;$GE-&!O0Vsu`-h@(x3ozX+!Wn+9#F;Za0 zX+ITZ0|-kya{?)Zx|-aL1pP>!0NNP@V1i>-_m)XBkwoVvaa<_voHzG&!Z+Zn+nJi; z+JaLs_<`c}uXMQkC}}n3qC}!g9HS@v2LRaY&p3Ow|KmGj;iQsR(JSEFdyLC^`ws6g z;?*t#3`It*%^64LN3P-3cL$!_xBfjNWg>{28U~px`VByF2so_36$9*V`){mqZJuY3 zfA`Wi$`b21c6Y7M&NYVyPp=soyeiuO%n82CGj`5Cf4$E8@D{xRP>~CILUx8T(~03| zRpdUosD8dI6Gso6b#6eFOsf42_G{1)N_0gwp9CR(6^ABpHJz7& zVR*tkleKvbUJzL1wBUtgtSkmW|HrN%S#5qrd$WdHmCukOP;c#Uk*cR+3j>eh&$ z<}Kdm`P%Xd($a?<{kk`&MXYTi*p7Ics?CQm*yPd1CKC(G=I zY@y7EU<+`Hwogl2TeInO((+Iwm3eP^tK8>$%E6EbP%9d$Ib zmf;}x!y_=r8g!Xi8W_a?>0dZV#;(q>>)t|OpPi(WW#`t;W?MK*&=W?274I}#ILjyI z%TZOvj0RWs)tP_JS`G@jkS&L=tJ)B>E!drnX9r+be28uuOwB5TFYC}?D3K^atu9rH zs&WJjEc$E1IWI3o@iUZ{TwxXSUvLT9h>##c4aAHJF z2oU-LM-(~(kB%5AKurh3J_F#?#pjnssIfu}rt|5Z8{tx8lDL)n7bZV^}epVsp+tPi8d5vCwYrF#~o?p{jV zdXO1>R4Edr#Avn;g+agzB7*ot+h$SUJ6X>ZIt5Kx83SGxbz!_28-|4p1JDeLHRtel z5F;Z|`{21Dw(Qg%aJ(WPU-8-idV6Qkk`5$m02o@4O~zXJDyM-Qaaiz+NUxwm8!b7- z$}Ip9C9a>&XzpcbDu+}%4e$U`BBoFFZ{cS=$9B+X-rc!c}HJ1PSTE9MevZXInY?{@)L|LB|HR-+5^6jL~Pq%5I~#IkXoH2~0oIQcMc!v(Bui1J*K3UhJHrd|Ku9A1oWoNvcZQFl^U(kSk zQ8s3MuR5jHf;@6y@N^x{j9<_cPj%*9G^tfI-By4aKK^Zw_gBr(P)efEqEDO_?PTYE z3ZRZ+w}H77>G#|~cZD&wuB}JA9^JqO;W8&qfU{f($5Ro2O?pdtu97+-ep86 zA+$9n0w_)B8bGJW3_(O|O1RBjGhI$;6V5hq0|v^#fL6-msQV7##Po!QvXHS7-DiX- z1REtO*CJYs0tF&!tqcIzL&JF<$hwN|;1HtHJUE4=vj+73AL-A0oXS>@(pE-W-RSw5FD@!0M?r-ZuNp&EmwXI&b z@_~0A827!_@6+!IM4+KHMk4?T7zdc^00cOY8BV2Z{0&sNU*(_~ZwD zbF@1r$@Md1?Qi4to8k9Nfg;Z-Ts%)(fcHiTPaS!N&%tvD;}j`>^n9Fq27uq6Pb 
z=doKd{%a}G8g%Oa>mIQgbeVdQUDW7BCY6mcx11WkbEpM%R2>M;0^V=|-YFRPt}@J5 z>r$14l4iPEhM!}?rsQn7zn}qUhMXCjHp&0+5C8C6We0i)u-Hoi$6V=ldX{(AwaK~R z(0DIC&XFR=b$}(Ga8}O4)`t4kX_{-gCr{|Y_L3n0obJJMO1U}P|M-vp_|1W>;RO4q zG6LpOqF1AvqYeIe#xps+u$!YAj`Yl3nzG|6MruZKB@2v(^b=escrG9$K|xR1!vt^W zpe9R`PDVokQNeBaRTdh)|E+85S9E9)LiEw3Y{E*wszWTAgct~H&%qT!-&x#u-czKF z$BN-avl$Py?P@-G_R5Lw)|}?$>c8J}W(k(}ml0tQC{3jv#-aobJ#B{j;~)R{jrj;r z8?x8zhqY>xoz{hQh#{0RPz;RE0M}QMCSwYWQ<5?~B0gFWb7VMF^>$~tj9W@uhcg)( zqe*YxhQat3Vl1u*S>05BP8{Jn1Y%NgbP%?wTT%sDmIg3`9G_>8hdKi|)i zGvnsuaAMljN`ajz{V8)m=`mhqWsDI(@IAQ%P&wJwlXC@Z$O_`E_EuF|W;|bI?e4Dm zo%_(#+64fuw{>JF$pM-0^Mu zb0TBUIh<7mWx!n9#{Q}hYCU|!*HcH8;aPbnd9+@2&gvSn4C4TGbfX}IchiZm0DZ08 zN7NqRYd>iYMSDoztt;^jpxs`eN`VS&k~K0wx3Cv<2fHf^LZ-rK<_QGLGNJz`_j=5= zszOCM6MD|90z4uJ&S?aa>4>#)!_m4QIfP23JX?@SU|K+ter8jxF>EJW!>OT9;3L6X zndy!oI9gL{FL+FMdrsx1?=G{qhU5w^kww3Qv#kdVkIy-KvRAMmT|a-j02n8c z?z<7P%JpPt>@BHC^8PMlsJG|?dgop<lsOg!Vz zT&d4*hmLc~a8JX$Fu58w4p{b>ewS5AP+r8W8v$pT*bY9`69_^rC2-eLp!h zv*g2AJg+@OIWvMF3}?Fh-3%1fp4;`FMHhB%6k2*F5XI4=pXuW^*9PY2n)=N9 z=z39i*@SrH-5_b@rJR8+9!qDMgZa~Ib^H@}6aAHWZqEyPuw(ef8gml&Kw|4YZKA*5 z?Yx;ofrwRp>tM*S!N1ncwRn}xny)fZptms@dCP7XIZ4X4vn$k zb6GRaB`n9O!7KQP?ZO*1z{r8NhX3|&|Mm^<3Bu{%NT$E!9-7kM%B~pOt?mj8f(@UD zLg4T2IeFLOpM6f*8Ib~WuuzmPrvfR5r-)a8P|eqw#YNDSyxW+u#27 zt<;PY1f($Jy!;#@ElWjOycYnMn%pYUI=e1i_sYPgFz2;ro^Sj`b}MIbA0WaoD@ma6 zq+|gJ^XcHsHB0AyktXY=TxiWq#$R=tvtA6o5*1Djn$A5m#?hWM&J-|t)Zqw?fJDHH zBS!g{3uQ%#;~V_ucboj{CX>SQ&Kf_w8$_I0lV9N)GR$ZL#^gkFRi+HjkT*Q#Gdw0b z7U1VtT5Ev64mnQ@`4KJg3x@^I8q*s6_dR|fBid8j^bTB$WYX(wKxIR8qdCb!(Fe6} z1x|ood$Q;mk9<(XD6K5Sz7pzJq4JEDR2u(|LZ{pb1fJR0;UlYeFW>w7(i{t&RV{llKBJ5@skgXk9g z1CJ`NRBCMOc*z>g?E)Pj$}z=bvitDFmvv|;`Sy`u!Zlk#5r_^0b@tb+E#Qur;zxbo z-Wkz0$`}w4-BxeScmN;j$wd|Jwr|cMxz7T< zR^jMGSgB|GYt(YChlz38M2f5TeAS9Fhk+wRP$?$L#`q`z%Dl2N{6Gm&933==UV-GR zf{bRy+KI;izyvTkI-Or&9Xqjj8muj)0DPfQ`%0{V=ljP9m&T`bm74t5fBn}tp=U6* z&JkhQWhF&+*0XAi1*jGEVCa2MxuXmE)aH2}d`PjQbBDXW+C!5wQx`K47T|+&mYES% 
zcCefXBVz)10il!@rw7d%!zVT}{wTTr5%29#ICK=fvi2gkHGu$tl_qc|$P>QsJwD{j zh?LnA*M{Fu%>mDl0UJT-o4tK~MVe8chS0$Eo z1m_#C;z9i0ehPE0Tyq6V{hz#&!#X?BEojU^tkHna@P75)vl)Q-bGYbN`Y?n+0UbW}Rn$fjrX3o}uUSTgeUdB@wD&xq$wRZ0u+nQlHk{5I*Z|#%G37gq* z&nPR>^D3(oa1_;M``827km<-?wQ)z|-LhSH6VBpLa%k9HfiZGtJ*EyfUwVeVuNecL z=hTql&JMdej#t;0(VC@#1yW@$jK#hULD~YgO0pXZwDgAY*m}XEx~ksU z&t;wHejVNDIJSp8pgsM}ae)yESi9bO)sO&ofr-DYL(^F*1Zb=NGSCiu1A;*9r`wM{ z6+~lZ(4^2PXd9(~auHT#Mj|o1cwVb{-%TY_yqzcy)Yz~@ zKMYwQXHTG9-`OXa-Z}shF?$gtzty&ThdEO&Yj5!_MOul7NCY4R5P1(JWc(Z>UOr>X z@sScW&&o-zDtlw2gglF)W+-d(8Tep`(2$`aoD7vQ8LN3t3+~fU>l@zu} z5odedLsO&5dFPvEo8Pg)Hl{=kcE^yZBDN6zq&=Ws~qCK+1J z4g*L|&5;c#T8qaxH5_f%kZ=FIV+Jb9P!yDyk=_6-oq|SXx?N*F0tJAOXBZbb^Un0Z zlrig(^J7fr-j6x^NzVPIYsd^I$={u|(Ls;oZ321s7ifkJkjKB=8B@K>26+D!fU z#DSR(-yA>MTW{dLwc48BYID$$uEfLmgS^=oZv3n6i~q{~L9Dj6zVH}17Y_+07g*}W}c(gG}a)Ia);Lq&F7gP&w+I5l{;w)zvh zn8VDxuJkFOFVhZpDNmpK5@*)xE^Xas3>;cKkRy23!CRd(^M-TL-(F$!?x+vgjiWr9 zPgn=MBohUba(cd`Lt`RJ4>&Sv@qx3tTTO%0*##7zC_vuzQJ07(3;g8XJb3dVBtU+i zJFi`>qWu*91`5?&ilknRWMgK4MW#ojT#A)~#2o(L#y!rA$OfUWf4uin07t3evNQkw z@BjWy@U0W!_0zq9kgnyRsU4Mh=uom(4wHE>vcMRgFefQn(b$Z$@oF3Z^mxAcZj&tWMdG=V|yE9+IXwT;2(LP7Qq4||vEtuF@;4-;61kE15T zvX$P=p`pM;DeI&Z&CAf>F>;`W7brp>vFla_%HWDs#<6pYuq zjFD5tS?W9h;6`=`9~E)&z5wDA(D6k6e#LtyO#`*@z4Z~@7KIcI1jt0*WrSrt%hU)m zeA1~g9)QRm8BRO?+6z7vJSY=No&^uc4uf485wPz^_LtrSo_v=>f%nwM1O6g^IS}cj z$qVzdsSl!O&n3^Cgj)0pL#?~6|67^wC)^}t#_xdytl}6j+oCp zbFaZljsaOrSLML%0e0_mJ$q_>%&UDzQ$N?ppfzfooDG>AHrhI&`-~313NEEvyc@U? zOzB`|08a_4Jqdy(W#7xfq(`m>uU0VZ|M*R?hW@RwL+g*X>}?WkWRGUBDqE2g+POsq zve+#=VhuWnizCr{Jj?er4G>7GGg!|?4`tizNL3p($hjMZH{Y#&IM$;QX*NL#y8kQh zmi=U_W;eZPk6$#Rv3{=6%}Z8?TnL&7K%Bu9@6yp_L)a|N1De}cijUyRdDyr$MGKV= zHKLL=G475|I1_yHPCRVe0G-R;dbf;?;A5C4XOW|TUTDZ!CpT3ht$1ani)WY{=ZRjq zbJW5aT8CL-Re17Ai~qJc2*%=RW%1<2vjy9fowffBy>q5iq6l1Dw=d_=5U6@z^Z2E? 
zIx&59R7!^-G=br*e!64N87A1fre**PdYk_tI++|KYkEcB&`m2F)jWxxP00BkOX8$ASlR%XLNeNd& zFRH@XAp{%)sa#R}E_3L`2{BGT zbs6GBTNk{i=6MDY&>IM*ctpZEN39Ik;s5|307*naROaNFB8Jwoj1~pO8Jg|2r$hNf ze*S*%H3}5v>tI2$$GHWdfDgfef+sS0pLA;I8VAAxIrQ4AKuy3qKu+%IteMp)+NT^D zXau4F-y(eEvTW#jcFY+nmi^)gk{NR6ZxKeae?0Xo&ia^ zM@C!_&pP7Ix@!rNrB~2!28)is-mCSEx8|8O?N4%gJm=ZkdY`e=YrZ!&_M%`FJIO9{ zG78+d9^eB?%OK^5wr|1!~>{#%Q45_RfdT|K<`Jey!x8^H#i!0c~;Xx-lMixSFR$zXM z&T|YoH0hYFZ5e0B{OY>0PAW2>g4^&7`r>D@V1Evqz&4aUGcLFhUy%=0Kwr+G zDZ)%BEUKtGC7H39#3zs#B1fj$?7E@7V?p2b4D%B4;Ds_895uiX(5)Vlx9&&O(R?vC zFnZ^}v`&uaBzY&A5GEj+GN-r+aP_%GUXC&|#@9*kBCeG^y*do9s1H!gJC_Z*(nSaD z{NzxifEXEB8$d}UmXej;WsF2+ImF)0(Wo1a@BHllij<4&$Y602tCOEbD~E={RdQp`&2K&0fA_m@ zbTS`2#Bs)R=uL@pK60)muy7AY#`OT3(wg~x2G{#+>Jv%v42ITz0W`$xfGS_MAMA#guI zumA>GRznV`%#w?)CvQhQI5QXi<^TenKs%n-w4?oB=$Ib$Za?IKjLqOe2N9=F8^e(^ zvc<1!R_O@P2Of@`nDt%h*R4!W4xagU_$c}an&2N9xPl{ijHBZJbVBFEaSmkW=mz{C zc*C*Q0LJq<+oIWmd4Q;}QSLhOBw~3K!%)`7Gg4j|J^%MP`c=>G9&6iLapD;)PT$_SJvlVGR| zzwbH)0y6k69?m%+kNBLir(-z2pLA*%G!B$3Ej~Zv&`i+eZ*!o#$hhVJ_zhUVhvaKZ zM+SuVoTWgn-=0rDpauQtU4kZ}y!g0H=zxZ7*4#@{a3X%MG;YnJ962M`SNT16i_tJeEX@KuWNiq|Djz$AoQc(1(^iP=vHfwE|mw<%biVu9s)NU830o7u54D>`@J>X zn|XGkOg_M5%=DMCKLwNplfwm`aabadqung7qh*J0%bFO2fZ^hxmCr0k-9-)M~ zs3C?2#0d(gNwg4P64@$qFb@c=9E#B>J7Ww*1v4-yx*}*Cn$q+P9mT<5&2udnGUKRO zKv7H2?o6Lb*#eqCQ*Ez1rF$1~*_}tprce7R38rY!lWUYWS^Oni_!yo?e&8V19 zb=rQ53@RhEfl}tp_hr>MMGQWkRDNLH(S-ssf9pBVi7JYD#8YQ@`R~`ilWtF)U=j4XGZBQ9FtxrN zNV}fJ06?7tXExIJ-pA3I-6jLOtw&Bz86ayR zNTp*Y`$k94y;!dtJmcE}<{VI!48Wp|-^KukD1S2MZ2A^RHa_%&(a1JuMj#H|e8#S^ zQypxIhO#v1<+JZ`{uMh-&e>o3jniga_MXbZv9spr2Q7S8W+Z)pC$H>PGC!Ea+|iRw z&7K!n>UrL4ZLF+dQg&gjEe8l;W+)|0H_ z<+CNT^SSYKdw1apwty_aFnC7Qk~Q%E96)?m1rw*{%Q`f`HY0L$CI;pK@CYF$5s?Pg zuVyr!n}yd;$f2=`i*l8PA!NK?QA-L$`Vb?^sxYz)h=@rM^{bvY!vv6ehO!LC1>l!e z5rF`NuXx*(gKQG#p_ZPM0%m0>G8%hAR5U_a6XhulOJH*_?g(G+7JXqjWwsc!4q`$( zhK2HEK=B1YveqD{?C}$WoKs>w(-nC|4w<7fK;;#U0!1-Oz zosszfEzmDpXY)H?WxdLjxW*6JrWgHYo0addBcj9f7T{HvNre&ctLm1 
zQc*#d0b%Un-Z?&7bI*yfrj=*W{i5vN>w5szbJ;)opKi}VH6{%u1pabJ(&yH>R_HZY zu+{}`a?Fx3*Hl)M4m4(CtMj0D_{aXp@CgicF!H+BXs&rY*8Ur1qJmEXin8=)0A@jq z84NTwa=;luGizflMarwXc$HxfdkIXkDcxr+;IIzJ6+lJvoD2NG4tvKLhwLuj=QtZ@ z6&mcUOffr0rtNR3i<$q!j$s39$_eu`4-WPJwUpI6W@8>SWzWrhZiSc`n${#9!awuO zA06ZHXl>q;LD%DNcGx(`jvwRaP%3>z`*|kdRqPne@h+Ot+p^&t=`;M+{nnr$aG&`< z-c$Kuj^tlp%DUJ~;~jIWg=ZK8$L&iyG?@J;Lt}A8ekfvP2wEG~DdA|t`CUP|bDu@w zl>(L+6j0=a+XRJa{ANHa6EY_q!KC@_W}K>j^lYh13X6x`SuWMGPNO^(-<+p0+6;=} zD6iT#cF@-p=vVZqO`_+RBXFZstO!;{>WTn*CP&5rP-@=W7X(C_~b_ z%ABA_f|#RI+MdE`{|p6YyhZH%9nY2J>De>75{>(eqUVfJ8fc-8bUoCxYdzOGpg;L2 z)0acBMn#O9@$*c8rMh5as3`*DQpCvo09t^Pv1gp{pRA226CS0c%}Z3<^Ll2AKBc`E z6}%%qKkA-6dH7M|x%%Ef@yrTNUL-4I2EbzE@u0E{4lAINQ{$fM{AX18?09l$MCFvr zbQt21jyC@G?CIpg;pm*KBWETV25K39yp5lg?ciOXnG>Lk$LSz|PUO1)$UN_*^3mxa zK5^Jvo9Vbrqi@_rYTGAwru-#_1#mpz%l-oYgg5XMOQo8|?kx z``a65ZwCJGe>}>;g3qct@f`NEPIcM)?EKsdgpa}&{#L~z}5> z)){()-vc$HTGwJCnra;$ZbqR1$y+NV<7^E`rezGr#jj?8_JdnPbEBj4X+ zOn2Wg!B)@kS;i4B(MR@<(7B9|z)}HfL$p$`h5%0g-fwhntP&`X_7HiJaQWG#$Akmav`{p&a0O4rr87{FW|3O1C( zZSSno9CWgd{=p3Nk&KHUj)bfbM=Bf9n&+e$qeG!P)UOOpS^2$vSz9(}{j5@=vNYDE zAN1hN2*R=Z=!zHVU3B5#u>aPL4MUT7u})foy>z=wk7@_&SDXCSIk`tC4oqNPGY356 z`QAhC3$n76M^!QAU~cZA9{^B+iVk_Mw=~BP^hBq(`pkzg2&i zv7rOXgmCCM^JD;T!DTWW$xHaIvcjBmylOr0Q|I~Q6r(eHP9AgyLl^v3@b{Je$g#<3 zZ{Kh0SdhXveCGegVGXjk_+J$b8N&}Ma^R~D=}tdd2V>a^M)#~78tbgQyv&JpIumrB zYcTPF&(T}fog5n@tXr1G_7S+SU_PB80QIFE8b*^)9F<&I5XxVQ5Ad5H?+nqe2+5Wr zI~$2k&@nltr;I2uSqjhKjRT7mvoxZ==Rs{(#~FxVq&N;D2BJ=O%YP3#mey5 zRA+5o?U{KsR^y_~85{eBICPkIoPH~8C18_w% zDcYhYuCs|zG{%}Us+>a6H+)mHMVW->8gFa%X*s*IUhlZ)t2<>!au~>}98&hr`r|!6 z0t2FFXLx)T$6wux&f?3-|I3a*w_4&9HC=-DU8+s&6PvJ zA%Z2$-!;A?AAazDO&P2+c_1G!2OwTk3pAHeP)^QPo^@!HRk0`52d0^UHGJ!P=a#YW z4itq!&;$SbZ4Bn`TC~Q~t;HmeW6V|o|7c7WIK6m9z=l)PxjOD` zKMVS&qqef4Yy!PY_XtQ`J&R_~hMF7Pt`Y#&@N7J5`v`i@QoVHNmvd-Ll9#46Ax79c zReCNkf&qDeEU#abyAyTqWH!z+UDvG<_r0jR>o5=Hf#EG4;|eVDs-@YhC(M978Nc^Y zR_MV&o#>;;4ut~fRC;!%tYT}8@Q$dQvO@wY-Ahmi 
zar=e7SJv(<4^gb>Ao78p?X4mMGI@2``Tg&I|J(Tv3>Za;e{49MU}1ufIyjLDqx=9_^y9p`4p2cqhKE6_ThKeBYG>*8_v@ZhAfp-2kV*f5Mu)=z z-J-Z6*mSF4k_G!IxhK&tRYW0jK*#eQ?HY-mZ zQM8W%7Zn9~?WGVj@owX0Ak}Gi+I)xImWi3;ch4OVwHod-h{rnceRVqc#yT_b>dpZ%= z6xlTwd}==>9yUJ?;G82qTK7ZbaLnI#rEPU&R@^cHt2IvF`@2ZH`Jov-jz92l4Fxy} zoI$}V_tMn@j-D$kX^q=6B;ZE|tOZ(B##kVbz9(lL_`Oxe-1x15;1_#k9}4?K#-eT6 zDS-&tJMS!zW_|DoSvI!plxN~=bG%a4baic#3BS=9tpq`oKan-T7B&^m*~+Svd7b6u z$zH*A`crw9K*74H|Lk~Q@y@Gf6fht=N=gNy%?qss2-Ynb#=KXx;#qhIK5L)U)`8z= z=d|)adf|<#^ZGtje97wlC+%sqG$;v!_iR6B| zh%?NcbJwA295_xaC3m+^J;7AMKrjfOgC{vNXj)m-nR2r;j?1IFnA_SOS9qpeiB>(CSU5nX4zihN|SrZYpa<3$EYq^ixa-tWEb zEhu90%ILj|MnB>?D-iq!E zZ&&*y(ZhNwvto;CW!##hB}bY5YJWzhdU&FBpR~E_z&_OpTY!}f%Xy8C=3d9r_T13p z=9hB<=g9U6Mwn;&MZ816l#^3O&Nkxb#Cty36fl5AwgO&ruGXN#&+!vLO0N6}99AxF z9sQgiiSIl6i5;x#-PYMRPw77KO_@>jRQ}YSX7U*xnfvJGy=xX1 ztp)iwlJ*wCOLXJiEdzJwIpvX)SdgJ}4h5w-HMQ}6<+yv6wbo(G-|z{&dZu0YneoG? zf;${Q&I_918JH9Hna5Im8|PO#hX%oT?tmL*NU-wIjVO#{85E$K@qLH0Fc~SjP%40a0(O)IG5#W?vNKA}M26Zwaizp) z%h5B&B2s|4x_!#dhTOCL^;dzZCex|56cR-)8tm^JnpY(*X9V!w*UVBJfRPXbW|XK% z5eJm95%CmNReC{@+B0TzvrTj}+tRvG&J-BK=D9X(0tOtM4%uY9E)5>n10Np2J{Ny;2$-rDC7}aAlD$0jA!FU9K6v4!6 zfEJ#NPu1YhbCW#FHM4xHT5O;ePkYwhA#LsjROx^GM-GaxC%<@6SrKC^%@0zlQlklCRL z?gRC77l6nyW$UA@(!y*g9DUqRBqxsVy z&pA?DwO3{w)XH)=?qp9=9BX%^02`-IDS`Flgh*kZ>9R7%>se)!2p|9iI8eB?7k)=g z^c6lhb1hy`qe@|DRZgKx<=1!ty+y=q=tw}=R(#4(~Y09bOP#@sjn!P-a`p=v%wweCc7&(P#&xF!(!4jFthpHGY<5SOv5 z?56s1|8L{5h#dVv9y!k(T|CL?0F%HK`J&ete_)2r=@7NGm8t6(dAwVx4YaONMfPDAEBjxak z@5|6AkrdSdIz+=oy~~W$(_X|#wZocm5u2LQ+u5DM&BQ#{D= z7v)7WidoxL280o`_MD=%`2*vFCx2y!CJ^EG+(U3h7JnpI+T+zJ@BO`Qz%|E!2)&_| zDR9A=QC>nfkw@i;GB`jEeiQYxNgFSE_5|xiff-SJz1MCWPja%o2N(cUfd+uXcQW{N z6Qj(~wjtMhfe^a0Eb@97-qp2xRa-utdq8gu*0`4~QxZz1WsHDp8Vq zW-F~d9*`;UY)&HE-~O7aZx8?gAOJ~3K~#X|bcMEC_Vda8M|3hTv}vq(rjA)_8QkjR z6^yX1{;{9-9`t{Xvur<`a&&+#KEgkoV`X7%W?2j4p#RA%2a4RPgrN%r%h(rkN*@@1 z!I7yq+fQTN1tC0}jjNL!+apNVJ?S5`5U3zuwX&Wz=N)SkZ+R9P3bc@?&PQVZyx$z@ zA0?m0WW2B`-bEi(CW0{ZAcsu=ragk+%T8`#x#<|E(*-~TJk42|VeOH(s-UJrBXFd8 
zV;-~djy0%?$g??tDp3SL@U!k}|Kkwmua==|O6CI?W>m&Y3K?DqyCtx<?NsqK{s5A+h7@s?IinQNfP=#L0FFS^|7Y*+ zTCC}=a*w+|kjD}64LtF{XfRPj9)frxil>4gh=?92ehATELX==MF`_6Q`>EXgTK?IC z+H0+ic;bZu;)-T2-^=9CM6oT;pYyC(8k%yzDFU>5F`H*rvR6f9EtcUle!1 zE9S7&nrW~eor5z!zs0GDd9yDj)4l}%-OKfLmAf_%=8jyNzZ^oJ?`t|q47d9QaExJj z-84*(7?GvX>zytcLox-FtDiwOAO;wnx&ZmRXbKWSyL7+=!`bQE%LoC-oLn;cb)uX# zG{zvMNTUltm4mI0k`4eW$T#_yT2{>jz~&kS;>qrGWFDQXwa;H7Yy1xARtya|m^Wk+ zTh;-6Ybn2Ld!qG<+e#v{5_j=X4+`;6R2e$IW_E~C=n-b zrYqiZ#HWMgy+xIQPav6IqX*flWxMC)8g9`EYx959-XAb=h2{wnXVXKnU&gIyc@^6$4C;$$YL#(UDUbS851T7wqzL#F6!a#9`k zkp!_ZAyuWbkz=@~CINJ&bhICz%lDBS8k0_VrQBe)C|(; zZp|>=D?H2Vu|y0z17;k+bREHf>RH?tC@fuJDkH_X*e8yS%?+GmGemi2!QHL*$tme% zZJTSRj9pWhK35)S`SG*s+FBjcBXejj0+vGPK$LYfR^T4ES#}G_SOPfMC7;&_K-yG} z(Co|q$oS6sYz_@}(V8lnrby?xGZZF3L?$p~IW!_PIuwayVO=_Sux?NlQ0MFequ6z9 zCZ|z|o#7GqI_c4VP(6(?O`qPG^&FEPYMp(b_Pcj2^jk zZ4O@yT+i^_+SmdUWRWpX7bWL(8Y5mL%;!2Y#Pg;@IStZ$mpyvz8lYSL-p}bZ`d9?q zUQ~-G?VvWS9AhbvbeCE?@>|=}#+M9oXv_y$U8V%P2B9JSl8j4z1SZT~=T_0J#$?{e zE%_(YGbLroZlASxFWooYD}|zb@W_Hm3hi09<@3^i_Aq%9i3d{rzfN+#dlqLW9H z&ViM^m1=VIoCO^19SlDvoZcocY|~mpgSlUOZ*n1OKqioOx`4Sq*&$~DcckWf%EZxg zd1)@}9q-Oj1Uxye92+{uUNmQ&$CEB&8*BRwz;|G(@2>G#_l%hFTp1tqT^;c}x59q9 zv2-lDL9`Un&>1?d8!W_@(_@|s@6|Mk5<1zOzJeynE*&9*y(Ht6r0%-9B32+Cy zttnp4UAKbIjb9u2mpZWOLSR13sMK`J`efx z_vz4(F@(v|p)pq?FLal=C&R!Dqe~w$sP02H-M_=wtWSXEJ@@$}gs_apE>c`~$hFj; zd-TRqZ?YfuGy4Yw`U3js3;UQ}5y`M-fWp>P?!9BWW@E?``9{F7PjnZyU#fLAPg~Y- zI&&Nwc9Pu`q4u4e)PMlUBumTa>z3yn`Tj@VE3yIDq~9kl)hR=9vO!lOmS@sb*qR+f8XCD^`zUkOlSi)AGFhRmzU`8-1m{A5`<~>dpv~zA4Ff0w0L{1`y=$b>*oD(43*lcP5 zTd^NSu@)WMvaz;alcENUyn{{W{eW=uLV=s36bHo$+{hOO^t~f+n8JPL*tfppye@~P z+-2{)Y>3ci2SkLx{UZ}VlO_SuI2ZwPA$M9tE(WD=uCOwPUw*GS0w&B2P$;TpkJxtr zB_m?K8L`@EAGL?}%uxnnIfgAxj%$|A%kp=At}e*609UC6K&c3rxw5xPizuoCG|@@+ zjQMkozqy`_VnqQ0Yw9{+!5%Kfj3M_qj-$15J^f|hlIKa}OaOLmPWH_2tU2YWCQh%WQbGt;+|Aj-kWF$v2wckU&XjWaF!Q52}!6!x6GvPPWjoFRHo z8q{n|Fl8b=+949_`RHM!^8le`}6#EyazkkzIn^`*03Tn~HZ{ 
z@si&GUbmbYlVSpNY_fqcQ_dc!h2T~NxyA9AsU^UVP#J*8LoY>Zo|mvyF_$;`zSL?+eWL4TTbVzJ2VV2_M5!gxY93b2GG2l4=lCx8hZfa z%fSVl5UubKuwHXZiQ`8yjzp*=+Me7iFXFNpGHeygCgLY(>Qg z340eHC6hC?<{5kE-nm)3-n-f(9A@^>exoB0SxC;(zp4jZwgvD0rQp#iOZ#3F*){&} z-z8!weYuPt&v%FrvDWmYpNo`NFDsH^{jF0eZFHolT?gPIgd*v3s2clTvh5N9axzwaqyYc|sR)7jR>yIjJ`?jb_MaYme~(>t=B zx_PsP^rtj9`X5=w0ft-QP}wM+`ym~gynP!NkYoG^2B&8B&R%XD9v-F0tCnsC_+1L4 zttpKye^0TfQxO%yh|GgAAMwnWjt$^s(-S@lx&{IOr#y4d#$t-DxDIO|y+zsvXUiM{ zVH7&HFwjJSlsZFT2^A(wr>hROdWJJ|Y2&roC>BvFP7Q@Zu>gEFp|u4Ta%jwhFY}Zl z-bzkb~}djD0%6 zJ@&AXkIHH!13AI=(;ZSR6l5p-Q&~*uDh@OWDmIzJu=VB&u(cF5Y96HVdXh?b3qrw zCe`p>YPbK$xU=Ktn1haxS89TELBO!jAKsw}RKw)p`I0G<)^9UG^9hl2Av8ot3x# zi0t%P?tKVPEUTSp^=gHRpswQe?0>po}%D;P^1`zRZ{O zG>5Ta&9S-ei%}*+oz9LWvzI6^?09E9Jj2Otj=cM96Fr}uqqAfWeKr=*|0zn!*q$X5 z6|cr<+eaK`OqBiLc}4$RYYu7{u2mooFbd@o)Lv=NKrURNB0)o!IVK>!pSr6%cY8RGuSzF%m zjBAY5-lT&##-a{vXlLo{wcFW_+B{b);5o=2WT&*2w5a&ynPZe1IJgJfZ&os-9w zmL9B$53IE8q?_G&1&8LE!#3aBdzNB2Ys{wojffI06Y0HnAo8PgZ+#JB^e7Swre-sc z?OM^pARjhD6Cg{lL>T}x%7X`TmUlwADc_ok7oF_dKj8h7d6# zB5|v0;+kVM9of3fNT~uiy;q%oM1uTdyxr4xK+G7dfqlx5cPjXq=?&HN#s1cy0@E*r zkt}g=)Md~q2o@dmqz#A=)S1z{lPz-RD~HC|&4E-c4vl%Sep1=Ur2XI?GdlQc`x)t` zQgN9h*(fW8;C*HV_;%2+^Xz4u{%OF%`YILN>fTPE#HYU8CtptsIX zU@p=EP@-SyL;Hz*X|YRw_c~5n$8yGXmz)H@n>8nAJom_ZF5jX0nSM{F(}U@Bkx!0= z{YHl)RFH7&R;}Xqrk`DUuOb}m3W5M3C_=4+q5rcBQn~3Uj@TK_tlweSI?&F>EC;kTD3p1m5#`3FQ z{pwrTU;3Nq4PeG;mIlC4ud`GgbB>Vn7`jUibVjXWRP$cSR?iNsa0rZ@Q^?S??)HPd z!HF-D=`+6E1H%X0O;(K}<+zBBL{LRimm-HCFV?;S%B00XS ztMMT!u>Tri)U*NM_jh0P!ZQ72kKCF^`;5cEp*0W2i9F#vi|m<4X>-yb$(Ap2Wv?Q9 zDj@ZHU=`EmOIm?=g5BojJG6%gppZ3aQCVCmxQjWN^z)zuv}e|y$=8Rc7!g-(;bKtFRVmfb7&Jo6H!%?7gpGEk6^GFN<; zx{Sc(;Cy(8#>-EoR85WF=hP4=LT1u|J{~(zfJG!+z8_TxX@NNf^1J~HV`vlIoAbL@ z7*ZqwLIB3*qNo7{?+)M^8&F`Qal)`*#_eZ-fpD6;(r5_Pdu^_3&WCsOK8!MjA|kKKk>XZEhmJ)fi4Y+z10`JMgNmnmnIw}NWUCw3bcMgF>j#B3T1 zBQDKE?UmMRBKyjKnM_?J5^RxYik>hw+ ze?J?C_XTvkI|to0_9X?#x#cV}ey#_^YnA}$dmg#x)N=r5F{C~0XV)Z8hy?89(BL(^*T@y8JQX_5o1l-9%dHJ>xb9es@Ge6nXJDTr$vDTyzSV`2TzNN!%HLeWnJHasMmXL}FRkt8PFZJ! 
zt(*6IX8qx<@!9X3QEP(`($*fq*P(x}GN<-}u_9)qM*W~A@zi&w|I_g=^y3f+V@$+;nkXWL=bzUSR(x^pD zX2j;v@y>B%gXn5Hnm!hhCR>~xSWu%E0Q{PBRecwEvj@|ww}{k zic+5M)tcMkQR>N*xo5wXvr| zBjnAQwZ4q94ofK=pGN?2XhbUjNS$N+;{=d}BA6TiikbZAmO~kLh!}8e?;PD3DIi1s zfP_n5q8OrBfDNTfv2w(V$eBNLr^bqN*qH^j_C`9GQ}YBMb8rGK0RIy7lcAZm*qDya zN1yDUF;zptK@eTV@?-EAO8>l1vf@4IA-}VxB9>|b{LHDXsP>3%G_~@c_p(;N;Gh2K zpT6x)&b7lZMg7Q($e|)qkw463sdUCH8c83~(Fhhg2`KT!nZcOTsb{6{Zrz((t}FGJ z%@GAO_Rc=r3bwaAXYVy+3@L$OMe5l%Ma_;(!QMN*`MJhQU(ti)%bu4qz9b{Br6^w- zM>+=Btv17cD+SGS$fEYfKmYST|Kl@FF4a9nB(h0#?vGC||3Cldh$;Bu9xF4jbS!F) z0Q4g0)QJMbbUxw}*|W5zk4|znM5a=6#Vm-pfC-LO*%$@}2iO z`@kFBDylnM4;Q8BU{^IKO&;s2#aMCJ?B9qX*SfFzAP&26e*|a7nvl!R z*0Ba6tfj#|D_Xy%W9%zYTKn7@Vp-2B>RlVl+TYD5XI$q{x{j?9JuXcyofuQ^ert|n zptlsv8LeR_=|N-<88SxSD_9=ZOYKqtr zG+-k{@!7af%AuJ~4TVAxNm;=9F|dp(#+e~yP&&K|YfYh*f^{#D&zbQo>^LWi@ipN9 z)~gt=sjPv9Kq_XJvId#}LB;}LogK0Esg>)W%0X$(mRDxH7!qq|L;DQCDcp#Kri|1m zVO%tXAoK)XKEF*5q>3EO)S#o^7K;+WmXmKz3uE5Fn(jwV_Q?5ERF~#4 zurZ4%lN<8EI51A;2w6c6I9Wai7?T6QUWnS?u+N=7?|G${k$ZDso<%=uyl`eQ{k?MI zeeDB`vD!cT0>dYrtGPEfrKxdHMPVy+2CPJ9$b^>OdgBBtRXLyEVh z#c`+^9jR}0k2)``sY3bW**jWKq!ZokI?l8`u7->JH`XK5`|e$fd3`$d(s+t~aNdmF zzI!!i!T8O!^)TNY%%cy+dt`o>p9M|>F!sEtvot>6YYgEtrIjul$t>@;Bu%Qvi6o&{ zMHD$UB841MwMM?v6P)dw*9kN|-~M=oI*23Z|D)Zj_4VC;MDEd}bmqN>wC;KLWz>lL za?05*M1piUx}04?Qi;~ghDJTpeO|?pX-w=UasfGBiNetluS>WPQ3uDvcF1>nOT_<; zl=z>CEcdL~3w|aeW5NNv030@_(|{S?K(n(AfI&>*Z~Vq@{J7Y3izAZa1NPj*XVe&A z6dz4tJ7a2_o0)=!IRrlZ&E`@&!1MJE6_8hyH64)oyL-k;(c9EQubsN?cN5@JKERuK z<#16#1eOw_lo%dtEyfJ9hzSMQDJnpmQlK=!I)ZS|JMX>Lya3zY38TI|pQHPl zMt~OQ314ABF?KkUyKzXX|6eqfTL&-r_tXTb!0cAMNpYXf_ePB$gp(v*H;`q5=O#}Aq z4SQ4sv6L~-thJ|c%vu`%SC}hJfJ{jP6g^8fTQ~2f$%Gm%bvf=U!iY&_@JqcQ1MTUf z=lI_HExWwUIxkMO4u#&80sh5b{Kcn$sC(bRI?@&Nj2f+1v|AI{ktO5<=gZzk)^OB| zHj?Xm>n4|JYB?!6M&_pUN6(Sk+&M~Q(O#?pO?L4WLFm1*@u^&&?U-FIpB+6*H`kzn zzN4^f#}NPkAOJ~3K~$IFMi`X7qnGJudU+WI+#5wdV+|(}BBf83$I<7{TpG9MBI-QP z{K-b4gVh1TfS;s86X*uOw9Ax|gjJcXGP{QI1LAeXP^%#9M>%2=Z?Vo9Z0wc^mfptT 
zG6-5ezQ>syTLrg`HB-=lTmk{i0;hm##o*q>W+OmygFBmsLa3FZd->Y3>D$)Pc?WE!Ih0ATIS7v@;nrN4jWD_?nw0-0NDZ0@n+qL>^@ zN;rqI7L-0$JI8yBLyqfOgod0MFZr{^IlW|#11^=!{>JokRO*lgco>g{0`|5PI}FlH zK{S`{FY?5oc&BfC;~N*x`8i@8kGh9Buzvt7&Mg@b4NhOp4t}oX{D{1XeAw4yhq2@c z0bn_Jm(Cbi8}sa(1SpvAnGWgi_Bi9iU~`)5z~}dB8p$f>+y2M)i7pze^;5GzMn&4% z^8ro!;vSh>dzZa`zgB==emNa#P3$>Qni)md`^=+1SM;ye`1g_%M~`RT?~;xnS0X2B z`r5l9ebpD%yknX_k}Cv@hVf2#MUW61a(M$u%$csi|?uB1-XacTs<3(CH?bXR}lB69; zp8y<1D3S?tNye>@eSn2Am)4vEY7cO7JDX@rG;@*&%RM;ftaX~*g8Z(|UK};4WTJQW z9@)Wud#}=%+>aasIZ{#p{>gX&FXY*L`M*473i-&^%SXz=l-VN799RaWdc9?;kn1{_ zRRqd@qo+ixq@UR{$QOHR4~UKREMj7P?8grA^LIc{otCp`$P3^}p0tVAsDZOZ$C7RD z!qM?L&I|{e?wL{I92(Dj1&1b?Ug{BZ&z2w(>=}UN(xJF#$jF|18ZUXc)bL`@{m%Rh z`CvcHuf2`5kS?xP_K|kj*BqeaoSp2vJCR{>ZqFeU5Pa4E3zK;0UW(8=1b7_9=y1LlHJpTCkE1 zBq$QuSkF2e3?`9P0o(uKAO7K+jqePzPtu_wC;*uh8p7HsqSwGm(H4MEq(#~d<%EGD z@a2cMO)!y)&;bet03iJAv(J8<dF=E&bT1sq!dw7Dez)`w#|Q#daRz3kCY4AN}Y}fsse^0Q4dg$QzK%Aoz^ED}@v}LD5doPF8?u>1y`5dA)Xc*gb2N z!e4%-d075_PlJH~mNgMsmHKC$t9zTE%G_eF)Ux<)o*4j!QWR57M~8%3(+W?`{Zj8; znuiXzv(IOAC>GIuFxx;MJpgoymifK;=eQt-L@+orWP@Sk%#hJ*MZ5Q~@At0X5!Z9% zq|cdmk-;+#a6V$3(@~})?VDSN!yfs}NA5*V*c1_3?OVw-9rws#o43X@$v@Fy867YH zU`I~L^i0uy^P^p#gA<&`^># zJvNbo!2oes8GfuLSr@tB@c-=3{_NYmt5tG8kr+;k^|wyuvD7c`L-F2f)g16{U8hr1 z_?&W7FF^pAKxV(`*=xYLbqico2j{t>3}n+3 zVK_ypt-HC#D2uvs^f*H9Wh};0Z_ow+9)^nGqjYdr2>sK~Q~e zpq>qY9cus*c}rev;Y^0;#Rw_7j=n{z(amZ(Blcug$OK|5k%`*Zvu$Kx;B9X?{DmEE^-w9Mts7_>`g@3UgyTXW;Zx6Y#FQv z%di`tltYu(E7eRY2`?7Yhha|1-P+6+w}X*@BHfWFc#G(7bok7h%C16scj5g*=z(cavFOvdqiB6@~HaH6h*#=&rqo&4j&Y+$as-t(Pz3pfIc@ z@NCnS8u+T~W?e)vyc6&a1d2NNH~ZJ;&;W-FJiw^_M8r`lAq6N3BgAdv1N7zs;0Nd_ zhH9cHX#2pkL^Wz@dMWh0ww}{rz2r!Hx1wjh_YYg|-V~SpY`mN+zvEy5UC0EH72~u2 zC{@i9th>LdXLIfHEE(%>KEM0j@4gkPcc#uIHtiND3e1xw2F!CAEDkjz#Q2C{q&JMS z&)8$g1M*jG7+qrB>b|o!w>chCZ@%-L@4PX^-e=j=z*?G5&aDWhXbOYL;BX$vMp0AG zG?x(pd+0qTI9v6>cs%5eWW%}_eVfa8f^y>(k1DxquwJsX;0a|^7yAyLw+B1YSuue zF?jx~q+A{;hp%+6*7}+r&S@yBN#4R0ITGH--#ITL)NBJ?$WD}^4@V*!>(qy+oE_LA 
z$=m};&zWYUHTn^mM67d$$T+!Rr?cO55xcUK6~8Y+6)wBwNc&sP406h|*pu4Odq?_Q zAOMjHXTXleNte+3$SMvH`{Q4SG2crn8w(Oa8Z-Q0|G_HK=07Qi#-vIEQ#ftHFxrHK zfYgrh)`poZs(S$>K#GO$%j7dmb%L4gN{>>S@42ZHYK8)sWQ@HRh93w45HrkM(q&@e zd#AJK$oLr=AXwdqCISF^scbg8&0GX&4K%*zTJ&t=r6d?_-$fxP&RCpM=&Tpn$HW6r z-c7D4r%qQM^*2JZ_JybjHdFeVJrd{-oca!or|gO*9JziuYZW6G$)F6s_r34EIUO8p z&VlEWKLDJAUD!P)*}c3!KqE>a1#8yC%yHY(2n*>VQl4&gb-D!Bjgx*bmy8QXN(u@E zZahG2>5bL^D#QRLI<4+5^k3ey+dfNaxJJRGbI)Ssf;V zsuWMxcwgi5cd6X$B3pwffhE|!j(9-gX~M&)SsskLCNG}FW~c{dLtq$Ve>=M4Q)%1m zB`tLMh?o*aa!(7&gw!MjvL+y(g_Dm)6xJ5+J^ntO8g)h3U!W66(hUm(2Bgh2z9WI| zqwmbf0k=GMKpklEWz4l0_KqK^)~Ob_YGgznFgyz3t6}ke-dU#)(L>G-5H^<>O9u7S~kz8G%-=eOJS2cvN)rn?u8)| znwM)_P0XbfQ1|pKO8p)tI>*3xIYY+Cp+TOwCm-B>n!++=^UeCvzama3PZ%8C^%Mm*1z;={q{Evv4}cD>+>f#gk}fvp5It ztwvA82RX?W(&20$5{g};&zB8V!mCRMI^x;yjl=ieZPxmT5=n_8-^+6ry<-^`?4tK( zm(;XM2WR8VOX>dR!CukO!#(dg4~_jq)*=#Qt{@9JIiHL}Q=t;20{mbid4GGu{BuE< zTVP9M;{I*(TAxVZDO1b>pridY#Y%7hH=Ql8;BC-XacYcR3XtXoScTFKZ8AU&18w8k zKwAo*-*aU$fRq5%SGpKR2fINzU}peO3bK51Q3p*bfGNKdwV>>@g48WdaX3&Zf>oH? 
zJ2JdQAFQRl&Ss&>e6dq%4-Ew47%*)6NhRAy_qlilo z6j8CI=FJ)u_6Jmj{Y5W)k$-7>o@uz>*xHMTlds&pm$U_vejhxst`j>cgTF+VWK@XM%ryF2^HE~}>GhnrQm8)(hsMN6Pr*c6IK~lx%RBv)H`|%- zma-gV?MOtfT0_;rk6^iZLfRLHNNcsJ%`~w`mUd+&s$eIGkw0?yZ)Xd zgACC0M8vte5snPKN@sHztk)z^_Y&hd4mmI!26zY2teI0CKs}4YLZ`D2aF9p{64d`W zEHDy#A)BcTg6d(s3!P04IYJx{wLY*!WgOSC2Bv+>xiLmWC1*y|S|)=3n^$tp7D&^j zE31vYmI38`j14)5SXWl?R!8*FQ_f5r#)9!MNeW!0QQS(?_&jaON8fenbrVW}R|Lf7 z!AAHZh>Xd-`H+u{FGJ!U)sa-_R+L5xg)>VS6`nknzk{mgA$M!Q_K|V!{p@sTj1O3q zGs-CuvA}XOK*!v$2BSBhN<82v)&ippFvQhQ}-Ngyyd z*euOSIHA^x@(v&^4QvsgYovnv&nS&OOirX7%H;;C{Hs{i^?M`S=9{s%ri{7u=A6~O zSy~=FA_Wfl^ieySE#=A@aDEXe>V>3#+Gnw%uk6r>&WR3k$dH4^{1HI2IhSrrXK=H}8bFk}hoz*(dgl_X7}t&Uq%=9{u*k zXKv3x#|T_%V{U!uQVzjC{KG%Ir8~0EbTmf>VaVnJ*_`ToA~9RnW!j6#JT`-D_#2xc z;{nm>|Jh=8jBbtuGe!{|UvzSx&kn$J)vY2q?jgFukM8aNrOa_`kg*~*vJnuOwD%hx`nuX%58kW@}_1AY$l!#F^{}WD^-AyVV`-J# z^xk92%T6#0mt!Qg2F9}6AcTw%J}XL=5n9gKbO4OeW^@h4%in-1p;qjfO>INWLk3+7 zgxSb-9w~fUD$TP}8j3s_50H(q13byRzy06`KX?O;e#fcgs2Fo6=!&F?WKwQIyA&}% z8Qap@=D2Nn6l*-OPzShB&x4t=75}rIDIzeM(uBKFZqzh`zdG&>^VC zj0CAA>qj64YntNBpV~K9+j(o=BVDX-GQTHndTE2IsS&A>5=4K9f?}Z8Ql04@z>+fq zIO`}SQbUL4pgrPv9ig8-FZY=1N^goLh@8m=f+N^ZQSQ3H2K>)CWu}(XlZmLex z&-OTd%z5!;Z+M5Az>r%u!T3w__WvWvyK9aOSx4IWjtF2E=pY#;vlcBqtDcpf6%`ZE zK#YA@hsLDX_)aV}30U`9;5`Fmp1q;3g`&@X?$-Oad2ANOgwmE$g3ZSCsV}f5rFUI3 zDqDfkJ>!~%&{&kBcYvF*NkhXz*xWX-@!Jdxp=$sT42Am?^#QVt)%Czw0R9=LYL{Zb zSu}=H5ilEMUSW5s6#(drx=;=bBeu^PSugCqJ#z%mN;v`C=B7eHO3}|80M{5_=jliR zG#+cgNMnL9+8PziMh$ydpsjLp3!oa8ZevmjIK4Xi0r}>CwrPL#SzA6o_gVEuBBH*2 z_jiAHms3NYI7gy%WXLMXXplNO=xvGok#VWI-W?d-(*R}Z z)MnG@5$UR&5&KD6%-R_>>_#_~-Th*|+MM=q}iSokef~)NEKa zTeEZOJ$vSi`CU3VIXm`Be>28X%adDsq2Gtw&L9@AIW|%?=}q&@v0;-$IL&wGd3hHO zjZz1AhfS0Q{$U*&AQaoDtC<(##r3iufl*o$;1>JUS-mzvpc_zRPxh8$DmSafT=$Eoq2 z91FTrWR(oa`7Ynvx^Oh?hn&p4=`FE?$>Z!ug+Sm`)i!+UhjPMSy$j)Tzb+jb0O-|Q zHqU)ryL-jM_U3HqzGk!D^Y7-}zL1UyT#Ag;@Z=UK%Ng-~Epl>;1o5{izd7>ACwiLh zL)=JzrmJUUy!-kL(&-=n@gLtJv1}J|feeWP9HFb=s>V9pM@&QtSc( z4z9_O`6%ZRhAZb<XOJr14`iuzVf-BtKtX>9zBKt~qW;uzc>p%Lo1(@+2LB 
z(;MgxAaZOdEv$g_D27dU5_zS7Hba7u$&ul_kugq?w7Na4-WK_p%wH!{+t|s!&8{uA zd~r^Vu~vZC{bMerq16glR7Z}y=*gB8h+AXLAQa6Kg)y(4Jn!$E7Y>;2Kh^^iDqTvB zEJJNxZ?#$8GFOjYW1O4?;}pS@ifEp&Z}${IK01yqb8Va=NaoYn0MFUSf6Ev+7T#5A zk>&u{FAfHI5b-Nz(f#kOLzn=qz0#g5t%)NB5ad`dpK&J9v1NYtUh~l%8mYN-11C&M z;74i<&|#)@34K_@lNe~^Pjx{t(6QD_hh*#ePaVC)9qGRR{_p>OJCIPcTpgZ0M^`T! z7TDMHIXl!o;rz%j_$W^C)_HrC>t^=DByA!uMc8CBz?p0>vIj{UdH#&_<5!uhv&XS? zY}hDKS9VV7JW|wtK!VAb5M71o>>C7tz5O90G@Uu)MX?^(=Plvovlrx%`&&!^8R-0| zY)s(R`fzFpnH+iw{2suSU>_0Y^n6N5A-!%hqDHfSI^b0L7^Q`wRNtX!9mrDzPKg0} zWDLNQI}4Np?;N$g?DZCTi8aaD1^j&3>_DyPlW{QMr6ttn*>y3OQef1pcz1wRsFw37 zA~l_fJ!F6}QZ5*M3>NSzeaXC#Y2#$jr5Q2g>OXv!*2hTrKKt_@vF=;ub^`C58ICw+ z7t`wO{O*cYYCA#OyXOS?^<*%!c&wzG7(wdwq>>D=s*pXK|ev(vwHi>R**0c^6`HROT+ zr>keGnLS5lG|%|*m%sd`FGb2e(m~LB=JDB|u}G1$8y=D0hf zKon1^6{R$d!(;_G1~w3VEdIZjbvr&wd z0q59LytByKICT+|M#ovU2KEfl>hl%;=9JwFT$?}lQbT1v%$e_=M+yDS-~7#+;xTWW z1jgE2n@?XFNLU;5iM3>0I@{$48Ca9!PC^E*$%gfoYIUo*$Sr1e?Rk&>-LuUfMbDw_ zFiq<^S?OEPi9L2;-yi(JAH1d7=_rGE;NUXyz@as>mU~mba&`d+a%nv~j|Rvz{%alE z9-XK6_9nGAHQ#Qpe|Wd(Y{(_y394;1Y9F zlL2$}NCYb^Nq^bXIxr&eoY7{_*cUno!Z!%h4;P{Ff;RrY{_DT~#NYny-@bWh829?k ze?$RGaJ(DPC)`Yt)w{k9TSp7_nfD9q*(4ORupuV+Qv1C*4hn`bWPlz`h3dXB!n%P; zBa%jO3G}tmOPiCA?Ep~<%@+p%!v~ZB>;QaD=h+d3lm=x(=>VMmZ&Qo9kQuF3IWr1S zMUqNqV8khB&JIOj4X{ptyK$FN(mUJ(TvIqc%PA0{v2MONh1Q-^tLPTn?@SI6FV`>} zK$bl5z?R=pUYte+$wf%cbn2})@b)Kv@+WVK`1gMA_ukeu_Vkucc@HLf%C)%?{j#3s zj?-Q@m>H$Jw%6@9PLa=XGJrcxF+^+VREM+Hoo=t=Zhq-(=MDhU#^ssjS$e19bM=p} zeoo7B4%?H{pu46>fe4hnDjNDbzwx9nM!Q_KY(OMs=V0|5=DwUReU+U&X_tJeD zVsw7a*0rYtI2EGvbPzr3nX~=#SvK0d(>dmv4zr(2>!PbU?jn)q#5gg%fm8a11Ih{7 zdQ$nc+x8J1=bmgH;DrElka3;Kil~cziHy?2qN+#)dPf&Zoh#{OX=zev=t;%S&pg}h zOqqPOduP9LVmnvnTi^QDn+*Fq+0cEH{hE8`EIB@7j+V}LHN>6;N6-;!DG`w5)b-}X zK7uvs@H$&UN3M35vwM+acEdg*-*8fd2;3yPRoBd|n5i}L9OKk=)R~iTPY0ONHPQk< zTn&wvV!*ZiG(lMZP7(hU8UfUrm9s_=GK3#7)Jzbq7e>l@VVyW>*A7Zz{G~fEO5T@J zzh-pr+4KPn#)CIsgytG>Zv!!U*h*u<+yiCafpM`>-Iwgm!_AgOp+_ja83_n50Ugrb z%w+%s(@i0B97{c;lsIG}I6}4*88#g2XZ)DLnWAL8_e5ys*h-0Ok21uV6F+MQ3>+2R 
zGWSIp{G9{uXP`j%*t`Q4fY&m0#KM^4EHStXB* z(%xE~Yh#-7`RHDp%%(jj<*)Xq)2Xo!u<_>8ew4jWk_+iGz=V26 z@_nfZ!*rB6oB)EfOM7+}qhE94{T|5hJUWvDOFofEoL1{Qf%&8Bv3&RRyfM?s&TXkF zfU(hi*96f^#huP>s_PRFEH6^m2 z$T1yh4E6;4M-RFe9YnsjoNLp%xeupH!ycFnf#7e_&ydgd$%m<-G4Kk!6|+*#6y?#; znfGR02_B_KxnYeMgt}K9v#pVR+94|b%@+BoT4^ZL+@hzIsgHzrFiYld!@txV)NiVts`*FffAK7FF*a$ zKmEqQW4JtTI!w)TfUpz*??xeRiIO}b&BiA((aGXEs8GBgrJGRm2Pio+$H9UOhW$p# zU+Sc^oG){LB;p7$c#O9+#PXCmM;!NBR&SN(>NUug^_=OFm^KWw$f0PMy*1gpN7i(H zwniFLkF=MJuRU}Nu%@4YVDjyAB9WY6G7b#SHmIJ@@FNn)x`-F~!X6&&uw9xb;|0=n zZZxm{kGKHTBNT2$Tzp=%Kx&7Gs&04lP~3Uu&UEC=q4x%GygMgZv?dTF0%r`!AP&RP zb5Vq*u15e*GL;OvCI_37NA@{h01~}rjp-ck{F}e|n{Rs|XJRtS?orL4&;9jZ|MeT0 zAQ}U-b;xn^mVA*DHiT{Gr~q|zvy?Y`*WCJUABviK{kh;b54?V*XD3k|2X%2^Sji4t~JlF2Pa<&pZ8=}=}v!#2c?;p4KN9xYx>xHc_$7h zoeJ~V$JPlc#`*j(4vm*_>N&7x^5vsa)LV@9nqc*J2Frw({%|ylv}GKGLyCYj65|0h zfRnAozFVI0tZPd5;!I$yzW(*Ezk%PPBW*qo9HT(!T_@-Ks2YlL+J*Buz=}ezbzGbC zl7c7&jl;zepeSrc_rW^$4rCH&iHQdGAE`Z9TcZ?9o1F5n$uWf#l+-#-gz;jC{odFB zEPKZs)TVbUNL|j4y~0_fNZg||J&ML!DtwjdW*k#l&z#)lJ!?;0XQH$Ap!oy7{_gMo z?ydbhr%QC=qmc)(d!incy7ds<#PDvZe;_~f4F>~AH}BTVnqruNH2Q~OzsJ$bVaO2n zoiQbo^qX1@Oz2rGbimVIEIKreFa0_rtdj%@3~(0hEvXKePM>G25l_At;v9r2*O*D~ ziI5Vldvm*+ilTOA^9o%_(P~gNNBnYaD3A z`1+e`9QrHj<8Ne(PB==x&o$#f0ixD=&iPW`1jLFeBV|ODI2ux5t(|$2VyvSTfJ?Ur z`YKcg*8DTK9q`v-(wCU$oT<+Gk&R$)X_>BGL&U}bjmRhD9Eoa$#Mpb7{wu@H@i}v5}mAx`uw;)A+$V(MO^r?6G?#%kJ;K_9Dy$ zFH4=z(I?OBGQGZhzka@z8Wu*%F>GC%TRMXNG$!Mun;hF9Le#j;zpN3s4;i}_#q~LQ z#af9Xv)$eu0j_@T!#FgUCL3LmxxsU6F10?~yIBZ@W&`STL`DjSogv8An6(5Urkhb> zFcqYIM5R`Q2GF7;DD2o$bI@t*!i6@RdMM69z1{Z!?d9B-iU8mPdf)u!H}6{8Sny7M z7JZvSBoFu$QI(2A*D{Vp3v?kON?)k1@)|Oh*~7?>=Y$$P}1P4Vm!nj5ZR5E<4L%Tgsnk z(T{%gqqqIeez9X5M*EK*DrLj?Iat@CB;-Ll$F(Ld<_U-ZPRN}5)7_ohRpi-Cp^`Im2NB9)%Z{qZ0F@sGQqJ({U-?|gm=*GKjgm0d<> zO3P%^Io=#t4skV}tfME&4)2u+82FiCbm_$E7_= zHX{w!aMmsI|76~mG0_F^fw3b%=_~*DH}*xN4tr`I`n`O9dkFdP&;R_-Z+p?& zvg04Zp~*n##=}@U$ykSj+0E&Z4QS#iGAs!|HVX>Z7W zj{#lmoX#y6U;-D*RVz!W4K`ru2n^kk=?Fz=gzzz5oh-W*V8=eV&va^RCW;RO8v6m* 
z%rjM%=M0=BEA`UH(o4(4PBO;oA4K?sd_^G4Goxvo=6Q~F&)6st_g185|7dqjIdNtX z4jBHu&W!P#NhSacl&L5IAP>BM#3<-AjvWeWuQ3SHS){F0hkK zu}X6W5N5=At_AiTII`!S;m~*|??9e8rJ8Ecw`7{llybs3(?GU5;#l0o+3Md9v+6y!M8#&LyVM{1$Uexi zVT*w+_a?hic)XW-9dqdaGrcULsTTW7UB-dpFe9Hh&DE|+5%g@Suhjv4)FT|WuG`ty zkyCQFOglB_))%Q_ec3AMiqdV*PIaE|OcrK(`YoGRikir84L|5kGGoj|KxM*^SHCMA zJkoOMr0pd$x2#)xYkse>fOqi@=Dyl8c-QZZ5%Em#cHYm@p?4nyjmWQ5d(DTe^@nh1 ziq2sAg=n?HJTfQQgac;ST9X9yol&H^=FLuFwm4WmMT34XN+Tua$P~LRLb>-Ef+^RL z6H53*2r@7XCN@CCr4)m;-0bE9 zBaAr))I}B;lOmCTTZF`xIlAP~SYz{Tk6`xgdwXRo_`G+XrXjt?ma{GDs6-p&(EHg} z&MkmLZ`dc~aW<(i50{7$?=Jesq4B<2imr8dy!YKMU2pxROO=mL|JdW!lvBl7vuDY~ zHPX(U0U7oOXEs7&`8?OLmE=syqI6vIX#bKQ_7j2RSpYxXlQVozC&jJvV$LzXT8-yK z-cy$~qc-G|b4C^rHPr>0SHE|E7=*qzKeKa_@w&!3xUQ^=sbf8h6D^JT&;R_--@dc$ z92(9jx#9F7rtaBuTj%QYbX|5kon?=(hh(+1OwmPIKjh__%{gn%wqzufX2l7XPNrj_ zs0`U7YxWigfFsXtX49*cnPWfGfmxfQrGt|*=Q}+`H%aT2O4j=!ppkORPJiv!j$E)G zefH%YnxZpOC=6b7rc|G!I-7Y0PtP;3la&26_G)d>_WZrj+*0XTWL_%}Dt%;YUi_Xh z9Q_%}pCJ$#0SbU6DGUGr1BjhvFw}QY=1b5OoB9ZFbq}no7NnvG)^&RP_rCLVYA8ew z8K)DItzHBurc5x})^ckI`QCAKz44f53W1VlB+W4;2iPih^>_KEz>KB{wPN)d&hZ{_ zkwL~3VY!5SDHGs#*38^Gz|TF56RXVl15xIbYyd=LqGEK$C}n6~u}?GI^j;Y;C!WR0 z)5exVdUOiR-Zg#XbLNQ7uqL83477TwE#!>PNwaZ2hP7ZsIx|SB4RAZ#Xs^xrEr-T9 zfw)q8&v0lakeOrc_lzfiK<{_90Vl?sNy+RW;98MZ6S3ScmjHri03OzWegLw+`qi(V zE@SBf-|ds^hcrad6kj?HBAKM4Tq4JO7BL}$X1z6g-GYTf{h(P;=() z@6b?;a&H-0i$*yg-Iu-mTi~0guFl|AYs)Q*b?-H8y7@VaMUZQ$stzPWdz3SG?>M(y z?_Drka=95s(WMNdO{RF0bH)MSWH59U94F*QY)1E@z&M1MMBwhx$li2nFoN=LIZZ$@ zIU+ZTX*pxk*56Mcq5J>J#lzApBa?E@w|_he231rZ*4p>#j< z%b5YzMYXGGIzl&kKIfD}fT$3ac81bDHD7bS(^VV}PLk9xatc&QkHPGhn#ob4KW2n? 
zIyCNet(#uU!*II{)(#dC?%sN0hqT&d7JYv48J)@_T zLfB7afX!34M?UBixT$VrjeV<*I%1qh+@n2G;~3ZlsRqv=N#F!~*|7?dY!ff0>yQ~Q z?a+8p#+7FZJn07V`@jGDo4WVhGtm@_6bRr(-9f1kd)VuHp1XE!2AERC_FpOI${o%9bW4#wvlTyx7%x;Nzl>?0N!>RK=A3ZuSD zZayc9(^@yPWmlveE=~zYN&zEf<-UL)@c1)7^D}S2uR5`EeZ7OR`;57!=zMuc1c$u# zdk(}4u!;26$l%(^>3i4YJwF$#hpqCC47YlTwT1`Q)?VQNiL7#5qzcvX3<%{&&Zwhj z^)*3UX*Sl#Kl(rf%r(~B7vRaEu}>axXmaS&)yr|r@d4dZUF}z_r)x!1?T-$DEbVpX zZ0`XGIf%eD`Fuv}X6sS+F+fNsxjG8bwY5*bRo9lzBs1QDy^Tbh5r?G%P_%}=u*W&o z(%edu$(ikZDYb{@{)oCY_ePSLYjWVd>wtQ%BaqESgdu}q5B3R@&!KlebNapSeeaEk z5uHNjeC=yr`|-icJ(Jwj5${qry`CliUj)cpnGenl5z8ysb^m0*~#5nEAlXj ztO|PVnZ^J2%$4^6(8&NDi4-O$o#VoG8$UZd>lydxsC&kB=C#$s)QQsj(mnPT;=q2B zWrK*cKLi}?xgw)J4^zUKFYVBji;PwFlIls0Qi5}Vu*DoE1$qFsQ1V*d`I!qm?`>hr zi^nFGyL(hagrnc16hYA>g2qXd-Vo#I?=~vOOy?L(n%vJlbsTGBCo?t!=gH>Bd~Hd; zS-Y>l=hR^AI<1}Yl8>t>-o_J23;=Gq=bmdCM?HX!3S(ajOxH-o^r63%Gtac3q?<+ z>{ls-U7r-5}M`zSj-+=o3|%^e zJa~SsTFr9>wB~}`0*;~u9DFr(-YbAG_0S`1Ejc6ioCyC%rkq(v(_XXRb?US~L`Zyz z%xHX3YVWOvDArR(geZhLv3513NrzvO-J%-4lM9Xw0)t~C!a*LTsQMy9?6@(_=;5CE z_PlCrIq>v~J-tVsx)(hq-HT%;rDwMBCA;Q?4kAwgGRMbtNF9zyvX?Hl{@1#c`5XB~ zKp@_*(cZ_r*cV%xdYq|CUisdUH=TucsbNWLDs|tQR^{D6HbDxyQfBeUP zys>1IyA*>tRt6Ke1uU?CQv9X?xaWCm*BXx;d1(`jsFZ{w=gs|rM^QvfijF_P5jGDP zu-S5;&P)+Bj-^B6=bT5OTFL@|(C?jtvZl!0vsR{^G#<<4T>&S`hVyUla0=!*CDxe| zGXIJg-49DItpd}@k(mt%%KyCvmgPk2YK(qVSosd(*Ad@lR z(mgw{a~7$JAUx{OBn!T~_uK~=$9c#kd0`CgOCa4|^*+h%ku0aNNV|2miAW{}U#iIR zT$eQ+w&m~XU6Fh?(wYF==7cjNU3-f&<31b@X|(1|Q9qD?Jfo{Qx3`>{WG{e{!z&s_ zU(k<8kepzVA1QEbT6*%{Jy_I@!^i;49=kAsugD19g$15Wei5pof`C?$FnWl? 
z%7I7zrC^65`67BAMNky5&I3+==>*S3!Or5ATp7u_9mV8vJQ=x5Sd_z zDQHYSWx-ReK=X(Ns(yyzqO1Y!S7SwNRv~qBLHHHaZ#`q;h&kGN4+6)iVy!3-&I~1A zS^)vY(lHp;mV*KG9OQDG%>iG)uSRbJ;yuQYz#GqP^yB|NGTPbWgfi$k*%n= zu15}y2n~QKLUm84Jivpq<~}}e%{VjaUN{swnsF9q3-y>Lh8~a;I_Kk&K!&(7&Z2G9rKk5LT4Lez3mPKmipTSZiH_ha*zX0yL$u zaaQLcaXB;O-dxa`92xIa4acPe5bv4iTh~_qX^$|z(uS{{?XvfoWIDY|zX8^MHYfJM z*2s+Kr!O6ZY`o5~QkV;5(bM$#7N=%9Hh~fHK}V8Dbv7b5WPwZ}IrgM^ySL~Fdng?h zk)cj9Ip3OYwq)@+7~cQ0&pvy*XXkj#I;9w7b1g+v&6<72-W7Se*Z*I~&R2*O`?4;z%27XGnF_#gT-| zN0NaYo4TQ~9qP+!$fO>dUV6(8`Jd3ji@hObWsxa53kv+o;XWegb_se-5CJ0e9rkt> z$lf!}mPq(s5i8>YKw_AW2IqUm)@O1+rMLLMw3TJ?srLw6`J7^XsR3t!ea|EG3^ags zhC>q&sC^*g3M4Vo>O!m$qx2q5O#l@WjzO;t=++b8eI7{U*l-x^4Pjq3EkG`UVXm>c zFb`Np%rvK`w$?eWbFKF(=;`28Ghv*Zeg&V#=Y6rY>U&Dh^f>@XLczqe{oaVluK=Oc8OP6^R1|YEx?+%^|YM?-`S#e(vMToC5WX zwC7sy&VxCkDL{s90?izz>C8yusA&axv=^TN5~^Y22$(lU_~`cF9Zt+Xau}?M{mq%- zNNtJCB$ssxL+Uxpf)k6N;Uv(b_B!w&va%MbnS@WklT%X~nn)5EKqeq7*^T7^rMLD= zfK_T;$1OPElCH}M*b?oTz!!@zg62N-Rp*LbONZ{g=s6Bm-Q;dLHPs8usG#?(K>+)~ z`G$LRvHBFcFP^FQA~mM|nQKbN+w1Vir5evO$F@a&%T^IxL{48ykaV9(D-L)9+jPsEZn&q74=+>Qz z!2=EfaJmMdnW-jc??dD9{MdVY##|m15$r4zKw2b4v<$G*LY{-*Sz8abHCB-m^>Fq& zJ7~ShP!R~=frBLCeQ&qTzzA93IBDCBU@<;5D)-bExmN@dQp~uuqn5#t!@7l_n84Rq zIxMrOOm^ayoVW+MCv)aWs`af%P4nRUrDGaIY1l_&_!R5=j?{zArF%A?*&&XCeL?Rz zt^i2`@V)|3Z4XFCg*#=lNbj}Y2qy$DV!bSe-ffFKA33+LdN1p2{*Yt{Ob#o3Lf>%) zU(%r|lq~#;DU`m0 zx;)jT;Vz|t(w7D`J5`*ufkYP+T*|v-@EFdiERK3^fEu6xZ7Cf903ZNKL_t)?7BF%c zHVijVU!6e;^=t)YJe1~lzVn?oGJu@S6WP7D^|;lH0bs&z12P;@O*62OU-`;c-g4@J z^o!%kyWXRmnYksx{9CNkN=C72`qqBSNotH2I+0di_jw7$KIgFDnn%&E) zgY|Cj-EsHccRllZwngNa-Vo)Lfk3xQYkygXM!Fg0Pe?Hp*x;iDl7N+hf|a5a)jzyS zhqI+n04E#=8sxPZy&~DO4!opbtL4zv6L{10jA7%{m>=@onJZh2<=(ZM?69!_q9QEShq>>y^ATp$ zqYgZn5PJoP!5{(;)u&ijiiHdSQ51_VQuDyMGvBp2nt+L10?nKj&#qIGNF9J{|A;1@ znew=I?e=``TC~j?ix^6GLV|Eci>l4`vON!kJ~$8(D_W9@6egZ$7egVPT%&{ftJh~SvHw>9SAn%jtinx1S) zBc0@f>+d-OgswqWaJa}2eZ&d7C!Hu@2~=`?$qw6~>mB`dsaw|Cp1kDL0Myd-bU!ms zWm8;pX#AcNPEV^O;({chbezyzdO3oN!Tsb=0cLf}(!Xs*qNxxn6AA1pg(lMIqQYXr2;*5b47ild?h 
zxkF>=MSGk~iTcq))|;*tvEh&aT{U~K_x!U5$Pq(L?h#XD5GWO4KrkSz?uj%noi}6l z9Ha&PqE5qjwdQ`$B4Fdhu5v!Gv77}26n!ZjcP~iwzMND*S=|h0!`yO2YSM8}DsuM& zo~@Jd&BA)%3s98u%qDXjJrAI;r;0dvA3Cm|Z#h#FjBa(hjMK9bVn6u758gG90+lPxp4DTsk$@n^R*hr&AO2ehFaPD>4dPFC#^56d67vtKivs z`K;$<`{^QfO1dPnO}PU6xTUM-Sz~kG4wDu+lEGk)(8JZ&wziQ0x6Up)bDSUL-m;TF-pQ)L7Z)c`CSUNS{0qD^^2q^RaS8M=s z&l+#Nw&no9u66ZA7CHBvQ~RP5r#UwqAdIy=g){+_UH@s8o;e}Cj*Ek3}}aB z2GZ-Y#)+1;h=6eqDN*!*J*o&+dX4s^{=e2r+_(dcfx(Cp@<$IDyGWFjsAbK}*738#Dj zp}}c*ueurQWdEqCa_k1(3G>jE3Df4pcI~lgL1B$*A?-@z&YK?MimWGC40|`>0jE9224*HC!0nmjB0my*2 zXGPwO8-oq}0{|F60Lwr$zo{TVW9QS5w|nx>x5&nF$;26LqTsR(0P4faC&m()KdX_rBxW_0y@L@PHU=AytR5DAnnVCJ}36 zd*sR`jY0cUiq_2T%5Ao#s# zoAg9~CtsXgHmODudsBSV`}8SCRk|{r)Pb*X!Axsi+f%(Sa7nNEJcr?0iuBt3ZrMkB zjvUx`9a8{+vD>%mIE|6Bipb?a(znv<%1~)tx2*5p@qe_>wXT{-sjEWTiEurGz`P{` z)s?b4KsSBKMzY0g>DXU5t(Rxkyufz!tNmnOup9K?KmF4`y`Do;*svnja`pFyfp2Y2 z28NO({1pkmdeT8`!fy$okmcRIRMKcteEmr7@dQ`IASW)+Ym;e zSNeeU1}-sBA}Q`iV5N)!Pu9raIhl-gC*?A%<#sc8-kkxZFkB}(p@*dUd)4cA`grmcGmBV@$=%)*eCX$^#g>ZhT2CQ43PrPe*1&N zt>Dt0HdjS7h55;aHRkkS85t7WDTwzmOP^|Ij1u+ z)A=g^U#*z~GVkHckkLAlDV8 z&`BI#V4MCdQXmt6!(zRV8uS1?4J_8A$J)DJ^@BM-mC-H!^QTHIqPxeA&Zkt5pf|8?6!_u_SP*KFxSSZ#>~1~1CII;5v09(^u0f4 zKK)x}encQ}ycLI&+u4D0>wY-${lZ9U&g?DwNz_v6`C2RRBggS6-itj!YOo_5L?jMd z#>T(ds8_0?AqZGy%(i+9DPU)v6X98PIFkZE!4bY!cW4M{AvS4S7E;O_M(oTKCeNMD zjJX14DCfy=EjJP63?MOb91H-7p<^Vyhrj#kAU@wS)OF#a5PYT+;5k1)5J8@bj^n`K z0c}9MND!k?gzuR^yEe|4HjahJfO>?^@&J4|q4PY9BNdQ&!D>qRsq>f!Brq}^Uvq_3 ztpK=n)_PanMTP#H0qpWDYz5eWRKQ63#}VC|j+}$;WiH5>v>$t&ERv_sLk-PbPXq`f zYfo4!G720n+tZ51&?%gRz@_V?97^Nh1WBEkj?AUB(e|c&oSbni>2>+!_NCls1?C-I zxLp50E}$~ONsTD%FYC#1Ft&S&!;hA=97lSY&Ql*`9^2=@n0L6RW6>?9w&?Y2?y<() zw-=<7ax&?LoSCV|?QMirhyA&RVel)mL~{wKr#;7(bmF@=o87Tk>)1U~JrUwvdYvAj2h`5Nk97=tcIiME331Ez!j*IYydnjY z9N(&;diK12wCAbow3m?H_C?KP%qbapDTk&MFkOf!P@(Rl4;vfRYluX8!fm6h3t~nbu5RmqR*7KeQNk}tk!{i zNOJ%P?sG^8ZGZu>(m3upm@;6;f#O&)NK(Y4Wn$Sx zQLlBe%%L&9nR*8B(JRgy0-B{ti7phG>3!A!{@(e#^tsZdMPxW=nBp9)OI^U0&Pono 
z@498|&ClAk-CLA`!@*Ipz8FqvSF>F)f&E7nJws;MS$YCCnJM>2jp0+ikM(B5k=1O8 z{lktGxqQ=`uN0x_@JIj6o%-5ls$-&ahH+*g|adqwPcYjAhSf7$#v>9pHdc z3|NPd9<8LOGsDSbd;lKCEoRkt02QHfV1v=ae)(sUOBoPm=U{MrYqBU07%TV2u=Ow0Xg97%q9ioLh&3{l`{!o zESp};2$y&Av$bMCHE&>i727S(Qm9i92LS_R9b=!i%*}gTW6W4zL;x8SsW_t@{Cm9H z+B2)8vHvjUoPf@wnfs2h)eiT!e(Sg1^dlKIM}QchFjKki1(lPr?pzaqME!^60G;q2 zcxgY+=uEnZ&L*Se+WXX+)n~0UQ0-kVb^E#$O7EF=wdimi894-{ z;Fbn@Pg>P-oW^AS5KSVs926;6rNb>F73P=DQo~B0)0K8db_CC^s zuJ<0ulMel4FE}+EpWY{3>>4siZcbK95d*WHEU^t)FsRQ>YSo1dz?6S_* z)fxf)45*Y5@?`8Y_0zQgk;tacXw`gny5qg?e(8GSuI>Ur7j+N~K8rLtV$4U~gDe0@ zq6X3*mz|S(&U9!v8`v+5HizSgjzNLDm`sEW*+F8-F;6y2H_fSOzt=&KV+N>dN}#Q@ zJuXezKT%<}ggz1-SVofo?|CS!cR#|(Y3=|kfK|;>j{dFG7oQUewKjC6`7mydp8vZ? z`)f-I&5XuyO6gmTLD*HPr1q?Ry^PG13L`=X^h-Ti*Z${>c-No&$)9{% zJ72NTHI8AI>^o!%Jz`xvi)>-l{f_;J;K`=UbjZE?z_SosoL~1ZtKd>SiuW=%OJZgj zN!v3P_W;Z}iE2IA0**M6>yp!*j@6w}sSv5`Ix`(EUSuZ1x2PK#^RGzTGo7wy%*C0{Xiw<2X)n`F@S{0|Tj-dViO^W& z+ROrh!1I}DEVY{jwAFK9EOTgHxy#vPo-jVZCIcjV&KZ03jDsnV6^>%JI3ScgLr@_< zp#sdYqZIQDp_)TKGfIFICXmrm$HBn*&Y3(q?P@a74C$FNv}N;_>V#F$(aCrKN^=JU z+}o*V%QG*5Q_hRD56%Z!F;Aitz@0VncVN((S#ORq1tOIPV4Pj_{2l-!6P$uy_=R8C zRP=k#*w4B)A^>3mB-;OHoN;y5`1jt|nsHEpDQjt6FV#E+%76Q}fBQ`yI2q>Zp5`n8 z>Kujn-T(c)8Z#h-{+0XA$qcYvaxi=bI2HL2MFjXo4glgJ*5tB=BHo1_@;P0&{PS$- zo*cUB#t<;(qzFRKz87qia%R2&b}eE+7?*w{$=A!?EcctvNq) zoOM65cI<-`L!H(Dd+$kKi?(VdKO^kj!#w`yfBxqiIYuuioR>C-pz>K?oFcZ`Tw|c! 
zhaLwi=#^@O>c~m=yO(uypCUA+4i}**{n1!vt;{1bkI|K8#*vWm!BGrkURvL!^Wq&i zW_1C4gjFXW?j<_O7I;Vdu$KOuoT7{Dtbgi5IL36g&wPX$-@WT~jfAo$%TzOa96723 zC)^`Dr5ew99Kn%exi-EW_v(sk13u*}XSvq&$==Ir-*<@*=(keLKORl8Som%e-{ zhla5t32=#Yko?B zV6-Y61#*CZq8+~5po~?_>fER)e~~1>gHi^9IW&}!+<6A^$V0<8TbM|>^+M6F09F~P z*m02!VR0ag0=6-MfioDJA~ZtZoEOom9A0Y&yZ}Jf#aaO)Sb4zGL4ZIR0Le2o3%0^%kAZimtHW*;j3A4%B} zg%_o;|BC+jJ$>QxQkSLedktBv zD~Ist42+MUyOv|LXF1&nQR^d0%f>~HJ|Z{w%tsM_0J~0sb$q?#G$#ws8s9zd@)54h z2H4Z~BVwjZA((_de<_Cs@TGJZ(letaCPrQ_utxCYV*{6GL=4Up6v~kkh`G@T2CGLg zN+oEax1=b{F~%5DxY#{KSmhnh;;orU8Hp_LgSu{rkr3AgQL#SF?OjC8Gu(Q z`cCo5ot9Q2z0rFEXymTL;T~cCF3H(s+W{n?Z945S>8%|AB#bYAUdoOG4*{Cmv$`J# zmeJ+#06z{aebw$}*Y+MKPJym86p=;g0q>D_eWq?$-rHI$AF{g1AR*q!Bd;AW|91i56`z7Qgd%^XdLTE{DTA&?y`tt%3nOQLI`W1LLlOBwheRO_5!sm<7 zAX|#igdOd7y6@#28UTs`ICJCL6gC4u!x6yv*%*{}H4XPV0`DOZ78npPejo+=#(Twh z-cy^>*k_6vgG>ofK6ks1E76M);VSDgA}Xka+Y?&Gqe;{Um}f+&MT@C2=+YJ*hBLF zfo8;py0s!y-l>Cy{T-kNij4~yM{YSZ_8+IS=vn76csJ`j&wr`I*fpJP>>{rs2An63 zs7M~CmxD`hu?eCIqBY)Ujf?^`kYm!lI9SNHvvqRoS?lO;eB&E$vUTgw(TF_jVvRX} zerNpV&3AHM-RRQEcP)8ghs_naGFQk_DP}cGAhT>KLW^BAzZ_*mmxy554({K%V5Q61 zQ?;7^2x;+W=*Cha=?n5L9Zx%PcIlpW=#P%gTGwF4UO|4a>DDqVbEJd*=y*Sc&zp05 z3qeko$PDRxFZ&aD@^TIh0Tuek*2Rb&Ef^_oM%_VIQXM!^*U~UvSr}@ut8h?C(f7aq z{kMbMF0q2w-h(oBA4Q-HI-o}Zb7rKRJaRB)&Ld;Z@Nyyvo}U?HQL>{79a~pXJ_bpF z-m9cTP4JeSX@$zUVBi?hWha+Mr}J2yh4e8psqlH3rsW`B**nsrMTr>)Q)_ygVsZ;S$5T$J=3{*#@xTcGw5Y@MD_?9VSm|25e_fs&|rsn-hg@G z{%fJ#M<#eWG+I)Y1_WHcLe0fJ@5W$QWVI<6HO1$Qq$tH~`+4b}8F>o1R1pRolcmnX zL3*5xfZiU*(p&=v92iQSVkUUf8jKGRI_j*UnPSCxdL@U({0H951(3wRVXm>w9IrE? 
zG@c{XP1KIUCF6{>wJ%M@x-!OOM@wVrFPLH}hbigC?^+BoqYc=7v~;uffqf%AQ7R1M z6X5z(4NAc8Fh|;+;B+Nd zKF=-yF`nmp#%ynmU;uSG7BTEO5B3cM3bbQjrF@ZlK-fF@oA>zjU;p*Dv2xZr{oVaJ zFXr6*$oHqCMOFO1w#-t9I1N6_Il7kGcgY!xh`5xd+V%Gq9(#9yQ96$r98SK73jG|2 zGky+@pS@3w6|UI^&X;vnKf|FxlAKL4kr|+e9s_n~t8wF(4(Xo^0sGw;&3$X*^Zv1e z&F@lHYQ8{kT4V1;CV(vW5sBfbYMA0a9r|248(c%i*j}%~O#0Empd?G*Wp=*coo-w{6d2Fg|2euTJsg`e^H+9DIKd<*R?+7y_leI@| zasJz*rFXtmgeJzqfY02tfow(;GscMmwx1CWKUyFyw8GJxF{x(2#jR+Q&rp&gGL$fd zEIo^o79HbIGGd)<`$%m^&ZD-kgixL-b_2lnMJb%&&;SU}U<2=2&j4U&A7Ku$a-16A zk__HTi@4`mUCW8WkaBPVO%5ei1Jhb(BG+>1n+jk_6D90M%vG$DWTFA3VqqNf5K%mci zekqGsW`Ip}Ho58mShZUmXc5JTB}}(Ncy+3h_tb*i001BWNklz#bedDWOYy zm<|V=IglR_r>yCcwZEGygaWxlkeVx)XIYoGcArQ8UfXzhiS470I+S)sR<}luA05x< z<@5F*LX1NTCzYmOi}kt@zKla-VQRgph>&sQeVniE5!j&=|DVV zupWRtC)oIOjFCDa?XwPN#zTIv)!+X1x8IDqG>KU0kEpw?=>xC&ak?jw3YBF7vy(e$p4By2wB}54chrR@6~uKsGlg=2psIhy9ut zGNp#tJk8oC&sVR340Jsm{&`9}D=GraiZEM8x=d~FS!D^AWH6@+wqiTf|I$Y@0vG}E zsYd$u?#)L(M+T6t>Jmkc;idLy9a`C!mvU$b3HC%M6pBIQ1lT>IV0;Pk05)k_y1@Kj z_U>&}p7gBacydz)$$JrWT-%ZV2t2tlK9h=Mu^F*9gT5y9J1 z`E>G~ys1=oS5;S6_xi83V6iq`n|JeUe$Vy=;)T%x;Cp$GZr#vJ_e+D2A|RiWhlp{* zP!oM>(u@6}2 z)~S>;1`SV^bhdjJpspP+8VAy(5AY0X&1v0@)7mf)c>MN?6tpIyDW&|`UqZl|7p>zZ zjPLA;&@SUjfp8tb%oQ-<2mk`=ShP2Pu3giu*OxY<%{RHj=*vJ`0xqpBA$n>T`Qw8E z>y~AoMY^^aNL%M~?!Js#X?4a!&Rsg6DFA-Ss3uFk``z!JJnve@OA*F%b95U>v_^nG zFvznyrk&vhKn;D7`od#}%Hcpm(U58+0}ybK)J)>`K@;wXD6gC>9>oG;2ikm}sEcaEg@@DzUQTi-gR zG4Pf%Xc#!p27s3kYweqjUrx7kV+d-%RN5FJ41h{S6Rsr`fC}D3bLaJ;bO>h8*z^3K z3>s+%9XJO78wUX6&_+?4X~&`^^Pckvj%VI5n4WDv(H;;C0P#3e?ji%gpfEZ*q*(3S zwR1m~tkG!XQN-R!A;wdsVNj@)WF2>mkHITylSkG2c%vCIyooRE+@`tp0!jflIq(3e zJy;qQ28^Wh`No6z32jTAj#^IWy)rwN%m+O<$BKNiU*u68K=(AxmyG4LU4uqA z4Mp)`Sy>}x5!Iw4nfz1+jg0~@gjc09AYP&G3cvOuoT))0FB6!5wwjBjgj-4pPp0Su zF9JcWK-YRQ4y8u`TncvyPK3ehNoo0Aq53_#&@?uYpK6tWz7CoMUL1^dt^MbcweT+a z?H$hgtovEIjwlUah`uo>!v^@JDD!l$RgQB;9)1!XV~nWTA(T^w6s5fY!gy18o&Xq% zoCluKSyw90rd;rgIlK$G+-SyJ$s1l|=}k*ox050G=95o8In}VK3j@+Uuk<4TNjf1z 
z=~3t0G`|YHc{Oi!+>j}h3&#n=188U9;_K;Qlv8>H3>eCuA>F6+oKl^t^->@$#f0nv zhVzC#Q#3FoYK}#1IuC)o#Xo3^=diP<{JcjE&=UE$^cr)K!9ZT)cRE9Q59iPr*_)1Is;#u1;q{`T(g2mxv4lhDQ@#t@E|iJfPc!DmoRDn8Ipnw1-#U zyF}xxzjOtH0G;r-Qid6U1T#kz zO5S%oevht(YxeTeTmVVnO%#?aun+jczK|iJwg6=Z?K0#T0y+@^9{>5D|9N^mT>^xY zYhzu=9^<_A8T4j+Th}pZEgTe#bzWgQltY4{j;5`{SX+9FAv2EOSVS&4C9S(CM75x` zeui1dW3>eM>|N;07|+brVET*`PJI#QhCPzeLBAgzoe^zJS93(ZMQ3j1zUB9)bLeNk zGbm*s`F)C!p3a~#P09`M=8@n{+}o(B$c_hJQMTePpyQ<(H0Du`FQ5s4QZj@ruc-(O zj|MLSU`2uQxB(3e9f#OqObW~Y?$H@+j2@M~L@>#j_cviJ3b2RQ5`psw%C8mqxO8~n za(24Ul@uv2CS#p4V?^-G@#t&!$@8!m5a#`Jx-P@fR|S>mAt0%67vP{!c_YyYBS(#o z`)bch;cJ(v5d{w{!1B_)RHdDXIN=$&@k>(v(3^X$q2I@^Qno1SypQ*xq{~m`xDa_0 zb;IBB{MOVha|QSqegIYnn*n z#O)1W#^}nP4K1eWXy}7s!@(qtOWF+iOz&`DjOmDbf#W^stW;F#lw<-OAqr=|$!d{x zX>?MBO9kP@CKt?EREF^*y36>v)<9s7F|U2bxK|7~5fTOeWHY)&!}9fuW{**Xz4xjp zjW*5zy!XQ;wQ)wHj71GhIO;ek8T{{p{5`jCVF$I1oN{!L?0};hgBVxQ8OIrrPYg{? zoO?C4SWCmN;d*qicO}1aQjOdwK{k4|E(w_e=ND9-&3Lh;dG5 zpoRC|d+$`=h5p6{E%)YKbYg9d!=a5F1GUe#*4Ee_SX=zXVP}o^YT&bU-Hy1<-k`m) zsRVsPKaL&f?mZT?%^Aq)N+)pSSo5t8p?i<_U3@-Tpf5z}U=|MF9QV^Ql&3XlOoFmu z)bMHnzXq?Btk8E}lPy9fMGwI6#`1I!7SEibWIZ~8n#Ye~l;g|$&Nz|A02J48rTpH} z`{)@A8o(BFiew0*OA%0HH=a8%wS2cS3U8fAg#A-U7f|$n9x;0$_3T_HptbKU7oWHN z*}HI+A1rTLx}P)@%*x25u&U*_6$qerjsd9?015DEzX=Cmg+VV3h|uORAglp=h9)|* zKNZV+mQEv-smM(ZhP~QMuifvxuE$5f0_On0Fc)1I-<3hi0OVKzjHZzq`_{?wbpQj@ zN3ZIwdwXgAWAh2`wZ9CE&N|w%cIKGj;hE-dj_4N+@TC6W2R}Ha@By}J0m!7$sZiRP z)EVnS29fQai$?t)XweQ=?N>Eidqz^~`iSm<*P=vIc?o2er=5|qM&}f{^V74?3Hgn` zfL5T*8t5j5|DOq*ri1YYxj`38PZl{t^JKWM*26qRHF(lFI;wG4vtCpexZ zN2iCf1M`<{p^i?f7umT-D)y3b8WTF_(BpJeLj`Bi&9|ne10rnwSNw2GrqL-1&uNU%zYVThRA=-FB-bUc^ml0hJZ=tOzNffYGFLkdmG zlQN-1C_YxI`WOtJg{or}W-lMz*w1xbAp{8tIjC~dfep`9Qz36xdPhdUUTZ*k^GG^{ zUNL&9W#&gX^S}ZzU--foc4=`N;F4M+RRySe*22jSOcQxTyNnt8$&jH`#{%nH*s?XT z&Wh{sj&ugzRLWDqC2;DH)9Ob6FL}d2FuGv87#G^&V4z&21p%G!fX7lLxaX^2McgefqFN^;E^X(BSdqvb~!V)zS)7BQEi$UrPg6OTyclUhwKccAuO z`g#pL?|m)$!gJ<=FX$tO>T;HFuGR9pbB6YQ|MYpCi)f9>YC5pajC(bxy7wI3h3CVl 
zc$L0@TkQclnZq4V8=)D!7MR?mC}@SK6tuq)86`jnA^Ne<^^$Qz5L+C~!s2$80t4q! zH5a27&%Emd3uu>YcHMaqii@e~pqjNZXP3NzoDEV$7&4+e6OUUWCg;|m6W;*= zds@`#R^uRpM(vk8en!U_LCNziil^Nzpr~GCS`aS%is1tocndaj-q^@n>$t^JoUHdgW9YEQYOS8FaDLRoqj6*W z<5!=d@A1IUrTcNzb=KUTYg)je#({%A%lJr7>`iZL<~JU`Oy;UpEd5xdfTNDyV2Ho8 zy5Do_fPT!I69oTBktMSjckltn=e1VzYu5PMwP8dKUUESrC641IwUyVN{r36t70CHT zzj8o{R-yyCckWQtr#EO!eGGleapw``)uIq77|KyfjDlXur^3a;uv3uld7h!=DrLq< z0@9?O-Ltp*=+Wx;(k}p>Jb=6}fA@EPcS=oQtaY9NZ@82#O1EO%rL7~qPkIp`Yuy+` zgr$64Yg$*8TZ-MLVdLJ(OY4hJ8e0Gp%76@aPBo_-c{muE_8Q$YWUUo&>OjM43K-E+u+WP=6kS7a zyd{hy9y4_|YYwAaw?emHZSHx*uSG-3m)5-vn7-B=!2SWGc$h)Ti12%DLt=h0`-sLiRqkD+s`0d{WPjM08DNXehNOp&$tqlOt|Fp%rZ-ZH$5-M-%{ zD>DMTI~}#h*;8w1W$m?%d3s+puV{OZ7W`|+wd8tZ=WHNrIWLTp{%16g5v5yX?UHd^ zyT)2FTsT$84?j6R@B*1EHU8V*{`LvjaT**6^skM5%jf26Uuwoc|8owJF?f%Rl)12n z{8~D`975w^r%StEE`7(_b=Ow_&(dMi&B$F?j?=v;`HM<98>c&ZhX_p|JP)Y|MhBSz zC6xEC{o1dcq89{}pG9v7E6*a(gfuac=s_)ODc2PAXb7DXa-)$l9q;%z1xSJBz4dq2 zr&K{{uwog)!I1Q2CFAPj}JfWI%10i8SYNN)+JW^|6V z6y8aBB#=J%;Db+7DvAI|T>*&qJSeO~`han%L**cEnb&oeyX}eur!_*OqDoRJ88iUX z9w*JCIq3_GJ7?0ME5G}xev0?whd=z`NxAd>QJ&I9C>lmE+T>|RhxouVbU@?zm!h?# zfc~~sT7NhC%EJy6_+8ZNlHt&N8GUG%A#iPOd8*wz8ZwO-ACpz~K-~mK1+Z^S6f~u+ zG09$quI7!uIcY?kd2vN685Vog#T5Y+q47-TVN5%a1sp}a0Wd~-MsjkL6AEY}&x+7= zh7_j<=h8hJPP9(+2B(2>*%Jngec}D{g+D3JTKxJMJyEXHd99_?ss4C0m|i>nu16QL z3CI(TX^sS+YsPGx^aJcrYy7bd^O|Sf>smB1)>qQi9FcH|FKf(^L}uXecA02p*C5xFVHH)TYr^R@zq05`@5Di|~X3xNUDc?LkKEgXT2 zczpwRhl2Z0FU+K+4<#P{^b*(@;>vtrMP<(dgBpRmeOP_h1K@} zU7qLJ1fl1t5xE9T_hZ3zPpW;h^+9V2YAGlYM+RmzyA|jpoPl#Ge%1rHC!`seK&4io zKC@qp8GGXW=$-Qb9~EjhJMCEkS)hQ2{uWR5y46~Cb$~zwP;|t+@U_$u>v^t#Hl79; z)a%)knju^R3+Fu7MTp7|2ZkwdvWkK8wXc0`w^}KyLx%nLfB*MMw(&qSnD%UQif@fw zx*VW?E=|{b0bEWLsTxz@Z=5+)L`cX%22H7Pz{Iqfz_Q(6eic3-htW2VlP8<`KX`s%AACyaWhhn0-$SG#2%0O`j(7A_o{SoXzBmR4-isrDT@{aKv8WdC_9dP;yIm zwW8?P^uVp_@pk`DuK+%Umz_alFYLpm&b2SjpfTONT4U4(5!Q``mrb#=99aN@5TJYj zdCbET?i;v~YQ@8diG*==JINqIyc8BjQ&d|XD{x6D0wfCF6&QNI_W+FE<2k-4eVh3wxb45ir&0;Gh2KpHA<-N9)%;gb&(aOi)mM=iLMb7=OG{TMs~N 
zYzb$EESgg{rH0Bo7$p>nw#~pKo^g#a@BmUSrQ3{jNWlXAz{|5mXo?Ql`|o_`J0~9R z%nuPx^S$I%_qqL`Pzg`X6K-k!JtwQYgRCk&XRmW=N>vfnAy`?9$%}?5q%L! znd4jUCu>EY=TgoXG~@&N;&nq`Xs`&Ov7ZAm&nQYmPOE$DtT@pO zh76!5`dNMBOKS{ZA7C|x8$N=C;1I?Q#|8(hx!tQV$hp0N-()P1ZPu6Wd{&3(R{DA= z_n{G3T80TMB;D*%*2;@CXr_#xfXbUzPG&jnMICtU0cRd?E%-z~d=mte69J(J%^4fK z&_G9qM~bVwdGDaqyu|YRqI%&MB-s#hMUQCo^qH3ec%75S>wUH+N>mQpTH(#h&o+ zXfY~R-&~h^+n2s`kL!w@@v@<}FMa7tCqqJ{&iAd262|AFVNDVglG-zAeuM*HX}@&+ z;{-sDU;p~oPidRzcI=#I&Hy>^>3(|~jX&CZRb(b(1|Vj@5rm8|=|vnI4l%6|e+p=< zmoz(`QOZ`WkmydQqTjOTV-&}Ga&+tg_&i%`n+Ub&5(kVZoVjzd>;>FXFla?8qAq81 zIs8C@Jb3AeysTqK8uPXfA~^0vgJ?=Lmp7D?$vtzm7^N8}+wu9FGo}9}Kdmw2i7dtk zzDF55ea2kfo#z4fjC?pn#sTMwR8IPW!@$9xYW{9HTk6Pl9^ZvtIE6VzIFG_k zoW#=2%#Hlr%h9=Ho_N~&dav=Rp*h;1S~AYpat-;ei3evZT~_(PxlE!LZ_u1GZpNJU zt^w*5p941twL{$~CBlY*Qmf9o6cI3`sTG+4{yf7yKzg1L&ncZvDuy%#01_~zw52vo zb1+L`m@3!30EoGl_s-M6d&XnR+an54OXDr?^ISrAIw$|*MPjH3h4Z$|dAaS8loVcY zAYSxnt0yEwGjF%Ov4_0vJY2>h?dle9`#pq+aWl9GP(Rf(F%Wpx33NdE7SG#g&@hhF z&UlZ4?V16o>p&A|R~u7ab#$R|1jA;U&d``S2LM%(zRt|q1DtPNXZtNxKnfj0T`DCY zz>&d8fz}w!00LuYE;52{q#^MLslmeE_=X_evm069W((iV0ihfSfBBbxdEy}+^4dc0 zX=oZZxhAUTycv!N^vVcM$&jP=!TNC?ah{NU0X+M}*~etbv$u3l@5{>I5SX4P<#YNCEs%p~cq-%4HRu`bqqEZOa(vQ>wZ4uv?O#0M zr~h{lc31-7cU~MPi)c8yWytuQoQ5r4Vy81}#*kqEqZRyS?&hh3CK=PYS)FHe1if>_ zIG2d=NaoWOM;cT|%<(0?!+vmz(HHbuPU%-+(3~@F`acDzjzyOc5gJ}hp1Sg-^PuKw z89j0-u2Esua+3lixQezg8Yo@S3AH=I!+_OXo6nwmPsPZqDX*U406+jC1mQU^^%{a7 z*T{YT*8BkA0cxys`P5TjPq|2nXbXd;RH4!UDM!W*BNsmaszUDmN4bbl6y|L{0DwJq zopTZbrd!9kWSuTuWAAzW86wtG9h$j_l=5J5W^iE0MZXkL@*I&I{DNOK9PvMAKV;P0 zQfGE<4#}Ri2BG`RR|+750Pxb_1AmVP8{WFU3u9H>4$mi_hAoYJZh7*hzDdWJP1qE#dw0Q;OMVMWO* zWTudK0OmUMTnh~c-1F|#O4>7}Q1}8cq^%S-eN^GJF%sB7f~ZMeo=e5i({)@5wg?1k zA&pEiHn5{x*XW&{K0B|gS{eoMl*S&Ny?PJ9Y2JW5g`v>zmejUuG`8h)bZN~PT!5AX zD=AJPc;MU5InUsdcM;4yP86bNp-ForKbYZY&A`JK~afmLB%;{Zw- zHDiQ=qGc$F(r7GkE=6d`827ryIsyT&16hTwIFMp>+`*piI*Qyd%=! 
znG8PZ2X!0V(+M$+FfLgWV`UrwJkrd78~bO^_T-^Cd4R4MF4kZ?Tgv}MS&T;wm@bAz z?>wjaAs#n1iNJy{K&FFm0kyeEt$P{LXw}?{$hf~$on%!%$Fb+|DC%ykWCJ<|tZE%w znlT{Gh~f~kKgJ4RIe#gjMbXwt6aN(T+apzY2@O2kHRL&2_E&%PSEsZysYKIA+cS(A zEmZSEJ{UJ+LVIAK2@Obf@tHJSG82#z^#yX4n_ zgYU>A7rbB8i$P;u=?;#Sm$I&p*vosqvnZa10G%_LbL^bs?^PKz76{-0w(GoAi`uoG z<%}Lbft}JH0-uBsfljGo%<=HKdlJF)*#sh`4aif_&L5H9wN%NNU*642#mn9!Osv3P zG^DOl1l6UKDfihEp=VwvxxoRXJpf~%kDwx;N^QG$2cMW@mZ%8w%Tj4z-NsWoj>)fo~XFTMdv_;_FSw84DKu|;*fFUcA4K*wg)z%LD zTpQX^7F}DbYfdS$1)U_f$5f+?5#GU%KmPbsW5$pwy_T`ah-p5+sFvrVI{3z3;MuWZ zz#g*f8a+J2=cN&o2b_7F0MFQgE3jmL>1_LF-%1y=miCu1%g|-qplc2e;EJ;&I?cGo zZ#gE=6Pl_1Ft2iTx8#g3xT6%bvOGGt)xEv|f6)R4m~~}viD=Rnc+Y2y9d$HIvM!9* zJMSqU&e5WGGu+5!JXB2t-6j=URF9*C?A4s+s4jxWWsgPkWhi(zBk~zish@$bA29~J zl2ZXsFlI{oJSR_Il|hp?gh!4QE}F8(fsP4?ibTCfBnW^4p61SKIEp~3p^$P!*^1Px zDIg?d3H3n993c_j2#?_eI0Jj?FL+m$7*2o!a12C>o=ElB)5BaxI-z+v-aNt8r>r$( z@3C)VoeNJOgT^-{Wi9R7SgW;k3`HBtNk)eNJw*u!qX~*t5w_Go%32ME2n9vLOU655 z|LqH&>12D#(EmjTq&hL)pP@N`IhO}58l~2PAQYi9hf505y;2lK3ju~ab=Jt=(w%?` zLRCbn+L5CR!_kW|@p9T9zyO%WZ{$t2e)l>vq1w3socE9c3_rC>B3KSNCTGT0@YWa2 z04$7!4%7r>q{DiK`U_5nb7`k@k(Z_XFn67~^zHM}P$HKYUZt<~Su(>s{^LLXw4rB=IJ*JkZ~#%c~c z{h|fvmZE3e0N9t(2h2xA3aAFs)9L01$b`ZfDVCDTvl<)N>YS(ejZUO*rfJ_5cweA z89L-Ooxx}*L!#%Md-uKR#e4Auhcev+cQO7MavVjXBWmwNnPrkp>*($|zMP8IluV@$ z-_p(Qx#E|fjaE1iKKbO66aKLm93$5vcCXH$v5Ykb;5`DI1Gj5adM<4V;YY~$<~_}W zb1#r_&RfPqO#l+Y)lsZ90J}g$zZCLRO^p*mTf~D9GGAV6ghxo#t!3$afh3UuV+Fne z(mjDqxz1{vv=tSds6oIpL}>scp>D>3s0<}#9i@3uVoOp9rZkX{=1KG{xz;H-1~&x) zoCD-OV}t@}ydo3|!+@fpP2B7+wt*kAgRzH(ybEZT#%JwH)H?ZsZU{o>R%($fsv&t$xYaF{jbs z>2LfnMs&y)>v8L$#as62QTKF~ll_#s^9=QM=hk9$L|$62jDXV1=~K=(|L1@M!i(^r zbq2?!$kZ4mx#pxA!M0RRavLv^Pn=%l6+=N(0|-QK90nrOWIvrewpQO$F0G+4d?7l+ znMTLeh0!xO@-#evSEN8|Tm;B2b-b<_$K18#566m(9fp>yhOHe}Zyn3q{XdeeXhD6i ztdMZawMh4?G-zrsN#OEu0Fjr{9JUZx9vw_Pwlv-HtgZJHMg=sDfw8c4p5`+OLh}Mf z2zB82{rBHLfmwiNi(y!lCWC_@uD+=*bX(V<`zTrlm_l}QVKf~X*1k0U8UxrX#)p(M z1|6e|@VBl@qFv)VDk!y<1i{!1r|vl^4$+$qjwRehZln+Lx?3||R$XT(PKKA3tc(T9 
zlQMG9qH8h&w$Q+>`)j_&A_^%w!<)lfDUwoCj!VkCdNcAstxN|(j$II|yI|-s_(Uc} zOze;MSZ6@J;@;_56w^6Q$20z&5oNE+7l^^#nRJw zk2`Fbu}B_~AyVe>BiX^5e`FExt@Ji$@MP>|;N<;Z%e%LZ?UDDVcNnU&0kntT(>e9X zxwXIOUY(wXEBFxzXSCwS(#!*@q`W9PqjrXF;9w%d{O&VOG_smLks5uZW5fGp?Qn1d(df2_>e1=MkEHEq zdN({ITSC`T&Ruj|2f<5w@+u7)1PruE#RBpGJ}Ew1yx^CJpklvADYHB~JnM`D;Puv% z=grr1F+T4;qWj8MzH&;Fs{7M5kJGs|L$Cz5lphL1h+R(br6TS%OgQel9B+{ZU7@ZO znyq=q)@!Y)lsF-8AXp6oMZ{X3IFhMTc9alj818r3|p;Q88f1F z6z#o=(&yINTt$-9@z`IfV#X`VvScjgD?L=lFb0}6JD2j1chx&Z89HwQUjQB6kIvdN zz{hu|+PAi@vDWAtAYy<48lK6R%-d@p7$bPBqJOkH78s9fGWGs?w-lA?_}X9Z&$ue> zaw*XAj?!;(=htZre97RHXxfCPYxK~}nJvPn9i|Et?7W$%%Ecq<>j-T$K%D6O09 zkiNqy^Ne*gOZRIo8W;a})=X#1lyd6x@hqeq7j$rXIQ|=d&W))$8ntG0moEkE_)I;p zlpHczgqn<(n&#b{Oq`F5n&b$5N4}9eil;L+e+288zPYSHt7EIF+5#*YMe-Y+E1P$T4o#DV1{sW;~~&NG1&-V|-GMSY~^ zsPWjVS3BWCLF6eHv8c7<9uLdHepjaG`yjEXvZ(<(nUHtCM6wxWP1L$GgI-O0^ zlJe*St(|rqTQgc1G9qYFdd4=CKnEp*M!)o>Fa6Ykaps%CcIX`A0O$rztd(>tks1+& zngLi>MSL0uh>9FpOCuT}{X?r}3Y_636~j1|)QmAAq^UVb)5-SZiNt}8DcH|&2(F7=xBYUB{dH(wkuA!<^G!Unbu9I*maqQr%QbaK@;C6~y?Km9 zco>}3H+iRPIzPj_Tx-95w)9}zqE(*5BafZ{EdTfZx}c#K3aNScyxJM`$0_n>fA(j) zJCyFtvv1EozXH|KXeo@J$d^Z?MsD4scsqXLU;whv^$3~#j+b>n6cwi9@giOwTY}>S zfDEsbEv4(amQ3(jnF4c;ia<9bhLcNH1et6+I*N+ca30_*jyf{Ln8|w%LNda)f_!oZ zk9#kfuDv>^!q`M$PvZtHXut%AkZ~Q$P$aAR8P_q+_QYXJ_P?{*M2YDQG;55HNSj_V z-go^PJk{aWH8J9hV|<69@dE>Wn%4PhBQ%J@g2?IAT}rLO6!6_bI5tH%`lF9NI$8aI z|FsScx4dt9Z(&sFX21%;#rv_fqnr73h@a>QLKF4zb4fbSnz`8*f*BC^B@{h|$k#mU z(z;MMJTanl(*ss-dB>J}GiaP)c-|?~(h5We7!?#7<={EM zFXcw*)!~j%i2gfk0gcY(rc#2XrcsX4NPtuKNww0+$YFDee;HGp1Kw#KqX62nCQI)@ zn~VnD{@P4CpjB$dl9~+n3vH_hGj?l*S4F1)2Fe{!A0t?EMrFoDsit|7DirrQuu?&A z5gI}n=+7xoB*(R*7b=GWZ>w>N-1(|G0Y3D_!0`-=EX9S@!}f&!Sk6-fWkt#pq_drrM)jlgBZbPPcU zt>OzfL5e!r>I=V${Br1&PUSN)5|82KQbywowC+B}i)%dJH@+3M;MDM0>8E6J0M6K? 
z|1w-$!#OFn7VndF98Dt3_#7yAAN+xz>A9lRBKDjJ99;}F`-ES8Gd84PliOrRnF~dn zmPT)<6iFU=r*(ustbwm-{R|n)33ZP%zRq!pb+r~8DjJrcQ~K;ktoEE7{J2q}Nwh2C z2>Z}SjQwdzh`sxnw9eNWp|M>St`@HZ3;%8nEQ3ZHPtpm)Ec2R<5oOtZVVi?G(1I=z37H+W|Y2lIJaJ-XScBi`u#nI%k%V>K9&J7 zdVqTdaKz(ke*q_d8&EbUC$Uyp4YkM-)^<>1D%83Uu9`LVwM1}E#_vTRIfFx%`a;zC1flpko z9?7`q32O>t$S~k&BA3W35gB|-{xVKDzC52(4F8cAt|LbnL7vNqE2?TfbdT6#Y5sEE@Ad5WEhAm?C?` zAS_N8>c%9UDP!as!_-n4@bE>L61aOeQ5eVJozhrDgUp3Dx70J&0b!TA$jr@&AwYPF zD)5F;v_QVO@a8}AT$<6K@w*5|3W=d$u7p1Y#G}Mer3fh-3XpRBm0$UlPwO^H_fhX6 ztqx!`$I^=e7o{5oOVl z2QVP$@d3JK+?q46#YyA$(*819C~RMOWivAHT9G4Q2j3ZUUfOB^ukFD(gU0_EV(3+C zcJvE0Dsbk3`{p;ldFYuF*XHr|`RywZUol#Jk+aX%=?qPlrpxK0MJ)O!Yw-kqO*eEF zSOy1P!4u?5jX4+voMdD$*~(z>MP|5mEWppHQT-h`Q&g9X6=f&q{a?gK^t&!kYGOE% zzVL-FoYJ$5pI*d6^cly+4}bW>6AyFP;ji?DcNm*7bucf`NQRh;v{a1+jJ+B_b0GiV zBIBs`g*;?HmBM@}ijy%&CyeQ=Xaqj_`@jGD)3E_k&FGgS^>C?tbQgK2e&)#YZ*J$b zckc->1fv`_$9w4dTn5Q>_OIHYu^1Secb37y6ON(w>c~PU5D-8)Z^~G|a8EL)!Q?Ev;svk$=>>9v4)Hrae$gC< z0_?KXdH(7R8Ulo}70nM&KBe3falEMHG3|c|$O(NDV;? 
z1f7d^5Ne2v2gtMJ@E+wsF%N)kdKhMDOiKY4Ch;5xXHhylT0-2Iu>C%RZu22}fa?V!)h&#ii z6o_kcsn*Pzp{ zKZ_yDSqOOl@gM*3QyDbYjc&tRbzif8MPy6=EFJV`a`iv^+0)V&ZDD9g2Xnu=A*ovG zSwuh>3*=>;-loOo(+Dme^bMPIj$M&-Ybtsp{hN`4`dse z;HNP%c0{E(tK0(zc!vxQ1`QsqQHc7NYUJEwUNs$IC{-gQ{Z7j6h$pNc=jVuhlFg<3 zbGUgoC&$svWavw&#V`P4iX-u*%A#kM1xv?<=Fy<@XXsxB%^JM<>J6Hdt_2hp=k?yJ zg)@dF40y&VZb0vdngT$NSQ(z*T29Q3L|rJSv=2oZK2QW*?Onkr4{q0e;dSjUC_yQ2owZ{=o+y zoRlzS)j@>3ZGfYnj5Z3XbOizeIJKYXn^I-a0Oh3`T=PU_n5)I|9EJ-%DHX}P&^f+g zXo=!UiO3lBQ=#RvwB;QmGiZVbRWuV&kOl>ibl~df)UcME9_D-0*+m&N+NA4j#N*4O znvl=SYwv*$eC&5XW6NQzc+7Lq7_X^)-qOw<;6V!_OAJQq<=iJl!MxdOvWyRiky^Q= z6Wvm!c#rEw(An?!hqJ-o=)~S^G5qc^-e-6wevv*!*O0j)bI-Isp|kdZ_`Fv48JC{3R*W<+*GtKp0QsBx8=TTPz-!4QlT zdaK3+uJdgCf<9`OtbXRmbT8whTj^T!8}-_)&g^I4zn3@${3B%=?nKvP{mc@F(W^IT zY&r&{c-0zUQa|@dlOb3DJsxsVAJ+epGcj%EOd%MxJ?2ed z66_T+61H_?AruK7sZGGmt>@URom&PcgM)Rxjo+Q0Vr-NP4>_Y#h~FOaC}Dg`Li${Xsdd1iHFFPdnR(h{=NABz zKrw&{_`2S_uN}A;s1})#b|@-@maS)=!yGS)K>Z0%kX^2Qag?mUcm!?o4P ziw?Iu$6As-HIA3reig|As1ZF5F~%; z=p)a6$x#Orz&jaKOB$hUp|!VrF0TMuM>|rJ(J#!**|^3r_h!)iScw6M6fMxK!1@pW z@DFdUd9gthy8h;XBPD+CJmTtSI&^Ca((~x@@&KED5^@9-uMW>HK#HJ*wS{680|Li& zTUjgcZ)Id-Avh@v3Q{K-sVfBI956FSLX>i`{+d1z%oKnq2jP{n;Vo82WB$?(x3;yu zb&g{V4KElv@g*9?n_V{@dm`dzJ`4n2bnD|R4-vrH@&e--EsR&u8Q1ygUWSsCq&Z{Jb_kGFz#*CgRTD)f+0foj_-BkN# zugEj^8Mo{Fk746E_NsKc5p>1Z?hCx7D0Q?xe z925*R(cecKxzmUXImke_4%QU?O1nij)g_=8^8w_@Ab$gv(tjRxkn$2*I`W$IUum32 z*GC*RSMvnSVFx@=J?1ox+jVHl*DVf_oL`&-3>uEczy9mLKIv`rIHq*OJApV&;$q3(xx;ghs0}c+dOV+Q5wbv z#-(&itJxZUBus3OqI}-YI=zf}`Fj%W+;FT9EK@PkT#3C02D8MADAl)isCh+FDz>0J!QMpH@ zomKB*twj$QmW)Ud6)6Hh72&Uw4&}*EF#kHgjb|J1a@b3Xr%qS^BoCOVP6vrr=s5+e zdG$OqZ)%}}=s_;oDmx4youVc;kaurw%q(HT1S zsPvi+BLiB@xq2$k_M9<7;U3S#Z=HXHZ#=^~0!T%M0GuEE;0LE`8Lal#H_%Th6%n<5 z>hRF8IwZ#VlCy%guHD>y;~b5jj4&TwXye0w#^<_Dbua2As>I=y1KM%n0P9e)Q{b<-kjC?V&-TsnJW zKaxKGBto13B@jhacydGl7$d+YK$SWD>K5=(_myc!H}~+;F#HHz9{RORwAcL6 zpV}DdS^(#t{^_5dlnU>b^c_Zw2#2{d%wzxO69$I+c%**u z7k}~7^ucHY4UFFYjG3Z(wK$jZMgcKy#*TpY2)&Es_zaL`Ao@wUF~}-7rO+s^r74UV 
zty|XGy_6S_%HYyq!MH`fYCmqSoDU+JT6+Vn9k52>Gj`CLwUnY*2eG}=XO_+r-2xC} zfw6n*^p-K=?+)CwcBP^KHR#P}Xrj(U=EG1k&pOySK((%FK6fwKz({sY5eh~NUK925 zOmqqGRPW>coGs{&5g`qUj5?y7ee%s-k`-i$_3O|?G;1!_&D^blLT2-3lmm14#Anq& zWMFy@9&H`2dEdtf_tDYrE&Q|f9%~LbGg`@FhHQsxzDx7YiD-UkLtWk&b@Vv{U`z`w z$%Lrk7a8}VdthnaQfU6d=#M2ofFwq9Ld)M1PYP(D@Uy}(P@H#Z@+MKVg-}Lj=s8-aBT6@aD zB=v?&H#T49S0}0tgf&0opSGbjc1{yU3?2j87)1D~=;vtYP7RpqpYTm}Xo17M*I@0M z?RmZRSAW4tWUlu6T{LKF!ovYY{&I-c)ysaOJLy<-Rt7$KSO>MWYd+RnRzm|{(1oR? zj>g9L{CHyO%*^OAR=iC%cm@Xq`sOsGM|{~!pViX3sMC0!j^~pFWC9+;Uz|zwEPj`@ zLT)gk)EG-kb$Iqsj#Fb++o^#KUKZ_oS0jjP>FW{Khh^Y9Gyu2xUR%%nX3z|~hY5?) z$b+S%J1zH`*XYKuqHbB|(M)?xm0xryxLD3ooT=Vdk9+CnC zex22#6)WS4VKg?-h=)WoI0RfHO2Z(d0DYk|bP8D76FgB$!&b+^9Ie#i)EY2mtebJv zaV@%zE>h|gxbdmI68QpX%~9k*G{KyzRoa^dmPTlnymvHmMuW!h8Bgel5pB*CiF7gb zF6cpIhC>5riXQy59wNUCW9_cd7CI^=b>y?UM)}_e#K=WpVsBoHPTPA98V0oUP=GSy zbPbRtnxys&9Tb(ZuQ?0?oYo?`=KR0|90>pOKmYS-%B%UfFXMiUkhR`^dZz1)gWQq{ z0QliEV;ax=X&qmxk&`}B8$qsc1iWi4am`T^9*10VhH&5j<{9bs94)$rOvXFpt^G%9 zThj07K;0}k^w1&)9GzdcK=g7=jwTzXj*c82=+_tBL2i1dchOPk7cUj%#AEc1|M@Oc zf;>Ra{${+;UvxqR_++ULYK$>8znqXh+dJw#Co7Api5A-v(V3UfZE=a*e`(jCImR+v zhgLLKAq%%;N4*&|lilZWBba!fF(8n8OI|0&;e}vabdJltyi^FIli>*xsY^=_^K>tP zE05It-K&P+-Un!|Wxl-pQkVYrZ~yk>l_HcVh@xHz^f^Oj1ap0ki9Am+IH9IFz|rlS z(VeITI>F2$2ozuGIL7Lm;sDTix(Q+cRJ6nw5133c!(Klb-VZHzr zrAv{F?T7={DLJ20Z0Pypk3T+r^GHi&8`sNRGzQ>#lm6!X6sbkka%I#2%F-iCRkDYd z>Qa^fJ7cZJ!x%&=jmP*jDli6M<6Nyn(I{(Jjfi=>zIu~;r$meqmV1I)?BX?b2LLZwFgjP3@W5J`davuv*Se{ZKxaBz-76Y*uQ^mnkn;PjdRnz95k{oWK#Gd!x=PnGtF>byC$RD>0V<`{*$Y8B^s__Mvf6b z(fG(4MR03rPH%CL_&*)w9)~qE_B8zIoStjO*xGrWGiYSah{~Wz<9ViTi+jxRr91;2 zFqSzu$x%3&TqT>QB?8yx^k&dZR>3rpR7+FmVfpE`GXM_(>tX7A& z1JGaU5vz-pbRr=6PyXajPGR$BNHv=hF<}ZUDCkoU!Sh#f{Ako%QvRbS+`2IkN+lY* z)EpsP%?}8p_&@pNlT*~AcDVuk=0YJdI2qY~r$E$anGb;sv=h*EVDjuSodwWRBf?P6 zaN=zTDoZ2CGu=A@F7)A<-UFaf28v((&iEZu&+=xA(x7*q7J1U`QFS^1XmtzrG@~oe zpt0f^U%coX0i*Jt?r-U3((riC8TY`UNUJeems&Aj@&cyB1FZpsI)dGcCNjqCE02}+ zG2YT9*8rbuf`0@orHYJ338R&PF9pCp)IbEd7PS^#F|OppG{vPg9M{=1M1b)bpf6H( 
zYh9r=s|g6>Ma0rO{NH?h<0VZ&7_;c0G=`i6t;N)M^bB*yZyFwuH~t2^q|z`z@Ek)o zIbW>=BhcK@E#t8Cv}z2{y>Yu<%Bc0QW;LNO#!?(V|M}0K6uf8Q)3J^AGaER0Ug=RZ zExqull%l2dzxBFkLtO}cVPBnJB?24}8=qb6n!RR_*f$Y6GI)9h+@&$4lCR?$jLsDp zi6i&O4r^QY<6XRu{(+H2>tGSsW~}R6Bjer-ni&cKB!E7kUifQT{Je&!FgIcDa5;*W zXXRere@sgGsss7j*S>a230YEfZH!WEc$wsuOA#uKXl(1cX5RO_)_ALTVt^T=D8c9% zTf+q9(+Yb76!Lp9)!O{ct#dAl$Ghj9(ial0qlY3dn8=2GlKSOdh9|&=t~%#vnxEZz zP^=Uog+dvduf_>-h)c&Q$KQPN#PLK{Z@`H0eqI$c<32{%Xt305fk%!KXH-m23orND zPaa%GYT(%zMUyChz>ksbu+G4OJ!fE{Z=k=i8WWG7IpE!zC*(O#Sze=$sREw@_N{w; z?!JsMN}uPIK}K#^TWOG5a*v07UK{_K@s(cEGwnf5OwgLOHV)o);E8i)Ofx(;mJ!$( z4;qqEsn)<}KpDE*a!6^sAQI#IqmMp1(MqXf)(c&uS#%O#Oe0B`#%{b)QON`E0LDF6 zni{$i9j*=s-*(U-I(9F*WXR;zjz;O1+U2{BG2=Q$kk70?gU2^H=h^m%0V(aV!*Q+K z)IiviF4oY{xoa8b8c2`<(hJ{(2u;Qe+7KBbb2PY6q+bJ!={^+Yu&3k`S|!WKZug+4 zCFy55-=wy2+_(=Z*)EUW5`%+DM)oS z+EUCh9 zgXqOS{KG$-40lG2y%&ibYY2cTW21O|c^+>HFeW|G-yeSX;mIrRUWRbR@~#I0DSY)d zj7>n8kugU7Qhs&qDqRWB;6VTw0Pr_D1sZ(;s66D}%j@oo=g&ISwzT(Lit22ky*=+K z!eBk6$muR|^OImlTe< zjjiArL{heZ2XunZ*0jdY$V7h(E$Nl$4=@7Y7`+U8yu;9~8G$iI14|53??LyJtT6(P z(jcURbqJwndS^hilsxarQ|+f~jF+?3?#Um~1{q9Vlfz^X`B?*@a~Pq$<)~1fCUQr1 z;RVL(v_Zt&xW~wXH6^d{P_3ffgVyN5X{3G2I&6vZj1im8&+>BaQV) zgTh!Cp=!In`OR;hWGcDzX3%V*klXHoF<>qj0uR91^!CzHiRs(l44Rq!$omH@E0E`T zzU83JB0h4`rQY1Db#pD72{-Q3ltD)h7IZ!HF4Nst> zwvwJBb>N6|cK~#rBE}R?l@zVcKS?oWKmc&k(trYo)KP#HV`s=L@n}$rJriit)yuOe z7f}En!_vY6gY84zu_#T-l+nR}=3SO@F`i9BnOReH5`Y|#e6A_RG$PU%JvV?)@Bt7$ zV{ikRc&SDPfy~BQy3->-UGupH_|dJRd`2H78z>Kqn+pJ>nM1|-&dvd3f$1#{mPa$GQ6~BcspiKv-nQ zbIcP>pn0@Uxp%IR|BL1TvZb(jmVGi7z!p!CTjW6tNtqKX+bT3yp&M;bIaZD1WWoYtKVd`mgIwxj!h zj#j0w!!~daJ`j1{TI%p-(45&L3aAssDPmx3Yb}juVP;G$+OyUAyhct0i10qIrA}E( z3>wVsz(8pr6lUJntps&#jC+1QMrnX7hS)T%Y3ciF3}BymK09pd9+9-hWc+B1L1qsW z%}QM;I^#M36T<_v01)8|U=~3ug=UH8d0dOqyeMU-Oe^pQGSy8`(v+@ekKKAoO-d=3 z7tSElpaHm~paR>jlLDy|ohS=1toU<@CoFGf9drCNe?ZTdeJLd{Px!esyQMU+=XH7M z=c0wJA8;-Wm3PowoXw-_)tC!!Oy&+)Gv+F$Rl{PP_jEiM7Xy_u#MnxK6kS%U2ArzV z(y3@|+v>daY7;w56NtG4BF5;I2!%8}^aLDPU&YY6tkvkjT94p;G$5AlEg8tjCkGi4 
zqB5FVFcSRD3lD7CliE24&ht!<{5}32zc5P3YV=$?^b8^YE2;50mGNF{vDDqE$=R?c8<7WQ^9e<~q-4%v_5^*b8%D=%QcF75vB1 z35eA&WXaqz(Cr&sAd-z9(6Ouv@)78-E9B@j7*oy|G}f0~Wqfl)p-I=Q%~H5^pXcr$ zWuQ4T;21RM5f&n29%aywBhugKCAdMP=FOnF2Nhg?CY>T$$-u8u>-6yRHKTenXwIxw zQ5vD;&ZANP!ZW!R~wzvh+_&vznga*qtL*#dBUx9wa5K)ASokUS{K;@AOV>leDJ{ufbv$h zz9Sf}F#`h#cwxxk*=aOy>$r`pNG<08@X4^DaL7F#G+&Hv25P|gT7+gC5Ae=gFA~UT zKj)RrkP)#Gl~RyT;rTzB6y*aTOS7u@KVDb@D$fBhshCo38INkV)cMp>JlgL-PJB46 zadQp6z}wZp0INkA0VsRenOQjw@O*}nYds?)erotM_CEJFnz0_GWg4Ts>A3*1D1sWC z>dpX%4AW7LY+b*(HO+X>>9FUsx6=BW`&v&meT;q12m31OKv#%}ZRrG>Q6mZ`-PH3- zGbQWE0$8II;0%;f&N@^R{V}-lsQn~k?fcs9P;1s?&9%wE#@$&j3>t>6XmdsR9BN14 zjxow*?Kv>X)Mrn#U$gEn;o6*BbfDkWCgvDeqgTecz8N$ZcAmlj)_^byilUQJvo_VE zR4~F&SD$+sG~T2AFk^<0tKjyWw+0ab4SB!&OLqYgooTwo|dL8ji7!?i+Q!J zW&7Ok&j_5X7dax*!0CW4tg$En-m}JYhMD{N1_HFA#;*W3Kv4I$rL;cB(+pdYK=o+$ z5+Ksy5#Z7R$miBnD{gz33|zC$<}1ac?u(u~jn-9*QvH;BfN{J5e0y(Qp~iH`J@^5j z6yYZC(Y&8{p#uc1TdA1FnZv~1)G}J6#F{aTtfRlB71gdcM?`e$*?77xX2~<_jn{k^ znW;gCdwq{|Vg4CXoeSdgqL#+$-8p*vWE^&|X%SCrJ=P(P`@3eZM;dFKx6tR&ddT{s z2ay<2baD;vSu^y@v4kF5&n;_{@mu7%xl?&O4e#eZVc5B z8vob;GiWYQQObpq;{mDT*;-EmFDEaf&O!HX3Lvb@!%itpYZ|6g z#n_6>fof?G6swT1d~`zQS-d>g7`-Tsy#HE3GRm$+r>4(!Z#5+ZoT$kj9-}#hUUY^5 zsRjVx59Aw%L#EWrNI&}ZU;p(J@EeuqR)(Kj%YYE0h9OrNUiurQ&7h)K$Ea7SF?n7o z2HtlDH33CAP_**9DPG=wK;FK{_g>ntqVWdcrco|q&&!<`{Ml%C1iYeQQv7(LzWUX# zo(viwBwC$#5ELYz+$-$-F6X)a& zqp7r$xd@Ft1R#Mn5eJ4$V82dvV{O1*d|(Y2lG z8L13e1~G%)e%6A&<{9K2IV#G@P-b9>fRhCrV|A)Dhw+T1qtrB= zgvL1O!8#{SW&GY_9}OD1R>wFpY0E*5dynlkJrAA33uGpmP|`6?<6XNRZwAd3gNCxw zfdOzJ*Z~RAkENgqGxOB(N>h4UywgiJH3J1Wrnq<)FSVTJ5u$`O1CWdEC!$(&Am`Gt z-SfH(PsWI@TEK{IM{8Sz7D@ZYH@GZYl#32?*A6dGgZAOGwL)pMgO`mN~1w<3~ok_)JqXI*Z99Eb%#H$ zJr`$8^#Hu)A~a}`QR983OpPftt_gA$lMi z8KgNN_M*da9QKhcCR@>^&#XV$X^a|(sJkTZ7&Pxy{VICecolppVfcsu8fsNU_7)`*anh3_p^w7x!o;I;SCIG=3 zDcyu{*|UD+D8>f-cvhuu0sm_{F*Qbe1$Z)ADerq7f?9M&aj64PdE0o|J9to<8Dl^v zGqe(&QJw+)y^B6ee*kWHOdRkjqKIaH;TL}4WZ?B>tPG7>&k{%g4UY?OXC9O+PohFU zdjdEyu6Z8SK!^%nO1%th0af-h;1FnUya5hB@h7ik#qy6-=*DARO2yMOf$<_`RZ|E1 
zTS5Q1bgAYzHaqZK294AW9!)fQOA4#!RW}0k;(avN{_>!aCw_L`ko1wqk1sB}X1?nL#5eL%)a? z0n;xf&HfsVzl6`xIR~hg?hHv$Va}kOdt2x5X3)&6TpoXQ85oTRlwc$Ld02r^1`4nB zwFUu!_wqUxn6VW(I?HIem-0cHRvqv0NrwI#G^?0^X10VFeItf#q3ZD1_ab!v&h zHQnp#P^{zDNaQ#V9$*nQdjzEY`Jey!rw$PNS$!3mf<{W+6FR5e|248B` z8nC+uolp1Cnu<)4(F_rM&!J%qj2K{FI^}=;*MFV%vJ@@eaeO+qPcNc4*O27iQH$2> zToh+?0`z-r(eV%Oxo(Og6#mcPiO2CE9RlQ6Z$pQXljN?i&fr1UbR(YU(CDllpS#zy zM*f`c@vVD4&VO7J#vFsqnDM7(E7EJpEMu#I!nm&tGIE8}iGizW&JhPd=D4!1=FmZz z_D1v&7BLS-r23QEb-NFZ-kQ3zMAj|6CMQg(rc%%7HjWgI)N|3=rO#h`-DuF%LV9f( z!PL%e1;kmC2 z2cC$S=UxIau&gNnBTV`jqh@UJ+E@V<;XcL1x@+-J@Sf^E;%|a-Nd)Sd7`6HWdGI=E z=}@#bFSh83u3REnvQ9g)&y`6x$tah0>%pL0!_Sbyr>Fw(c3Krr(5?W52kUJ zlTV48KjqVbis;DrGz18sxYu=l007;Xp#yioz0qCWB(&7q$-I9*=sb*BVT*X(a`pLM&?v0jj0|-pFMGDzkzm6 z5K&$HZl3s1G|~U@W{Q?k-+3zLU!-a*z&;mo+neUrK5uI9WYpxyKo6y&;cF?poDd8a zwCyw3&@D0)M5J;)lp2ZVrAoQhe%G-N9a%%(SjHmWCkLue%1Am*>~}JJV&O&pY#^qL$5V^ z*=o#Kt2cvYLNOMbKnC;ye4vc=NYE0Bm=FU1T0q*h7QuuN&n6EK0e(*pkFoPmiz@Mc zh`Q*e)JgDpaZI`MCj2fHh_J3f0A+P8CFmI$%~}g%i?C2;3OLtx(<Ksp4`JuvpeZ0l?TZK{VC^RZ!Zp?zPcYgk-#p&d zlegCYd24xq_s_& zDZ*LtJiyV(_0>)+iFyVy$0&~fF_uKPJPVN7dPuKz2JF;oiA1A4%AK70?ce_G)ANB^ z`)UmM3QuSpVNT>j=W;cl44IQ+`Rr$ZW;ruxL}-ro%3d-=$x1o>WG80=Z|(QK_q|h^ zF|Z4`lLP1wtyMo+m$eL-TNLqH+S}v$IRgj(6#2yCj9LHV+~{yszx%c}j1xMLVQo+A z_9mUgH<`d7!V`e4zpYb_HET%zS=SB+t)a_UyA-gW*6$Sc@qf>@XN+?xff-;qT0GBO zq>z!Z#_V_diH`zR)ew0m-UhtTKDw_a^;%}e9{V$WM$S=@4#t`Jh?t7}G88z*$QtvV z)@j{iJWHNwETa7)yy_@9Oz@I5w@2o3v@8gdiO9l6bG3X+=Js~YwiTee(XX0PJjc<; z$-m@a+G}roGiZht!hD1{fhbp&AtRT%qFa{&Z%+1>YfkV|gqL#IJ(Ho}04^Y_6Mb*Z z6+E}Ggt~Me@8N*}3^A-0ul^RXFb+nR&^6D_`|rPhQr1H2A_oMyw6hLETLQqY5!z9o z+aLSKkP~UDJIqpmOW5&Rt5Kljwird1z!O>kM6IbcrieuT`Xwur-1F{WhtXYL<4mrV?B|pY!^ISw}{NHHM z*jI-ABcf?j!yRpsjSM_Mi(v<3Nm>1^-}R?!nX5gNUB^y}!{}hm3k3AYOXabPXEc$mfiz>U4_C zpw)~R^2j&+W{kf5ZjAQ0`k=9O_ShYhY?jhRr+)qGU*FVhX0N>*`G@nwI%z<`2sd}m z2j>ix;^)272-fs}dyVay&g1;`Md#rw`%LyQ#_YwBQIZjpS)Y44neV;6FYWo(o}&y# zQL|2zh4d3%cc86e*IUM1PFi-0)A)2=@V}wf5$5*E~a-&NV%zB|MWAut4(?x8~v<3a;by% 
z*Sk4p0Lnl$ztB0OOq8LsQf}1+TlP5qVFb`k=FM>;>OuaXRm~~RrS7z+N78OaSm|9b z2LNl$rF}ihpb>HAyd~2)>h6ijw(Rx0dJh_`835fPLSyass(F1gXnssN7>^Qn&H`ow zOgaU4dKxt0#;Zpm6Do`v*G@azcyu5ZLsvs{Pp@R}lRAP(6!{A8@vO>QX4DYc3>VRj3Y;@Z5bpJ6OA2pC;x zf->SMq->vYt*1r}x@YtOyi(@y2V)GtFh_vf7sEwE3LpSY0S&;vJpqK|&f>S~F{;1u zK1Ks&v$Q?>wY48V_gu6uebl>+mw{Ou;=qi#5Wc0$P=cZ(z!vatoC+JIsA@!z9H_Qr z5AVHqphr<*#ta$(w(H8X23TdxFv4`91KxReIWVNksr4cEsy{%ZMaRsgs1##J3XU3OrMtUCx*GvO1HxxQ#rUE|qrfex7__{9YZ-uI&}$OsXMX~ zoNeT>J>kfdiLv&KuWRqU<#S`?2*dM?%+kB;7lWqs)1xCawWOCZK?m*Sw7g}lUcf$5aI5$mtV#5!KMuY^cO zO(07O3DC*&icokX%txx6v5PXc zM7gXD0FLo)=}c!_fG?#eCtIBYFbA;bg?#3NOh==J;zUOT6C+tl8E>}qD-lh~AHTQ` z7_f%+1F#{`LdK_XAsifqlRW0Dz)>kp^FNszOI< zDR^m(r^0Y46Vl2kL;P>fQY!raTDsr}@UQVi1_a|zlL`j9hzdin!s(?T+k3|dG;nF( z><8HZqyh8xN@}1;ZXK<9M?ltk00Gv~J&d^}h1{icihMH^)luE6HTt!+&Dq4jW>^8? z*4DX8Jl%oTt#r2J{V(;Jc+mKZ+7xNwoB_7+g@`P}#b*HHSHAL=M0xRBH|VF%TV$=!`xm1((dzr27=Gri?>T%joyoh za7KC-*?muE^P?3&ucES=B3XZOz+v@UjsQ5frf&w#kA*VD z#*0kwQScp}MR*26M({w`5~RGHyn7U39?j8fI)+>+%6qnLZj(!+=)guET`ej#0uU7- zxP@7HM?@e}0_G_%S`>#8xzt+snVFQLlJhJo#b|Jz%bE@v)&Z#FeU>62)!aYIlNy##V(K#Mq98*}puK6k+<_UEVfX*w&$RE3A zWz^*no&t7!H@42*;?>DeU{FcB0u(4R|L3s>sK^1J$6gr|qeTRRam)yH9iz_g3||pi zfa=m=lda#i8lNIm3@VYCfBUz8I~j~<8Bi&GX00(Jvcf|OoJkjCL}~^=CaI<4$jMtS zm1T_1#P7!D8U^Bv8tFwMnM=;raxW(XCyHn|*&{l#wsFT^vL_F`wE$?*DA4tY?t**G z>FDQ7jT=0KXBdd3mhi+E$w9~T4tfc67yYvz?yZ$HCnI^$J8GBh9UXd!mTMIc7*&r) zfB7z23RnaCc$EI@nd$1e^MQ(p(3ybOqiMC&bv$Hj<|f6A{N${}Q}jHKH(8MrZmFEO zg?8o~HTF|fo6`a$)zI=njCAT2tRUfZ&~-3bT4|K)95o8fkBf# z+oH$k=Kf~TeC9JJ!{qzl|NaSN>A)e4=9<-*w-WGG5KMr}2OSTwBuFx7cmX@vdrR*7 z8p5rPgeR161MC4X00fBPk>Dkl`cxa<3^x`u4-lf0jz*9_D#X5K&f~Qd9^p@rGGx}I z#f(NFFMiDxuAOXuZLE2xdBrJR&%M=1y7ta{d>$y5E+UnHVQjrA2Hs0X4Vt208PL-8 z)SWPTL~8&L&#*s~2*U`7l&8ML)9+n?4u#&S=4*wa_XoUqgef;)3@vG0Ja<4XumIRI z_!%B)f6S7msGsa%EG^GSp_t2V{d-ZA3+|;V5saj$T-Gi2s&E)pc zrV%4axRQbHaHM^A4eWq zqpzlhP#)sZa2a`Vj6(K*ktRj#905`@&t+j8vF@W|RGKS8OnRA=UXCK`#QA28$z`%j z1oR#$yVLpFX9i>4Cdt6ndCNStp 
z-`DUCI-7w@Z*qi%6ZTBAfAt0pK_P{LV8DbzqSHnI=ZKBqvq?IJNmV0IMMD@a6-&Fu z28#N8`O9Da%y0hYZ(cQ)ZYA!7AK*fu0i8gNxk;0uOce9h_H_)i5>C9fQqp*Ag|Ckg zinq>ryx*EDI{om&4^OE@JZykzp4aKMr8Z0>a0H%CX#geTZY*A3JKk|mfW^AqJwsZ+ z+PP#9P-2v)R19C}4M6dK=~wuHaZFhNuca^&U_N71^NRBw zv=}!-k=(#jW7GjRfoo4$4g{^ zR5W~#)^na^#P}SK;$?J&o_)pu^%WS(5Soj2Pa_nUA{H6rXqK+5I~O@chCS+RHaJ7` ziX22RhB3(sVGvjwYebfETreUzH0Q>gj2zc*D?M0uP7Tg0@3L;*ZA`{2jUQj>7R!-Z zriA-xSc_eCc%4)C+RreiqV_e}G9Gid(ze@~%Lzy#>^GsQd= zd^W|`w4Me7^Ed<0)e-QJU~mH3XTXDJ14#E4ipEJ{2t@-fz&OwWBr&XbWC4&lke}y5 z+hPg^V40p3@)DSg0ySSJs>?IJ)LwheYwmwMVGaS~wPpufYnv53 z)4B&B01QH^j%|z@bOUhU6Q41h{f!4wu;$`k`LyU$eG%S}mN1vP+1$OiGiZ2!?MDW; zHTFLd6g)PHLv>GI%Z>t~tgN3nvNxOFd8!e1tXe=nc-+2-eVqQfc$aeXM`}EuBFa-f9Ex!XV>$- z>hi|OWY|^TFqN0?LleN3bv-)OWL&$^pn0Z)K9}t8OSmQ)ADsti8-FsM(V)~t*$uqp zWC!N}<9a-^sr&JLP6IT|u`zAw-n!h9X4U&RB0CV9Q)bL&ICA}sr?Xs~8%1t10M&mO zlh5l2X^lAIM38e}OzVDJ!w9jKw|1AxsmoX;r|g^md3Nb$oERcq)w0(7MGdCx5r!lE ze5qsN-s?RYG&L(YGUaXW`8@mc&;&jg?L;GJ60W?J{-*PO^#)COvUy{rAB;eMiD1mT z3IOv+0MY;gr45|vGA6a6a~tk;2F;-AcNY)EM4h-~|KojE;3e-CJsu6gh?u z&&f*!-u9$JHVIT~56rDGCaodwHn7f%SP}oFI*D_FB>~D%L|^KZwjPT3EW)cujl5Vz zz&!S%kXp8)9cwLmiRKs@j05@eKC_0_greqAbl@gMjaDhQb5T-&*uLN);0Vy~S(GZT zA7$+wK++o8E%HF<;u~N|v_|6tpq(P-oDj|RY#@G(5*&dY;SSi5KNP!~FY^NOH5|y0 znd`oEZU0AOMkqJV(ORNm{LP7JP4O9dDm9I~s-s(LXB}>BL}M>S zslSpmQ7x;R&?ys@FVV4QRV{ z+yF(ut7}7TInNDI-jXFdNU%8jzDP@yU!^3ZV7(bM7p4!G8uhkhT4%fB@qG_U*lNFwTkh-h1z4c$}-X%R3zq^IWn4 z4>AB46@)wfV>ppDU;N@1KRxiTGiZ9`buu3N z#^Aq{YBs*h`g<0TWpCZ5F$BlKrPR!f2+<<@B6W`uG#;+HH;rC%G5|FhSC8sqb<~_6 z{hl!*iYGcqw=-?%VU7tpN8K2jEn2#VbI|h{S{->H(}81hE@kK~04H}_Z z;aLX&D*WR~R_IUR>yAay$p__S7<1U?($Q>O9%Nv}b1}U9Qa~EOI7(p}ODR(T2O!O` zB0LBZ1@pWjd7j6jNM0bhx&-49UD@^^EozhNrYo=?8vV zWA`(P=J;idbX23^IgAB$Ip~{$?SQ>CUR=+vOA}*E1VIFaF~i`HCWD`iZ>%q?sfP6~ zr33Gq8#ty2r&M} zZ~VroM$UY#n`@RD*xmCtfAcq=`S!QJecEqnnV!L5by|GgrKa>(S|6U277Oh7eeYcA zC3OE5p;@v@_q!(N1b&T2I5(tKp<{FT3XU*ONDN^gRq3bRn|L5##x4P>UAb z+Yvdp+W1=Zn;fM_fPP~W3AaXcIBY`h=>A6TnFIcExUp;0ui#4{(pS;#YxB8g&}5Ki zER}9b&%qGJ!7w~m%Oi>;tU14 
z(*&DVvr=lbY3&S%08~JcFcR4zBng>Ebs@TybWFKqyDB6Jxjgu1UlF+evx zIli5rcJ#5{_7!=YJtr4#rpklrNE~8Rf(wP2WZR`WZ z$oOPHGivMuuc9=+A_vn(0i7rgbl}m(;GNz-rfV(TE7uv*XoPS?@Gvnz0e9bwEzSXg zxpBGAH!l=L4V>|m`kc%#KC~vXb97yrN1v5sSf=ay^%R2j05Xx(hD z&|KZCj_$OybbsIF>jKQBL6ML6ha&_{@$}d) zx6t6Y9uW63X}m>imrKM88N`3!|(_;Kohv+F-CNhM71ZJUEB7#rDDl#gTAw0A z_BO-9|BSKJimhuIz%qu;yf9X+My1AiPiYnev(NBb3LcoS9t1bg{`t>;{?l|W^j0xs z&V;o_jI}S_&zh4n+Q)mY6cYvo+6E#RRZ>jt1Gy!-#IU#Cj2bDaYjT-O#nc^5Dh_&< zrpFj%)Yx-%Q0I)A8g^(EUCY!Xs8e-lY2zbs|@eSu((P(ln z2Ma!BxQHtAj++C=tVpW4>=6kmZE);PXPoq;y)#zwro+3Z>FuJ{44TfOnKOKruGO0B z-Xb(>R%`pUXtWe)dRjID{9MzZTWh$cBYX7*jRm1#d4ez^;YmOM1{lpxU?WgZ*fM|! zJ75_ouJxMcLQ;2Bd4rs)Qhm((#z63{pP zzKpFrZtL{D?|tuN-1x>PuEh`2BJd1{c+lKMKtvH44))GIaSBl0K+V|k%YEoJPtQ1> zTPZ4l)j2i(#usSBSG74AQ5hLUI{oZijEq*-_}|uO%m{Ss0ieP-@4OnaMzat6jkh}l zQ*?$511|iuFTO-_N)KCV{GkD-l6$YNbPP7MfydNBiF$AbRZHdhV{|Pi0Z>M<@qpXo zX+7Vq`*-dBG36|W)1y4`*T(u+VB;9BQ%^&`OPG-V7fwza;$vJ_ObE1Ca6%Ip;~@q-YKN$Wt4;8~1TC=J>BM=82ot{(sy^dg;bG<-$)LGM z-tRf#%mV`Cu!;!`^8sqi8L%ecc)k_Q8#hBuItaljb%aHIG|*VJc~>sx6#$N<;^b-P z1yWRLj(LvkKOn6TTtPJ>hu{a+eCZe`1p)Apy7sxxeeMJV&z+Yt9Ttzhap>sf-R{*S zKtZL?08hqqpnW_=JDNgo_R4h?Z|}8MdHWd$)~u7YN23OR0Iljk0Ah2u9vK+sB*Nv8 zMRbLS7=q^5nJQ8qrR|MH;DMOVk71-Rti4PAI;Q}h0g`|BcYpV%3>#}hIqmJ~XHy6psbZh1$^1>kypmZh;@;MI9JcN7j2(=&S=oQH3hi$ zYOj0#XxN}D5gPJD^p->JZ~`bH&SK0Am2|7`vH~HDoazqc#?AS|bil^+}_HXe#rjsbu5lIX}ijHLD|p zIba^TRfo`XQ5XD2#&fXRQ%);7_u8T6>nLgd>NnosV-+ty0}ta{n96$Bxim)v`ifTf z8qHgT=Ep0UH#QF}0YLHVc0^zRlm15-xP}7f2~}uEsE*;=0872trF_Y8Jvv7_rUi5m z5%3|+L^#-&lrsWRbb??yUyLB>DV_ek7r`C*oWK_^GVhiY7)G3ZvmW+rJeUoT z2Ie(|;DNR#Iyc#0f>%c;f|pX!&55^Rsj-K7c^aiMppQ;|2k6igrUy_x-?;PO7Zorz zppbH)xOmtS2A($uq^JBYf?hF)F5N2QJz7= zs|tMZ2#;;ueU3gv@X)eoob|vrcqXF;9r4)3mkj(zM1u3in+smjwBw~aaE_F(N4#rH zLql7@vc@AEDk8qp!^uOknhYX0Tqi>5YfVFt^aUe^o|M5LrE-nqYU`TJ$s*%n#Hp9z zXe5vE5eEeaC1V0MDSM;?i|OnfJ?Rn8A`|E~4F<>%dR`iK(a}*3WE?Usrn0rM`bnqSn!9jd#Pdgt#$pKupPF5Qn$ z>F+v|?x7c_Z+If%_-Fezs1C+gs5?QX5T zTyBM`)!HmM#sL#`TAWd9lM~0`WPtt7_qfr?5Rj2W&nWqtGU{X6(>Qa}w6GE6lqQxT 
zGhcPTVKAj6_y*icuQDD$-yHZ{sa|R*zW2TFos2F31Za3a5lRfay+gjFadxQUrPPr2 zlJ^Caq(px6o8LTvLi-7jw3ZxoV`S8=Me@34Jy$jTs zbB+*s!8X%1k7x{FZW&zlf*fVhB=tvoANIDlOuX0b*B;0zFVaAk=@jH!ZIz1>q;I!0 z4WQG0@Pi+mG8s7eYE2)Rf;c+e$R_)hO%|;Y^`w*8t1?3BFh(yIIT=%N-H(nJF_&Il zM@cR6Wg?7cv=t>^+6x_3VI8tbrJNZnKywZ#SR-Ialvzrl8eg5!=#y)XM%c11Je&Mz z*d+2r|8(%@+GzEb=e^giEjk0tu;1o30@$7`>tu`VSu=+(Kr@-8ktO!;>SJ7HlXD9E zJm*0A8|7o8`q=?j!q@liq%oH#Dl#GOUiwrmsVODsSyv>$2lJY8+gsq)2<0BvSUVsD zgY11VF+%L#Ic0f{VGqo5BBjo_2Y|zIlp4dSEI)XQ!E;|mlk-H0W7tJx+!s^Ecu=N& zdq$_klPgAQ8UYx=^tJQm9_}AdDb)ZtFjqw%T?gPx$-u1nGUmGK7_U?i&IN!5V7QKB zAdQcK!@zN{*8suxALlp22^fi*N<%Yu#$=AH%TN8(Po3@yjE#o)oD5$Rp}BUBw|>sp ztO*8|!R@Rb(LAXf-q&1<*p2Pv$*eRpK!TGY`oK9OdmKSBq^P=muw{Q6llkYQkYUci zv>of{d|Eg4d7a6HKnc_s4}eyr$9V5a_Ou=y?ng$+trW>e9iX^XM&4`JM{=;ibQRg+ znA1Pg#v$fr3t>^zP;`b~>Fk+%L}$*eV`n|F)yNB^C?7&rNCz|fZ0X7oQxOZ^X^gHk ze{?B(hM3@R(jn6K*s}Dd`?3Q|k?(Z$m{EXy65T=;*iV!l4^@LM4x!SY6Z*@0HN6gd37I=Fi{k1f?xsgF|i}4U)o3G z-`!KBP;DJh#pyd&Uy!aO+jF4CI=UB-Lr$E{GJ5O{3lRDyMP${b&n{z;t@MeIOG7`7uqOpYw6Xd5swAr@7?FO3Oxg05|6j z+im~RRcwOyyXLI7Yv$PebYzEacG4HCDO@Wz*F5)a{|q@m3INN92Y{B$PUrTBIet$A zn%M5O6FWw4E(zu65HsdBD`Sa;yr;1&LW8}cQ0jCfWrd?pfvD-Ib#cn?h}_0|Z(>T7 zBQ2$cQpLcoaiYg@T1N~n;K68`Cq|PIU{KUAFo0XyT4#_M6l27=7wr?#%R%R)`#Xnq zjF7k%L&=!C2c-;X)GWZiF;AS^B8k?sgB!!x@VZ8xp%$DL^;L_Y1v-YRJhzP268PC`;7i1DqLC07}z(*vEA*rUtoMU6h z)RHax(!4r-TqiSiFM!qbVBYEeA+M#&>GDTT=^o%kDuUmMDAhd>P&zu@WD3$mzc9%_ zGktNdPD@+n{&nsD&cHMZn11&AF$%brz@S@1;-wwYyBu*u&Dgl)-t%t{a|lH&q(;$S zkro_&QQ5L8>}x5$WB11Z-`qJL?o+#E`;+rYN3dt^>u+@9S_VD8C;*wxXDjVzja|<9 zJ)U394To1c_Pu;0&*q;wPYeOpId%8cKlhfadpwWqAsqQ%QSaz9uX)F3zQ+73GGq+Yc4)IK zJ?E11b?$v~$fQi^HX}+y;fXSsKl#j;3W;N_rjA~~rjA+uUfy%jJ=cug^IRhm!ReK9 z#Ub=uURE?dMritXDHQ&uEx8W94XD zmpR5}x&}U*aW&VSn__(a2C#|NSdTizF^*EA03FxZccoE?C{?#)zCKcQrXVQ zfVBqnyoWJMDdAiIZK6%`%E@j{jCTz*x*y<5HcP_;{(u81X#l)=1Nf|`9Oi5GK)Qs} zXYI@zT_QJrshd>JQw>%$c(4v4#K0AX)W10~kBW}OD2o!AYw0KE{^i&_vzGs#h9~Lg z>a~n`1T||S=Nx)1;*nN>0YLMJ^v7n~d(j2R#_xas`zN4HkF(u~fRB9SBPU{kKBYJ4 z2IPe?2W;rE(SA&IPwnp6hW02Qzrnu3{g 
zQ|mKJj=*$|E&)RI77P}rRxzvQ3=Bu7Tc2w@F*327P7Y@EI@S5~o_>nXNOiE@q8nsg zF+9*BNfOn#1`$#}t&Oxm z%|{Rp?&n^jBhp^(UANEy#Jca9Z>?tU#S&kk-d-OWW?&7di4@9lANLWxVUHlDfC#$j zTJmeJg`uZ3^#JrmXY4Wi4Y?sTifr2Zh%L342od@bfuT_iz1kUSb2TvOqH1JDu#)}o zb1A|#&S%7y->LiKTYUDjpFNRi>}F@)%r&#Ai2!oM-kUZk*lS+r2(xU{BhCpt>b)L$ zUj&`~#14~LvdEs%|Lp8iP1dbre*v0XXYj)3ZEm4AU33`kd*+84Rz3BdAU+no8JAG2 z@hCX#%#$T2_Q0vy?bhDSTvLqS`ObGvtbhVa4qd>lqDY2g3LZGIoIXxuh3oD~={hLT zJ>&$BX=jY#mb9~TFMA1OO&PJqJ;$6)4OEW#+V$Pjk&gvqXr*&V<-mkXZ}7!=t%KSW z^p$2-OKEfLt8|Fbh%)eU;5p3JpFsy!)a=y4)x6jfoQ41V&;LCA+niu%$%)i7sRx{E zGEfc96a+NRbFAfb{wwaseAYC?Sy%G^836AOXfXhqLolvnFTgN%ZgX$W3%#P_oAfXL z_r93M>cqU8$PCcbc{A5W9IS)1EX*}vd<}z54mt=_RM?t|99tV}2GFVZ5jj4WZ@-(8 znB2&mGQt3reT@JrO>ym>IcjXL`!3JG6x1LN?NJ^0=$pEMSxY*<&S&WZb!t){>2?uF zx@jr$xIC)st}v#W*?je@Up+C~4tZw3MKnA2E8VDJ2$_{VAZl;FOYNZx=rqn`T?nTD zC9-Wg(o4~p@!B#9#%_uBxKuf**uMO{R2%16wQiOE~ zm@ftrKoaWb*qLV$0qoel3P?wTvlh?<1ooKoE%z7!O-1RQ9^K(^=bXmsWEeMJIUqH! zD6PW!ia2sgIQkrTdC?*j8OBjQIyjV5pzh{tU;En0DJU}Kx$a%}AHXJ%SHx=$G>*@3 z@2(+dqA=28?8O|g0F!71Fyp<9w@B2OZs;?;zi__P49u*wFAji|!fM2X`Z2rY+&h?u zt#va!k3$8BVlv4&z{gSK^lg=Qb7Jn*JJrsa<5gEFY~Ek~_>8?wY||H!Z0W-q5OE|$Z0It!ss43@VL8mIJu@tzW7l5FWC=c2FZ~ld;yvx zXZJ^S5uH6E^m;G$(Pjl&G$f$d$A0i$85_bu=d5s2Fvr>&V|56b0y_a8^-!1x21C0?^$Tmec5QjL zx8+*%Ou=E^fDMkf;#(1gx^$fbG(O9r9@oIUjLifBGa_?zr8K{{t@qye2Iig zd6AB#afQ0M_M!cj15UnySb#?4L(R&g4_2j*YG;m+u*XGh*)i{6Z>0aSf1Jf4Hh>L~ zrkyoF#FqIR@_}w-6WKd*4K&Qj_f$_@(i@RBku7R__!Wp2pp33tYOX*(ikeILqz@1$ zer_d9w#u9_)1%IpW1~cerTK0pf3`mNZM@!Gk~Ps8GF;jtAbo9`^ELDI0yNjm+J?_i zCfH385=OF<&5u63DP|n&L~%`v|1pwSZQZ{(7#w!d2+k1(mc#NU81pe~UQ|j-nD_B+ z0HpV09Hi)sp=_Hjri&9}t?GuwDB7SlBSRt$2ykNfC_MLD9fsQ8snsgScnW65;`gaR&T((;$FrvD{v*Nx z2y?c*gQ8*2@J!dAt8>c0lLL-}{p6k;cwhve(@~2aAWLI3#(1XbYylapt5g~hCBTBC z!(r%vOrLWtrrEXT7TdYTpcu1xu1lQA&v(E3-4nP6=B-sA%>FU%tzc#ZHRglUr2zt` z3qayfaE5?9?{)8@d~;y_IJk%>Q5t&W6{UHY>bUB`Ir-8p5hutB`k6CNj_D9zoM@38 z*B~%}2B}B)=)`pH#f)7vSA&mqq&hR{j|dv$9}fzxlNlXiPuY_ZD&8BZVISLLWV<>; zKgqSTXXqpM9Z3}T^E-Rr_az;Y-e66PYU_daw6<0h)tnI)>VY`Shnh 
zed<88mNC6$Xr9Y~bY_GDqjD1JGQla6`XPdYIp*98{Q^1^5tjQA1-jS1Gh^UA6fR50 zdH?(0e@exnj62^2XyJ_c@~+ixFk(LM?-U%y+*eVW>KUr9=|pBgRXTvptAKOKIluPZ z-sI1jS8P9LTv`F<4ueMy$2Q7y0G|w7yHdf%Mg##R9qgnSx-X_xY9TPQmK7SoP4jGT zh;oP;IGcb&VH^RUK+Dp))Y{ruMSPGN3_qE%$4WmPL2!<>^pQ^W=J;b0fov&cSX!rh zbGkbSj>7^t0!?JxH36ne5yv;egMk;!Hycc=SVpL z&>q$8%=(%`&KRa!YH3lLHO&u}v&zVE@o7t^q*2e-Rrt%v|_C9rF3lfBqCL zF2d}Y5j1pxxneUpqmdW(K!-k0_k83B9q+mHCa`2~>$tW=@|7ko+RXm)S!9chUDl3T zkIuo-OvO7FH8zh&lg>xI(kzCNLaA3eaq0HO=^oK?5ga+}oehCS##V5WD6^QYrI7KO@f>-Lck#Z)uCUMi0&olz z#}*q$QP&*|^W!U)J!T6d-KopoIcINdi9F307N+(Y&hImHM#|YfQnt3PB_kyiEuWpS z1U!I0^L0tKfCz?q+G;rP`okao@ZA)lU-^|^Iko5BBUOQslddVk=e{BcBMGq<&{%`O zFB!4FFql4%*Z>%?S-=?P7=RJEU=+x)sG{GITOb3Np+CGMqY<-8_OYW_PDVtu(Ehz- zlmkuxjHr(00>-P(P8YvAzUAPJo$vN&8quCLX3h^F!HM&H4xO_L?j0d+POKB4MR(96 zk3N%R>wbGLT^}Ja*FB5w5Jea5ka56iwU6i{4Map|>?2@?eYD3P1zaO&IP;>F_8&k^ zPjKv~=Y5sp1(@0MKF>~dc1-g!O|NC+YT1kkqL)R5=qLKt=h!&UkAC!Owvc>s)G?u)3HKE_Ej6UWT16%>s2l-+ z#`QTeqE1?abG`r|vdr0b4|^9F@aE+zMu^3MZ==8yr8-L*b+Q)xHjnE`u%n5n?l zBtZVW^^^MJIh?pH4$+?bth5~UQyeU*FM#q?fA0Cr*S)Tv0WwsL-4xW=$7BjPWh1Js z;o5CKw*6wTL+<54=(eitobhebPlDd*90tM)gbuuABtn zUrf(j*N{5Ji0z4CEHT@zp-%?IkW%;Wo{!ePS`dEpfgK+ zE@NkI7(w4{xUp_w`mCNgXXtW#%`diHQRCPm^I*}My}kE)wop0ar+ET~6bq(RcoBz7 zERJ_!=sDcYtMySM0*F(f_7pki1n=dPU?RW%^{=1u&p8+hg-cH~rU39#v1~1(kpxmR zI%LdPeMv9TGM4N}w*a=aSm!Vx6BIOaP{?w5>}1-PImL=`GB_KIFsHlg&ZT)w!3?mk zK1OO?dF!#fM}U}7XanHZQ<|6h73(csj!vp>U@2u~?|o)&jY*L>kO}B@UeUc7$GzX} zZMz>CvL{C!Kk9RKqQ))u(uY3up;J8^0*oUL(5yKi_Q-iFI^(cfX*+bH_Yz4`<3j$V zL)E2pY`@&Qk8^NNM-{c{UB>9mH0nInZADc13`KIXm2?gwkIy0FgC4$gnC9GEb{3R6 z8aiEs`d){ddZ$|cYnmWc-56ebeOc!TbYYZ5Bt%@fha7ex*at%W3Tv8p<#A0k(>sH`PF4e-mES$oG)n?k8;#g!Z}xk z+NBaGp5*))8z!9tgq`kPi>|~p`8l#xw>oU4?O9uZQEkZEUPRpdZDBG265v1@fe4xu zNGTxhb?H!7hLf`{>Js?u&{RyGbPIBdUE_@J!KxV(kX*A2j&MR zO*b%&z>Fvh=ZSnUmcBU6{>~7H0+3%IsPvK2+&D1fSv2MV<7}LaiMe#&YFy6AbzsK0 zI1I)iz0BM(L}OaZmi4_xM~&nJheDc{h#SMmA(8^3PN+7u_Zm++qA{uG@$8W)zc;|T zWq#iapB45j@I95HeEDO)1`(O32>m<6`EH(vE&krIG^!6hF;S12LTeqvPqA*JBlK#bkp_Gmc 
z$5AqhA1=4ch8TAn`nV(X$q@*B8L|4?B0Q5Ny=NzTz*=_&;AOJ~3K~(=Km=Phvriwlx z0H)E(BV}TGZ*vaxGK%&Rhf`Eo)aDT_oQ>WEtCV}Z|yI|h>t}j4y>kKY@-bSqzBal%`(4$Tw z4nRqj!QRRhz7)EpdmT=Z_*YoE}jw&Lk%h3q+Aq?3}WkGyfkW zG)FA6`->1sDdL!#bBy1skNyA7b|GnUC6Xc(%`s#!IPpv1esoY69y#2=29iK(q4Q^I zJ-P>T?irn%gJ~-Dfo#;uMO$Kw zM{puqMb-dlfX4GVWDI^R>=Kw!Gh=MPjB%N_F*pC7QIP3=^nRknQVFF9*+ZNfbMmMI zf2}j8kj~-c0FE5tSCfDvwCO(goC7pKgY?eQM~lovfIUj?ja7SiU`ESwvW9%rs+lbY zp6yk7hm8iTrKHhINXK9Q^Uu;)|+?W=5&-Z)*nmf!khKR#! zgG!}v0{xuRvd2&g-Est^^8iqk(z_XNhZEkjhD4q-hRruyxb|yzX_zqusX%4yU4Y6%NsOz$cJ0o)d$unu~zA zANc`h>^b-GJ{+0On`nPBxUO@(IWu>_Vf%Us%#>y}f|*Au0zI*@Lu0KG|b{r?l#$ z{Mxne`~oy{ktOjuYUEbC+3Sot>#E%fs9r7eeN6@My}=#y3n|XXpcE2X1b>c!)+MDRM6W<6X~ne!F(OYd$v;PV_u7XiH&#sb5GP zY{lN$ZJ>S)f-Akd5`kSRe<0oZ5odtQKMi_~9vZLF_QDlo%9 z@0IgEWjQf3*hG$jP&NgAj^T(El_xG0g>V8vzwWBIBKAc1j z7)56T`FCBNreJ1{t-EshE`Dq*X5=t(9G~Glnj_xzntK-2_}u3{cLJo0gf23SH!v!a zaVg(+bo@m|IJnX;q??gh(TY78y!I+UL$*dhL&kCzL<~K53T7~@V}Aae+%>d{b4JPcLx%S7m zKXKa~MiP-NlS(SeOcz!U3bcs$*99{9qALIg`oftnbRj)KA4((jvvbxw=bpf=_WZ^{ zUu^|6#;ZnALxJyn=R2n^bo9VIfQ7vaAgUK8JBYJO4QsZo_}g_g8AP&)rX!ljoCwV$ zkF02hUx4NgvX+y?(c!272c7k>r&Cc!H7PxW3xnOEebeXUJV=ET8WzT8pdA1T1aTgu zxL|)e-TfZ@(g{Y5IO~61Z1t#~I zYrGgsz=0m}6YDI^^brSbj`BmFSZ|Rs5ij}@(PLcZlnfgCIem1EEWSRoeFh@wq{4jh zi(fqL8$cd`@(f_c8?h78o21}MBV#KZCJ6}G-^N+D&ax{aK_XS$LmFzJjs5|KWK(c3 z-4pOZvhAI@FzT*%9m)5-?*!;Oq>`>;3;7atPh7K_Qora_X`L8<_c;2Pi^gSt+uL+7 zJ^tu(<<`vCy|1zM>a!4KY!mx!Z$0YVnbF>nzZamngGA&cVV4O2C*z32H5pwXgfqY? 
z5>d+C8MGV{pDL!oSrqe4iB3{v!$ilM!u?8{1#HBxe94DGymQ53H?w6oFdnn6$s8 zk*YyKNFX`P_oH*1m)6JF=iVQo10b@IDswBp>F7^CQm^ls-aiLTh6fUz`c@-8ya**G;F zp(p5^_Afo zsA5Ui0W?x?q*dJG{G2(lFb-*S#!7Ju(Wk6k|ESuTajc3rzy0lRpAKwOC$)DP)SJ9r zte;T62$uD;cEC1eC~5%E)LQ$J6TnbbKf~B7-rUOmt(nKMDZsRbCUBfn0;H++@J;}O zj(fce!(Sw)7R)&z{^ooga)Xf+tp$SZUnx!4Wl`K(Td$4E^nSI11p<6A%vx%4ES|xs zTUrP6<2)KVjPr*9G}jpDN489kB*}O(>^jt0b7{n5A@QXq4YeYVaCw26C5c%6yEPHz z6pfVHYENkZ@GLbmy#W9sRR!os!!TnG_%SVXe)opG5%n*=U4yiOgX=l=j zy%T${rsW>#Oy*MLRpg&u*5KgL&5)KQ|9)54V?eVg7V?J7BIX0EN0Y~w-W1S6guMXG z9poX_7h6Qpa>n*%9omc>A&X17)deg;OZj0IF&~^NJ^9tlOhfV`2T@3D zwo-H)V<|W($$IaP>~$DDN5TKEaT-P=Vyt7`Lt{5j4o+l5v9^i{YrVUcQC~9;Z|XIr zL28;HS6pfZKvL@2Qrl+lU9qV2M+OHNk`{2OL)x|TwWfwfL8OKm0G9E4H)Fz3DdHz5 zqBa}?*ZRG0j=H_+8Of6fD&Rtn(-ZD1cb=^K`@_KP$vgv2lY;xC>euME#48MH0y zVIBZjtn=5t_O+99p-rutz^P8#a^JnKHNFlo6jcLifL8T~Ii%AyO9xd0F&<*N*SOw` z_c5;kymsaq#&EWf19NT3A3Z05NniLI)_bcM+g;kJPO#>Gefu zzwm`GoT9W27)2J{tL{$}OyrDzq=A#BN^c4gbfkP+7leViF-PgSvQhRpGU1l-+goJ& z1!!)W#g(5Oog|86Ydga=kMRtwiOtW+64D+IEu@qv8j6WCNMZim&;8sf^~d$oRFv{z zv0OIv5o2ESUZpH?W*9B$9X1qJOZ!dl$Wa^X0&*BfAa@P880&BXgE@sZFb9!HiS(lfrAXdFmr_*ma z$=vvWEvayrUg;AYrh71)x6J$Wv*ruai=g2^c^Ciw@P|KqN^x0YnD-pZ+`V%6%(qC5 zw&WN>Yx~iUe)JR>$PtQ7A0vE6*7Y6z?)SX^B{It2%)eAtDP|%b76wV3R|@%0oYq;i9H6l#2t4-g(vcwd+;b63FFSy%3_Z}2Kr&x4}f>ua5rY|ii z&gs$@*%^Ot9=Z=jiWA zAkO|D!8XIeph^=XM^YHq*4xDZb7VM7&U2BQuVC}h0ATByco*p{=2|MJy+^0133^m> zk~w{0oSZz-M}!hEYK<|wSa-(X+;^I~>)*7s*t&LmT|Z5;8l7$X1X*Ct5p>8T(E;<$ zu9cd2O=Nzr@sIbR`$X`84FH(V1HOC%2)>aVTcR1`{mm79V@@;|k-F-B>iWz}^FjXj z3fa|C%KVPbVBcIr*RiwiBLjiHa(_C}nB7~g3*h3*eE=o4kxjHG=tj{ffTm^{={tLX zy%yb>rX1Ho*&&_C3lZzOw;de`Fpu;OKT0zjdRU6|n??xjDZ_LxS+L%GE;7pY+FM)F z3HKb+IbHk$G)KuHn?jZ*fSJOID`fQl_p<$CYVFzxO+AdE44H=f^)42lK4 zmo6ba(|!Pk%+Z!ELu>csJ+31!Qd~G?(oID@6dcRhe`GDq)R82g6>Wgp@ZHTIuwiJ+^6`_WGC+Icf>HcH1qa!WSYA<+wan;k(u z&?(nOXCmSd9@$F3L}5Prq|+EuU|O%(wHgF_LjcO-d z7c{y^9j~=wr};j>CJ@S|dWL%dMvbFqT0_8x9<#=x%m|(}^_FYb<SfdKe{);;Bda#l1& z1PGv^Bs*Q*hU7rX>!uJG*SQq4Ic6QxDZf1EAAS>Oaav73WngZVo} 
zNnr{VQ;w8RUC>JV@JveAw|kWSF{b|XclTr%DPB&rd}P-#s{RHj>TYJPF@)y*6`eT` zXv`Uhlp$9LkD0T6IdI@{0%-`+3#B25#O>WO zdUWK;I75mh^>eACTLc6kLk6X0VSvr)Qe#7o%PE3NZ^$(8)BYTzGS3EP ztUqGJp0?)fut+tg8!;z(0UTVjSJ^o_f?X=|M;DMcB!Gyr`S{~M{^L`$a7>SNojDc3 zVe9>Wj6@V+Hcs+NrU5c~T$;D}C)4yenJxpunD`xtED>(+?eEA@#7vHGwwWEZrp6rk zYTszMfyiRt0h)W&rlxl^m_VHA*2aH&76<+wy38}IH~B#1ky$p+?~n*|@1Ezs-SSSN zh*JLl>7V}T1!!)W$hDu1JYb^8H(27a!ReLcLVLy>?EvUS|#g4SBj)Cutp`G2Vs3F-Dt=lNkd(ru4*whUVReGVb8A(Fmr< z%a6BVe9pW3+&k~Qb5hupy!>Q}QV=NoL9%W}BMJeH1;r`Q#xb3Ql#M;57)Kk~((B0ubB;Qdm*GN30Sz}c6Q zC4%%eoT1R!5C{WK4ibW6R@0MbF{;Ves5{6JoyAe3H#oM)5V{7S$9QtqBAG6Ao!TP5 zZ_BmTo{q=%t3R*`)X(WUi8hFt1+{0L#5_r+db` zA))CRcHhsnwTy4W{<9t;WI!>yLv8^Y`qq3ri>@j=Nj8J7gw`-)IQK~w;xDV%pQq`OQYi_gq{iQ=b zkD8A+;l62csiBkAHgE#%3DJf%(erz+!M&en&Dkp6y!qWe=y( z?NapUExv*4L`aZ-A}Zz+F=%`upkz=4TEu9YMw?zP>L>z9etpgXi{zTV@>#$JS!F!F zjLo&Wt2(ZNtyUAGyXr_LTJv<2wT!^zFBw03Vb9;IAxKdgB!mVuI!?YD+)qrc3Lcb zO-h3?ORo`4lK+dnsbk3=TJcP+9KaA7nsVw5XJ3aFv9 zffHY@A6u%9z$O622*~9Xkr4%|JDd$)^yg7V_1d|-$LEUBND;%xIr9jkXZ|Qk>#$VB z+dZa$hP+$RlQ&9yoI&DW{l79nRG6|im0kK(R?8kILZhs-!a6`Qhzj!(D;8Xnvs`u6aWU$ zkXw7yI*ZgqY+jodddb2@2EzQ*~NJL~)EokfmQF4Axd!p{N)c=1a0AWMIioAN5Y;*!oYq;f>K5wvC*;0L@XWFx;1r)P;=Gt+nr7 z8>61>(qXU|kZNbM+-e9hG8CaE1C%kabY!Z{+NBuTPfCa4qg*M?3j2XE%AC^n9u%i* zUB@7re}=NOGw)Xm%W)hjaR$fVYo_o5GV4~0gQ9o6G%fQdb?T>o`lnCIQ3~UlL(3>L zp>JotaLlDFFf#7J>9>~FZ!eJ1e*MNbzHw^!Kyix3Aq}c!xz+($H*5cMmAO-}%aGgK z9C+_K?JQG7$^0-pKoG}|k|*yKsdG32HAS|(k7$l{C}nd@D}4rlV}EljI9yV95DMPU zya1fzIU8H%_ukj0x9FkH?I6zzr!{rqyt{@|2|SA`bA)SV@+=Pg9`hE^%UJ>J%msa2 z)ZIKtGp$Qpohq|y&mf!Axd_Z)_K|_~6M6M6_ApY+H5_yA&hf9M@e%LCCK?MH&Oz1I znl7d{`|OaA>8Svz2s&|BtC+t)D0iJtLxqbg?fx6xbYjmM9GXCGzb3 zm(F-G26eFx+0+b3WJ{xzy?m!7Ire-6isLSf**pQEM@Q$E#&zW1*&Mn^WXzhYTLR`D z-JY48`5JxNNCSYKF9C4A0L_uo@uPc(dKDQnUVEjSxi2S0C>P6u8PQI5FX8KuDTT&Z zL#O`|?nl+p%o&dqFNIkVKjG&b)PC7DZG^629Dx#!sqb^$ak@WaQW}95~q6Xod(FQ4@l_%UN4Gv6@pZ4;v9c;r`mM{o1>!V3=+ZjJo$-(?+sfaPp9` zE{B>i*_~{JE5i01acWc=cC*^;b{Mi=tZk!Ti?ok8@>T 
z1a@5K^W>T1%rLmN2&U`T)Z8684ebh*Fk7=~Tdt{T4xX)H>F&OIC>VGqU90zyUk%%q#V29Cv#jAyQ<;I$0xg z?>=llaDz-BhvUKBbtp@(h_C@aMbE3*Q41wX*T^kh ziA*zQYxXurX=>4IjgScT9=&t!Xo5BKx7Rh*Bj$S+ok8S_5F#7-1usCe*Nk7d4@D=8 zZZmQ)L{auAFte!t{onunZk-Dys?*j|E7`TLY%|NF#O`qBFe-bDd+qbLT$hv3XNs(Z z>@yf6K*>0+HIi$u+cNIA`C4-izyrjb7101G7os!_y{}q8UNY!dMrn`Li2?082KubO zG0K3=FZ{wUyxXN`t%E>OVuu-Yikl+Of$vmphI5*;e~)z|t3_$FkyaSX!JVdDa3ai) zR<_tz&V~pjhq$gqBfH(+s0i1)aH@TVBf`maJ%&!*4j_6|N4K@}$g$yAU`aV$3WR}8 zG9_B~Cbm^gXW1vdt%oQDV1wxfNUaM!>?a2U^X;eg@XaZHW@mpSoa|NaCK>>!vx)W{ z`L$m;jJhCtFVO%$$LPh{J-t?@_PlNynel$e5o^U}O7jwxptGzQg2La9ZX{z|MQOBN zmMP-=7tvmGTvGsYr=gJc%AJ|ycff?`sCA-?tQCSp9pUf(?(d%VUD0v#NrveUenTzx z5fFS5U)J2d)s0l7ehDDBpJoK2v2?t^DU7%P03ZNKL_t*jPtV^=AK90pYk;jt>`ydYpmYYJx2K&-@ix<8^Xc$?LDOK zkW=IL&8atj@j5$E^Rqwuv!`!5s`kd6dhgxr)uU4hMNg!A zasIIHNKfsQt%(k2NYT2WNp(zrT6g-990JwAhy#ti3xEU2IO9gtT_XUW^G@W%x&uPG zuZkM`I~k>CfgKSSWD63OBYMtfiPRCn1AY*z2;n{ar?val`#aai+9G$%*}c-<){gO( z>jE?ME1952$*Q>~pVQivTb^~xXRCECI-?GYP7(bSsUeRqKy%9sul#I)hC*SeI;`)S z^qBFHIgC1RU_r_+b>BU5{0X{FXV_tf^f8`eyTz5Sf74|-T^Y7(z|BnqxbWd;8p~1d zc*}j0PkHbRBf|&GNLA=efi(;zrqKBlSh~(6V3ae0j4cDp&`M!g187=1`w^Q;ksDL# zR_&kCvDP|(x6IevXEW$(*&J@mdB?EUzSL(JA`Jl;9}NMLg|2jc+jI7u8Yosy6^oEuSO<7;etufx6WH;vk~7WM$g6d2$rYEf!^I9p`c zULe2b#rj7)Wn)UOEv>RN&oMW>-;EJGd&{#u*B<8NHb#*lz`4CLe$H6cRfsgPA;!wd zH!gMru>rgUzUtmL0X!#OL~O2ZK;>&BP^+jFzQdN zvnT)(mTuydN|BvAKB4y}bI29`jmVaW>^(aidcJqn)X5zF$)Ehm>0R#CoZ!~=zw~)) z%5KVXphu8p)k=-Edq<98>%Hr2CS}nc5^dBi@&u?~UCy>n=l1I<2JJd20n7#Cd+n*f zjCr6iJ>_L65iH&gik!1+=0)dH>DYjqFZ++4`p8EXv#uQ5rJ97vZQI2mJ~Rj|9^wgYM=3WW(Bv7GlRmdh9=JzBJ(WpLcQ{ z@Zu~jvLKyJ>YYdreFdawUELWk=WK!Jnln)vSuQn5xmSln&$oWo7ooy70ER`0s*Bn} z9$mABdw<6INTbuJ3Ml74R@*fmr?B^Px7;tEuemKcL)S|MRF_8X-mT|)0h%q-d;PUG zB=!bVwKXlqQ>RMLLUS@G3#X)JxHx%~iZ-vI*0~<}Ygr*qjpyk0L=jSc?#+RitJO&W zjrmR6;McG2>yvXcFajEX$I>zIqAZlBuO*f%IhUG12Bnord#IJ@sOVx8Ibfd6`JR(~ z07=?IEmRrjqBP{4aRzM0D99sMID@W+1bIP+Fs7ni43K>(*B$Gsi_#trefxNHSUEt> z2H_w}Kf}(a1J>%3Tjr5dhVfSb${}-*FoW#uEshz&~I!x^bVn*HGa 
zqBsmb2AxcgfWg0gb6PmtKv3+vHFn<)X10I)-FnmcH-V1Q)ReDG(T(k=hkw*hDOJA-t52zINTl>vE=x*mOvq;$rD3*0V+- zNAGoGPz!(^1zI^o3>koHUO1FM4PeLFSpv(~%*Qi)=3Mlqu@xB!XiOcN*2&MJY~+a@ z5rqrDlulL`NWagqpx@o!9-fj|MSDi)-?IWz_lQUwu}H>8@0cIGFH9YM*(2v+-BegvXc6b9j> z-R~6CxUTvg&L}`5UARNL=@4n7OWjigWQZsP2GC3o)JFg4xg<~3A^}zY4ZKC{maa?o zecfYyua$9Qmz5yRtf)KRvZgra>f)|_-mRZ&-;qVco%N@8MC<7LF80Jv6b^2V!t#E8AuGVbU8{)4s?ZBbD`p`&)8`hUblIY8L1S|j6-+$}3e(U5| zV5=B(j=Cay&Ix5XwjF)+)OL0WcD(&X2Vg5X23XcPptr}k({HsI#>}hh;kfBIHac#5 zJn!5+=pL<&rGC&s4r(i(@2I&r^8L2ndm6nNK}~jNde0Gf_V1!SoE^Wb zt^Ejo0wg(Xm+n7xFvt6BeaC0J-kfvJrDDsFKs0m!C_p1ZLB~n2Ew%g+AkJP8&864q z-M|HX0T_rl02lPen9b3Ab0Fzbd%kwgt>YNEaUY-tp#r?n?INY#ADH8Zm@7blt_V;e zToEE`A~e|nJ{0{g)m~!|;PGC~Kt@odR#Bq^ewR#uBA$K*B&P=;KZ6|G-=cF;to`H{ zN|9tgkc*F~k6be^=2XO)46!@N0{#U1JVrWSfM(5XUbrS9xA7U<@`r&I-N86VHuUHa zj3;xCx1KVb-5ZML%#0mTTuWg=(>7JD5nV$-J=d8H6xyvtY>k5wfr=?OsCZ;TE9Sg$ zI$s}uOJ=}65JANJak@qI9Q;W6FnDTU$nKgmPTUKd^!va6`=>pC4XkU6FTFO+ zM=_Q$vz=K`wCLj>|MHKKdI-6oE`s zMB-heaS1(zkmZzers*r`ZXz9PYKpy6M@i7%{3ryg6#jcfW4*IEMWE2VjwFz&(gD$9 z1nb%M0MKpCMbh}EqB7RUxAC)w(-iGnbkLrkC2x)<5S1sVQW$H@F#4V^Ky%})bA+%R z@<_1<6oC!Si4vW$0euk^AfWC%Ygh&M5^YhCFJCz&GBRZTZ8K94HXA1l`)spw0xi5W z6lfM3vW5|#P6iYu0pTJ8CJ$NU(WPD`AhD69*Bt505maje%|hB);*DZ=Cd=Xjk3qjsjG@ zGeBc4I)A|LIZe`k{Iuq31@4(=f7F`a>%JpEIZ}S^^*(z)X9P3kj7{c<12py>2a6-m zIW&KDaeG8`Cc=Z>8oQ}R2#Auf>A((B0I(QsxV@cE!Qu5K5~aJI^RDPu_vT!BZw+7g z8T0|5$1keRMI;^w5Q*`5Y4V6xM?wH5=Tg__*3Ulo42~_*ie1r=;a+W=169C`R5|vC zE-KY`tTTGly3fsQ5F`k&P#V`Gqq;?vM}VUaffH_w9CC-5@)O2d zID3iS2FmDYB#<$(VV&5vrHOTL)-_R@SW(JIXDCh!WriL8>%ac%lM^WP`T{jK2-92k z(IPaWDge0xeeyx+J3pijNNXJ5x?6Ex72-O#06_1A>bYouJt3Ni6|^^jA>f1o4a@9deX~Cl3D=p=g`Fwr$xn_WEqf109*j9V3gLpZGz)W=r z99!41yFT{}>5lmgHGM$TNIBz(i@4aoQntoMD?Ve7A$}sBY7GzA059x4eTqy~JHt`7 zACU*rnt(njXrKP{r%yV;eMG(;im3(y@KHnKZ%YJ-@r;oJ_5h#+psa~Kj?fto1>H-E zn!l-yF$Qb>o$q|-RH!aO4!{<@_xxvwc34O2zz_6guDp{d4M16R<{8F1w`PDN@JgP@ zBOjFfoh{pVbve6U^ljqreeZie@s+Q9He;vuP)W6x&RVHdx4;%0gdt53-lCcuk(b) 
z!hO0EI}d~*AL(M_LNeJW-UkqnZGru+R>r-H?zk3N`saWC=O>*j#kDjsjib-y!LT%BYts&$!V|ZQjX3an;XCgHIjme})b*3SIj8mtBUwX6l znSABH|NFmxw=2$A7jy3O6gn`&SOXsnj-U1?L(ce06~*9-8l7`MijqpRWWfBCc82Y$ zCF>&?F7qmvTWy@6;OtHFTCW zCNFaLU+HH@O2XT9Z(xRFjxf;RM0A5=3&g15BWqGwJII!U#mVI~lF_66fb6~JN&`dA z)WX(E058CXdO`Y*ZvcGJgLGY8C|w7Tl(yBli;QsAMIoebR*%cq@t&>`B?(N3b^|ZY z=aI%{4n<4p0{2A-As5&J01Jp(I*(@>q493!7tqoKgp&&(l~%uJdg-|cjWGmxkz;HV zaDdF|(B~t_oDooY{<&I3YsNPNAjt{aAX5S08&mVvWJnyntLNr;Of^Hu^BF_{k)fB? z_XTLKTrCr>)vJO!x%CX^(&127K5Es+5(>tgf}ynJ{abutM`Ock12|*Xr!_aHq@{6C zKmZE`UNN?N`x`(fEk{Q+z-jCazUCRPYu>II7$YpVsAa||+Mm%sqICRYm2 z8Ays*1CF^v;HEQZ3=AsARyQuGl7SF14diQ}Ar+3YyaWhRu-cLWHQqrj(9taHC8_WZ z90UiwG3G~iIJ-2K=9=Rt^^D`h@jGgMIF}Au{pWxF=Tot0hxz411ZwC)y4jjxgU5sa z<{ZvB=jrX*nXk85GXtGDqTrms&QBq#G8a#ZS2-A*~D z$v;5PnI-%D6eOdlvPcGpoBihWdN*K(K2^j&Mi`nW_tKaMaY=tmH!Bhnaiz&XXJG+5 z=Fh#QwjmzemmT2zYZSu=p~o+gpG$LCgv9tYpuynZvq3<>%KRX;MW{vkk$3q>M>a<> z$J3@b$poJin01YH0UY^&({5~!AmYaT*LhaPQG=jtEs|UWjvp&!oc$(um$EuufM$($ zo4IC;(AcC}5w3Bvu6*3;j}Z`?yDlciDMgGD1~fX@a7z&wWzHegHA-Pt#e+60g(h@O zd18sxB5Dm>I~>FKzs1+LKsh1+r^epB^bdbPoV2 zYQzCj>vU8-4Vm!X0188mNC9d(x!Zdh5B3?LwZ6b82b3Ify6D9vcKFEodfoduXMoJ8 zzp~Fo9AsjMIsya;!7?%Ei^##F9M>fhL6kzoxQGe6Zu~ie(y7$sqLtT9P#nmq3kSnbAYvBQgv1G*ym(>jItT@dQsSHo&LZU{)T|dj1v))> z`0AH>eejK&0g9?lG#C{|jdQC#Xsp~?gHzOzAru+8bg<@hqDqGnIbdutR_+7Ta9RL= zhL*uzIvmnxrD;g%>;T8|&hPEe6d`uN8^=OA$fE&GsjmQ}oO|^vS~*v&yLP?T2s@b& zol=Y|S|jp?39g23JV+IT%D_uKe5~D5*V#^c1}*gTrT}dWx91UaC8Pzy_dqGVa zgD(B#=+uI9OJ$BJe34E(&C(J_ z{S?SzC^>8#Dk*x#Ck0LJzsUMCMAz56_v@NtW*{5JNk`mi_M^#;Z;ZDZER98^6xnZd zhQD<{@RA)pV${jDdm~r?7&<{TQZxlv2ionuG4*rwP0Fw+ipd%FyNbo>LhmcGjjW~D zrB>N<9VG#n^Ws=S);}QP#~H7zeXf`*7VGcksYJk*f)fm^#DE*hivZ_TYJkKT=QA7=8H^{ z7wf1l9jSn%0%rJ;rOGY&)h|GE%_{upGl7SUqHbSPXa9!VU-eN!p)Dg|Ast32wnLP1 z{#+yKB4V=?f{Y1PD@0*dD>PL?fji2C69Z6b+4#&|G_IQ9*M}Boz`l@L05o8&7=6yB z7M&I4Q&4LF^$2KcFhIsI|4Z#Y$AOl9#Nas1nqwjbQ;LAJEn)4P;d7nB>gUgJ)HMb; zTMNtWaiDu#sUpw7!sQ$?O6J-eVP*FOeV| zw>f3^d{yITcM+EWiA)hPO-}(ax{j`K2CFx-FGR7WveMnahJB^s0Fc62Mh1v@0yp+E 
zg36qj57DZ0x-oFF)s_H=01*;Fg9TuVO%S!I>*{#g`4hAT#!wR@^cv)`FSS z9(=WJUOVmo_qqU$vCMKLmbvjy}jn@^Ant_~6pjNMbN=cfAqD@8*sOiwzX;I_c znA>Ln4P%RaD-yW2O>t}{W6aS8`lU!PuAE?Us*~EeoNnV{qyaPv7waY}`=}ICbK*Vh z%U}AXUwSG)69{G8Yk6yo*1r^tCB3sGyS+cBjZs&}$MKS4gGnc6fF_;ine;MQ32f1m za_@iegCCq&XAx|~6i4e(X@pDf{QCR485n`exldsAMPBS_*Rx&hx0(_l0hrMy9!o6h z!9hg=`ue~JK5$B%1=JCk%pILC4NPQ4#K&BT z$f#qazwIyA7@IxESC}>rSsS5gruZ|~Ny-~wCOU=exi`TaICO502oyl!9rsSVE$e`9 zCe_Y!={^A4x&U?b-iPxLOv%y-Z+u8zEG0oe=C+^`xsZgTQDHUt?#CGr z&13-Q*!|wNrU0T(#?4TV9gglP7v_ZX0oZY{DE~2HwbdawV(gq74v|xy&6%j0^Y45Vf?rMbL=?ZocqAa++GBqAJ@`nWT^%fOW?hCn7eKSXYOg773?Edq|k~Yd6ws^-2fQqg7(>Xeha-Zr*Dgj z*sr=20x|ZAd2)aI0$G_4!QZhrA`MD;6P1x>3}pGYFP}9Y;{b$2bTl$h(`Jt%gd>Np z-EYRvo=92J8Ot1vZJfsl(2@J6d9+TV|2k==f7To|ur#(J&FmI3PPPCY2f&a+GHKld zGHZOirDxvqck(uKkJR}903ZNKL_t*7jjelnA0U@4mf=L!WE^M$z*cParC)&NhSe%6 zVe$b8sB&v6%ngtKKOg0A37gx<94!Lwi&IJAU~VWBitkeUyD{WjY750qDada;2T};G z7S3{MITe?>nC$u9uX{b$42+RWh6P)uVFsfnluWUAu;v;A*8Kn)Mp7z)G>~h+a-fFs z!{|xV`1ZHIeKM?4NhsB^i{GetIZF%`X5ap@pEB%APRrgaR8c%eQn9iKSS+V!opW6F z9>=BonQQZkrFBk&IV+mIHnMi<{`4I{f~98Qb;Y6Q$c6Rb*fQuy9J0rG6`}K0T>zqo zL6xe*0ii3&=U7|Qp?cFbzW18nbL}4WtV>|UnAjNWMXy+E-{s-^J5o)`kLZAU3fKF} ziD$!FPkYGv`yI!LOpp(H0azfrY$d_~Y3W+BMrTOB^6j$~V2faU^rIg=**@d&S@-ol zcQB*pdtY+u9j584b*=Iq-q-U04Zeo4&^3Y2Y&5aUVR6`+n|J%(H)L%I>A#>g{rTM8A&2n`v>p5G!{BdFp0NI8?H zU_aD$w4V6m6nFA6_L1}P{`bHC@YbB?mbkN_u#t?mbOZY{W42X)93@J|k&`Lq#KEc} zcE){<9D4gnL`FQrx!!dp|$oL1gVlbL`oas z1acrbecp)^R{g}1BXZQ-KKp&g=)%%i1HabAI{3CeftBvX0kUVM+Hjr`3}YlFrrzh- zv`$86k6J@($1!KGf!9EdYsnOF`-Lxj;nWxa@xjs76o-TC@0?12M?Hx#xW-R%#PO%= z=sdE-h5|};{A^zq#c@CBZa^AFdQ1;97i1WL!Z{{;p24moh>#e@?!CHy%=|f@!t>|k z)$cL>vP=*>-b3B2y<#rRiRYW^*2K^J2EP~GF+PB&CP{pS4jv@~MRj}@(Sbk`RiP93 zP+OXFjKpi>DD_O*y$H7G5MM|XcMqMjB**>T`&HjU?}zHXkFX_j~)p`yiso9vJ{^?a3{5GvhnF0L_gnN+AhF zV+3q?Me|eUIHOJ}P=2n%1Y!*-Ak5|H_~&pfrQ%GTEww8cTS~-1fs_&j>3I|`r%i_v z!sB^`r>9C}e7=8UXhdj)o27M}%S$D+91QoRJcPqvfaXR4d&53XUKnUmLI#C$V%#|o zm`k$8S!96bK*QYYjwx)IK)~bg{_gKC9?G^fZOJ$XN$Yjs%lIyJP;;%?H2J>v7{`U9 
zw3MU1G?r`r-g#5T%Ms`tif44LjTQSQwTm$p{n+CW(BwilAT zoLs*HxH~IH>I)~xH|H&o!sSZ6lQoSRGkzx2*zgi{YFu&02h&-b~2fHh}B{j5e{>dbk+q5*86 zG*aL+;KK3qckAlkrB636WP~l%3Z2cS!yH@C9;-_zU_)M9BLYb_eAe%?-7)CK=s6sJ ztQ(&gElFW&5{M8l@t1BeSfZDJL zBLzn()n#Wq7;dd_dWKwA>@uO(>P~7|s6Uj&yYm@?2q#j2bHQouUDmwAn(?f? z#<=Ze=^%3Y&9$-Yk;b)l3|p=-{ zg939BIBG=!%qQ96WYN)NqWTIlQdEb0`#jz1IpjbpWc5pL=qkD|0I7KkVh-Tw5 z7xuYSDfSErgDAAtHJ9-D){YG)D}Gix;ho6}pxt|yUgSCB*)_)KZ$*>Z$K*Ac4e+_A zcL|7*Er1AV1h|b&H#$G2Lh)&O?-<2#Ur_|a4!K5L@(Dc4*h(ELLX(r)oB^$DJ^*P9 z)+2}A92>uD{PbM=!9DCb`s+Xb<3COj3{jyih5X6lD3iuZ-lU#+AE3-n1XK+Sw&<^0 zt~D0#P4~%eAs=j_j-R!%KUdRaU#cmQW^Es_oBTEFLslY%9zm#GJ11jg2H>>*_7pHH z;~`LftxbIanrl~P_*^ZEF&a+Cu8xKRVJNW>zzpL;7=?;!DTwg^L~L>jloDB6OxO@8 z2@W4HMR|!96?K^!CkBs_g^CsUW1AU;bGfEDlbx|6Ja|drxW5ikU;gr!PY%W-Pg37u zXFe2{3i5L%v~Z-@P<2gCJ4QW?bhw4_Eg6;>d0_xFgbRwoX=Qe z4hRE{8MX(C5|AUZ7OCWOIiKzm8&5CmR2e{9GV-3wk(Q3>p7QlMP=K3vC_2EIG)Ejx z_DNKLL)&{*Kpohb0+=O0lY?eHdp9=Cei`l22tLdq-9S&6FMEtF^O-qjoo&sjFY+=4 zeIsZoy>Tu|REuGAV7%VDj#=iWvu8M7PJuUf##D5cY$i)&CvYPx;+=QiIXR`(Z+Vt4 z@9R6zZGJ@jL=&pZb6voJQw(HDn`FPqNY0^q2mF#fYXDdvk?3I2%k))Xk6hV{8RAsSW_cKs>)7 zU*SublP^|j^- z)^?6W*)pzsUpq!-_>rQpWL-lWslC_K#7^Jg1!#6yRT~Kchccn=dZANpff>Sx$+MX$ z#JVFk>1~a4N_Yo>pA=0yJw@@Xf!5oM5r#o$ElxpkfUy97%4s}Ah$+nx_@UkAp{r>5ysK*c|HTf=rcCAhQ)7LZN{5Uk!luu{OES^Yu?8k znmbIRu5u#C>Hy65mektuS?OKihmv#h|U;YaX(I; zFMy@@2yFSi`50Nr5C(S|=jLdo-jO){LgSPX^z8809tXO+B0NW0(oKeEQnmoh$xp43xJqQs%_P`y2$u1DN3uXv0XJ$Y*l3 zHif2Fbbm4`7h0-GIqhrfa4t<1<7nSG$jeXTEsZM{Rdhj35Qn2~Jl8tn*DhQ2deUDw z#4)h;Y$-QuLE^PzxaD(n6}HqmN;Pos?k%-H>0K?EMJBA3_2I~Knxu9}WAHn2f>bh3 z=TbCc*K>|?Kyyg^tp34$ssrFyj?U!hP%`Ak#^LmDW8;+A3xOFrv)TmL1c=;!Y@QK# z@SXrhMeN2KXbL#y)Q#XJXLfvl@54TEB7K&#Zw#KxUYSQ}!}PdkHa_n`{>+iD((7`t zdRBEVbDh4XvNVE}@o#gwV*H(VCs#;^xkduri-Rq1UnC zXX%J#eu@rtU5vSF=@UB6Tu5D;b|`giz^vLIb0#7WvfH6Q6S&ktnRmbf`=ToDaEZ;x3U5ijE?8^yaJtZt1CZ`Fe%sIeyI z7jR+A0yXv=kY+y26FHFPIHn_-1M?nX@a%xbdy@gAx#&Cq>b<>(_T8dph^W1~g}wmI zR>_>XevHuAv^s1s1lkg+SNY^8KY215Hb137xnzKBY>H4Q8)|jEO-IPfpT6XzP^$m> 
zum5^VKl$JXKX_s&r}@Nluh<5o5P=$R`-+Q(M zw-@SGh#}RIb&rt4T2BAtH# zUVvkIsNUMmR649wUCc6htBX=m2gbjSjesgkEH4IU82b|V=$)#^J&+g(J&=c;GwclHSf9k>ll(neD}rK86(IU;L>Pfg-3^){Bkll8}z>W zk#ngNMKp>!TF`9mg}?r{|{Q{r}Q)#`}{| za>)*sg5~dPK5sq%RQhCWSDaHfI_<{f`Z|FHQc6=BpBMNeYwQn%qQV$T(>oWzE8^wzaYIl#6|;UQbH(YWAiuTp_r$K8YaA0GLDvF!kmjeQi9%6(Rrg@YcwjO zLqQQMR8==a9w{t}3K+K!{J%DwYn-cdlak?606qd5%8;@z?|zHiPn~1?SR)HBeqer;|!Cji(Uox|=Sl$;)Ot%W>r=K$girYfFe&7_=g9LTA)rHAQB^QGnU=xmOK{nNR~p{5V%Z|R8ulV{Wgjl9<- zkK;_I$AEWUN+~fM-Lc!%IlyoZ(A;CKa#E}L;fQsh<+*gK>;VFReJUl63>Q6`?!EEd zect?*N@b1$yc{~Vw4(D7BmtBhahVF|BEhYp&v!0O=}*?j9+R!$-}bs^tN9{BvLD6{ zh~D3I-j57$n1OU4uGBo^b)PX3S?UzqNCryPS_5t_xog!3jTs>3TcpW5@JWo>^ZXt; z!=}^I(jMs+;{oc(ac8BNWBvhh)@R4Pnu9*tg#JCg&lJRW%?M~bd#N6<`}Fft=ErCY zyf<4V0?Sr;kFkE`UQGg;uNrjJ?12uW%K>LPspe24s54i+Z+mG^FmR1Ni6j`o>Ih~k z{`WhCzByM@NB@q|zH6R6_n8-f~3R%Asir%1|v&3*igU${s~tH!jcX;|I;asI`VJvn$)#vbJxDR&-MOlh%k8^D-Hl72!wHFI3&~d z>$g1fsLzreD8y)s>JfpKn z=K^u`tub+yid=H6$&TLzsOFr{bMNXsN|BLX#aS0UvEMtK6!@3|lt{ALHQT55%`{Rm zB}&HmcMp2Rz7s9+e#kQGHrHX0ZSwmyDU9ZCTC?KWY?%m#dRDpsP_^f!2GMO}x{}Wa zzOtX}rnOcl1|*m#GD;=_fc|Za-bv~cS?GQIEqg!KP_>4&;hqCF#%fOi?#AI7-{z2f z_&0yz!yo?e$v)e2#^CSlzDNz()OclV2w-i=mG>IEYfXXnrTud5Z&SU#=Ipu=sH`Qh za=ekX0GsY}{4zcb9Y59#uhBhQu4x_Y#ZpVHBe^#I4jz^g2XwNd#zmKpXR-vwJ)6F- zj`H5A&*QsK$Kh{$+afcFBx|gmk&mhrfecO0e%dmhm#%#Qnj2@*Vu@aGDgY26L&}MQ zz@qAGmZL{-r9XV>OJ6!IvMxV1Qw5U@p0qIn;eG(px%9Dslw5zE(G*zSa>!rI0fp{; zgh46boFocTmnqJHZf;mr%9oQM#f&1dS+%6r0D&RTsNOimua7_33}pCIT8spcfqAk1 z(lQwT4vuA@bK0(b74j}kNovA0<;*=t=I+{ORCMo9Q0%)sB@M&-0uGG4_uAT4{o3)2 zpX(4h2lydRq?U3->^Y7!BV8``m@cx`;O{ZUE%#suIY`oc8t*lR*?l;la{e)ZbQ7o9 zyhvA)lB@e8y(XeXSBfCgPn`vJ?HrLw&V1=r-bH;AIpxe+Gi%bJha+gov6;4e9_Mgm zpGQYO$Hx8WU`}djp7b$0;&=YWq4q2h1|V{59$@_KolC&Ubv{=EfE+Z>r{jT@oRr=% z202za5a(dQoU|o*@ti$gc z0yS<_(Z0E2FWGzGPZu(>2yj%GUn-W*v-i~sRrgUBtC;M%*8$J&J+>SV1Q|Q_jv^S-l(S^WwPU^5(i$%P{hVGuBC~@$SzGDz-cg}C zJu1E4dhQKs0#QJ{HL*6oMVF}d+~q#01dp8~e{j=(fhuxQx`BHLTn>aV4>S^9e* z+y0M456mb%K-?P}&~A>W^)g3{`vqv0R&C+$0U9+U(zLLRm?+T`=OR!LQqQERP+F8E 
z<)9Fs;4;9Fy8DRU04M;E_JWr>ar9g)GQqzkZNr8dyN=jgHa#ba;=>AyCW$)9SLe6_ zIt-+?&xDv0A*?PZS+wa0g>>Umy*|Ff3LezXOf!5OVa}>|VF1rLR9l|4H80jl? z6d+13lTU!u-_28L64$M`VHQ0y;nsgAZ_UbUU-9dwSoo z{=gUmPh@>$=-CtLK&~;Toa8`MKqcT*;dW_bMR%%kYwrY_#`+o8*aK{u2&Qk&Ek_xk zIv0KEU3~7EHr>-Sqf5GEj9e|r;N0Jv{~|7Ai=LBCCbi7_BO)C9DAHca&A1HXP)^o(xF5|fQG$5O~i2Zde)u} zpoi#z4j#2$)hAn%#sy?)Ho-r%{(uc%3y>ZUO>JLatGljUukmyAjxpN*aX6_wcfab=PC5YHK#cUJ@=bzvLFJng{0@-Q2LfRZczyR2x?T%I$S?rL082S@4I$+y$SWi=Sm+FA#<($8%UqdNe*wnH+MP3)q+G^T*~L)8a8y6yXFOg ztbGx*ac!o=LFA}L27D8`e6CKGenqHsHp;Cem2vVt)E)7k=(f3rLtEEw>H6wvfEoqs zw}6^)@0%ZM=O>+3EtB>0ZOyDXkIyA>>45i8X8G%ghMi9N1{#u%^bAR{l zeDa*P;UN}A&K zy|&}t(re|oj!gk#@`|hg+KdQi(l{{lYtvgsXN1$|fKbL2a0etjt4PAV!Mb;ssw#z5 z{(CKhb5b%a#*G=1LoW@jlqA1r#I09Fy&PZq0+8_g4DL0xEK6&5$slqN73y-nJNbHx zp-wIkNgAUto@#2yBU!1%>u6Bd-oGft+V>kHHbtK}!y-jM7J2Mozp)*w`;QTq>GQ@k zlTl71BoXllw$Ecg%0jfo8#z$< z@NBkTMhV}9&f`x^yOh%F2nZkv`EYAWd1SC?xCk^K>Ej>&_)V$v=X{*;-CL6m#uZTj zY5=M=k(%~usbU?(iU0%nYB|jY@r#P0A-szWd-tNEtx@9w60AR+Z*SM7(R#=NqPNEy zr?vWQuYED2z$ z%qr!_hyy!9!q`-K@iqk{t`4WPmP-r>LDJ^gX2eQjAf^p3B;Uq1tZ88tJ&jK~a!jGSV=h3wUWaf(F>$T8N**VrlN znh`J>l>1!aYN?H6uIn(2l&if+J{jkVgx5a5bC(!ca?QwzcDT>2MttoU#_Iw!x~}P% z$gp+juD`{8`#17|q2}!R@^3nbv9*plMCL{m*c=w6iFv=*9yT6P0t_vJBnN7(bM5^~ zS)u>vL_sp zMQci3c0Tl>51kZ2Aw`Q$*oCV7opa&;HUvch&_Lguj0)+?@xH`(c`m0-G>DSLBwuPE zaP2E%3T!5d1^~l=bAl*3Y(}S5r#SB=wAa4R>vP*@AQ=~Zl-7bt!{`GFl)s;xD6CN@ zpa*8i&o$?Zj0}+dBH}`MW6`8o+An014CY7}r{{?t+Eb-NimVvpmT}m(o`pHYuyZ~| zN4(=TX;Ir}{l`Dy-Mo`D5K#wvz5HwM8F2KDIr%w58J>#4+pD#$opzhK_a5dTh+ZI0 zq^*k5YylBtDk9@A!Y!IiHqRZPxpqC~h@Q2db=}DDi>4!=tfTeg#FZDnCb}`*6Lpoe z9~Csmh!!n47Y)gQ9^d2MYrbU;pW)h^J$tB5Vu&4VJfefVbzH)n^LLFL^~^cP_lWhP zFQnRPu0dAWd9_1hB)~NyG>8M_=@y+jT}SUH?UAGI93S9N+I(j?Z6P(5WaQlZ&w(0x zzx~+(;&e>i`gHl@bNN3##U6ApD_hieX~{*`wnit@9Yt$^Erc>XiL~=f_DDoarq9@B zdkc_l?z|)U9tpd--$iuGCNd@@oAJ;=4xhb+L|R(MHGh8rnl-a@=9-G2) z0xaX$i~a}&d&9E4KDyHk1ZHxcu<6oS7#&KV(@1U^J&aV!a15c}Vwj3JkS}{fWJsHG zMh7E5k_WD-CWE5o+yXuTO;Ly~Io}Nehs?%+gmTmy368OvFk 
zL#J_1_v2WMoP!k8dyVm1HRzQWG1%5qy&*u;0ikPk$_Qv`8Bed4zIKcJt$c-xmt_O! zQqG8Caw!^h(5tz_nsJW%ku2Dcz_v(YI{DG(QtUBj&w3BCW-oEXoj)Y~%$F!lUC*ZV zG|&1ja{zs*h_w@#md&xHS}tY9hTaSUwU`WGvFiz6Tzzqfev_kL>sou`5dT0 z7Sr`kvIn$`kB+Ja&)7>DrKf3GWrMM#+=89V%eNBe~@K(i!OXZ{|GYtvh78&i!; z3);srXp}*nFKmK(!)LfNL~9)k!YoyTaO-+BvejE&sY2EaK&jPP4 z@A0~J-x(O4CxBMuqx7x+&)%K%+P9Tu|K~-SKmw z33jX!Boqe@Qpn*@9xxbfNXMQwY}m2o>v;b8%)j6B4CXV}nsZfqueE<`Cp)`Xvlzua z?r~jrH9G5r61R~fG-JrcRxrBO;w(hvWywEtK%9hc)$|w;#!5V%XN`4EhBZ=UnF!Yw z;OIG7EG3)U%!G9UF6XT9v)2DUw`s^-a^Rmh_Yy;mv&|O3@@<`VKGZyPVCh}wny0P( zSh~&}^8$;%cJ@|Q(Kg~W>-I=!fFTw`AsOe&-8!Xj^F09Zl$&Ke0fh>}@F?-BtN~%; zFjf&lW_b8{>=?&-#x>08ImYZ-pJ5(4Oj9ui`E_)Ru+$20WH?!q-D~S3oo6ko>`32O zM?8t0(?YfwAo8GY)ul`1iI+nt`wK6;Fs*rnW`6d~xyNV8J2@JADxEA60a&-b=zK=J za$bx9eaur;)h<|WtQxl!!)@(f7_zWx`=GYUy;ewym)76jt4$0#_D6doyf!C^hj@$h z+czR(Ops7j*ytbgB^5zl4`cT==%72H*(z1Tb%8yhW&l3{`rrpYI0;Y0U|c+YubLRa z&s95aVuvtkP4PUjp8$^cBOLRxnOnOyD&CRI$o)~#L|zXIU?FCQwsmVe?$t1TS2Fb24Q^MjS)U3Wvu?<|1adhDT-!n|v*`Ek!aON? z-b=u-6S)fu*#i2E1B;~udlk=Eyu5D?S}r|f+QFq?^%!AGBcah>K$*pkUIA`T&0D&3 zpT_6wAHs;Fs!|qNHlKN)Dp;_jcgmsZd?Grl9IkWA<0<3P9>9BOE_pr4uyCa<2m1k0 z690wem}~b4{WaeYQ4aN#zTe8dOMq9~=lUWepr{K3S~#vf82BGCK*u3$(KpvHM*2_A zfL8sIFN@T4_@w>c*CAfOyjo)c6FQwI$U9&b*8L7#+=4B3&C{+iEcd7UAJ>&HXpb3< z%i4yhDu%>3##n5v>HF9Ry20 ztR`!a1(2u4MnoZ7SHRyPRlE==C3ksgkh-_M?QPTMJ0{Am_2Jad^%=16UYmx50rZ3W()l$$M(t*{9eO)JJd^*KCvblYw24$WnRc)D{##yRhU{{MqUv3LUlrGpagpF zk8Bi1oR`2)U;SO*0{bHw>KfN;1LKx&gvIst1s-j>l0H?Gp!c~(zv)`f~-49V}yM0c-i;rf7J}hs(X#OJyY)2sB@M`JG{zs?s?u}FJjnM zX;a8-hNEjL2z4hk2TGd-3Fr}an=j7@*ITE-5`q_Bd~vd(5vFR$+jIb=CZBKtTH+!p zjQ70fJrmIZxVF|BFXq)oXVX{tBC@nq;mkn~;3iK&g5aCq{N^dF2Y|f>fN86exc~NIq{ox<};p7b)51wnh z&67Uzs#*W$qZn^<)`5(gP%(gq-iYVjZ*1iT33HLy8c`#Qj4E;yfes$BT?HAgna}_L z=4Q+_NH)cx1HwSazQpnjXrck+-;hBBNO*H4$&bU zJ;xJG{*3onv|==FGAuG`GAz1w*SS6qIe>xMLc{|8Yw}V#Vpe&c3I%}}gOD2GVRNs< zzCMkl9$rzIkFgX;hA-r)0NC0}f7xra*Z2j-+lTy}r;}dGVC<(p(QV-r*Sf}YJ5VMDz(>Y!<9@^_$e|bg4#Xed44g3ti_gg~O4 
z3KYmdmYu8sLR46GS+OhIXYwgc)z8FKyWigyGsW?~_q}ggYO61xQ8J}9!o$?&wM7XR zR}OsYzcXNL?1h$WAx6hexqcv+O2Uh2*LJ|kyW503n=Z2P0At!Cj05=cr9vDc*nx^R zqH;_|Z$~mwThC2Q=NmD@kWdL82c@XV51>E~+Jry($xlvTgzkL%+uy$PY>I0pG@})F z>_3?z!@@*_kl7n}0n4Wub1>*+;b}mieL`rngI*aDPN_0=tV}+Y`^*jfXMG3*w0GNo z1G$AtjR*ExSG?4$`o>@Yk&k?20_ul&f-g0uW4M?8(i8Nb_;@Dl@*b>EsWCshzbdO8%gXO0mp z^(kVbU*<1wi@w$u_ZK!+@g?g=yUf)*hd9FnjE22=6qkee}_E(PG~lc zRvwjCUU_9&;1;)pD2e$NzDWimxbY-pSy)*G<{==tHVGQgCH4_mlM~Y!60EVjKY-hK ziuSmGv#ogn9$UR{)F@eN1JSmT z@C>o+Q?euJAD>aVOFzp8vY{bghgujrPYS4{^pjwDQib@!ZOfUV^Y0Ph>{E3*9D!knCs&GGjCKemONlc=naV zHAX;(92zo<2#q0m*RvY`Z%YDVyx=^3WSQlg$8iLTV-W!Bj?4*by0)SR*Eq)WHjU)c z@76ILX5S?Dbe4UOZfJi-EX%vF$BVx3H$8&16iMNKj2-r2>%}^K=v=#{IT*bobhvew zM`8_#cP|-4Kp1TG(VhX=%Bvl7c8!UGDfAQ!13;H>?^z6H!k4U9GH#!#m_-C$`$Pt4 zqxDbk7b-OxTr$MCFE36&z4LL(U$Tcqc*YX-J;QT%o!1np;o)%XhJD2RGHN-*cPzxT zHl8CPyVQCa_xs*A57!$>knv^g(k~=N)jq_H9)?+YfgLB(!Rf~Dn!?ML#_^daG=yMH z7)QX5t+VQwSYI|#o6&R6JvX_dDa7JMJ=bE{b*=(o3vd*&#=ZHquYGM2EJ~Epv{@rI zOQ*wD0m()OT(agaS*>?1qWV^8-r`z?^xQ)lE~R=7du}qi%i=qKT5@Ic@!k0eto#y- z0bBA*Y55{!ANtUTCbCctLzTr+jH6qd{M+OJQi>255Hf@SOOCaP*xZ2b5|#tTS^np- z)w|Z9>j4i&1mJ_5+eqbBXp^&_tikyqZ*9na>aV;7?TUyT5^`lctxw|zbXtqragLm| zeUjk4z%HvG?-FZ1FrXj4#y*Uj+1&HuGh;>91lCxS0f=I-SzzfKfIz<(W)AUWZIx(k z;dK%w>)i||^ctfMFPaKEfTi$RWT&DIyX51VJv95q#5hD)gw&Z!#14rw*ZQl(zt0#P zcm@z#Amb21zI9%n`gLTVywO#D7lPw(OrA1wPKVHk)>eCaA%nm=o#$HQhy2oOJf?If zU1{wgExi zazoF<6vm68fsqy=vd_%J@-lMOHuw79NM@4Lk<^T`JJ;)ZX(BvJ{oc3tc)xY3KjvKs z8gCw*#apFM<53;^p7qd0A{w`G{Msk11#7@uiOi6vX^&$(Vla_6M1J$gw7-sf%QfnZ zXPVG7x%(z*1pF#K0doFFpjlxG4=Z@KTOfWzXkhygIlRxS>!5aIK_;urm1 zD4InAtXW)?gi~87*)3r|W8XR`jJ4fa7Xay&XI!IDKczN}GMbz$c|E$WTo`~=?uTy* z83?k0*n}u2N|=?=Pn%7N{Y#2#RE&Nnqc%A}>*beUo-B7Ze485U6*)xmUF+F4zYg0Z zuK@<~zD6uMuRsMM_pyRfv^JM-fBV~$$7Sh&!U&D$>pu&j`;R@iO*<%m;ijzR&cLDc z&ruLzmr*4`1Ju6mb+3EPul&ldOk;bZov>yUemL3t;KoF5Y=@h*D@V2`{pWWgxL5%w{DMVlB4 zfvcf@Mm*In{I|Nj^Ucg&h;A8F5%SEHZ-V}t#a(M7c{YqoIz zU3hYromd$)BCPlJ;EOhxTXR^~TQzO5Xs1thGSV=RWtj 
zNk&x9;fc_$^v{|ey=6jU?inBy2vGHmOasEkf!>!JX+86LcK*;UmE=CB?40tfbXFwN zILpr1lV-5=&XUWt;3NZ`k9-YAlmp@HCaX|2ZYMz4~PIb z@*0HU09!!h637~@_?`nS0l%&VLRo>4v`biLxAy;j-L?@hmo^p33aIC$Q)=1fLnaG} zC_&YRo3+pzY$^d)e;+Ep0dX=4lx4XhR|-i`l4{^H03iJ~{!UbGW1NSH&3ley^DwDu z29V3sVW|dqyC*lJ{iZfsN*_XBgAGVftSpu(7w%k_%yX#te`vS1QkPVCUSMvG&n7!sdW= zo^5jKpj2KimccQ^Y|V3*tVC#d%k)ne483RCAZlthAGVO0+OnI!f zX^8t(g^cA1&f7;P0C?7l{c|MGyX3wwUb@b6JOe30j*u4X$Ue%bL&oVVmV3oj=AqmB zj2PE8YlVD`A;a2Oeu)rf;lPYqh#hb8{5(EmI!3OXTem#am^E+605bE{uYPqZYC@lp z37Al|S~#j|numJRPwC4@VqgNe(LTW-Zm(p3G>`TNdWE5chmroZKbIqOOCFi|c-=iT z3lc;Tvcg!bEW(r+rG^bDiUSJRQyS2+`W}ktntXbdWZY^|n>-*=|H6P?qh#%uNnP=Q&{IXZjFXf1d)L>}r z44yobw~`~XHcod!GmH@7ps2X{fwOOX;~Nto6YDKRqVTeZtn`orZL|>C>}=cwm&Zu? zLzZ0CRXT7`d$kjgW9{=f%dUH~m;e)!Z-og}qy4EUo;AV0#m@uOfV}(_UO81tC?y~j zFhp*vKC@&MyA)u}(mZ-&v4-!&ErydOwaYqD%l~0l{MwfqRYfek2ye>kd1MkjG5d6bA+G@ceBCT^dp3a zwt<|mKAH!zFURWE2u*X(fCdQD*K!c(a5*#;XxO)2j*;c3dJi(g7{Yo^KCBsI3Do74 zoL9WsIu;oW?5hk8zR0t`=>YNzMDnVt)`w{GhNc@_9|%1p^lNMT&VAkJJq{ni67-3+ z#!FQZ1mRYv7>v%{_Del4Qd2<{$@Rkig>vyi!)1t#ec0XyqxrpRwPfVjxleZV4OTT5 z_6M?vT#8Yex<~)mn5^CUHr~IF(74VxBSyv^NdDgUzBg^Uh>r5t!VZw7 zOT~Q~FX1=jL1}t@Ap>NBMZ3J6tyc2+*pU+?nnl$3o0Ds(E_LWC*AEt6W8_Q|zY7PE zr*&%t?a;oT^Lp}T&$EYH?__~|(;Z}u&OF8NFd7ROR4TaQ|5YEf=6Ef5Bdud|=>3mf z@$8gw4j<8bj4k>{Muo5PoQo8Bma3wLuk1mLb@VGe%NU_*n7xl4r*rKGRYkK;+aoGY zGw#j1jLE&LbK}UhcOsLY{p@F_pGdsx$hf_Y5u}QV_Ku~ZRdb`*De`eG_udK3|5{7} z6vDYoJ}V~A1x1wA_ZqwxSqHS2Ya38Nw#1NIQ1Wb?Rl!=|T5Y$m3Yi033PrNe9@5)% zEkb{d?HDa#WBdHS@Cnv!p@QTP$XBwMl^sa5S&oNYJp_qa0_J&Y7zTvU*zj55kqC;M z$Htl(0f0>bpsfO$wo~4`FGxDiguc#mWb#08uk+?qZDvfI?)(LyTD}GYz`Ne{t|@u* z*f24D=Kzuv8qnv_+ct)m9U{LG%{;t3RXjl2&B%e& zu_7u#TN&zOh~BO?(NjXF5DooNLD6Q8;GRRTk`sxpzx~_4JrTe|%hn$@B;nMBBrst1_=0XV(4tm_4!%K%?+b_tSZ+c49 zPsXowJ6tyAA7zk|YT$l4LFGdRL;Dz9fJoU-$(HA;5ZQT3^XHHF^)c|l>qldJ001BW zNklpy7 zLdOtko1jeq;U=GcBHZK?@J3X~GcV1q^J4&7WJU<4A{oN&idi-$+SB1{tctvqHfWxY zQ!12sFMLF!-SLd9ZD(}XbckFk6_jjkw*YkznbhgZ~UYXHKPIrIlOJADu3K3_yRE68# zn#VaHY%fwZ5Wry+0Ln8wojNB^{eELuu5oC6&G7+TVSUkE+QI8~ 
z$oX*RYF}%=!U*$r?%I304=|@I>^;IEg~aLyqOB+joqi6F*E#$97|&`P{VcD_UVw}U zL8E8ov~(5_SwwX5tkAji{@dUF_KA2`On`1?uu-A1gR#p&6cvE93%8BLe8F z9%CF&edjYxXe?F>lqE$dhJ{+$;$sq2Po0cI?hmL5r4gn=;4LJqobr?_P&vwCHb)D7 z0O^o--miVQ5iq~3+mxC4r4(7hSo_4pmh*I}s#K@+RNJjBd1wxrvmweek>f()l1=T_ zkIE|hoAUO#--SduWbymo|NcZ4fK^I=4ubQRAU-@gKr_Nr(GH*60C=zv2BZc6wy_9% z8c*~-#Ap%O1U`j+dY^XMWaRDWV>v6kD&1sFMv9PDUJ{A5LX5UlUOIG+0$;|Y3UJzr zm?6yCNxpdhP9-(TFHf=Y5!xnSFQRrTnF(-_EnaH++v(jZgJtEsHA16L<_4L!CkdO1 zWU|1Lr~B8BjzBU!{+I5MOVGj8LLA3(?T4)Mo_pv$+9kA8_&3YC96$!fQ`glo^Ur8x zy)%$(1-9gnjuc|X2t-HF6$lIf%`jn|9h>`hOh0bno`_BQDPvk;zsA)UGNZyQ^3CJb zp|5i}Z607dpH*I8xG*CjBEw*qp?dT_Zk_8}7}u?Bd}av^tEvQfK!`wb*9r5$csr}( zP$jl?(9d<9#Z$@LkAM8*6DVT+R102+4ni{?6!t6%__@aVA)gZ%1Zr4nxv+WKxT%Ff zjux?NmC?$2N{+RuBNaAu9spKTvBWItog!VB86fX>WzwC^fq<~O?;}BH`oW;{2Y>Jf6X2rr zB9*u7(J&`uRJf(Dsz*JGXM@cN%Sg(tALOrmJXY_{W)n`ytKhRSUm;ILVU>}@q8_Yi z-~M)uXZGBs>RlNIm)5Ginf^16a(Q^$9>YsBg6uU6ke865*V|L17~V?NBK z_KkCl$)%t1OcEN3hUGzWHJ6|=b`~iCx-^(L_k;!&yV zKoy_u8i1AceI#;i7G2LOt9&L;r82j9UMTktss&8kC$xeKZXV%969lo*SGu!&$1T0 z1A!Fo$MC{4;k#1H_06+{xGu?@XYwF2c34}&&lq$b`aB_h@f~1Z@{Xp<=uw7IQ32Kz zV;ZB3d>I~Xdk`=B+&Hr~*U#GLb59L#7bBhh%um>p-nSPSb9+?#pO7=feuN+Mf@KUN zt9iukX#>j=^~@2Pta_}*N;?8d!ptbcTXmnN_*sJ-&PKsfNVjyr+_R?ecJ-In068U} zEMt=BS!n0yblk#tS!)!y4U(18=B~OP;=o!Dgj5QBJe?k(5URwgZ}a7GMCw?ja;eY5 zC;%2Q$~;;&Twa0~GRum~!^PY1JHPWgC*^!ys_kpf-9=geI5`2G zlM&#~JL0)OAW~wVR>`Syv_5N%c)qnNG67M@aA18SJyjy3H~bAm(l4GxXSg1KtSCt# zLd{8qbMn;W(V?fUVeeFBa(;kj1q1Yx_n2`?n>!Hu*eZ)_<30a8qB2&MoXc8aP4uTK zd-8JSK)>3WmT?FrCA4tL{_%vJp^1Hb5I8>uA=XhXzPb zdBI$=Z|2d)&3a=~v56>&DR<`D!yreG$v8%N5Ic+#c#!yr%vABrHHCM$m*rJ1gi2`? 
zy`=KDz3pv3LTCz=%EIbCKt)cA92q5?x#w-Z{trO-a$nX(V9G}COrOquv6&!GRYBX5 zhfxBs{1e25#}@z{2~|MN^IR_+iw?3eF+w1DkC|LMk{TorF$5HO285q&B_kCAK+tUB z=9-SZM54ZTvCun%2+(xMX>C16j?N|5de3g$=j|Ph0WADNmJl}vfJ+_HcWQssDJ!fJEJ8u;5YSk--NH=TjFH!l$cdWXK#f5t}VFfm#PEmbJsp`s_6 zmwC8ZoEs+l2wdBQ5E zOuop9@iCs{j+~O+WQFlXK1>k^{!h-0?^vzVyACBWONNqb8{c!c=b0ij6sJS>00~0F z1$+x&am%C=Ui|}<03=}wOQ)xAdDi`!#~A?tn}v)w!=}u_4d{-ZiN|T#`db4FOW{%g zJnJ8M6oMo{TuzL&muHkB@4!tU6*=HR0ao0n6#1Ai>6zrcn0d+`7~<7ZlGsMVOT%UF z%Z3A#l%q3lP@cmG@X9N%Ol5PulZ8x}#h9GGYZUa|ecmVMhgDPQ-7)XSy#Wj00Z9`M z!(&Ac0cQ^daCY@0a1GEP)W*>q(I>(Jx8$Ydo#A2RDOT{{lxlMOBrihqYrpnu6X6HQ zgv88&z`FE4&J~R}+4cKU!3*OVN&oPNKRlh`^HA3PrBRt%qm%2rAFR*Trf)#cniBE_ z(2?&Bw)7n2-%oPFOIgJ%WmHuE*TsjB z?(R)tNg=Y zVeP@xpcCL5CHm94q>jKlwnI2Pvvrc^TDUg9Ip=R>AD4VLu2O<2WnkGkBT)2yl&+rt zMK{0lgAkNj!{jm{6KowxI1PepEy`d?5?!m{q5ore`EIdoDIY4cO%|;F(M|;vQ$Wg2 z@!1Pm>f1DlOLA)bFE0_F)yp~&2_tfgaXrwg_@x*@Xkd4!`pMO-2jVTnTV3l7jwnZa z2~hlglf&YhK_9XQq!?c8IlU51se!%$(K_#K8E>q28ZFozd;hDd(jGCaSgMU&0KEkv z*J;R9%YWDJdYx4|%0aGG5aSwnHS|zQ$BuJTc#@>35jWE4t9F;S`33l=92|Om$nJ5(=0q_SkET4W2)$k zf8Dj_;!K~!0Q0wWMGvSv9%sFF&x*L@H^jp!hgRT8LBnYc6ThNiwtIQA25~K2r6u{> z4E>?6v@9#`x9u^J9K4GcviT$Mv=(_I79}P~TJX|uqV(asX67&yRS;t>`=f_W+r|;T zV)E~kgpx6(1F}XkJH4$wseRq~pvrjDc))8fPvH*2y zfWtD_weB02b#{D?=z46|upLz2uBNV*7~BRvb)_0`9M~$6`k!UO36G*a)czZjX}4m{g^e$5|DVDP3860Iv(pDpnUyc0t>UNs+* zPy3Et!P-K5z8E8g)U2Bc4dV!Jp{a69)4tLe2d0i=m&SaVGT=PEL}%*2!^?=aRY|6d z6>)?S=x8P_H0>N52}5hhMqk~N8{xH8PPnNh%>(!`t2*bM{=S! zb8UnN?OW>+V&HhGIPc}9-Gw7(AhLbcF4L5s6ly&8%@l#JNC6>~CJRWCQx$|4`^!q2A_Lq(C2AM*X(5nI%_Jchj@iN-bvI$xq#xBDSuv~ESBot z5yuA@)#$&Vxga+{=Nab0&-a9_y?)9xxetCDLUT?8ctfeDB^-7j{MkFPRbkOK!m05! 
zZ{v6n`o%8$r?1uF@Q#rY^nVxzH4z$9ICrDEPJOKCAp>*TLcQXVfjv)ig0Nrse-DdHg6sN*;e4Xgq6f_+Xl#6m`>JcN-t8^s`_AhF0 zvMx*Ndkl``)ZZM+DAD64pmP16iJ4VE8Szfz;4J?J-)u(0P5FKzePIR1*9$HV_UAkv zYM)_RavIEAdlcgo)O)uo|lC#mPvxWJqgy8mb`L=Nk>A>2N&UiR->z0NsG$BC|f&!99L=7}2 zHAIn_&fj@0LS!drQy+SvDM@T% zkgu5Hm`?IRfCHbVH5Z+X|E(9BD_5b;{Sykn>1;Y?8DW> z;Fl&z6N78>8;}Y zqJN4L!lhu7iJ=;=9mG~bmH{4ly({$#OG|tj6xdyBYPB7TqEC|_c9Xd<;)=7UP;iwY z_9#a>lm%|1MBjM|qU`!_7Y?dZ3xd5}!^*<6>x9?_bL_)>*OK|qMFEF+);`j2Sx9U# z0nM{6vDWBY4)aeU9<(lK@&$}$8hdsR{U(O!dwnQs3J1Jjgf=Go1%D0yUaUxg8uvSE zJ*}Z7QhVq}$vfQT>Aj<(el;!-+h5cLW2pe6N->mZ`4XWY>Xl~^@;3tNrZ!)76gNL@ zQR)I0LyXYaZ*|6KJAS!h`_R+bqg03=0Wf>q#HsvrOZx@yg$_|lO2VtuoSF1g*W3@v zsFwI6$v&@)x7lsRGD4!I_0Rc_@N*fr+E(#{I5rj=$}%1dcjxEJy%7iZGTrKHPbaIg z!Eg(?dlsn-*BlOq9_8%WYa=RqE0V4YnrLX@VzSFt=$K^!@DFcMKBZ+@SGh)q+w1>g z$;*YgY`=D5;k?(ye$h~}c#=x|abCLNO3o%q%7H^1FLMd+jL%pN3DY36nzGKhqs5r< zzD|6D-*Vv_N^K8lBR6g}_ttgkp3|exMd{>RG{MY$d-X^3L)D_ztP(yEO@|IiE0*=o zVs;LXE28iOSt#K=LvH64wR_^{?EWgGWZ2?a$0F}92ZA}?+$|hK!6R1z`jqVF+cZL| zITI$8bx5`!-u9ALy{q`o_{ys~pA+cZvfC8d+_Kp_m~AQuoh~_iUq$?*B0d*D+;#dc zv;vQFS;Uoa{rR?7X%|@Yz6B{r%-Pt{;9)cj0+Riey#>Y(AKjv_(Rn7Q2Ohr?hrS#8 z+HpCAmJM1O;ZD}=NFaRwt$3?A@_QhEQ7?HbJCwg%Id>%BE=0^cN6$sS;cc_g;?ia| z+M91a5ORZoJlqzr7`wDX46c7AZ>kn}tP*or zYv&FMX~RL*bSuO#U7m$I*bgt43qsb-H=A2sMn^h_@zW{XBlH}ndA`e3=*=8+%haqd zK(IMv$dNa>FsHwApI<5+D%IvD@)`5LR_3&I5xd13HY7s)x>fHvRwG}qGxcttb#3=j z(+s_2AmN3aP>kniQ`wyK6@!5YqVn#@@84Z!?|`O*7HIAf8GR^%K!7KIL~BG~lT{_8 zD0Xm~gl&Hkc;+m2HJ$%ZjIC-RPjA;GNHJNV8)H^-)$7e6Y7w(~m*?ZC`VASQu_2wb zQ!srHw4;zF90(ZoM-~^iZ_z+|(lZ=Dho?u-vxum782kW2H8vk7V8iZd)Ptypdpb5% zGhjLG*O_RkSFb*!fZw2#>~M|O*SX(kLl({-K!R{C_M!h+tFhB3ekm68E$hHnH<$z)U<>uhh~;PsCdHGiX%&!ESEfq?eH>JJB%0g^?$4z5tDaM>pe zhHQ|t>FY%qi|jg#-AQh?PLqzcf@8%^aB2A#1-toX1W=|Jt8smD>$>`r&8*`b8-Gl^ z{4^a2+2#a(kjgg@I&47v#BypB_)_+dm)0#YZS!bHTO+*rWBshOJ#+3}if+WS&J0JS z8LO0g&D##gUnol%%};8Zi!7gu=9;S|LDA51e=`pP%nPu~h;R*y1E<0?L} zBaQbl&6Om#$WG39s*bqJ`3{q^ecRM+T^n+>HM7Ajx49Kuz7}7m5=G?uhs)+dF?Ua(O?1+abwMcz;KB^5ff+q*Y 
z{7r>Zr6f64s*WlXu?TMcvmBC@o>dX^=-vTz$83)(KAa**Sb|?3F%oAYBQ+L}jR<|F zYkZ^MnJXqdn`KnCN&>K0QTB^u{{goes52y%Gzv;N>42o1XCowp9Fv*q$5ss23C`!2 zcnmm-r_Sw2y9S$O`#(?fZrG$g^ES5D-}4?Z;)Go5y)Y|=x>X`7tX=#(MUBqGbI++ z8vwunb&U)Qm955o;{zHC_%dpuy^{7UWiASvregCDtqD9aS*Gf+ZdTJd*eK{by~#Nn z(}(xw`e(U=Pd@b&n zn@R+JkwJ@{5O2T%8GB?$%|=0AIGz!m5dr#VV}&*Q)iv7dq9L^D-tDCZRaMEX5|Qs| z)gz)p4gX21$u~)epjpmuR zDQL3+wh&&Ckc&?&qBXOCm=sCCZ)*Km)sW+~8jm4r8^MgfKmVGc=@iinU^P(NW7*;( zZmRols$ccN|SMR?RYFJn*F3XLeP zw8iW-I$X~;1tp<&G85qmNO5KY@vGne*#hdXH=aT+bs^m+=7D-MWH$r7!0v9c9ljm| zPpm_;v>zhKJ#*Z>x|3!X@YED*HOd9ZzVkGZ9PVHfQ+lRmXt~$wYF>`#U`zUu%o+5? zuVLB2><&q3?FsKlh&y8t=`MZW8_^^HBq<@qV?*v|4Y6j!<}}1zAExYHYSGG|;c(RB z?X9gV@dp4n4w2{Dvc1+*u!f!~51K9Z6SUn%J$6YP-L2e^Bl!oUm~#&)=cY;{iFFCt z$K+fOHy;3-w#-_IR=z*0WD&PN7x9Py?W?oE-14b4UX=C&KtG>sDKbbtLa#O_Dwto1 zq0zopFe706u8iGsA9coM2H)Sw+F|%qn$+3$)))umF#b+$?tF?THogK!#GYpTTSh@q z`js#fXn-4UPra`x~Ygh!bKCbu!3tAEAPCb}KVGwK#_tWVd2YTw`c!>&8 za)6q32(7~$PGVy&zM2L4%PlWJ9ng%-I#toen6>Jls<<$ATJw^U9RB_9Jv>@ur*-`s zW>PJ+(v%zsI^R@>?~$oQe8(>8r4!MK0G|Yh^t5`Nk{YuNOP#RZ=^KlvT8dyg{>+i5 zcsWipfKhwk*7<%#aeG?TXvCiE=-D(bY4E{9$w1=uurbI@yX z^;-m)$sj7t_Q`8zVx!NV_59vtJoLg~W3~@D&#LZ4kPl&AW2DGrNibgB)7)SMKGb@I z>{oLDy>ye*Kb@0|q(#%!-v&)7q9ZX&8gn?>nqV<9&f$iC&$B2mD}Y7{Gm4r~{L@7F zH~;B#kuXP3h5p^UUGh1gXz4!FUIP?eJqBK2ltA^3lPryl8J86HoFi&$IUqp&mksd4 z_X6_TPOk|oo872HECaD5@bMf)6;h6X0H@()VhNi=*dmFWV~t5oZF!V}RbUY@wC^F8 zx|6R4!MQ0nJqa=%R8(edpWGe^YK9Ob%<$Fsh;HdNyjFO;JO8nLEaQl{-$VrDV?0_s zZy-@>KTV_LbltlA_dD=K*3wRA%=TTzF4X=|V;YSiFe(_b`d@J6UMQ8v{+UQmS@gt0 z9i*1ocGye=3zz=xsp=Gt%tN#D9!xgMOV;}Gh1uMq9KBJg1Q;RJvrd?6V2-@euHO={X&(Y?X2>-bVap!>xHi({2dZ(fdsAF&4*(O^9&9OgQn&UAyrB%W;Xwq)Ic8sc@U?%qn|H3AS<26N~KNqz3rY z;<>E#klAkX6F}h~?{G1S9zowLytrsr_yiVaE!?NOp(8D)HV^X(x0O&W_xceXXjg9+ zeB>6;K^nk?lepe8T@cRd$NaCIR2s))zr>MPO7tv8^nYFe&f8VT#%z3BZw$CuZwUB8 z*~RrcziVNu=koxw{ae9gokyvw9?+t0xbaMtnLBb0l>c5bdviDJsO;paz4<~HbLk&) zAvj0XXTS5 z^DM3RW&I?fA~a|uG}9$Lnm*T#MrC4Biq?fe<&60g1uCzu^9z!l3-9EF?L<(_y2jXG 
zQY<+pt#kG9eziCbEg03kTBvNu#}{^JEpp#boKAkG?7sQKxHd6~L1hRD&AyHnI5N*OsV8{+%cA~`gr&Mg5v}s895j^dHx)8l(1*inkS#@!N^Oj(gJyY#S9UEq z4ILCtKDQ?{Pb$X3~`XQl|=@JN{_bYKOb@JY4m>L~#4hTw+_@;|SE&7b$)e5+9 zsXB3S`5*P;iVPO?Go0?vUW0PTt4z4d!;~C=~j46HgkNnS3^`$m#HPPz4;Tq@T`%xMQMiE7vO0$3_xVK{|=xyIsYrSqK!GGzP|K8&Mf)urfoV(Q+ z<=u{%_KAk%7f=15tXP`TW#dQKSWO+{JPlfe`dN00XqDS@#P{zUOMHJTz z==YeJtRkf+ZkHUwT)UI3#2_e2L;jYjW1JEFhfRIGn^18}f!5#i{?@pKPKE!bs%mKWB#|E_R-z4=noEXi4ABFwFn_=7o3;&WAOgC-40&3?c*B59C$kB~HdFD_Qao zmI!2M{BnEysKWDgM$RGOm0ypY1~n&UkIy?+V$6CqL^_z_{`1l$AK*_IX|aaZSOdl& zn;2={;=8GD#`Abv^#24RYKu#;KYC)jLQ7#*!wH;<_6`M2CaK!_XpoE``5*c#^oZF( z1$2Q-{940V4X-nw!sg>>1?}ILPv3*|TJEb!9iC@{x>Vbr-vg`3A^?DAb*q%m7tSM- z77t*upGz=AMkByY8{;_^eCO}Dy-B78?`nYztkN<39)j=OA4t3&#cbf?*a|1%nl3qW zr>edz(WYYzyd={v`sOG`e0F=ftRE9Q&lVlYcJ0F@k`OMO?(AoqVnUGURJMc2XvWgg zC)=wLt(Ku2nNAo|^PO8IkE$%2$4rj;R_<1O!T!JxPUM!zhTe9h8{WOLxHYP4t`}5c zHQ<@eDoD(+Y~RZwwv|64&=X}05SQw^v%*PLd8a1M)4K!O5{QE;MvPgwdODJ#{s~wyB>+?h6N(*=<2c{QE6D~1f%L~UU@dP1%AHKR zgPdiky+v(b5g!;<^zCRO*>h=-%Q3;v2+;kD>3P-J`Rusb&n~PF=f@Pg2fuB!G-Z|AqFum(YE84P7#G0fm|lD4T;tV_|5mb zK*3K0mO*od6-B4)$+MA!bLJu-cA9ZR#lCXgS-G>5fGm&4A13F!OlwIxs!CSu1b8Rs z{Xm*IhPPQqV6i_>MoVNkM}F6Tz}qYRolqzP-U)f zWnrCmjaifBMeTPf@JZz8+0E8CldyS6_ESW_rzT&^(NH1zxyOXU;4;PQbj92&E>hj?HIg;GE@V?K`Vl# zP_3voI%I2e=iE|HsJA``234C|1m^-&VzK5 zU7*;-iSYNZuMW?DKcc!L7W4(maRK7I5{(+XB%-AA*t1?J3`YHzV+W^g*)mb$BU@VV3pFv#zSqUzId6h4p3R@N$;E%VD329A3s$hLi|fxQVPr_OdPD@X9#0*fG@Pt>vNpnGY4H z%|1!H6G#=9m1EQgk>I*S_ihPzzs!E}8F#X7vS#vwAv)XBiauSw`I^Ku&VYFfQ6<05+k)?;43p)7t1dO8 zk9WvNAovS!w3d|NJBpZoUYvm#wUyPy_l|1zOtLMmL|)BIhCWKq4ESWIr9@|vLNM~^>iR`WVRsc7OBwsH0FW>+v(krKZZ*1LzuWa z$4}Bsv)xY)RL(?qnsz-LAU%haJ%v4Cc*Zrx%IVQ!E(qKXr(V|@N3EXIC| z_`>z8png;kxvdY37jmbPwc96&ta^bnEh&Y0dKmms_u_YwZ>)}<9;0tMZ|GSVaw z;^W!r^&y(U*zcP#rTP}pE)eOBCD{STorB}g4^q^i-QB7l9t4bVv4Qq!hVLDNl!~&?Nq>Eg9b}hl}37Au2O{>(vE`c28vv7L)x@={!Y%a8zWkxa3yNzn}{(Ri;@>6cioZ`N*62 znYD)fwTebOGBsBz+7taBoaAhs?yyxPp?vv)t!IUt*m89|muTKVK7)s<^V;jgJJcKqBa5-QK$DU!ta8h@#Q;%%539+UFJ+)d;0 
zc?c{@2d$CRB+Ja9)~GorwDEpWI9PXM}##h=|2NQ8t*ZMAqR0nF!hQg_uth1#`mj@!;^A+E`nZwuci+c^lcIQwDcSL zQWh3C(!H)>V{3-5rz=cHJUu+T6^AGR`u%k0E}hb_(-f7n|8UglD-OezqFY&)?(Vd< zBy{`sdrDiR*=9C_VCY`KV}zw}%>CKZXhBsFbUdAfN}hN5nYi&NcMP{2QYhlSU*U7{x|9xWp2o6= zJd9aU&0NXdxBr@zl45VPRo_wPz zg(VINX6K`1lcVn!R2F?>AK}jgGCy#5Nq6uM`^my$G=Gp zSSF$QOAZ`L`S@d2YP=@(OY0QJW@DEW?=y8M`mmOa`lPhYOCluird0L{5%Jdzn!ZH{ z^Xnf)z1wPU+4II7jv2!i9Vcwo3CNPRl=jj+9QHzLJeAn@iwQH04JM@7ETDUv@lFYu zWH0EYj8Lp<-O7%jCh{j;dxi)TT3t5TD`}^y`>8_4RaSnxdl>Ds4Mb-Q!3Ps?z!90ytKfr~j z4!uFB*&hpYK7T8i*<@&&*e(;KHkmaXa?2f3uB$A{e$U-BTy>(@*~l6qUzwXGsGo5W z@vd4hy@JpWs0^m$ibV6?OQLKsW-basXN(C%E|vn!{+QboE$)m^z_x3Zxl(RadW=W% z?BI*nPY_J`RFXhnUYC?`#8PfOaOgq((HxSXpsYlDtO|l>f36?Dl+SQYf-vWRKT-aQ7JE3OU2)X~*1vFqpT9@HRu?4oTTk^$KE4*$ce|`b?dZLp z9o;o1*2{JW!)}?nGkXp&z~z7oLaP;MtZou#p)&QDDrxf)T5)ojl35Q741ke+`N<>x z3zkxIohv2lKk={i)TLI6ij;z-oNQbh5SWo0y4ti}V1IfjztOE;4P`3za+j4~c8)y( zxJhc#=O*GdI27J(OZCe@x$rs(;9H#x$QF|4rAh-i`hNoR{}deLsH-kGBiVR9zp=UA zeyK4T`}SQGW+2uy=iLOBB2x!6=0IvyJ1SahF)if$wV2w+x=z*etiJqL>uR%Uosz_9 z*-(xIcEC3bXWhgS^=$?m7lc&(;9HE<(VAAPwMZ!wcs`27ytLi6TI$`Zv>EdzNjR$c z(yf-=U)gdq2lw0gvy?H&3)cXC{Zp!6b?{yiF)FWpgB3#=VT%h8hzR-J&-O>27i)6!fYh+Ar9xewv{@R*ZD-;O<2$V$$lR4uP81uvCge)Nc0YUkK> zGV7twJ1pHM1Z;G70Ir0}W2ZD#F;Xr&9yiMAI(`l z;w<4#ULt3HwWJL0@yGpVIrza<=~y~>CDJ3t4%j8pZfqGKLIGi{3^2p1?nH)D+c(x@ zri-%EybelVu}stcEq0Pv6o(=9x0&EIkOvz!DahQoP(N@|!an9FprXdNZq8+{ZK3JU z^HNAD)+W^A@e|F*x0Nhz$_ldh;`$+oNI(54`6mtJ(CX?octIThib zIT57fP7y5l3Lj*3o(jt9JSG%BG!4`nOIie8?OIKpOq+rG9WtNTvEknaaW7*NZmTFB zJK=w`JfQ~=JdbTg)gxMwfJwj1>{U(|A5Kx#{=RiWj~p-0?~IW*t=SacflRbB4#YCo>0_!GN80)l)Z`h7GZf)RdjPF-fA!-J3qT=#T{SZN6VRAMttTDeISQ z*^(6Cb-+%5t!#XLQ?5NW*P?&>2MyV=MYlm7ekQ}umXJI;ArHJmVMs9teFNUz9!G4i zkKQ@2&VLQ(kIpba{5~HuOrCk6)z)ln_Lo=cR~+C#Nsbfo_Co|(H{*$>Ok0U7A^n?I zdl%=hueEL`>|YUi_zh@v{3ntO5h^&@BF(V?UpL^kY<1g{RWuXgnnJdXh)blBK6CBF z1u99tP{mbhd$ZUwk-0<|+jQpRT4ii!LH}DLaq{IEzsGyR1=ffa3G^X)+W64j~GKPd)^Sr!)Fb61@*mEzq6**nEy zp*Kb3QLTR27+fW*=MjI|Y7p_ai0t~tj^U+S=|E``+Et+BymzL!J1G{2Cn+3!~~W%HpSbln#l`~ 
z-U){|^C{?M5XK|t4K%&N+Cc~VA;1zIE>D6JhdMMEk6TzugGk9y&+KFGP!9kE*TIJT zjYloQjpS4${W${ylM`pp`S{$}F||^8XF;qU{7c{qOVbKfL~ZwoP`v#!F1+z~58;o9 z>lZn7y{hMV!JEGJzHOk0jM;ko?Q@0>&@WCIlb+)3UkptI*fnOF#tJ}F!*?s}P%S0u z_q7xEvqJ>gH}#6!ufA11L2>cZYdC%Yr;2Rg_ApVeedzfrqOE6e(1nfUHfHu8IO{9{ z^JZj2<>9_gS6fD0yTlJ|g%cyOal)bNTj?*lW|OUplR-*!6hDVGH#czZ%mna|9C}fj zJ|)XhUJ2cD+PrM5qo8;RbNb0#isrQ1<(9P9O%OO+*Gv3F7Xp>RUymA?m0wst1JYE{ z)bBzM4X~_k1B%(iHe)Qix&9o56^gYdPjnQ`4D{IveaSzZxk^crI$>HR?YfFjaD4FS zSt?gtabc2WgE5#=PA_O*^orgj)5o%i>QxA%U8(>`!>Z>^A&Pte)& zAEDvUr`P0Sp^|E|BKcU_WxC&7zR9)Aw7zg5Y+vsx2I7*o?BOXkfsRowt6PiVipNnEVca&-}-iKluwL zXy<1JVAeBy#2P*yOuL6x;47W)>4ycf0iKAv0RXyfasyEm0rQjHS3&cfP>0v{1rt1$ zf!R)CG{Wvu2U0Akq=(RoZbsMI#MHFFHLCC#s$Vwy{MHX3!85R z(#J^OUwJE{ywjs+6f~UG2lM+bk4}MKLhez)UK{s5Ut0fILv&UQR#0ajMjB=aGPd3> zLhg`aHN;_j(hkK_DhEqkUE^Jq=zl7P1bG!vE{YHTou3=XuevR;t;@1(>%2c9;h-$< zYsyicOKtxE%<0Zrc9{%zr`PG|_7N($x}~nXp~aopIJTYWY&<9xaxbCfQE!E?H|)qE zbW54nS)-VHyGzgPxFy*)C_P2oC7lr!qztes>3yQM7G9^9s?HHIxSV8ln~wm<)@Rb4 zo@d@Cf6cS{$6n_*^Iy63NqVaBn_*>MEE#jy&jlpG&}?y8_Vqu@G~1VpX~W9j zwGL~!z>YD)aYev$@E43d=v&ZW{;&Bwr7=WSD;i1Ki+c%$J~*3x{UUru~xu+&UZWyFhWeb;<#hJNl-W}ssC>EhAkQ=6|f z(|*@gbm!<;&JS&%7*0Ql(vu_G4lEWkaqFPBC%YeIiQ{Qm_0DfXI-%=1V?On0!4G7g zEKajn3k^0+$gW?JzO377_>We%MEDFZoPZID^S5^-F{zC^rSKH?r{`e9`}}D7Jk|R# zz9qldrw=vr;_RU(ejRa>Pu}b>autenOL*#CmPUZ`Z~1sh@)eE=%BBj(tDp(v_a$?%WjXzA^+XUG-G#TYi_WTx3c*Y{M zG#vGLi~_$H8Iibk31|Z#&zxLkBPBIfusTvc#w7Zin!SjL=%Fpj9pnh!4k5eU@}A%; zl4ytiaShzAR*`Q$Y1~b<7)ES!_2r@L_Q7;3r5^tg{A5X?XY4-@{+j@ODDG~NEFKND zejpfRA#euPcm#jx!_Q4scPLmrFGA^*fVXKIxQV@fPW&yifz8!< ztyvW3-=)zX9)Dbd{t`MaXNPlM-~Bc1mkqZ>r90g_a+=u1%~idPF4{w#VYb0+kR0`Z z*8{p?34AO%NftJXlRTv%gQ4M4>zSe4tKCne51ty*nrmq~3`mS1_gnP$|MLPgQ1jfK z7~v%LB}Eky$P!pr$)4#fph#q{=|=;wCb>H+0}F>*DQP-Xi$*(+_wxN+cJ~MDg1Xvp z9x0g?WyFldEZF&PXP?SF)!4?N)!FLfGa>q+R@Bd{)2{7k9X9dW+8f7}gN02WNZCYf z;6~{3Wzy4gv^-w2)E(!cSbt$TBWY10$^LP^>7|W|3ka!8Z#Fx{??dsqfo^RAge43h zubCQ*cYQ0_@=A}ZKdO=DcLU$gPl+V&(Cs5RHSnac1_kd_xm?8~Q)#OQr(ldqVAp1$ 
zeA?*(L||d0dr56uRsbhYs@yl&X5zYQeg{N*AoDa8vT%j5fQ@=#=HrL3D`@K zbwu-AqeV=AZI(vA>*3Re@^{}H_Chm9AzBq&qiDbthO>kNTIShz#}rv#XreruW=GE2 zi}t{E%(WYk^N&_aY|8b(cDx7+WUP76RcnZQ@E%zZlSghZ;Ok@J>c2!&z6Upid>(4t zyZXDj`g3Jm3v1`rZ|p>96j)$6FB;mJGHv;pLPITnO~jA2jhdwIV%=hjQy*A9toKG z=8-dyqu0!zpn1X)ca&n^ejJG!=KCf)DSK_Ub5!u^9lF6S%#J>Iook@*;XJQnRT@VG z(kvP}f-jQr&({PE20wQyAXUldv42%YH(*Ni{#Nnv8mYgwU>7xzE~aiqmJLu^$!R1w z)c6^Ew5*ZH03%9#<%UyPNpM(D5Cpeb zfE;=Uv;s885q$~Sfl4tE5)kI>QXZrUu=J0a(d)HnrwfY(9$Ct>h#MnfT~OjC2mU)u zdX^u|#e*JT7?jmat%Qzk6F z|H^EIM!{t%u`FnE$i!eT&?ThtPd9T(A$PM!YAd8~ght(VFJwK2AYk_Wd(KQD#Vl3R zGNJkeF-Mv)K|6|>>@t!%c$B4~UH&Ym3ZqcKO8P5;j8e}yK~~hoX9@da8gH8Ry4aA- z{!5atEn&j4Wy}$HEBF|e=T&kZzn==2o{j6xBG8M4d9cURI+m8=mSZ=6Gbuei!+wvj zIF-F^(pGs&yal&JYWo!JLMwuX*qR2WIUzC4v7`ajH$jQSE;Iw@whh7iS`ru?b(C=c z%hQSmwjFR|r)d{?iE2@SKij&w1|dr;g}1xIz7i5B{lG*FuqY;kDTOe0t=G9XPuh>o{_Pj#qczY zcyBzSsd*2ux!%DiP3+UNb40#+ZyBRKhuFEXS2BjJVyD5m&!%p}ThN@x;`?uxkBdUt zL^AoAeuwcuWgMrn6NE~oMgvu7pwjRP7&HGI8G6;6 zo*m~xus5mFoJqm>eKoqL&9g?CneD@XpGnqxHk=#YorF{yi1G~KZES;P|M{7l6^6y8 zKSO)jK7P)UaV~jI#c%mcfyFew@ArGg1N&^G~#|@B(S8S=p=`3)Ig8 zbdmFtH|MraMnn`JTV5Ks(U_eHEOCzecZy0j{JGM;!e){U{&5L6w9TfjAuCdL$oNJ1 z_fPyUaD~~g(rwRQMRu?F-u#pgU)gR!1(qXQ2G=&S=ij$C|hMJ3)$ zEZ5WfAZA$p6BCu4eAnQ}$7ZnC)6i4rC8miLl)e81zh5|f_jysS*k*S5M-d5Bj|UD0 ziG9*?KWA|BJ4W1oWetum^?M&3B?oi`uOjg}i-N^0r#%y>HL_1+I?3ZcZ0atVZ@sxd zFSNh5PK({_^fy`{XcYOf^^}Da=)fSXcm}!P(+5L%kcko^ROG@}ac>&}gQBh}#(rpC z>>SOhAo@L7(+A^1*c-hff@h9#Y8uCGgYi81hP68(8xQ4SH|6zFPVTW@aB@#A`U`D2 z8m|aqG4YeJG!L&39hzln(~IX0sUC+MVMV`hil{53M}bI9i$LW7Ao`e!!?N`tzF*SS zYO&`EX$N`UdD2dLka<2W3!+MJLJ6Vw(Z1SgFAJM<8gPMrAnBK-wHZ}Li^HJLp^$yK z$2#g-y(Og0r5ruY(m_^&W|1X5YfDeN|E1AhliBI>Eg!deD<^6jt%>(0sAU)JHCbxr zrlYU-6{)?*wrJwIvLn@Q-QUIFQX&S`-~fWKlHZXc1Lh$Qmn&7QZImE(aP;L3 zc7=p>Pl@fiQsN=dkGn2?>hOvvD^qsV>Yi^XM4>S+ZmZPKsjmcYR3~3?sfA3vm?v@@ zzrTrQ4wox|{uAl1l%h%+RADa&=gdlj8;8aNj&g9}=`X;qG6oj^yRSDYE)rBxZI?bL z4l7{-C>m@B!0xyUz;>(cqzaHjs>CD!@B{9%O-pR^?qw$?Gi)8M$&ha2Q2ajOw9?i9 
zy^aWjN(yh;svk25yyG00O|qhzRA8?Cp=5K7rGV`Rlb)+M;K~Y3K$|1Z@;Jl{Xosak zaz`5(M|zt@CUS)nLn53nvS@^^#c+6FszLzRNSajCuiwq22AmxsM@-(YgXJOPXa%KP zlqKsg7L+6KLKw88khn)0&d=>Eiim_%qaU3#6^(Y(L=&!eWopa@UY96dEO3Y%cf$0Y!T1caw_YRT&?5{4DBI%`h zwaI|MM<3o3ilo>*)e=>oeR?RHlrKT^Ik<;L8XGC>#8WSDAlQY@MU?Z>oYd0{H+s&8 z_!?~TyVta3dYOAFzRtJlRWT!Ak?5&q=x!Us;??bC`G04Bfn=!ugL_nj z+sI|XKk)VyV}_*jto%od9HNuaG-8H^?V^31a0t_oa*J!inC1A3)K{1cwA4*6uNOG= ztsU)ge~OR5M$I$;u@uTMB_3vh^4Qk-fN==gK(;czpPMav39Y8Voax9sYg*OKsRH1( zRL%Sr>JmUtQY}#Os(bra^O%&ZX`X=$b+~SxCv-GEKgcN$g=lU&_|Ep- zwrF=$OHAODrTy;gG01*>e4o2R|MY>#tePnxxEB%%dOudB{xz_VFLVzcEAeXw8lZh9 ziZWUxuOwB#sA)V~4s^XQ`45}34q3EmC-eYdI^Kq(K@7DTSetPDyE`VQ3h-8_ApQ7*Y_v@i~t7 z`wRQJ_Fik9>x{!WqvIMWv{e-ge>;}p1@rWEpVNQaMGiykRJ*sUSb#c;*LoHi8UW`o z-kNLixY{#9B#xPnv?jrnl@lQLD^Tv6BS{1ahw97}pd_IA>49LxTZCMoXWI0G$7fSrBs{rZxaG3h zJKkyY1kSjMbDDr>bk*tJf7#;$2$%hc7MPnUiTf7AA}om90i`Y?F6GDYJTc#UpbxBU z>dv$K@96NeOAX#BvD);S;23LDDR^%4HUdwnOvc z58;P3)Tg^~_aWqPkSkV_m&mLQtoBG8a1Ybxd8>1CC1uW-N%+poHv8{Jrog9xg|&Z< zU8u?vL4BAJm^iAwXiH8Dg+>&~%{H(It$ysk5t7sdo^|V@GXNUO1IzEw*86o1Qs_LW zDXrwmjr>u)-3j^^Fp*eI4NZn902@6NTP;EeXxgX59Ji${u{DqV7X2Z#J77C~WeK$>rg%s^H6<34SmrbiyYP`Sp)ov}@@VfCFepnFh;rj*Y95gKap?Vo401;$ zy>SSV{{%litO?J>-2+fP2eo$>}_7K_LqB!wmF3XDZs_`YDo@py3gPBYD!*W8t;Pcc=%yFv%wZ*_+ zS@6M#L<&~?LGSekV&3I+wOh%muk|(~<)mQ>|9q+pzbz+={l2IahVptVN<7Sb4&5Q{ zF{QJHm}_Wb;b#Qhnn|v|WPU^g&n?LOot9A-AE4wToHJ8Z(G0)d z9)Veh+l4m{JXHAhA)1HW)d_glXOC!G@wpH#?lY((%W^tj%e}Q;4BV1y zl}uT^QX8KlJmvLtIzm>K>aP+&fjKHQIyP;smZ?71dE^^@W67|v9ay|!w z0!pX+o^2@0jZEd&&C2AL?=VhD)MR$wZqhQ9S~81eS4n*~G#*af50Pesz(|6MBQ7bb zSu?)Y@yjopd^Yv!VRTgM;4A`L@^_XH7Pq7ZGP+DKj!RxI!3D=UCJlQP-<}HR_uaO4 zUtctcrhNbF?L>E^TUJQBG~UcJavMC6dRSi?9npRo^!NWm6Z*p+;i$4;2PrHt_JhXX z4IUfLp5ckU)WT!Hjs`Y$-1hNmB+n+O&3A5uW%$(equ0ZA$AkcpP78otdoh_lAmtto z=$F|82QyKK>@vEQ#iwyD_;J_?K$=#h5 zxG6p7#nM~fNk1XvM~piTfY-6PX4t%TJ!onFTUDJ87u9z~r@556XcC!JWl{F`U4Y(+ zJdJG`mx*7WK0Th2y3G(~)BaV`LsZws&)7Gf(PvXxQTlvpm@v65{gh!UX8P(lA&}L@ z#n=!jjU_akHkr+DXN#Bi4aSfKkXk=#o(p*KXMn0R_3HCT#Gr>w46`Ei9fm?`p^JE* 
zDCKb3F5SFu%Biw|6n@?A9hkk&CT0E&eS(q#by8z}QJs3mzJI6b>vUh0WXIMkn|2b) zjnEyTDr|uIOAvXeXSBkL1jq47X-7v3Gw^TtT2o6l>>$N|qd`P4XPf!`oR|4bN7x z^=0+lg&Oz!+J;)vmq1Fj8#z%R&Sv51TQLl_hJ-pP2d6 zi9U>HErlX$ohStjN;7>El8=LlJ~zH)P8gE2{#zep>do8?52X{9enQGT3Hmmdb2)j> zMe)65tbr8v>aJM^r<=c+K2mb0-M91T{f&qUV*hEf)R=yckJslfB;~8eU8&*>CHT4U*Kt-qOKMnL0yP>r?2^tY zS}J4eY>I(B^E5@Qa&p`cE`@O-)d{d=d5k`WWwoK0qFDgP*~GCI8O{O0TA`S(@1@A` zTwwhR0U45x6)w5Pl@W2Vlbq%OGQ>L;^68j{u6ofFTV{BW%yu?eYarp=yg&t;K$_>`h8XWv9;sARXtSk z9;mt__XZz=84CRV$g|N3B!LSc@S(z(&{7AluWXtbPu|!4%3si|n(5#@(pXvzag~Kv zIN0bXDwRk=29eI4hq%F$=sL$TeixpKjf-~xJ&z5ESmSqkrh@?*jd!R81-1f|q1> zxfJ_}+;{Iyf6uHITB>43!w6w&C?nDvHo3$*efXx9&UaBap>`PSBz@G#NPnc{)Eh~J zb!^R+1({Wu{K}&j$}ZyGg&bSPPzdP^EpW8bYnBHDAaIpa{w3v2PjxmE?jXZ z>3mgOO)J%U_P$nR+{0ubbeGYiJh|^FmVrUl&;1wcD!e)YfDcu+2;O!76vG(pD;;{0 zCO10-{aaCJ#uYpCUlfeb6C_hGRvSt~X`1|w5wf)KE@sJD14?Mqv?BKjYJc;+wqP90 zn8m!Mf16m{4Z)6XCW&O?9TLCcNj5Wh^7U##v`nzg-?vo@Siuv?tQl_Bj3syy~ zJGT#VOO{cxGAh-c!uT8LS`ZTUm?p!L#>oBaWBHXEi(>4X_XuC6Lng{g-KUCY7ykJ1 z3&j-3u|?fv*)h1OP?$0seh|{MQ;@0N^T-l$5XRO%iiUbe~s_};~JyY83~LMaTUc4@teI$ z(6dYyK~$bUj6I)>E%a?T|FbGIfukMXlc+ShL68}57M9Xn}cS_r35 z{bSjsyvw(r#XY5SugB5l8{*12efPP5axcaCCdRdM%*2yEe4Bu=NZ;jW?D4h{3Ab^S{|EoLVTq*kV0BW1Iu3T(klySvP%3z+ z6mPK;Eerg{F2X*CFQY2xs*JSB74r?7U=H4vOz-QeHq`L;O(kJTe!HH*m6JKT!0K2G zRqZV|_p}rYtdmW)e%|rZ!zqFqy3n)o^=l@+#rw1Z^d*0-KjTYCr zntbPVoM-iNk&yULYh0EbH0-|I`~@_re%5lZ`~5>B%Wo_#k~efzr=JX7tg|#K-ae0w z+6rU@e`j`%dO~M)6 zc|ujwR4p2t)wZ%kB346KzEdQk_4qK?WP#}8O9R7Zq60HXyYH63#2|q=SPI2!hH_%+y=$|xP3;rF}tz}^+*2}45XMKG7@)HpJMJ~{C1fV zw4Wlv%ANx`TM94+I#uh}{2IJ-5X>~aL#PX+{N1a(k+B@W|8K|cPAJR4{5$73C(iu6 z6Vs~}k@LH1I~qL{*j!9w)gvNbyyGN*P~Cm%shl;htx-qJJ)cVO!jAQv*LAzz0WH%a1e1iu)7 zmJPmatqx${$rG~PR_W7F|NfbSxP#E&FOcozCH)$5?Vi`m21Q)_qZ zo>5}Y%6*+iu>K3?8&uHkU5^X7kfiU_yZVatB>JUXjlJ?L8|0lm7_T-zizl<=Y`$q(yi*G?Qmlz)L*|M!Fx*V zkQX(Pg8?qjjn=JMIK#S@xR{Nl*Oca}zE8v{Aq?J~dBy~^%8mUtranbwU`YMDcq>Sc z?-j$Od3wB}4WaK1h`0U2Txs0@obl6M1|`2SFwJ<`H(WZRqVFT`c6|TJ8?C!sEx)gP 
z!IK(3e!p)Hu2%mo0mRC`u`Ms!6sz>kWdN46L34C|Xnvbnn4oRvJi`ALm(fXOboZY} zud$N-M;+|Z*uM+0>f{sWdiNqGQOd zmiJB2#$bMc(|7*OU1Q15JChQnrQU>X?gtMb$KL41#5eu7ZT~W=uHp>bWo{N;o+8$X z#}D8Ysupy}VMwKXU5%`=cNl!zscZ;x|NW)_-P`vak5iMFr$t?a;l!P?r+C#?vGmo? z{EK74z?;8+4nZi$t~tiHso6j)01Bk=e@D{|2ts+lA^$7PEKBcwzT8X{#3TsmzD_3a zs7$(-;A;9dsI!Lr75Q6Df*F`MR4UA-oo$m?I|#68-{LF}hMPXeg?y=tf0k@s=-ef#Ebit3*Z@Ww@1|_u)-H{F;kE;(&dkM~ z1andS9~Yp^JP>5#faCgc|32CAQV9(RSe~D=4w@gT@wr2x&{wV;`lIY_1B+y6>RZoz z-habn8EHHx|6~x1F?aC1*}G!+)l*QqMEG+H$~jvPY<#>ZxwxGzdy1xC*8?`Ils1*V zc5zka;S?xgSD+1hT8`x}n;^P-Du}FNsKdF)xezjm?2GZjJWVx|EOu6XpJ|VOF&O;f z3AjE=@e4=UufacF(Q(AToc~lQd*A+vJ(T zv5wW~mP)bLaxuqwr$eN}C5Q!+S%e&XETSD5^w;Yj1a3ATu&n3EuDcrype19s!L}9` zvq&BI+37lOm86XS3XK)2!&a+%&`L-t;puv1_u$a8|KtSdD`@>RU!>P;8<{+-+^YY6 z(Vg=??>Go{7>zSkbS?9qVSh<+>0Pp^DqgHpW1fLAC-F15#D_=HoZDU&N9aL9eb6$> z3$o%O1CF(~BTPDX8B7#lgEql&SklRVPNjMo(8L{Q;MFShml4LPyq+$}_9 zD8aE;u23kqLtx;m_A^l^B*KY@Re?9!%>yfyKQ)ex+*J%`E+z+}{|oN!VMVuKJ)1%b zoZ&*M+lqGu4T=-yA|D+(MDAC;6vy}lF5+%LA@Ieom{X=>DCj$2&_6B(K~-cKO`qE? 
z50r@Qz4fQ_^~`cZ%AB$>%hn8lbb7FZ*8#)&5vEgFb3B2(o-tSpwIt<)22y>kZU>Sq zmfbCT%#LrvHvb7o z<*HHMeowdsyDPc>ZBXnewYa|Y$&JjzKc-ht>|LTSvyB6?QM<+umnbjQc%1!eNG{ud z`dkBm)nrovAt=9?fUrb3qm`qXXS5nQ+Raqk1EfbTlC7V7;Po9Q>p1g6hmUeWh zV99k7Em|V*@)cp{HJ9bxkf$^{knQAYfp{RnHB4K8@8MGO>C%%fx+dxL{W0@PL@UO5 zg?sL2Ph8*UR0B)p?@4)wX{@=UeeblvEC1{-)W2;}nqG%>#J5UXQmT#Fu!!};>HpwE zVn=>T+hr0*@=LmXSV>atQO)NIkz{vt9>iPlVWjh{@s8`^$4@~8_xWPB{Z6kn9Ekp< z3H#2%Z2ZS~s`ai|bX0x0>l2_es$9M>&-Q?z!(i!bC3j3$Zw zc_onihIhtqR4@tabou@!@1pg}?^q#opXu)l%+{^Zl|S%%uSU^-HOSxNlRh_(nB^WJ z*Y|2pu%gbs&fvH%wWeEbru(r+z4wI4`SE;dfRZW|RO!5QPtRBr>M2=ro_ZFd+kZWC z|69vw;Gy!*_XQ_#YpF_vUf+iigxImGAvxsEr2MAeXg%(og+iX=b8oai%`cEM=S^FG zjkUto(Y}F)lQUo4oTmaTWYP5tbO#-B`rlj1_|c(<6Kes!6fR}r$)RVHcw1sC8r*Ro z*150mU@`1mDcCn;zLEQ+$C01ta|vyg9sT5sw}Nh61Q}3XU8`U@O7I9es=dPEFVV?- zMB<;8fm8cujXBGg2ljY))i)Igr7}*a+IXauGy*j&UcmSo24X)k2w}e@HhB}=J-fVV z^2ef`TukBPu&UT6HI3$~nhpLxC{zRac9n=#aMiMX$mshyS(~K~>n1*sH@08etR&Yi zg*sjInE&kthex%i3xekl?Bn1EKTbrqN9?R_;3|rDGn*FJ8B_yU^6Y&ZTk#7NC;`_+f6gcuCl!dIckNuy5nZz!6;txg5*I#63* zQk5`Z;tu{-soCd)?k5b2?wGI8%T-yE=fO?Y_}18bI!k2A?X0|MHIqtq#Hn9$!ynsT)0y3_*f+dfCm`{}GuY~7?l#+DDFQjK$^DkS3tB8N>`Nyhe zuV!c12;Slez~X{?-rW&&k_QRwcobJ!*RZejuy8KbcQOzWWK;sbJgn*wi87S2qv0tW zp_1dTU0(!$bl;UkZapW}GEz%MtrtMphxrHUVH#H_rp!W1=5*1~Ve`4ahIU3Ivt-4i zGcX{#3G9_t(?Cu_j*YVDthYZN1goQcqUnAWHe*SP-`hx8l|qRNj>=ZBW*Nhd7v++XXKSfq{YYV(TYs) z#)#zD=2EudFMrs?5yWdH`F8;R}D>~Py2PHt_&|@pmY5+)-jffF(;bLJ3J9&B!MKk zk#ymBCvI6G0UfC3=P!)k)e@|aWUgO3gZ>^`=0Ld_VkT&S1L#-qh;AMu1GHn**yq4C z@vahKM(hECiWO@1!$cR+u=URb+Esm7k-G-%pz{gmKorBlQf73TF%OE|rApjEWzRsv z$i-%2QbN_Z*3bTV*Hsx@3ub?19e&`^RDXs=HqKyplH$=|VAcr-Wjw{UN!-`SlpQ>+ zeTG^1?uG@u2>U7|#~cl27Np8oCVPuvn*RkoQjIfSUWjvGL0xo0NDe(3*d_l(zKV|| zfAs_We5hkRrT_ODGI8|xO`4{DmTr#j+rmBhs!4PdpZYfD(M~`Aff6BQbzI zDKjY$Q-zl+Mjkf4DMkVIpgZ&=&sPh6{$SF;$y6wW6JQs9lri^9$oG^}G!wv9;v!Jx zHw)wkf;LpS74e=92T(G-ls4rqF^(K~C&G-DfH?_`i7FSVY1eUtI|Zy->A#JiuS(5P z@-zg8h(7JOJlb+5e}_Ek%3SG2sXyB}?SlZs(J^k{h2B$*)3l7K4-sS7f`XZtGEA6K 
zEP3DOt*U#f#x&=sS~;gQ`-o^&f(%buguD7-($QECub4W!x+Z{RBWPKMViJlL??L~l zX6@4}4(95r`?lbEZ1T=xiKU`|KZN;0Jev?T`sd9{Vc~{e$4#P9_{goO05lRN!?}<8 z>3SdRgl!r6(Ps5Od=_(%&!J3?+#a;~P^=?&o?d@hn;vr@9tr&euj@F!SyHzw*3QoE zB5g@~>(!@=c)zw`_aN+zJF?whdUagR07Xb`!o4?eHks#zI&V)-A5R92`7QHz+)bxO z6#uw>boWGaX9}WWw;r#Yyf?QGYQDZ?cSRE_ZhUDc^#dI3LBrn8mJ{GEknVFEL*PJM zD)>%-r%HFxsaESyhX|pZ=!b^5o_hEBH+;9KGK~x6Weg4uzPL!~2ynn>cm|zX^~w^5 zS7o{g{dw4xk?}f5vwuW2IG$zck)7nkxt|ao3bACYRR=!8qL?eB6Ex9Z4tK;hbL$GD z&)v~s89D|6J~N{HA`0r0uJeELS*59iu^1q0NSMG}K`{Iu<}R-HmgGCG9QjwpA5}F2 z9^<)m9|k?Roz(D5NYDUkEDpLA)339a9vrwkD$j#!K=cy_k2+({oJ_7S?!}CIIRH># z10m(Mvhg#A{+8FmI&=CrB|ipLI|Z!ltK8>iP@V%~z=7A=6d$Z1oF>|#J24avFW0>_ z@($HRdpa_;Gcl)9&xE1jm1*4&`9~?zantRdE0D9?lqq(Z(YqyQc#+F;zRvrS%2!%EGH06Xt%|kP4WbWuA)0u zul2eozVUG(*H-UKF=xR%M*}<&#(~?x^X5>pN6Ljd%$N3~KN&X9Z=|ZQKtS{&u}SS4`^1eoJ)g#-u`lg>T&`@!$*gfz04@*o&7P$0y#iy!eudE zxBiVSI~DON%sPW<&162^MO)TO@{sU-ZwkI^oiNr-mcOU%2ZG62>1w5OpqATO;7x^- z;BuM}ly#{z;`zSmtoek*#H307Pu@;M8;z(5-zEV`jO1RK^&5tfrz$)%13VUPO{!_; zSV@6F0nchXPnX_^YYVU|wQ+-kR6@>b25ix$(r5mT3&E;`LKCerLuRtuPK~_daprxw zThcnP`O_F>S-G{A7F({5=EV@ht(6f#)QckJ*?zbxJD_Dt21M5huGWx);Wr-{qGm#| z=jjv)zpTM`SxRxg%*2PSU|)03D>=*z)pOjN3*Yl$1B{~4d`5k2`;i>il&V9fm9Nj( zDy%Y+d-jSt7ma+(3(K<(^NKoo^W)rq*j5X?&B(k zJi4UpCF=^Lb#o}Tydz{J};ys73tw1>B!+>(`_VUE1kEa3m} zw#uR?JzH+;beU1VjrDP{K{wrT(qP6!rKm?!jL`AgRaE}vS+yE*j35c8yAmp0fupFn zU6Xh7cNXesYb8c`yg$ATUJ^r=#Ikt_7F~-;SP#!$Kd#TdeGu^=PU|qt#_pH+E=6x^abi-tt z6T+W%K2-+X0~rC!PkV6~(Z-dK0OS(7rr+-LUs%LI5%(6CKPc3#Xq3_9p(b>8;S~Lt z#6$TGfrYvHcKS;+=+8iRM|Lt2!30h%73U%f?e;p+-+`YUxv7LTx8>#PS+Y;tdgF7DY&<0gHM*uT_8d&}t7w(sUugYj~kE&SH>UAWAlQ)Ix$Zi;q3 zj>w{QyKI>zYVi85D(F|^c_Q07SXS^d7EDY--f`=f{7u_k&FWqek zf}J$DwAWgn%^bJ#I&TJ|9s+8^tI!=i>3R*NDKdG-fdsDNB4b*d$tQD5B9 zo7@UuPR)r*KK-Y+$RZOL7rEbMEtu7Si(sm{*(S(}n8oer9hXB6l%!5yNscEvqNP%> zw~XYCH`J_H__%%W*>ThH2IOL!bLqPn!}G@tqR&2&Ltp9sGrz||C+v{N{=C3dLiFTa!a9h3t4WiB^GW)`!e}U(;DG1jLFnTrp zQ0Wg*NS;#jjLhrxNv!jzl$&l5DgRAeE_ql}xs}Nzv`IsG;XdL!M;VZm6eC_pkw)QS 
z=cLw{H|}KtA#m6n237BnjZ_{7qZi^6M@Dt^StT^|N{mpCYt2iY%TDYK%L`h@uM z6<%bS5*Tu~Q+~03_W&IPwK+tOrnwnpprX=S((@>_+>YMg@NbXY76bt3C0FdnLFCWg zF#r=#>_QHG40}5x6ZPaaV@DuAai7XYKZu(LQKZ)@vRx57(@DcUPidI|`j0xu_0k`>2z@7Y`gDkal z9WwT{laeNyXB9#BApQ5&e3&nEh6ykoJed#VZ^!JKx>(Kc4kWKzzVhSxjI{6ix9>iY z1;ws=?7yqvIz#Nm|3b)dvc~2`@=&Rrbyum>TG1de%JQ{dsodq5cD)kcz>3GWLYzYX zEB#bc89x?sG6vibT9*V6$LNa+RvJPT3tllW))SPxw?C&7WSFf1Kpkbq zaXa~~l<0|>ol4-enR+kt<4C5jmwY;-GO=D^PnG8u5XC5ShZYUDs$+?l{$;hWg2cRv zdd>?`m#aH zyegT$qNQ@(Qcz;-1faEo@x{LIMhDk+K9stM2{Vw=a)x7Bdpd2&vsc{c<=?bcZnPX$ zuoV-STLo4ynz$xsBZHgz3no{t%TX`xZ-a;Mos1fbUBP`B$zxc#-1IoRX?Phgf>)tV z)p4DgY4FLHo7FOf^+|{gbN}ChmSZJka@~yL3WsAwAJQ?nr-xZtYY)@q4Ao;()c62F zjQSh|q_I|gw42zP+*R8Re&q6l5CU`3hCB&{X_#s9``E_Ce>o%LerIgEHpRFFpkW{<$W&1vHPda<<3eU0IP2u>@ zRYMPemG1Pj)j-F$n8p#lxOP`mbC~wpzkAO<-)=;(Y3Xnlcp2VfonhogLAUIG(N#9K z-^wDx#aE80^Kd$IMcYXO`E3!E_V}wTu>;?yhG5#}c4NW5CX4NmMcYx;uHS6Y6}AAT z=Q{_)j`y3KG~~ZWwp*(5BnCVuKi4<=TjxIe zzq>Q$Raf+f5fhQ|^<=@Gw3}y>O7v~JA@mRHGitJRU;5pDKD({C?#H%&{Yw71f+J`^ zX9Aw#_F?>Y&Sn}P%z;EG96Mw>IE+a8b{zv6Oh-bMZ6`B`v+jj6p{JR<@fG_^bH{1m zY3uY(bNF+7#yULqYzWJZ(V?y8x9o55IId$&20$CIvu)huTO=@NE|bRZ#3M%nO#%9% znz1aBB?hx~z`fqtI{0!iT^`iWY21LLmDG*7=V39yjXEU0embr6J zjNI2nq;`S!&eIOrU1kC-p${@wKQ-0EmS7Co0{h_5i^{*7Qi-zp45iUc+>AFc6W_xF zg3tv@dV67+4hW%x2qn5<9}i{Ilro6mzz|dn8v%?~(ngo_Lc-ie0Hk@72KxNxsumXs z9QxK4S|im7epk1fv1`cmdAO~P->#m-^hW4KSB1?!*#7nI>47Z>Mbq}#sz>eTF}6TO zhQ=SI6t_SoygprcMPphpBWUm>!mVrF^HU50A7&R`&_UbsmQXn#U1RVIVXBu!WD8rp zH8@7lCi#aOf`O@T0MkdIK!^fl>K9A*aE(wujxSsCZlFB20QuPCq`~s`B&r2_ddH1M3sz(<|8D zq<^}tyOyq^`wH*WjC>ema{plu^Nt~DIot_DbU3(Fx~tt~O0Sn};7^3g1!i7xb=Tq1 zXa26wDzka~adkUP71f(#sx;w||MzF?4bWaAFwA-5_CkXnT!0`R3YE|#l?&DWHD;8P z$dcb7d-wYjL-rlX(v-9Ohewo>*iUCca(?r}>3l_;Va*QB1pFUE7T?{`(Fr64i*#zs zyA8{GRiW_awcNM(L)G2N!pwciYxpPpT0{7MFYLTe)S>|@@th{Ymb;y>5OxK=f`<}2 za7r1b;G7j~VqF)@5h$!914=^ld$=s4GO8Qd@{Px2G)wDrcmJD_24QQPtv2%=4Ad3; z{g0yKb+BO8b3?Rr4Lqz&Pl@N0X5Q^X++-CJnm_*tS@mXPWX}q?(E&mrm!0DIHOj?a z9M7uJP0%T`e@I9MHs=K>1*dyS|Ne@r%y3b|5#tM*tAx#s0bf@uJl4M(-D1 
z=pWBOP}X{F?&KREjZwjea_JqiDT4Vz%vg?r{Bn7s`>n6rVdI4-nlP&^i54xn|0sj3?>fa4Nnt#^2rzAQ*JH`wh;9D+3uzTgy0@iW(8~; ziN3?e4*+(FB$8->gc~Qxno&?}pgKwbeMWsFYw;;wVw7Yet+Aojwo*V~IzR~w(L@)V z8Ut`)*7$`vd4Pf$c-m^jqKURnp=%i9l(9FTd7kvh^Z(fMg!W+*`_Vgs31 zV9VZG$14fWb6w3lV_iy>M&8B%z|qRguRmIkEzviYfKey2S-~{Y5XF|XTH?^Cu-L*> zEo&wrCSv!BehUZopxvDR7!$=cn-|Vj-79nY)0J4G;+MMBEIU2J_0-X+(=20 zojLhkjpMn?hsYP>7Yv^-^Xap2dkY!)zd9PGclCsf+-_${&3o9Ds|y3|Tl6H=x+EY< zgl%iI4mesfz#DNLUesC2Y$~8W1;bnq9ih>;b+TC!?WuFEbgjb@k#ZUz4=m3DlgI{u z_*{S*b4Ca!6hRSzt>x9&V6$ptIUhW=#Q&!v{*+3yuDCqHQy^;xWg<?S!11mCT!h$hc%uOkRgdI37Pzh$5tDisEQcvV$r55*%I%2;D4K zs9wF^S)#ot|Dl8bx?^0ELy43`Bc5{KNAvTa>0%xqj_bon#roBjC4S}luGwr#UV~cV zYR*9A1Imdmc1X>$vz7nj0=Ry%x226->Yp=_*YP9ptu7s29Prq}$R=)v*SV~W1Q!AX zi6^z&@LE*oRd=q*{8i1V((rxC55?!DJfNfl90~-vXl5vsy1zrgT}erQ{%(*%mhP6e z3cwji@zW*tH&$iY8wHa84wvXN46W|DR77>@mBLHki9bv0_=|ttkpS0aE@6aG{rDls z)Ko~3y_tTZ89$um@ox?ZV{=Xd22({w84{;XHp{EX?Q>u>jEQ%u?^b7)MK~G0r^o^! z+wSp+<=GG!1LPCA#?Y2@ee;k{flnx}FEfWm)%_tCOk1)L(@jG0Wf8y50XKT|@bmv% z!X@(~<0a`Noz2mb6I`{s9sO{M96d~ogr5H$}J0gqGK zj%;)b9QLb6%V-XbOy8LiHjqZwt|AEZX0jz9MSVeBUB6>SC} z`sz~_d-qC0xCg9W!On`Tdfn{(Lq4-Be>U5uc-B~XM4PA!y!zP49sKMQ=gG~q-F4Yo zbVMe*`G{S6RIgz%>4NvMQLJv=OQpE;X0|gAXF@*6cv_Ef?puUUs_mr^ucvsJc%GB* z`xUPp*4c$Sy#wXowI6~9uDp|J5Usn195)me*%59h6B5c-*u(InH#AwnF*CliI^T>^ zdTByndHB@AhAbxpE9iHaX}T&{D_uD^e4aXbt10%bcfO(yT}V8NpC(iFc`pBnIhy*z zq{$}92ZM_e-?;uBa*hBF*pgWk8DB!mMX6?(Ofo{741K_G-8s6@#is7gjK9rS_u_{K zb853Q^Gk5gfDM`BFzkeh_9)|vSqQRnjJA7Ad7-$bi6c!A!6=f zFdSjOqdF=GocS%nvY7qF(A!3!Ta%E`?f@l#rcX1OOAMweOj4m(mm~5_SMsWIPKuE7 zO-%eYLP0sRl8~Q>za`{mb-_arN=UuEeWF|3`D`Nd5AKS>=CJZOGtmg~a*)9CL(aQm z^+F^Dy9wesx-*7IM(Cy!<36^7&!Jb-`@K@*xF6`t0HUe(hUjpb5E069CUzzTd;{U< z;AkcT#4!(t9g<&rsf8QOS+W_8szVSV1O&X74=-hHcGhYnpfyxjDIsS#tvGjBh6fOh$pi1(cKTEv; z?1vM9fd9S5PIxV2AWXDiYko54I6>@Vz@`fUNa|B`{h+78gffRLE(*-8h|}i}NV`H_ z{rELlSrdp%AYE_qeQfbzp>%l+n%@|Hmp=ElPPX+w@s2#o2#rau&-6@oRgN*Mf<-S( zhnuCnUspLh)9BM=)36e*D@3&%nHR^jYAl(p)=NF(f!g|;m{-Cvakrvx5_PsY$SSli zN9^s6NY7p^|B 
zr~*aT0yG}*XxKlV0KmxFO8lY!<@kL?J3Xej!cbBd>4HGkv8TDstx~n z7XtLUe@H#PhyocS`OQ5$jI(ac2fzB5%`3(*{cKPWPNdw*JKP1I&2Yrgaea|cOxsVa z3fShldHCnOMwWFhqy4#F$Ojp>xBhc_TcCaA-`b{i^3eTCSlbFVr^m~{tuQ9#La;Fg z!CU6D^~I<3!3gMncnNEDWE%H|zck59Tz(JGa<0#T<|yNIS#nL66-0TRTV(aitD?3E z=%6ym4?hG|BdktjMUjOI?5|J%W5iq*Qt{b}S9*Ec)+3@RCSEIKX`upT-6ZkBdJmO( zg4lD0e>b`Pi^?sT_KQ@sJ~Q8LsWhcmEWU0{7XrnSu{}@zDL6 z=f5R0#6Z*%VKmX!2s$D^Pr`yC*I2yjWyD}#Ml1Zs?T9qc-lyC5L5wBg=N?6FS8Tcr z{MgCmUgof7ILN}Q!uvN->n8R#E#+qNwCrMiMZt9eF?U_}cZO6kKfN{| z*-nFw=YyO|X}dEn+fKeRJwgKKD^FX^KHvX4wni6qmrD6gIwbrJn^fN&8O%H9 z?mb+^d_!!jW+&@Z=+AElIBXx=9;y*`qEO2&iOH;a_xEN^rjSh=v+&R??8jjyU>^;p zi>N3f1Q57LZO-`+emJmNmAoILa~)uJ$r4n8n(y1fZ&jb|6u0x6bkvwv@FUZ855}^s zW&kc=KuF<0zkd04!{vKx$@5-P-GUMEd-C}EF8YTqoX#cqXfec z>1^{3ehl6g%-CQYqnwcM{IckKKl6;eEC}*rgM$)OLlJwn8tc;zR{zT<1%3@F@iu~Ez#%Vc zQK3r~SKf)IGbUDhbo!gG-)Yaa*;e9TiT&i+V&)Y>HsHEj!^0}ug09IZ7ukir%%=sK z4263NDSp@cs=c`SlG!#Wp^i1u0Ko99WSjfhK*ms$=S$W2a)|#~A zY|Ko+;$GA3SIG0|=$hF^FOj*)g)DR`06Mm|f1;Zt-ON`^9gU!E!yucbVTZnC0D*6= zQK0#d@rS_qmDd{}xSRPn$nitDrLY!~it1@ox!Sq4;r-eto-*+8-iVld@<)l7jP|i~ z<=R;?C4#I^m+7t}5;f=(mG>U-3yXW<+`HNvmA9+}F;JGUTex#deL^VQEt zGSsY*VHq-v-H_X9WU4zUgvO?mFWUAMBld?74loB?Y)v^c?mOo3GURQl1q++pS!P(^ z`ZMz#l9D8;v;LmG+A9>2^G*l%Q4u|yKL4|Ye_fwWIlLh0>)Kt`S{gJ;!;vg@fHGfM ze2TGNux;U?ye7G?B@0a}Y0U+dN2Ezog1-jwhY{tCR^FuN+f#sXOVVJzLz#? 
zV(7V-sT9b9W(Qj03iMluY)E=P673T{nwjtVPi_T*dkhOAxO^FVi!dC#0OEIJWz zd3KPT4n4cYvv>!t0@}CgiOE~42b5LEx;cMlIIt{No#HbkyLM*~9vQ;X8ul0qXD9jn zsAa?AU}B6jlQF*X>Iwgk8lnkg`xwscKY`RwG{9!E(x^A8ttoi&O*yf2CS*h zb$(`h@yrgA_V@An4faQ5)3drJmYS}cik@9xk=gbT%MBLk9 z|0tED{OjD;nL6*N+vpD;VQw77CMgv0S5Jg`l8Q;L^MO9j^NIQYbBz!qgbtkksXR^h z;RT~xZR*1B)g`5r^nRo8%D`;1uPryj_Bk26>9k7`k_+|Kf zFmSWxJ%)BCMs$mmd|!&^OY3l!y~0OMgj=eV?j1ENK#Ij^)kI=^l>L zVRs<(QVA1%pc8FSqX51!hVAFW&{NZOZyB~r4v}NQityI+k%-*ci zW?hww5+72*g~V}y^h+X}Z~WDrQtImYS?k9DF{dif}|i_64EdTg2D_PQbUe(cZ0~#4Be%4ch|@Jzpihe?~{G-+|OS3 zZ>^Oeh%SHskfe`}DnGD1ZyK!OPdM7c_?hfiM$9glw??Nir=JPG$7sM%RJ z-yngQ8~yQh*D2k7q9NI*cs3&C!T}a5(H=+U=EEu1R_MO%e^XWI*S58xkDH1>w#%lw zQDu(lI>xn?2=jPz{b!25=Ft5Iibau3r0Kfai8wEd3(IX-)~_n?ImPp3rLA#oKlY(;W3c$h^=xnv;Rk!;@V%%Bx^k28D{o958H0vLs z*n+f*TAl&>=l=V>05vf|tNxbZ>Ovg4p7FwesB(Y2{eQP=71Ik8Ml5>Wi*g@UO(+Gr zJj7Byza7R%S9bHH@jGq%8!b09twaC&ygo&v1nO7w7(jAI?f?}O_MDT5N``%68O3X3CaLtE3p7z%3{G2 zjR3r5G7SN--EV%a=#4ja9Pjr2Cz>NC#XWbtUTN@&nJ*NA3RXgHF3wx;Z=Vz^)XFF4 z#2J1H-}Rf!z8w4cSB~Los7jCGr5#a3E8PsDZ>#nu zR(@*2ytpRcDd`jIrx&V=KQ0RY_|L@MT{S;xnRZ78aMyab-MT0E2NEjys)oq#P5P3+ zQL3w~H`i^WhS&*{olpIF(|=$3#e^Lv|39s-Uu8f|yb-r*OK)e0FRxE>r6f=a7xvJA zc12X2pX68S-o)~LXN&GUY|{+BGH?=wL`S$?`+kmQ64ZZ}f2SZ&$A$dT1~@ z882}+b+QGvJriZRHvj}_#ec%X7TTU)&7TA&ZKE9>1QVksJ`o#OVaKhI>T^`s%!f(3 zvyBm0=cM*T>Mc3lZ5ueuR#Iats~*1@VBor1_dNiM@n1D<&+q%6r=fusOkQfpD+Yhz z2v^<%;&{K%Ij7Q0a8jF@rQarF$S>|6D}j}7H&V?-{t`TY-^d)R2mS;JRu5H-6mFgs z8+y&0c|t5*>4qM;NIgsApw=I`Q;l%+URS$W{0R}2w;NYdv00(=+O;zg-;77Ojipen zucqbxM@E1r*my|gbZyCl%(($Kkz}36nx13sARgE^P36n0Bd}9#It9nR?AK2q?c58m zH1MEoku6Qw^Qkddgfg(xkPb)g_jOYcS?1#y+4s2nd-WVU_l<{Z(obJ$Bk%MI%~fzn zc*mA;x8KzBPf$5hK(ZG_%PA zM2Na~;;H_%B9JHX$~sr84YH6G(?zh{-5XhSZ%kam414Fw2 zjQsdKOP|xSkwRj>a<6etK!gnfls_3-@HDv^NmTdE1P4o?#!GKuJlzVDg6YsfAhnCC z-Ilwt)j4mG&_h#$ta`H?p7{GA!-Jx{EDyf0uq96(zXx;Ip4L z^YKXiEmyXlMB57OvqeCZ)!jzq-A0bD=Vz*Cl%|#5*!{r4pVaRWYN1|Rpb(LRFrM@e zFY525IaoX1V;N$bBYSSX&TQ|k;Qtk38l0j3GIbqcZNa(U;O;TKJ5OW5Qu)S 
z@?ny@BCx}JQ7q7?t77-`*o~p-cGoGuCoCrFg9XtYdv;DLpJNa0xqc(Oc|inUkXaBg z@T=o(7q)QErw|SNo#+l`#s7uo9n$NE>8Z-REZ7B0(f|i)-0EgaSk}7%{E_yRUBwQt zkfPY5dQO8fp82p}l&I-0&Wim;kLLY?ETywYSA`nOO;zjNiw-G8@QBd|=9Bu>A+^%e zvR|BGezpNThU5#KhvTW`S@ozWbSLHsoa7a zl@YEQIqGrhe;b;*k0XFQLTLQVdp8np3l9_6H7ux`D3MLc3tPO5R~RG zB7jR+A*p@ks*?q?U2C%#YaG#Gktl47usPlwlm%bGZH~1Hjl~osD0>h(%Z1!k@~qYq zzCXhQw5#@sE0@3Yn|$*=M?|_(C}Y3uRO5nsUYCyR($}8$!i(xHA=HsEf z*0v9>(&l%->F8(6z?8GpI02)*FucDc_2{ZW&oLww%Y!weJWx!uUr|e-O-}mnv?z41 z=~ol{DA-SXE-sHKx2`JTyJRi!b|vjgRq|4%pY@yYv(aawOe&bMvyBr-^rGueWOJ4(d8aBY&*uWMqhDA%^iYmUG!@H(=LPm#b3e|Kpb6$ zcdw0EidEzF)5*Vr<&-%$`7C7Z4=<(`Ti{*?WwQZraI=1d&3=rpGrncyAt|~ zABAD?4w>d&t~c~qps3phsL?1lfI67>8#v&!&bSZPMPR%1r$1Un!eG>{>K0;=QTWJ^^h4WkE0uh4PC(hE|@ zfVWT(%`xs4z-dWV{@6ytXC}u@iP4ea()J-z!$&(ee*s}^t0u3BYPW_lMFzZ6S#{@h zLLAc}aj?MiL64SWb#_Vf*k1Nsxa$vJ%#YF10QdP@4uje>RJdu@GZklR1)R#LM*O=}i&|GC;1j~@_3=Yn!^L2@5Qiunm9*Nvtf z21a9_tLwc+r?55M(xgE~r9NuHG1{vI^ZC{iv)|U?xBux^akh$%*0}05lZI>LuO%jI z=kC_$-oY&Aq?)(B-2p( zff=eAxKN1X0#*Xp208_#xse6(k>`+(wHMaZs6t&w1(Ptb8nTk z{N?j^QnF&t)Z9(8;%}RYBC~VPm#mfo&yBc#s|6jOr9%LA!2EK@;9+k5>;>50bn`$n zBbpIyoGRBARsWa|E4kfa)z8eh$mujh(mmPtB^shsn@VA1QJKVOBsc}D2rFlN`1>9} z2bPI_0iZguy7}=eW<21`D&Pydw>cE#79!g9?jh3f_%tz0&jpS zP3z6BIFzx|ZD-9Z-dwEO$|Q6&$NHCJuA7I$iBgN&{mOd@lrq>z=?w zBhVNT)O*Lufkn)=GS?()y|`2QYs)NmT}wZ4O}s+u4(-X4>d;`1Aq-I?|m@F z6D{!{1+)Tc92=PX6P9)^zW+#C;44@qf*hAoUe1lfH3VzvH`S)I>N&4qU_v<^j|`@b zc}_KPn$@A()vJ<_J(P-z*`ww+-=h~wrm>7>04g39n9{4a*z)d{15BaxNRev>rxSCE zAeT)z*`wDMz;r&+F@77p7e>G4T+bVfTN)O@Rv^3^C9kxPE%&}HEK!wR9Hxq-8Y%T_ zJ=IQBapSMy>#_ZM9YUyPQP;KqEU05k{}4{T%dek?adYl!u00rD{xbh|=bs(_{2A&v zfBPED*bw?!JchZ@9paDff-g8Ua88g)kC9)*C+EMio9~TLSWx;hAZ}Qn&h#8NlDZsgi`_(|J_WjK;1~=SR{8_bg*(nQyDv*vR4R7gNB9}h z4d!?xTebNSO0P*n6p+R)btB)lEJgV@9{I#TnP#V1-Pxg?m$W`&?^}AEci4z68?PSkHV{RpU2NEbYdTw95HXP}f|>Ek!LMNDnxXr{Z*Z zL^_~e>@zo)^fnosWPXb!fQs8=7Em`U9_aGklK*qzya#tD%{-LAY?}yK+7*;P>QD${ zM*goA>W30KxiC2u@LI@_UL8Jo9w{`wjPpN zsS^N{I+8@1{t{{9=psh=Xg^;MI>es`EUu@`_)W-!rOz1nEK>kh_^h7`02zKjr%Ts* 
z+Hd|drg>Og&+EpzgBpyHpD_}()DP=E;mA0W1)|=_uAkAJvxPMAS75Ye2zXWhP!c6V zZOHO-I_|txJu~1xlfn8f`y!j~|FZy20oP-!irkkCI^ocGxT$&h@QWMFtkGV}=R_KH zu+SNR)ZqIVFEKbQ%!>}L1aVM#T5q2)&r$YhiucUjQZ`@{yJ6-sNhTrYOxTxeRgS6h zyV`o3VJFbBabWM>SYnW}^b0cfL>(q?AxmSFj{{4R2vvHK!9djSxzMz$-HQ<+S@`c7 z$2q>EGis=a{|Gyn&jB%TZ`cm1|44+o{2XF^lR?*%_aS=AL5wVus0Cle%c_Uv)E6*t z14=#%T<;=rf6*ag`zfdCGuQ2ZJ}C>*zpTqTMcn)ayW5@wnd@-Q)Y|FB)_s3&_nO-` zlNbHSRnzixcQH3uft>m{bS`JFZotlS7J#VEqM?PqaCjF>8G)aFe^wE9pGvY$(Y)YM zYP2UViR&noQSQ}pIpCze6i6OtiE_SvpbgWso{C8YO0NaF`Q=M8Gj<6}+gp1rH>Do6 za{fDn|3t06N=*8!nyLLVO9`ZC!mN%q5s41VAq@6<@nP~IgE=bW2agJ28F*h@GKKE$ zkL(uGO?_b;MSxd27W!bp?AiF|U`eI&U_{Uf%l2ABO$@cWPY_(h^}2^;u#tG;6Cwu^phkSVtY6fW_a`LR-@d zvbPE*ua0h(-!4sHQPU_FtJI$_p8G9|C|FeC<6_w}8;_3oh4oVGI^UNg&^Qz%%+CK9 z)k5YQTrH`pq2osbgw+k-046x{oiOM^nny?7!Hq4?$9QI;ILWu?i2;|1+Qi}{{BjBg zz)b!K_D=y%RHQoUlH=esviqqGa)=QBuSQIMbi=TBkPfX+^0v0e@U`*iI`nC^l4NH? zvgUJyrp$a1z1K=KXXhyLBdKbAhn+bQ!hNk8AFWUU0EZ@O`D)2MoX&a+knK&ji_dA@ zbnT#h7SaMq^}ublU#qlPY-<2Sf3z)I1<3TOHQi!B*xhp_UMSum>lfTgcZQJB1XyOJ z+hk(J?~|5o6W0Z89Qm$*@O}Ntr}Kp|Uk@~W@^a_DYh7#hDA=o)NgjPERs)v`UdVNJ zau?XpQq$3_Aog5&o1rH@boNiJH95YL64qTFvPKJ3bCDk?+rc50;7@y%eJk0fxNejT zFeGA_SJP;&hJ_r(&2$!a59KaVsYXiOUEo-Hc6JH=;KL%trk%C^n95af;t!C$@K1wr z$v=iEpJpgiDYdC8th*;?!EDo1+Uq>tiD|I=6)u}`2*iLBw30pFC^>Or2M))CUN*$( zA%72IYaNf7{aWjx5^i-@Sk0}ARz>xc$p#ERJ{5vPIcd#P)dvK|U z+&^)scG|=qtRog|=1Ox;8SSb08+!)tu9{I{JfZ*&G`J`Ck@>_*nrb*-C<|rd*Tl2T z4(+-O%&P`|Ahfn;4k7*2_1#4`P`8wV^Y2X?Hi>UnpdAaWhd^Anaqe^Fu`|DSj@RMa zCpv}1c!F$-MKygOBeT0fpFHJ>`{Q8kG57r*0!QOxZhJUzLHLv)>pjJjpcu`+R^K*D zupRfB&mSztM<48H^ofScWlYTX+GNAxm?9rm+-5(4_R!$JlDPjko%UyciI4Ir1Pg78 zN9J*a2K@J9;2 z-jAVv%==#CnmehYIQ8W`Tn63+(Qq?*u9u)i5dITia!s+4FKk44A3LU7IhsXCpiQ8( zMxrBg71ly+OL17WFIrVYch9%!SAcmWo%M?DpOfI4=+SWxdwS}|JVa) zO@#UO-*?L&Tr5gb`aEy>Dky~mfeq5aoli8NpV|FRf&MOUt|+2DD&q%`D__dhd(sl%r{d+mGFNR7mQMJTKs+g z&e$-7+FU)blGuF2no31A1@di|UJ+T|?^9_-!4B=8wHhA-;I^Poa)SFQAEIURnP57^ zm+R18aNMJEg%}tC9$gS(=ws>+?nB?KTpw_jm!1e`zJq@A3|WYykUR{ujQz@Ykde>W 
zJm!UzM(J!$oTvZOdTLiSvIMkAa>_bb)$6>AH`t&RgnzQu`K;5lKP9nSUPyiIWjrVL zZ1TK?%wEm*G_<%dS=iXiHM-^Gf%o-1n)vbOwU=Ok>*OD7SbpKO+IYqaFIBee;cHXH z=t7AO*MURR0D(Nt-K@6Rk*$W(6q<-0e50+LK_J7UGZczzrdZyT0Z`OZx8Qd}*O&2V@09owbkT>xQKj*04QiQNVjiaO26 zS}whTdjWW|v2RZ}x&KkX9($$;*NTwtwklV`!IoKm&|bUpOMDZ*B#qig*FXTbl6rUkGxBI~A5%Ty~TQQCxoMquT8EM#?yl2)+GN1lnJu!&jetK)=OD7JO ze0D*WWndz9Z%O#RJ^}El?kIx%87#>87@8baHrTi}pZpfF6&4yIU z2WRu~N_vLo0D$DdVo@E-`BZw;D(2!L!%6t#*9g`qT%9ERpy8ikW29kCO({+1EWC$? zu1mKSgI=lYvO@dXoRnzOyS6>rKkurVB2CrH-7QIZ_1OjOxP_t;ryIU_i`~pdjfjnV~|D?T=)+%3EI1+FBtvDP-v* zSo_YntRhs+@RT~7c8PP4D}q{+|J$}j?ET;J;mYRHa$j3JkN(3rGUFQo1cc`EcyJWn5z) z+)Y@{A4O5yTaP??+v7r9AtU_ZAR~ty& z9Nxbn);&S$pp;v#xJ|WO2VIs8{D5ywK$EEk3na&-rpP28guB$Y1}9-@&4-4)dAYjE zpuHLIl6jB6v?M0H{*ZGOLL;-IqSqHWXPX>w-XO{MndOh= zHF4n9%S<=pjE6J2qq5G}2spiBZQfVRS+T1!p_f#4kHKpXUSg9rxL`d+_$jvfBruum z=St>j)6pj3gjbw>iL*Bz|`9f^~J_do}<@LB*Eg57JGcK?yK?0&1hGRxJ8s2=!yekM~dCf!kx;8$O>e2{nbRE++?WG`p;g1F3yNgTkTh$GHn|Adz6xUkgSh z6#?v9rL>rV?TAERYL zjbH%kr#M>K53-D%XR)ugZ*Y86omC{352YBYJ4{Pc8RWYyn-BQd;-?4!2n6Q2b3Pxy zjUs%x7B6tN6#E_ zx^A7p$2;)DMZo`*Z!Ne)E12GUBlAaM|F{=>nyNEb80HGc(rt(ze}1z2iMdtT@zw!)XO0?VB_Ze)%8~p6QrO+<|L(QK^1Z6j|n9$vy zS=S}=Irozloxh#{dGuKMVG^WW#lyY{g6uXY`$XJ&QauK(DCq}W1^1(a&Jt!c#JW5o z9}rC55!b!96Mtl?&RD%TU;KXWfx3Tp4ECmap_E!g9HC*t@)M%?tm_?S`RL{t)mDrp z=-^K;#^loL**`j%vL2u%N1*VVq|8|o&n$LCHIKG0>VelZTe($zl_FqmXSC(?+j%li z$(RiWs||AouH3u|X#NVto8+LvmL@!eD*F+W>MYA1de#Us89$GHGE9%utdf%VRKQmW z)RdJW-B_6Pl4<(hwk7<#xtkX3-rR~Z-sr7q*;`BP;x_`<@Bjn$)L1gLZ zxoZpUXwyJft(|=Gp?KE^p}dpCZJ)$*>Y&>tLvDT^_T=k#|o ztRZE(h~IUfeoVs%x5veMqJ>#Cte^HHNowg2Kk%iy){-tT8O}=g-k!hHYKBw*N%loN^D; zmttH@{(g6@19# z)TC}@F~o_SjHZnfX1{%GB5(kp%Fnib3|M+M8G%qxnUu%&X*V{AciapZnew~<=Wh|o z=c2fa3)`&`N?zs;1^e5<-uF)TaS@wKlKU%v%t)8B|I_}dgPK#BJhgq@K*rED0W2B- zBSf`E$5qEe07hyedY;S1Rk)&SXSoaEC<=FvJ$lt^?zB{!D!2AeUxLKsZzn)D8l(tC z;^?lqM{bhXrC>XcUo_NIq2*E&1F!wpzW)mrkn&Mj2_a^CQLqk9!$K4841>?;yfWQe zAzzTRQS#g)O16Je3q2kUFQzhUNkX1tcN9;?@jep>S``c0FvQE-!hIw 
z2rW7@j{kKwx!?$8b|~LHgw;UZO}G}Hm9MlNag)-5n{iOgqYhzA>r1(Zhw9`TBRamN z0&gb4*Jx3$YiP_GYgoH<&Jf`{yzDW4*%gK4=39g0f$*g9ML+)?K3wh5E)sN{aO2<0=%dJb)rOdv%UioboV4 zvZ=JGc_RxFD#2)gCJby@v)(UJxwI?U&cf1@A=xiPuT-Yr?mWo4u#tW$ga4wFbNxP* zCM~Dm^rXL0OgSM%3rd2w$0iV;M_U+dc&Sfj`)5zj3amr?Y+H|>FT0v5HgI(Gz6VEo zZFX@ey5M63{=jrNRa$H#Q~Lpc;{3b>iN`0v)!7`?XM|{xh5b8P&XvT^Zt80L0wRi4 zLgb$-@QsurRLN**ge-Ix2FQ{x0@eSCGGl`?CPM93Z-5^UaOx`y2EGi+E^#!g42r4t z-m&qFHuuQc{nQQx{Sk+8<Lk*SP{)8Ig0W_xfMdYD2W^qnthVrDr988n z_#vg!oY<0EJ=h`VWHs53)433U&>e8`^$L+Ea2wXwN+r~NXmT?(B}YC4+{5UnMja+ zzsCP=-~U8OW7DRM7^SPCP?a}c<#pK)HPrL3hJ*?JxGTc$@jRhfyg=*d$<~(^;7{oh z7*6^raN5C{7!273hzY8VtOq=2*wX$*S#Qqq_Lv%Xs&YCG!~a(V*w#riq9Q6f*15XT zCg&NeryIszCY>LvI=IiVRkde?j-B+T3&78){V|p#7kbDK6%<%PSmj|&SjgWb4U5&J z8Vf~?2<^K2zCzR_oLAf8S;HE0p{tzQ^28jAXwEo$n`T?`B_oAUF`NoKIutOvk~wh8 zcE_sX`hm5$wg9=j#DVQu^2VAOQOrro>qS%9MLPb6$oy(}Zin9FEtOMIs(*&T7O*a% zkC5beHKCzStNz>vXYu=j)G`=T7t3$|szAZb?>Ylbt?*g6c)Hx8gVq!7!os^#Y4A*> zq<(=oxO8NFFZTnMA9B;A{A!bst6O6nw9;@zW9SLKI}F;=0y*A|3UfgsX~lv^{k z%?D%ptINO?zszYZ{mX{2FGXpDaR%W3e7k#(kjAwikK7! 
zPlOc$u|lfKubTuRDz8ID9_6^2^ZQPq9(gJ%2CkD=we()Hz7cRkcdMaE%@jxrW8cz# zPr!IjRQ@u+b|tDC*HpsB#{hTWPZ55HM=SaKchX%gRtq|1-}e^&-n!TJ)bZ<1CNbbT zHc|XIyi9j@W8l#tLpGs>sHMe^^U$5mNHahFKSMfk_!{*Izo{`jKf`hNM~#Rz1j111 z#o|$?Yn{DE1pLkln!B2t?>#lcqJ;3Ka+s6$wJ5WZVUug$s%E_{)>$gg*R44Eff~5S zM;(E0H7Rd)jn$vr8E z+Q$&4cz&F}FD8LR=t-W}W7Hg`>_Xbi{(c9ZJyu%OdOmPFuJCj~!UCG;!$U#E>1+oe z3X0KFR-w!eIxN?5lQOx(QNv=D5k{))dngi?rVlmcDS!LnPi{-t3QeF8XTqo8b%wS& z*O4|IdQ-0U{WLdxvViUn+J)fY@z^q5fg*xAGPh=Ze4n^%z2G1X%lddZqZ#ptNsc|r z+!yq9ZD8rp;fBHx?Wba+!$CL%EkJf~u) zCMmCUO8+%DAc{X157R8+TEj=1Zg7l`09mab3IyddeOu$x8C(OW*35p+57zOo*Gx4;K=w3|$Gp{yVXlsy*e2 zREn%{RnCG;X4OhiI~0NC44Ga@DrswuW-&bQ8)vr#K0}7{@opqOAeh+=@E z4)&p^*aH1%moPX4@`eA)u85$^5XwXHp?El5i>R>G`L>0AM$?l0@h<1lpNrye&GyD^ zptt6G7KLlnBWA=l<^Eef4vB}+l@^IRcqUpY=nZ&Yg4=7O>u!ZAC99)R`qfmHt{p48 z_9)i$qT0nS(2viNi2R_3rXwGm))6Mj{ib5FF?)g53qF|OI%WiZ=K>ism%`A zjS2yyslRuVA2goIO8JCEP-A|!ldKJ4wEBu)UGh$_J9*!)-cvrZmkay=v_8V?owo3O z+rRI#crE-ahbJ&)_;D5_On;~jE>{mOvzxdP(oq>zH(bFt7-$0_I-F+5BQE^N~c`P?>8#~}VSj~`OscGcS7pxiBZwyVutXJe z6{{7KS*RXgK5zblw5{2_D$154s{uJ&&)t=2_~|ojmP!+0r~YMC(VjWrUheio18WkiD+RQ(JzcJ9Ox#ZIv01rUz6^ zCZ@tXJ1U{IXQS>0FT8Cqy0NlSKZD#f8AH;7glX}F2&l9%#H-E2Hm0;Eyy79*R3v3a zL+BZ0&0^gzZ}{XM3ko}`o4U5qoHctf$gRx8&RBrbc)rvma(7>qq#>*OfnYPs!mnFl;H*|{;CD&SWionUNqS>?p4L+KE zu{H2w?TY#J_~h8^A~pXSI697F1^@}R`8KhfobyuaBigbAM59@wHQ-6YX^zQ++6~hp zu=7A#k5XZ4h*(Jq6GX;j-Fy%dg$9S8uQL@{PgE8x{$R%vS-u8S1DHx#6%@S$*IoJ0 z{dA(ZXnGNmd@#N1*Cw!;Ke`itbWL_auqio zRGR*iCN_yaQLmC}emoVpraWQm_Leyh@OA_$p*n-u2zTe9L^89dj3okHN(3Cyr1h)! 
zXuH-|UfkjGMagnPAUJX7J}hy4cG8p9`k#WZdNi+{RvNW2{M;KuJLiynhIQ|98wj?G zAkGRC^0rQA!0z|dJ!09H^yiN_v);CTlXVW~(){i9c->*mW45F{XoIzyI(>w1LvDgo zEK&CdCEc5?^R34{K1Z|2)#%Bo#6FL#S z?p5aMDc~SF8haG3*a0iv%Y?z>mjlP$k`YYPcxO+QxTh=kIA9K*A}L+QSn^qf^{9Gm zMr_k}Jj!fho|Mf+_fW*daj)W%4xfl1%1)J2L&{O0rBCSLn9FJ_1`4`}dA%jX!y`ip z)ojeke^=b4FQ41L(9_dX^26h64NYtN{4gK+F@BXBcQQi)HBhS6yP%?qDnr*AJ?O^%Q!$GX6sLICI*3H?^CG19PDV?7N=KdlRiZWMD#NtCe!9W z?!j^GzCN(h2^i>szg2(lDt-J-gT(9%yh>1V-X}+Q=?T4ZhSvGZd~Hc{g1vw>^ zwMZ*$C-kqFl0U63apwy6P&DQHwKnl5k6a!N4>Wk3(iwZ@~Pc%DNuvrgyz`0%@ z-`@Vov+Q|)LA|R9pPtNzMS&-zRqP5Fs z4a-Nc_epYLuy?PNxmDAk)vZxp8reI+(4n0nB|HN2)?_5I@pY0XcXnXeS<5g1=PT8j zxDE=i|A-V-HQ~ar@?)G&sd2VtagU<1 z9=~V(DNxq$pwm#8<-ZJF6MR9}Q%U$`+1XFA%sr#7NQM#qwMcoDZAR zOH?JNu+fZMSrI^(nJDj97|$?RQ20!Xp*V88faRc-Z=R>+^xk61(DAZSP{67`Zi2H} zM~}^%O6F?rz$QKOY4#)(j;V9?EyCJEC$7`_4UPLftyzyQeGTg-Gq7$fcBpgj?S8X{ zyh4hh7hY=F7Y`}U#CWL+b(}v0dfm0vJ3&{6dbcLl9zs&+RUQs83tb`g1=h|1Ng#FD zZ}>L!>lUgArZ)J)!u^JN|9Nce=wTWkP(QGSx&iK6wxUeseid_9q94*6sFb~&df(hS zn1tMPx&{orTm|_7h@@jW&@HPOJ{X?`fk0fr%GPP=iGfqM9q6i4j?plaE9JLnm2SST z#6nYLQgE83af|Ln3)9WdyJ_#XSLjlS?1`lYLVc;z2vc^#%@3|u!5;Xyjzk$`ndM0i zlJv4FaYqX^=zmeGY&!%mzgo|hIsG2<1G@y$es!k!<#6F1o!F%8&&JDn4O0wjRz}Z> z5?+wPvZH1~xLwVp7;^azrnVdv+I}Druk3w752;NuP<&V7-WL+3BNz8;TbFlI-iVu5 z#WG^Vyd8P882`nC1dWs~1VKL-{8hf!ELfhnV9S~PD&z)IjS<68NcjFGwp{MZUIvaQum5K5j9po%ZBHR-FYB#AZG$Y2{_vOG4wfC% zbLcAcpt!tsed@&oFfLO5cuQ1*aQUT7<-|kaSxNDsysx(4^Xyzd-XoDqi~X@BtaR#8 z&%3_&0zrBbCR^#uf3*hCg1z;=2NBFK;R0^F_{OHtbYSxeFaixxx2@6~Fxfx4A5jc(D6`a0&(e^?{@?8LjP|_27x2>Ail*-fL)1hhm z9(tGe6ln2H>j<->f$N^=ogj#^p~4IEs$9D&Q%C8;4dtJGr+1QZca2Abt5?Sl(gaXY z!P;IDIc5+!*wJrJ@#$m=j{aLh^Mg!Hlr)DEriMg8`{FjQ{^WHZkH>)LUPnxA{-qQ3dCbAIJP+pAR1R*>zQ@up?=bj7w!=qa)TS3VP~1C7 zF|$cNoOLNsc=ff1tvD+F9}(cRiM5Si@;u66vB9d_JJImukr=iCMM@Rc03e4fLyx}J z4>-=Wys)-l{ij2TOuJh>@~p9e$elfo^^@-Nk2u>Y7uMrcibFOeGg4Y`iZXAu4meX8 z;h3kojrQKt`nOVT+ok4-XJ`uXI~?ns(uS-4=K5^=q+|P#v~^v>-DYIqJ+=@m-UA_> zzxQJWoBreX3p%62P`*jgAj3l`DOIlrPW3AyGfnNlEz!^nt0*ZH_SUUGT>VcQaP1Hnm>_{`eL50h@5>d=jO2v<*W 
za`#KGa}rdo)iRT>!2Xgx9lSg~2J%bEY5UEz<-7~rn(gBv)nk8b-hxKXisVsd2$?9p zsNYkW;c%pGlPT`7+Myy`>94?7C&pkPLP+S z+{tT8XW>NLe-K(3NZ^dCh3+&nYG!=o`Ym$GvYC5wj`)9T(?Ic!e8Esn(`i8*FGX|B za43u49@521&*2xG;~qosdGS3>#SaB<#jnT=)<2%^RO-lglNG@xvZbF_qYFOh6er)d zo>1b+ee*4BN_Fk59rWz)i)W;NyrefDst9t!!@;Cxq1!q+y0>?J|74ju;!PY}yUfNf zzHFRWH@7NBsxI}Vi5R0FQz!8-ZA##uJ;feDrPv?0H|n^_ofDJgP!G+gK|I!qqbw=Wq}CK%1ZK_46jJ;54P;O zLXW80K|=^7I$|sC|2hzIeNd~)Nmc%EA(IQ6$*fQ3;9Nd$_1-xR^J*pWaD3DJWh%MY zznTsBEfGlnn5TDEzV6&KqvvP9-_n5Tk03PdLCG1qh1tm~>~_(BCylhF%KwlkiJfb8 zTHzX?MLsP?0nQiP9Q8KiGKr+34(X zZl=%1(*dQN@^SgxWY+U!@$0X1DtNJWwCd7&o5`EgWTZkS`$gF)LLLrDBrIypiS})e z(U#l`t8~b4Vx$~#O!Wwv(PN9**sh`=$?97#Fy@fcc^iNrDzVm!IPt6r#n=_kDyn6E z`EEiJ^LNP7#MUS|Q35sTN|5UgDNISggw&P;lN(xis}P-)FXD)ONomI)L^Rst_9t%e zZ?Ja59w+M2hru+jFLWo&jf{)CFIdl%ifmSw!zoUveweVwd%>6&P(jP(fkrDe{uNcF zzC3||-g-B>-_d&I&oFCS^h<3_ITRt?bo)=P8ZgxSRb8nMjRzIm)z-RUMA^R$IU5E8 zc-IVs3WX}}v`+eM79;!`BR%K6FH>$OY;2X!4vPZSD1hc7lP zYbzFh^upWI(&5D}tm2FYlRZCudd5_m+lq3U3h?Zr=2JPiVKV+RA{EQ9Flhy2oVhAt zG41?)L2H^n|H9zPB$_G!GpcxwI^U@Kp$nq&nXWfUe$FckHA#GFV+op4i%z;An70#f zAcrJi68V%(S%2uFoiYvbcw3+W!n?qk1k(rFY zgC8ohUnGRtIb9IeC34jf=ixJt$fPa){?79u@MU7dm-y(_i%OkWM@>~S*J#N{q;4fs zF@gRd15A1#r2>5)ah@=owkmX2ua(C2RA@yfx*KwEe8cKb-yUhVK^Z}cIn`!Y^fg`y z13lqmkKKK8{kYiWYt4QU=I`?&GVpPzjC$D402}_+5NmPDB1LPo>_GR zCoY)w()*y=jpZP_1~I~8fm}&Li$`wjzW0mrFn5MVha4CTAR(yPOe9r zo}ePhD5YHT(G!|zrZg*7883qO-R}?Vd)pgdz)Q6soAGibF~1>{#87FohRKqWuT@8~ zL%#idDChh?01rX%zPonAXgtgx#%mE%?qk}B-P@6AW6qR$LDq%Im;paE z9M{?4Vh&>Ct3YSGU~ef*edwWwraWm%lHsq&o!6d%)N)!r_a*~(TiRy86p6{}K6Z>7 zBYqU49B~D&Xt`9oMHT>KYAcNM7k}{=C*>=;WOe7FsSo2kks7AvjO?s2$(#-W(e#x6TfdG0z_-b1d&e`^bu8R8|9GWrfSeWS zrNBy#y+E+fjj=Yv)g~T0D!gv|>6ySDeO@yd<1O{Al+kRBIgpO#@&6pucn-b~#7Cp# z3mLfN{HVsb=N5f!&3i9i$(Eoq+hd-ey-^f(tOquZI;L@eSP#yjY>{j;PR4+kaROhi zH!r%pv+Fk57n^wL^8edUfm;J;7*;~2JOPYl={oasXfl3yaBRxDO)>aw$WoweX#ZnS z@+OSJeFO>EgV4^iv9A|&Za^lK38KJ*Ym9MBDVyI{N?j2Q&neVw4olIdbu1pe+{eiB zECRWv1q2c>=!-XH1dmJ4b)OU!H4VHjj9#GAI6Jt~Gk9vMDJbPG#f9Fb>gZ-g=$Tsv 
zf8lh-5n6dpDSYk)czx$J!y}CF5zvgD^X8P1oFYABt?K}USB=fPd5I|le5YOm5I`fH z!4!LsHBeVAmNFtb$0TDyG|AquW_Zn7N^PLDtiNJ^(XjWv_q~&sO~JL3#+T~ZcAY~8 zi=W0*dr@Qc{?YR{8oqc=%V-TLC{w94b^9uRoAUI3azJB|r#|(m(>>DW?!W*3DMiBb z-G?7awdfq7eLeZ5bJbNW5Z(SSs+`dP#Q380n_~vV-0R#L9>Xwi9Dw0p{ncMhtzzw0 zYk+q}T1B(~+JF|oU{6MyIt4ba(k?T8i+HUvrb@>{e_t7kt{*+h#)Ph$TEbROQ=J+o z3d6}cSyvfR*p=^(d3}3DT3JQ>u&+%I~xSTN6=+%XEtgmZa?omsWBgnJ7 z$DIAnUhCji_s~0ZJllsH5lNZ@rB!Qkh+aNqoiF#E1_f>fps@)Uw+D8m#2jhWixkf$heUYUD~-9I>U$J9&?dU={;Q8r`BR_{!>B&2;V1qkqDT=IY`h$pal_&u92&dbNh zu_mR{Wb6b87^1w5o?8L2>wSr!nYT6Sgkqlq5O~FXKn-uCT8Y*;FE76EOzTfcC)|yZ z5(;4L3KBEM(2b`%qmZ#|p1e~JKKS4?Hs^P|-~}%@Dq!9OU`y4?t5^yTa8(B#ye2nX z!9Qi}tP%TwQ78q;9IUCjnfva$@1W?CHS{dN6nM@cDD9y%ta8HJha*U^kMWw8wALC; zD@w&{WQO_y(FMH8-~-B};8Z*;$|O=FT|-2z6VAsx^j-IT`+RQ1lq!n1YC~QmG|))COa?q-@o%XGtqp_3snaBB&M;@8F+IfyP(|~wU_uFP5MI%O| zx*2PV&dvt|OiTG2*Ulb62lOcwx=6WouKj&KM_sdRL?phRuTx#o$m2j3nVBxaAMudA zP@8`L$7}Rb#_)Xo{Xaf+uW|3ACpYb@_W3x+_%FJyb38<=-Mc&NGe=0y zpTI4dw2w~Sw1$`ep9ckQ1)xD{n_W878{hcG$>31fJCD$e0Ko5^dtk#bd`C~8|1ml# z;f!h}B7_Hu;#=}9Zf!PX?3E;ewlx7Mc50=l$J08;+R?#*^k++**1^_%ms-wB0ujq`Z z1_KeWxvuC8rBlRGT9GyB91uS%qGjNI?Q36~q8IiIFoSmXiWDS=Sy~e`b^se7R1L&B z4`~K!21dq(_3&)p__?C~fT#ag3t;^#oF8k+%taozv>NpPs#NNoI~~}S=|a>{_>YkJURyeuIr@s(YtHiN}=_8wFDvtqimaxe;k)N zk4;;mfwlGC{=Z2k$8+|&R>h)?_+08Txnzz@#_%Q@p7Q_AndckdYYC61r3;#~NNN$4 z-J!0`e?=!HcDC?mGT$)_epN0qx$i?tR7z(3Kh=pc!k++8;&EtWymT#&`9e`B=7@ zZ~G;OV@`wQy8m0RoGGr$X7O3)*Z7^2Nrd13&9B2xjkUVmbbx2N*ZiM&;)&_JG1pjY z@(~Z4uasb*T;1uWn%-^J>vGNcQQ+19n$nqm_Gf=~Vsyp5n+mBz_crs0>IqDSO|=@e zYGqW}Jg%qkYSDZhq3k>&#+(7>QC=9Th322`iTd$)Hmy7PFar0c$84S8e7 z7O#0Qz1LbxtqI7E-Yer^#K}YVvowpjS1bUOQt?c}oPuHu7(<=$00AHdEkqj?)b`v> zYjw!~qn#8ub7b86tano$rFsDd3gsybCxxR8!xtT0kG23AqsLsV8Ka@pgjIXRb26UL zwk8-ICTZ>JU}r2vSbdJ~NANy^9{hp#jKSfaYU_X*hh+k_y6t&4M+k+f9tb}GW2Hf% ztu=El4SoQm8A@c0pBYWNrgyE1qO{k|&3q}&%)=Pm2u&DWGY@SO{hIl1a`?=oVPb;N!Kj#Q3jQto6=uL z=T_iL+A^60(A(E_=9|}I9ObV%)4~8xg!8=0-(vo0Nx1xOa1cMCIHYF>}$MHWZanio;=Fo 
zk!|9=?x7>o-~MOKfaVzk`%@^RK{Xm+<@W|?#zI4%NjYN(G8oj*FzEBPc27zsFB+j$Lxd&3kYSWz z&H!UTCFpqKQUC-uL8xYhu{B2KqJQHkTH|>|2r`-|fj}J)NtvcV1IHPxMR+J+!r0jS zZX81G)~?zaotNYkD^O-UTy(A5Dy7Wtr4ZolynLk-Sf`v3eot9vNaw+9d>yh2FltC)T^Xy2 zp>^u=Ty<3b#+Ue0J;)dl3~-55JpAy(Q;m?lCpzJ*8uB2Ie#XS6{hV<#H_qc5`yxQ* zS=F%Nn|Y0hIRTT_Cc|L~AhcKfY)%=9<_5Ik_aZ~3+ybmTpqxbB4Vc(VqZ}9wsrDki z#tV#N``P_9`G`KN=#?>q_k7tWzUNWu=H&S~oR-erS+yR4Chws`9Ly{-1(a(;{lXW% za59p{Gg;O>%Y2UEP};;l^Z#eu%>k|XSqJN@{^@=1d*8HvqO0l?@PEAS8Tdhpq3i8M zbSUy~Z^=wpYA_S1##j3^pjiiM+V3L(Z{3Q>kLibb_cuj_=OE%L*R6~P^a;d|&dH)1 z=IH-(pwfJcR0SmF`!E{Kfu_HsV}P)R9MuUg%|8Go>P|M0;~jI*-X!}wI}goEHS?a1 zjTk!!)()Rss5>lLkS+L&?yI(F1WmOC_YT(vc?n5W&5ss>2m#fQs7nrnhw`w zd@4}pA+|Z@-pox)Wdi{;j20WVlqMUN0iIWW-&B)L?_Vd_EBvps3sl{Z1IG`N{KCvj)#2v^!%UFo$Lxr0JgOd+;W2x4H9#i)>I%3e*9v zL%IWP!(;dwh{5ZDM#ivtS{{|8*uPdEXdA#WaxBQ-LN`D(kWWfO^WEeC@RZj9P_ zG3z)+^!6^#74f0C7+v_C!ZW5?jE-JL|1Z_dGXjc!r{vHTr~=ZggW4Y{Wt6gcnZIvi zGfwp&V~gq0;3-lE{Fx_jKAPYw9&SJ=`k~3Ve!CdC0Uhu2F3<6u;qHC&y%7D1Hjm?O zU*J1)t>X~@`PHv}b!ur|lN9vCmkd{+Rcamnu0{!el6!&nj3n>3X8sO@XKZ-C{o)!8 z7Uq1tWJKi=2GWep-$g?4QbwH5lL19yjW6J{$v_=F<^ke=_=kVENB1+0A>2<^Xk-G6 zjSVH10At4Wb?b(Bqew%42W0GDK!!ub9Lz_pseKgy3e3=-)jHt6&P_>vjnN!F5Wcq0d$eUy{T-n3q&^_6YWUJo!9 z)o3qz-qaCP8f2Yyku9Aqotu1Eo+baJ7=Tyi1(tOzmqMS_WhyjdAa{IDR8R*jg$fP z8B!f=INe)T)zPe5jyw_!9w{by<0}X*-}5FmO`erL6mlx{x{gAlH*Qo^o8F)DE2343lD`>s3_1DYn_3C?oDno-I8rLSRioihYhB8m zGNQDs2LM3e6YhQom>G5qXk!4nc!hObQn0RYz0;wuipS8x92n#&P-D$o>%9z8QH_s% z>|>L`|D5MMXBw*lY6|GZFMjbv$Baj`Km+p-Y2pw7%$y6g&B354rlMHJLx}-0qfr%2 zN>doyj|RAF%WMDgFauZiEx^Qpf9$cxrjBO7sY4d6SEr+oodoA=@9#XVX!D6rd}8wI zn`_FHVz!1Ezv%0kqIhF!$0nZcT>+Sp@8=GHxxrVuWS&xnCW8ckL6d-B1>eSuJ{%3k zj`5YMWB>pl07*naRAzWH5Sek2d>PlkGZ-tr+H?N)yZL%o5iQ1(XJ;I*a#9$#=i<={ zyglE%d{sAPE$r_)J61ceZOYl)I5uYQ0BSUf;3V^WW431j0S2Alml`gN#`R6AeDgp* zyzU$(`y4Rj!G8VgUw@~)b_R86cX*?w7uML^bIe$iyye!d^=;oXkO3gq*spkM-koX= zAU&h!=S0c~x3(Fst5Vp;{ps4Pl9z|9r7@C6TjQA?IiOSk&FMV+T0vQoU`!plHk3M5DR(94VN(l;I6Pmu;n)PVci{@RxCs7x>3@ 
z)jxWNIgM%k0qvSAb^cDEC$NGCa^hCEx&&5}`^QRkJY>Bu_ud=@ZWW-hsT|xWM;MTi zFR$o(pA*&#-;$KGMAJ$gV4DyfSf4Q>t6S|tw4>nru>0LKU46v@1A>eQ)HvZ&S!uM1K!$5kEkuS zbz`IgT$~@Cr(G?FfX-$as?j0?eV+bdo_Qdp=a_3s-96O|8J9UoS(3U`g9^Nhb{RLN z=*-vGdsfLBvdKL0TzezHjmJO!@sCgcFO|ib;>CGaG3%r4G8&XlX1=BEEEy2J2On9h z5%BmO;K=cV=6J|BYaW1hMV{uyl4~G9jn4v0r88B(H5$a5=y6K_ z&v_K659Ha4_N28*j^ZzCv#Q;7PM>NEJsZ97ed)oT8?Z#XWSIA~CtJ7VoN>6$x3A>5 zG23%MjWKadNj>eDgq#=wv+7*}Ql2+PbXymnCzrLi zIF?%jXe?d_H&)=Q8xc^$Q?#qcZf*!Clusd)J@*=4^_aYm0?MniYh6dt8HOWcl0wrB zYwSd44g^@j)ZYYMz@aWLgf&CPnl5>jmNs$p@{_`4s2G!!i;*{~xe@xWul^XtnH$sN zb1Cs!+)76(TH~4SqkznXl2;?caG=c1O9Vt+hV&jlVV^X~ZH1RH8=FWAKqE!UdT;<# z%S6%e$bIHBpP9xC%<#?`qbLI)zzB9ddNNu9`-~3vRC6POA#!TIyxU`4+mfLkIP^Zx z$>X=`T#>oK?{Dws4Fw=6aA2mhD6BgJm6zI>v>pDVKl-BySl8-(^eW>wbHop|D9z9d zXrm`kqXUoVV@4fCTsog;8bb;jZ!-$)17N2bq|vBb0yiVi;nOdC;R{oDxjCpQwQ_u3 zoq@bhA$X1+V=G;Co=(;nM&mY*DgI$NX(oXNuCX^69kux$J*~+PX(ky+6`99-?f-!4 z2w44Yo<*3d?{RP4?efqb;&4La_A_~R^!ir zo4JVmkQsP^v+tY>{*!@4B@dBPH^s}R{N7syXb26SVFka^&Is{sx}B|Cpb%4?TFepQHEK*B8hiv0FywT8s)4{5 zwaa^1npDR9XyjYV&SWZ;1SoXI2?G#q=BYIdoPavp>>Dgx(7gE$j%GsQ{+qm?>}avfqiMe?|toE zyqA$b8vi5zZsMEL&WmIxcRL#^aAG}cKX08&nYW(!#2RueRI`ZhtMT-X#^hd3IJIO& z%9pYuj%h0ewrT=^&+rqT*ws0`@0uO+zT{S!5j9Ylr(_=Uop z@s4*)dBbBN`^-7-Uq!hYI~2=O*mvn23}l_L6uj#kM2UH>vk#nukbuev%&Z(h9E0FYDf}Y{RGlXw|FW#x5DytMu(GT|-FTra* zbyze0vED;U7m&xmR=41r0>7#hmW&ZfteTEG%$QSaF_-OFwcGf zqMs?i=I-+f{Y5Sm<5RZY_1xz^cT$kvQ`*|Nn<9}SI_;$Pw4+GAl5mN*Lzw!P%+>n=?;nDbd z2jjwbg~E*G_FcvUTJu_3s}4m(tD-UXY|kqVE3h(-#k!P|Qp)An`HBp1jd2I?JU9B5 zdX;RL=dxF@@0syLdwQ3>oH3uQ8Tn!x9u#Uz>HsOU*L8Dz`H-AwYPBME) zDvoyojiPVr0-yD)XHDQ{OpRG3dmC@ScB883xb@mmU3nu zyq5$^IsUbJ_MTc|6ZjOpNX`HI|Ju8^9{HNG{{LSd%nuV!j6sD21q8)_aHFynaCd-F zkez@C0)imm20;{55Cv|)04fjx0r3Wuje-Yy;3n#Ui4VkoUMqRezgTon_ssOn^vu*G zlb-3dR;{Y5uDa@TZR=qV`=)%XtieGrgNvKI3ALp^b5EK zKD^IwfYltc@+#UOyvD}ooDmzIa5xGF&&>J&#Wv^!I-rBTHc}uekA!!dd+*?_;jIJA z&{lb5UZ9E**i-@YXt1`tZ{oQ;2sUGLX$3qV?KC~c*x8buv`(1_{*p9j$V#Ha2}bg zu4BktMgjK%4(J^K1&}(-G$p7o7xLA205iVGP?S>A{o_2fCYp<^)$Zy3^YVH;=pFG) 
zawOi#Fw-2OM|@J*_Plp_fze>h@4!!4S%E={4TB7$gmsdYTzixQp`+$(Y$J=ju{Lj% z1@jlU)DOIcC*qCf)2H>zQ3NbWiXf_rEK&z^5xrRr^1E`aU0oMR}-}}_>F|;{;KQEt*&yDkNi1x0!jrgIE zFnnSSlc(0F@6jL4J3O?oJu=HrCD6&h40VMIXW*kK`wr;tOtPy`W=6i!{|bI=%PYES z`mf(-J=j}|#6UB&AJ0Ha23Am}5Kb86CVHH%o95fzf9n()i}7n;``S`ji51O4eB>h^ zxs+7h6~BM1lXR8+8bNASv(ZwXaa9(Fz-o z@mY}smH6(v@4kCad)m`>dVns%2ZU`QCRy(E4G^Jhe&7QiSilkiD^$rwB8P@xUY`bE zKLQ4$!h&8bX+p}fQUz3iL>n(!n+Ih(S#|-sK#a{(?ptMwquCfoKLS_n8Czb5He=&T zQK-Mh@13lgz&MW+etgxdUbXa#cZOmK9FBd*$KFvWlJ??%*D86sX?{-Q=@H1S??cm0 zL^q~#Vm5j23Q_a^ydU_>-@v4C3-Kz%$9u^|fE)P5>tl60bOUCDPTF`&ST6L>vp~@3 zZCvyDTB#eA7wtDUW6n-b2c^1uTREcsVx%CO=6l%*7p=&Mh7)*Kjz zckp!LWrm1Ro~BHBccCsmE89(p$+Komh4&=gr>{p9f+3$|q7vL^CSPd?v8;r*}QI)%nUPG9vR-G6GX{?07zHatSlrWzP6ha`}`YZ1B+Xk|^5k71(~Vq-J5 zVGH-6U|wdv2;l+T0E3NiAfl0q@dD^(>h=To4S+y}}FkS@Q;1}JQWXJ!o944f63je01 zD?OkX=qul2eSaPZo2fY^|4O0_*re#-f9o1Jw2JK}7rz*_Q9uk}KK8xJu(ZJ*qvl#FBDM?u}B_|Lt0 zHwM6mXjkcH|0iR7R#8q%$q#h3u2SZ=(YZO6?B7xh&McVHIBFVd?k2rw|7YM{8wsT9E|w5XQ!v2r#ZKWryGa;}xC) z-~zWmQB}-7@rh3?Az*~6HQ^IIoHJwYp8x#kFUm6G5cxQdG(Vb=x$n&)ZiX-qC|3))? zr2J9jyc1~ESL?Xgd21VRB?npeJIGGo%wY%axnKL*Q_Mm8HedxS7y+#PQ91Yg7+x{* z?Fi^o?(%>nOS~&3*8CQNVLcQkJQ@Z7h7@#s=N*NNCC{~oC*oPx^)YXnH3(>!$CRf4 zN=lGx`eMuh{4wOFFcS3Pogpvr><2&i!6m;Y58L>z-cg^&ad{WxTj8LE`QRrxhvNaY zDKdo>rZ8>-pJ+jr@rI(Qxi8es`^(G0ALPCFxxOh}ZkLj=R!nx?e+U_O*V`D^Sh6F! 
zXJm;+eIK99`PrX)@)%{b=+IDWC|uz?vLM-0k&gBxYteVsFXJe~1UWs*&p9Xi&&m4E zTdC04oVo9XkFbQZtjnblLSq92L?||Z43IY-l6I9bqUWEs%X!iw3Fy?|tvGVM~Ok9BezC_@bjr@DMJ30njS$Xge?- zsKhVEjaPs;02!a4gCC$?d0(H~fZN#T=mF@AUktp;Dak6GRmynsW)aGG0gwZjfPR1o@G6=2H0$38 ze)0siIippcrNC|#wah2}blq7q#^V|QX^a#YReAJ}Vo|6?Yda9=9r#i?Tk~$rcyv^> zqoJ_@xYh#DoE6{LEAgFYM#x&p`&m#^Y|A6rMVHaDm6xMu*KN3N6E&ZBRv#%P=Bhqf zy8t}zmA+SLE`3e#S=V-=H#SJ*pGEycwd#xPFow}yru}GIJ7SEG#P{S zSJ+(R&0C{C)@$Aq;9DQeHJL(=Rc_Y&q$CH>wM#A(sz{O0zY2p?FavGSsX`d~(R{{3 zjcfEgmgHU$4m7tPQF8K1j=)pD@Q`;gaOAnpz+v1~Bur+F*7lUPZ9JvVogrggmRD0- zT}P+9Z=*6(^1k+W834;O(q_tOMv&{Y^j~yOxe3pY@-$x9}K(NxI?$a*sq0lC@5aLxB&t>piNx)TcqG-z7DQ83&mvsS* z@=OWUG`EyaidLmk$JqU>-@s9c_5i4RjAitE?wYgssJUwcPI)foxGY5|$`wHD8T3c5G2~>l#5Rr!dyL>z8=m)5SQRCyFj4oSD}Ek9t9yJ$ zZtki;G}5B^^u82U{mvsC-%_gZR$fNNs1y9P=bIo}04SsI@W`!+}Fr_^nkC)dm^Mxn2Zq_`JvYSV0Z44Nd;kELzugpk^Oo!~p2=Fx3dt%rmL}eJmG3E(9nM%AKF2UnG|8*E=9kuHo7^VFy>X-zt}W(cKbv!7j_xDq?Aer{ z5{K8GgT^2KcrJy>T$9nfEogzK7#@r>8d)>eCV9iyK*8}0a6rbSOyao|J?*2L`{lL( zBbCN(JwN>64?n5e7vFi0ewx$KyEW!Ktvzn*eW7_6m7J`u>uV!#N6*oFxV|2bpEoph zJ-^LOv`(SP+hl$^0JA~@_QrC2^f|d}PAZ38afGqFdtRoj+TPt*3&ku^y7jq_{FwJl z3S1@y$Pr<>qI=|brofOpLQyFld2TzH)Mt5uu8$s=){HfxJ^Kl4QYf#zluRwMK-*QM z9HWOaB4h}=Od8yzwr{OM6Bw|ONlM3%1g^wiC!d!uK;zD{z>R2A1%QA}%t+V*#AA5W zZB}lw%B?q*LQ;TPi_Qt<@-GUj3M>H`JYMJ}p8rE1`p^O$-~pGsKTe9iS~)(%38U=hAV&;V8XTdqaPv1gh0Kup$E;{mWG;QMZ@9Cq2( zNqxcF`21rZ``8i^Mq%kxa01u?pAME}d2)@n0zeF~nEyaXCxoN5@68dQ)7dX)QO?T$ zr~;L_uvrw^g%1Pi0kV-dN02xIHvKl2Ky4mFmiDozgL}+*A!pkZnt-3ami*~ivZ2tf z=5X9-)?pv5t*U2r7@=qJ*rs(+d)_yK-EF+M%WAg?)Nb3bTGP=uCEfq}J0`Ce+C{N* ztsFb|;t_JgJ+4*4w_*|{#Itl#^zjqk0t(P(tX{_8(4nyS1RXjT#dC#Lme(awpF)$j zR45|(PVpWU$mRwuMkCHPrEgR^PuuXLcb~h!!TNpip83Z&bmL!)iyvO6|LE zQ>E3LMdicw_Esx2S)OcyLMH$^00)!FpQ%$T01%?3P0At$P=E*(&OGm6N+Ev$V~MW2 zl#>!^v(&EvU2)*Gs zyXa5A6WZsg90C%`hF_~T=H6`^)5u5r0@jO4jCgPi^|MY2Eu?(4 zmfL(O4W6qgLSci(X?~3{KIvK>;fzTgv^&b7)@@417*>ZKc@ogC9FgwrfWY|1_0Hp2 zudlVTcU83BHCM)KEq0Dqo{YR5S!s>!w0$U=Y7afg_mrdzK;`(9cStt*J0&$^g*h*j 
zu3RebHV4{wt$r0+>6-TP=B0#bv(nqv!@h-AcF}EKpA@(V1*{jwThTlseef-8VDGj^ zq*RnAi;wXy?@HAZ@qCI%N_z&Ayw>d_d2g((&j0gFMxu;+$+E_FTd?Ov=IVqN|F^H{ z=n!wahBn&R02?5iC0b|=D<4p10|m$_E&!At*LZXQ+^Q=9bsadEmA4HKBir0J9eQ^S zLq7GDr&$awqE39aXn+h!!9uan4=n&0o0~ zv|t0Mb-YlNst^SP+YsI7z2nK~SqnQ{v5k{pNq}r4OjhK=DhQwvoL>akXdCUEjr6Hc zeQGI&pg;wor?28@$Mcc&5$%n6S6R(nyp=M8=jF|KC+~|nAsm7EkAC!{3z$bs<%?Ob zT{Fj35Cf(NYJ8vK>fMDRjR}lrrB!{zYvnFg_R@OvzD}c0S#Ew(T(^0+0&qZ@62xdq zxh@pO|8k1tc~NK?EjERmt(B&=f-#O6Kc`ryP!}@L_vON%b7uk_LV4ZA4`;bvI3eY% zsy)SjTc6fd8*K^=x{wd#;Hab*28s@Ap7x#!iR2MI&BK>Pf84~^%1;}A-;d9Xo{h8e z>RIS_$a}2^hBn3p{U5`DlgX5TG3+rOZH=awb`BJU!@QtDMG|~xZLmC>!@MzTk5%)H zFTB%SCx0otRgSZ6_1%0XyDJf%0+SI;zx2mGJQbQla?fn*ycDp87`jyhwSSZ=C|^&W zXw@F^bZaj!da|X@4xY7M?ZfL3_R(lF?yVX5y6*X~K}MnO&*-~0j3_`{RR91W07*na zRNWLgciwp#pS#r_ng*IhG@z^?0B;N?1>i9z&xnnic{aIdZ zv{fZ)LkGOqmUPdQv_0k;>_ba)x(WDPWRVr7Mx=}TW)&ipaoJTu*Sep8|O0SrCp2&+q*ag@O9zq@$x8xP7BxaEX*gm0XBkv3SZB7Z67N9wszy& zJSp12-x=)~4PXH4VIQyjh|vo3wm!YbI&VE%*ZM$Ck8*S@tG{VYwRdIg$p|u1|JV#} z+c@J+C^QD&#)-)SP?Eizv(RCFgsp277@)@K%QlK?{%Z%L5ag`wPMpU?X~tQdysN|e z%Ej2U*^UdWWZJBhRn*$GqzfIWfI;s5(ei0C>pmU<-X{yx_v23q);1KsK(6x-QZ{T% zRX?MUbe;sy7a(X8bMFuQzz-}{Jy_e?z{bS_w05pXF4fTr-znYt2NZVBgZ?!a!1wy> z7T2pz7HB;S&|X{KchUZ|>u3QCvhEnGa;!WvRWu{q9A4=p=imIz-&_RcrYb<^?E@N% z{e8<@-m-Wfy!)jued#i8S`TADUYi>KeQV|)C!LzL+x9gHPqQ$h2)k~ROP!} z048tgnERGu(mJSWTP2|TzdV_8$ubUlH@VJGc9-Y-D%SUHc}I~R0sC=3>KY{*Z;~U{ zNs%H&)@mVD;dR(@^s0tat&1>Q-s+4) zRo5*4uUt3ZQ@$AHyemb{I)cOC&>|Ds!)v=@2FZ|&Aw`OKcR8)cZe+MK3eC7l=6)}J zyK@RKp)fPv7>mF}`os**QXz;F$|ks&NNrUz=&>-5*1_<{g;3Dm?(F zGQq6*`@}?dIylP~<^^Oic7RXPbK!M?6!#d1O{`PS^~vI5rGC{Iuhb+k6kwV{wG%8S=<}XhH@MP`b2ViA z@E2a2XP6uO2@kfV`aE5&&;Q>h#b;?SjU>I3TC;(eXrl;(%A>$4oXwy z?W_D~JsR_vliEJ#IqTWpXzg{zScdbM2*VXK`*}*?cjWJQQA_u>|VakHQ4Jr|K|cLh;%KxCRAn5Lu~5 z<)JW_lqb*XFC`7gF8MvB0YkUQJX1Y&W9QLQa@d0NV?zWWwV`Skz^o4?eWuNip-X6B z0ossR*i#HUTLRq)q1u=4(YXiu=zRgKVn{tpQ0S|7S^Rm%u5$6A7a_nKpu!uak7tAc zB~Vgk{{HX(ehG5|)*kYZhunMRD_^;Q@v$81CQrb=!U4Ys|NGgW{n_Q6N;L!Q0Q~*; 
z-@gbh`A-ji@Pog_YP0V<4s~C=y|(=9V+jgOS2mi!Hs5Q|itjK zV^*<-k#Aa0-dC7UN=fTDizNKltzg4VS|2x&e{M^ktpUa;JYJPO{DzOL1-PI{ z67mGLVK^lJDbGa$B*WMI-#UZKL^+Uiupfn{kgp6qr}3=k^c5~h(Xh@a@uLT|J>NZ* zZ%&CYrgBRCod-Ousl5yap10Oqv%U*4E&sX59AofZIgxpt@~|3z>(=<(YmYsY;q@~3 z@J=c;5weYVBy8Rr*Ye=RGz7O?2pW4@F`;gT&pj352cA6={#9iSGY*H`pQnXyP3OJ!O;S3;`KDuVdlhDhkx?AjEY6YaRy`+TRL zgoSsYLmT9%AY8`c8QaXFESyU1-lWCQ9wYG}C zJqy67KWNEY=rB_>eDj;%yZ{NIhUh8}ih`48<~;AXYe9Nd% z{+GFMz^O1ib0oKh=W5K$%0p2Mc8dBYFTp5FHqrAY)A+Qx7(EAP1;m@$9XEAKIVF^g z75d84h-Y_&L+#5uH~GHysX`hva-mHighC_YfYLybx8}3Lj-j0A<;@g)`yv2FhLJaU zjK+D&t3)>W$rwdmj}_FWGINu;@@o4|E-E?Qs)~ru`K@;s{Sht^5ku5_Jn(C9jg+u&LA=92z3YhO$J>_>=q1m>fwn_UO z!^BF?#BdlX%kLMy@P);aSiXnrC5y4pOT-=z3QXG_tD@zSET3Ss3a>#!z+v3P3d!ma z$4%DMal!R$v6mGIuy8;4!4EC~xR^@s7mDUE#u7GN%R`grq|MYA{Afe_7A%)7rOk$C zVOV7Dr-T$D=RUw6Fb#NS2`Mf#WvGqN2HBx|7PV(PZ0{--9oks@<{l5c|NZY@#z<)( zY`^-|uP&YvLR83;d>KlsuqWsBusXAtu3gq^56ze>b=szn2ExsL@fUyb-oO0Izbr~F z@0mDYbBGs(<7LGPy?Q36q?{lB~+_J$i z1ibN$Z#+3n6#YGePi+9!ul7f)-dds6P8(!ZohJC+pHt90AYh#<+M=Ym^&}^6=2m<& zE!PIQaV?im$aCUv>mu2k2k{VVKgFtI9T|~EP}n|dtrmGO8rRl&a{F`4-MTXzum;ZN zz;|~|)}(BZ(G(gOhTNsR2f9Zbl@T-^1peV2xP;vHoPBy7VPgFhQ4-FAOY8~yP}qKa zJQ^EPR`e~SwzbmV`qVw1E0itoi2hU5Jntu^B#i0)l$?|#pH)K4D6iiY?=W8N;TiW; zti%8IJkJyfWACdNNfj)QIV5x{G^fbKO&*+pwMnnkB<3rx1mpW#xJ2PqS>G{+g@>8B zJ)icpr+t&Q8QFX9VJF&6XjoMG!}0<6 zXbVu+_EmWv^he)>a;b#EqD)EUxk69%>r-3+wBPuR-&m9yGyu*V@+a?zw<{jm1TwtG z^E`uj0Rts#_BGEGE9c25*5F-=J5;Elg!HVWhX6I#2`dO-iW}PSuCPL9=_&_J|J;Wj zc*F*TXDFJKA^-XCKZfG4@<1<9+Y}}DmwK#I} zR7fk&;xUIhoNbhse|IvtWa;(ZaL694y_4HySin1j+_)DVvOl+_t&C+OKyD9gZ_Lm| zX8CRmg}p6wIBgswZQ?Jy9w1*o8;(LZSd7sX&HPVh+b{8No?y>wR}=^XB^i&-VWNz_ zqo<~|H|F#dN)N~Q^}M|*1tq1w_mvY|ArEq}kkW9LwOzQW@w%RJ6MoVM?{m%gG9nv~ zJ<)YzE~g02CIqqXc0n z)-qu(z!XNcF#L?x_%??uTzw|Myd#%o?`s3`4uC7Lo6E71nC^@5wHH7rj z09m=_s|a17Le6OC?$Bn8l>>y zk7A75*a&J~8EX~aQ-XfxXMX17;8zL_g(&M2G@XTCQ~$%oH@b7Aq~z$5?o_(FyOG}L zM(GY|5K!q7knU!nbT=q5y5r&dd%d1NVRv_*-FwdaoFjgTg^c`?O}F*|c5kd_5p#`2 
z)^4#ErDu_Pa~$_}Uv)Kqwh2$?unU1^IqgF6-J|NoqcoZB+n^&9f`KawPwKw*yuHT20Y6AW#nb zlx4k0co}l|-4Cy*J1&AfODU3nmhPa=C zWf^;`QkTQ3q&a!uzTemG%05C(>3RZd+J&+ieF}SGB1RW8$5 zQ|jq{--BZ}%qGn~Vjii`>**I3Vv1knnY9d!w|@(s|1xyW_<~3tX}&5;09{)F|IN@f zpCa}Y2mH*%DRQ{-&#@sa7={Cu7;4_U@aCVzxj}K*L2n^_Z`NuXC)%$6i*lV2#LpgG z=7m&Pj*>H%#pUxi+Zpp``n1*IjT5a(8kyE3`L;*ZFn=M6eP1NS+xXa8@X$)gs?%;_ z?1A!mZl^4z9(u?OPty~PKS^6LnZ@9Y7ZhmRjN=zS+o+4CuZmX}NIPm+R*(&q2qX?)rw*ga3qXGh7-EQ_L~3{V&zM`=}3ioU8;)^p;_Y*DWYU(q>%8 ziP9ZIv=k7!FIqr?YFpQw(VUusAIK|?8QZM3_(C@M=$CMoQSX?r3GmNsy)jWl!%Fs$ zJNrw~GhO)oxLsmtG{>sucS2A%eWZ$>1Ap$U$kIcJ6OUl{y^eWBEL-3AsiO}aD_wxt zepiIDuP^^n^qzWevKoZ9L1Z^t5dhZl=pN5qi$<0RA}2Dm-dsQ(GSbSvg#qHsmIC`Q z+t|Hwa(5*qX-u7`CxT-hC%*kR$Dsq-7}>~KFUzJE_MLw@HIcEt_}!Ir#ImRY38mgc z$fGIAdJ5Dca$-xFIa+6c0%D5$J=;zepvwEW73#s992B+zFKR3}Nwz%&$ zsh>AX69*+}gr}x&ee(Rns6ZQf9shJ<(-m?;csLVjgVEPS$kXTEK%kl@V{MDs_%8Q) z25~zms>es?I2es?dhY3L95Wp}y*pwnr=Ju=Et7r8N>A;GpW4Tp5R+1`Vy2QF(h)i_ zsq)gu6tT8R?r;;+8=MR1Dvnp}t=6&J4EF8O`xcGJDuva`JkO5mp!VZBJYS37pJw|< zz%&ipVQrh;A8Gq|()v@_W7{EYCjcKjmgY~fHcD|9%eUx%IDZX^*Qeg_7+ z5!!pYWiVJt<$%67A%CZ!exmF^1nZS3E&40i914dd`XUU~_^%!k_nh6&cLi;t>hw=t zO}}ct#j*r`lYBh;lf;?>2|U|SD;;E&Y9(6boJ#66Go3$axckTiJds0wC}L=Aqj6IL zP|?Yb>67Nb<>ow!xBS0FYp6rN?7OYZY!R-nMzlvlP^u!33l*4z6EWiB*U6NC3cVhTpE|bEJfn ze~G&^GTji45ITI+Aw%c%+^f-?$3#p9 zpSuA&q}N*ClnPhN#r0`s;Wf@4s2RJ^>CPM5_@3^ySogqr3&&?y+C8cB{D@3D4>LnA ztFXXWSTR-4Ntq0r0?;zqr~de7?GN|RYWA&dX(|WLY=qOspVJsL#FKLGh4u$*dqw(w zyY-!x>@@wbMasbLUjRDxriO)g&i1X#lE5u=N(42sUTK8PJc`NBUFVIb8)%*nL&1+j zrH`loJpsp$j(;QlLM@@AbLOj?b-VU}{x2uV{tI%#G;l(*cmTXDc439BWf=G2Hoq7k zYF}vyhYX1j!!xg>9H{%S2YE$)8t~>|Zu~?XD=U5erF(F<=G2G!IBXaYqNcBMA(@&3 zLCfkv1*k>O8YIpMEuw=c{m+l?nE48RVPI&hq_fesVMx`(b)7#^XembxP}2<6#DPb= z0!B~KIIWFmsD$J+;35AK0%+g?W|1{q>KcIKHZrnA3u^H(W9`qc1S{>vwaP=-yXUMS zQQJjS;n;y^{IxTxZTSiheN8G>yriwoU%m_B%<9nIfMQPMVN>ywiamvw7*R4YhVo;9 z9BIt?!S&ia--wEsa|aw)MI|X~HdL8*)>pS^aYvVRG7WGhL#l3xmR#wf^RKw&R9PR3 zlJV~nW4xXbx1mcjse;}bW8|P`$ijPBTt_^;HZ3A)95{N$>7g75Dh|EzJy}lg_M5h0B5uwfnkx6@R 
z`}B(OuKE5?m9IBgP!GDx42ISvX_)@EaK?UFxzY$rTXaES7#I=axmopHf9oZ3IU~A- z8ulkNiJxctpmcfn}GD+!-5{Ey=kG0ZZd`KD|tD! z0$+2xIj(`G#1BjXJ8`T-Ok9d?=bG{Q?Au=TYYRi7+k4d?Yq-<80bkIj`i1}l4kpWqRY!L{tqO=I4z$7b3 zkML(dZFN<`#1i?=5s_)z3Qoz48$1@EYVSxdeF)l55IOv-vzMbMEuYRir%>n`qYJ)c zTZ|=xzHdgLiBttrt#QKHN9FmeWjD-+G6VlOHB&6SM=gmft=J0k ziM6K_-}V1iYwJ#tkhfp=L!Wt6sVLethgUm4Pu`|&$!esuh-c8+GCaR-7`6gFx_9i| z{HupmqLXLBnBz9t8eP{d^josm{~A!f3NyUxc<@(tR%Z zIOp~>SKr!x?BXUO#A;*G>ih>%&$tQ3AfRH2@@0>x(PTE+F&T^)s>HPFExG+;n2;dK z<59@Ww+D_K46HJEJcdd@X7)^Ts*8v^chMWZmoHR4tO*}B1k_of-jZa8=fBe(Br4dk zIX}a5q5g#(U}YK!LX1e4Z^}4Ric#{&t{l{wV-B+F?sl5wDdt*_yc4^m7{Bh!Mj+z> z^Fj#Ch=``Cgu?82sBxDHcC=Ftxul4cpIoq0fZ8_sklj3?~-1d{qFQO6nvyjuU9cmkzwU_j}@|vs}bzC>9E^T&C*ciYHj9F_sToG zRR!~9#eD>bhRR@OhSZWi5#yw#FTK6)P{Fn@lKf=F!2AQ*t>h=(X#BU!+QWz;efF5Q zVfdQj1Qdr8oMX0gXWif74On~9$fRuVpH5XJ3@j8iXH_lCuU38U%UrsRyB^g(I<^U_ zx&PW00)tJ&3CqwM(!ezM%%`~@E6(v(aFY;UQr=8wsvoT+rAZesRGAEHiH;$#VPTQZ z!CuL&<5=GarrqZUc-PH;3-e4E@%;f-FMSe^eFFrI-@oWb>&Iu@^YbV+f8=faXG|k1 z^Vuf~L%dk<`M=3^&13l%10kUY47;4HQ)wz}Pg9piD_s`f3B4H0^w3Jq(a%4;Dv#T3 zK;*6RNIZU;DA4990QK_D*+0myU+}7C5Tz6H6AN0H-Ky4z*99fD0&H&2<$9b?V$Y&- zMi$7~bxJx^fI8vF%x`S3W|7aN+l?xA@YRqOms-4!b0_OG{Y0&m<7j8Wda>G@uf|LQ z`8@xY{$acH8XBPZ)0$==x_1Kd_H-FC)fo6i8AENyy)28QGe36}Vx)lpZ_v{rs!Xzd zNSP9NxLvru(sK30m0z(8jH+|}x5O9C1u$mO++p#!nZjnopEaMfNcN7LI{fk?Qa*)t zhu2$@-$Ff#G4a#W8Vv=r9Z6UhKV&un_#DEDiwjiO>=*!`IYdY14Cu9`Az{c8v7f8n zUSe%-WOIYSyWxc_#wC61^EAAyePvnWY@gY+M={3Yxd8@Qn(Jti(iM1LdjDNY_+H(2 zfO)o2;B7w~UJFtB-bVP?|6_tD3IQ;H@^A*PhLA%h+7TANIK_COhf%%G`ke{VH#E)x zKe9KmF+Rc3Nz)tRE?yfXuC_jqDY=v=cp0-&M}SIC>8S69&p(@moHB|1{eTG>UpVV_o(K9~X22nBi;}0S= zD&0GbAd7|QE{t^`j#A!kI1+ZIn}xp1DaJ(r0wE8J+$Y6NvNuetflj_(f?o#qt8|6Jj| zeEp=ZuDO}q{KviBKiX1uyDpWwyi#;OuGSyF{*VpGAP(B!5kR{TKq!z{8G*)UPSJeX z*0>bw#5gt&?_jFfF%Xd(km`h_TrDY0ADb7XJrW@az8qQ)eUa`1d}HG!qC`WoMyDdm zkxccCGnaaC?nE+uHzP6>N%VywCG)#4=&cX2Qee8SFlDnx68jIFMK6;A%=rx>ngDcT zn$>rx1e#b0=quSw-A5*Hzxj^;e4$u8{MYtHYEX6PhVJu&Fx#`mj|jj$LmO6Mp6F$H2I{ps)`!~)xowWgS?-ju;;t(S3kdsq0b 
z!?#kph2BmqA1%awPLdJS`ATdksa2nm=QP1Pd)-GAa@-i~6vz|u z|6Y-y?-JcMR`a0fwC=N=I}&o*^c*jhUB}^5pE_=(Dtc+zuBhP*&s#G1CPGOFVusQ* zsbT4YxDqfsS0?02pi^lv;U>|`_7xT#r1EhHLfPoVB1%7Ki|C%;(5RJQ*4J+XtF&Ka~Cz5YRPGuPbd{TU1TIyQ8#BE%OB8G`-|+y{M;r0Cf!=X%Xo;}H|reK5;3K}&yI`R6RQD5cXA&UNpmF4F#_9KS zEVq$;iI+R%ThZ5w28p_sjZlzwqrAwbOGs)MP~tB2^;vAQfjI;`tE0?|vo`P}?^xt! zpGaKqio&m`D}a79MAk%XT}=q``tgiu3y7~JmcO4Y?FJv`#-}-AI_J*wYU^_ z1tG@I+c z_p^h(61xeR5j5nHs@%PRH?yy>i2oE=h)oDVE|GNFt9?>v6Nfj4QyM-%r%$!fF zGo?=^Fv2ob(@N#vewoZHj#Q$F3!jfiaSJqte@c3~A^)mv9ZLvG^`y3jWM7FaI@{w3 z%D4Z!HaU1shJC&Oy8e<9f4yUrPYt(-J9FTey04efnP6|N_(c=MrK(tQ#?1%FfD8`e zdTr2N`~~QZdRfE`?S(E6kX+nbFoz6XoNkdl#;FY>1^aUSd}r5nGy^$DDG+;2LK(*# z?;zLdt~lYIFG3;Dj1RA?a*H3%D0gddrzqmduYCep;ez4SaMHI`s5}V4%Uw?=Z#qTE zAwxkXS!T;>#Sk{xIIv$Uia@IbZW9S__f^hN3irRR%!+pzmHZ z{!hlnEspZ~NZZm|F7C(sBZAn70m;u2y15apav!GZ*#Ie`JQ40M)LJ_9R=2O8Xju+A z?03C1-{~_F&+YRW%JwMnav{+bhnNQ>%Pd6DVw z>V`+_Dw7uzD@sDVWvL7hA*cWsv;O_+F%6o`%8|;GYrTAHf7w9}CoK`_31+!D)Y9ml z%0EYEIj8d;L9qJuMENzW9x@jbR;+@zunjx0B<$<%Xt5)U*t+vZI;2r1)e)zv0*2ok z|7=eL*CKR1B#s}}A3E0Cj-GA-xEef~rvG_L9Qxz9rnd`2;y)n|r=Y&X4w$Ko?qc)v zea*}LPOgkc+}6V@3i+_JLKPjQgv=$t64kl)uErpyFv-bUn5w^R?ZcYbWfAUEMpE-w z*+!m@Jx5M$+tLt#--etWh0WTMZNI%DzWaDdr$vPf^DD9?8Xbs8PJl4LD4JM`Zq; z?|Xe&lxr<&AND+>jEoASQ+;-L%{CJ>vw6k6n~*QY257t$PC8s8_%v23N%%(c&Ul%v zMUvaEzOyh#e@@Y}#h04i(Aml>-q^E%2r~s+Tot#cB$1?(ey58VO zpBnw=;Y+Zm-y(xx$NWQljg0A8SUfH> za4@vGsn_GH|KdT z&$Sqb*w{%2LEpf@v@W$t21nB|`UIbNg{<$Xwg3|iTs5;U5l?SABTl%PdBO4RtOUY+ksr$Lcu6Ag zPoMr%c4ObEQXE5xzVyaoKv%ww#|8MXdDidq7(tpfH@mGBbKqb;k3>y-dE5!cfse=8 z@jmuEhM+*VhHFCR8q1{Ch92u|zLz=m{l(hKjDsMuMt&5hcy8o_pYja{KBp@X9L(Ai zDl4ABn2cMI!@3Fem$4$Uc%EIlU+YWMYTHychBw~Na%=n3g_f%4cqK0RC4=E}I~YP+ z(dBWwq$(Gms74Rd1(dpsn(vjxw<_EMw%^41qC8Dy7RjIub3^lPOk^%PFEnWGLO9Rd zF@o1q5nmo*FYP)rE zYrwxsD6r~OBN7$Ep~gP?CZ+9u8p^4`OQ-Gx)Qg z%Ae^Q?^ z`%}m?Lgbp|!PI zpZXveW-Y2n{H6214C)#*-xILa;{x{vAaFB`g3=NGLQ$y5W^tWL7x0dT$d+dQ{s1))@u)VM$mplc3I;no*% zql>}*b7II%@LF!2u+@i0u!wzzw1O+djff(&X^=dLY!8QgbfRs~jw=b;M?^ 
zOK?dN{DY0FhlcETK;4(`5w^yY-_OPj&wpojdbuB$Wt}vfPPgMBN)Pws^JA~kxSS^~ z-cDLZDyjeEcKJfMU3$8O`-Uu1p!u9ua?=L(9m?f9IP5nY<#^P&Uu5y2c~R4zKN87C zL**|&?i?eaha7fj8DEH7whh#rhyMnvT4qf$ALXXeLY(j|xZqyb5)g+s#1Gx$y z3s>EWrkT^2(!eoHt16~o=fda7Zh`{x=IckKz17TeJ3adkqU-yHi+_VX;&`l#v6z;a zZpJ%ciX*mIKR}m7cLHz?BFt$zbHDud$T8D54NtLJ-4vObOG_%%Dd^YAqx|9k$tqeN z=EQ0{k^bMFPhR6;9gAYpO=Pr`iZTN-BVoOTPYp80DeTFKpN6Z%W0#pD~CmGyWh zwtq#qyJ&`~k(21i=}yzFUJpl5l7hUPb%doQ|LNaB;RVdRr=&GXb};&jab6(~`Z&D- z?~9wv;JZvr++qniJ$iKU;;f`Uy>lZUj?uD9j}{1-`8w@Hbs~m?EU22|n;{<~>L`_B zUTwjIr2W|7CMiVa2y$C`tcHwzT428YLASH}4H8|?R&z>V4$}J@pqD7#$vn!$3XFLe zjWITctFN1&lPy6NYYj=YQSF;u;Ff%Iotu)Vgf49~PNjWKz%Fa1I)U_HO&H*R{@eXS zK60OYucNdDI>k~P#h&73nv6_K!7iYlf1)>lxuud5De=N(Tr5)Y1P9sE%!$Gj;hs5P z$CP}ZrXp8`AaS^A%8iyUeOG)=sITk31{NWbU+NsUHW5ffKBvy7!1T&*DToBbb7IgP zV*S9Wd-rl+BSQ+K29G;m%9}D$PWLcolF+Hk`Yfbju>LX2$$cg(fhlr5taaSyGGzbW zUgU4U;&k-%+2@ZlSK*G(KKj~}chgvZ{mfZkf@jo7D|4j5 z_HdnW2Km$n8$EVNt|XW)lC2l2?P6x>eRyjq8~W+Tj|7G+=-~!p`xaG964d?qiy0|Q zJgQt6^ar>Jq9jFxY1p66{fD%&uk#k)PX_d+-AVLuuth6hV9_z{Y3Ly5N;muoTedivQUiB&(3p3r8Y(Z_N0 zW2oQ6E;fj}TIt7r!b6I>0*9m$QIL)r+60uO-!GIgR7A@PBFLDb+Y52;m2z=y6OD~} zT0ZM*P3hZ{;-=RXG^OZou(P@a(kkokx5p0jKN1Q{S6qLdb(5J}D;re;^YM@1*m`oB zJ6Q=|UpP)p1gP6PpIDK7oF6UfDFF4T(AwEce#%kc1CUoXZ6#wi6w>R*M)4aA%O z%sWof-kuex{ZXv+BxxZL>S>?`ZKQ66hdfG~LSsVTJ2EVVfl<=Sis6FZ@0^~`WzwG6 zGaZu4=t|PEPama!tJU2?wd{{Xk-O2O{vM@Prg8g~<_+~`^C1C0Yuv+vJ(tbDT z0&x`XoSe^m70juS4x2;zklibx6Yqg5zHk#4+VMGH<4t^Qq|z{5WErp36j!9Yv=s*S zeN{zn5k3qVmm(xgVS2@nIk!Y}s0y=hs&gH`KhbHoz3^F-zCK|r7EIsc8%H^487GN> zT$xmh0nFN-vz*04GzWJ5;s0^VWSA=e4#wu0wrB*|QI-YhVJwSO^NBDV4As_CY)R1Cm^JLhD2JC143nA{o7pry^A61AU)GhJYh+J zzOw71@N39nRbay7`)W}AkB+)eqZ9jCV`Nh273zy=k{IzTM1xAnJUSV(nC+5xco>}P znZTw%Keqb_g`LOI@}jAoqBB)F1k_ZA5{Q;}T=D+*IC+Iy^t zbKa;x`}stE#O^G%c%d?KlPQDn5P>cOkw5}(AZ3-i1x3#Cn)T;zagwxY3n{NAu7=9Y`VJu(v`%7%cS z#JIxUynMRDmYyy$Z*fnl)Cti3g_v%4&(Bd);p93SU6fbB=J@h`*P);^gt>Y1P~ z1f?C}?eqnL(DsWtBk9m-t>5cbmpx0VDb1;(g*4@W&DM4% z>*ge@2S!QXIYr;IPT!^sL(~zQm&`wV@HjQ=^uS@e%`LFWToV_UlPHZbj&lr_kU7kq 
zdSE~h%r~gGmrQBxXy*p4=G{4e)U*w;^O~8n*Z#%e?D@U#{h-Y)-+nuP4DJYe`h4tO zNwho?dLJvBW~UQ0c2AJ)Tp;*DAhtYOLxBT}f-w!NQ-fJDQWy1ej*_Vas#YI4 z;t)#&^3QgVjKvP5Fl&I1WN4E)qGBvp+u|afPJHS=#Veh7lA;YH;t+$(H#FMiyW+lE z2d&X}r4d?fE)r{|O_ynHr&6vN^!lONkklJ?lKYj78~==a_7K|rXJj@e?l~ZH@gLTj zY=VKsT^VmmesT8N$7nlJ5vehb_^2Lf0Oh-8Hx;!zOukAeDx6y2CA(vmQ-kypX54T+ zo}w)x2KMszu1*NxCg44A_fsZvcVe1Y&0Ie#i2c*Rq$(mU82Z-4xmzyXz*&mpMJ*M+ z3DA@vmN@Nnks0VdVM-b;1)8@aat|?Pw%DQ3!^tb~<9prye3UO*lqOFvMEVY!7S?We zrb4wU&i$62@E!4Hz)yFh5V4DmPdbeO6>!$<4kS-|?eOCv=`kqduQ1M$;j1gel)@Oc zNp=<2(g^VJx`UB5&;KRo9Dg|J^OVy`TCH9zh#=BS3E25_E3td`*rB*7efku*5rJpT*Kbt<~Aac5~$A&BR@E4oQNg7k^Odc+>o#({yHG^9emr zdKj-ILfOW1Xq3STsIdun!ylqNb-q=p6kT0)K{sV!HfEG+&OtHC+vpyGc90&z2%PHO z^w`m;FGOopJz}KchA@@1={nfQ%0d$3d!Inf_(`TW^ku_D{2At)0uGHSyI0J)uoXm| zT_J3&@epTe#EETuMYhe9=+}N>*>9hN<^1Q!FM0NROSGBYsf)s+()C2KA&0My z%e~o36TBgSdaMj&PHI&j-*exqkk*X^_PeOBH9hn3C?WBoN3SQ$m_J)}*#c=O3U?B_ z_LQ!NdJi4tjXX3@h)Am@aRyaQ;B3EPJ#)Djt-cF1E-(n1ro?R(NIc}&bxUyvq9ptlU-kOtgo12c*&e^&M-K3AzO?YCrz(?XKbtxQV1ru9e=|; zMB3@Av)vUB=a#T;N~%Mpf|&Q&O>+Qx7UC8>LNGfmRpyp`G5EAdi9^oPSI_mxV^EY68D#LQit>`((*AT zh&%(9SS{>Bts+b|aY(Sp{4mkB})4q zfB>3EuA2A2(g9vc-i_l9w#_xCfVE^B)>8Y%b3p6~DwkG*{u_XMk?s&^pXykCserMv z0g2oQHMOcjG2Kk|1Fb#U_muS3FyOxTce{WhF=T|aM0wA5TTpD~PJTy4?A`a(sIiz% z;|^v36GEjr>Z3*)ur0|ok}kmBxTUGqYDlL zus#nv8bC69&r&ourc*0|GWQRE{he7!*Z(yl9p?s-bJ3n-x#z0S)xD%$a@I^SW?kw3 zb#3*O`X+k3L$PQvExIMcA}8&F&n))DaDLX@bYr+fz;s>I=I z3KIVyb!u?M>0wBUOJ;I=v*_;$Rf|&$bX#mR^Dd~1_rDMytT>~566>@o;O9t0Xs+0O zE4(4f^YCAVPH|H7#3|jb{2GR-{DTJcI9Og?)Qme?bjf0{eQv~4o2^nWA~{H(I)~-! 
zFW>$TdZpv4k$dD2F|Li-Wl(CJ5qA`f)G8;0ISBugH*QbdnaeX0Z&d&AGmJoticjz0 zy&@XTw>*}7n`9t4m>0c9@ehJbR&IUYA9Hu$4qJnP^gJz!Y2JfA#ZFY* zZh4KxD97cfB*W>riSpf;p^8C8Z%*sT56VwCSh{f*xi`1NqG%F?Jdp{Bkbb(1l;7R$ z#`x%Ak}Xhoc^sNGe}T}}TW3Ptlln@tPwM*+2zKRDq5u>HsxdXTo+kS_hUI513wnjQ zP4TyN-}S&)-?66>ciL{{sfXhL^!4o_Z}<2=zq(L*=xXizm}M_Tp^W7nB6xEvRW{K# zyy{%cEknXhAC7-K6BWdrkC2Ww|F-3RfPTgm`nlHT(lE{(&fhW5>y$X#Y0k=kP2Kp{ zC^P6V_Fem!q&E(KL837N@_6=BJR1P*SOn5XfdJhp{ir!eyKW;|N8&eUntm3~)WV`_ zr<25KVNc@dq5XdH&y-w&1WW|J#49(RBo|jg*_=FHy|lt_I=&&3(9{*b=HuCN%~!VL z*W8p8=J`H5Xd0S}F(C(rOzy)P4g`P`SBD0cK*9`bW!0YUZ+VZaR|FRoE+@YVELXuI zq(qVqXN_nWwBLV!FfFP39g{>ohp32QQ$`Bg@{KQDdGiLn7EWe9^Y%~1b!5#VTf!%n02IBJ>*mG7|M@khvg zhY2&(Vz9_ZiPF*@548D%Ir4Gh%XaUL44A##YF#wdL^Sj5YTK$rwbMM^#pY)VcOAA6 zUzCvjFM4&zT|^uy$QZamq=X9 z>ip3xK6M^I{1BseokVFL_EIT<}cw>w<0fx43M+##Wu^w?VT5 z9XSsTYA14@G1$1C$CO4bd)%YLhyVVdp`C@b5?AMh#daCV>H?>2s7=!;6sYs^*{SAaHq$C$wcbXw%6shSySJBpE7ei6|Dw|&&6Y2=NNS3K(% zU~1`8A z=_7xyzVfvJ=8}I_C|`0;Y&wt68_qTzE*zisHpl(5lVP1UVxM>lH@-O8`P09_E$Uw< z@`JgnVo=1qUTPr_gPq75u(>DADbQAOS{{GO@(iVNtl|TdEIq50J2$>gQ@^AQdvWmb zHU8vvX1ov*dO-b9M{OjIi8#*k9Yo)aTbqpIwaUE?lfX#jrt+|N6FcKX* zV9BYZ_bz3qm_m?6NK^~H|3mT-Scr)V=n3Hx&p1 zMQ+n>`o;1Z+m(9eGH!r>4opnr#LrID*&r!D4p0H;(OYs{#DSz0V<1CYwn1mnYA1zd zG8B2ge{`qDb-0RgU6{V(gv-3u2H{Ydhak4MTnP_+k$Y?xtz&p8OpK9b4_6G~(eD4$ zgd`buOGuD2dS$W~k@%+Z#HP7mDZXDHb*@m&QY~(!+dv~MqA7M@pIw%01<&J59+^Rn z>Q{*x?WE-k|EX!TilOgZ!%J0B93e#h5?BJ{6x<%M$qx&9M_1tXdZ@R8vM?ql^4i|~ z@ia&Lgm?*vBZy?(H6C6rs%p!WD{Q(M^G{GYBky>}(3lhc-?%g86o>3*&5JlYQLDu` zRy}e_Ud}W$?_)6H9{w%z_^6tFI5iP**ME{jytd?q(QBv5aQ(noh9zETqbqHC5n2iSlkyVP^ATs#QZcn*Y%POgS(_WQ_ zQoG*3*U9X2W$s%_zI_q+nW^L(8kVjS48_Rc(p+twTJe%lA->=xUbVT*!=N9UQ3+ty ziO=48-Z^Paa~nV3d8R*BxwTWJp-D)-Ep@)Et+bK5bWGUd_RKCOHY@2na3P0sM$U+W zCihIAz&Cg`8!z|ePiYFl+w7aR6H*>0EDjKZdN@Z@Z22(Z1)H@n=LB;!a~+o0UK>|q zn{0{D+=o!B;{UuM2Yy!vvzV7=AyBF~!iJSL8o$cxMT`;bRx1(%{hIK>Vf^#-9F1l!Vf!&TW$%csTvLtIX); zA1ZFt7G66A%UV2BFJkN<(eUGcq6Dm!07}b@^~yogW%hM3;|$KUD&1oy)&6`_;xAHI 
z2a`Tj&6z)Eozk4d+NEQt{5`pZ`Y}=$LGr2w=@H+2KV7O|bl{&A>Wpr8AMfqtZK%sl zRyhlK2A5h9c}1MgFkMam!RN(y(b2!eCj(>T0l#y zx2@#tLDD3E`WKNw;4fh1>IX`wxao0&Vo_Lnu4U zdGca+yvNyF)7%W>HA-owIPt(C8_G6B2N7gE?Q;6a&u^Z*n>R}xQbwxo%8VeLs&5PP z;2){>rTTYoSXalr9QTCKYxcNB0uOv`k`emOH$*-mTqW6UMRk#76VOf`y46mT4yO{+ zh1$&pO2Cc%Il2qi(^WaxK1pBs2H*H9?bfT_seT5RQIe`|8fW6FwvJxlGm}a9P9Vx{ z|2KPFOp&l(eNb|<`DShvM4tcFYH2SyZa$8rkVE3@3ga5V=g6_s&w& z92HqU7K%PZ(8wx+eN_8PKQ_-^dx5n&jL+j$|<3%A7S02vLClsQ$eGXhOBJ4UUrYfc@>i9+DV3i##ZAc1*ZtXDA z!Ii8uF}^>+umg!wm0{|E2^gc>5E{$$uVOr_kwg0-JgH4y*ZHQr;v9mt1+9c)BNw>m zwYrs94MJCJ*~R^phWX`fRlMN*e8t_~htLn9gsl(3+oQQ&*`O?tV3A~ua(WzK7of>~ z6_Nt++xR5?=Z>>H}zCsB8(*Bc%}TjdXDj^1IQU z0py))?#>od6rMp`RncJw8ah%>%xNrg!5~a9HiW|DaOX5dM@zd2*-eB<4(Y}whvt;UWY1R5{oK#}KRnmxdVOB6 z>xa+lbzPss^?iT7ukZW&?OeUk)R+KwQt?EFG;XdsVZ%MV!%aB`+EKnl2d$tddKzYgU8g=fvL*P28_7T-}S)Vcryo z_o=}uPdnu=IF`2rkJK~QptGJ7%;4^2q4jQ;NuAuOP0jH;$J|YC`^TweU-mp;6yxaA zT|mc2d{SlE&u`ODeUz2twe?|W{v6f6ZfZs&uC;#k%-IHGneWtA6|1t*JK|K5h$&3` zui{2>1DD4$5K2m$Yc`BLbe2^lLjmQ=RirH!gez|OBySqKc^C{3HyxSXCjOZ~8Dsg$Ek-C3vj z`|&70H_nolW@RVc74^QpR_Zx6$wF#_*SV{89s8AU}h1z_~}Nw*^lT@T5$<;xkdP_J+Z8{nfTf*_QMCg;`6>pRGvoo z^$|LJv0LlGU7%YIH$2_aE0!v(aP|HcU=Pi`rl^Kd?OjjUu{Js}Q-s!OWfcWw5^DWv zQW7ChB<)9-ajb0XrbOT`;T8Z57{mqw0n3FM)OdtCeKNPUkgHRdAt6R(DuL(84dweD zAbB~@W=BJIl;~znkDS{mAbJmFX9SLb@98D#sOX&(;H zuRG`O2+6099B%uFKy8kC-;`y1o&`l#Pi=!0x<)V87Wg*K&SgF>ztVbw#I)>bTfjEa z_+!saz(GYwsGmtU=i9=mLSREr5z5y><#`{Cg&n{{b@!}K1qdx^$e`mgE9OMuY ziQql(ZD`%M84Dr5wxnApN%eWnW`bx>>;_ws`&R#iaU4brVk#0J5n}qq_n!(Jq!NC~ zEE!#@G?pShA34rHl&#-nQOf^-A!CeYSosfJZP!X%l5(+*dsfvFOfm=drd_%k{AH9Y zp6){+5f_5sNR?bQV{Gj`C<9OfN|f?rD<^?&NwG-4+lLZILR{QK&ur;f*j7j^xk3)=3)iwwT9o!f-2U>_eYeq~97$$L z9}VatBr68USL%7Qjj5L#Xd^K^Uv`bPN`CS+xKVz;teHu8${YYtX&TQnuc=j+XT|g8 ziuk096AdqzuU1eIDKlbDu3Q%-nJ%BYfeFO55EhQnZ{8F{yAY8hdWp|EqYxgex62Ip zx5BUvi+9&Wo#J(r=%(;^w_xx!whnNp?uDs1 z0K8d-{jc*`&D#R>Jgtw$IPy>?T?UoOy0VeW>bK#-r$t|=zF{yuD_);VskKP_oPzLYZ$WP@&j$11(tR)I&#eP#)7{ZB6O@a=vI8!<7@a 
z_C9PV-RsEp$8%uZF8ndxtF*F#>{hfYJ0z({)rCVB5;q>HEVIk>_P=bFdLs~&R9Z4e z$xne4LZ%?esh(-zSCLsASF>7NndsHXqV?I2>*v}g7~|t7%UR*mZ13fbf1I&4eCYX@ zDC`^a(Kj8e#)~O3MWMcFW4xZfk^m}6g1zV$D>IaPC=}6hQ$o152td-Os=3Y%rC1_K z`}m5kJd3`oTGn~W@9^p;Z`L%0A;EV*LM|A^zwnB>PXAY`uA!XEcGMA7#88-}5U224 zhYd4z`9)sFoOroD^M+}`$zo@tA+Q*}$}496IM$Ek2^vFN)Yi`c^{I;&#gY#NTz+GPAy zhbhIp_RQemPNp3!5tAi=-1=ZB3n~;?;JZ=z}}`6Qr<)_YVgBr~h0%SR75rFLLWZD`<8e9cj#&vR{WaG$V<`Iy1l zdx6Mg7R5{lD$S>7i^R;d7lH{f`nOAA?Ehmrd|ko1#|!LJQfavQ=7y6nbi=km)zjhB zWnS%HVWmm^9oVWu)YeHO`8_42mhxKY_250CI1yhoOzXdeTlG}>l%>#kXW@$6$MUN= ze7G_Cow@79?2H=4sKLvkhEu)gb%zCtTBkCBhSAgSGrA3Lz=YlBMh^aPy~O=BDdG_; zC)x;s^!R0a#My^$WUZ4*fv+kj^UCcoXc$yF64&mN`8jd`zE6*G*}01Wp)aOI`vynS zWGBsqgnXlpke~jN=;{Q?@TXWVCbsO6euzqEV9ZZ6U_6%wV*|Kvox4=cvv?E&hD@3# zy$&+-I_0iXegD;+lNr3Kzdu{>WjGJqaVkOFhC;j(Frsvxw^O5rm_q%ujkb@F6pktR z^o@6Z`NEG{m(=^;Evn=V&hL^o!HS+!K{ZcDR6D^dx x&j;pz0`Grnu^H^220x$vZ}va#{jW>Di2(VOKl(mzDy0I%m!rM2T{Su&{@+2Ds@DJj literal 0 HcmV?d00001 diff --git a/app/service/image2sketch/infer.py b/app/service/image2sketch/infer.py new file mode 100644 index 0000000..266b37c --- /dev/null +++ b/app/service/image2sketch/infer.py @@ -0,0 +1,89 @@ +import os + +import numpy as np +import torch +import torchvision.transforms as transforms +from PIL import Image + +from .models import create_model + + +def tensor2im(input_image, imtype=np.uint8): + if not isinstance(input_image, np.ndarray): + if isinstance(input_image, torch.Tensor): # get the data from a variable + image_tensor = input_image.data + else: + return input_image + image_numpy = image_tensor[0].cpu().float().numpy() # convert it into a numpy array + if image_numpy.shape[0] == 1: # grayscale to RGB + image_numpy = np.tile(image_numpy, (3, 1, 1)) + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 # post-processing: tranpose and scaling + else: # if it is a numpy array, do nothing + image_numpy = input_image + return image_numpy.astype(imtype) + + +def save_image(image_numpy, image_path, w, 
h, aspect_ratio=1.0): + """Save a numpy image to the disk + + Parameters: + image_numpy (numpy array) -- input numpy array + image_path (str) -- the path of the image + """ + + image_pil = Image.fromarray(image_numpy) + image_pil = image_pil.resize((w, h)) + image_pil.save(image_path) + + +def save_img(image_tensor, w, h, filename): + image_pil = tensor2im(image_tensor) + + save_image(image_pil, filename, w, h, aspect_ratio=1.0) + print("Image saved as {}".format(filename)) + + +def load_img(filepath): + img = Image.open(filepath).convert('L') + # print(img.size) + width = img.size[0] + height = img.size[1] + # img = img.resize((512, 512), Image.BICUBIC) + return img, width, height + + +if __name__ == '__main__': + img_A = "/workspace/Semi_ref2sketch_code/datasets/ref_unpair/testA/real_Dress_732caedc416a0cbfedd0e6528040eac7.jpg_Img.jpg" + img_B = "/workspace/Semi_ref2sketch_code/datasets/ref_unpair/testC/styleA.png" + from opt import Config + + opt = Config() # get test options + # hard-code some parameters for test + opt.num_threads = 0 # test code only supports num_threads = 0 + opt.batch_size = 1 # test code only supports batch_size = 1 + opt.serial_batches = True # disable data shuffling; comment this line if results on randomly chosen images are needed. + opt.no_flip = True # no flip; comment this line if results on flipped images are needed. + opt.display_id = -1 # no visdom display; the test code saves the results to a HTML file. 
+ device = torch.device("cuda:0") + model = create_model(opt) # create a model given opt.model and other options + model.setup(opt) + transform_list = [transforms.ToTensor(), transforms.Normalize([0.5], [0.5])] + transform = transforms.Compose(transform_list) + if opt.eval: + model.eval() + data = {} + print(os.getcwd()) + B = reference, _, _ = load_img(r"E:\workspace\trinity_client_aida\app\service\image2sketch\datasets\ref_unpair\testC\styleA.png") + style_img = transform(reference) + data['B'] = style_img + data['B'] = data['B'].unsqueeze(0).to(device) + A = Image.open(r"E:\workspace\trinity_client_aida\app\service\image2sketch\datasets\ref_unpair\testA\real_Dress_3200fecdc83d0c556c2bd96aedbd7fbf.jpg_Img.jpg") + width = A.size[0] + height = A.size[1] + # data['A'] = A.resize((512, 512)) + data['A'] = transform(A) + data['A'] = data['A'].unsqueeze(0).to(device) + model.set_input(data) + model.test() # run inference + visuals = model.get_current_visuals() # get image results + save_img(visuals['content_output'].cpu(), width, height, "result/result.jpg") diff --git a/app/service/image2sketch/models/__init__.py b/app/service/image2sketch/models/__init__.py new file mode 100644 index 0000000..809105c --- /dev/null +++ b/app/service/image2sketch/models/__init__.py @@ -0,0 +1,49 @@ +import importlib + +from app.service.image2sketch.models import unpaired_model as modellib +from .base_model import BaseModel + + +def find_model_using_name(model_name): + """Import the module "models/[model_name]_model.py". + + In the file, the class called DatasetNameModel() will + be instantiated. It has to be a subclass of BaseModel, + and it is case-insensitive. + """ + # model_filename = "." 
+ model_name + "_model" + # modellib = importlib.import_module(model_filename) + model = None + target_model_name = model_name.replace('_', '') + 'model' + for name, cls in modellib.__dict__.items(): + if name.lower() == target_model_name.lower() \ + and issubclass(cls, BaseModel): + model = cls + + if model is None: + print("In %s.py, there should be a subclass of BaseModel with class name that matches %s in lowercase." % (model_filename, target_model_name)) + exit(0) + + return model + + +def get_option_setter(model_name): + """Return the static method of the model class.""" + model_class = find_model_using_name(model_name) + return model_class.modify_commandline_options + + +def create_model(opt): + """Create a model given the option. + + This function warps the class CustomDatasetDataLoader. + This is the main interface between this package and 'train.py'/'test.py' + + Example: + >>> from .models import create_model + >>> model = create_model(opt) + """ + model = find_model_using_name(opt.model) + instance = model(opt) + print("model [%s] was created" % type(instance).__name__) + return instance diff --git a/app/service/image2sketch/models/base_model.py b/app/service/image2sketch/models/base_model.py new file mode 100644 index 0000000..6de961b --- /dev/null +++ b/app/service/image2sketch/models/base_model.py @@ -0,0 +1,230 @@ +import os +import torch +from collections import OrderedDict +from abc import ABC, abstractmethod +from . import networks + + +class BaseModel(ABC): + """This class is an abstract base class (ABC) for models. + To create a subclass, you need to implement the following five functions: + -- <__init__>: initialize the class; first call BaseModel.__init__(self, opt). + -- : unpack data from dataset and apply preprocessing. + -- : produce intermediate results. + -- : calculate losses, gradients, and update network weights. + -- : (optionally) add model-specific options and set default options. 
+ """ + + def __init__(self, opt): + """Initialize the BaseModel class. + + Parameters: + opt (Option class)-- stores all the experiment flags; needs to be a subclass of BaseOptions + + When creating your custom class, you need to implement your own initialization. + In this function, you should first call + Then, you need to define four lists: + -- self.loss_names (str list): specify the training losses that you want to plot and save. + -- self.model_names (str list): define networks used in our training. + -- self.visual_names (str list): specify the images that you want to display and save. + -- self.optimizers (optimizer list): define and initialize optimizers. You can define one optimizer for each network. If two networks are updated at the same time, you can use itertools.chain to group them. See cycle_gan_model.py for an example. + """ + self.opt = opt + self.gpu_ids = opt.gpu_ids + self.isTrain = opt.isTrain + self.device = torch.device('cuda:{}'.format(self.gpu_ids[0])) if self.gpu_ids else torch.device('cpu') # get device name: CPU or GPU + self.save_dir = os.path.join(opt.checkpoints_dir, opt.name) # save all the checkpoints to save_dir + if opt.preprocess != 'scale_width': # with [scale_width], input images might have different sizes, which hurts the performance of cudnn.benchmark. + torch.backends.cudnn.benchmark = True + self.loss_names = [] + self.model_names = [] + self.visual_names = [] + self.optimizers = [] + self.image_paths = [] + self.metric = 0 # used for learning rate policy 'plateau' + + @staticmethod + def modify_commandline_options(parser, is_train): + """Add new model-specific options, and rewrite default values for existing options. + + Parameters: + parser -- original option parser + is_train (bool) -- whether training phase or test phase. You can use this flag to add training-specific or test-specific options. + + Returns: + the modified parser. 
+ """ + return parser + + @abstractmethod + def set_input(self, input): + """Unpack input data from the dataloader and perform necessary pre-processing steps. + + Parameters: + input (dict): includes the data itself and its metadata information. + """ + pass + + @abstractmethod + def forward(self): + """Run forward pass; called by both functions and .""" + pass + + @abstractmethod + def optimize_parameters(self): + """Calculate losses, gradients, and update network weights; called in every training iteration""" + pass + + def setup(self, opt): + """Load and print networks; create schedulers + + Parameters: + opt (Option class) -- stores all the experiment flags; needs to be a subclass of BaseOptions + """ + if self.isTrain: + self.schedulers = [networks.get_scheduler(optimizer, opt) for optimizer in self.optimizers] + if not self.isTrain or opt.continue_train: + load_suffix = 'iter_%d' % opt.load_iter if opt.load_iter > 0 else opt.epoch + self.load_networks(load_suffix) + self.print_networks(opt.verbose) + + def eval(self): + """Make models eval mode during test time""" + for name in self.model_names: + if isinstance(name, str): + net = getattr(self, 'net' + name) + net.eval() + + def test(self): + """Forward function used in test time. 
+ + This function wraps function in no_grad() so we don't save intermediate steps for backprop + It also calls to produce additional visualization results + """ + with torch.no_grad(): + self.forward() + self.compute_visuals() + + def compute_visuals(self): + """Calculate additional output images for visdom and HTML visualization""" + pass + + def get_image_paths(self): + """ Return image paths that are used to load current data""" + return self.image_paths + + def update_learning_rate(self): + """Update learning rates for all the networks; called at the end of every epoch""" + old_lr = self.optimizers[0].param_groups[0]['lr'] + for scheduler in self.schedulers: + if self.opt.lr_policy == 'plateau': + scheduler.step(self.metric) + else: + scheduler.step() + + lr = self.optimizers[0].param_groups[0]['lr'] + print('learning rate %.7f -> %.7f' % (old_lr, lr)) + + def get_current_visuals(self): + """Return visualization images. train.py will display these images with visdom, and save the images to a HTML""" + visual_ret = OrderedDict() + for name in self.visual_names: + if isinstance(name, str): + visual_ret[name] = getattr(self, name) + return visual_ret + + def get_current_losses(self): + """Return traning losses / errors. train.py will print out these errors on console, and save them to a file""" + errors_ret = OrderedDict() + for name in self.loss_names: + if isinstance(name, str): + errors_ret[name] = float(getattr(self, 'loss_' + name)) # float(...) works for both scalar tensor and float number + return errors_ret + + def save_networks(self, epoch): + """Save all the networks to the disk. 
+ + Parameters: + epoch (int) -- current epoch; used in the file name '%s_net_%s.pth' % (epoch, name) + """ + for name in self.model_names: + if isinstance(name, str): + save_filename = '%s_net_%s.pth' % (epoch, name) + save_path = os.path.join(self.save_dir, save_filename) + net = getattr(self, 'net' + name) + + if len(self.gpu_ids) > 0 and torch.cuda.is_available(): + torch.save(net.module.cpu().state_dict(), save_path) + net.cuda(self.gpu_ids[0]) + else: + torch.save(net.cpu().state_dict(), save_path) + + def __patch_instance_norm_state_dict(self, state_dict, module, keys, i=0): + """Fix InstanceNorm checkpoints incompatibility (prior to 0.4)""" + key = keys[i] + if i + 1 == len(keys): # at the end, pointing to a parameter/buffer + if module.__class__.__name__.startswith('InstanceNorm') and \ + (key == 'running_mean' or key == 'running_var'): + if getattr(module, key) is None: + state_dict.pop('.'.join(keys)) + if module.__class__.__name__.startswith('InstanceNorm') and \ + (key == 'num_batches_tracked'): + state_dict.pop('.'.join(keys)) + else: + self.__patch_instance_norm_state_dict(state_dict, getattr(module, key), keys, i + 1) + + def load_networks(self, epoch): + """Load all the networks from the disk. 
+ + Parameters: + epoch (int) -- current epoch; used in the file name '%s_net_%s.pth' % (epoch, name) + """ + for name in self.model_names: + if isinstance(name, str): + load_filename = '%s_net_%s.pth' % (epoch, name) + load_path = os.path.join(self.save_dir, load_filename) + net = getattr(self, 'net' + name) + if isinstance(net, torch.nn.DataParallel): + net = net.module + print('loading the model from %s' % load_path) + # if you are using PyTorch newer than 0.4 (e.g., built from + # GitHub source), you can remove str() on self.device + state_dict = torch.load(load_path, map_location=str(self.device)) + if hasattr(state_dict, '_metadata'): + del state_dict._metadata + + # patch InstanceNorm checkpoints prior to 0.4 + for key in list(state_dict.keys()): # need to copy keys here because we mutate in loop + self.__patch_instance_norm_state_dict(state_dict, net, key.split('.')) + net.load_state_dict(state_dict) + + def print_networks(self, verbose): + """Print the total number of parameters in the network and (if verbose) network architecture + + Parameters: + verbose (bool) -- if verbose: print the network architecture + """ + print('---------- Networks initialized -------------') + for name in self.model_names: + if isinstance(name, str): + net = getattr(self, 'net' + name) + num_params = 0 + for param in net.parameters(): + num_params += param.numel() + if verbose: + print(net) + print('[Network %s] Total number of parameters : %.3f M' % (name, num_params / 1e6)) + print('-----------------------------------------------') + + def set_requires_grad(self, nets, requires_grad=False): + """Set requies_grad=Fasle for all the networks to avoid unnecessary computations + Parameters: + nets (network list) -- a list of networks + requires_grad (bool) -- whether the networks require gradients or not + """ + if not isinstance(nets, list): + nets = [nets] + for net in nets: + if net is not None: + for param in net.parameters(): + param.requires_grad = requires_grad diff --git 
a/app/service/image2sketch/models/layer.py b/app/service/image2sketch/models/layer.py new file mode 100644 index 0000000..df96a35 --- /dev/null +++ b/app/service/image2sketch/models/layer.py @@ -0,0 +1,354 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class CNR2d(nn.Module): + def __init__(self, nch_in, nch_out, kernel_size=4, stride=1, padding=1, norm='bnorm', relu=0.0, drop=[], bias=[]): + super().__init__() + + if bias == []: + if norm == 'bnorm': + bias = False + else: + bias = True + + layers = [] + layers += [Conv2d(nch_in, nch_out, kernel_size=kernel_size, stride=stride, padding=padding, bias=bias)] + + if norm != []: + layers += [Norm2d(nch_out, norm)] + + if relu != []: + layers += [ReLU(relu)] + + if drop != []: + layers += [nn.Dropout2d(drop)] + + self.cbr = nn.Sequential(*layers) + + def forward(self, x): + return self.cbr(x) + + +class DECNR2d(nn.Module): + def __init__(self, nch_in, nch_out, kernel_size=4, stride=1, padding=1, output_padding=0, norm='bnorm', relu=0.0, drop=[], bias=[]): + super().__init__() + + if bias == []: + if norm == 'bnorm': + bias = False + else: + bias = True + + layers = [] + layers += [Deconv2d(nch_in, nch_out, kernel_size=kernel_size, stride=stride, padding=padding, output_padding=output_padding, bias=bias)] + + if norm != []: + layers += [Norm2d(nch_out, norm)] + + if relu != []: + layers += [ReLU(relu)] + + if drop != []: + layers += [nn.Dropout2d(drop)] + + self.decbr = nn.Sequential(*layers) + + def forward(self, x): + return self.decbr(x) + + +class ResBlock(nn.Module): + def __init__(self, nch_in, nch_out, kernel_size=3, stride=1, padding=1, padding_mode='reflection', norm='inorm', relu=0.0, drop=[], bias=[]): + super().__init__() + + if bias == []: + if norm == 'bnorm': + bias = False + else: + bias = True + + layers = [] + + # 1st conv + layers += [Padding(padding, padding_mode=padding_mode)] + layers += [CNR2d(nch_in, nch_out, kernel_size=kernel_size, stride=stride, padding=0, 
norm=norm, relu=relu)] + + if drop != []: + layers += [nn.Dropout2d(drop)] + + # 2nd conv + layers += [Padding(padding, padding_mode=padding_mode)] + layers += [CNR2d(nch_in, nch_out, kernel_size=kernel_size, stride=stride, padding=0, norm=norm, relu=[])] + + self.resblk = nn.Sequential(*layers) + + def forward(self, x): + return x + self.resblk(x) + + +class ResBlock_cat(nn.Module): + def __init__(self, nch_in, nch_out, kernel_size=3, stride=1, padding=1, padding_mode='reflection', norm='inorm', relu=0.0, drop=[], bias=[]): + super().__init__() + + if bias == []: + if norm == 'bnorm': + bias = False + else: + bias = True + + layers = [] + + # 1st conv + layers += [Padding(padding, padding_mode=padding_mode)] + layers += [CNR2d(nch_in*2, nch_out, kernel_size=kernel_size, stride=stride, padding=0, norm=norm, relu=relu)] + + if drop != []: + layers += [nn.Dropout2d(drop)] + + # 2nd conv + layers += [Padding(padding, padding_mode=padding_mode)] + layers += [CNR2d(nch_in, nch_out, kernel_size=kernel_size, stride=stride, padding=0, norm=norm, relu=[])] + + self.resblk = nn.Sequential(*layers) + + def forward(self,x,y): + output = x + self.resblk(torch.cat([x,y],dim=1)) + return output + +class LinearBlock(nn.Module): + def __init__(self, input_dim, output_dim, norm='none', activation='relu'): + super(LinearBlock, self).__init__() + use_bias = True + # initialize fully connected layer + if norm == 'sn': + self.fc = SpectralNorm(nn.Linear(input_dim, output_dim, bias=use_bias)) + else: + self.fc = nn.Linear(input_dim, output_dim, bias=use_bias) + + # initialize normalization + norm_dim = output_dim + if norm == 'bn': + self.norm = nn.BatchNorm1d(norm_dim) + elif norm == 'in': + self.norm = nn.InstanceNorm1d(norm_dim) + elif norm == 'ln': + self.norm = LayerNorm(norm_dim) + elif norm == 'none' or norm == 'sn': + self.norm = None + else: + assert 0, "Unsupported normalization: {}".format(norm) + + # initialize activation + if activation == 'relu': + self.activation = 
nn.ReLU(inplace=True) + elif activation == 'lrelu': + self.activation = nn.LeakyReLU(0.2, inplace=True) + elif activation == 'prelu': + self.activation = nn.PReLU() + elif activation == 'selu': + self.activation = nn.SELU(inplace=True) + elif activation == 'tanh': + self.activation = nn.Tanh() + elif activation == 'none': + self.activation = None + else: + assert 0, "Unsupported activation: {}".format(activation) + + def forward(self, x): + out = self.fc(x) + if self.norm: + out = self.norm(out) + if self.activation: + out = self.activation(out) + return out + +class MLP(nn.Module): + def __init__(self, input_dim, output_dim, dim, n_blk, norm='none', activ='relu'): + + super(MLP, self).__init__() + self.model = [] + self.model += [LinearBlock(input_dim, dim, norm=norm, activation=activ)] + for i in range(n_blk - 2): + self.model += [LinearBlock(dim, dim, norm=norm, activation=activ)] + self.model += [LinearBlock(dim, output_dim, norm='none', activation='none')] # no output activations + self.model = nn.Sequential(*self.model) + + def forward(self, x): + return self.model(x.view(x.size(0), -1)) + +class CNR1d(nn.Module): + def __init__(self, nch_in, nch_out, norm='bnorm', relu=0.0, drop=[]): + super().__init__() + + if norm == 'bnorm': + bias = False + else: + bias = True + + layers = [] + layers += [nn.Linear(nch_in, nch_out, bias=bias)] + + if norm != []: + layers += [Norm2d(nch_out, norm)] + + if relu != []: + layers += [ReLU(relu)] + + if drop != []: + layers += [nn.Dropout2d(drop)] + + self.cbr = nn.Sequential(*layers) + + def forward(self, x): + return self.cbr(x) + + +class Conv2d(nn.Module): + def __init__(self, nch_in, nch_out, kernel_size=4, stride=1, padding=1, bias=True): + super(Conv2d, self).__init__() + self.conv = nn.Conv2d(nch_in, nch_out, kernel_size=kernel_size, stride=stride, padding=padding, bias=bias) + + def forward(self, x): + return self.conv(x) + + +class Deconv2d(nn.Module): + def __init__(self, nch_in, nch_out, kernel_size=4, stride=1, 
padding=1, output_padding=0, bias=True): + super(Deconv2d, self).__init__() + self.deconv = nn.ConvTranspose2d(nch_in, nch_out, kernel_size=kernel_size, stride=stride, padding=padding, output_padding=output_padding, bias=bias) + + # layers = [nn.Upsample(scale_factor=2, mode='bilinear'), + # nn.ReflectionPad2d(1), + # nn.Conv2d(nch_in , nch_out, kernel_size=3, stride=1, padding=0)] + # + # self.deconv = nn.Sequential(*layers) + + def forward(self, x): + return self.deconv(x) + + +class Linear(nn.Module): + def __init__(self, nch_in, nch_out): + super(Linear, self).__init__() + self.linear = nn.Linear(nch_in, nch_out) + + def forward(self, x): + return self.linear(x) + + +class Norm2d(nn.Module): + def __init__(self, nch, norm_mode): + super(Norm2d, self).__init__() + if norm_mode == 'bnorm': + self.norm = nn.BatchNorm2d(nch) + elif norm_mode == 'inorm': + self.norm = nn.InstanceNorm2d(nch) + + def forward(self, x): + return self.norm(x) + + +class ReLU(nn.Module): + def __init__(self, relu): + super(ReLU, self).__init__() + if relu > 0: + self.relu = nn.LeakyReLU(relu, True) + elif relu == 0: + self.relu = nn.ReLU(True) + + def forward(self, x): + return self.relu(x) + + +class Padding(nn.Module): + def __init__(self, padding, padding_mode='zeros', value=0): + super(Padding, self).__init__() + if padding_mode == 'reflection': + self. 
padding = nn.ReflectionPad2d(padding) + elif padding_mode == 'replication': + self.padding = nn.ReplicationPad2d(padding) + elif padding_mode == 'constant': + self.padding = nn.ConstantPad2d(padding, value) + elif padding_mode == 'zeros': + self.padding = nn.ZeroPad2d(padding) + + def forward(self, x): + return self.padding(x) + + +class Pooling2d(nn.Module): + def __init__(self, nch=[], pool=2, type='avg'): + super().__init__() + + if type == 'avg': + self.pooling = nn.AvgPool2d(pool) + elif type == 'max': + self.pooling = nn.MaxPool2d(pool) + elif type == 'conv': + self.pooling = nn.Conv2d(nch, nch, kernel_size=pool, stride=pool) + + def forward(self, x): + return self.pooling(x) + + +class UnPooling2d(nn.Module): + def __init__(self, nch=[], pool=2, type='nearest'): + super().__init__() + + if type == 'nearest': + self.unpooling = nn.Upsample(scale_factor=pool, mode='nearest', align_corners=True) + elif type == 'bilinear': + self.unpooling = nn.Upsample(scale_factor=pool, mode='bilinear', align_corners=True) + elif type == 'conv': + self.unpooling = nn.ConvTranspose2d(nch, nch, kernel_size=pool, stride=pool) + + def forward(self, x): + return self.unpooling(x) + + +class Concat(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, x1, x2): + diffy = x2.size()[2] - x1.size()[2] + diffx = x2.size()[3] - x1.size()[3] + + x1 = F.pad(x1, [diffx // 2, diffx - diffx // 2, + diffy // 2, diffy - diffy // 2]) + + return torch.cat([x2, x1], dim=1) + + +class TV1dLoss(nn.Module): + def __init__(self): + super(TV1dLoss, self).__init__() + + def forward(self, input): + # loss = torch.mean(torch.abs(input[:, :, :, :-1] - input[:, :, :, 1:])) + \ + # torch.mean(torch.abs(input[:, :, :-1, :] - input[:, :, 1:, :])) + loss = torch.mean(torch.abs(input[:, :-1] - input[:, 1:])) + + return loss + + +class TV2dLoss(nn.Module): + def __init__(self): + super(TV2dLoss, self).__init__() + + def forward(self, input): + loss = torch.mean(torch.abs(input[:, :, :, :-1] 
- input[:, :, :, 1:])) + \ + torch.mean(torch.abs(input[:, :, :-1, :] - input[:, :, 1:, :])) + return loss + + +class SSIM2dLoss(nn.Module): + def __init__(self): + super(SSIM2dLoss, self).__init__() + + def forward(self, input, targer): + loss = 0 + return loss + diff --git a/app/service/image2sketch/models/networks.py b/app/service/image2sketch/models/networks.py new file mode 100644 index 0000000..fc341c2 --- /dev/null +++ b/app/service/image2sketch/models/networks.py @@ -0,0 +1,734 @@ +import functools + +from torch.nn import init +from torch.optim import lr_scheduler + +from .layer import * + + +############################################################################### +# Helper Functions +############################################################################### + + +class Identity(nn.Module): + def forward(self, x): + return x + + +def get_norm_layer(norm_type='instance'): + """Return a normalization layer + + Parameters: + norm_type (str) -- the name of the normalization layer: batch | instance | none + + For BatchNorm, we use learnable affine parameters and track running statistics (mean/stddev). + For InstanceNorm, we do not use learnable affine parameters. We do not track running statistics. + """ + if norm_type == 'batch': + norm_layer = functools.partial(nn.BatchNorm2d, affine=True, track_running_stats=True) + elif norm_type == 'instance': + norm_layer = functools.partial(nn.InstanceNorm2d, affine=False, track_running_stats=False) + elif norm_type == 'none': + def norm_layer(x): + return Identity() + else: + raise NotImplementedError('normalization layer [%s] is not found' % norm_type) + return norm_layer + + +def get_scheduler(optimizer, opt): + """Return a learning rate scheduler + + Parameters: + optimizer -- the optimizer of the network + opt (option class) -- stores all the experiment flags; needs to be a subclass of BaseOptions.  
+ opt.lr_policy is the name of learning rate policy: linear | step | plateau | cosine + + For 'linear', we keep the same learning rate for the first epochs + and linearly decay the rate to zero over the next epochs. + For other schedulers (step, plateau, and cosine), we use the default PyTorch schedulers. + See https://pytorch.org/docs/stable/optim.html for more details. + """ + if opt.lr_policy == 'linear': + def lambda_rule(epoch): + lr_l = 1.0 - max(0, epoch + opt.epoch_count - opt.n_epochs) / float(opt.n_epochs_decay + 1) + return lr_l + + scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule) + elif opt.lr_policy == 'step': + scheduler = lr_scheduler.StepLR(optimizer, step_size=opt.lr_decay_iters, gamma=0.1) + elif opt.lr_policy == 'plateau': + scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.2, threshold=0.01, patience=5) + elif opt.lr_policy == 'cosine': + scheduler = lr_scheduler.CosineAnnealingLR(optimizer, T_max=opt.n_epochs, eta_min=0) + else: + return NotImplementedError('learning rate policy [%s] is not implemented', opt.lr_policy) + return scheduler + + +def init_weights(net, init_type='normal', init_gain=0.02): + """Initialize network weights. + + Parameters: + net (network) -- network to be initialized + init_type (str) -- the name of an initialization method: normal | xavier | kaiming | orthogonal + init_gain (float) -- scaling factor for normal, xavier and orthogonal. + + We use 'normal' in the original pix2pix and CycleGAN paper. But xavier and kaiming might + work better for some applications. Feel free to try yourself. 
+ """ + + def init_func(m): # define the initialization function + classname = m.__class__.__name__ + if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1): + if init_type == 'normal': + init.normal_(m.weight.data, 0.0, init_gain) + elif init_type == 'xavier': + init.xavier_normal_(m.weight.data, gain=init_gain) + elif init_type == 'kaiming': + init.kaiming_normal_(m.weight.data, a=0, mode='fan_in') + elif init_type == 'orthogonal': + init.orthogonal_(m.weight.data, gain=init_gain) + else: + raise NotImplementedError('initialization method [%s] is not implemented' % init_type) + if hasattr(m, 'bias') and m.bias is not None: + init.constant_(m.bias.data, 0.0) + elif classname.find('BatchNorm2d') != -1: # BatchNorm Layer's weight is not a matrix; only normal distribution applies. + init.normal_(m.weight.data, 1.0, init_gain) + init.constant_(m.bias.data, 0.0) + + print('initialize network with %s' % init_type) + net.apply(init_func) # apply the initialization function + + +def init_net(net, init_type='normal', init_gain=0.02, gpu_ids=[]): + """Initialize a network: 1. register CPU/GPU device (with multi-GPU support); 2. initialize the network weights + Parameters: + net (network) -- the network to be initialized + init_type (str) -- the name of an initialization method: normal | xavier | kaiming | orthogonal + gain (float) -- scaling factor for normal, xavier and orthogonal. + gpu_ids (int list) -- which GPUs the network runs on: e.g., 0,1,2 + + Return an initialized network. 
+ """ + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + net.to(gpu_ids[0]) + net = torch.nn.DataParallel(net, gpu_ids) # multi-GPUs + init_weights(net, init_type, init_gain=init_gain) + return net + + +def define_G(input_nc, output_nc, ngf, netG, norm='batch', use_dropout=False, init_type='normal', init_gain=0.02, gpu_ids=[]): + net = None + norm_layer = get_norm_layer(norm_type=norm) + + if netG == 'ref_unpair_cbam_cat': + net = ref_unpair(input_nc, output_nc, ngf, norm='inorm', status='ref_unpair_cbam_cat') + elif netG == 'ref_unpair_recon': + net = ref_unpair(input_nc, output_nc, ngf, norm='inorm', status='ref_unpair_recon') + elif netG == 'triplet': + net = triplet(input_nc, output_nc, ngf, norm='inorm') + + else: + raise NotImplementedError('Generator model name [%s] is not recognized' % netG) + return init_net(net, init_type, init_gain, gpu_ids) + + +class AdaIN(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, x, y): + eps = 1e-5 + mean_x = torch.mean(x, dim=[2, 3]) + mean_y = torch.mean(y, dim=[2, 3]) + + std_x = torch.std(x, dim=[2, 3]) + std_y = torch.std(y, dim=[2, 3]) + + mean_x = mean_x.unsqueeze(-1).unsqueeze(-1) + mean_y = mean_y.unsqueeze(-1).unsqueeze(-1) + + std_x = std_x.unsqueeze(-1).unsqueeze(-1) + eps + std_y = std_y.unsqueeze(-1).unsqueeze(-1) + eps + + out = (x - mean_x) / std_x * std_y + mean_y + + return out + + +class HED(nn.Module): + def __init__(self): + super(HED, self).__init__() + + self.moduleVggOne = nn.Sequential( + nn.Conv2d(in_channels=3, out_channels=64, kernel_size=3, stride=1, padding=1), + nn.ReLU(inplace=False), + nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1), + nn.ReLU(inplace=False) + ) + + self.moduleVggTwo = nn.Sequential( + nn.MaxPool2d(kernel_size=2, stride=2), + nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, stride=1, padding=1), + nn.ReLU(inplace=False), + nn.Conv2d(in_channels=128, out_channels=128, kernel_size=3, stride=1, 
padding=1), + nn.ReLU(inplace=False) + ) + + self.moduleVggThr = nn.Sequential( + nn.MaxPool2d(kernel_size=2, stride=2), + nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, stride=1, padding=1), + nn.ReLU(inplace=False), + nn.Conv2d(in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=1), + nn.ReLU(inplace=False), + nn.Conv2d(in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=1), + nn.ReLU(inplace=False) + ) + + self.moduleVggFou = nn.Sequential( + nn.MaxPool2d(kernel_size=2, stride=2), + nn.Conv2d(in_channels=256, out_channels=512, kernel_size=3, stride=1, padding=1), + nn.ReLU(inplace=False), + nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, stride=1, padding=1), + nn.ReLU(inplace=False), + nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, stride=1, padding=1), + nn.ReLU(inplace=False) + ) + + self.moduleVggFiv = nn.Sequential( + nn.MaxPool2d(kernel_size=2, stride=2), + nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, stride=1, padding=1), + nn.ReLU(inplace=False), + nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, stride=1, padding=1), + nn.ReLU(inplace=False), + nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, stride=1, padding=1), + nn.ReLU(inplace=False) + ) + + self.moduleScoreOne = nn.Conv2d(in_channels=64, out_channels=1, kernel_size=1, stride=1, padding=0) + self.moduleScoreTwo = nn.Conv2d(in_channels=128, out_channels=1, kernel_size=1, stride=1, padding=0) + self.moduleScoreThr = nn.Conv2d(in_channels=256, out_channels=1, kernel_size=1, stride=1, padding=0) + self.moduleScoreFou = nn.Conv2d(in_channels=512, out_channels=1, kernel_size=1, stride=1, padding=0) + self.moduleScoreFiv = nn.Conv2d(in_channels=512, out_channels=1, kernel_size=1, stride=1, padding=0) + + self.moduleCombine = nn.Sequential( + nn.Conv2d(in_channels=5, out_channels=1, kernel_size=1, stride=1, padding=0), + nn.Sigmoid() + ) + + def forward(self, tensorInput): + tensorBlue = 
(tensorInput[:, 2:3, :, :] * 255.0) - 104.00698793 + tensorGreen = (tensorInput[:, 1:2, :, :] * 255.0) - 116.66876762 + tensorRed = (tensorInput[:, 0:1, :, :] * 255.0) - 122.67891434 + tensorInput = torch.cat([tensorBlue, tensorGreen, tensorRed], 1) + + tensorVggOne = self.moduleVggOne(tensorInput) + tensorVggTwo = self.moduleVggTwo(tensorVggOne) + tensorVggThr = self.moduleVggThr(tensorVggTwo) + tensorVggFou = self.moduleVggFou(tensorVggThr) + tensorVggFiv = self.moduleVggFiv(tensorVggFou) + + tensorScoreOne = self.moduleScoreOne(tensorVggOne) + tensorScoreTwo = self.moduleScoreTwo(tensorVggTwo) + tensorScoreThr = self.moduleScoreThr(tensorVggThr) + tensorScoreFou = self.moduleScoreFou(tensorVggFou) + tensorScoreFiv = self.moduleScoreFiv(tensorVggFiv) + + tensorScoreOne = nn.functional.interpolate(input=tensorScoreOne, size=(tensorInput.size(2), tensorInput.size(3)), mode='bilinear', align_corners=False) + tensorScoreTwo = nn.functional.interpolate(input=tensorScoreTwo, size=(tensorInput.size(2), tensorInput.size(3)), mode='bilinear', align_corners=False) + tensorScoreThr = nn.functional.interpolate(input=tensorScoreThr, size=(tensorInput.size(2), tensorInput.size(3)), mode='bilinear', align_corners=False) + tensorScoreFou = nn.functional.interpolate(input=tensorScoreFou, size=(tensorInput.size(2), tensorInput.size(3)), mode='bilinear', align_corners=False) + tensorScoreFiv = nn.functional.interpolate(input=tensorScoreFiv, size=(tensorInput.size(2), tensorInput.size(3)), mode='bilinear', align_corners=False) + + return self.moduleCombine(torch.cat([tensorScoreOne, tensorScoreTwo, tensorScoreThr, tensorScoreFou, tensorScoreFiv], 1)) + # return self.moduleCombine(torch.cat([ tensorScoreOne, tensorScoreTwo, tensorScoreThr, tensorScoreOne, tensorScoreTwo ], 1)) + + # return torch.sigmoid(tensorScoreOne),torch.sigmoid(tensorScoreTwo),torch.sigmoid(tensorScoreThr),torch.sigmoid(tensorScoreFou),torch.sigmoid(tensorScoreFiv),self.moduleCombine(torch.cat([ tensorScoreOne, 
tensorScoreTwo, tensorScoreThr, tensorScoreFou, tensorScoreFiv ], 1)) + # return torch.sigmoid(tensorScoreTwo) + + +def define_HED(init_weights_, gpu_ids_=[]): + net = HED() + + if len(gpu_ids_) > 0: + assert (torch.cuda.is_available()) + net.to(gpu_ids_[0]) + net = torch.nn.DataParallel(net, gpu_ids_) # multi-GPUs + + if not init_weights_ == None: + device = torch.device('cuda:{}'.format(gpu_ids_[0])) if gpu_ids_ else torch.device('cpu') + print('Loading model from: %s' % init_weights_) + state_dict = torch.load(init_weights_, map_location=str(device)) + if isinstance(net, torch.nn.DataParallel): + net.module.load_state_dict(state_dict) + else: + net.load_state_dict(state_dict) + print('load the weights successfully') + + return net + + +def define_styletps(init_weights_, gpu_ids_=[], shape=False): + net = None + if shape == False: + net = triplet() + if len(gpu_ids_) > 0: + assert (torch.cuda.is_available()) + net.to(gpu_ids_[0]) + net = torch.nn.DataParallel(net, gpu_ids_) # multi-GPUs + + if not init_weights_ == None: + device = torch.device('cuda:{}'.format(gpu_ids_[0])) if gpu_ids_ else torch.device('cpu') + print('Loading model from: %s' % init_weights_) + state_dict = torch.load(init_weights_, map_location=str(device)) + if isinstance(net, torch.nn.DataParallel): + net.module.load_state_dict(state_dict) + else: + net.load_state_dict(state_dict) + print('load the weights successfully') + + return net + + +class triplet(nn.Module): + def __init__(self): # mnblk=4 + super(triplet, self).__init__() + + # self.channels = nch_in + self.nch_in = 1 + self.nch_out = 1 + self.nch_ker = 64 + self.norm = 'bnorm' + # self.nblk = nblk + + if self.norm == 'bnorm': + self.bias = False + else: + self.bias = True + + self.conv0 = CNR2d(self.nch_in, self.nch_ker, kernel_size=7, stride=1, padding=3, norm=self.norm, relu=0.0) + self.conv1 = CNR2d(self.nch_ker, 2 * self.nch_ker, kernel_size=4, stride=2, padding=1, norm=self.norm, relu=0.0) + self.conv2 = CNR2d(2 * self.nch_ker, 
4 * self.nch_ker, kernel_size=4, stride=2, padding=1, norm=self.norm, relu=0.0) + + self.final_pool = nn.AdaptiveAvgPool2d((1, 1)) + self.linear = nn.Linear(256, 128) + + def forward(self, x, y, z): + + x = self.conv0(x) + x = self.conv1(x) + x = self.conv2(x) + x = self.final_pool(x) + x = torch.flatten(x, 1) + x = self.linear(x) + + y = self.conv0(y) + y = self.conv1(y) + y = self.conv2(y) + y = self.final_pool(y) + y = torch.flatten(y, 1) + y = self.linear(y) + + z = self.conv0(z) + z = self.conv1(z) + z = self.conv2(z) + z = self.final_pool(z) + z = torch.flatten(z, 1) + z = self.linear(z) + + return x, y, z + + +class MLP(nn.Module): + def __init__(self, input_dim, output_dim, dim, n_blk, norm='none', activ='relu'): + super(MLP, self).__init__() + self.model = [] + self.model += [LinearBlock(input_dim, dim, norm=norm, activation=activ)] + for i in range(n_blk - 2): + self.model += [LinearBlock(dim, dim, norm=norm, activation=activ)] + self.model += [LinearBlock(dim, output_dim, norm='none', activation='none')] # no output activations + self.model = nn.Sequential(*self.model) + + def forward(self, x): + return self.model(x.view(x.size(0), -1)) + + +class ref_unpair(nn.Module): + def __init__(self, nch_in, nch_out, nch_ker=64, norm='bnorm', nblk=4, status='ref_unpair'): + super(ref_unpair, self).__init__() + + nch_ker = 64 + # self.channels = nch_in + self.nch_in = nch_in + self.nchs_in = 1 + self.status = status + + if self.status == 'ref_unpair_recon': + self.nch_out = 3 + self.nch_in = 1 + else: + self.nch_out = 1 + + self.nch_ker = nch_ker + self.norm = norm + self.nblk = nblk + self.dec0 = [] + + if status == 'ref_unpair_cbam_cat': + self.cbam_c = CBAM(nch_ker * 8, 16, 3, cbam_status="channel") + self.cbam_s = CBAM(nch_ker * 8, 16, 3, cbam_status="spatial") + + self.enc1_s = CNR2d(self.nchs_in, self.nch_ker, kernel_size=7, stride=1, padding=3, norm=self.norm, relu=0.0) + self.enc2_s = CNR2d(self.nch_ker, 2 * self.nch_ker, kernel_size=4, stride=2, padding=1, 
class ref_unpair(nn.Module):
    """Reference-guided unpaired sketch generator.

    Encodes a content image and (in the CBAM variant) a single-channel style
    reference, fuses them with AdaIN plus channel/spatial attention, and
    decodes to an image in [-1, 1] via ``tanh``.

    Args:
        nch_in:  channels of the content input (overridden to 1 for the
                 ``ref_unpair_recon`` status).
        nch_out: nominal output channels (recomputed from ``status``).
        nch_ker: base filter count (NOTE: overwritten to 64 below).
        norm:    normalization type passed to the conv blocks.
        nblk:    number of residual blocks for the non-CBAM variants.
        status:  one of 'ref_unpair', 'ref_unpair_recon', 'ref_unpair_cbam_cat'.
    """

    def __init__(self, nch_in, nch_out, nch_ker=64, norm='bnorm', nblk=4, status='ref_unpair'):
        super(ref_unpair, self).__init__()

        # NOTE(review): the nch_ker argument is unconditionally overwritten;
        # kept to preserve the original behavior and interface.
        nch_ker = 64
        self.nch_in = nch_in
        self.nchs_in = 1  # style branch always takes a single-channel input
        self.status = status

        if self.status == 'ref_unpair_recon':
            # Reconstruction generator: 1-channel sketch in, 3-channel image out.
            self.nch_out = 3
            self.nch_in = 1
        else:
            self.nch_out = 1

        self.nch_ker = nch_ker
        self.norm = norm
        self.nblk = nblk
        self.dec0 = []

        if status == 'ref_unpair_cbam_cat':
            self.cbam_c = CBAM(nch_ker * 8, 16, 3, cbam_status="channel")
            self.cbam_s = CBAM(nch_ker * 8, 16, 3, cbam_status="spatial")

        # Style encoder (single-channel input).
        self.enc1_s = CNR2d(self.nchs_in, self.nch_ker, kernel_size=7, stride=1, padding=3, norm=self.norm, relu=0.0)
        self.enc2_s = CNR2d(self.nch_ker, 2 * self.nch_ker, kernel_size=4, stride=2, padding=1, norm=self.norm, relu=0.0)
        self.enc3_s = CNR2d(2 * self.nch_ker, 4 * self.nch_ker, kernel_size=4, stride=2, padding=1, norm=self.norm, relu=0.0)
        self.enc4_s = CNR2d(4 * self.nch_ker, 8 * self.nch_ker, kernel_size=4, stride=2, padding=1, norm=self.norm, relu=0.0)

        # Batch norm carries affine parameters, so convolutions can omit bias.
        if norm == 'bnorm':
            self.bias = False
        else:
            self.bias = True

        # Content encoder.
        self.enc1_c = CNR2d(self.nch_in, self.nch_ker, kernel_size=7, stride=1, padding=3, norm=self.norm, relu=0.0)
        self.enc2_c = CNR2d(self.nch_ker, 2 * self.nch_ker, kernel_size=4, stride=2, padding=1, norm=self.norm, relu=0.0)
        self.enc3_c = CNR2d(2 * self.nch_ker, 4 * self.nch_ker, kernel_size=4, stride=2, padding=1, norm=self.norm, relu=0.0)
        self.enc4_c = CNR2d(4 * self.nch_ker, 8 * self.nch_ker, kernel_size=4, stride=2, padding=1, norm=self.norm, relu=0.0)

        if status == 'ref_unpair_cbam_cat':
            self.res_cat1 = ResBlock_cat(8 * self.nch_ker, 8 * self.nch_ker, kernel_size=3, stride=1, padding=1, norm=self.norm, relu=0.0, padding_mode='reflection')
            self.res_cat2 = ResBlock_cat(8 * self.nch_ker, 8 * self.nch_ker, kernel_size=3, stride=1, padding=1, norm=self.norm, relu=0.0, padding_mode='reflection')
            self.res_cat3 = ResBlock_cat(8 * self.nch_ker, 8 * self.nch_ker, kernel_size=3, stride=1, padding=1, norm=self.norm, relu=0.0, padding_mode='reflection')
            self.res_cat4 = ResBlock_cat(8 * self.nch_ker, 8 * self.nch_ker, kernel_size=3, stride=1, padding=1, norm=self.norm, relu=0.0, padding_mode='reflection')

        if self.nblk and status != 'ref_unpair_cbam_cat':
            res = []
            for i in range(self.nblk):
                res += [ResBlock(8 * self.nch_ker, 8 * self.nch_ker, kernel_size=3, stride=1, padding=1, norm=self.norm, relu=0.0, padding_mode='reflection')]
            # Kept registered (even though unused in forward, see NOTE there)
            # so existing checkpoints keep loading without key mismatches.
            self.res1 = nn.Sequential(*res)

        # Decoder: three upsampling stages back to input resolution.
        self.dec0 += [DECNR2d(8 * self.nch_ker, 4 * self.nch_ker, kernel_size=4, stride=2, padding=1, norm=self.norm, relu=0.0)]
        self.dec0 += [DECNR2d(4 * self.nch_ker, 2 * self.nch_ker, kernel_size=4, stride=2, padding=1, norm=self.norm, relu=0.0)]
        self.dec0 += [DECNR2d(2 * self.nch_ker, 1 * self.nch_ker, kernel_size=4, stride=2, padding=1, norm=self.norm, relu=0.0)]
        self.dec0 += [DECNR2d(1 * self.nch_ker, 1 * self.nch_ker, kernel_size=7, stride=1, padding=3, norm=self.norm, relu=0.0)]
        self.dec0 += [nn.Conv2d(1 * self.nch_ker, self.nch_out, kernel_size=3, stride=1, padding=1)]
        self.dec = nn.Sequential(*self.dec0)

    def forward(self, content, style):
        """Translate ``content`` guided by ``style``; returns a tanh image.

        In the non-CBAM statuses the ``style`` tensor is not used at all —
        only the content encoder/decoder path runs.
        """
        content_cs = self.enc1_c(content)
        content_cs = self.enc2_c(content_cs)
        content_cs = self.enc3_c(content_cs)
        content_cs = self.enc4_c(content_cs)

        if self.status == 'ref_unpair_cbam_cat':
            # Spatial attention over the content features.
            cbam_content_cs = self.cbam_s(content_cs)
            sp_content_cs = content_cs + cbam_content_cs

            style_cs = self.enc1_s(style)
            style_cs = self.enc2_s(style_cs)
            style_cs = self.enc3_s(style_cs)
            style_cs = self.enc4_s(style_cs)

            # Channel attention over the style features.
            cbam_style_cs = self.cbam_c(style_cs)
            ch_style_cs = style_cs + cbam_style_cs

            # Fuse plain and attention-refined branches with AdaIN, then mix
            # them through the concatenating residual blocks.
            content_output = self.adaptive_instance_normalization(content_cs, style_cs)
            cbam_content_output = self.adaptive_instance_normalization(sp_content_cs, ch_style_cs)

            content_output = self.res_cat1(content_output, cbam_content_output)
            content_output = self.res_cat2(content_output, cbam_content_output)
            content_output = self.res_cat3(content_output, cbam_content_output)
            content_output = self.res_cat4(content_output, cbam_content_output)
        else:
            content_output = content_cs
            # BUG(fixed): the original computed `self.res1(content_output)` here
            # but assigned the result to a dead variable, so the residual blocks
            # never influenced the output (and never received gradients). The
            # wasted computation is removed; the observable output is unchanged.
            # Re-wire `content_output = self.res1(content_output)` only together
            # with retraining.

        content_output = self.dec(content_output)
        return torch.tanh(content_output)

    def calc_mean_std(self, feat, eps=1e-5):
        """Per-channel spatial mean and std of a NCHW tensor.

        ``eps`` is a small value added to the variance to avoid divide-by-zero.
        """
        size = feat.size()
        assert (len(size) == 4)
        N, C = size[:2]
        feat_var = feat.view(N, C, -1).var(dim=2) + eps
        feat_std = feat_var.sqrt().view(N, C, 1, 1)
        feat_mean = feat.view(N, C, -1).mean(dim=2).view(N, C, 1, 1)
        return feat_mean, feat_std

    def adaptive_instance_normalization(self, content_feat, style_feat):
        """AdaIN: re-normalize content statistics to match style statistics."""
        assert (content_feat.size()[:2] == style_feat.size()[:2])
        size = content_feat.size()
        style_mean, style_std = self.calc_mean_std(style_feat)
        content_mean, content_std = self.calc_mean_std(content_feat)

        normalized_feat = (content_feat - content_mean.expand(size)) / content_std.expand(size)
        return normalized_feat * style_std.expand(size) + style_mean.expand(size)
def define_D(input_nc, ndf, netD, n_layers_D=3, norm='batch', init_type='normal', init_gain=0.02, gpu_ids=[]):
    """Create and initialize a discriminator.

    Args:
        input_nc:   channels of the discriminator input.
        ndf:        filters in the first conv layer.
        netD:       architecture name: 'basic' | 'n_layers' | 'pixel'.
        n_layers_D: conv layer count, used only when netD == 'n_layers'.
        norm:       normalization type resolved via ``get_norm_layer``.
        init_type / init_gain / gpu_ids: forwarded to ``init_net``.

    Returns:
        The initialized discriminator network.

    Raises:
        NotImplementedError: for an unrecognized ``netD`` name.
    """
    # NOTE(review): mutable default gpu_ids=[] is shared across calls; safe
    # only as long as it is never mutated downstream.
    norm_layer = get_norm_layer(norm_type=norm)

    if netD == 'basic':
        # Default 70x70 PatchGAN classifier.
        net = NLayerDiscriminator(input_nc, ndf, n_layers=3, norm_layer=norm_layer)
    elif netD == 'n_layers':
        # PatchGAN with a caller-chosen depth.
        net = NLayerDiscriminator(input_nc, ndf, n_layers_D, norm_layer=norm_layer)
    elif netD == 'pixel':
        # 1x1 per-pixel real/fake classifier.
        net = PixelDiscriminator(input_nc, ndf, norm_layer=norm_layer)
    else:
        raise NotImplementedError('Discriminator model name [%s] is not recognized' % netD)
    return init_net(net, init_type, init_gain, gpu_ids)
class GANLoss(nn.Module):
    """Abstraction over the different GAN objectives.

    Builds target label tensors of the same size as the prediction so callers
    never construct them manually. Supports 'lsgan', 'vanilla' and 'wgangp'.

    Note: do not put a sigmoid on the discriminator output — LSGAN needs raw
    values and the vanilla mode uses ``BCEWithLogitsLoss`` internally.
    """

    def __init__(self, gan_mode, target_real_label=1.0, target_fake_label=0.0):
        """Store the objective type and register the label values as buffers.

        Raises:
            NotImplementedError: for an unsupported ``gan_mode``.
        """
        super(GANLoss, self).__init__()
        # Buffers move with .to(device) but are not trainable parameters.
        self.register_buffer('real_label', torch.tensor(target_real_label))
        self.register_buffer('fake_label', torch.tensor(target_fake_label))
        self.gan_mode = gan_mode
        if gan_mode == 'lsgan':
            self.loss = nn.MSELoss()
        elif gan_mode == 'vanilla':
            self.loss = nn.BCEWithLogitsLoss()
        elif gan_mode in ['wgangp']:
            self.loss = None  # wgangp uses the raw critic mean, no criterion
        else:
            raise NotImplementedError('gan mode %s not implemented' % gan_mode)

    def get_target_tensor(self, prediction, target_is_real):
        """Return a label tensor shaped like ``prediction``."""
        label = self.real_label if target_is_real else self.fake_label
        return label.expand_as(prediction)

    def __call__(self, prediction, target_is_real):
        """Compute the loss for ``prediction`` against the requested target."""
        if self.gan_mode == 'wgangp':
            # Wasserstein critic: maximize real scores, minimize fake ones.
            mean = prediction.mean()
            return -mean if target_is_real else mean
        target = self.get_target_tensor(prediction, target_is_real)
        return self.loss(prediction, target)
def cal_gradient_penalty(netD, real_data, fake_data, device, type='mixed', constant=1.0, lambda_gp=10.0):
    """WGAN-GP gradient penalty.

    Args:
        netD:      discriminator / critic to differentiate through.
        real_data: batch of real samples.
        fake_data: batch of generated samples.
        device:    device for the interpolation coefficients.
        type:      which points to penalize: 'real' | 'fake' | 'mixed'
                   (NOTE(review): the name shadows the builtin, kept for
                   caller compatibility).
        constant:  target gradient norm (1.0 in the WGAN-GP paper).
        lambda_gp: penalty weight; <= 0 disables the penalty.

    Returns:
        (penalty, flattened_gradients), or (0.0, None) when disabled.
    """
    if lambda_gp <= 0.0:
        return 0.0, None

    if type == 'real':
        interpolatesv = real_data
    elif type == 'fake':
        interpolatesv = fake_data
    elif type == 'mixed':
        # Random convex combination of real and fake, one alpha per sample.
        alpha = torch.rand(real_data.shape[0], 1, device=device)
        alpha = alpha.expand(real_data.shape[0], real_data.nelement() // real_data.shape[0]).contiguous().view(*real_data.shape)
        interpolatesv = alpha * real_data + ((1 - alpha) * fake_data)
    else:
        raise NotImplementedError('{} not implemented'.format(type))

    interpolatesv.requires_grad_(True)
    disc_interpolates = netD(interpolatesv)
    grads = torch.autograd.grad(outputs=disc_interpolates, inputs=interpolatesv,
                                grad_outputs=torch.ones(disc_interpolates.size()).to(device),
                                create_graph=True, retain_graph=True, only_inputs=True)
    flat_grads = grads[0].view(real_data.size(0), -1)  # one row per sample
    # Small eps keeps the norm differentiable at zero gradients.
    penalty = (((flat_grads + 1e-16).norm(2, dim=1) - constant) ** 2).mean() * lambda_gp
    return penalty, flat_grads
class NLayerDiscriminator(nn.Module):
    """PatchGAN discriminator: classifies overlapping patches as real/fake."""

    def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d):
        """Build the conv stack.

        Args:
            input_nc:   channels of the input images.
            ndf:        filters in the first conv layer.
            n_layers:   number of stride-2 conv layers after the first.
            norm_layer: normalization layer class or functools.partial.
        """
        super(NLayerDiscriminator, self).__init__()
        # BatchNorm has affine parameters, so bias in the convs is redundant.
        if type(norm_layer) == functools.partial:
            use_bias = norm_layer.func == nn.InstanceNorm2d
        else:
            use_bias = norm_layer == nn.InstanceNorm2d

        kw, padw = 4, 1
        layers = [nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw),
                  nn.LeakyReLU(0.2, True)]
        width = 1
        for n in range(1, n_layers):
            # Double the channel multiplier each stage, capped at 8x.
            prev, width = width, min(2 ** n, 8)
            layers += [
                nn.Conv2d(ndf * prev, ndf * width, kernel_size=kw, stride=2, padding=padw, bias=use_bias),
                norm_layer(ndf * width),
                nn.LeakyReLU(0.2, True),
            ]
        prev, width = width, min(2 ** n_layers, 8)
        layers += [
            nn.Conv2d(ndf * prev, ndf * width, kernel_size=kw, stride=1, padding=padw, bias=use_bias),
            norm_layer(ndf * width),
            nn.LeakyReLU(0.2, True),
            # Final 1-channel prediction map.
            nn.Conv2d(ndf * width, 1, kernel_size=kw, stride=1, padding=padw),
        ]
        self.model = nn.Sequential(*layers)

    def forward(self, input):
        """Standard forward."""
        return self.model(input)


class PixelDiscriminator(nn.Module):
    """1x1 PatchGAN (pixelGAN): classifies every pixel independently."""

    def __init__(self, input_nc, ndf=64, norm_layer=nn.BatchNorm2d):
        """Build the three 1x1 conv layers.

        Args:
            input_nc:   channels of the input images.
            ndf:        filters in the hidden conv layer.
            norm_layer: normalization layer class or functools.partial.
        """
        super(PixelDiscriminator, self).__init__()
        # BatchNorm has affine parameters, so bias in the convs is redundant.
        if type(norm_layer) == functools.partial:
            use_bias = norm_layer.func == nn.InstanceNorm2d
        else:
            use_bias = norm_layer == nn.InstanceNorm2d

        self.net = nn.Sequential(
            nn.Conv2d(input_nc, ndf, kernel_size=1, stride=1, padding=0),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, ndf * 2, kernel_size=1, stride=1, padding=0, bias=use_bias),
            norm_layer(ndf * 2),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf * 2, 1, kernel_size=1, stride=1, padding=0, bias=use_bias),
        )

    def forward(self, input):
        """Standard forward."""
        return self.net(input)
class CBAM(nn.Module):
    """Convolutional Block Attention Module wrapper.

    Depending on ``cbam_status`` applies full CBAM ("cbam"), only spatial
    attention ("spatial"), or only channel attention ("channel").
    """

    def __init__(self, n_channels_in, reduction_ratio, kernel_size, cbam_status):
        super(CBAM, self).__init__()
        self.n_channels_in = n_channels_in
        self.reduction_ratio = reduction_ratio
        self.kernel_size = kernel_size
        self.channel_attention = ChannelAttention_nopara(n_channels_in, reduction_ratio)
        self.spatial_attention = SpatialAttention_nopara(kernel_size)
        self.status = cbam_status

    def forward(self, x):
        # Full CBAM (unused in this version): channel then spatial attention.
        if self.status == "cbam":
            refined = self.channel_attention(x) * x
            out = self.spatial_attention(refined) * refined
        if self.status == "spatial":
            out = self.spatial_attention(x) * x
        if self.status == "channel":
            out = self.channel_attention(x) * x
        # NOTE(review): an unrecognized status leaves `out` unbound and raises
        # NameError here — preserved from the original implementation.
        return out


class SpatialAttention_nopara(nn.Module):
    """Spatial attention: a 2-channel (max/avg over channels) conv map."""

    def __init__(self, kernel_size):
        super(SpatialAttention_nopara, self).__init__()
        self.kernel_size = kernel_size
        assert kernel_size % 2 == 1, "Odd kernel size required"
        # Same-size output thanks to symmetric padding.
        self.conv = nn.Conv2d(in_channels=2, out_channels=1, kernel_size=kernel_size, padding=int((kernel_size - 1) / 2))

    def forward(self, x):
        # Stack channel-wise max and average maps, convolve to one map,
        # broadcast back over channels, and squash to (0, 1).
        pooled = torch.cat([self.agg_channel(x, "max"), self.agg_channel(x, "avg")], dim=1)
        attention = self.conv(pooled)
        attention = attention.repeat(1, x.size()[1], 1, 1)
        return torch.sigmoid(attention)

    def agg_channel(self, x, pool="max"):
        """Collapse the channel dimension with max or average pooling."""
        b, c, h, w = x.size()
        flat = x.view(b, c, h * w).permute(0, 2, 1)  # (b, h*w, c)
        if pool == "max":
            flat = F.max_pool1d(flat, c)
        elif pool == "avg":
            flat = F.avg_pool1d(flat, c)
        return flat.permute(0, 2, 1).view(b, 1, h, w)
class ChannelAttention_nopara(nn.Module):
    """Channel attention: shared MLP over global max- and avg-pooled features.

    Produces a per-channel sigmoid weight of shape (N, C, 1, 1).
    """

    def __init__(self, n_channels_in, reduction_ratio):
        super(ChannelAttention_nopara, self).__init__()
        self.n_channels_in = n_channels_in
        self.reduction_ratio = reduction_ratio
        # Bottleneck width: C / reduction_ratio, truncated to int.
        self.middle_layer_size = int(self.n_channels_in / float(self.reduction_ratio))
        self.bottleneck = nn.Sequential(
            nn.Linear(self.n_channels_in, self.middle_layer_size),
            nn.ReLU(),
            nn.Linear(self.middle_layer_size, self.n_channels_in)
        )

    def forward(self, x):
        # Global pooling over the full spatial extent.
        kernel = (x.size()[2], x.size()[3])
        avg_feat = F.avg_pool2d(x, kernel).view(x.size()[0], -1)
        max_feat = F.max_pool2d(x, kernel).view(x.size()[0], -1)
        # Shared bottleneck applied to both pooled descriptors, then summed.
        combined = self.bottleneck(avg_feat) + self.bottleneck(max_feat)
        weights = torch.sigmoid(combined)
        return weights.unsqueeze(2).unsqueeze(3)
class VGGPerceptualLoss(torch.nn.Module):
    """Perceptual (feature-reconstruction) loss on VGG16 activations.

    Compares input and target in ImageNet-normalized VGG16 feature space:
    L1 on raw activations for indices in ``feature_layers`` and L1 on Gram
    matrices for indices in ``style_layers``.
    """

    def __init__(self, resize=True):
        """Load the frozen VGG16 feature blocks.

        Args:
            resize: bilinearly resize inputs to 224x224 before VGG.
        """
        super(VGGPerceptualLoss, self).__init__()
        # Load VGG16 once and slice it (the original loaded it four times).
        features = torchvision.models.vgg16(pretrained=True).features
        blocks = [
            features[:4].eval(),
            features[4:9].eval(),
            features[9:16].eval(),
            features[16:23].eval(),
        ]
        for bl in blocks:
            # BUG(fixed): the original iterated the sub-MODULES of each block
            # and set a meaningless `requires_grad` attribute on them, so the
            # VGG weights were never actually frozen. Freeze the parameters.
            for p in bl.parameters():
                p.requires_grad = False
        self.blocks = torch.nn.ModuleList(blocks)
        self.transform = torch.nn.functional.interpolate
        # NOTE(review): stored as Parameters (not buffers) to keep state_dict
        # keys compatible with checkpoints saved by the original code.
        self.mean = torch.nn.Parameter(torch.tensor([0.485, 0.456, 0.406]).view(1, 3, 1, 1))
        self.std = torch.nn.Parameter(torch.tensor([0.229, 0.224, 0.225]).view(1, 3, 1, 1))
        self.resize = resize

    def forward(self, input, target, feature_layers=(0, 1, 2, 3), style_layers=()):
        """Return the summed perceptual (and optional style) loss.

        Args:
            input / target: NCHW tensors in [0, 1]; 1-channel inputs are
                tiled to 3 channels.
            feature_layers: block indices contributing an activation L1 term.
            style_layers:   block indices contributing a Gram-matrix L1 term
                (empty by default, so no style term unless requested).
        """
        if input.shape[1] != 3:
            input = input.repeat(1, 3, 1, 1)
            target = target.repeat(1, 3, 1, 1)
        # ImageNet normalization expected by the pretrained VGG.
        input = (input - self.mean) / self.std
        target = (target - self.mean) / self.std
        if self.resize:
            input = self.transform(input, mode='bilinear', size=(224, 224), align_corners=False)
            target = self.transform(target, mode='bilinear', size=(224, 224), align_corners=False)
        loss = 0.0
        x = input
        y = target
        for i, block in enumerate(self.blocks):
            x = block(x)
            y = block(y)
            if i in feature_layers:
                loss += torch.nn.functional.l1_loss(x, y)
            if i in style_layers:
                act_x = x.reshape(x.shape[0], x.shape[1], -1)
                act_y = y.reshape(y.shape[0], y.shape[1], -1)
                gram_x = act_x @ act_x.permute(0, 2, 1)
                gram_y = act_y @ act_y.permute(0, 2, 1)
                loss += torch.nn.functional.l1_loss(gram_x, gram_y)
        return loss


class VGGstyleLoss(VGGPerceptualLoss):
    """Style-loss alias of :class:`VGGPerceptualLoss`.

    The original class was a line-for-line copy of ``VGGPerceptualLoss``;
    it is kept as a separate name for callers but now reuses the same
    implementation. Pass ``style_layers`` to actually get Gram-matrix terms.
    """
    pass
class TemplateModel(BaseModel):
    """Minimal example model: one generator trained with a weighted L1 loss.

    Serves as a template showing the hooks a model must implement:
    ``modify_commandline_options``, ``__init__``, ``set_input``, ``forward``
    and ``optimize_parameters``.
    """

    @staticmethod
    def modify_commandline_options(parser, is_train=True):
        """Add model-specific options and tweak defaults.

        Args:
            parser:   the option parser.
            is_train: training phase flag; training-only options are added
                      when True.

        Returns:
            The modified parser.
        """
        # This model expects paired (aligned) data by default.
        parser.set_defaults(dataset_mode='aligned')
        if is_train:
            parser.add_argument('--lambda_regression', type=float, default=1.0, help='weight for the regression loss')
        return parser

    def __init__(self, opt):
        """Initialize: losses/visuals bookkeeping, networks, and optimizer.

        Args:
            opt: training/test options.
        """
        BaseModel.__init__(self, opt)
        # Names used by base_model to report losses and visuals.
        # NOTE(review): base models typically expect loss names WITHOUT the
        # 'loss_' prefix (attribute lookup is 'loss_' + name) — confirm
        # against BaseModel.get_current_losses.
        self.loss_names = ['loss_G']
        self.visual_names = ['data_A', 'data_B', 'output']
        # Networks saved/loaded by base_model.save_networks/load_networks.
        self.model_names = ['G']
        self.netG = networks.define_G(opt.input_nc, opt.output_nc, opt.ngf, opt.netG, gpu_ids=self.gpu_ids)
        if self.isTrain:  # criterion and optimizer are only needed for training
            self.criterionLoss = torch.nn.L1Loss()
            self.optimizer = torch.optim.Adam(self.netG.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999))
            self.optimizers = [self.optimizer]

    def set_input(self, input):
        """Unpack a dataloader batch and move tensors to the right device.

        Args:
            input: dict with the data and its metadata.
        """
        forward_direction = self.opt.direction == 'AtoB'  # swap A/B if needed
        self.data_A = input['A' if forward_direction else 'B'].to(self.device)
        self.data_B = input['B' if forward_direction else 'A'].to(self.device)
        self.image_paths = input['A_paths' if forward_direction else 'B_paths']

    def forward(self):
        """Run the generator on data_A."""
        self.output = self.netG(self.data_A)

    def backward(self):
        """Compute the weighted L1 loss and its gradients."""
        self.loss_G = self.criterionLoss(self.output, self.data_B) * self.opt.lambda_regression
        self.loss_G.backward()

    def optimize_parameters(self):
        """One training step: forward, zero grads, backward, optimizer step."""
        self.forward()
        self.optimizer.zero_grad()
        self.backward()
        self.optimizer.step()
class TestModel(BaseModel):
    """Generate CycleGAN results for one direction only (inference model).

    Forces ``--dataset_mode single`` so only one image collection is loaded.
    See the test instructions for details.
    """

    @staticmethod
    def modify_commandline_options(parser, is_train=True):
        """Add test-only options; refuse to be used for training.

        Returns:
            The modified parser.
        """
        assert not is_train, 'TestModel cannot be used during training time'
        parser.set_defaults(dataset_mode='single')
        parser.add_argument('--model_suffix', type=str, default='', help='In checkpoints_dir, [epoch]_net_G[model_suffix].pth will be loaded as the generator.')
        return parser

    def __init__(self, opt):
        """Build the single generator used for inference."""
        assert (not opt.isTrain)
        BaseModel.__init__(self, opt)
        self.loss_names = []              # no losses at test time
        self.visual_names = ['real', 'fake']
        self.model_names = ['G' + opt.model_suffix]  # only the generator is loaded
        self.netG = networks.define_G(opt.input_nc, opt.output_nc, opt.ngf, opt.netG,
                                      opt.norm, not opt.no_dropout, opt.init_type, opt.init_gain, self.gpu_ids)
        # Expose the net under netG_[suffix] so checkpoint loading finds it.
        setattr(self, 'netG' + opt.model_suffix, self.netG)

    def set_input(self, input):
        """Store the single-domain input batch and its paths."""
        self.real = input['A'].to(self.device)
        self.image_paths = input['A_paths']

    def forward(self):
        """Run forward pass: fake = G(real)."""
        self.fake = self.netG(self.real)

    def optimize_parameters(self):
        """No optimization for the test model."""
        pass
class TripletModel(BaseModel):
    """Trains a triplet-embedding generator with a TripletMarginLoss."""

    @staticmethod
    def modify_commandline_options(parser, is_train=True):
        """Set triplet-specific defaults and training-only options."""
        parser.set_defaults(norm='batch', netG='triplet', dataset_mode='triplet')
        if is_train:
            parser.set_defaults(pool_size=0, gan_mode='vanilla')
            parser.add_argument('--lambda_L1', type=float, default=100.0, help='weight for L1 loss')
        return parser

    def __init__(self, opt):
        """Create the embedding network and (for training) its criteria."""
        BaseModel.__init__(self, opt)
        self.loss_names = ['G_triplet']
        self.visual_names = ['x', 'y']
        # The original set ['G'] in both the train and test branches.
        self.model_names = ['G']
        self.netG = networks.define_G(1, 1, opt.ngf, opt.netG, opt.norm,
                                      not opt.no_dropout, opt.init_type, opt.init_gain, self.gpu_ids)

        if self.isTrain:
            # Image buffers holding previously generated samples.
            self.fake_A_pool = ImagePool(opt.pool_size)
            self.fake_B_pool = ImagePool(opt.pool_size)

            self.criterionGAN = networks.GANLoss(opt.gan_mode).to(self.device)
            self.criterionL1 = torch.nn.L1Loss()

            self.triplet = torch.nn.TripletMarginLoss(margin=3.0)
            self.optimizer_G = torch.optim.Adam(self.netG.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999))
            self.optimizers.append(self.optimizer_G)

    def set_input(self, input):
        """Unpack anchor/positive/negative images from the dataloader batch."""
        forward_direction = self.opt.direction == 'AtoB'
        self.real_A = input['A' if forward_direction else 'B'].to(self.device)
        self.real_B = input['B' if forward_direction else 'A'].to(self.device)
        self.real_C = input['C'].to(self.device)
        self.image_paths = input['A_paths' if forward_direction else 'B_paths']

    def forward(self):
        """Embed the three inputs jointly: (x, y, z) = G(A, B, C)."""
        self.x, self.y, self.z = self.netG(self.real_A, self.real_B, self.real_C)

    def backward_G(self):
        """Compute the triplet loss and its gradients."""
        self.loss_G_triplet_1 = self.triplet(self.x, self.y, self.z)
        self.loss_G_triplet = self.loss_G_triplet_1
        self.loss_G = self.loss_G_triplet
        self.loss_G.backward()

    def optimize_parameters(self):
        """One optimizer step for G.

        NOTE(review): unlike the other models, this does not call
        self.forward() first — the caller appears responsible for that;
        confirm against the training loop.
        """
        self.optimizer_G.zero_grad()
        self.backward_G()
        self.optimizer_G.step()
class UnpairedModel(BaseModel):
    """Unpaired image-to-sketch model.

    G_A translates a photo (guided by a style reference) into a sketch;
    G_B reconstructs the photo from the sketch. Training combines a GAN
    term, a contrastive-feature L1 term (styletps), a perceptual
    reconstruction term, and an HED edge-consistency term.
    """

    @staticmethod
    def modify_commandline_options(parser, is_train=True):
        """Set unpaired-training defaults and training-only options."""
        parser.set_defaults(norm='batch', netG='ref_unpair_cbam_cat', netG2='ref_unpair_recon', dataset_mode='unaligned')
        if is_train:
            parser.set_defaults(pool_size=0, gan_mode='vanilla')
            parser.add_argument('--lambda_L1', type=float, default=100.0, help='weight for L1 loss')
        return parser

    def __init__(self, opt):
        """Create generators, and (for training) discriminators, criteria and optimizers."""
        BaseModel.__init__(self, opt)
        # Names printed by the training scripts via get_current_losses/visuals.
        self.loss_names = ['G_GAN', 'G_L1_1', 'G_Rec', 'G_line', 'D_real', 'D_fake']
        self.visual_names = ['real_A', 'content_output', 'real_B']

        if self.isTrain:
            self.model_names = ['G_A', 'G_B', 'D']
        else:  # at test time only the generators are loaded
            self.model_names = ['G_A', 'G_B']

        self.netG_A = networks.define_G(opt.input_nc, opt.output_nc, opt.ngf, opt.netG, opt.norm,
                                        not opt.no_dropout, opt.init_type, opt.init_gain, self.gpu_ids)
        self.netG_B = networks.define_G(opt.input_nc, opt.output_nc, opt.ngf, opt.netG2, opt.norm,
                                        not opt.no_dropout, opt.init_type, opt.init_gain, self.gpu_ids)

        if self.isTrain:
            # Single-channel discriminator on the sketch output.
            self.netD = networks.define_D(1, opt.ndf, opt.netD,
                                          opt.n_layers_D, opt.norm, opt.init_type, opt.init_gain, self.gpu_ids)
            # Pretrained auxiliaries: contrastive feature extractor and HED edges.
            self.styletps = networks.define_styletps(init_weights_='./checkpoints/contrastive_pretrained.pth', gpu_ids_=self.gpu_ids, shape=False)
            self.HED = networks.define_HED(init_weights_='./checkpoints/network-bsds500.pytorch', gpu_ids_=self.gpu_ids)

        if self.isTrain:
            # NOTE(review): netD_A / netD_B are created but never used below.
            self.netD_A = networks.define_D(opt.output_nc, opt.ndf, opt.netD,
                                            opt.n_layers_D, opt.norm, opt.init_type, opt.init_gain, self.gpu_ids)
            self.netD_B = networks.define_D(opt.input_nc, opt.ndf, opt.netD,
                                            opt.n_layers_D, opt.norm, opt.init_type, opt.init_gain, self.gpu_ids)

        if self.isTrain:
            # Buffers of previously generated images for the discriminator.
            self.fake_A_pool = ImagePool(opt.pool_size)
            self.fake_B_pool = ImagePool(opt.pool_size)
            self.criterionGAN = networks.GANLoss(opt.gan_mode).to(self.device)
            self.criterionL1_1 = torch.nn.L1Loss()
            self.criterionL1_2 = torch.nn.L1Loss()
            self.criterionL1_3 = torch.nn.L1Loss()
            self.per_loss_1 = VGGPerceptualLoss().to(self.device)
            self.per_loss_2 = VGGPerceptualLoss().to(self.device)
            self.per_loss_3 = VGGPerceptualLoss().to(self.device)

            self.optimizer_GA = torch.optim.Adam(self.netG_A.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999))
            self.optimizer_GB = torch.optim.Adam(self.netG_B.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999))
            self.optimizer_D = torch.optim.Adam(self.netD.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999))
            self.optimizers.append(self.optimizer_GA)
            self.optimizers.append(self.optimizer_GB)
            self.optimizers.append(self.optimizer_D)

    def set_input(self, input):
        """Unpack photo/style pair from the dataloader; direction may swap A/B."""
        forward_direction = self.opt.direction == 'AtoB'
        self.real_A = input['A' if forward_direction else 'B'].to(self.device)
        self.real_B = input['B' if forward_direction else 'A'].to(self.device)

    def forward(self):
        """Sketch the photo, then reconstruct the photo from the sketch."""
        self.content_output = self.netG_A(self.real_A, self.real_B)
        self.rec_output = self.netG_B(self.content_output, self.content_output)

    def update_process(self, epoch, total_epoch):
        """Record epoch progress used to decay the reconstruction weights."""
        self.epoch_count = epoch
        self.epoch_count_total = total_epoch

    def backward_D(self):
        """Discriminator loss on real style images vs. detached sketches."""
        pred_real = self.netD(self.real_B)
        self.loss_D_real = self.criterionGAN(pred_real, True)
        pred_fake = self.netD(self.content_output.detach())
        self.loss_D_fake = self.criterionGAN(pred_fake, False)
        combined = (self.loss_D_real + self.loss_D_fake) * 0.5
        combined.backward()
        return combined

    def backward_G(self):
        """Generator loss: GAN + contrastive-feature L1 + decayed perceptual terms."""
        pred_fake = self.netD(self.content_output)
        self.loss_G_GAN = self.criterionGAN(pred_fake, True)

        self.content_output_line = self.HED(self.real_A)
        self.rec_output_line = self.HED(self.rec_output)
        self.t1, self.t2, _ = self.styletps(self.content_output, self.real_B, self.real_B)

        # Linearly decays from 5 to 0.5 over the course of training.
        decay_lambda = 5 - ((self.epoch_count * 4.5) / self.epoch_count_total)
        self.loss_G_L1_1 = self.criterionL1_1(self.t1, self.t2) * 10
        self.loss_G_Rec = self.per_loss_2(self.real_A, self.rec_output) * decay_lambda
        self.loss_G_line = self.per_loss_3(self.content_output_line, self.rec_output_line) * decay_lambda

        self.loss_G = self.loss_G_GAN + self.loss_G_L1_1 + self.loss_G_Rec + self.loss_G_line
        self.loss_G.backward()

    def optimize_parameters(self):
        """One training iteration: update D, then update both generators."""
        self.forward()
        # D step: enable its gradients, backprop, step.
        self.set_requires_grad(self.netD, True)
        self.optimizer_D.zero_grad()
        self.backward_D()
        self.optimizer_D.step()
        # G step: freeze D, backprop through both generators, step.
        self.set_requires_grad(self.netD, False)
        self.optimizer_GA.zero_grad()
        self.optimizer_GB.zero_grad()
        self.backward_G()
        self.optimizer_GA.step()
        self.optimizer_GB.step()
class Config:
    """Static option container replacing the CycleGAN command-line parser.

    Field groups mirror the original CLI flags; attributes commented with a
    flag name correspond to boolean switches of the upstream parser.
    """

    def __init__(self):
        # --- basic options ---
        self.dataroot = "service/image2sketch/datasets/ref_unpair"
        self.name = 'semi_unpair'
        self.gpu_ids = [0]
        self.checkpoints_dir = 'service/image2sketch/checkpoints/'
        # --- model options ---
        self.model = 'unpaired'
        self.input_nc = 3
        self.output_nc = 3
        self.ngf = 64
        self.ndf = 64
        self.netD = 'basic'
        self.netG = 'ref_unpair_cbam_cat'
        self.netG2 = 'ref_unpair_recon'
        self.n_layers_D = 3
        self.norm = 'instance'
        self.init_type = 'normal'
        self.init_gain = 0.02
        self.no_dropout = False  # corresponds to `--no_dropout`
        # --- dataset options ---
        self.dataset_mode = 'single'
        self.direction = 'AtoB'
        self.serial_batches = True  # corresponds to `--serial_batches`
        self.num_threads = 4
        self.batch_size = 4
        self.load_size = 512
        self.crop_size = 512
        self.max_dataset_size = float("inf")
        self.preprocess = 'resize_and_crop'
        self.no_flip = False  # corresponds to `--no_flip`
        self.display_winsize = 256
        # --- extra options ---
        self.epoch = '100'
        self.load_iter = 0
        self.verbose = False  # corresponds to `--verbose`
        self.suffix = ''
        self.isTrain = False
        self.results_dir = 'service/image2sketch/results'
        self.aspect_ratio = 1.0
        self.phase = 'test'
        self.eval = False
        self.num_test = 1000
        # NOTE(review): `morm` looks like a typo for `norm` (set to
        # 'instance' above); kept verbatim to preserve behavior.
        self.morm = 'batch'
= 'service/image2sketch/results' + self.aspect_ratio = 1.0 + self.phase = 'test' + self.eval = False + self.num_test = 1000 + self.morm = 'batch' diff --git a/app/service/image2sketch/server.py b/app/service/image2sketch/server.py new file mode 100644 index 0000000..accd4b8 --- /dev/null +++ b/app/service/image2sketch/server.py @@ -0,0 +1,79 @@ +import logging + +import cv2 +import numpy as np +import torch +import torchvision.transforms as transforms +from PIL import Image + +from app.schemas.image2sketch import Image2SketchModel +from app.service.image2sketch.infer import tensor2im +from app.service.image2sketch.models import create_model +from app.service.image2sketch.opt import Config +from app.service.utils.oss_client import oss_get_image, oss_upload_image + +logger = logging.getLogger() + + +def tensor2im(input_image, imtype=np.uint8): + if not isinstance(input_image, np.ndarray): + if isinstance(input_image, torch.Tensor): # get the data from a variable + image_tensor = input_image.data + else: + return input_image + image_numpy = image_tensor[0].cpu().float().numpy() # convert it into a numpy array + if image_numpy.shape[0] == 1: # grayscale to RGB + image_numpy = np.tile(image_numpy, (3, 1, 1)) + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 # post-processing: tranpose and scaling + else: # if it is a numpy array, do nothing + image_numpy = input_image + return image_numpy.astype(imtype) + + +class Image2SketchServer: + def __init__(self, request_data): + self.image_url = request_data.image_url + self.sketch_bucket = request_data.sketch_bucket + self.sketch_name = request_data.sketch_name + self.opt = Config() + self.opt.num_threads = 0 # test code only supports num_threads = 0 + self.opt.batch_size = 1 # test code only supports batch_size = 1 + self.opt.serial_batches = True # disable data shuffling; comment this line if results on randomly chosen images are needed. 
+ self.opt.no_flip = True # no flip; comment this line if results on flipped images are needed. + self.opt.display_id = -1 # no visdom display; the test code saves the results to a HTML file. + self.data = {} + device = torch.device("cuda:0") + self.model = create_model(self.opt) + self.model.setup(self.opt) + transform_list = [transforms.ToTensor(), transforms.Normalize([0.5], [0.5])] + transform = transforms.Compose(transform_list) + style_img = Image.open(r"E:\workspace\trinity_client_aida\app\service\image2sketch\datasets\ref_unpair\testC\20180422151845_stEe4.jpeg").convert('L') + style_img = transform(style_img) + self.data['B'] = style_img + self.data['B'] = self.data['B'].unsqueeze(0).to(device) + A, self.width, self.height = self.get_image(self.image_url) + self.data['A'] = transform(A) + self.data['A'] = self.data['A'].unsqueeze(0).to(device) + + def get_result(self): + self.model.set_input(self.data) + self.model.test() # run inference + visuals = self.model.get_current_visuals() # get image results + image_numpy = tensor2im(visuals['content_output'].cpu()) + image_bytes = cv2.imencode(".jpg", image_numpy)[1].tobytes() + req = oss_upload_image(bucket=self.sketch_bucket, object_name=self.sketch_name, image_bytes=image_bytes) + return f"{req.bucket_name}/{req.object_name}" + + def get_image(self, image_url): + image = oss_get_image(bucket=image_url.split('/')[0], object_name=image_url[image_url.find('/') + 1:], data_type="PIL") + image = image.convert('RGB') + width = image.size[0] + height = image.size[1] + return image, width, height + + +if __name__ == '__main__': + data = Image2SketchModel(image_url="test/real_Dress_790b2c6e370644e134df7abdfe7e54d9.jpg_Img.jpg", sketch_bucket="test", sketch_name="test123.jpg") + server = Image2SketchServer(data) + sketch_url = server.get_result() + print(sketch_url) diff --git a/app/service/image2sketch/util/__init__.py b/app/service/image2sketch/util/__init__.py new file mode 100644 index 0000000..ae36f63 --- /dev/null 
+++ b/app/service/image2sketch/util/__init__.py @@ -0,0 +1 @@ +"""This package includes a miscellaneous collection of useful helper functions.""" diff --git a/app/service/image2sketch/util/get_data.py b/app/service/image2sketch/util/get_data.py new file mode 100644 index 0000000..97edc3c --- /dev/null +++ b/app/service/image2sketch/util/get_data.py @@ -0,0 +1,110 @@ +from __future__ import print_function +import os +import tarfile +import requests +from warnings import warn +from zipfile import ZipFile +from bs4 import BeautifulSoup +from os.path import abspath, isdir, join, basename + + +class GetData(object): + """A Python script for downloading CycleGAN or pix2pix datasets. + + Parameters: + technique (str) -- One of: 'cyclegan' or 'pix2pix'. + verbose (bool) -- If True, print additional information. + + Examples: + >>> from util.get_data import GetData + >>> gd = GetData(technique='cyclegan') + >>> new_data_path = gd.get(save_path='./datasets') # options will be displayed. + + Alternatively, You can use bash scripts: 'scripts/download_pix2pix_model.sh' + and 'scripts/download_cyclegan_model.sh'. 
+ """ + + def __init__(self, technique='cyclegan', verbose=True): + url_dict = { + 'pix2pix': 'http://efrosgans.eecs.berkeley.edu/pix2pix/datasets/', + 'cyclegan': 'https://people.eecs.berkeley.edu/~taesung_park/CycleGAN/datasets' + } + self.url = url_dict.get(technique.lower()) + self._verbose = verbose + + def _print(self, text): + if self._verbose: + print(text) + + @staticmethod + def _get_options(r): + soup = BeautifulSoup(r.text, 'lxml') + options = [h.text for h in soup.find_all('a', href=True) + if h.text.endswith(('.zip', 'tar.gz'))] + return options + + def _present_options(self): + r = requests.get(self.url) + options = self._get_options(r) + print('Options:\n') + for i, o in enumerate(options): + print("{0}: {1}".format(i, o)) + choice = input("\nPlease enter the number of the " + "dataset above you wish to download:") + return options[int(choice)] + + def _download_data(self, dataset_url, save_path): + if not isdir(save_path): + os.makedirs(save_path) + + base = basename(dataset_url) + temp_save_path = join(save_path, base) + + with open(temp_save_path, "wb") as f: + r = requests.get(dataset_url) + f.write(r.content) + + if base.endswith('.tar.gz'): + obj = tarfile.open(temp_save_path) + elif base.endswith('.zip'): + obj = ZipFile(temp_save_path, 'r') + else: + raise ValueError("Unknown File Type: {0}.".format(base)) + + self._print("Unpacking Data...") + obj.extractall(save_path) + obj.close() + os.remove(temp_save_path) + + def get(self, save_path, dataset=None): + """ + + Download a dataset. + + Parameters: + save_path (str) -- A directory to save the data to. + dataset (str) -- (optional). A specific dataset to download. + Note: this must include the file extension. + If None, options will be presented for you + to choose from. + + Returns: + save_path_full (str) -- the absolute path to the downloaded data. 
+ + """ + if dataset is None: + selected_dataset = self._present_options() + else: + selected_dataset = dataset + + save_path_full = join(save_path, selected_dataset.split('.')[0]) + + if isdir(save_path_full): + warn("\n'{0}' already exists. Voiding Download.".format( + save_path_full)) + else: + self._print('Downloading Data...') + url = "{0}/{1}".format(self.url, selected_dataset) + self._download_data(url, save_path=save_path) + + return abspath(save_path_full) diff --git a/app/service/image2sketch/util/html.py b/app/service/image2sketch/util/html.py new file mode 100644 index 0000000..cc3262a --- /dev/null +++ b/app/service/image2sketch/util/html.py @@ -0,0 +1,86 @@ +import dominate +from dominate.tags import meta, h3, table, tr, td, p, a, img, br +import os + + +class HTML: + """This HTML class allows us to save images and write texts into a single HTML file. + + It consists of functions such as (add a text header to the HTML file), + (add a row of images to the HTML file), and (save the HTML to the disk). + It is based on Python library 'dominate', a Python library for creating and manipulating HTML documents using a DOM API. + """ + + def __init__(self, web_dir, title, refresh=0): + """Initialize the HTML classes + + Parameters: + web_dir (str) -- a directory that stores the webpage. 
HTML file will be created at /index.html; images will be saved at 0: + with self.doc.head: + meta(http_equiv="refresh", content=str(refresh)) + + def get_image_dir(self): + """Return the directory that stores images""" + return self.img_dir + + def add_header(self, text): + """Insert a header to the HTML file + + Parameters: + text (str) -- the header text + """ + with self.doc: + h3(text) + + def add_images(self, ims, txts, links, width=400): + """add images to the HTML file + + Parameters: + ims (str list) -- a list of image paths + txts (str list) -- a list of image names shown on the website + links (str list) -- a list of hyperref links; when you click an image, it will redirect you to a new page + """ + self.t = table(border=1, style="table-layout: fixed;") # Insert a table + self.doc.add(self.t) + with self.t: + with tr(): + for im, txt, link in zip(ims, txts, links): + with td(style="word-wrap: break-word;", halign="center", valign="top"): + with p(): + with a(href=os.path.join('images', link)): + img(style="width:%dpx" % width, src=os.path.join('images', im)) + br() + p(txt) + + def save(self): + """save the current content to the HMTL file""" + html_file = '%s/index.html' % self.web_dir + f = open(html_file, 'wt') + f.write(self.doc.render()) + f.close() + + +if __name__ == '__main__': # we show an example usage here. + html = HTML('web/', 'test_html') + html.add_header('hello world') + + ims, txts, links = [], [], [] + for n in range(4): + ims.append('image_%d.png' % n) + txts.append('text_%d' % n) + links.append('image_%d.png' % n) + html.add_images(ims, txts, links) + html.save() diff --git a/app/service/image2sketch/util/image_pool.py b/app/service/image2sketch/util/image_pool.py new file mode 100644 index 0000000..6d086f8 --- /dev/null +++ b/app/service/image2sketch/util/image_pool.py @@ -0,0 +1,54 @@ +import random +import torch + + +class ImagePool(): + """This class implements an image buffer that stores previously generated images. 
+ + This buffer enables us to update discriminators using a history of generated images + rather than the ones produced by the latest generators. + """ + + def __init__(self, pool_size): + """Initialize the ImagePool class + + Parameters: + pool_size (int) -- the size of image buffer, if pool_size=0, no buffer will be created + """ + self.pool_size = pool_size + if self.pool_size > 0: # create an empty pool + self.num_imgs = 0 + self.images = [] + + def query(self, images): + """Return an image from the pool. + + Parameters: + images: the latest generated images from the generator + + Returns images from the buffer. + + By 50/100, the buffer will return input images. + By 50/100, the buffer will return images previously stored in the buffer, + and insert the current images to the buffer. + """ + if self.pool_size == 0: # if the buffer size is 0, do nothing + return images + return_images = [] + for image in images: + image = torch.unsqueeze(image.data, 0) + if self.num_imgs < self.pool_size: # if the buffer is not full; keep inserting current images to the buffer + self.num_imgs = self.num_imgs + 1 + self.images.append(image) + return_images.append(image) + else: + p = random.uniform(0, 1) + if p > 0.5: # by 50% chance, the buffer will return a previously stored image, and insert the current image into the buffer + random_id = random.randint(0, self.pool_size - 1) # randint is inclusive + tmp = self.images[random_id].clone() + self.images[random_id] = image + return_images.append(tmp) + else: # by another 50% chance, the buffer will return the current image + return_images.append(image) + return_images = torch.cat(return_images, 0) # collect all the images and return + return return_images diff --git a/app/service/image2sketch/util/util.py b/app/service/image2sketch/util/util.py new file mode 100644 index 0000000..b050c13 --- /dev/null +++ b/app/service/image2sketch/util/util.py @@ -0,0 +1,103 @@ +"""This module contains simple helper functions """ +from __future__ 
import print_function +import torch +import numpy as np +from PIL import Image +import os + + +def tensor2im(input_image, imtype=np.uint8): + """"Converts a Tensor array into a numpy image array. + + Parameters: + input_image (tensor) -- the input image tensor array + imtype (type) -- the desired type of the converted numpy array + """ + if not isinstance(input_image, np.ndarray): + if isinstance(input_image, torch.Tensor): # get the data from a variable + image_tensor = input_image.data + else: + return input_image + image_numpy = image_tensor[0].cpu().float().numpy() # convert it into a numpy array + if image_numpy.shape[0] == 1: # grayscale to RGB + image_numpy = np.tile(image_numpy, (3, 1, 1)) + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 # post-processing: tranpose and scaling + else: # if it is a numpy array, do nothing + image_numpy = input_image + return image_numpy.astype(imtype) + + +def diagnose_network(net, name='network'): + """Calculate and print the mean of average absolute(gradients) + + Parameters: + net (torch network) -- Torch network + name (str) -- the name of the network + """ + mean = 0.0 + count = 0 + for param in net.parameters(): + if param.grad is not None: + mean += torch.mean(torch.abs(param.grad.data)) + count += 1 + if count > 0: + mean = mean / count + print(name) + print(mean) + + +def save_image(image_numpy, image_path, aspect_ratio=1.0): + """Save a numpy image to the disk + + Parameters: + image_numpy (numpy array) -- input numpy array + image_path (str) -- the path of the image + """ + + image_pil = Image.fromarray(image_numpy) + h, w, _ = image_numpy.shape + + if aspect_ratio > 1.0: + image_pil = image_pil.resize((h, int(w * aspect_ratio)), Image.BICUBIC) + if aspect_ratio < 1.0: + image_pil = image_pil.resize((int(h / aspect_ratio), w), Image.BICUBIC) + image_pil.save(image_path) + + +def print_numpy(x, val=True, shp=False): + """Print the mean, min, max, median, std, and size of a numpy array + + 
Parameters: + val (bool) -- if print the values of the numpy array + shp (bool) -- if print the shape of the numpy array + """ + x = x.astype(np.float64) + if shp: + print('shape,', x.shape) + if val: + x = x.flatten() + print('mean = %3.3f, min = %3.3f, max = %3.3f, median = %3.3f, std=%3.3f' % ( + np.mean(x), np.min(x), np.max(x), np.median(x), np.std(x))) + + +def mkdirs(paths): + """create empty directories if they don't exist + + Parameters: + paths (str list) -- a list of directory paths + """ + if isinstance(paths, list) and not isinstance(paths, str): + for path in paths: + mkdir(path) + else: + mkdir(paths) + + +def mkdir(path): + """create a single empty directory if it didn't exist + + Parameters: + path (str) -- a single directory path + """ + if not os.path.exists(path): + os.makedirs(path) diff --git a/app/service/image2sketch/util/visualizer.py b/app/service/image2sketch/util/visualizer.py new file mode 100644 index 0000000..239c5ee --- /dev/null +++ b/app/service/image2sketch/util/visualizer.py @@ -0,0 +1,223 @@ +import numpy as np +import os +import sys +import ntpath +import time +from . import util, html +from subprocess import Popen, PIPE + + +if sys.version_info[0] == 2: + VisdomExceptionBase = Exception +else: + VisdomExceptionBase = ConnectionError + + +def save_images(webpage, visuals, image_path, aspect_ratio=1.0, width=256): + """Save images to the disk. + + Parameters: + webpage (the HTML class) -- the HTML webpage class that stores these imaegs (see html.py for more details) + visuals (OrderedDict) -- an ordered dictionary that stores (name, images (either tensor or numpy) ) pairs + image_path (str) -- the string is used to create image paths + aspect_ratio (float) -- the aspect ratio of saved images + width (int) -- the images will be resized to width x width + + This function will save images stored in 'visuals' to the HTML file specified by 'webpage'. 
+ """ + image_dir = webpage.get_image_dir() + short_path = ntpath.basename(image_path[0]) + name = os.path.splitext(short_path)[0] + + webpage.add_header(name) + ims, txts, links = [], [], [] + + for label, im_data in visuals.items(): + im = util.tensor2im(im_data) + image_name = '%s_%s.png' % (name, label) + save_path = os.path.join(image_dir, image_name) + util.save_image(im, save_path, aspect_ratio=aspect_ratio) + ims.append(image_name) + txts.append(label) + links.append(image_name) + webpage.add_images(ims, txts, links, width=width) + + +class Visualizer(): + """This class includes several functions that can display/save images and print/save logging information. + + It uses a Python library 'visdom' for display, and a Python library 'dominate' (wrapped in 'HTML') for creating HTML files with images. + """ + + def __init__(self, opt): + """Initialize the Visualizer class + + Parameters: + opt -- stores all the experiment flags; needs to be a subclass of BaseOptions + Step 1: Cache the training/test options + Step 2: connect to a visdom server + Step 3: create an HTML object for saveing HTML filters + Step 4: create a logging file to store training losses + """ + self.opt = opt # cache the option + self.display_id = opt.display_id + self.use_html = opt.isTrain and not opt.no_html + self.win_size = opt.display_winsize + self.name = opt.name + self.port = opt.display_port + self.saved = False + ''' + if self.display_id > 0: # connect to a visdom server given and + import visdom + self.ncols = opt.display_ncols + self.vis = visdom.Visdom(server=opt.display_server, port=opt.display_port, env=opt.display_env) + if not self.vis.check_connection(): + self.create_visdom_connections() + ''' + if self.use_html: # create an HTML object at /web/; images will be saved under /web/images/ + self.web_dir = os.path.join(opt.checkpoints_dir, opt.name, 'web') + self.img_dir = os.path.join(self.web_dir, 'images') + print('create web directory %s...' 
% self.web_dir) + util.mkdirs([self.web_dir, self.img_dir]) + # create a logging file to store training losses + self.log_name = os.path.join(opt.checkpoints_dir, opt.name, 'loss_log.txt') + with open(self.log_name, "a") as log_file: + now = time.strftime("%c") + log_file.write('================ Training Loss (%s) ================\n' % now) + + def reset(self): + """Reset the self.saved status""" + self.saved = False + ''' + def create_visdom_connections(self): + """If the program could not connect to Visdom server, this function will start a new server at port < self.port > """ + cmd = sys.executable + ' -m visdom.server -p %d &>/dev/null &' % self.port + print('\n\nCould not connect to Visdom server. \n Trying to start a server....') + print('Command: %s' % cmd) + Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE) + + def display_current_results(self, visuals, epoch, save_result): + """Display current results on visdom; save current results to an HTML file. + + Parameters: + visuals (OrderedDict) - - dictionary of images to display or save + epoch (int) - - the current epoch + save_result (bool) - - if save the current results to an HTML file + """ + if self.display_id > 0: # show images in the browser using visdom + ncols = self.ncols + if ncols > 0: # show all the images in one visdom panel + ncols = min(ncols, len(visuals)) + h, w = next(iter(visuals.values())).shape[:2] + table_css = """""" % (w, h) # create a table css + # create a table of images. 
+ title = self.name + label_html = '' + label_html_row = '' + images = [] + idx = 0 + for label, image in visuals.items(): + image_numpy = util.tensor2im(image) + label_html_row += '%s' % label + images.append(image_numpy.transpose([2, 0, 1])) + idx += 1 + if idx % ncols == 0: + label_html += '%s' % label_html_row + label_html_row = '' + white_image = np.ones_like(image_numpy.transpose([2, 0, 1])) * 255 + while idx % ncols != 0: + images.append(white_image) + label_html_row += '' + idx += 1 + if label_html_row != '': + label_html += '%s' % label_html_row + try: + self.vis.images(images, nrow=ncols, win=self.display_id + 1, + padding=2, opts=dict(title=title + ' images')) + label_html = '%s
' % label_html + self.vis.text(table_css + label_html, win=self.display_id + 2, + opts=dict(title=title + ' labels')) + except VisdomExceptionBase: + self.create_visdom_connections() + + else: # show each image in a separate visdom panel; + idx = 1 + try: + for label, image in visuals.items(): + image_numpy = util.tensor2im(image) + self.vis.image(image_numpy.transpose([2, 0, 1]), opts=dict(title=label), + win=self.display_id + idx) + idx += 1 + except VisdomExceptionBase: + self.create_visdom_connections() + + if self.use_html and (save_result or not self.saved): # save images to an HTML file if they haven't been saved. + self.saved = True + # save images to the disk + for label, image in visuals.items(): + image_numpy = util.tensor2im(image) + img_path = os.path.join(self.img_dir, 'epoch%.3d_%s.png' % (epoch, label)) + util.save_image(image_numpy, img_path) + + # update website + webpage = html.HTML(self.web_dir, 'Experiment name = %s' % self.name, refresh=1) + for n in range(epoch, 0, -1): + webpage.add_header('epoch [%d]' % n) + ims, txts, links = [], [], [] + + for label, image_numpy in visuals.items(): + image_numpy = util.tensor2im(image) + img_path = 'epoch%.3d_%s.png' % (n, label) + ims.append(img_path) + txts.append(label) + links.append(img_path) + webpage.add_images(ims, txts, links, width=self.win_size) + webpage.save() + ''' + def plot_current_losses(self, epoch, counter_ratio, losses): + """display the current losses on visdom display: dictionary of error labels and values + + Parameters: + epoch (int) -- current epoch + counter_ratio (float) -- progress (percentage) in the current epoch, between 0 to 1 + losses (OrderedDict) -- training losses stored in the format of (name, float) pairs + """ + if not hasattr(self, 'plot_data'): + self.plot_data = {'X': [], 'Y': [], 'legend': list(losses.keys())} + self.plot_data['X'].append(epoch + counter_ratio) + self.plot_data['Y'].append([losses[k] for k in self.plot_data['legend']]) + ''' + try: + 
self.vis.line( + X=np.stack([np.array(self.plot_data['X'])] * len(self.plot_data['legend']), 1), + Y=np.array(self.plot_data['Y']), + opts={ + 'title': self.name + ' loss over time', + 'legend': self.plot_data['legend'], + 'xlabel': 'epoch', + 'ylabel': 'loss'}, + win=self.display_id) + except VisdomExceptionBase: + self.create_visdom_connections() + ''' + # losses: same format as |losses| of plot_current_losses + def print_current_losses(self, epoch, iters, losses, t_comp, t_data): + """print current losses on console; also save the losses to the disk + + Parameters: + epoch (int) -- current epoch + iters (int) -- current training iteration during this epoch (reset to 0 at the end of every epoch) + losses (OrderedDict) -- training losses stored in the format of (name, float) pairs + t_comp (float) -- computational time per data point (normalized by batch_size) + t_data (float) -- data loading time per data point (normalized by batch_size) + """ + message = '(epoch: %d, iters: %d, time: %.3f, data: %.3f) ' % (epoch, iters, t_comp, t_data) + for k, v in losses.items(): + message += '%s: %.3f ' % (k, v) + + print(message) # print the message + with open(self.log_name, "a") as log_file: + log_file.write('%s\n' % message) # save the message diff --git a/download_checkpoints.py b/download_checkpoints.py new file mode 100644 index 0000000..03cc2c6 --- /dev/null +++ b/download_checkpoints.py @@ -0,0 +1,45 @@ +import os + +from minio import Minio +from minio.error import S3Error + +MINIO_URL = "www.minio.aida.com.hk:12024" +MINIO_ACCESS = 'vXKFLSJkYeEq2DrSZvkB' +MINIO_SECRET = 'uKTZT3x7C43WvPN9QTc99DiRkwddWZrG9Uh3JVlR' +MINIO_SECURE = True +# 配置MinIO客户端 +minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + + +# 下载函数 +def download_folder(bucket_name, folder_name, local_dir): + try: + # 确保本地目录存在 + if not os.path.exists(local_dir): + os.makedirs(local_dir) + + # 遍历MinIO中的文件 + objects = minio_client.list_objects(bucket_name, 
prefix=folder_name, recursive=True) + for obj in objects: + # 构造本地文件路径 + local_file_path = os.path.join(local_dir, obj.object_name[len(folder_name):]) + local_file_dir = os.path.dirname(local_file_path) + + # 确保本地目录存在 + if not os.path.exists(local_file_dir): + os.makedirs(local_file_dir) + + # 下载文件 + minio_client.fget_object(bucket_name, obj.object_name, local_file_path) + print(f"Downloaded {obj.object_name} to {local_file_path}") + + except S3Error as e: + print(f"Error occurred: {e}") + + +# 使用示例 +bucket_name = "test" # 替换成你的bucket名称 +folder_name = "checkpoints/" # 权重文件夹的路径 +local_dir = "app/service/image2sketch/checkpoints" # 替换成你希望保存到的本地目录 + +download_folder(bucket_name, folder_name, local_dir) From 44730520c4e5516e78b0db67f8f81089e35d0529 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 14 Aug 2024 16:49:38 +0800 Subject: [PATCH 035/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../service/image2sketch/checkpoints/download_checkpoints.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename download_checkpoints.py => app/service/image2sketch/checkpoints/download_checkpoints.py (95%) diff --git a/download_checkpoints.py b/app/service/image2sketch/checkpoints/download_checkpoints.py similarity index 95% rename from download_checkpoints.py rename to app/service/image2sketch/checkpoints/download_checkpoints.py index 03cc2c6..c361a12 100644 --- a/download_checkpoints.py +++ b/app/service/image2sketch/checkpoints/download_checkpoints.py @@ -40,6 +40,6 @@ def download_folder(bucket_name, folder_name, local_dir): # 使用示例 bucket_name = "test" # 替换成你的bucket名称 folder_name = "checkpoints/" # 权重文件夹的路径 -local_dir = "app/service/image2sketch/checkpoints" # 替换成你希望保存到的本地目录 +local_dir = "app\service\image2sketch\checkpoints" # 替换成你希望保存到的本地目录 download_folder(bucket_name, folder_name, local_dir) From 
2505c55bfd6cc88c868b9cc49f01f17a9e506d9a Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 14 Aug 2024 16:57:39 +0800 Subject: [PATCH 036/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/image2sketch/checkpoints/download_checkpoints.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/service/image2sketch/checkpoints/download_checkpoints.py b/app/service/image2sketch/checkpoints/download_checkpoints.py index c361a12..03cc2c6 100644 --- a/app/service/image2sketch/checkpoints/download_checkpoints.py +++ b/app/service/image2sketch/checkpoints/download_checkpoints.py @@ -40,6 +40,6 @@ def download_folder(bucket_name, folder_name, local_dir): # 使用示例 bucket_name = "test" # 替换成你的bucket名称 folder_name = "checkpoints/" # 权重文件夹的路径 -local_dir = "app\service\image2sketch\checkpoints" # 替换成你希望保存到的本地目录 +local_dir = "app/service/image2sketch/checkpoints" # 替换成你希望保存到的本地目录 download_folder(bucket_name, folder_name, local_dir) From 90d8b47884139c8ab391622fbed4364d34b93d9a Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 14 Aug 2024 17:13:47 +0800 Subject: [PATCH 037/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/image2sketch/opt.py | 11 +++++++++-- app/service/image2sketch/server.py | 2 +- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/app/service/image2sketch/opt.py b/app/service/image2sketch/opt.py index 7af09e1..03cf7a3 100644 --- a/app/service/image2sketch/opt.py +++ b/app/service/image2sketch/opt.py @@ -1,10 +1,13 @@ +from app.core.config import DEBUG + + class Config: def __init__(self): # 基本参数 - self.dataroot = "service/image2sketch/datasets/ref_unpair" + self.dataroot = "app/service/image2sketch/datasets/ref_unpair" self.name = 
'semi_unpair' self.gpu_ids = [0] - self.checkpoints_dir = 'service/image2sketch/checkpoints/' + self.checkpoints_dir = 'app/service/image2sketch/checkpoints/' # 模型参数 self.model = 'unpaired' self.input_nc = 3 @@ -43,3 +46,7 @@ class Config: self.eval = False self.num_test = 1000 self.morm = 'batch' + if DEBUG: + self.style_image = "service/image2sketch/datasets/ref_unpair/testC/20180422151845_stEe4.jpeg" + else: + self.style_image = "app/service/image2sketch/datasets/ref_unpair/testC/20180422151845_stEe4.jpeg" diff --git a/app/service/image2sketch/server.py b/app/service/image2sketch/server.py index accd4b8..ebd363e 100644 --- a/app/service/image2sketch/server.py +++ b/app/service/image2sketch/server.py @@ -47,7 +47,7 @@ class Image2SketchServer: self.model.setup(self.opt) transform_list = [transforms.ToTensor(), transforms.Normalize([0.5], [0.5])] transform = transforms.Compose(transform_list) - style_img = Image.open(r"E:\workspace\trinity_client_aida\app\service\image2sketch\datasets\ref_unpair\testC\20180422151845_stEe4.jpeg").convert('L') + style_img = Image.open(self.opt.style_image).convert('L') style_img = transform(style_img) self.data['B'] = style_img self.data['B'] = self.data['B'].unsqueeze(0).to(device) From af3c7c2e51fedbe2639c33fdd70a7e0f629119e8 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 14 Aug 2024 17:24:03 +0800 Subject: [PATCH 038/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_image2sketch.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/app/api/api_image2sketch.py b/app/api/api_image2sketch.py index 98d94ee..414361d 100644 --- a/app/api/api_image2sketch.py +++ b/app/api/api_image2sketch.py @@ -26,11 +26,11 @@ def image2sketch(request_item: Image2SketchModel): "sketch_name": "12341556-89.jpg" } """ - # try: - logger.info(f"image2sketch request item 
is : @@@@@@:{json.dumps(request_item.dict())}") - service = Image2SketchServer(request_item) - sketch_url = service.get_result() - # except Exception as e: - # logger.warning(f"image2sketch Run Exception @@@@@@:{e}") - # raise HTTPException(status_code=404, detail=str(e)) + try: + logger.info(f"image2sketch request item is : @@@@@@:{json.dumps(request_item.dict())}") + service = Image2SketchServer(request_item) + sketch_url = service.get_result() + except Exception as e: + logger.warning(f"image2sketch Run Exception @@@@@@:{e}") + raise HTTPException(status_code=404, detail=str(e)) return ResponseModel(data=sketch_url) From 8b876b103f050252fe2f5417097391a1260e76e2 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 14 Aug 2024 17:43:19 +0800 Subject: [PATCH 039/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/image2sketch/server.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/app/service/image2sketch/server.py b/app/service/image2sketch/server.py index ebd363e..7a15b55 100644 --- a/app/service/image2sketch/server.py +++ b/app/service/image2sketch/server.py @@ -55,6 +55,9 @@ class Image2SketchServer: self.data['A'] = transform(A) self.data['A'] = self.data['A'].unsqueeze(0).to(device) + def __del__(self): + torch.cuda.empty_cache() + def get_result(self): self.model.set_input(self.data) self.model.test() # run inference From 7ff3a72d8c1231e6444c9db34187166a60a0dd7a Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 15 Aug 2024 10:24:19 +0800 Subject: [PATCH 040/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_image2sketch.py | 3 +++ app/service/image2sketch/opt.py | 3 ++- app/service/image2sketch/server.py | 3 --- 3 files changed, 5 insertions(+), 4 
deletions(-) diff --git a/app/api/api_image2sketch.py b/app/api/api_image2sketch.py index 414361d..5bc6191 100644 --- a/app/api/api_image2sketch.py +++ b/app/api/api_image2sketch.py @@ -1,5 +1,6 @@ import json import logging +import time from fastapi import APIRouter, HTTPException @@ -27,9 +28,11 @@ def image2sketch(request_item: Image2SketchModel): } """ try: + start_time = time.time() logger.info(f"image2sketch request item is : @@@@@@:{json.dumps(request_item.dict())}") service = Image2SketchServer(request_item) sketch_url = service.get_result() + logger.info(f"run time is : {time.time() - start_time}") except Exception as e: logger.warning(f"image2sketch Run Exception @@@@@@:{e}") raise HTTPException(status_code=404, detail=str(e)) diff --git a/app/service/image2sketch/opt.py b/app/service/image2sketch/opt.py index 03cf7a3..8f33b9c 100644 --- a/app/service/image2sketch/opt.py +++ b/app/service/image2sketch/opt.py @@ -7,7 +7,6 @@ class Config: self.dataroot = "app/service/image2sketch/datasets/ref_unpair" self.name = 'semi_unpair' self.gpu_ids = [0] - self.checkpoints_dir = 'app/service/image2sketch/checkpoints/' # 模型参数 self.model = 'unpaired' self.input_nc = 3 @@ -48,5 +47,7 @@ class Config: self.morm = 'batch' if DEBUG: self.style_image = "service/image2sketch/datasets/ref_unpair/testC/20180422151845_stEe4.jpeg" + self.checkpoints_dir = 'service/image2sketch/checkpoints/' else: + self.checkpoints_dir = 'app/service/image2sketch/checkpoints/' self.style_image = "app/service/image2sketch/datasets/ref_unpair/testC/20180422151845_stEe4.jpeg" diff --git a/app/service/image2sketch/server.py b/app/service/image2sketch/server.py index 7a15b55..ebd363e 100644 --- a/app/service/image2sketch/server.py +++ b/app/service/image2sketch/server.py @@ -55,9 +55,6 @@ class Image2SketchServer: self.data['A'] = transform(A) self.data['A'] = self.data['A'].unsqueeze(0).to(device) - def __del__(self): - torch.cuda.empty_cache() - def get_result(self): 
self.model.set_input(self.data) self.model.test() # run inference From 9fa9c620a87d7b5c23de9a2cce4fee6779fc7863 Mon Sep 17 00:00:00 2001 From: xupei Date: Fri, 16 Aug 2024 15:09:08 +0800 Subject: [PATCH 041/103] =?UTF-8?q?=E6=9C=BA=E5=99=A8=E4=BA=BA=20--=20?= =?UTF-8?q?=E5=BC=80=E5=90=AF=E7=94=A8=E6=88=B7=E6=8C=87=E5=BC=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../chat_robot/script/service/CallQWen.py | 64 +++++++++++++------ 1 file changed, 44 insertions(+), 20 deletions(-) diff --git a/app/service/chat_robot/script/service/CallQWen.py b/app/service/chat_robot/script/service/CallQWen.py index f8e6bd5..d2e2c06 100644 --- a/app/service/chat_robot/script/service/CallQWen.py +++ b/app/service/chat_robot/script/service/CallQWen.py @@ -1,5 +1,4 @@ import json -from typing import Dict, Any from dashscope import Generation from retry import retry @@ -8,8 +7,7 @@ from urllib3.exceptions import NewConnectionError from app.core.config import * from app.service.chat_robot.script.callbacks.qwen_callback_handler import QWenCallbackHandler from app.service.chat_robot.script.database import CustomDatabase -from app.service.chat_robot.script.prompt import FASHION_CHAT_BOT_PREFIX, TOOLS_FUNCTIONS_SUFFIX - +from app.service.chat_robot.script.prompt import FASHION_CHAT_BOT_PREFIX, TOOLS_FUNCTIONS_SUFFIX, TUTORIAL_TOOL_RETURN get_database_table_description = "Input is an empty string, output is a comma separated list of tables in the database." @@ -22,17 +20,20 @@ get_table_info_description = ( ) query_database_description = ( - "The input of this tool is a detailed and correct SQL select query statement, " - "and the output is the result of the database, and it can only return up to 4 results." - "If the query is not correct, an error message will be returned." - "If an error is returned, rewrite the query, check the query, and try again." 
- "If you encounter an issue with Unknown column 'xxxx' in 'field list' or Table 'attribute_retrieval.xxxx' doesn't exist," - "use get_table_info to query the correct table fields." + "The input of this tool is a detailed and correct SQL select query statement, " + "and the output is the result of the database, and it can only return up to 4 results." + "If the query is not correct, an error message will be returned." + "If an error is returned, rewrite the query, check the query, and try again." + "If you encounter an issue with Unknown column 'xxxx' in 'field list' or Table 'attribute_retrieval.xxxx' doesn't exist," + "use get_table_info to query the correct table fields." - "Example Input: 'SELECT img_name FROM female_skirt WHERE opening_type = 'Button' ORDER BY RAND() LIMIT 1'" - "Example Input 2: 'SELECT img_name FROM female_top WHERE sleeve_length = 'Long' AND type = 'Blouse' " - "order by rand() LIMIT 2'" - ) + "Example Input: 'SELECT img_name FROM female_skirt WHERE opening_type = 'Button' ORDER BY RAND() LIMIT 1'" + "Example Input 2: 'SELECT img_name FROM female_top WHERE sleeve_length = 'Long' AND type = 'Blouse' " + "order by rand() LIMIT 2'" +) + +tutorial_description = ("Utilize this tool to retrieve specific statements related to user guidance tutorials." 
+ "Input is an empty string") tools = [ # 工具一 @@ -97,6 +98,23 @@ tools = [ }, "required": ["sql_string"] } + }, + # 工具四 + { + "type": "function", + "function": { + "name": "tutorial_tool", + "description": tutorial_description, + "parameters": { + "type": "object", + "properties": { + "sql_string": { + "type": "string", + "description": "由模型生成的sql语句" + } + } + }, + } } ] @@ -106,6 +124,7 @@ db = CustomDatabase.from_uri(f'mysql+pymysql://{DB_USERNAME}:{DB_PASSWORD}@{DB_H engine_args={"pool_recycle": 7200}) qwen = QWenCallbackHandler() + def search_from_internet(message): response = Generation.call( model='qwen-turbo', @@ -118,15 +137,19 @@ def search_from_internet(message): ) return response + def get_database_table(): return 'female_top, female_skirt, female_pants, female_dress, female_outwear, male_bottom, male_top, male_outwear' + def get_table_info(table_names): return CustomDatabase.get_table_info(db, table_names) + def query_database(sql_string): return CustomDatabase.run(db, sql_string) + @retry(exceptions=NewConnectionError, tries=3, delay=1) def get_response(messages): response = Generation.call( @@ -206,8 +229,12 @@ def call_with_messages(message): sql_string = json.loads(assistant_output.tool_calls[0]['function']['arguments'])['sql_string'] tool_info['content'] = query_database(sql_string) flag = False - result_content = tool_info['content'] + result_content = tool_info['content'] response_type = "image" + elif assistant_output.tool_calls[0]['function']['name'] == 'tutorial_tool': + tool_info = {"name": "tutorial_tool", "role": "tool", 'content': tutorial_tool()} + flag = False + result_content = tool_info['content'] print(f"工具输出信息:{tool_info['content']}\n") messages.append(tool_info) @@ -217,8 +244,6 @@ def call_with_messages(message): final_output["response_type"] = response_type QWenCallbackHandler.on_chain_end(qwen, final_output) - - # 模型的第二轮调用,对工具的输出进行总结 # if flag : # second_response = get_response(messages) @@ -229,9 +254,8 @@ def 
call_with_messages(message): return final_output +def tutorial_tool(): + return TUTORIAL_TOOL_RETURN + if __name__ == '__main__': call_with_messages() - - - - From c24ade7696f0d9458a95ce6e62ba219fdb9804ee Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 16 Aug 2024 15:31:59 +0800 Subject: [PATCH 042/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/image2sketch/server.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/app/service/image2sketch/server.py b/app/service/image2sketch/server.py index ebd363e..82f1843 100644 --- a/app/service/image2sketch/server.py +++ b/app/service/image2sketch/server.py @@ -52,8 +52,7 @@ class Image2SketchServer: self.data['B'] = style_img self.data['B'] = self.data['B'].unsqueeze(0).to(device) A, self.width, self.height = self.get_image(self.image_url) - self.data['A'] = transform(A) - self.data['A'] = self.data['A'].unsqueeze(0).to(device) + def get_result(self): self.model.set_input(self.data) From 6cf52665586fbdeb2e22f58261efd418230ccac6 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 19 Aug 2024 10:56:21 +0800 Subject: [PATCH 043/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20=E5=92=8C=20minio=E8=B6=85?= =?UTF-8?q?=E6=97=B6=E8=AE=BE=E7=BD=AE=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_image2sketch.py | 6 +++--- app/service/image2sketch/server.py | 3 ++- app/service/utils/oss_client.py | 17 ++++++++++++++--- 3 files changed, 19 insertions(+), 7 deletions(-) diff --git a/app/api/api_image2sketch.py b/app/api/api_image2sketch.py index 5bc6191..5d15daa 100644 --- a/app/api/api_image2sketch.py +++ b/app/api/api_image2sketch.py @@ -16,9 +16,9 @@ logger = logging.getLogger() def image2sketch(request_item: Image2SketchModel): """ 创建一个具有以下参数的请求体: - 
- **sr_image_url**: 超分图片的minio或s3 url地址 - - **sr_xn**: 超分的倍数,只接受2或4 - - **sr_tasks_id**: 任务id 用于取消超分任务和获取超分结果 + - **image_url**: 提取图片url + - **sketch_bucket**: sketch保存的bucket + - **sketch_name**: sketch保存的object name 示例参数: { diff --git a/app/service/image2sketch/server.py b/app/service/image2sketch/server.py index 82f1843..ebd363e 100644 --- a/app/service/image2sketch/server.py +++ b/app/service/image2sketch/server.py @@ -52,7 +52,8 @@ class Image2SketchServer: self.data['B'] = style_img self.data['B'] = self.data['B'].unsqueeze(0).to(device) A, self.width, self.height = self.get_image(self.image_url) - + self.data['A'] = transform(A) + self.data['A'] = self.data['A'].unsqueeze(0).to(device) def get_result(self): self.model.set_input(self.data) diff --git a/app/service/utils/oss_client.py b/app/service/utils/oss_client.py index 370cd7c..20794b9 100644 --- a/app/service/utils/oss_client.py +++ b/app/service/utils/oss_client.py @@ -5,12 +5,23 @@ from io import BytesIO import boto3 import cv2 import numpy as np +import urllib3 from PIL import Image from minio import Minio from app.core.config import * logger = logging.getLogger() +timeout = urllib3.Timeout(connect=1, read=10.0) # 连接超时 5 秒,读取超时 10 秒 +http_client = urllib3.PoolManager( + timeout=timeout, + cert_reqs='CERT_REQUIRED', # 需要证书验证 + retries=urllib3.Retry( + total=5, + backoff_factor=0.2, + status_forcelist=[500, 502, 503, 504], + ), +) # 获取图片 @@ -19,7 +30,7 @@ def oss_get_image(bucket, object_name, data_type): image_object = None try: if OSS == "minio": - oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE, http_client=http_client) image_data = oss_client.get_object(bucket_name=bucket, object_name=object_name) else: oss_client = boto3.client('s3', aws_access_key_id=S3_ACCESS_KEY, aws_secret_access_key=S3_AWS_SECRET_ACCESS_KEY, region_name=S3_REGION_NAME) @@ 
-64,8 +75,8 @@ if __name__ == '__main__': # url = "aida-users/89/sketchboard/female/Dress/e6724ab7-8d3f-4677-abe0-c3e42ab7af85.jpeg" # url = "aida-users/87/print/956614a2-7e75-4fbe-9ed0-c1831e37a2c9-4-87.png" # url = "aida-users/89/single_logo/123-89.png" - # url = "aida-users/89/product_image/string-89.png" - url = "aida-collection-element/12148/Sketchboard/95ea577b-305b-4a62-b30a-39c0dd3ddb3f.png" + url = "aida-users/89/product_image/string-89.png" + # url = "aida-collection-element/12148/Sketchboard/95ea577b-305b-4a62-b30a-39c0dd3ddb3f.png" read_type = "cv2" if read_type == "cv2": img = oss_get_image(bucket=url.split('/')[0], object_name=url[url.find('/') + 1:], data_type=read_type) From ca2e2320621face05cd61750da397bc510ed0077 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 19 Aug 2024 11:02:50 +0800 Subject: [PATCH 044/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20=E5=92=8C=20minio=E8=B6=85?= =?UTF-8?q?=E6=97=B6=E8=AE=BE=E7=BD=AE=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/utils/oss_client.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/app/service/utils/oss_client.py b/app/service/utils/oss_client.py index 20794b9..f37a5ce 100644 --- a/app/service/utils/oss_client.py +++ b/app/service/utils/oss_client.py @@ -14,6 +14,8 @@ from app.core.config import * logger = logging.getLogger() timeout = urllib3.Timeout(connect=1, read=10.0) # 连接超时 5 秒,读取超时 10 秒 http_client = urllib3.PoolManager( + num_pools=10, # 设置连接池大小 + maxsize=10, timeout=timeout, cert_reqs='CERT_REQUIRED', # 需要证书验证 retries=urllib3.Retry( From 75140f57071cdd9809b3a646a9c10405a6d5e20b Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 19 Aug 2024 11:02:56 +0800 Subject: [PATCH 045/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20=E5=92=8C=20minio=E8=B6=85?= =?UTF-8?q?=E6=97=B6=E8=AE=BE=E7=BD=AE=20fix?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/utils/oss_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/service/utils/oss_client.py b/app/service/utils/oss_client.py index f37a5ce..37229ad 100644 --- a/app/service/utils/oss_client.py +++ b/app/service/utils/oss_client.py @@ -14,8 +14,8 @@ from app.core.config import * logger = logging.getLogger() timeout = urllib3.Timeout(connect=1, read=10.0) # 连接超时 5 秒,读取超时 10 秒 http_client = urllib3.PoolManager( - num_pools=10, # 设置连接池大小 - maxsize=10, + num_pools=100, # 设置连接池大小 + maxsize=100, timeout=timeout, cert_reqs='CERT_REQUIRED', # 需要证书验证 retries=urllib3.Retry( From 49eb0e4088dbe657b3e56d831024b34d5da508f4 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 19 Aug 2024 11:06:31 +0800 Subject: [PATCH 046/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20=E5=92=8C=20minio=E8=B6=85?= =?UTF-8?q?=E6=97=B6=E8=AE=BE=E7=BD=AE=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/utils/oss_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/service/utils/oss_client.py b/app/service/utils/oss_client.py index 37229ad..f37a5ce 100644 --- a/app/service/utils/oss_client.py +++ b/app/service/utils/oss_client.py @@ -14,8 +14,8 @@ from app.core.config import * logger = logging.getLogger() timeout = urllib3.Timeout(connect=1, read=10.0) # 连接超时 5 秒,读取超时 10 秒 http_client = urllib3.PoolManager( - num_pools=100, # 设置连接池大小 - maxsize=100, + num_pools=10, # 设置连接池大小 + maxsize=10, timeout=timeout, cert_reqs='CERT_REQUIRED', # 需要证书验证 retries=urllib3.Retry( From aa501d848aabd6c99edc38dc41a32eddc1d0dea0 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 19 Aug 2024 11:10:48 +0800 Subject: [PATCH 047/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20=E5=92=8C=20minio=E8=B6=85?= 
=?UTF-8?q?=E6=97=B6=E8=AE=BE=E7=BD=AE=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_pre_processing/service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/service/design_pre_processing/service.py b/app/service/design_pre_processing/service.py index 4cbff8f..70502d0 100644 --- a/app/service/design_pre_processing/service.py +++ b/app/service/design_pre_processing/service.py @@ -151,7 +151,7 @@ class DesignPreprocessing: # 推理得到keypoint sketch['keypoint_result'] = self.keypoint_cache(sketch) if sketch['site'] == 'up': - _, seg_cache = self.load_seg_result(sketch['obj']) + _, seg_cache = self.load_seg_result(sketch['image_id']) if not _: # 推理获得seg 结果 seg_result = get_seg_result(sketch["image_id"], sketch['obj'])[0] From 539398ea8e665b07d390732f6f4c534a9a4dd609 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 19 Aug 2024 11:15:00 +0800 Subject: [PATCH 048/103] =?UTF-8?q?feat=20=20=20sketch=20=E6=8F=90?= =?UTF-8?q?=E5=8F=96=E6=8E=A5=E5=8F=A3=20=E5=92=8C=20minio=E8=B6=85?= =?UTF-8?q?=E6=97=B6=E8=AE=BE=E7=BD=AE=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/utils/oss_client.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/app/service/utils/oss_client.py b/app/service/utils/oss_client.py index f37a5ce..5557641 100644 --- a/app/service/utils/oss_client.py +++ b/app/service/utils/oss_client.py @@ -11,6 +11,17 @@ from minio import Minio from app.core.config import * + +# 自定义 Retry 类 +class CustomRetry(urllib3.Retry): + def increment(self, method=None, url=None, response=None, error=None, **kwargs): + # 调用父类的 increment 方法 + new_retry = super(CustomRetry, self).increment(method, url, response, error, **kwargs) + # 打印重试信息 + logger.info(f"重试连接: {method} {url},错误: {error},重试次数: {self.total - new_retry.total}") + return new_retry + + logger = logging.getLogger() timeout = 
urllib3.Timeout(connect=1, read=10.0) # 连接超时 5 秒,读取超时 10 秒 http_client = urllib3.PoolManager( @@ -18,7 +29,7 @@ http_client = urllib3.PoolManager( maxsize=10, timeout=timeout, cert_reqs='CERT_REQUIRED', # 需要证书验证 - retries=urllib3.Retry( + retries=CustomRetry( total=5, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504], From 34fdc77cb3e7a139d11c3964979181e26eccf0ac Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Tue, 20 Aug 2024 10:36:05 +0800 Subject: [PATCH 049/103] feat fix seg cache debug --- app/service/design/items/pipelines/segmentation.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/app/service/design/items/pipelines/segmentation.py b/app/service/design/items/pipelines/segmentation.py index 2966ee7..fba6082 100644 --- a/app/service/design/items/pipelines/segmentation.py +++ b/app/service/design/items/pipelines/segmentation.py @@ -1,3 +1,4 @@ +import logging import os import numpy as np @@ -6,6 +7,8 @@ from app.core.config import SEG_CACHE_PATH from ..builder import PIPELINES from ...utils.design_ensemble import get_seg_result +logger = logging.getLogger() + @PIPELINES.register_module() class Segmentation(object): @@ -39,6 +42,7 @@ class Segmentation(object): @staticmethod def load_seg_result(image_id): file_path = f"{SEG_CACHE_PATH}{image_id}.npy" + logger.info(f"load seg file name is :{SEG_CACHE_PATH}{image_id}.npy") try: seg_result = np.load(file_path) return True, seg_result From aeaf7fbd9dfb22ceeca9d95a3422a9ceda9f0a27 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Tue, 20 Aug 2024 10:42:47 +0800 Subject: [PATCH 050/103] feat fix seg cache debug --- app/service/design/items/pipelines/painting.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/app/service/design/items/pipelines/painting.py b/app/service/design/items/pipelines/painting.py index 07c6a84..c3c496d 100644 --- a/app/service/design/items/pipelines/painting.py +++ b/app/service/design/items/pipelines/painting.py @@ -1,13 +1,15 @@ +import logging import 
random import cv2 import numpy as np from PIL import Image -from app.service.utils.decorator import RunTime, ClassCallRunTime from app.service.utils.oss_client import oss_get_image from ..builder import PIPELINES +logger = logging.getLogger() + @PIPELINES.register_module() class Painting(object): @@ -28,6 +30,7 @@ class Painting(object): resize_pattern = cv2.resize(pattern, (dim_image_w, dim_image_h), interpolation=cv2.INTER_AREA) closed_mo = np.expand_dims(result['mask'], axis=2).repeat(3, axis=2) gray_mo = np.expand_dims(result['gray'], axis=2).repeat(3, axis=2) + logger.info(f"image id is :{result['image_id']}") get_image_fir = resize_pattern * (closed_mo / 255) * (gray_mo / 255) result['pattern_image'] = get_image_fir.astype(np.uint8) result['final_image'] = result['pattern_image'] From 547b47174522dd0bb304387c5584936883064db7 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Tue, 20 Aug 2024 11:15:24 +0800 Subject: [PATCH 051/103] feat fix seg cache debug --- app/service/design_pre_processing/service.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/app/service/design_pre_processing/service.py b/app/service/design_pre_processing/service.py index 70502d0..98590ab 100644 --- a/app/service/design_pre_processing/service.py +++ b/app/service/design_pre_processing/service.py @@ -13,6 +13,8 @@ from app.schemas.pre_processing import DesignPreProcessingModel from app.service.design.utils.design_ensemble import get_keypoint_result, get_seg_result from app.service.utils.oss_client import oss_get_image, oss_upload_image +logger = logging.getLogger() + class DesignPreprocessing: # def __init__(self): @@ -21,19 +23,19 @@ class DesignPreprocessing: # @ RunTime def pipeline(self, image_list): sketches_list = self.read_image(image_list) - logging.info("read image success") + # logging.info("read image success") bounding_box_sketches_list = self.bounding_box(sketches_list) - logging.info("bounding box image success") + # 
logging.info("bounding box image success") super_resolution_list = self.super_resolution(bounding_box_sketches_list) - logging.info("super_resolution_list image success") + # logging.info("super_resolution_list image success") infer_sketches_list = self.infer_image(super_resolution_list) - logging.info("infer image success") + # logging.info("infer image success") result = self.composing_image(infer_sketches_list) - logging.info("Replenish white edge image success") + # logging.info("Replenish white edge image success") for d in result: if 'image_obj' in d: @@ -100,7 +102,7 @@ class DesignPreprocessing: padding_left, padding_right, cv2.BORDER_CONSTANT, - value=(255, 255, 255) # 你可以选择填充颜色,例如黑色 + value=(255, 255, 255) ) item['obj'] = padded_image return image_list @@ -156,6 +158,9 @@ class DesignPreprocessing: # 推理获得seg 结果 seg_result = get_seg_result(sketch["image_id"], sketch['obj'])[0] self.save_seg_result(seg_result, sketch['image_id']) + logger.info(f"{sketch['image_id']} image size is :{sketch['obj'].shape} , seg cache size is :{seg_result.shape}") + else: + logger.info(f"{sketch['image_id']} image size is :{sketch['obj'].shape} , seg cache size is :{seg_cache.shape}") if IF_DEBUG_SHOW: debug_show_image = sketch['obj'].copy() From c393064fa67166de914e3167ff2e239331efd918 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 21 Aug 2024 10:35:27 +0800 Subject: [PATCH 052/103] =?UTF-8?q?feat=20fix=20=20=20=20=E8=B6=85?= =?UTF-8?q?=E5=88=86size=E5=88=A4=E6=96=AD=E6=AF=94=E4=BE=8B=E5=87=8F?= =?UTF-8?q?=E5=B0=91=20=E6=98=BE=E5=AD=98=E4=B8=8D=E8=B6=B3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_pre_processing/service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/service/design_pre_processing/service.py b/app/service/design_pre_processing/service.py index 98590ab..42cfd6b 100644 --- a/app/service/design_pre_processing/service.py +++ 
b/app/service/design_pre_processing/service.py @@ -112,7 +112,7 @@ class DesignPreprocessing: # 判断 两边是否同时都小于512 因为此处做四倍超分 if item['obj'].shape[0] <= 512 and item['obj'].shape[1] <= 512: # 如果任意一边小于256则超分 - if item['obj'].shape[0] <= 256 or item['obj'].shape[1] <= 256: + if item['obj'].shape[0] <= 200 or item['obj'].shape[1] <= 200: # 超分 img = item['obj'].astype(np.float32) / 255. sample = np.transpose(img if img.shape[2] == 1 else img[:, :, [2, 1, 0]], (2, 0, 1)) From ac4f03c422c8eae579777f7299c0ac780c22db3b Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 21 Aug 2024 16:37:14 +0800 Subject: [PATCH 053/103] =?UTF-8?q?feat=20fix=20=20=20=20MILVUS=E9=9B=86?= =?UTF-8?q?=E5=90=88=E6=9B=BF=E6=8D=A2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/core/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/core/config.py b/app/core/config.py index 2b713fd..35c12b7 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -64,7 +64,7 @@ RABBITMQ_PARAMS = { MILVUS_URL = "http://10.1.1.240:19530" MILVUS_TOKEN = "root:Milvus" MILVUS_ALIAS = "default" -MILVUS_TABLE_KEYPOINT = "keypoint_cache" +MILVUS_TABLE_KEYPOINT = "keypoint_cache_2" MILVUS_TABLE_SEG = "seg_cache" # Mysql 配置 From 1d299e9ad186d0f4f103cd170c9bc6bb2bdecd11 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 23 Aug 2024 11:08:06 +0800 Subject: [PATCH 054/103] =?UTF-8?q?feat=20=20=20=E7=BB=99=E5=89=8D?= =?UTF-8?q?=E7=AB=AF=E6=8F=90=E4=BE=9Bsketch=20mask=20=E6=A0=87=E6=B3=A8?= =?UTF-8?q?=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/items/clothing.py | 4 +-- app/service/design/items/pipelines/split.py | 28 +++++++++++++++------ 2 files changed, 23 insertions(+), 9 deletions(-) diff --git a/app/service/design/items/clothing.py b/app/service/design/items/clothing.py index 7dd845b..953cecf 100644 --- a/app/service/design/items/clothing.py +++ 
b/app/service/design/items/clothing.py @@ -30,7 +30,7 @@ class Clothing(object): image=self.result["front_image"], # mask_image=self.result['front_mask_image'], image_url=self.result['front_image_url'], - mask_url=self.result['front_mask_url'], + mask_url=self.result['mask_url'], sacle=self.result['scale'], clothes_keypoint=self.result['clothes_keypoint'], position=start_point, @@ -48,7 +48,7 @@ class Clothing(object): image=self.result["back_image"], # mask_image=self.result['back_mask_image'], image_url=self.result['back_image_url'], - mask_url=self.result['back_mask_url'], + mask_url=self.result['mask_url'], sacle=self.result['scale'], clothes_keypoint=self.result['clothes_keypoint'], position=start_point, diff --git a/app/service/design/items/pipelines/split.py b/app/service/design/items/pipelines/split.py index 5b7f1bc..6569c5d 100644 --- a/app/service/design/items/pipelines/split.py +++ b/app/service/design/items/pipelines/split.py @@ -1,3 +1,4 @@ +import io import logging import cv2 @@ -5,8 +6,9 @@ import numpy as np from PIL import Image from cv2 import cvtColor, COLOR_BGR2RGBA -from app.service.utils.decorator import ClassCallRunTime +from app.core.config import AIDA_CLOTHING from app.service.utils.generate_uuid import generate_uuid +from app.service.utils.oss_client import oss_upload_image from ..builder import PIPELINES from ...utils.conversion_image import rgb_to_rgba from ...utils.upload_image import upload_png_mask @@ -33,20 +35,32 @@ class Split(object): front_mask = cv2.resize(front_mask, new_size) result_front_image[front_mask != 0] = rgba_image[front_mask != 0] result_front_image_pil = Image.fromarray(cvtColor(result_front_image, COLOR_BGR2RGBA)) - result['front_image'], result["front_image_url"], result["front_mask_url"] = upload_png_mask(result_front_image_pil, f'{generate_uuid()}', mask=front_mask) + result['front_image'], result["front_image_url"], _ = upload_png_mask(result_front_image_pil, f'{generate_uuid()}', mask=None) + + height, width = 
front_mask.shape + mask_image = np.zeros((height, width, 3)) + mask_image[front_mask != 0] = [0, 0, 255] + if result["name"] in ('blouse', 'dress', 'outwear', 'tops'): result_back_image = np.zeros_like(rgba_image) back_mask = cv2.resize(back_mask, new_size) result_back_image[back_mask != 0] = rgba_image[back_mask != 0] result_back_image_pil = Image.fromarray(cvtColor(result_back_image, COLOR_BGR2RGBA)) - result['back_image'], result["back_image_url"], result["back_mask_url"] = upload_png_mask(result_back_image_pil, f'{generate_uuid()}', mask=back_mask) + result['back_image'], result["back_image_url"], _ = upload_png_mask(result_back_image_pil, f'{generate_uuid()}', mask=None) + mask_image[back_mask != 0] = [0, 255, 0] + + image_bytes = cv2.imencode(".jpg", mask_image)[1].tobytes() + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.jpg", image_bytes=image_bytes) + result['mask_url'] = req.bucket_name + "/" + req.object_name else: + image_bytes = cv2.imencode(".jpg", mask_image)[1].tobytes() + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.jpg", image_bytes=image_bytes) + result['mask_url'] = req.bucket_name + "/" + req.object_name result['back_image'] = None result["back_image_url"] = None - result["back_mask_url"] = None - result['back_mask_image'] = None - - # 创建中间图层 + # result["back_mask_url"] = None + # result['back_mask_image'] = None + # 创建中间图层 result_pattern_image_rgba = rgb_to_rgba(result['pattern_image'], result['mask']) result_pattern_image_pil = Image.fromarray(cvtColor(result_pattern_image_rgba, COLOR_BGR2RGBA)) result['pattern_image'], result['pattern_image_url'], _ = upload_png_mask(result_pattern_image_pil, f'{generate_uuid()}') From e30ede517dd93df847afa3c9e74d6f8c15e59b8c Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 26 Aug 2024 13:09:16 +0800 Subject: [PATCH 055/103] =?UTF-8?q?feat=20=20=20=E5=8F=96=E6=B6=88?= 
=?UTF-8?q?=E8=B6=85=E5=88=86=E6=89=80=E6=9C=89=E5=BA=94=E7=94=A8=E6=9C=8D?= =?UTF-8?q?=E5=8A=A1=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_pre_processing/service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/service/design_pre_processing/service.py b/app/service/design_pre_processing/service.py index 42cfd6b..a5b3a40 100644 --- a/app/service/design_pre_processing/service.py +++ b/app/service/design_pre_processing/service.py @@ -28,10 +28,10 @@ class DesignPreprocessing: bounding_box_sketches_list = self.bounding_box(sketches_list) # logging.info("bounding box image success") - super_resolution_list = self.super_resolution(bounding_box_sketches_list) + # super_resolution_list = self.super_resolution(bounding_box_sketches_list) # logging.info("super_resolution_list image success") - infer_sketches_list = self.infer_image(super_resolution_list) + infer_sketches_list = self.infer_image(bounding_box_sketches_list) # logging.info("infer image success") result = self.composing_image(infer_sketches_list) From 02dd3200797c55556d379d3979ad6ff1ccfef09d Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Tue, 27 Aug 2024 11:28:41 +0800 Subject: [PATCH 056/103] feat fix --- app/service/design/items/pipelines/split.py | 15 +++++++-------- app/service/utils/oss_client.py | 2 +- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/app/service/design/items/pipelines/split.py b/app/service/design/items/pipelines/split.py index 6569c5d..62e61f9 100644 --- a/app/service/design/items/pipelines/split.py +++ b/app/service/design/items/pipelines/split.py @@ -1,4 +1,3 @@ -import io import logging import cv2 @@ -38,8 +37,8 @@ class Split(object): result['front_image'], result["front_image_url"], _ = upload_png_mask(result_front_image_pil, f'{generate_uuid()}', mask=None) height, width = front_mask.shape - mask_image = np.zeros((height, width, 3)) - mask_image[front_mask != 0] 
= [0, 0, 255] + mask_image = np.zeros((height, width, 4), dtype=np.uint8) + mask_image[front_mask != 0] = [0, 0, 255, 255] if result["name"] in ('blouse', 'dress', 'outwear', 'tops'): result_back_image = np.zeros_like(rgba_image) @@ -47,14 +46,14 @@ class Split(object): result_back_image[back_mask != 0] = rgba_image[back_mask != 0] result_back_image_pil = Image.fromarray(cvtColor(result_back_image, COLOR_BGR2RGBA)) result['back_image'], result["back_image_url"], _ = upload_png_mask(result_back_image_pil, f'{generate_uuid()}', mask=None) - mask_image[back_mask != 0] = [0, 255, 0] + mask_image[back_mask != 0] = [0, 255, 0, 255] - image_bytes = cv2.imencode(".jpg", mask_image)[1].tobytes() - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.jpg", image_bytes=image_bytes) + image_bytes = cv2.imencode(".png", mask_image)[1].tobytes() + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", image_bytes=image_bytes) result['mask_url'] = req.bucket_name + "/" + req.object_name else: - image_bytes = cv2.imencode(".jpg", mask_image)[1].tobytes() - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.jpg", image_bytes=image_bytes) + image_bytes = cv2.imencode(".png", mask_image)[1].tobytes() + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", image_bytes=image_bytes) result['mask_url'] = req.bucket_name + "/" + req.object_name result['back_image'] = None result["back_image_url"] = None diff --git a/app/service/utils/oss_client.py b/app/service/utils/oss_client.py index 5557641..65ce3a2 100644 --- a/app/service/utils/oss_client.py +++ b/app/service/utils/oss_client.py @@ -88,7 +88,7 @@ if __name__ == '__main__': # url = "aida-users/89/sketchboard/female/Dress/e6724ab7-8d3f-4677-abe0-c3e42ab7af85.jpeg" # url = "aida-users/87/print/956614a2-7e75-4fbe-9ed0-c1831e37a2c9-4-87.png" # url = "aida-users/89/single_logo/123-89.png" - url 
= "aida-users/89/product_image/string-89.png" + url = "aida-clothing/mask/mask_f354afb5-6423-11ef-8b08-0826ae3ad6b3.png" # url = "aida-collection-element/12148/Sketchboard/95ea577b-305b-4a62-b30a-39c0dd3ddb3f.png" read_type = "cv2" if read_type == "cv2": From 87988d7ebee903204eb66cb20de2dd4659e1d513 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Tue, 27 Aug 2024 11:47:22 +0800 Subject: [PATCH 057/103] =?UTF-8?q?feat=20fix=20=20=20design=20mask=20?= =?UTF-8?q?=E4=BF=AE=E6=94=B9=E4=B8=BA=E9=80=8F=E6=98=8E=E8=83=8C=E6=99=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/items/pipelines/split.py | 25 +++++++++++++++------ 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/app/service/design/items/pipelines/split.py b/app/service/design/items/pipelines/split.py index 62e61f9..5fb568e 100644 --- a/app/service/design/items/pipelines/split.py +++ b/app/service/design/items/pipelines/split.py @@ -1,3 +1,4 @@ +import io import logging import cv2 @@ -37,8 +38,8 @@ class Split(object): result['front_image'], result["front_image_url"], _ = upload_png_mask(result_front_image_pil, f'{generate_uuid()}', mask=None) height, width = front_mask.shape - mask_image = np.zeros((height, width, 4), dtype=np.uint8) - mask_image[front_mask != 0] = [0, 0, 255, 255] + mask_image = np.zeros((height, width, 3)) + mask_image[front_mask != 0] = [0, 0, 255] if result["name"] in ('blouse', 'dress', 'outwear', 'tops'): result_back_image = np.zeros_like(rgba_image) @@ -46,14 +47,24 @@ class Split(object): result_back_image[back_mask != 0] = rgba_image[back_mask != 0] result_back_image_pil = Image.fromarray(cvtColor(result_back_image, COLOR_BGR2RGBA)) result['back_image'], result["back_image_url"], _ = upload_png_mask(result_back_image_pil, f'{generate_uuid()}', mask=None) - mask_image[back_mask != 0] = [0, 255, 0, 255] + mask_image[back_mask != 0] = [0, 255, 0] - image_bytes = cv2.imencode(".png", 
mask_image)[1].tobytes() - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", image_bytes=image_bytes) + rbga_mask = rgb_to_rgba(mask_image, front_mask + back_mask) + mask_pil = Image.fromarray(cvtColor(rbga_mask.astype(np.uint8), COLOR_BGR2RGBA)) + image_data = io.BytesIO() + mask_pil.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.jpg", image_bytes=image_bytes) result['mask_url'] = req.bucket_name + "/" + req.object_name else: - image_bytes = cv2.imencode(".png", mask_image)[1].tobytes() - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", image_bytes=image_bytes) + rbga_mask = rgb_to_rgba(mask_image, front_mask) + mask_pil = Image.fromarray(cvtColor(rbga_mask.astype(np.uint8), COLOR_BGR2RGBA)) + image_data = io.BytesIO() + mask_pil.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.jpg", image_bytes=image_bytes) result['mask_url'] = req.bucket_name + "/" + req.object_name result['back_image'] = None result["back_image_url"] = None From fbaa6da92a60e432d2af1022d1626593e94b6eb3 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 28 Aug 2024 11:45:33 +0800 Subject: [PATCH 058/103] =?UTF-8?q?feat=20fix=20=20=20design=20=E7=94=A8?= =?UTF-8?q?=E6=88=B7=E8=87=AA=E5=AE=9A=E4=B9=89mask?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../design/items/pipelines/segmentation.py | 42 +++++++++++++------ 1 file changed, 29 insertions(+), 13 deletions(-) diff --git a/app/service/design/items/pipelines/segmentation.py b/app/service/design/items/pipelines/segmentation.py index fba6082..ac38b34 100644 --- a/app/service/design/items/pipelines/segmentation.py +++ 
b/app/service/design/items/pipelines/segmentation.py @@ -1,9 +1,11 @@ import logging import os +import cv2 import numpy as np from app.core.config import SEG_CACHE_PATH +from app.service.utils.oss_client import oss_get_image from ..builder import PIPELINES from ...utils.design_ensemble import get_seg_result @@ -15,19 +17,33 @@ class Segmentation(object): # @ClassCallRunTime def __call__(self, result): - # 本地查询seg 缓存是否存在 - _, seg_result = self.load_seg_result(result["image_id"]) - result['seg_result'] = seg_result - if not _: - # 推理获得seg 结果 - seg_result = get_seg_result(result["image_id"], result['image'])[0] - self.save_seg_result(seg_result, result['image_id']) - # 处理前片后片 - temp_front = seg_result == 1.0 - result['front_mask'] = (255 * (temp_front + 0).astype(np.uint8)) - temp_back = seg_result == 2.0 - result['back_mask'] = (255 * (temp_back + 0).astype(np.uint8)) - result['mask'] = result['front_mask'] + result['back_mask'] + if "seg_mask_url" in result.keys() and result['seg_mask_url'] != "": + seg_mask = oss_get_image(bucket=result['seg_mask_url'].split('/')[0], object_name=result['seg_mask_url'][result['seg_mask_url'].find('/') + 1:], data_type="cv2") + seg_mask = cv2.resize(seg_mask, (result['img_shape'][1], result['img_shape'][0])) + # 转换颜色空间为 RGB(OpenCV 默认是 BGR) + image_rgb = cv2.cvtColor(seg_mask, cv2.COLOR_BGR2RGB) + + front_color = np.array([255, 0, 0], dtype=np.uint8) + back_color = np.array([0, 255, 0], dtype=np.uint8) + + # 创建红色和绿色掩码 + result['front_mask'] = cv2.inRange(image_rgb, front_color, front_color) + result['back_mask'] = cv2.inRange(image_rgb, back_color, back_color) + result['mask'] = result['front_mask'] + result['back_mask'] + else: + # 本地查询seg 缓存是否存在 + _, seg_result = self.load_seg_result(result["image_id"]) + result['seg_result'] = seg_result + if not _: + # 推理获得seg 结果 + seg_result = get_seg_result(result["image_id"], result['image'])[0] + self.save_seg_result(seg_result, result['image_id']) + # 处理前片后片 + temp_front = seg_result == 1.0 + 
result['front_mask'] = (255 * (temp_front + 0).astype(np.uint8)) + temp_back = seg_result == 2.0 + result['back_mask'] = (255 * (temp_back + 0).astype(np.uint8)) + result['mask'] = result['front_mask'] + result['back_mask'] return result @staticmethod From 14ff063f87005096210b205bf915ab7dc919c091 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 28 Aug 2024 11:47:00 +0800 Subject: [PATCH 059/103] =?UTF-8?q?feat=20fix=20=20=20design=20=E7=94=A8?= =?UTF-8?q?=E6=88=B7=E8=87=AA=E5=AE=9A=E4=B9=89mask?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_design.py | 176 ++++++++++++++++++++++++++---------------- 1 file changed, 109 insertions(+), 67 deletions(-) diff --git a/app/api/api_design.py b/app/api/api_design.py index d4537c1..bc3d1b9 100644 --- a/app/api/api_design.py +++ b/app/api/api_design.py @@ -24,28 +24,28 @@ def design(request_data: DesignModel): "basic": { "body_point_test": { "waistband_right": [ - 203, - 249 + 200, + 241 ], "hand_point_right": [ - 229, - 343 + 223, + 297 ], "waistband_left": [ - 119, - 248 + 112, + 241 ], "hand_point_left": [ - 97, - 343 + 92, + 305 ], "shoulder_left": [ - 108, - 107 + 99, + 116 ], "shoulder_right": [ - 212, - 107 + 215, + 116 ] }, "layer_order": true, @@ -57,65 +57,33 @@ def design(request_data: DesignModel): }, "items": [ { - "businessId": 255303, - "color": "139 148 156", - "image_id": 95159, + "businessId": 270372, + "color": "30 28 28", + "image_id": 69780, "offset": [ 0, 0 ], - "path": "aida-users/89/sketch/c89d75f3-581f-4edd-9f8e-b08e84a2cbe7-3-89.png", + "path": "aida-sys-image/images/female/trousers/0825000630.jpg", + "seg_mask_url": "test/result.png", "print": { - "single": { - "location": [ - [ - 200.0, - 200.0 - ] - ], - "print_angle_list": [ - 0.0 - ], - "print_path_list": [ - "aida-users/89/slogan_image/ce0b2423-9e5a-466f-9611-c254940a7819-1-89.png" - ], - "print_scale_list": [ - 1.0 - ] + "element": { + "element_angle_list": [], + 
"element_path_list": [], + "element_scale_list": [], + "location": [] }, "overall": { - "location": [ - [ - 512.0, - 512.0 - ] - ], - "print_angle_list": [ - 0.0 - ], - "print_path_list": [ - "aida-users/89/print/468643b4-bc2d-41b2-9a16-79766606a2db-3-89.png" - ], - "print_scale_list": [ - 1.0 - ] + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] }, - "element": { - "element_angle_list": [ - 0.0 - ], - "element_path_list": [ - "aida-users/88/designelements/Embroidery/a4d9605a-675e-4606-93e0-77ca6baaf55f.png" - ], - "element_scale_list": [ - 0.2731036750637755 - ], - "location": [ - [ - 228.63694825464364, - 406.4843844199667 - ] - ] + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] } }, "priority": 10, @@ -123,17 +91,91 @@ def design(request_data: DesignModel): 1.0, 1.0 ], - "type": "Dress" + "type": "Trousers" }, { - "body_path": "aida-sys-image/models/female/2e4815b9-1191-419d-94ed-5771239ca4a5.png", - "image_id": 67277, + "businessId": 270373, + "color": "30 28 28", + "image_id": 98243, + "offset": [ + 0, + 0 + ], + "path": "aida-sys-image/images/female/blouse/0902003811.jpg", + "seg_mask_url": "test/result.png", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "priority": 11, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Blouse" + }, + { + "businessId": 270374, + "color": "172 68 68", + "image_id": 98244, + "offset": [ + 0, + 0 + ], + "path": "aida-sys-image/images/female/outwear/0825000410.jpg", + "seg_mask_url": "test/result.png", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + 
"location": [] + }, + "overall": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "priority": 12, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Outwear" + }, + { + "body_path": "aida-sys-image/models/female/5bdfe7ca-64eb-44e4-b03d-8e517520c795.png", + "image_id": 96090, "type": "Body" } ] } ], - "process_id": "89" + "process_id": "83" } """ logger.info(f"design request item is : @@@@@@:{json.dumps(request_data.dict())}") From 70b05a7beb55a382af45978cdc8c2c89b11e64e2 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 28 Aug 2024 16:39:39 +0800 Subject: [PATCH 060/103] =?UTF-8?q?feat=20=20=20=E6=96=B0=E5=A2=9E?= =?UTF-8?q?=E5=9B=BE=E7=89=87=E4=BA=AE=E5=BA=A6=E6=8F=90=E9=AB=98=E6=8E=A5?= =?UTF-8?q?=E5=8F=A3=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_brighten.py | 65 +++++++++++++++++++++++++++++++++++++++++ app/api/api_route.py | 2 ++ app/schemas/brighten.py | 8 +++++ 3 files changed, 75 insertions(+) create mode 100644 app/api/api_brighten.py create mode 100644 app/schemas/brighten.py diff --git a/app/api/api_brighten.py b/app/api/api_brighten.py new file mode 100644 index 0000000..c06acd9 --- /dev/null +++ b/app/api/api_brighten.py @@ -0,0 +1,65 @@ +import io +import json +import logging +import time + +from PIL import ImageEnhance +from fastapi import APIRouter, HTTPException + +from app.schemas.brighten import BrightenModel +from app.schemas.response_template import ResponseModel +from app.service.utils.oss_client import oss_get_image, oss_upload_image + +router = APIRouter() +logger = logging.getLogger() + + +def increase_brightness(img, factor): + enhancer = ImageEnhance.Brightness(img) + bright_img = enhancer.enhance(factor) + return bright_img + + +@router.post("/brighten") +async def brighten(request_item: 
BrightenModel): + """ + 创建一个具有以下参数的请求体: + - **image_url**: 提亮图片url + - **brighten_value**: 提高亮度的比重 亮度因子 1.0 表示原始亮度,1.5 表示增加 50% 的亮度 + - **brighten_bucket**: 结果图保存的bucket + - **brighten_name**: 结果图保存的object name + + 示例参数: + { + "image_url": "aida-users/89/relight_image/3850e17b-3efd-4597-90ef-2a7bcd1a1a0b-0-89.png", + "brighten_value": 1.5, + "brighten_bucket": "test", + "brighten_name": "12341556-89.jpg" + } + """ + try: + start_time = time.time() + logger.info(f"brighten request item is : @@@@@@:{json.dumps(request_item.dict())}") + image = oss_get_image(bucket=request_item.image_url.split('/')[0], object_name=request_item.image_url[request_item.image_url.find('/') + 1:], data_type="PIL") + new_image = increase_brightness(image, request_item.brighten_value) + image_data = io.BytesIO() + new_image.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + req = oss_upload_image(bucket=request_item.brighten_bucket, object_name=request_item.brighten_name, image_bytes=image_bytes) + brighten_url = f"{req.bucket_name}/{req.object_name}" + logger.info(f"run time is : {time.time() - start_time}") + except Exception as e: + logger.warning(f"brighten Run Exception @@@@@@:{e}") + raise HTTPException(status_code=404, detail=str(e)) + return ResponseModel(data=brighten_url) + + +if __name__ == '__main__': + request_item = BrightenModel(image_url="aida-users/89/relight_image/3850e17b-3efd-4597-90ef-2a7bcd1a1a0b-0-89.png", + brighten_value=1.5, + brighten_bucket="test", + brighten_name="12341556-89.png") + image = oss_get_image(bucket=request_item.image_url.split('/')[0], object_name=request_item.image_url[request_item.image_url.find('/') + 1:], data_type="PIL") + new_image = increase_brightness(image, request_item.brighten_value) + new_image.show() diff --git a/app/api/api_route.py b/app/api/api_route.py index 8bcbe44..7ee774d 100644 --- a/app/api/api_route.py +++ b/app/api/api_route.py @@ -1,6 +1,7 @@ from fastapi import APIRouter from app.api 
import api_attribute_retrieve +from app.api import api_brighten from app.api import api_chat_robot from app.api import api_design from app.api import api_design_pre_processing @@ -21,3 +22,4 @@ router.include_router(api_chat_robot.router, tags=['chat_robot'], prefix="/api") router.include_router(api_prompt_generation.router, tags=['prompt_generation'], prefix="/api") router.include_router(api_design_pre_processing.router, tags=['design_pre_processing'], prefix="/api") router.include_router(api_image2sketch.router, tags=['api_image2sketch'], prefix="/api") +router.include_router(api_brighten.router, tags=['api_brighten'], prefix="/api") diff --git a/app/schemas/brighten.py b/app/schemas/brighten.py new file mode 100644 index 0000000..c25c6d0 --- /dev/null +++ b/app/schemas/brighten.py @@ -0,0 +1,8 @@ +from pydantic import BaseModel + + +class BrightenModel(BaseModel): + image_url: str + brighten_value: float + brighten_bucket: str + brighten_name: str From b867c5ddb71b971b2738eb4ded547ab9c3ba7743 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 29 Aug 2024 14:39:17 +0800 Subject: [PATCH 061/103] =?UTF-8?q?feat=20fix=20=20=20design=20mask?= =?UTF-8?q?=E6=A0=87=E6=B3=A8=E6=8F=90=E5=8F=96=20=E8=AE=BE=E7=BD=AE?= =?UTF-8?q?=E9=A2=9C=E8=89=B2=E7=9A=84=E4=B8=8A=E7=95=8C=E4=B8=8B=E7=95=8C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/items/pipelines/segmentation.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/app/service/design/items/pipelines/segmentation.py b/app/service/design/items/pipelines/segmentation.py index ac38b34..2937df6 100644 --- a/app/service/design/items/pipelines/segmentation.py +++ b/app/service/design/items/pipelines/segmentation.py @@ -23,12 +23,18 @@ class Segmentation(object): # 转换颜色空间为 RGB(OpenCV 默认是 BGR) image_rgb = cv2.cvtColor(seg_mask, cv2.COLOR_BGR2RGB) - front_color = np.array([255, 0, 0], dtype=np.uint8) - back_color = np.array([0, 
255, 0], dtype=np.uint8) + # 定义红色和绿色的颜色范围 + # 红色范围: 下界 [R-10, G-10, B-10], 上界 [R+10, G+10, B+10] + red_lower = np.array([50, 0, 0], dtype=np.uint8) + red_upper = np.array([255, 50, 50], dtype=np.uint8) + + # 绿色范围: 下界 [R-10, G-10, B-10], 上界 [R+10, G+10, B+10] + green_lower = np.array([0, 50, 0], dtype=np.uint8) + green_upper = np.array([50, 255, 50], dtype=np.uint8) # 创建红色和绿色掩码 - result['front_mask'] = cv2.inRange(image_rgb, front_color, front_color) - result['back_mask'] = cv2.inRange(image_rgb, back_color, back_color) + result['front_mask'] = cv2.inRange(image_rgb, red_lower, red_upper) + result['back_mask'] = cv2.inRange(image_rgb, green_lower, green_upper) result['mask'] = result['front_mask'] + result['back_mask'] else: # 本地查询seg 缓存是否存在 From 49fa11bfc1170f405f681eee6139d975e54e253d Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 30 Aug 2024 09:57:08 +0800 Subject: [PATCH 062/103] =?UTF-8?q?feat=20fix=20=20=20design=20mask?= =?UTF-8?q?=E6=A0=87=E6=B3=A8=E6=8F=90=E5=8F=96=20single=E6=A8=A1=E5=BC=8F?= =?UTF-8?q?=E5=85=BC=E5=AE=B9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index 798b6f4..cd67253 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -129,7 +129,7 @@ def process_object(cfg, process_id, total): 'position': None, 'priority': 0, 'image_url': item.result['front_image_url'], - 'mask_url': item.result['front_mask_url'], + 'mask_url': item.result['mask_url'], "gradient_string": item.result['gradient_string'] if 'gradient_string' in item.result.keys() else "" }) @@ -139,7 +139,7 @@ def process_object(cfg, process_id, total): 'position': None, 'priority': 0, 'image_url': item.result['back_image_url'], - 'mask_url': item.result['back_mask_url'], + 'mask_url': item.result['mask_url'], "gradient_string": 
item.result['gradient_string'] if 'gradient_string' in item.result.keys() else "" }) From d8883f6eb6787dc3c5df1e0cc6c4e4fe8abcf12f Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 30 Aug 2024 10:44:50 +0800 Subject: [PATCH 063/103] =?UTF-8?q?feat=20fix=20=20=20design=20single=20?= =?UTF-8?q?=E7=BB=93=E6=9E=9C=E6=96=B0=E5=A2=9Eprint=E4=B8=AD=E9=97=B4?= =?UTF-8?q?=E5=9B=BE?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/service.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/app/service/design/service.py b/app/service/design/service.py index cd67253..ba7e987 100644 --- a/app/service/design/service.py +++ b/app/service/design/service.py @@ -130,7 +130,8 @@ def process_object(cfg, process_id, total): 'priority': 0, 'image_url': item.result['front_image_url'], 'mask_url': item.result['mask_url'], - "gradient_string": item.result['gradient_string'] if 'gradient_string' in item.result.keys() else "" + "gradient_string": item.result['gradient_string'] if 'gradient_string' in item.result.keys() else "", + 'pattern_image_url': item.result['pattern_image_url'] if 'pattern_image_url' in item.result.keys() else None, }) items_response['layers'].append({ @@ -140,7 +141,8 @@ def process_object(cfg, process_id, total): 'priority': 0, 'image_url': item.result['back_image_url'], 'mask_url': item.result['mask_url'], - "gradient_string": item.result['gradient_string'] if 'gradient_string' in item.result.keys() else "" + "gradient_string": item.result['gradient_string'] if 'gradient_string' in item.result.keys() else "", + 'pattern_image_url': item.result['pattern_image_url'] if 'pattern_image_url' in item.result.keys() else None, }) items_response['synthesis_url'] = synthesis_single(item.result['front_image'], item.result['back_image']) From f5fc6b0c682d3f0664bc2e7fe812e179247c027b Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Tue, 3 Sep 2024 15:49:42 +0800 Subject: [PATCH 
064/103] =?UTF-8?q?feat=20fix=20=20=20relight=20=E8=A1=A5?= =?UTF-8?q?=E5=85=89=E6=8E=A5=E5=8F=A3=E4=BF=AE=E6=94=B9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_brighten.py | 12 +++--------- app/schemas/brighten.py | 2 -- 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/app/api/api_brighten.py b/app/api/api_brighten.py index c06acd9..cc5a03f 100644 --- a/app/api/api_brighten.py +++ b/app/api/api_brighten.py @@ -26,15 +26,11 @@ async def brighten(request_item: BrightenModel): 创建一个具有以下参数的请求体: - **image_url**: 提亮图片url - **brighten_value**: 提高亮度的比重 亮度因子 1.0 表示原始亮度,1.5 表示增加 50% 的亮度 - - **brighten_bucket**: 结果图保存的bucket - - **brighten_name**: 结果图保存的object name 示例参数: { "image_url": "aida-users/89/relight_image/3850e17b-3efd-4597-90ef-2a7bcd1a1a0b-0-89.png", - "brighten_value": 1.5, - "brighten_bucket": "test", - "brighten_name": "12341556-89.jpg" + "brighten_value": 1.5 } """ try: @@ -46,7 +42,7 @@ async def brighten(request_item: BrightenModel): new_image.save(image_data, format='PNG') image_data.seek(0) image_bytes = image_data.read() - req = oss_upload_image(bucket=request_item.brighten_bucket, object_name=request_item.brighten_name, image_bytes=image_bytes) + req = oss_upload_image(bucket=request_item.image_url.split('/')[0], object_name=request_item.image_url[request_item.image_url.find('/') + 1:], image_bytes=image_bytes) brighten_url = f"{req.bucket_name}/{req.object_name}" logger.info(f"run time is : {time.time() - start_time}") except Exception as e: @@ -57,9 +53,7 @@ async def brighten(request_item: BrightenModel): if __name__ == '__main__': request_item = BrightenModel(image_url="aida-users/89/relight_image/3850e17b-3efd-4597-90ef-2a7bcd1a1a0b-0-89.png", - brighten_value=1.5, - brighten_bucket="test", - brighten_name="12341556-89.png") + brighten_value=1.5) image = oss_get_image(bucket=request_item.image_url.split('/')[0], 
object_name=request_item.image_url[request_item.image_url.find('/') + 1:], data_type="PIL") new_image = increase_brightness(image, request_item.brighten_value) new_image.show() diff --git a/app/schemas/brighten.py b/app/schemas/brighten.py index c25c6d0..e407905 100644 --- a/app/schemas/brighten.py +++ b/app/schemas/brighten.py @@ -4,5 +4,3 @@ from pydantic import BaseModel class BrightenModel(BaseModel): image_url: str brighten_value: float - brighten_bucket: str - brighten_name: str From cfa2cd1987151cc84b2e05b956a9cdfa38cf98f8 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 4 Sep 2024 15:05:05 +0800 Subject: [PATCH 065/103] =?UTF-8?q?feat=20fix=20=20=20design=20mask=20?= =?UTF-8?q?=E7=BA=A2=E7=BB=BF=E5=88=A4=E6=96=AD=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../design/items/pipelines/segmentation.py | 17 ++++++----------- app/service/design/items/pipelines/split.py | 4 ++-- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/app/service/design/items/pipelines/segmentation.py b/app/service/design/items/pipelines/segmentation.py index 2937df6..e7f09ed 100644 --- a/app/service/design/items/pipelines/segmentation.py +++ b/app/service/design/items/pipelines/segmentation.py @@ -19,22 +19,17 @@ class Segmentation(object): def __call__(self, result): if "seg_mask_url" in result.keys() and result['seg_mask_url'] != "": seg_mask = oss_get_image(bucket=result['seg_mask_url'].split('/')[0], object_name=result['seg_mask_url'][result['seg_mask_url'].find('/') + 1:], data_type="cv2") - seg_mask = cv2.resize(seg_mask, (result['img_shape'][1], result['img_shape'][0])) + seg_mask = cv2.resize(seg_mask, (result['img_shape'][1], result['img_shape'][0]), interpolation=cv2.INTER_NEAREST) # 转换颜色空间为 RGB(OpenCV 默认是 BGR) image_rgb = cv2.cvtColor(seg_mask, cv2.COLOR_BGR2RGB) - # 定义红色和绿色的颜色范围 - # 红色范围: 下界 [R-10, G-10, B-10], 上界 [R+10, G+10, B+10] - red_lower = np.array([50, 0, 0], dtype=np.uint8) - 
red_upper = np.array([255, 50, 50], dtype=np.uint8) - - # 绿色范围: 下界 [R-10, G-10, B-10], 上界 [R+10, G+10, B+10] - green_lower = np.array([0, 50, 0], dtype=np.uint8) - green_upper = np.array([50, 255, 50], dtype=np.uint8) + r, g, b = cv2.split(image_rgb) + red_mask = r > g + green_mask = g > r # 创建红色和绿色掩码 - result['front_mask'] = cv2.inRange(image_rgb, red_lower, red_upper) - result['back_mask'] = cv2.inRange(image_rgb, green_lower, green_upper) + result['front_mask'] = np.array(red_mask, dtype=np.uint8) * 255 + result['back_mask'] = np.array(green_mask, dtype=np.uint8) * 255 result['mask'] = result['front_mask'] + result['back_mask'] else: # 本地查询seg 缓存是否存在 diff --git a/app/service/design/items/pipelines/split.py b/app/service/design/items/pipelines/split.py index 5fb568e..3485453 100644 --- a/app/service/design/items/pipelines/split.py +++ b/app/service/design/items/pipelines/split.py @@ -55,7 +55,7 @@ class Split(object): mask_pil.save(image_data, format='PNG') image_data.seek(0) image_bytes = image_data.read() - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.jpg", image_bytes=image_bytes) + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", image_bytes=image_bytes) result['mask_url'] = req.bucket_name + "/" + req.object_name else: rbga_mask = rgb_to_rgba(mask_image, front_mask) @@ -64,7 +64,7 @@ class Split(object): mask_pil.save(image_data, format='PNG') image_data.seek(0) image_bytes = image_data.read() - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.jpg", image_bytes=image_bytes) + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", image_bytes=image_bytes) result['mask_url'] = req.bucket_name + "/" + req.object_name result['back_image'] = None result["back_image_url"] = None From 44bb38094429e988d072d3c20984fd89040421f1 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 12 Sep 2024 10:05:38 +0800 
Subject: [PATCH 066/103] feat design batch fix --- .../request_data/requests_data.json | 90 +++ .../design/design_batch/items/__init__.py | 0 app/service/design/design_batch/items/item.py | 281 ++++++++++ .../design_batch/items/pipeline/__init__.py | 20 + .../design_batch/items/pipeline/color.py | 60 ++ .../items/pipeline/contour_detection.py | 37 ++ .../design_batch/items/pipeline/keypoint.py | 114 ++++ .../design_batch/items/pipeline/loading.py | 68 +++ .../items/pipeline/print_painting.py | 523 ++++++++++++++++++ .../design_batch/items/pipeline/scale.py | 49 ++ .../items/pipeline/segmentation.py | 67 +++ .../design_batch/items/pipeline/split.py | 71 +++ .../design_batch/items/utils/__init__.py | 0 .../items/utils/conversion_image.py | 31 ++ .../items/utils/design_ensemble.py | 143 +++++ .../design_batch/items/utils/redis_utils.py | 99 ++++ .../items/utils/synthesis_item.py | 181 ++++++ .../design_batch/items/utils/upload_image.py | 55 ++ 18 files changed, 1889 insertions(+) create mode 100644 app/design_batch/request_data/requests_data.json create mode 100644 app/service/design/design_batch/items/__init__.py create mode 100644 app/service/design/design_batch/items/item.py create mode 100644 app/service/design/design_batch/items/pipeline/__init__.py create mode 100644 app/service/design/design_batch/items/pipeline/color.py create mode 100644 app/service/design/design_batch/items/pipeline/contour_detection.py create mode 100644 app/service/design/design_batch/items/pipeline/keypoint.py create mode 100644 app/service/design/design_batch/items/pipeline/loading.py create mode 100644 app/service/design/design_batch/items/pipeline/print_painting.py create mode 100644 app/service/design/design_batch/items/pipeline/scale.py create mode 100644 app/service/design/design_batch/items/pipeline/segmentation.py create mode 100644 app/service/design/design_batch/items/pipeline/split.py create mode 100644 app/service/design/design_batch/items/utils/__init__.py create mode 100644 
app/service/design/design_batch/items/utils/conversion_image.py create mode 100644 app/service/design/design_batch/items/utils/design_ensemble.py create mode 100644 app/service/design/design_batch/items/utils/redis_utils.py create mode 100644 app/service/design/design_batch/items/utils/synthesis_item.py create mode 100644 app/service/design/design_batch/items/utils/upload_image.py diff --git a/app/design_batch/request_data/requests_data.json b/app/design_batch/request_data/requests_data.json new file mode 100644 index 0000000..1dba8d1 --- /dev/null +++ b/app/design_batch/request_data/requests_data.json @@ -0,0 +1,90 @@ +{ + "objects": [ + { + "basic": { + "body_point_test": { + "waistband_right": [ + 201, + 242 + ], + "hand_point_right": [ + 222, + 312 + ], + "waistband_left": [ + 114, + 243 + ], + "hand_point_left": [ + 94, + 310 + ], + "shoulder_left": [ + 102, + 116 + ], + "shoulder_right": [ + 211, + 115 + ] + }, + "layer_order": true, + "scale_bag": 0.7, + "scale_earrings": 0.16, + "self_template": true, + "single_overall": "overall", + "switch_category": "" + }, + "items": [ + { + "businessId": 264931, + "color": "145 220 232", + "image_id": 96844, + "offset": [ + 0, + 0 + ], + "path": "aida-users/87/sketch/2aa7aad5-74bb-41fa-9cdf-f06611b3e89a-2-87.png", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "priority": 10, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Dress" + }, + { + "body_path": "aida-sys-image/models/female/79805ec3-3f01-466d-91e0-36028d079699.png", + "image_id": 95444, + "type": "Body" + } + ] + } + + ], + "process_id": "87", + "tasks_id": , +} + + +//用 openai jsonl +// \ No newline at end of file diff --git 
a/app/service/design/design_batch/items/__init__.py b/app/service/design/design_batch/items/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/service/design/design_batch/items/item.py b/app/service/design/design_batch/items/item.py new file mode 100644 index 0000000..1e51fe8 --- /dev/null +++ b/app/service/design/design_batch/items/item.py @@ -0,0 +1,281 @@ +import time +from concurrent.futures import ThreadPoolExecutor +from pprint import pprint + +import cv2 + +from app.core.config import PRIORITY_DICT +from app.service.design.design_batch.items.pipeline import LoadImage, KeyPoint, Segmentation, Color, PrintPainting, Scaling, Split, ContourDetection, LoadBodyImage +from app.service.design.utils.synthesis_item import synthesis, synthesis_single + + +class BaseItem: + def __init__(self, data, basic): + self.result = data.copy() + self.result['name'] = data['type'].lower() + self.result.pop("type") + self.result.update(basic) + + +class TopItem(BaseItem): + def __init__(self, data, basic): + super().__init__(data, basic) + self.top_pipeline = [ + LoadImage(), + KeyPoint(), + Segmentation(), + Color(), + PrintPainting(), + Scaling(), + Split() + ] + + def process(self): + for item in self.top_pipeline: + self.result = item(self.result) + return self.result + + +class BottomItem(BaseItem): + def __init__(self, data, basic): + super().__init__(data, basic) + self.bottom_pipeline = [ + LoadImage(), + KeyPoint(), + ContourDetection(), + # Segmentation(), + Color(), + PrintPainting(), + Scaling(), + Split() + ] + + def process(self): + for item in self.bottom_pipeline: + self.result = item(self.result) + return self.result + + +class BodyItem(BaseItem): + def __init__(self, data, basic): + super().__init__(data, basic) + self.top_pipeline = [ + LoadBodyImage(), + ] + + def process(self): + for item in self.top_pipeline: + self.result = item(self.result) + return self.result + + +def process_item(item, basic): + if item['type'] == "Body": + 
body_server = BodyItem(data=item, basic=basic) + item_data = body_server.process() + elif item['type'].lower() in ['blouse', 'outwear', 'dress', 'tops']: + top_server = TopItem(data=item, basic=basic) + item_data = top_server.process() + else: + bottom_server = BottomItem(data=item, basic=basic) + item_data = bottom_server.process() + return item_data + + +def calculate_start_point(keypoint_type, scale, clothes_point, body_point, offset, resize_scale): + """ + Align left + Args: + keypoint_type: string, "waistband" | "shoulder" | "ear_point" + scale: float + clothes_point: dict{'left': [x1, y1, z1], 'right': [x2, y2, z2]} + body_point: dict, containing keypoint data of body figure + + Returns: + start_point: tuple (x', y') + x' = y_body - y1 * scale + offset + y' = x_body - x1 * scale + offset + + """ + side_indicator = f'{keypoint_type}_left' + start_point = ( + int(body_point[side_indicator][1] + offset[1] - int(clothes_point[side_indicator][0]) * scale), # y + int(body_point[side_indicator][0] + offset[0] - int(clothes_point[side_indicator][1]) * scale) # x + ) + return start_point + + +# 服装图层给数据组装 +def organize_clothing(layer): + # 起始坐标 + start_point = calculate_start_point(layer['keypoint'], layer['scale'], layer['clothes_keypoint'], layer['body_point_test'], layer["offset"], layer["resize_scale"]) + # 前片数据 + front_layer = dict(priority=layer['priority'] if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_front', None), + name=f'{layer["name"].lower()}_front', + image=layer["front_image"], + # mask_image=layer['front_mask_image'], + image_url=layer['front_image_url'], + mask_url=layer['mask_url'], + sacle=layer['scale'], + clothes_keypoint=layer['clothes_keypoint'], + position=start_point, + resize_scale=layer["resize_scale"], + mask=cv2.resize(layer['mask'], layer["front_image"].size), + gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", + pattern_image_url=layer['pattern_image_url'], + 
pattern_image=layer['pattern_image'] + + ) + # 后片数据 + back_layer = dict(priority=-layer.get("priority", 0) if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_back', None), + name=f'{layer["name"].lower()}_back', + image=layer["back_image"], + # mask_image=layer['back_mask_image'], + image_url=layer['back_image_url'], + mask_url=layer['mask_url'], + sacle=layer['scale'], + clothes_keypoint=layer['clothes_keypoint'], + position=start_point, + resize_scale=layer["resize_scale"], + mask=cv2.resize(layer['mask'], layer["front_image"].size), + gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", + pattern_image_url=layer['pattern_image_url'], + ) + return front_layer, back_layer + + +# 模特图层给数据组装 +def organize_body(layer): + body_layer = dict(priority=0, + name=layer["name"].lower(), + image=layer['body_image'], + image_url=layer['body_path'], + mask_image=None, + mask_url=None, + sacle=1, + # mask=layer['body_mask'], + position=(0, 0)) + return body_layer + + +def process_layer(item, layers): + if item['name'] == "mannequin": + body_layer = organize_body(item) + layers.append(body_layer) + return item['body_image'].size + else: + front_layer, back_layer = organize_clothing(item) + layers.append(front_layer) + layers.append(back_layer) + + +def process_object(object_data): + basic = object_data['basic'] + items_response = {'layers': []} + + if basic['single_overall'] == "overall": + item_results = [process_item(item, basic) for item in object_data['items']] + layers = [] + futures = [] + body_size = None + for item in item_results: + futures = [process_layer(item, layers)] + for future in futures: + if future is not None: + body_size = future + layers = sorted(layers, key=lambda s: s.get("priority", float('inf'))) + + layers, new_size = update_base_size_priority(layers, body_size) + + for lay in layers: + items_response['layers'].append({ + 'image_category': lay['name'], + 'position': lay['position'], + 
'priority': lay.get("priority", None), + 'resize_scale': lay['resize_scale'] if "resize_scale" in lay.keys() else None, + 'image_size': lay['image'] if lay['image'] is None else lay['image'].size, + 'gradient_string': lay['gradient_string'] if 'gradient_string' in lay.keys() else "", + 'mask_url': lay['mask_url'], + 'image_url': lay['image_url'] if 'image_url' in lay.keys() else None, + 'pattern_image_url': lay['pattern_image_url'] if 'pattern_image_url' in lay.keys() else None, + + # 'image': lay['image'], + # 'mask_image': lay['mask_image'], + }) + items_response['synthesis_url'] = synthesis(layers, new_size, basic) + else: + item_results = process_item(object_data['items'][0], basic) + items_response['layers'].append({ + 'image_category': f"{item_results['name']}_front", + 'image_size': item_results['back_image'].size if item_results['back_image'] else None, + 'position': None, + 'priority': 0, + 'image_url': item_results['front_image_url'], + 'mask_url': item_results['mask_url'], + "gradient_string": item_results['gradient_string'] if 'gradient_string' in item_results.keys() else "", + 'pattern_image_url': item_results['pattern_image_url'] if 'pattern_image_url' in item_results.keys() else None, + + }) + items_response['layers'].append({ + 'image_category': f"{item_results['name']}_back", + 'image_size': item_results['front_image'].size if item_results['front_image'] else None, + 'position': None, + 'priority': 0, + 'image_url': item_results['back_image_url'], + 'mask_url': item_results['mask_url'], + "gradient_string": item_results['gradient_string'] if 'gradient_string' in item_results.keys() else "", + 'pattern_image_url': item_results['pattern_image_url'] if 'pattern_image_url' in item_results.keys() else None, + + }) + items_response['synthesis_url'] = synthesis_single(item_results['front_image'], item_results['back_image']) + return items_response + + +def update_base_size_priority(layers, size): + # 计算透明背景图片的宽度 + min_x = min(info['position'][1] for info 
in layers) + x_list = [] + for info in layers: + if info['image'] is not None: + x_list.append(info['position'][1] + info['image'].width) + max_x = max(x_list) + new_width = max_x - min_x + new_height = 700 + # 更新坐标 + for info in layers: + info['adaptive_position'] = (info['position'][0], info['position'][1] - min_x) + return layers, (new_width, new_height) + + +def run(): + object = {"objects": [{"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 116441, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/outwear_p3139.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 81518, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0628000071.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": 
[0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 65687, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/outwear_746.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 90051, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0628000864.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + 
"resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 67420, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0825001648.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 90354, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0628001300.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], 
"hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 67420, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0825001648.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 101477, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/903000063.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}], "process_id": "3615898424593104"} + + object_result = {} + with ThreadPoolExecutor() as executor: + results = list(executor.map(process_object, object['objects'])) + for i, result in enumerate(results): + object_result[i] = result + + pprint(object_result) + + +if __name__ == '__main__': + start_time = 
time.time() + run() + print(time.time() - start_time) diff --git a/app/service/design/design_batch/items/pipeline/__init__.py b/app/service/design/design_batch/items/pipeline/__init__.py new file mode 100644 index 0000000..ec55933 --- /dev/null +++ b/app/service/design/design_batch/items/pipeline/__init__.py @@ -0,0 +1,20 @@ +from .color import Color +from .contour_detection import ContourDetection +from .keypoint import KeyPoint +from .keypoint import KeyPoint +from .loading import LoadImage, LoadBodyImage +from .print_painting import PrintPainting +from .scale import Scaling +from .segmentation import Segmentation +from .split import Split + +__all__ = [ + 'LoadBodyImage', 'LoadImage', + 'KeyPoint', + 'ContourDetection', + 'Segmentation', + 'Color', + 'PrintPainting', + 'Scaling', + 'Split' +] diff --git a/app/service/design/design_batch/items/pipeline/color.py b/app/service/design/design_batch/items/pipeline/color.py new file mode 100644 index 0000000..bc3676f --- /dev/null +++ b/app/service/design/design_batch/items/pipeline/color.py @@ -0,0 +1,60 @@ +import logging + +import cv2 +import numpy as np + +from app.service.utils.oss_client import oss_get_image + +logger = logging.getLogger() + + +class Color: + def __call__(self, result): + dim_image_h, dim_image_w = result['image'].shape[0:2] + if "gradient" in result.keys() and result['gradient'] != "": + bucket_name = result['gradient'].split('/')[0] + object_name = result['gradient'][result['gradient'].find('/') + 1:] + pattern = self.get_gradient(bucket_name=bucket_name, object_name=object_name) + resize_pattern = cv2.resize(pattern, (dim_image_w, dim_image_h), interpolation=cv2.INTER_AREA) + else: + pattern = self.get_pattern(result['color']) + resize_pattern = cv2.resize(pattern, (dim_image_w, dim_image_h), interpolation=cv2.INTER_AREA) + closed_mo = np.expand_dims(result['mask'], axis=2).repeat(3, axis=2) + gray_mo = np.expand_dims(result['gray'], axis=2).repeat(3, axis=2) + get_image_fir = resize_pattern * 
(closed_mo / 255) * (gray_mo / 255) + result['pattern_image'] = get_image_fir.astype(np.uint8) + result['final_image'] = result['pattern_image'] + canvas = np.full_like(result['final_image'], 255) + temp_bg = np.expand_dims(cv2.bitwise_not(result['mask']), axis=2).repeat(3, axis=2) + tmp1 = (canvas * (temp_bg / 255)).astype(np.uint8) + temp_fg = np.expand_dims(result['mask'], axis=2).repeat(3, axis=2) + tmp2 = (result['final_image'] * (temp_fg / 255)).astype(np.uint8) + result['single_image'] = cv2.add(tmp1, tmp2) + result['alpha'] = 100 / 255.0 + return result + + @staticmethod + def get_gradient(bucket_name, object_name): + # 获取渐变色图案 + image = oss_get_image(bucket=bucket_name, object_name=object_name, data_type="cv2") + if image.shape[2] == 4: + image = cv2.cvtColor(image, cv2.COLOR_BGRA2BGR) + return image + + @staticmethod + def crop_image(image, image_size_h, image_size_w): + x_offset = np.random.randint(low=0, high=int(image_size_h / 5) - 6) + y_offset = np.random.randint(low=0, high=int(image_size_w / 5) - 6) + image = image[x_offset: x_offset + image_size_h, y_offset: y_offset + image_size_w, :] + return image + + @staticmethod + def get_pattern(single_color): + if single_color is None: + raise False + R, G, B = single_color.split(' ') + pattern = np.zeros([1, 1, 3], np.uint8) + pattern[0, 0, 0] = int(B) + pattern[0, 0, 1] = int(G) + pattern[0, 0, 2] = int(R) + return pattern diff --git a/app/service/design/design_batch/items/pipeline/contour_detection.py b/app/service/design/design_batch/items/pipeline/contour_detection.py new file mode 100644 index 0000000..2b76c0b --- /dev/null +++ b/app/service/design/design_batch/items/pipeline/contour_detection.py @@ -0,0 +1,37 @@ +import cv2 +import numpy as np + + +class ContourDetection: + def __call__(self, result): + Contour = self.get_contours(result['image']) + Mask = np.zeros(result['image'].shape[:2], np.uint8) + if len(Contour): + Max_contour = Contour[0] + Epsilon = 0.001 * cv2.arcLength(Max_contour, True) 
+ Approx = cv2.approxPolyDP(Max_contour, Epsilon, True) + cv2.drawContours(Mask, [Approx], -1, 255, -1) + else: + Mask = np.ones(result['image'].shape[:2], np.uint8) * 255 + # TODO 修复部分图片出现透明的情况 下版本上线 + # img2gray = cv2.cvtColor(result['image'], cv2.COLOR_BGR2GRAY) + # ret, Mask = cv2.threshold(img2gray, 126, 255, cv2.THRESH_BINARY) + # Mask = cv2.bitwise_not(Mask) + if result['pre_mask'] is None: + result['mask'] = Mask + else: + result['mask'] = cv2.bitwise_and(Mask, result['pre_mask']) + result['front_mask'] = result['mask'] + result['back_mask'] = result['mask'] + return result + + @staticmethod + def get_contours(image): + gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) + Edge = cv2.Canny(gray, 10, 150) + kernel = np.ones((5, 5), np.uint8) + Edge = cv2.dilate(Edge, kernel=kernel, iterations=1) + Edge = cv2.erode(Edge, kernel=kernel, iterations=1) + Contour, _ = cv2.findContours(Edge, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) + Contour = sorted(Contour, key=cv2.contourArea, reverse=True) + return Contour diff --git a/app/service/design/design_batch/items/pipeline/keypoint.py b/app/service/design/design_batch/items/pipeline/keypoint.py new file mode 100644 index 0000000..243cf4e --- /dev/null +++ b/app/service/design/design_batch/items/pipeline/keypoint.py @@ -0,0 +1,114 @@ +import logging + +import numpy as np +from pymilvus import MilvusClient + +from app.core.config import * +from app.service.design.utils.design_ensemble import get_keypoint_result + +logger = logging.getLogger(__name__) + + +class KeyPoint: + name = "KeyPoint" + + @classmethod + def get_name(cls): + return cls.name + + def __call__(self, result): + if result['name'] in ['blouse', 'skirt', 'dress', 'outwear', 'trousers', 'tops', 'bottoms']: # 查询是否有数据 且类别相同 相同则直接读 不同则推理后更新 + # result['clothes_keypoint'] = self.infer_keypoint_result(result) + site = 'up' if result['name'] in ['blouse', 'outwear', 'dress', 'tops'] else 'down' + # keypoint_cache = search_keypoint_cache(result["image_id"], site) + 
keypoint_cache = self.keypoint_cache(result, site) + # 取消向量查询 直接过模型推理 + # keypoint_cache = False + if keypoint_cache is False: + keypoint_infer_result, site = self.infer_keypoint_result(result) + result['clothes_keypoint'] = self.save_keypoint_cache(result["image_id"], keypoint_infer_result, site) + else: + result['clothes_keypoint'] = keypoint_cache + return result + + @staticmethod + def infer_keypoint_result(result): + site = 'up' if result['name'] in ['blouse', 'outwear', 'dress', 'tops'] else 'down' + keypoint_infer_result = get_keypoint_result(result["image"], site) # 推理结果 + return keypoint_infer_result, site + + @staticmethod + def save_keypoint_cache(keypoint_id, cache, site): + if site == "down": + zeros = np.zeros(20, dtype=int) + result = np.concatenate([zeros, cache.flatten()]) + else: + zeros = np.zeros(4, dtype=int) + result = np.concatenate([cache.flatten(), zeros]) + # 取消向量保存 直接拿结果 + data = [ + {"keypoint_id": keypoint_id, + "keypoint_site": site, + "keypoint_vector": result.tolist() + } + ] + try: + client = MilvusClient(uri=MILVUS_URL, token=MILVUS_TOKEN, db_name=MILVUS_ALIAS) + res = client.upsert(collection_name=MILVUS_TABLE_KEYPOINT, data=data) + client.close() + return dict(zip(KEYPOINT_RESULT_TABLE_FIELD_SET, result.reshape(12, 2).astype(int).tolist())) + except Exception as e: + logger.info(f"save keypoint cache milvus error : {e}") + return dict(zip(KEYPOINT_RESULT_TABLE_FIELD_SET, result.reshape(12, 2).astype(int).tolist())) + + @staticmethod + def update_keypoint_cache(keypoint_id, infer_result, search_result, site): + if site == "up": + # 需要的是up 即推理出来的是up 那么查询的就是down + result = np.concatenate([infer_result.flatten(), search_result[-4:]]) + else: + # 需要的是down 即推理出来的是down 那么查询的就是up + result = np.concatenate([search_result[:20], infer_result.flatten()]) + data = [ + {"keypoint_id": keypoint_id, + "keypoint_site": "all", + "keypoint_vector": result.tolist() + } + ] + + try: + client = MilvusClient(uri=MILVUS_URL, token=MILVUS_TOKEN, 
db_name=MILVUS_ALIAS) + client.upsert( + collection_name=MILVUS_TABLE_KEYPOINT, + data=data + ) + return dict(zip(KEYPOINT_RESULT_TABLE_FIELD_SET, result.reshape(12, 2).astype(int).tolist())) + except Exception as e: + logger.info(f"save keypoint cache milvus error : {e}") + return dict(zip(KEYPOINT_RESULT_TABLE_FIELD_SET, result.reshape(12, 2).astype(int).tolist())) + + # @ RunTime + def keypoint_cache(self, result, site): + try: + client = MilvusClient(uri=MILVUS_URL, token=MILVUS_TOKEN, db_name=MILVUS_ALIAS) + keypoint_id = result['image_id'] + res = client.query( + collection_name=MILVUS_TABLE_KEYPOINT, + # ids=[keypoint_id], + filter=f"keypoint_id == {keypoint_id}", + output_fields=['keypoint_vector', 'keypoint_site'] + ) + if len(res) == 0: + # 没有结果 直接推理拿结果 并保存 + keypoint_infer_result, site = self.infer_keypoint_result(result) + return self.save_keypoint_cache(result['image_id'], keypoint_infer_result, site) + elif res[0]["keypoint_site"] == "all" or res[0]["keypoint_site"] == site: + # 需要的类型和查询的类型一致,或者查询的类型为all 则直接返回查询的结果 + return dict(zip(KEYPOINT_RESULT_TABLE_FIELD_SET, np.array(res[0]['keypoint_vector']).astype(int).reshape(12, 2).tolist())) + elif res[0]["keypoint_site"] != site: + # 需要的类型和查询到的不一致,则更新类型为all + keypoint_infer_result, site = self.infer_keypoint_result(result) + return self.update_keypoint_cache(result["image_id"], keypoint_infer_result, res[0]['keypoint_vector'], site) + except Exception as e: + logger.info(f"search keypoint cache milvus error {e}") + return False diff --git a/app/service/design/design_batch/items/pipeline/loading.py b/app/service/design/design_batch/items/pipeline/loading.py new file mode 100644 index 0000000..8786db0 --- /dev/null +++ b/app/service/design/design_batch/items/pipeline/loading.py @@ -0,0 +1,68 @@ +import cv2 + +from app.service.utils.oss_client import oss_get_image + + +class LoadBodyImage: + name = "LoadBodyImage" + + @classmethod + def get_name(cls): + return cls.name + + def __call__(self, result): + 
result["name"] = "mannequin" + result['body_image'] = oss_get_image(bucket=result['body_path'].split("/", 1)[0], object_name=result['body_path'].split("/", 1)[1], data_type="PIL") + return result + + +class LoadImage: + name = "LoadImage" + + @classmethod + def get_name(cls): + return cls.name + + def __call__(self, result): + result['image'], result['pre_mask'] = self.read_image(result['path']) + result['gray'] = cv2.cvtColor(result['image'], cv2.COLOR_BGR2GRAY) + result['keypoint'] = self.get_keypoint(result['name']) + result['img_shape'] = result['image'].shape + result['ori_shape'] = result['image'].shape + return result + + @staticmethod + def read_image(image_path): + image_mask = None + image = oss_get_image(bucket=image_path.split("/", 1)[0], object_name=image_path.split("/", 1)[1], data_type="cv2") + if len(image.shape) == 2: + image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB) + if image.shape[2] == 4: # 如果是四通道 mask + image_mask = image[:, :, 3] + image = image[:, :, :3] + + if image.shape[:2] <= (50, 50): + # 计算新尺寸 + new_size = (image.shape[1] * 2, image.shape[0] * 2) + # 调整大小 + image = cv2.resize(image, new_size, interpolation=cv2.INTER_LINEAR) + return image, image_mask + + @staticmethod + def get_keypoint(name): + if name == 'blouse' or name == 'outwear' or name == 'dress' or name == 'tops': + keypoint = 'shoulder' + elif name == 'trousers' or name == 'skirt' or name == 'bottoms': + keypoint = 'waistband' + elif name == 'bag': + keypoint = 'hand_point' + elif name == 'shoes': + keypoint = 'toe' + elif name == 'hairstyle': + keypoint = 'head_point' + elif name == 'earring': + keypoint = 'ear_point' + else: + raise KeyError(f"{name} does not belong to item category list: blouse, outwear, dress, trousers, skirt, " + f"bag, shoes, hairstyle, earring.") + return keypoint diff --git a/app/service/design/design_batch/items/pipeline/print_painting.py b/app/service/design/design_batch/items/pipeline/print_painting.py new file mode 100644 index 0000000..a620872 
--- /dev/null +++ b/app/service/design/design_batch/items/pipeline/print_painting.py @@ -0,0 +1,523 @@ +import random + +import cv2 +import numpy as np +from PIL import Image + +from app.service.utils.oss_client import oss_get_image + + +class PrintPainting: + def __call__(self, result): + single_print = result['print']['single'] + overall_print = result['print']['overall'] + element_print = result['print']['element'] + result['single_image'] = None + result['print_image'] = None + if overall_print['print_path_list']: + painting_dict = {'dim_image_h': result['pattern_image'].shape[0], 'dim_image_w': result['pattern_image'].shape[1]} + result['print_image'] = result['pattern_image'] + if "print_angle_list" in overall_print.keys() and overall_print['print_angle_list'][0] != 0: + painting_dict = self.painting_collection(painting_dict, overall_print, print_trigger=True) + painting_dict['tile_print'] = self.rotate_crop_image(img=painting_dict['tile_print'], angle=-overall_print['print_angle_list'][0], crop=True) + painting_dict['mask_inv_print'] = self.rotate_crop_image(img=painting_dict['mask_inv_print'], angle=-overall_print['print_angle_list'][0], crop=True) + + # resize 到sketch大小 + painting_dict['tile_print'] = self.resize_and_crop(img=painting_dict['tile_print'], target_width=painting_dict['dim_image_w'], target_height=painting_dict['dim_image_h']) + painting_dict['mask_inv_print'] = self.resize_and_crop(img=painting_dict['mask_inv_print'], target_width=painting_dict['dim_image_w'], target_height=painting_dict['dim_image_h']) + else: + painting_dict = self.painting_collection(painting_dict, overall_print, print_trigger=True, is_single=False) + result['print_image'] = self.printpaint(result, painting_dict, print_=True) + result['single_image'] = result['final_image'] = result['pattern_image'] = result['print_image'] + + if single_print['print_path_list']: + print_background = np.zeros((result['pattern_image'].shape[0], result['pattern_image'].shape[1], 3), 
dtype=np.uint8) + mask_background = np.zeros((result['pattern_image'].shape[0], result['pattern_image'].shape[1], 3), dtype=np.uint8) + for i in range(len(single_print['print_path_list'])): + image, image_mode = self.read_image(single_print['print_path_list'][i]) + if image_mode == "RGBA": + new_size = (int(image.width * single_print['print_scale_list'][i]), int(image.height * single_print['print_scale_list'][i])) + + mask = image.split()[3] + resized_source = image.resize(new_size) + resized_source_mask = mask.resize(new_size) + + rotated_resized_source = resized_source.rotate(-single_print['print_angle_list'][i]) + rotated_resized_source_mask = resized_source_mask.rotate(-single_print['print_angle_list'][i]) + + source_image_pil = Image.fromarray(cv2.cvtColor(print_background, cv2.COLOR_BGR2RGB)) + source_image_pil_mask = Image.fromarray(cv2.cvtColor(mask_background, cv2.COLOR_BGR2RGB)) + + source_image_pil.paste(rotated_resized_source, (int(single_print['location'][i][0]), int(single_print['location'][i][1])), rotated_resized_source) + source_image_pil_mask.paste(rotated_resized_source_mask, (int(single_print['location'][i][0]), int(single_print['location'][i][1])), rotated_resized_source_mask) + + print_background = cv2.cvtColor(np.array(source_image_pil), cv2.COLOR_RGBA2BGR) + mask_background = cv2.cvtColor(np.array(source_image_pil_mask), cv2.COLOR_RGBA2BGR) + ret, mask_background = cv2.threshold(mask_background, 124, 255, cv2.THRESH_BINARY) + else: + mask = self.get_mask_inv(image) + mask = np.expand_dims(mask, axis=2) + mask = cv2.cvtColor(mask, cv2.COLOR_GRAY2BGR) + mask = cv2.bitwise_not(mask) + # 旋转后的坐标需要重新算 + rotate_mask, _ = self.img_rotate(mask, single_print['print_angle_list'][i], single_print['print_scale_list'][i]) + rotate_image, rotated_new_size = self.img_rotate(image, single_print['print_angle_list'][i], single_print['print_scale_list'][i]) + # x, y = int(result['print']['location'][i][0] - rotated_new_size[0] - (rotate_mask.shape[0] - 
image.shape[0]) / 2), int(result['print']['location'][i][1] - rotated_new_size[1] - (rotate_mask.shape[1] - image.shape[1]) / 2) + x, y = int(single_print['location'][i][0] - rotated_new_size[0]), int(single_print['location'][i][1] - rotated_new_size[1]) + + image_x = print_background.shape[1] + image_y = print_background.shape[0] + print_x = rotate_image.shape[1] + print_y = rotate_image.shape[0] + + # 有bug + # if x + print_x > image_x: + # rotate_image = rotate_image[:, :x + print_x - image_x] + # rotate_mask = rotate_mask[:, :x + print_x - image_x] + # # + # if y + print_y > image_y: + # rotate_image = rotate_image[:y + print_y - image_y] + # rotate_mask = rotate_mask[:y + print_y - image_y] + + # 不能是并行 + # 当前第一轮的if (108以及115)是判断有没有过下界和右界。第二轮的是判断左上有没有超出。 如果这个样子的话,先裁了右边,再左移,region就会有问题 + # 先挪 再判断 最后裁剪 + + # 如果print旋转了 或者 print贴边了 则需要判断 判断左界和上界是否小于0 + if x <= 0: + rotate_image = rotate_image[:, -x:] + rotate_mask = rotate_mask[:, -x:] + start_x = x = 0 + else: + start_x = x + + if y <= 0: + rotate_image = rotate_image[-y:, :] + rotate_mask = rotate_mask[-y:, :] + start_y = y = 0 + else: + start_y = y + + # ------------------ + # 如果print-size大于image-size 则需要裁剪print + + if x + print_x > image_x: + rotate_image = rotate_image[:, :image_x - x] + rotate_mask = rotate_mask[:, :image_x - x] + + if y + print_y > image_y: + rotate_image = rotate_image[:image_y - y, :] + rotate_mask = rotate_mask[:image_y - y, :] + + # mask_background[start_y:y + rotate_mask.shape[0], start_x:x + rotate_mask.shape[1]] = cv2.bitwise_xor(mask_background[start_y:y + rotate_mask.shape[0], start_x:x + rotate_mask.shape[1]], rotate_mask) + # print_background[start_y:y + rotate_image.shape[0], start_x:x + rotate_image.shape[1]] = cv2.add(print_background[start_y:y + rotate_image.shape[0], start_x:x + rotate_image.shape[1]], rotate_image) + + # mask_background[start_y:y + rotate_mask.shape[0], start_x:x + rotate_mask.shape[1]] = rotate_mask + # print_background[start_y:y + rotate_image.shape[0], 
start_x:x + rotate_image.shape[1]] = rotate_image + mask_background = self.stack_prin(mask_background, result['pattern_image'], rotate_mask, start_y, y, start_x, x) + print_background = self.stack_prin(print_background, result['pattern_image'], rotate_image, start_y, y, start_x, x) + + # gray_image = cv2.cvtColor(mask_background, cv2.COLOR_BGR2GRAY) + # print_background = cv2.bitwise_and(print_background, print_background, mask=gray_image) + + print_mask = cv2.bitwise_and(result['mask'], cv2.cvtColor(mask_background, cv2.COLOR_BGR2GRAY)) + img_fg = cv2.bitwise_or(print_background, print_background, mask=print_mask) + img_bg = cv2.bitwise_and(result['pattern_image'], result['pattern_image'], mask=cv2.bitwise_not(print_mask)) + mask_mo = np.expand_dims(print_mask, axis=2).repeat(3, axis=2) + gray_mo = np.expand_dims(result['gray'], axis=2).repeat(3, axis=2) + img_fg = (img_fg * (mask_mo / 255) * (gray_mo / 255)).astype(np.uint8) + result['final_image'] = cv2.add(img_bg, img_fg) + canvas = np.full_like(result['final_image'], 255) + temp_bg = np.expand_dims(cv2.bitwise_not(result['mask']), axis=2).repeat(3, axis=2) + tmp1 = (canvas * (temp_bg / 255)).astype(np.uint8) + temp_fg = np.expand_dims(result['mask'], axis=2).repeat(3, axis=2) + tmp2 = (result['final_image'] * (temp_fg / 255)).astype(np.uint8) + result['single_image'] = cv2.add(tmp1, tmp2) + + if element_print['element_path_list']: + print_background = np.zeros((result['final_image'].shape[0], result['final_image'].shape[1], 3), dtype=np.uint8) + mask_background = np.zeros((result['final_image'].shape[0], result['final_image'].shape[1], 3), dtype=np.uint8) + for i in range(len(element_print['element_path_list'])): + image, image_mode = self.read_image(element_print['element_path_list'][i]) + if image_mode == "RGBA": + new_size = (int(image.width * element_print['element_scale_list'][i]), int(image.height * element_print['element_scale_list'][i])) + + mask = image.split()[3] + resized_source = 
image.resize(new_size) + resized_source_mask = mask.resize(new_size) + + rotated_resized_source = resized_source.rotate(-element_print['element_angle_list'][i]) + rotated_resized_source_mask = resized_source_mask.rotate(-element_print['element_angle_list'][i]) + + source_image_pil = Image.fromarray(cv2.cvtColor(print_background, cv2.COLOR_BGR2RGB)) + source_image_pil_mask = Image.fromarray(cv2.cvtColor(mask_background, cv2.COLOR_BGR2RGB)) + + source_image_pil.paste(rotated_resized_source, (int(element_print['location'][i][0]), int(element_print['location'][i][1])), rotated_resized_source) + source_image_pil_mask.paste(rotated_resized_source_mask, (int(element_print['location'][i][0]), int(element_print['location'][i][1])), rotated_resized_source_mask) + + print_background = cv2.cvtColor(np.array(source_image_pil), cv2.COLOR_RGBA2BGR) + mask_background = cv2.cvtColor(np.array(source_image_pil_mask), cv2.COLOR_RGBA2BGR) + else: + mask = self.get_mask_inv(image) + mask = np.expand_dims(mask, axis=2) + mask = cv2.cvtColor(mask, cv2.COLOR_GRAY2BGR) + mask = cv2.bitwise_not(mask) + # 旋转后的坐标需要重新算 + rotate_mask, _ = self.img_rotate(mask, element_print['element_angle_list'][i], element_print['element_scale_list'][i]) + rotate_image, rotated_new_size = self.img_rotate(image, element_print['element_angle_list'][i], element_print['element_scale_list'][i]) + # x, y = int(result['print']['location'][i][0] - rotated_new_size[0] - (rotate_mask.shape[0] - image.shape[0]) / 2), int(result['print']['location'][i][1] - rotated_new_size[1] - (rotate_mask.shape[1] - image.shape[1]) / 2) + x, y = int(element_print['location'][i][0] - rotated_new_size[0]), int(element_print['location'][i][1] - rotated_new_size[1]) + + image_x = print_background.shape[1] + image_y = print_background.shape[0] + print_x = rotate_image.shape[1] + print_y = rotate_image.shape[0] + + # 有bug + # if x + print_x > image_x: + # rotate_image = rotate_image[:, :x + print_x - image_x] + # rotate_mask = rotate_mask[:, 
:x + print_x - image_x] + # # + # if y + print_y > image_y: + # rotate_image = rotate_image[:y + print_y - image_y] + # rotate_mask = rotate_mask[:y + print_y - image_y] + + # 不能是并行 + # 当前第一轮的if (108以及115)是判断有没有过下界和右界。第二轮的是判断左上有没有超出。 如果这个样子的话,先裁了右边,再左移,region就会有问题 + # 先挪 再判断 最后裁剪 + + # 如果print旋转了 或者 print贴边了 则需要判断 判断左界和上界是否小于0 + if x <= 0: + rotate_image = rotate_image[:, -x:] + rotate_mask = rotate_mask[:, -x:] + start_x = x = 0 + else: + start_x = x + + if y <= 0: + rotate_image = rotate_image[-y:, :] + rotate_mask = rotate_mask[-y:, :] + start_y = y = 0 + else: + start_y = y + + # ------------------ + # 如果print-size大于image-size 则需要裁剪print + + if x + print_x > image_x: + rotate_image = rotate_image[:, :image_x - x] + rotate_mask = rotate_mask[:, :image_x - x] + + if y + print_y > image_y: + rotate_image = rotate_image[:image_y - y, :] + rotate_mask = rotate_mask[:image_y - y, :] + + # mask_background[start_y:y + rotate_mask.shape[0], start_x:x + rotate_mask.shape[1]] = cv2.bitwise_xor(mask_background[start_y:y + rotate_mask.shape[0], start_x:x + rotate_mask.shape[1]], rotate_mask) + # print_background[start_y:y + rotate_image.shape[0], start_x:x + rotate_image.shape[1]] = cv2.add(print_background[start_y:y + rotate_image.shape[0], start_x:x + rotate_image.shape[1]], rotate_image) + + # mask_background[start_y:y + rotate_mask.shape[0], start_x:x + rotate_mask.shape[1]] = rotate_mask + # print_background[start_y:y + rotate_image.shape[0], start_x:x + rotate_image.shape[1]] = rotate_image + mask_background = self.stack_prin(mask_background, result['pattern_image'], rotate_mask, start_y, y, start_x, x) + print_background = self.stack_prin(print_background, result['pattern_image'], rotate_image, start_y, y, start_x, x) + + # gray_image = cv2.cvtColor(mask_background, cv2.COLOR_BGR2GRAY) + # print_background = cv2.bitwise_and(print_background, print_background, mask=gray_image) + + print_mask = cv2.bitwise_and(result['mask'], cv2.cvtColor(mask_background, 
cv2.COLOR_BGR2GRAY)) + img_fg = cv2.bitwise_or(print_background, print_background, mask=print_mask) + # TODO element 丢失信息 + three_channel_image = cv2.merge([cv2.bitwise_not(print_mask), cv2.bitwise_not(print_mask), cv2.bitwise_not(print_mask)]) + img_bg = cv2.bitwise_and(result['final_image'], three_channel_image) + # mask_mo = np.expand_dims(print_mask, axis=2).repeat(3, axis=2) + # gray_mo = np.expand_dims(result['gray'], axis=2).repeat(3, axis=2) + # img_fg = (img_fg * (mask_mo / 255) * (gray_mo / 255)).astype(np.uint8) + result['final_image'] = cv2.add(img_bg, img_fg) + canvas = np.full_like(result['final_image'], 255) + temp_bg = np.expand_dims(cv2.bitwise_not(result['mask']), axis=2).repeat(3, axis=2) + tmp1 = (canvas * (temp_bg / 255)).astype(np.uint8) + temp_fg = np.expand_dims(result['mask'], axis=2).repeat(3, axis=2) + tmp2 = (result['final_image'] * (temp_fg / 255)).astype(np.uint8) + result['single_image'] = cv2.add(tmp1, tmp2) + return result + + @staticmethod + def stack_prin(print_background, pattern_image, rotate_image, start_y, y, start_x, x): + temp_print = np.zeros((pattern_image.shape[0], pattern_image.shape[1], 3), dtype=np.uint8) + temp_print[start_y:y + rotate_image.shape[0], start_x:x + rotate_image.shape[1]] = rotate_image + img2gray = cv2.cvtColor(temp_print, cv2.COLOR_BGR2GRAY) + ret, mask_ = cv2.threshold(img2gray, 1, 255, cv2.THRESH_BINARY) + mask_inv = cv2.bitwise_not(mask_) + img1_bg = cv2.bitwise_and(print_background, print_background, mask=mask_inv) + img2_fg = cv2.bitwise_and(temp_print, temp_print, mask=mask_) + print_background = img1_bg + img2_fg + return print_background + + def painting_collection(self, painting_dict, print_dict, print_trigger=False, is_single=False): + if print_trigger: + print_ = self.get_print(print_dict) + painting_dict['Trigger'] = not is_single + painting_dict['location'] = print_['location'] + single_mask_inv_print = self.get_mask_inv(print_['image']) + dim_max = max(painting_dict['dim_image_h'], 
painting_dict['dim_image_w']) + dim_pattern = (int(dim_max * print_['scale'] / 5), int(dim_max * print_['scale'] / 5)) + if not is_single: + self.random_seed = random.randint(0, 1000) + # 如果print 模式为overall 且 有角度的话 , 组合的print为正方形,方便裁剪 + if "print_angle_list" in print_dict.keys() and print_dict['print_angle_list'][0] != 0: + painting_dict['mask_inv_print'] = self.tile_image(single_mask_inv_print, dim_pattern, print_['scale'], dim_max, dim_max, painting_dict['location'], trigger=True) + painting_dict['tile_print'] = self.tile_image(print_['image'], dim_pattern, print_['scale'], dim_max, dim_max, painting_dict['location'], trigger=True) + else: + painting_dict['mask_inv_print'] = self.tile_image(single_mask_inv_print, dim_pattern, print_['scale'], painting_dict['dim_image_h'], painting_dict['dim_image_w'], painting_dict['location'], trigger=True) + painting_dict['tile_print'] = self.tile_image(print_['image'], dim_pattern, print_['scale'], painting_dict['dim_image_h'], painting_dict['dim_image_w'], painting_dict['location'], trigger=True) + else: + painting_dict['mask_inv_print'] = self.tile_image(single_mask_inv_print, dim_pattern, print_['scale'], painting_dict['dim_image_h'], painting_dict['dim_image_w'], painting_dict['location']) + painting_dict['tile_print'] = self.tile_image(print_['image'], dim_pattern, print_['scale'], painting_dict['dim_image_h'], painting_dict['dim_image_w'], painting_dict['location']) + painting_dict['dim_print_h'], painting_dict['dim_print_w'] = dim_pattern + return painting_dict + + def tile_image(self, pattern, dim, scale, dim_image_h, dim_image_w, location, trigger=False): + tile = None + if not trigger: + tile = cv2.resize(pattern, dim, interpolation=cv2.INTER_AREA) + else: + resize_pattern = cv2.resize(pattern, dim, interpolation=cv2.INTER_AREA) + if len(pattern.shape) == 2: + tile = np.tile(resize_pattern, (int((5 + 1) / scale) + 4, int((5 + 1) / scale) + 4)) + if len(pattern.shape) == 3: + tile = np.tile(resize_pattern, (int((5 + 
1) / scale) + 4, int((5 + 1) / scale) + 4, 1)) + tile = self.crop_image(tile, dim_image_h, dim_image_w, location, resize_pattern.shape) + return tile + + def get_mask_inv(self, print_): + if print_[0][0][0] == 255 and print_[0][0][1] == 255 and print_[0][0][2] == 255: + bg_color = cv2.cvtColor(print_, cv2.COLOR_BGR2LAB)[0][0] + print_tile = cv2.cvtColor(print_, cv2.COLOR_BGR2LAB) + bg_l, bg_a, bg_b = bg_color[0], bg_color[1], bg_color[2] + bg_L_high, bg_L_low = self.get_low_high_lab(bg_l, L=True) + bg_a_high, bg_a_low = self.get_low_high_lab(bg_a) + bg_b_high, bg_b_low = self.get_low_high_lab(bg_b) + lower = np.array([bg_L_low, bg_a_low, bg_b_low]) + upper = np.array([bg_L_high, bg_a_high, bg_b_high]) + mask_inv = cv2.inRange(print_tile, lower, upper) + return mask_inv + else: + # bg_color = cv2.cvtColor(print_, cv2.COLOR_BGR2LAB)[0][0] + # print_tile = cv2.cvtColor(print_, cv2.COLOR_BGR2LAB) + # bg_l, bg_a, bg_b = bg_color[0], bg_color[1], bg_color[2] + # bg_L_high, bg_L_low = self.get_low_high_lab(bg_l, L=True) + # bg_a_high, bg_a_low = self.get_low_high_lab(bg_a) + # bg_b_high, bg_b_low = self.get_low_high_lab(bg_b) + # lower = np.array([bg_L_low, bg_a_low, bg_b_low]) + # upper = np.array([bg_L_high, bg_a_high, bg_b_high]) + + # print_tile = cv2.cvtColor(print_, cv2.COLOR_BGR2LAB) + # mask_inv = cv2.cvtColor(print_tile, cv2.COLOR_BGR2GRAY) + + # mask_inv = cv2.cvtColor(print_, cv2.COLOR_BGR2GRAY) + mask_inv = np.zeros(print_.shape[:2], dtype=np.uint8) + return mask_inv + + @staticmethod + def printpaint(result, painting_dict, print_=False): + + if print_ and painting_dict['Trigger']: + print_mask = cv2.bitwise_and(result['mask'], cv2.bitwise_not(painting_dict['mask_inv_print'])) + img_fg = cv2.bitwise_and(painting_dict['tile_print'], painting_dict['tile_print'], mask=print_mask) + else: + print_mask = result['mask'] + img_fg = result['final_image'] + if print_ and not painting_dict['Trigger']: + index_ = None + try: + index_ = len(painting_dict['location']) + 
except: + assert f'there must be parameter of location if choose IfSingle' + + for i in range(index_): + start_h, start_w = int(painting_dict['location'][i][1]), int(painting_dict['location'][i][0]) + + length_h = min(start_h + painting_dict['dim_print_h'], img_fg.shape[0]) + length_w = min(start_w + painting_dict['dim_print_w'], img_fg.shape[1]) + + change_region = img_fg[start_h: length_h, start_w: length_w, :] + # problem in change_mask + change_mask = print_mask[start_h: length_h, start_w: length_w] + # get real part into change mask + _, change_mask = cv2.threshold(change_mask, 220, 255, cv2.THRESH_BINARY) + mask = cv2.bitwise_not(painting_dict['mask_inv_print']) + img_fg[start_h:start_h + painting_dict['dim_print_h'], start_w:start_w + painting_dict['dim_print_w'], :] = change_region + + clothes_mask_print = cv2.bitwise_not(print_mask) + + img_bg = cv2.bitwise_and(result['pattern_image'], result['pattern_image'], mask=clothes_mask_print) + mask_mo = np.expand_dims(print_mask, axis=2).repeat(3, axis=2) + gray_mo = np.expand_dims(result['gray'], axis=2).repeat(3, axis=2) + img_fg = (img_fg * (mask_mo / 255) * (gray_mo / 255)).astype(np.uint8) + print_image = cv2.add(img_bg, img_fg) + return print_image + + @staticmethod + def get_print(print_dict): + if 'print_scale_list' not in print_dict.keys() or print_dict['print_scale_list'][0] < 0.3: + print_dict['scale'] = 0.3 + else: + print_dict['scale'] = print_dict['print_scale_list'][0] + + bucket_name = print_dict['print_path_list'][0].split("/", 1)[0] + object_name = print_dict['print_path_list'][0].split("/", 1)[1] + image = oss_get_image(bucket=bucket_name, object_name=object_name, data_type="PIL") + # 判断图片格式,如果是RGBA 则贴在一张纯白图片上 防止透明转黑 + if image.mode == "RGBA": + new_background = Image.new('RGB', image.size, (255, 255, 255)) + new_background.paste(image, mask=image.split()[3]) + image = new_background + print_dict['image'] = cv2.cvtColor(np.asarray(image), cv2.COLOR_RGB2BGR) + return print_dict + + def 
crop_image(self, image, image_size_h, image_size_w, location, print_shape): + print_w = print_shape[1] + print_h = print_shape[0] + + random.seed(self.random_seed) + # logging.info(f'overall print location : {location}') + # x_offset = random.randint(0, image.shape[0] - image_size_h) + # y_offset = random.randint(0, image.shape[1] - image_size_w) + + # 1.拿到偏移量后和resize后的print宽高取余 得到真正偏移量 + x_offset = print_w - int(location[0][1] % print_w) + y_offset = print_w - int(location[0][0] % print_h) + + # y_offset = int(location[0][0]) + # x_offset = int(location[0][1]) + + if len(image.shape) == 2: + image = image[x_offset: x_offset + image_size_h, y_offset: y_offset + image_size_w] + elif len(image.shape) == 3: + image = image[x_offset: x_offset + image_size_h, y_offset: y_offset + image_size_w, :] + return image + + @staticmethod + def get_low_high_lab(Lab_value, L=False): + if L: + high = Lab_value + 30 if Lab_value + 30 < 255 else 255 + low = Lab_value - 30 if Lab_value - 30 > 0 else 0 + else: + high = Lab_value + 30 if Lab_value + 30 < 255 else 255 + low = Lab_value - 30 if Lab_value - 30 > 0 else 0 + return high, low + + @staticmethod + def img_rotate(image, angel, scale): + """顺时针旋转图像任意角度 + + Args: + image (np.array): [原始图像] + angel (float): [逆时针旋转的角度] + + Returns: + [array]: [旋转后的图像] + """ + + h, w = image.shape[:2] + center = (w // 2, h // 2) + # if type(angel) is not int: + # angel = 0 + M = cv2.getRotationMatrix2D(center, -angel, scale) + # 调整旋转后的图像长宽 + rotated_h = int((w * np.abs(M[0, 1]) + (h * np.abs(M[0, 0])))) + rotated_w = int((h * np.abs(M[0, 1]) + (w * np.abs(M[0, 0])))) + M[0, 2] += (rotated_w - w) // 2 + M[1, 2] += (rotated_h - h) // 2 + # 旋转图像 + rotated_img = cv2.warpAffine(image, M, (rotated_w, rotated_h)) + + return rotated_img, ((rotated_img.shape[1] - image.shape[1] * scale) // 2, (rotated_img.shape[0] - image.shape[0] * scale) // 2) + # return rotated_img, (0, 0) + + @staticmethod + def rotate_crop_image(img, angle, crop): + """ + angle: 旋转的角度 + 
crop: 是否需要进行裁剪,布尔向量 + """ + crop_image = lambda img, x0, y0, w, h: img[y0:y0 + h, x0:x0 + w] + w, h = img.shape[:2] + # 旋转角度的周期是360° + angle %= 360 + # 计算仿射变换矩阵 + M_rotation = cv2.getRotationMatrix2D((w / 2, h / 2), angle, 1) + # 得到旋转后的图像 + img_rotated = cv2.warpAffine(img, M_rotation, (w, h)) + + # 如果需要去除黑边 + if crop: + # 裁剪角度的等效周期是180° + angle_crop = angle % 180 + if angle > 90: + angle_crop = 180 - angle_crop + # 转化角度为弧度 + theta = angle_crop * np.pi / 180 + # 计算高宽比 + hw_ratio = float(h) / float(w) + # 计算裁剪边长系数的分子项 + tan_theta = np.tan(theta) + numerator = np.cos(theta) + np.sin(theta) * np.tan(theta) + + # 计算分母中和高宽比相关的项 + r = hw_ratio if h > w else 1 / hw_ratio + # 计算分母项 + denominator = r * tan_theta + 1 + # 最终的边长系数 + crop_mult = numerator / denominator + + # 得到裁剪区域 + w_crop = int(crop_mult * w) + h_crop = int(crop_mult * h) + x0 = int((w - w_crop) / 2) + y0 = int((h - h_crop) / 2) + + img_rotated = crop_image(img_rotated, x0, y0, w_crop, h_crop) + + return img_rotated + + @staticmethod + def read_image(image_url): + image = oss_get_image(bucket=image_url.split("/", 1)[0], object_name=image_url.split("/", 1)[1], data_type="cv2") + if image.shape[2] == 4: + image_rgb = cv2.cvtColor(image, cv2.COLOR_BGRA2RGBA) + image = Image.fromarray(image_rgb) + image_mode = "RGBA" + else: + image_mode = "RGB" + return image, image_mode + + @staticmethod + def resize_and_crop(img, target_width, target_height): + # 获取原始图像的尺寸 + original_height, original_width = img.shape[:2] + + # 计算目标尺寸的宽高比 + target_ratio = target_width / target_height + + # 计算原始图像的宽高比 + original_ratio = original_width / original_height + + # 调整尺寸 + if original_ratio > target_ratio: + # 原始图像更宽,按高度resize,然后裁剪宽度 + new_height = target_height + new_width = int(original_width * (target_height / original_height)) + resized_img = cv2.resize(img, (new_width, new_height)) + # 裁剪宽度 + start_x = (new_width - target_width) // 2 + cropped_img = resized_img[:, start_x:start_x + target_width] + else: + # 原始图像更高,按宽度resize,然后裁剪高度 
+ new_width = target_width + new_height = int(original_height * (target_width / original_width)) + resized_img = cv2.resize(img, (new_width, new_height)) + # 裁剪高度 + start_y = (new_height - target_height) // 2 + cropped_img = resized_img[start_y:start_y + target_height, :] + + return cropped_img diff --git a/app/service/design/design_batch/items/pipeline/scale.py b/app/service/design/design_batch/items/pipeline/scale.py new file mode 100644 index 0000000..732fcd8 --- /dev/null +++ b/app/service/design/design_batch/items/pipeline/scale.py @@ -0,0 +1,49 @@ +import math + +import cv2 + + +class Scaling: + def __call__(self, result): + if result['keypoint'] in ['waistband', 'shoulder', 'head_point']: + # milvus_db_keypoint_cache + distance_clo = math.sqrt( + (int(result['clothes_keypoint'][result['keypoint'] + '_left'][0]) - int(result['clothes_keypoint'][result['keypoint'] + '_right'][0])) ** 2 + + + (int(result['clothes_keypoint'][result['keypoint'] + '_left'][1]) - int(result['clothes_keypoint'][result['keypoint'] + '_right'][1])) ** 2 + ) + + distance_bdy = math.sqrt( + (int(result['body_point_test'][result['keypoint'] + '_left'][0]) + - + int(result['body_point_test'][result['keypoint'] + '_right'][0])) ** 2 + 1 + ) + + if distance_clo == 0: + result['scale'] = 1 + else: + result['scale'] = distance_bdy / distance_clo + elif result['keypoint'] == 'toe': + distance_bdy = math.sqrt( + (int(result['body_point_test']['foot_length'][0]) - int(result['body_point_test']['foot_length'][2])) ** 2 + + + (int(result['body_point_test']['foot_length'][1]) - int(result['body_point_test']['foot_length'][3])) ** 2 + ) + + Blur = cv2.GaussianBlur(result['gray'], (3, 3), 0) + Edge = cv2.Canny(Blur, 10, 200) + Edge = cv2.dilate(Edge, None) + Edge = cv2.erode(Edge, None) + Contour, _ = cv2.findContours(Edge, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) + Contours = sorted(Contour, key=cv2.contourArea, reverse=True) + + Max_contour = Contours[0] + x, y, w, h = 
cv2.boundingRect(Max_contour) + width = w + distance_clo = width + result['scale'] = distance_bdy / distance_clo + elif result['keypoint'] == 'hand_point': + result['scale'] = result['scale_bag'] + elif result['keypoint'] == 'ear_point': + result['scale'] = result['scale_earrings'] + return result diff --git a/app/service/design/design_batch/items/pipeline/segmentation.py b/app/service/design/design_batch/items/pipeline/segmentation.py new file mode 100644 index 0000000..d8aa6d2 --- /dev/null +++ b/app/service/design/design_batch/items/pipeline/segmentation.py @@ -0,0 +1,67 @@ +import logging +import os + +import cv2 +import numpy as np + +from app.core.config import SEG_CACHE_PATH +from app.service.design.utils.design_ensemble import get_seg_result +from app.service.utils.oss_client import oss_get_image + +logger = logging.getLogger() + + +class Segmentation: + def __call__(self, result): + if "seg_mask_url" in result.keys() and result['seg_mask_url'] != "": + seg_mask = oss_get_image(bucket=result['seg_mask_url'].split('/')[0], object_name=result['seg_mask_url'][result['seg_mask_url'].find('/') + 1:], data_type="cv2") + seg_mask = cv2.resize(seg_mask, (result['img_shape'][1], result['img_shape'][0]), interpolation=cv2.INTER_NEAREST) + # 转换颜色空间为 RGB(OpenCV 默认是 BGR) + image_rgb = cv2.cvtColor(seg_mask, cv2.COLOR_BGR2RGB) + + r, g, b = cv2.split(image_rgb) + red_mask = r > g + green_mask = g > r + + # 创建红色和绿色掩码 + result['front_mask'] = np.array(red_mask, dtype=np.uint8) * 255 + result['back_mask'] = np.array(green_mask, dtype=np.uint8) * 255 + result['mask'] = result['front_mask'] + result['back_mask'] + else: + # 本地查询seg 缓存是否存在 + _, seg_result = self.load_seg_result(result["image_id"]) + result['seg_result'] = seg_result + if not _: + # 推理获得seg 结果 + seg_result = get_seg_result(result["image_id"], result['image'])[0] + self.save_seg_result(seg_result, result['image_id']) + # 处理前片后片 + temp_front = seg_result == 1.0 + result['front_mask'] = (255 * (temp_front + 
0).astype(np.uint8)) + temp_back = seg_result == 2.0 + result['back_mask'] = (255 * (temp_back + 0).astype(np.uint8)) + result['mask'] = result['front_mask'] + result['back_mask'] + return result + + @staticmethod + def save_seg_result(seg_result, image_id): + file_path = f"{SEG_CACHE_PATH}{image_id}.npy" + try: + np.save(file_path, seg_result) + print("保存成功", os.path.abspath(file_path)) + except Exception as e: + print(f"保存失败: {e}") + + @staticmethod + def load_seg_result(image_id): + file_path = f"{SEG_CACHE_PATH}{image_id}.npy" + logger.info(f"load seg file name is :{SEG_CACHE_PATH}{image_id}.npy") + try: + seg_result = np.load(file_path) + return True, seg_result + except FileNotFoundError: + print("文件不存在") + return False, None + except Exception as e: + print(f"加载失败: {e}") + return False, None diff --git a/app/service/design/design_batch/items/pipeline/split.py b/app/service/design/design_batch/items/pipeline/split.py new file mode 100644 index 0000000..2fba315 --- /dev/null +++ b/app/service/design/design_batch/items/pipeline/split.py @@ -0,0 +1,71 @@ +import io +import logging + +import cv2 +import numpy as np +from PIL import Image +from cv2 import cvtColor, COLOR_BGR2RGBA + +from app.core.config import AIDA_CLOTHING +from app.service.design.utils.conversion_image import rgb_to_rgba +from app.service.design.utils.upload_image import upload_png_mask +from app.service.utils.generate_uuid import generate_uuid +from app.service.utils.oss_client import oss_upload_image + + +class Split(object): + def __call__(self, result): + try: + + if result['name'] in ('outwear', 'dress', 'blouse', 'skirt', 'trousers', 'tops', 'bottoms'): + front_mask = result['front_mask'] + back_mask = result['back_mask'] + rgba_image = rgb_to_rgba(result['final_image'], front_mask + back_mask) + new_size = (int(rgba_image.shape[1] * result["scale"] * result["resize_scale"][0]), int(rgba_image.shape[0] * result["scale"] * result["resize_scale"][1])) + rgba_image = cv2.resize(rgba_image, 
new_size) + result_front_image = np.zeros_like(rgba_image) + front_mask = cv2.resize(front_mask, new_size) + result_front_image[front_mask != 0] = rgba_image[front_mask != 0] + result_front_image_pil = Image.fromarray(cvtColor(result_front_image, COLOR_BGR2RGBA)) + result['front_image'], result["front_image_url"], _ = upload_png_mask(result_front_image_pil, f'{generate_uuid()}', mask=None) + + height, width = front_mask.shape + mask_image = np.zeros((height, width, 3)) + mask_image[front_mask != 0] = [0, 0, 255] + + if result["name"] in ('blouse', 'dress', 'outwear', 'tops'): + result_back_image = np.zeros_like(rgba_image) + back_mask = cv2.resize(back_mask, new_size) + result_back_image[back_mask != 0] = rgba_image[back_mask != 0] + result_back_image_pil = Image.fromarray(cvtColor(result_back_image, COLOR_BGR2RGBA)) + result['back_image'], result["back_image_url"], _ = upload_png_mask(result_back_image_pil, f'{generate_uuid()}', mask=None) + mask_image[back_mask != 0] = [0, 255, 0] + + rbga_mask = rgb_to_rgba(mask_image, front_mask + back_mask) + mask_pil = Image.fromarray(cvtColor(rbga_mask.astype(np.uint8), COLOR_BGR2RGBA)) + image_data = io.BytesIO() + mask_pil.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", image_bytes=image_bytes) + result['mask_url'] = req.bucket_name + "/" + req.object_name + else: + rbga_mask = rgb_to_rgba(mask_image, front_mask) + mask_pil = Image.fromarray(cvtColor(rbga_mask.astype(np.uint8), COLOR_BGR2RGBA)) + image_data = io.BytesIO() + mask_pil.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", image_bytes=image_bytes) + result['mask_url'] = req.bucket_name + "/" + req.object_name + result['back_image'] = None + result["back_image_url"] = None + # result["back_mask_url"] = 
None + # result['back_mask_image'] = None + # 创建中间图层 + result_pattern_image_rgba = rgb_to_rgba(result['pattern_image'], result['mask']) + result_pattern_image_pil = Image.fromarray(cvtColor(result_pattern_image_rgba, COLOR_BGR2RGBA)) + result['pattern_image'], result['pattern_image_url'], _ = upload_png_mask(result_pattern_image_pil, f'{generate_uuid()}') + return result + except Exception as e: + logging.warning(f"split runtime exception : {e} image_id : {result['image_id']}") diff --git a/app/service/design/design_batch/items/utils/__init__.py b/app/service/design/design_batch/items/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/service/design/design_batch/items/utils/conversion_image.py b/app/service/design/design_batch/items/utils/conversion_image.py new file mode 100644 index 0000000..11e39ae --- /dev/null +++ b/app/service/design/design_batch/items/utils/conversion_image.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +""" +@Project :trinity_client +@File :conversion_image.py +@Author :周成融 +@Date :2023/8/21 10:40:29 +@detail : +""" +import numpy as np + + +# def rgb_to_rgba(rgb_size, rgb_image, mask): +# alpha_channel = np.full(rgb_size, 255, dtype=np.uint8) +# # 创建四通道的结果图像 +# rgba_image = np.dstack((rgb_image, alpha_channel)) +# alpha_channel = np.where(mask > 0, 255, 0) +# # 更新RGBA图像的透明度通道 +# rgba_image[:, :, 3] = alpha_channel +# return rgba_image + +def rgb_to_rgba(rgb_image, mask): + # 创建全透明的alpha通道 + alpha_channel = np.where(mask > 0, 255, 0).astype(np.uint8) + # 合并RGB图像和alpha通道 + rgba_image = np.dstack((rgb_image, alpha_channel)) + return rgba_image + + +if __name__ == '__main__': + image = open("") diff --git a/app/service/design/design_batch/items/utils/design_ensemble.py b/app/service/design/design_batch/items/utils/design_ensemble.py new file mode 100644 index 0000000..f4f6a34 --- /dev/null +++ b/app/service/design/design_batch/items/utils/design_ensemble.py @@ -0,0 +1,143 @@ +#!/usr/bin/env 
python +# -*- coding: UTF-8 -*- +""" +@Project :trinity_client +@File :design_ensemble.py +@Author :周成融 +@Date :2023/8/16 19:36:21 +@detail :发起请求 获取推理结果 +""" +import logging + +import cv2 +import mmcv +import numpy as np +import torch +import torch.nn.functional as F +import tritonclient.http as httpclient + +from app.core.config import * + +""" + keypoint + 预处理 推理 后处理 +""" + + +def keypoint_preprocess(img_path): + img = mmcv.imread(img_path) + img_scale = (256, 256) + h, w = img.shape[:2] + img = cv2.resize(img, img_scale) + w_scale = img_scale[0] / w + h_scale = img_scale[1] / h + img = mmcv.imnormalize(img, mean=np.array([123.675, 116.28, 103.53]), std=np.array([58.395, 57.12, 57.375]), to_rgb=True) + preprocessed_img = np.expand_dims(img.transpose(2, 0, 1), axis=0) + return preprocessed_img, (w_scale, h_scale) + + +# @ RunTime +# 推理 +def get_keypoint_result(image, site): + keypoint_result = None + try: + image, scale_factor = keypoint_preprocess(image) + client = httpclient.InferenceServerClient(url=DESIGN_MODEL_URL) + transformed_img = image.astype(np.float32) + inputs = [httpclient.InferInput(f"input", transformed_img.shape, datatype="FP32")] + inputs[0].set_data_from_numpy(transformed_img, binary_data=True) + outputs = [httpclient.InferRequestedOutput(f"output", binary_data=True)] + results = client.infer(model_name=f"keypoint_{site}_ocrnet_hr18", inputs=inputs, outputs=outputs) + inference_output = torch.from_numpy(results.as_numpy(f'output')) + keypoint_result = keypoint_postprocess(inference_output, scale_factor) + except Exception as e: + logging.warning(f"get_keypoint_result : {e}") + return keypoint_result + + +def keypoint_postprocess(output, scale_factor): + max_indices = torch.argmax(output.view(output.size(0), output.size(1), -1), dim=2).unsqueeze(dim=2) + max_coords = torch.cat((max_indices / output.size(3), max_indices % output.size(3)), dim=2) + segment_result = max_coords.numpy() + scale_factor = [1 / x for x in scale_factor[::-1]] + 
scale_matrix = np.diag(scale_factor) + nan = np.isinf(scale_matrix) + scale_matrix[nan] = 0 + return np.ceil(np.dot(segment_result, scale_matrix) * 4) + + +""" + seg + 预处理 推理 后处理 +""" + + +# KNet +def seg_preprocess(img_path): + img = mmcv.imread(img_path) + ori_shape = img.shape[:2] + img_scale_w, img_scale_h = ori_shape + if ori_shape[0] > 1024: + img_scale_w = 1024 + if ori_shape[1] > 1024: + img_scale_h = 1024 + # 如果图片size任意一边 大于 1024, 则会resize 成1024 + if ori_shape != (img_scale_w, img_scale_h): + # mmcv.imresize(img, img_scale_h, img_scale_w) # 老代码 引以为戒!哈哈哈~ h和w写反了 + img = cv2.resize(img, (img_scale_h, img_scale_w)) + img = mmcv.imnormalize(img, mean=np.array([123.675, 116.28, 103.53]), std=np.array([58.395, 57.12, 57.375]), to_rgb=True) + preprocessed_img = np.expand_dims(img.transpose(2, 0, 1), axis=0) + return preprocessed_img, ori_shape + + +# @ RunTime +def get_seg_result(image_id, image): + image, ori_shape = seg_preprocess(image) + client = httpclient.InferenceServerClient(url=f"{DESIGN_MODEL_URL}") + transformed_img = image.astype(np.float32) + # 输入集 + inputs = [ + httpclient.InferInput(SEGMENTATION['input'], transformed_img.shape, datatype="FP32") + ] + inputs[0].set_data_from_numpy(transformed_img, binary_data=True) + # 输出集 + outputs = [ + httpclient.InferRequestedOutput(SEGMENTATION['output'], binary_data=True), + ] + results = client.infer(model_name=SEGMENTATION['new_model_name'], inputs=inputs, outputs=outputs) + # 推理 + # 取结果 + inference_output1 = results.as_numpy(SEGMENTATION['output']) + seg_result = seg_postprocess(int(image_id), inference_output1, ori_shape) + return seg_result + + +# no cache +def seg_postprocess(image_id, output, ori_shape): + seg_logit = F.interpolate(torch.tensor(output).float(), size=ori_shape, scale_factor=None, mode='bilinear', align_corners=False) + seg_pred = seg_logit.cpu().numpy() + return seg_pred[0] + + +def key_point_show(image_path, key_point_result=None): + img = cv2.imread(image_path) + points_list = 
key_point_result + point_size = 1 + point_color = (0, 0, 255) # BGR + thickness = 4 # 可以为 0 、4、8 + for point in points_list: + cv2.circle(img, point[::-1], point_size, point_color, thickness) + cv2.imshow("0", img) + cv2.waitKey(0) + + +if __name__ == '__main__': + image = cv2.imread("9070101c-e5be-49b5-9602-4113a968969b.png") + a = get_keypoint_result(image, "up") + new_list = [] + print(list) + for i in a[0]: + new_list.append((int(i[0]), int(i[1]))) + key_point_show("9070101c-e5be-49b5-9602-4113a968969b.png", new_list) + # a = get_seg_result(1, image) + print(a) diff --git a/app/service/design/design_batch/items/utils/redis_utils.py b/app/service/design/design_batch/items/utils/redis_utils.py new file mode 100644 index 0000000..012fbe0 --- /dev/null +++ b/app/service/design/design_batch/items/utils/redis_utils.py @@ -0,0 +1,99 @@ +import redis + +from app.core.config import REDIS_HOST, REDIS_PORT + + +class Redis(object): + """ + redis数据库操作 + """ + + @staticmethod + def _get_r(): + host = REDIS_HOST + port = REDIS_PORT + db = 0 + r = redis.StrictRedis(host, port, db) + return r + + @classmethod + def write(cls, key, value, expire=None): + """ + 写入键值对 + """ + # 判断是否有过期时间,没有就设置默认值 + if expire: + expire_in_seconds = expire + else: + expire_in_seconds = 100 + r = cls._get_r() + r.set(key, value, ex=expire_in_seconds) + + @classmethod + def read(cls, key): + """ + 读取键值对内容 + """ + r = cls._get_r() + value = r.get(key) + return value.decode('utf-8') if value else value + + @classmethod + def hset(cls, name, key, value): + """ + 写入hash表 + """ + r = cls._get_r() + r.hset(name, key, value) + + @classmethod + def hget(cls, name, key): + """ + 读取指定hash表的键值 + """ + r = cls._get_r() + value = r.hget(name, key) + return value.decode('utf-8') if value else value + + @classmethod + def hgetall(cls, name): + """ + 获取指定hash表所有的值 + """ + r = cls._get_r() + return r.hgetall(name) + + @classmethod + def delete(cls, *names): + """ + 删除一个或者多个 + """ + r = cls._get_r() + r.delete(*names) 
+ + @classmethod + def hdel(cls, name, key): + """ + 删除指定hash表的键值 + """ + r = cls._get_r() + r.hdel(name, key) + + @classmethod + def expire(cls, name, expire=None): + """ + 设置过期时间 + """ + if expire: + expire_in_seconds = expire + else: + expire_in_seconds = 100 + r = cls._get_r() + r.expire(name, expire_in_seconds) + + +if __name__ == '__main__': + redis_client = Redis() + # print(redis_client.write(key="1230", value=0)) + redis_client.write(key="1230", value=10) + # print(redis_client.read(key="1230")) diff --git a/app/service/design/design_batch/items/utils/synthesis_item.py b/app/service/design/design_batch/items/utils/synthesis_item.py new file mode 100644 index 0000000..9527cd2 --- /dev/null +++ b/app/service/design/design_batch/items/utils/synthesis_item.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +""" +@Project :trinity_client +@File :synthesis_item.py +@Author :周成融 +@Date :2023/8/26 14:13:04 +@detail : +""" +import io +import logging + +import cv2 +import numpy as np +from PIL import Image + +from app.service.utils.generate_uuid import generate_uuid +from app.service.utils.oss_client import oss_upload_image + + +def positioning(all_mask_shape, mask_shape, offset): + all_start = 0 + all_end = 0 + mask_start = 0 + mask_end = 0 + if offset == 0: + all_start = 0 + all_end = min(all_mask_shape, mask_shape) + + mask_start = 0 + mask_end = min(all_mask_shape, mask_shape) + elif offset > 0: + all_start = min(offset, all_mask_shape) + all_end = min(offset + mask_shape, all_mask_shape) + + mask_start = 0 + mask_end = 0 if offset > all_mask_shape else min(all_mask_shape - offset, mask_shape) + elif offset < 0: + if abs(offset) > mask_shape: + all_start = 0 + all_end = 0 + else: + all_start = 0 + if mask_shape - abs(offset) > all_mask_shape: + all_end = min(mask_shape - abs(offset), all_mask_shape) + else: + all_end = mask_shape - abs(offset) + + if abs(offset) > mask_shape: + mask_start = mask_shape + mask_end = mask_shape + else: + 
mask_start = abs(offset) + if mask_shape - abs(offset) >= all_mask_shape: + mask_end = all_mask_shape + abs(offset) + else: + mask_end = mask_shape + return all_start, all_end, mask_start, mask_end + + +# @RunTime +def synthesis(data, size, basic_info): + # 创建底图 + base_image = Image.new('RGBA', size, (0, 0, 0, 0)) + try: + all_mask_shape = (size[1], size[0]) + body_mask = None + for d in data: + if d['name'] == 'body' or d['name'] == 'mannequin': + # 创建一个新的宽高透明图像, 把模特贴上去获取mask + transparent_image = Image.new("RGBA", size, (0, 0, 0, 0)) + transparent_image.paste(d['image'], (d['adaptive_position'][1], d['adaptive_position'][0]), d['image']) # 此处可变数组会被paste篡改值,所以使用下标获取position + body_mask = np.array(transparent_image.split()[3]) + + # 根据新的坐标获取新的肩点 + left_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_left'], [d['adaptive_position'][1], d['adaptive_position'][0]])] + right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], [d['adaptive_position'][1], d['adaptive_position'][0]])] + body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 + _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) + top_outer_mask = np.array(binary_body_mask) + bottom_outer_mask = np.array(binary_body_mask) + + top = True + bottom = True + i = len(data) + while i: + i -= 1 + if top and data[i]['name'] in ["blouse_front", "outwear_front", "dress_front", "tops_front"]: + top = False + mask_shape = data[i]['mask'].shape + y_offset, x_offset = data[i]['adaptive_position'] + # 初始化叠加区域的起始和结束位置 + all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) + all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) + # 将叠加区域赋值为相应的像素值 + _, sketch_mask = cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) + 
background = np.zeros_like(top_outer_mask) + background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] + top_outer_mask = background + top_outer_mask + elif bottom and data[i]['name'] in ["trousers_front", "skirt_front", "bottoms_front", "dress_front"]: + bottom = False + mask_shape = data[i]['mask'].shape + y_offset, x_offset = data[i]['adaptive_position'] + # 初始化叠加区域的起始和结束位置 + all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) + all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) + # 将叠加区域赋值为相应的像素值 + _, sketch_mask = cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) + background = np.zeros_like(top_outer_mask) + background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] + bottom_outer_mask = background + bottom_outer_mask + elif bottom is False and top is False: + break + + all_mask = cv2.bitwise_or(top_outer_mask, bottom_outer_mask) + + for layer in data: + if layer['image'] is not None: + if layer['name'] != "body": + test_image = Image.new('RGBA', size, (0, 0, 0, 0)) + test_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) + mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) + mask_alpha = Image.fromarray(mask_data) + cropped_image = Image.composite(test_image, Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) + base_image.paste(test_image, (0, 0), cropped_image) # test_image 已经按照坐标贴到最大宽值的图片上 坐着这里坐标为00 + else: + base_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) + + result_image = base_image + + image_data = io.BytesIO() + result_image.save(image_data, format='PNG') + image_data.seek(0) + + # oss upload + 
image_bytes = image_data.read() + bucket_name = "aida-results" + object_name = f'result_{generate_uuid()}.png' + req = oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) + return f"{bucket_name}/{object_name}" + # return f"aida-results/{minio_client.put_object('aida-results', f'result_{generate_uuid()}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" + + # object_name = f'result_{generate_uuid()}.png' + # response = s3.put_object(Bucket="aida-results", Key=object_name, Body=data, ContentType='image/png') + # object_url = f"aida-results/{object_name}" + # if response['ResponseMetadata']['HTTPStatusCode'] == 200: + # return object_url + # else: + # return "" + + except Exception as e: + logging.warning(f"synthesis runtime exception : {e}") + + +def synthesis_single(front_image, back_image): + result_image = None + if front_image: + result_image = front_image + if back_image: + result_image.paste(back_image, (0, 0), back_image) + + # with io.BytesIO() as output: + # result_image.save(output, format='PNG') + # data = output.getvalue() + # object_name = f'result_{generate_uuid()}.png' + # response = s3.put_object(Bucket="aida-results", Key=object_name, Body=data, ContentType='image/png') + # object_url = f"aida-results/{object_name}" + # if response['ResponseMetadata']['HTTPStatusCode'] == 200: + # return object_url + # else: + # return "" + image_data = io.BytesIO() + result_image.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + # return f"aida-results/{minio_client.put_object('aida-results', f'result_{generate_uuid()}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" + # oss upload + bucket_name = 'aida-results' + object_name = f'result_{generate_uuid()}.png' + req = oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) + return f"{bucket_name}/{object_name}" diff --git 
a/app/service/design/design_batch/items/utils/upload_image.py b/app/service/design/design_batch/items/utils/upload_image.py new file mode 100644 index 0000000..9039ce7 --- /dev/null +++ b/app/service/design/design_batch/items/utils/upload_image.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +""" +@Project :trinity_client +@File :upload_image.py +@Author :周成融 +@Date :2023/8/28 13:49:20 +@detail : +""" +import io +import logging + +import cv2 + +from app.core.config import * +from app.service.utils.decorator import RunTime +from app.service.utils.oss_client import oss_upload_image + + +# @RunTime +def upload_png_mask(front_image, object_name, mask=None): + try: + mask_url = None + if mask is not None: + mask_inverted = cv2.bitwise_not(mask) + # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 + rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) + rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] + # image_bytes = io.BytesIO() + # image_bytes.write(cv2.imencode('.png', rgba_image)[1].tobytes()) + # image_bytes.seek(0) + # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), content_type='image/png').object_name}" + # oss upload #################### + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) + mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" + + image_data = io.BytesIO() + front_image.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) + image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" + return front_image, 
image_url, mask_url + except Exception as e: + logging.warning(f"upload_png_mask runtime exception : {e}") + + +# @RunTime +# def upload_png_mask(front_image, object_name, mask=None): +# mask_url = None +# if mask is not None: +# mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" +# image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" +# return front_image, image_url, mask_url From 423ff8dd26ee8705f924761334ed91d5728f96ff Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 12 Sep 2024 10:06:25 +0800 Subject: [PATCH 067/103] feat design batch fix --- app/api/api_design.py | 41 ++++++- app/schemas/design.py | 51 ++------ .../design/service_design_batch_generate.py | 114 ++++++++++++++++++ 3 files changed, 160 insertions(+), 46 deletions(-) create mode 100644 app/service/design/service_design_batch_generate.py diff --git a/app/api/api_design.py b/app/api/api_design.py index bc3d1b9..5210477 100644 --- a/app/api/api_design.py +++ b/app/api/api_design.py @@ -1,12 +1,14 @@ import json import logging +import os -from fastapi import APIRouter, HTTPException +from fastapi import APIRouter, HTTPException, UploadFile, File, Form -from app.schemas.design import DesignModel, DesignProgressModel, ModelProgressModel +from app.schemas.design import DesignModel, DesignProgressModel, ModelProgressModel, DBGConfigModel from app.schemas.response_template import ResponseModel from app.service.design.model_process_service import model_transpose from app.service.design.service import generate +from app.service.design.service_design_batch_generate import start_design_batch_generate from app.service.design.utils.redis_utils import Redis router = APIRouter() @@ -238,3 +240,38 @@ def model_process(request_data: ModelProgressModel): logger.warning(f"model_process Run Exception @@@@@@:{e}") raise HTTPException(status_code=404, detail=str(e)) return ResponseModel(data=data) + + +# ############################################################## + + 
+@router.post("/design_batch_generate") +async def design(file: UploadFile = File(...), + tasks_id: str = Form(...), + user_id: str = Form(...), + priority: int = Form(...), + total: int = Form(...) + ): + # file_content = await file.read() + dbg_config = DBGConfigModel( + tasks_id=tasks_id, + user_id=user_id, + priority=priority, + total=total + ) + contents = await file.read() + file_name = file.filename + await save_request_file(contents, file_name) + + return await start_design_batch_generate(dbg_config, contents) + + +async def save_request_file(contents, file_name): + # 创建保存文件的目录(如果不存在) + save_dir = os.path.join(os.getcwd(), "design_batch", "request_data") + if not os.path.exists(save_dir): + os.makedirs(save_dir) + # 处理文件 + file_path = os.path.join(save_dir, file_name) + with open(file_path, "wb") as f: + f.write(contents) diff --git a/app/schemas/design.py b/app/schemas/design.py index edcc392..763e0a0 100644 --- a/app/schemas/design.py +++ b/app/schemas/design.py @@ -1,50 +1,6 @@ from pydantic import BaseModel -# class BodyPointModel(BaseModel): -# waistband_right: list[int] -# hand_point_right: list[int] -# waistband_left: list[int] -# hand_point_left: list[int] -# shoulder_left: list[int] -# shoulder_right: list[int] -# -# -# class BasicModel(BaseModel): -# body_point: BodyPointModel -# layer_order: bool -# scale_bag: float -# scale_earrings: float -# self_template: bool -# single_overall: str -# switch_category: str -# body_path: str -# -# -# class PrintModel(BaseModel): -# if_single: bool -# print_path_list: list[str] -# -# -# class ItemModel(BaseModel): -# color: str -# image_id: str -# offset: list[int] -# path: str -# print: PrintModel -# resize_scale: float -# type: str -# -# -# class CollocationModel(BaseModel): -# basic: BasicModel -# item: list[ItemModel] -# -# -# class DesignModel(BaseModel): -# object: list[CollocationModel] -# process_id: str - class DesignModel(BaseModel): objects: list[dict] process_id: str @@ -56,3 +12,10 @@ class 
DesignProgressModel(BaseModel): class ModelProgressModel(BaseModel): model_path: str + + +class DBGConfigModel(BaseModel): + tasks_id: str + user_id: str + priority: int + total: int diff --git a/app/service/design/service_design_batch_generate.py b/app/service/design/service_design_batch_generate.py new file mode 100644 index 0000000..0696176 --- /dev/null +++ b/app/service/design/service_design_batch_generate.py @@ -0,0 +1,114 @@ +import json + +import pika +from celery import Celery + +from app.service.design.design_batch.items.item import process_layer, process_item, update_base_size_priority +from app.service.design.utils.synthesis_item import synthesis_single, synthesis + +celery_app = Celery('clothes_generation', broker='amqp://guest:guest@localhost:5672//') + + + +@celery_app.task +def design_batch_generate(design_objects, total_steps, task_id): + objects_response = [] + for step, object in enumerate(design_objects): + basic = object['basic'] + items_response = {'layers': []} + if basic['single_overall'] == "overall": + item_results = [process_item(item, basic) for item in object['items']] + layers = [] + futures = [] + body_size = None + for item in item_results: + futures = [process_layer(item, layers)] + for future in futures: + if future is not None: + body_size = future + layers = sorted(layers, key=lambda s: s.get("priority", float('inf'))) + + layers, new_size = update_base_size_priority(layers, body_size) + + for lay in layers: + items_response['layers'].append({ + 'image_category': lay['name'], + 'position': lay['position'], + 'priority': lay.get("priority", None), + 'resize_scale': lay['resize_scale'] if "resize_scale" in lay.keys() else None, + 'image_size': lay['image'] if lay['image'] is None else lay['image'].size, + 'gradient_string': lay['gradient_string'] if 'gradient_string' in lay.keys() else "", + 'mask_url': lay['mask_url'], + 'image_url': lay['image_url'] if 'image_url' in lay.keys() else None, + 'pattern_image_url': 
lay['pattern_image_url'] if 'pattern_image_url' in lay.keys() else None, + + # 'image': lay['image'], + # 'mask_image': lay['mask_image'], + }) + items_response['synthesis_url'] = synthesis(layers, new_size, basic) + else: + item_results = process_item(object['items'][0], basic) + items_response['layers'].append({ + 'image_category': f"{item_results['name']}_front", + 'image_size': item_results['back_image'].size if item_results['back_image'] else None, + 'position': None, + 'priority': 0, + 'image_url': item_results['front_image_url'], + 'mask_url': item_results['mask_url'], + "gradient_string": item_results['gradient_string'] if 'gradient_string' in item_results.keys() else "", + 'pattern_image_url': item_results['pattern_image_url'] if 'pattern_image_url' in item_results.keys() else None, + + }) + items_response['layers'].append({ + 'image_category': f"{item_results['name']}_back", + 'image_size': item_results['front_image'].size if item_results['front_image'] else None, + 'position': None, + 'priority': 0, + 'image_url': item_results['back_image_url'], + 'mask_url': item_results['mask_url'], + "gradient_string": item_results['gradient_string'] if 'gradient_string' in item_results.keys() else "", + 'pattern_image_url': item_results['pattern_image_url'] if 'pattern_image_url' in item_results.keys() else None, + + }) + items_response['synthesis_url'] = synthesis_single(item_results['front_image'], item_results['back_image']) + objects_response.append(items_response) + publish_status(task_id, f"{step + 1}/{total_steps}", objects_response) + print(objects_response) + return objects_response + + +def publish_status(task_id, progress, result): + connection = pika.BlockingConnection(pika.ConnectionParameters('localhost')) + channel = connection.channel() + channel.queue_declare(queue='DesignBatch', durable=True) + message = {'task_id': task_id, 'progress': progress, "result": result} + print(message) + channel.basic_publish(exchange='', + routing_key='DesignBatch', + 
body=json.dumps(message), + properties=pika.BasicProperties( + delivery_mode=2, + )) + connection.close() + + +async def start_design_batch_generate(data, file): + generate_clothes_task = design_batch_generate.delay(json.loads(file.decode())['objects'], data.total, data.tasks_id) + print(generate_clothes_task) + publish_status(data.tasks_id, "0/100", "") + return {"task_id": data.tasks_id} +# +# +# if __name__ == '__main__': +# data = {"objects": [{"basic": {"body_point_test": {"waistband_right": [200, 241], "hand_point_right": [223, 297], "waistband_left": [112, 241], "hand_point_left": [92, 305], "shoulder_left": [99, 116], "shoulder_right": [215, 116]}, "layer_order": True, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "overall", "switch_category": ""}, "items": [ +# {"businessId": 270372, "color": "30 28 28", "image_id": 69780, "offset": [0, 0], "path": "aida-sys-image/images/female/trousers/0825000630.jpg", "seg_mask_url": "test/result.png", +# "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, "priority": 10, "resize_scale": [1.0, 1.0], "type": "Trousers"}, +# {"businessId": 270373, "color": "30 28 28", "image_id": 98243, "offset": [0, 0], "path": "aida-sys-image/images/female/blouse/0902003811.jpg", "seg_mask_url": "test/result.png", +# "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, "priority": 11, "resize_scale": [1.0, 1.0], "type": "Blouse"}, +# {"businessId": 270374, "color": "172 68 68", 
"image_id": 98244, "offset": [0, 0], "path": "aida-sys-image/images/female/outwear/0825000410.jpg", "seg_mask_url": "test/result.png", +# "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, "priority": 12, "resize_scale": [1.0, 1.0], "type": "Outwear"}, +# {"body_path": "aida-sys-image/models/female/5bdfe7ca-64eb-44e4-b03d-8e517520c795.png", "image_id": 96090, "type": "Body"}]}], "process_id": "83"} +# total_steps = 1 +# task_id = 1 +# design_batch_generate(data['objects'], total_steps, task_id) +# # publish_status(task_id="0/100", progress=100) From dac5940199818cb867c18c66effa37d667dc315e Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 19 Sep 2024 14:20:56 +0800 Subject: [PATCH 068/103] =?UTF-8?q?feat=20=20design=20=20=E6=8F=90?= =?UTF-8?q?=E9=80=9F=E6=B5=8B=E8=AF=95=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_design.py | 9 +- app/core/config.py | 4 +- app/service/design/test.py | 15 + app/service/design/utils/upload_image.py | 8 +- app/service/design_test/__init__.py | 0 app/service/design_test/batch_design.py | 1743 +++++++++++++++++ app/service/design_test/item.py | 281 +++ app/service/design_test/pipeline/__init__.py | 20 + app/service/design_test/pipeline/color.py | 62 + .../design_test/pipeline/contour_detection.py | 37 + app/service/design_test/pipeline/keypoint.py | 114 ++ app/service/design_test/pipeline/loading.py | 80 + .../design_test/pipeline/print_painting.py | 524 +++++ app/service/design_test/pipeline/scale.py | 49 + .../design_test/pipeline/segmentation.py | 70 + app/service/design_test/pipeline/split.py | 74 + app/service/design_test/utils/__init__.py | 0 .../design_test/utils/conversion_image.py | 31 + 
.../design_test/utils/design_ensemble.py | 143 ++ app/service/design_test/utils/redis_utils.py | 99 + .../design_test/utils/synthesis_item.py | 181 ++ app/service/design_test/utils/upload_image.py | 55 + app/service/utils/oss_client.py | 27 +- 23 files changed, 3599 insertions(+), 27 deletions(-) create mode 100644 app/service/design/test.py create mode 100644 app/service/design_test/__init__.py create mode 100644 app/service/design_test/batch_design.py create mode 100644 app/service/design_test/item.py create mode 100644 app/service/design_test/pipeline/__init__.py create mode 100644 app/service/design_test/pipeline/color.py create mode 100644 app/service/design_test/pipeline/contour_detection.py create mode 100644 app/service/design_test/pipeline/keypoint.py create mode 100644 app/service/design_test/pipeline/loading.py create mode 100644 app/service/design_test/pipeline/print_painting.py create mode 100644 app/service/design_test/pipeline/scale.py create mode 100644 app/service/design_test/pipeline/segmentation.py create mode 100644 app/service/design_test/pipeline/split.py create mode 100644 app/service/design_test/utils/__init__.py create mode 100644 app/service/design_test/utils/conversion_image.py create mode 100644 app/service/design_test/utils/design_ensemble.py create mode 100644 app/service/design_test/utils/redis_utils.py create mode 100644 app/service/design_test/utils/synthesis_item.py create mode 100644 app/service/design_test/utils/upload_image.py diff --git a/app/api/api_design.py b/app/api/api_design.py index 5210477..4db9fc2 100644 --- a/app/api/api_design.py +++ b/app/api/api_design.py @@ -7,9 +7,9 @@ from fastapi import APIRouter, HTTPException, UploadFile, File, Form from app.schemas.design import DesignModel, DesignProgressModel, ModelProgressModel, DBGConfigModel from app.schemas.response_template import ResponseModel from app.service.design.model_process_service import model_transpose -from app.service.design.service import generate from 
app.service.design.service_design_batch_generate import start_design_batch_generate from app.service.design.utils.redis_utils import Redis +from app.service.design_test.batch_design import design_generate router = APIRouter() logger = logging.getLogger() @@ -180,9 +180,14 @@ def design(request_data: DesignModel): "process_id": "83" } """ + # logger.info(f"design request item is : @@@@@@:{json.dumps(request_data.dict())}") + # data = generate(request_data=request_data) + # logger.info(f"design response @@@@@@:{json.dumps(data)}") + logger.info(f"design request item is : @@@@@@:{json.dumps(request_data.dict())}") - data = generate(request_data=request_data) + data = design_generate(request_data=request_data) logger.info(f"design response @@@@@@:{json.dumps(data)}") + # try: # logger.info(f"design request item is : @@@@@@:{json.dumps(request_data.dict())}") # data = generate(request_data=request_data) diff --git a/app/core/config.py b/app/core/config.py index 35c12b7..97098b0 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -20,7 +20,7 @@ class Settings(BaseSettings): OSS = "minio" -DEBUG = False +DEBUG = True if DEBUG: LOGS_PATH = "logs/" CATEGORY_PATH = "service/attribute/config/descriptor/category/category_dis.csv" @@ -50,7 +50,7 @@ S3_REGION_NAME = "ap-east-1" # redis 配置 REDIS_HOST = "10.1.1.240" REDIS_PORT = "6379" -REDIS_DB = "2" +REDIS_DB = "10" # rabbitmq config RABBITMQ_PARAMS = { diff --git a/app/service/design/test.py b/app/service/design/test.py new file mode 100644 index 0000000..0235f0e --- /dev/null +++ b/app/service/design/test.py @@ -0,0 +1,15 @@ +from app.service.design.service_design_batch_generate import design_batch_generate + +if __name__ == '__main__': + data = {"objects": [{"basic": {"body_point_test": {"waistband_right": [200, 241], "hand_point_right": [223, 297], "waistband_left": [112, 241], "hand_point_left": [92, 305], "shoulder_left": [99, 116], "shoulder_right": [215, 116]}, "layer_order": True, "scale_bag": 0.7, 
"scale_earrings": 0.16, "self_template": True, "single_overall": "overall", "switch_category": ""}, "items": [ + {"businessId": 270372, "color": "30 28 28", "image_id": 69780, "offset": [0, 0], "path": "aida-sys-image/images/female/trousers/0825000630.jpg", "seg_mask_url": "test/result.png", + "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, "priority": 10, "resize_scale": [1.0, 1.0], "type": "Trousers"}, + {"businessId": 270373, "color": "30 28 28", "image_id": 98243, "offset": [0, 0], "path": "aida-sys-image/images/female/blouse/0902003811.jpg", "seg_mask_url": "test/result.png", + "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, "priority": 11, "resize_scale": [1.0, 1.0], "type": "Blouse"}, + {"businessId": 270374, "color": "172 68 68", "image_id": 98244, "offset": [0, 0], "path": "aida-sys-image/images/female/outwear/0825000410.jpg", "seg_mask_url": "test/result.png", + "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, "priority": 12, "resize_scale": [1.0, 1.0], "type": "Outwear"}, + {"body_path": "aida-sys-image/models/female/5bdfe7ca-64eb-44e4-b03d-8e517520c795.png", "image_id": 96090, "type": "Body"}]}], "process_id": "83"} + total_steps = 1 + task_id = 1 + 
design_batch_generate.delay(data['objects'], total_steps, task_id) + # publish_status(task_id="0/100", progress=100) diff --git a/app/service/design/utils/upload_image.py b/app/service/design/utils/upload_image.py index 9039ce7..8d20061 100644 --- a/app/service/design/utils/upload_image.py +++ b/app/service/design/utils/upload_image.py @@ -13,12 +13,11 @@ import logging import cv2 from app.core.config import * -from app.service.utils.decorator import RunTime from app.service.utils.oss_client import oss_upload_image # @RunTime -def upload_png_mask(front_image, object_name, mask=None): +def upload_png_mask(minio_client, front_image, object_name, mask=None): try: mask_url = None if mask is not None: @@ -31,7 +30,7 @@ def upload_png_mask(front_image, object_name, mask=None): # image_bytes.seek(0) # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), content_type='image/png').object_name}" # oss upload #################### - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) + req = oss_upload_image(oss_client=minio_client, bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" image_data = io.BytesIO() @@ -39,13 +38,12 @@ def upload_png_mask(front_image, object_name, mask=None): image_data.seek(0) image_bytes = image_data.read() # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) + req = oss_upload_image(oss_client=minio_client, bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) 
image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" return front_image, image_url, mask_url except Exception as e: logging.warning(f"upload_png_mask runtime exception : {e}") - # @RunTime # def upload_png_mask(front_image, object_name, mask=None): # mask_url = None diff --git a/app/service/design_test/__init__.py b/app/service/design_test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/service/design_test/batch_design.py b/app/service/design_test/batch_design.py new file mode 100644 index 0000000..5a07429 --- /dev/null +++ b/app/service/design_test/batch_design.py @@ -0,0 +1,1743 @@ +import io +import json +import logging +import threading +import time +import uuid + +import cv2 +import numpy as np +from PIL import Image +from minio import Minio + +from app.core.config import PRIORITY_DICT +from app.service.design.utils.redis_utils import Redis +from app.service.design_test.item import BodyItem, TopItem, BottomItem +from app.service.utils.oss_client import oss_upload_image + +id_lock = threading.Lock() + +logger = logging.getLogger() + +# minio 配置 +MINIO_URL = "www.minio.aida.com.hk:12024" +MINIO_ACCESS = 'vXKFLSJkYeEq2DrSZvkB' +MINIO_SECRET = 'uKTZT3x7C43WvPN9QTc99DiRkwddWZrG9Uh3JVlR' +MINIO_SECURE = True + +minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + + +def process_item(item, basic): + if item['type'] == "Body": + body_server = BodyItem(data=item, basic=basic, minio_client=minio_client) + item_data = body_server.process() + elif item['type'].lower() in ['blouse', 'outwear', 'dress', 'tops']: + top_server = TopItem(data=item, basic=basic, minio_client=minio_client) + item_data = top_server.process() + else: + bottom_server = BottomItem(data=item, basic=basic, minio_client=minio_client) + item_data = bottom_server.process() + return item_data + + +def process_layer(item, layers): + if item['name'] == "mannequin": + body_layer = organize_body(item) + 
layers.append(body_layer) + return item['body_image'].size + else: + front_layer, back_layer = organize_clothing(item) + layers.append(front_layer) + layers.append(back_layer) + + +def organize_body(layer): + body_layer = dict(priority=0, + name=layer["name"].lower(), + image=layer['body_image'], + image_url=layer['body_path'], + mask_image=None, + mask_url=None, + sacle=1, + # mask=layer['body_mask'], + position=(0, 0)) + return body_layer + + +def organize_clothing(layer): + # 起始坐标 + start_point = calculate_start_point(layer['keypoint'], layer['scale'], layer['clothes_keypoint'], layer['body_point_test'], layer["offset"], layer["resize_scale"]) + # 前片数据 + front_layer = dict(priority=layer['priority'] if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_front', None), + name=f'{layer["name"].lower()}_front', + image=layer["front_image"], + # mask_image=layer['front_mask_image'], + image_url=layer['front_image_url'], + mask_url=layer['mask_url'], + sacle=layer['scale'], + clothes_keypoint=layer['clothes_keypoint'], + position=start_point, + resize_scale=layer["resize_scale"], + mask=cv2.resize(layer['mask'], layer["front_image"].size), + gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", + pattern_image_url=layer['pattern_image_url'], + pattern_image=layer['pattern_image'] + + ) + # 后片数据 + back_layer = dict(priority=-layer.get("priority", 0) if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_back', None), + name=f'{layer["name"].lower()}_back', + image=layer["back_image"], + # mask_image=layer['back_mask_image'], + image_url=layer['back_image_url'], + mask_url=layer['mask_url'], + sacle=layer['scale'], + clothes_keypoint=layer['clothes_keypoint'], + position=start_point, + resize_scale=layer["resize_scale"], + mask=cv2.resize(layer['mask'], layer["front_image"].size), + gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", + 
pattern_image_url=layer['pattern_image_url'], + ) + return front_layer, back_layer + + +def calculate_start_point(keypoint_type, scale, clothes_point, body_point, offset, resize_scale): + """ + Align left + Args: + keypoint_type: string, "waistband" | "shoulder" | "ear_point" + scale: float + clothes_point: dict{'left': [x1, y1, z1], 'right': [x2, y2, z2]} + body_point: dict, containing keypoint data of body figure + + Returns: + start_point: tuple (x', y') + x' = y_body - y1 * scale + offset + y' = x_body - x1 * scale + offset + + """ + side_indicator = f'{keypoint_type}_left' + start_point = ( + int(body_point[side_indicator][1] + offset[1] - int(clothes_point[side_indicator][0]) * scale), # y + int(body_point[side_indicator][0] + offset[0] - int(clothes_point[side_indicator][1]) * scale) # x + ) + return start_point + + +def update_base_size_priority(layers, size): + # 计算透明背景图片的宽度 + min_x = min(info['position'][1] for info in layers) + x_list = [] + for info in layers: + if info['image'] is not None: + x_list.append(info['position'][1] + info['image'].width) + max_x = max(x_list) + new_width = max_x - min_x + new_height = 700 + # 更新坐标 + for info in layers: + info['adaptive_position'] = (info['position'][0], info['position'][1] - min_x) + return layers, (new_width, new_height) + + +def synthesis_single(front_image, back_image): + result_image = None + if front_image: + result_image = front_image + if back_image: + result_image.paste(back_image, (0, 0), back_image) + image_data = io.BytesIO() + result_image.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + bucket_name = 'aida-results' + object_name = f'result_{generate_uuid()}.png' + oss_upload_image(oss_client=minio_client, bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) + return f"{bucket_name}/{object_name}" + + +def oss_upload_json(json_data, object_name): + try: + with open(f"app/service/design/design_batch/response_json/{object_name}", 'w') as file: 
+ json.dump(json_data, file, indent=4) + + oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + oss_client.fput_object("test", object_name, f"app/service/design/design_batch/response_json/{object_name}") + except Exception as e: + logger.warning(str(e)) + + +def generate_uuid(): + with id_lock: + unique_id = str(uuid.uuid1()) + return unique_id + + +def positioning(all_mask_shape, mask_shape, offset): + all_start = 0 + all_end = 0 + mask_start = 0 + mask_end = 0 + if offset == 0: + all_start = 0 + all_end = min(all_mask_shape, mask_shape) + + mask_start = 0 + mask_end = min(all_mask_shape, mask_shape) + elif offset > 0: + all_start = min(offset, all_mask_shape) + all_end = min(offset + mask_shape, all_mask_shape) + + mask_start = 0 + mask_end = 0 if offset > all_mask_shape else min(all_mask_shape - offset, mask_shape) + elif offset < 0: + if abs(offset) > mask_shape: + all_start = 0 + all_end = 0 + else: + all_start = 0 + if mask_shape - abs(offset) > all_mask_shape: + all_end = min(mask_shape - abs(offset), all_mask_shape) + else: + all_end = mask_shape - abs(offset) + + if abs(offset) > mask_shape: + mask_start = mask_shape + mask_end = mask_shape + else: + mask_start = abs(offset) + if mask_shape - abs(offset) >= all_mask_shape: + mask_end = all_mask_shape + abs(offset) + else: + mask_end = mask_shape + return all_start, all_end, mask_start, mask_end + + +def synthesis(data, size, basic_info): + # 创建底图 + base_image = Image.new('RGBA', size, (0, 0, 0, 0)) + try: + all_mask_shape = (size[1], size[0]) + body_mask = None + for d in data: + if d['name'] == 'body' or d['name'] == 'mannequin': + # 创建一个新的宽高透明图像, 把模特贴上去获取mask + transparent_image = Image.new("RGBA", size, (0, 0, 0, 0)) + transparent_image.paste(d['image'], (d['adaptive_position'][1], d['adaptive_position'][0]), d['image']) # 此处可变数组会被paste篡改值,所以使用下标获取position + body_mask = np.array(transparent_image.split()[3]) + + # 根据新的坐标获取新的肩点 + left_shoulder = [x + y 
for x, y in zip(basic_info['body_point_test']['shoulder_left'], [d['adaptive_position'][1], d['adaptive_position'][0]])] + right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], [d['adaptive_position'][1], d['adaptive_position'][0]])] + body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 + _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) + top_outer_mask = np.array(binary_body_mask) + bottom_outer_mask = np.array(binary_body_mask) + + top = True + bottom = True + i = len(data) + while i: + i -= 1 + if top and data[i]['name'] in ["blouse_front", "outwear_front", "dress_front", "tops_front"]: + top = False + mask_shape = data[i]['mask'].shape + y_offset, x_offset = data[i]['adaptive_position'] + # 初始化叠加区域的起始和结束位置 + all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) + all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) + # 将叠加区域赋值为相应的像素值 + _, sketch_mask = cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) + background = np.zeros_like(top_outer_mask) + background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] + top_outer_mask = background + top_outer_mask + elif bottom and data[i]['name'] in ["trousers_front", "skirt_front", "bottoms_front", "dress_front"]: + bottom = False + mask_shape = data[i]['mask'].shape + y_offset, x_offset = data[i]['adaptive_position'] + # 初始化叠加区域的起始和结束位置 + all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) + all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) + # 将叠加区域赋值为相应的像素值 + _, sketch_mask = 
cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) + background = np.zeros_like(top_outer_mask) + background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] + bottom_outer_mask = background + bottom_outer_mask + elif bottom is False and top is False: + break + + all_mask = cv2.bitwise_or(top_outer_mask, bottom_outer_mask) + + for layer in data: + if layer['image'] is not None: + if layer['name'] != "body": + test_image = Image.new('RGBA', size, (0, 0, 0, 0)) + test_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) + mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) + mask_alpha = Image.fromarray(mask_data) + cropped_image = Image.composite(test_image, Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) + base_image.paste(test_image, (0, 0), cropped_image) # test_image 已经按照坐标贴到最大宽值的图片上 坐着这里坐标为00 + else: + base_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) + + result_image = base_image + + image_data = io.BytesIO() + result_image.save(image_data, format='PNG') + image_data.seek(0) + + # oss upload + image_bytes = image_data.read() + bucket_name = "aida-results" + object_name = f'result_{generate_uuid()}.png' + oss_upload_image(oss_client=minio_client, bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) + return f"{bucket_name}/{object_name}" + except Exception as e: + logging.warning(f"synthesis runtime exception : {e}") + + +def design_generate(request_data): + objects_data = request_data.dict()['objects'] + process_id = request_data.dict()['process_id'] + object_response = [] + threads = [] + active_threads = 0 + lock = threading.Lock() + total = len(objects_data) + + def process_object(step, object): + nonlocal active_threads + basic = object['basic'] + items_response = {'layers': []} + if basic['single_overall'] == "overall": + 
item_results = [] + for item in object['items']: + item_results.append(process_item(item, basic)) + layers = [] + body_size = None + for item in item_results: + body_size = process_layer(item, layers) + layers = sorted(layers, key=lambda s: s.get("priority", float('inf'))) + + layers, new_size = update_base_size_priority(layers, body_size) + + for lay in layers: + items_response['layers'].append({ + 'image_category': lay['name'], + 'position': lay['position'], + 'priority': lay.get("priority", None), + 'resize_scale': lay['resize_scale'] if "resize_scale" in lay.keys() else None, + 'image_size': lay['image'] if lay['image'] is None else lay['image'].size, + 'gradient_string': lay['gradient_string'] if 'gradient_string' in lay.keys() else "", + 'mask_url': lay['mask_url'], + 'image_url': lay['image_url'] if 'image_url' in lay.keys() else None, + 'pattern_image_url': lay['pattern_image_url'] if 'pattern_image_url' in lay.keys() else None, + }) + items_response['synthesis_url'] = synthesis(layers, new_size, basic) + else: + item_result = process_item(object['items'][0], basic) + items_response['layers'].append({ + 'image_category': f"{item_result['name']}_front", + 'image_size': item_result['back_image'].size if item_result['back_image'] else None, + 'position': None, + 'priority': 0, + 'image_url': item_result['front_image_url'], + 'mask_url': item_result['mask_url'], + "gradient_string": item_result['gradient_string'] if 'gradient_string' in item_result.keys() else "", + 'pattern_image_url': item_result['pattern_image_url'] if 'pattern_image_url' in item_result.keys() else None, + }) + items_response['layers'].append({ + 'image_category': f"{item_result['name']}_back", + 'image_size': item_result['front_image'].size if item_result['front_image'] else None, + 'position': None, + 'priority': 0, + 'image_url': item_result['back_image_url'], + 'mask_url': item_result['mask_url'], + "gradient_string": item_result['gradient_string'] if 'gradient_string' in 
item_result.keys() else "", + 'pattern_image_url': item_result['pattern_image_url'] if 'pattern_image_url' in item_result.keys() else None, + }) + items_response['synthesis_url'] = synthesis_single(item_result['front_image'], item_result['back_image']) + update_progress(process_id, total) + + with lock: + object_response.append(items_response) + active_threads -= 1 + + for step, object in enumerate(objects_data): + t = threading.Thread(target=process_object, args=(step, object)) + threads.append(t) + t.start() + with lock: + active_threads += 1 + + for t in threads: + t.join() + final_progress(process_id) + return object_response + + +def update_progress(process_id, total): + logger.info(f"{process_id} , {total}") + r = Redis() + progress = r.read(key=process_id) + if progress and total != 1: + if int(progress) <= 100: + r.write(key=process_id, value=int(progress) + int(100 / total)) + else: + r.write(key=process_id, value=99) + return progress + elif total == 1: + r.write(key=process_id, value=100) + return progress + else: + r.write(key=process_id, value=int(100 / total)) + return progress + + +def final_progress(process_id): + r = Redis() + progress = r.read(key=process_id) + r.write(key=process_id, value=100) + return progress + + +if __name__ == '__main__': + object_data = { + "objects": [ + { + "basic": { + "body_point_test": { + "waistband_right": [ + 203, + 249 + ], + "hand_point_right": [ + 229, + 343 + ], + "waistband_left": [ + 119, + 248 + ], + "hand_point_left": [ + 97, + 343 + ], + "shoulder_left": [ + 108, + 107 + ], + "shoulder_right": [ + 212, + 107 + ] + }, + "layer_order": False, + "scale_bag": 0.7, + "scale_earrings": 0.16, + "self_template": True, + "single_overall": "overall", + "switch_category": "" + }, + "items": [ + { + "color": "28 26 26", + "icon": "none", + "image_id": 98419, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/dress/0825000526.jpg", + "print": { + "element": { + "element_angle_list": [], + 
"element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Dress" + }, + { + "body_path": "aida-sys-image/models/female/2e4815b9-1191-419d-94ed-5771239ca4a5.png", + "image_id": 67277, + "offset": [ + 1, + 1 + ], + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Body" + } + ] + }, + { + "basic": { + "body_point_test": { + "waistband_right": [ + 203, + 249 + ], + "hand_point_right": [ + 229, + 343 + ], + "waistband_left": [ + 119, + 248 + ], + "hand_point_left": [ + 97, + 343 + ], + "shoulder_left": [ + 108, + 107 + ], + "shoulder_right": [ + 212, + 107 + ] + }, + "layer_order": False, + "scale_bag": 0.7, + "scale_earrings": 0.16, + "self_template": True, + "single_overall": "overall", + "switch_category": "" + }, + "items": [ + { + "color": "28 26 26", + "icon": "none", + "image_id": 98420, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/skirt/903000127.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Skirt" + }, + { + "color": "28 26 26", + "icon": "none", + "image_id": 69140, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/blouse/0902001100.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + 
"location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Blouse" + }, + { + "color": "28 26 26", + "icon": "none", + "image_id": 81604, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/outwear/outwear_p5_729.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Outwear" + }, + { + "body_path": "aida-sys-image/models/female/2e4815b9-1191-419d-94ed-5771239ca4a5.png", + "image_id": 67277, + "offset": [ + 1, + 1 + ], + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Body" + } + ] + }, + { + "basic": { + "body_point_test": { + "waistband_right": [ + 203, + 249 + ], + "hand_point_right": [ + 229, + 343 + ], + "waistband_left": [ + 119, + 248 + ], + "hand_point_left": [ + 97, + 343 + ], + "shoulder_left": [ + 108, + 107 + ], + "shoulder_right": [ + 212, + 107 + ] + }, + "layer_order": False, + "scale_bag": 0.7, + "scale_earrings": 0.16, + "self_template": True, + "single_overall": "overall", + "switch_category": "" + }, + "items": [ + { + "color": "28 26 26", + "icon": "none", + "image_id": 63964, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/outwear/0825001572.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": 
[ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Outwear" + }, + { + "color": "28 26 26", + "icon": "none", + "image_id": 98421, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/blouse/blouse_506.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Blouse" + }, + { + "color": "28 26 26", + "icon": "none", + "image_id": 98422, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/trousers/0628001244.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Trousers" + }, + { + "body_path": "aida-sys-image/models/female/2e4815b9-1191-419d-94ed-5771239ca4a5.png", + "image_id": 67277, + "offset": [ + 1, + 1 + ], + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Body" + } + ] + }, + { + "basic": { + "body_point_test": { + "waistband_right": [ + 203, + 249 + ], + "hand_point_right": [ + 229, + 343 + ], + "waistband_left": [ + 119, + 248 + ], + "hand_point_left": [ 
+ 97, + 343 + ], + "shoulder_left": [ + 108, + 107 + ], + "shoulder_right": [ + 212, + 107 + ] + }, + "layer_order": False, + "scale_bag": 0.7, + "scale_earrings": 0.16, + "self_template": True, + "single_overall": "overall", + "switch_category": "" + }, + "items": [ + { + "color": "28 26 26", + "icon": "none", + "image_id": 79927, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/outwear/0825000378.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Outwear" + }, + { + "color": "28 26 26", + "icon": "none", + "image_id": 67473, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/blouse/0825001350.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Blouse" + }, + { + "color": "28 26 26", + "icon": "none", + "image_id": 80046, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/skirt/0628001443.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 
0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Skirt" + }, + { + "body_path": "aida-sys-image/models/female/2e4815b9-1191-419d-94ed-5771239ca4a5.png", + "image_id": 67277, + "offset": [ + 1, + 1 + ], + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Body" + } + ] + }, + { + "basic": { + "body_point_test": { + "waistband_right": [ + 203, + 249 + ], + "hand_point_right": [ + 229, + 343 + ], + "waistband_left": [ + 119, + 248 + ], + "hand_point_left": [ + 97, + 343 + ], + "shoulder_left": [ + 108, + 107 + ], + "shoulder_right": [ + 212, + 107 + ] + }, + "layer_order": False, + "scale_bag": 0.7, + "scale_earrings": 0.16, + "self_template": True, + "single_overall": "overall", + "switch_category": "" + }, + "items": [ + { + "color": "28 26 26", + "icon": "none", + "image_id": 84148, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/trousers/0628000751.jpeg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Trousers" + }, + { + "color": "28 26 26", + "icon": "none", + "image_id": 97321, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/blouse/0902000222.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + 
"print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Blouse" + }, + { + "color": "28 26 26", + "icon": "none", + "image_id": 90718, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/outwear/0825000314.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Outwear" + }, + { + "body_path": "aida-sys-image/models/female/2e4815b9-1191-419d-94ed-5771239ca4a5.png", + "image_id": 67277, + "offset": [ + 1, + 1 + ], + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Body" + } + ] + }, + { + "basic": { + "body_point_test": { + "waistband_right": [ + 203, + 249 + ], + "hand_point_right": [ + 229, + 343 + ], + "waistband_left": [ + 119, + 248 + ], + "hand_point_left": [ + 97, + 343 + ], + "shoulder_left": [ + 108, + 107 + ], + "shoulder_right": [ + 212, + 107 + ] + }, + "layer_order": False, + "scale_bag": 0.7, + "scale_earrings": 0.16, + "self_template": True, + "single_overall": "overall", + "switch_category": "" + }, + "items": [ + { + "color": "28 26 26", + "icon": "none", + "image_id": 86403, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/skirt/0902000231.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + 
"print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Skirt" + }, + { + "color": "28 26 26", + "icon": "none", + "image_id": 87135, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/blouse/0902001315.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Blouse" + }, + { + "color": "28 26 26", + "icon": "none", + "image_id": 87428, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/outwear/0902000566.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Outwear" + }, + { + "body_path": "aida-sys-image/models/female/2e4815b9-1191-419d-94ed-5771239ca4a5.png", + "image_id": 67277, + "offset": [ + 1, + 1 + ], + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Body" + } + ] + }, + { + "basic": { + "body_point_test": { + "waistband_right": [ + 203, + 249 + ], + "hand_point_right": [ + 229, + 343 + ], + "waistband_left": [ + 119, + 248 + ], + "hand_point_left": [ + 97, + 343 + ], + "shoulder_left": [ + 108, + 107 + ], + "shoulder_right": [ + 212, + 107 + ] + }, + "layer_order": False, + "scale_bag": 0.7, + "scale_earrings": 0.16, + "self_template": True, + "single_overall": "overall", + 
"switch_category": "" + }, + "items": [ + { + "color": "28 26 26", + "icon": "none", + "image_id": 98423, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/dress/0916001596.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Dress" + }, + { + "body_path": "aida-sys-image/models/female/2e4815b9-1191-419d-94ed-5771239ca4a5.png", + "image_id": 67277, + "offset": [ + 1, + 1 + ], + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Body" + } + ] + }, + { + "basic": { + "body_point_test": { + "waistband_right": [ + 203, + 249 + ], + "hand_point_right": [ + 229, + 343 + ], + "waistband_left": [ + 119, + 248 + ], + "hand_point_left": [ + 97, + 343 + ], + "shoulder_left": [ + 108, + 107 + ], + "shoulder_right": [ + 212, + 107 + ] + }, + "layer_order": False, + "scale_bag": 0.7, + "scale_earrings": 0.16, + "self_template": True, + "single_overall": "overall", + "switch_category": "" + }, + "items": [ + { + "color": "28 26 26", + "icon": "none", + "image_id": 86345, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/outwear/0825000695.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Outwear" + }, + { + "color": 
"28 26 26", + "icon": "none", + "image_id": 78743, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/blouse/0902001412.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Blouse" + }, + { + "color": "28 26 26", + "icon": "none", + "image_id": 68988, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/trousers/0825000403.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Trousers" + }, + { + "body_path": "aida-sys-image/models/female/2e4815b9-1191-419d-94ed-5771239ca4a5.png", + "image_id": 67277, + "offset": [ + 1, + 1 + ], + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Body" + } + ] + } + ], + "process_id": "123" + } + start_time = time.time() + X = design_generate(object_data) + print(time.time() - start_time) + print(X) diff --git a/app/service/design_test/item.py b/app/service/design_test/item.py new file mode 100644 index 0000000..5a4667c --- /dev/null +++ b/app/service/design_test/item.py @@ -0,0 +1,281 @@ +import time +from concurrent.futures import ThreadPoolExecutor +from pprint import pprint + +import cv2 + +from app.core.config import PRIORITY_DICT +from 
app.service.design.utils.synthesis_item import synthesis, synthesis_single +from app.service.design_test.pipeline import LoadImage, KeyPoint, Segmentation, Color, PrintPainting, Scaling, Split, LoadBodyImage, ContourDetection + + +class BaseItem: + def __init__(self, data, basic): + self.result = data.copy() + self.result['name'] = data['type'].lower() + self.result.pop("type") + self.result.update(basic) + + +class TopItem(BaseItem): + def __init__(self, data, basic, minio_client): + super().__init__(data, basic) + self.top_pipeline = [ + LoadImage(minio_client), + KeyPoint(), + Segmentation(minio_client), + Color(minio_client), + PrintPainting(minio_client), + Scaling(), + Split(minio_client) + ] + + def process(self): + for item in self.top_pipeline: + self.result = item(self.result) + return self.result + + +class BottomItem(BaseItem): + def __init__(self, data, basic, minio_client): + super().__init__(data, basic) + self.bottom_pipeline = [ + LoadImage(minio_client), + KeyPoint(), + ContourDetection(), + # Segmentation(), + Color(minio_client), + PrintPainting(minio_client), + Scaling(), + Split(minio_client) + ] + + def process(self): + for item in self.bottom_pipeline: + self.result = item(self.result) + return self.result + + +class BodyItem(BaseItem): + def __init__(self, data, basic, minio_client): + super().__init__(data, basic) + self.top_pipeline = [ + LoadBodyImage(minio_client), + ] + + def process(self): + for item in self.top_pipeline: + self.result = item(self.result) + return self.result + + +def process_item(item, basic, minio_client): + if item['type'] == "Body": + body_server = BodyItem(data=item, basic=basic, minio_client=minio_client) + item_data = body_server.process() + elif item['type'].lower() in ['blouse', 'outwear', 'dress', 'tops']: + top_server = TopItem(data=item, basic=basic, minio_client=minio_client) + item_data = top_server.process() + else: + bottom_server = BottomItem(data=item, basic=basic, minio_client=minio_client) + 
item_data = bottom_server.process() + return item_data + + +def calculate_start_point(keypoint_type, scale, clothes_point, body_point, offset, resize_scale): + """ + Align left + Args: + keypoint_type: string, "waistband" | "shoulder" | "ear_point" + scale: float + clothes_point: dict{'left': [x1, y1, z1], 'right': [x2, y2, z2]} + body_point: dict, containing keypoint data of body figure + + Returns: + start_point: tuple (x', y') + x' = y_body - y1 * scale + offset + y' = x_body - x1 * scale + offset + + """ + side_indicator = f'{keypoint_type}_left' + start_point = ( + int(body_point[side_indicator][1] + offset[1] - int(clothes_point[side_indicator][0]) * scale), # y + int(body_point[side_indicator][0] + offset[0] - int(clothes_point[side_indicator][1]) * scale) # x + ) + return start_point + + +# 服装图层给数据组装 +def organize_clothing(layer): + # 起始坐标 + start_point = calculate_start_point(layer['keypoint'], layer['scale'], layer['clothes_keypoint'], layer['body_point_test'], layer["offset"], layer["resize_scale"]) + # 前片数据 + front_layer = dict(priority=layer['priority'] if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_front', None), + name=f'{layer["name"].lower()}_front', + image=layer["front_image"], + # mask_image=layer['front_mask_image'], + image_url=layer['front_image_url'], + mask_url=layer['mask_url'], + sacle=layer['scale'], + clothes_keypoint=layer['clothes_keypoint'], + position=start_point, + resize_scale=layer["resize_scale"], + mask=cv2.resize(layer['mask'], layer["front_image"].size), + gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", + pattern_image_url=layer['pattern_image_url'], + pattern_image=layer['pattern_image'] + + ) + # 后片数据 + back_layer = dict(priority=-layer.get("priority", 0) if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_back', None), + name=f'{layer["name"].lower()}_back', + image=layer["back_image"], + # 
mask_image=layer['back_mask_image'], + image_url=layer['back_image_url'], + mask_url=layer['mask_url'], + sacle=layer['scale'], + clothes_keypoint=layer['clothes_keypoint'], + position=start_point, + resize_scale=layer["resize_scale"], + mask=cv2.resize(layer['mask'], layer["front_image"].size), + gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", + pattern_image_url=layer['pattern_image_url'], + ) + return front_layer, back_layer + + +# 模特图层给数据组装 +def organize_body(layer): + body_layer = dict(priority=0, + name=layer["name"].lower(), + image=layer['body_image'], + image_url=layer['body_path'], + mask_image=None, + mask_url=None, + sacle=1, + # mask=layer['body_mask'], + position=(0, 0)) + return body_layer + + +def process_layer(item, layers): + if item['name'] == "mannequin": + body_layer = organize_body(item) + layers.append(body_layer) + return item['body_image'].size + else: + front_layer, back_layer = organize_clothing(item) + layers.append(front_layer) + layers.append(back_layer) + + +def process_object(object_data): + basic = object_data['basic'] + items_response = {'layers': []} + + if basic['single_overall'] == "overall": + item_results = [process_item(item, basic) for item in object_data['items']] + layers = [] + futures = [] + body_size = None + for item in item_results: + futures = [process_layer(item, layers)] + for future in futures: + if future is not None: + body_size = future + layers = sorted(layers, key=lambda s: s.get("priority", float('inf'))) + + layers, new_size = update_base_size_priority(layers, body_size) + + for lay in layers: + items_response['layers'].append({ + 'image_category': lay['name'], + 'position': lay['position'], + 'priority': lay.get("priority", None), + 'resize_scale': lay['resize_scale'] if "resize_scale" in lay.keys() else None, + 'image_size': lay['image'] if lay['image'] is None else lay['image'].size, + 'gradient_string': lay['gradient_string'] if 'gradient_string' in lay.keys() else 
"", + 'mask_url': lay['mask_url'], + 'image_url': lay['image_url'] if 'image_url' in lay.keys() else None, + 'pattern_image_url': lay['pattern_image_url'] if 'pattern_image_url' in lay.keys() else None, + + # 'image': lay['image'], + # 'mask_image': lay['mask_image'], + }) + items_response['synthesis_url'] = synthesis(layers, new_size, basic) + else: + item_results = process_item(object_data['items'][0], basic) + items_response['layers'].append({ + 'image_category': f"{item_results['name']}_front", + 'image_size': item_results['back_image'].size if item_results['back_image'] else None, + 'position': None, + 'priority': 0, + 'image_url': item_results['front_image_url'], + 'mask_url': item_results['mask_url'], + "gradient_string": item_results['gradient_string'] if 'gradient_string' in item_results.keys() else "", + 'pattern_image_url': item_results['pattern_image_url'] if 'pattern_image_url' in item_results.keys() else None, + + }) + items_response['layers'].append({ + 'image_category': f"{item_results['name']}_back", + 'image_size': item_results['front_image'].size if item_results['front_image'] else None, + 'position': None, + 'priority': 0, + 'image_url': item_results['back_image_url'], + 'mask_url': item_results['mask_url'], + "gradient_string": item_results['gradient_string'] if 'gradient_string' in item_results.keys() else "", + 'pattern_image_url': item_results['pattern_image_url'] if 'pattern_image_url' in item_results.keys() else None, + + }) + items_response['synthesis_url'] = synthesis_single(item_results['front_image'], item_results['back_image']) + return items_response + + +def update_base_size_priority(layers, size): + # 计算透明背景图片的宽度 + min_x = min(info['position'][1] for info in layers) + x_list = [] + for info in layers: + if info['image'] is not None: + x_list.append(info['position'][1] + info['image'].width) + max_x = max(x_list) + new_width = max_x - min_x + new_height = 700 + # 更新坐标 + for info in layers: + info['adaptive_position'] = 
(info['position'][0], info['position'][1] - min_x) + return layers, (new_width, new_height) + + +def run(): + object = {"objects": [{"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 116441, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/outwear_p3139.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 81518, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0628000071.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": 
[199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 65687, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/outwear_746.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 90051, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0628000864.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, 
"scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 67420, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0825001648.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 90354, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0628001300.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", 
"image_id": 67420, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0825001648.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ + {"color": "189 112 112", "icon": "none", "image_id": 101477, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/903000063.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, + "resize_scale": [1.0, 1.0], "type": "Outwear"}]}], "process_id": "3615898424593104"} + + object_result = {} + with ThreadPoolExecutor() as executor: + results = list(executor.map(process_object, object['objects'])) + for i, result in enumerate(results): + object_result[i] = result + + pprint(object_result) + + +if __name__ == '__main__': + start_time = time.time() + run() + print(time.time() - start_time) diff --git a/app/service/design_test/pipeline/__init__.py b/app/service/design_test/pipeline/__init__.py new file mode 100644 index 0000000..ec55933 --- /dev/null +++ b/app/service/design_test/pipeline/__init__.py @@ -0,0 +1,20 @@ +from 
class Color:
    """Fill the garment mask with a solid colour or a gradient pattern.

    Writes ``pattern_image`` / ``final_image`` (shaded fill) and
    ``single_image`` (fill composited onto a white canvas) into *result*.
    """

    def __init__(self, minio_client):
        self.minio_client = minio_client

    def __call__(self, result):
        dim_image_h, dim_image_w = result['image'].shape[0:2]
        if "gradient" in result.keys() and result['gradient'] != "":
            # Gradient path is "bucket/object/..." — split on the first slash only.
            bucket_name = result['gradient'].split('/')[0]
            object_name = result['gradient'][result['gradient'].find('/') + 1:]
            pattern = self.get_gradient(bucket_name=bucket_name, object_name=object_name)
        else:
            pattern = self.get_pattern(result['color'])
        # Both branches resize the pattern identically — hoisted out of the if/else.
        resize_pattern = cv2.resize(pattern, (dim_image_w, dim_image_h), interpolation=cv2.INTER_AREA)
        closed_mo = np.expand_dims(result['mask'], axis=2).repeat(3, axis=2)
        gray_mo = np.expand_dims(result['gray'], axis=2).repeat(3, axis=2)
        # Modulate the pattern by the mask and by the sketch's grayscale shading.
        get_image_fir = resize_pattern * (closed_mo / 255) * (gray_mo / 255)
        result['pattern_image'] = get_image_fir.astype(np.uint8)
        result['final_image'] = result['pattern_image']
        # Composite the filled garment onto a white canvas for the standalone view.
        canvas = np.full_like(result['final_image'], 255)
        temp_bg = np.expand_dims(cv2.bitwise_not(result['mask']), axis=2).repeat(3, axis=2)
        tmp1 = (canvas * (temp_bg / 255)).astype(np.uint8)
        temp_fg = np.expand_dims(result['mask'], axis=2).repeat(3, axis=2)
        tmp2 = (result['final_image'] * (temp_fg / 255)).astype(np.uint8)
        result['single_image'] = cv2.add(tmp1, tmp2)
        result['alpha'] = 100 / 255.0
        return result

    def get_gradient(self, bucket_name, object_name):
        # 获取渐变色图案 — fetch the gradient pattern from OSS; drop alpha if present.
        image = oss_get_image(oss_client=self.minio_client, bucket=bucket_name, object_name=object_name, data_type="cv2")
        if image.shape[2] == 4:
            image = cv2.cvtColor(image, cv2.COLOR_BGRA2BGR)
        return image

    @staticmethod
    def crop_image(image, image_size_h, image_size_w):
        """Crop a random (h, w) window out of a (tiled) pattern image."""
        x_offset = np.random.randint(low=0, high=int(image_size_h / 5) - 6)
        y_offset = np.random.randint(low=0, high=int(image_size_w / 5) - 6)
        return image[x_offset: x_offset + image_size_h, y_offset: y_offset + image_size_w, :]

    @staticmethod
    def get_pattern(single_color):
        """Return a 1x1 BGR pixel for an "R G B" colour string.

        Raises:
            ValueError: if *single_color* is None.

        BUG FIX: the original executed ``raise False``, which itself raises an
        opaque ``TypeError: exceptions must derive from BaseException``; raise
        a meaningful ValueError instead.
        """
        if single_color is None:
            raise ValueError("single_color is required ('R G B' string)")
        R, G, B = single_color.split(' ')
        pattern = np.zeros([1, 1, 3], np.uint8)
        pattern[0, 0, 0] = int(B)
        pattern[0, 0, 1] = int(G)
        pattern[0, 0, 2] = int(R)
        return pattern
logger = logging.getLogger(__name__)


class KeyPoint:
    """Resolve garment keypoints, using Milvus as a cross-request cache.

    'up' garments (blouse/outwear/dress/tops) store 20 keypoint values + 4
    trailing zeros; 'down' garments store 20 leading zeros + 4 values, so a
    single 24-slot vector can hold both sites ("all").
    """

    name = "KeyPoint"

    @classmethod
    def get_name(cls):
        return cls.name

    def __call__(self, result):
        # Only garment categories have keypoint models; everything else passes through.
        if result['name'] in ['blouse', 'skirt', 'dress', 'outwear', 'trousers', 'tops', 'bottoms']:
            site = 'up' if result['name'] in ['blouse', 'outwear', 'dress', 'tops'] else 'down'
            keypoint_cache = self.keypoint_cache(result, site)
            if keypoint_cache is False:
                # Cache lookup failed outright (e.g. Milvus unreachable): infer, then try to store.
                keypoint_infer_result, site = self.infer_keypoint_result(result)
                result['clothes_keypoint'] = self.save_keypoint_cache(result["image_id"], keypoint_infer_result, site)
            else:
                result['clothes_keypoint'] = keypoint_cache
        return result

    @staticmethod
    def infer_keypoint_result(result):
        """Run the keypoint model for *result* and return (keypoints, site)."""
        site = 'up' if result['name'] in ['blouse', 'outwear', 'dress', 'tops'] else 'down'
        keypoint_infer_result = get_keypoint_result(result["image"], site)
        return keypoint_infer_result, site

    @staticmethod
    def save_keypoint_cache(keypoint_id, cache, site):
        """Persist an inferred keypoint vector; always return the decoded keypoints.

        The decoded dict is returned whether or not the Milvus write succeeds,
        so a cache outage never blocks the pipeline.
        """
        if site == "down":
            result = np.concatenate([np.zeros(20, dtype=int), cache.flatten()])
        else:
            result = np.concatenate([cache.flatten(), np.zeros(4, dtype=int)])
        data = [
            {"keypoint_id": keypoint_id,
             "keypoint_site": site,
             "keypoint_vector": result.tolist()
             }
        ]
        try:
            client = MilvusClient(uri=MILVUS_URL, token=MILVUS_TOKEN, db_name=MILVUS_ALIAS)
            try:
                client.upsert(collection_name=MILVUS_TABLE_KEYPOINT, data=data)
            finally:
                client.close()  # BUG FIX: release the connection even when upsert fails
        except Exception as e:
            # Logged at warning (was info): a failed cache write is an operational problem.
            logger.warning(f"save keypoint cache milvus error : {e}")
        return dict(zip(KEYPOINT_RESULT_TABLE_FIELD_SET, result.reshape(12, 2).astype(int).tolist()))

    @staticmethod
    def update_keypoint_cache(keypoint_id, infer_result, search_result, site):
        """Merge a newly inferred site into a cached vector and mark it "all"."""
        if site == "up":
            # We inferred 'up', so the cached vector supplies the 'down' tail.
            result = np.concatenate([infer_result.flatten(), search_result[-4:]])
        else:
            # We inferred 'down', so the cached vector supplies the 'up' head.
            result = np.concatenate([search_result[:20], infer_result.flatten()])
        data = [
            {"keypoint_id": keypoint_id,
             "keypoint_site": "all",
             "keypoint_vector": result.tolist()
             }
        ]
        try:
            client = MilvusClient(uri=MILVUS_URL, token=MILVUS_TOKEN, db_name=MILVUS_ALIAS)
            try:
                client.upsert(collection_name=MILVUS_TABLE_KEYPOINT, data=data)
            finally:
                client.close()  # BUG FIX: the original never closed this client
        except Exception as e:
            logger.warning(f"save keypoint cache milvus error : {e}")
        return dict(zip(KEYPOINT_RESULT_TABLE_FIELD_SET, result.reshape(12, 2).astype(int).tolist()))

    def keypoint_cache(self, result, site):
        """Look up cached keypoints; return a keypoint dict, or False on Milvus errors."""
        try:
            client = MilvusClient(uri=MILVUS_URL, token=MILVUS_TOKEN, db_name=MILVUS_ALIAS)
            try:
                res = client.query(
                    collection_name=MILVUS_TABLE_KEYPOINT,
                    filter=f"keypoint_id == {result['image_id']}",
                    output_fields=['keypoint_vector', 'keypoint_site']
                )
            finally:
                client.close()  # BUG FIX: the original never closed this client
            if len(res) == 0:
                # Cache miss: infer, store, and return the fresh result.
                keypoint_infer_result, site = self.infer_keypoint_result(result)
                return self.save_keypoint_cache(result['image_id'], keypoint_infer_result, site)
            if res[0]["keypoint_site"] in ("all", site):
                # The cached entry covers the site we need — decode and return it.
                return dict(zip(KEYPOINT_RESULT_TABLE_FIELD_SET, np.array(res[0]['keypoint_vector']).astype(int).reshape(12, 2).tolist()))
            # Cached entry covers the other site only: infer ours and upgrade it to "all".
            keypoint_infer_result, site = self.infer_keypoint_result(result)
            return self.update_keypoint_cache(result["image_id"], keypoint_infer_result, res[0]['keypoint_vector'], site)
        except Exception as e:
            logger.warning(f"search keypoint cache milvus error {e}")
            return False
class LoadImage:
    """Fetch a garment image from OSS and prepare grayscale / keypoint metadata.

    Populates image, pre_mask, gray, keypoint, img_shape and ori_shape on the
    pipeline result dict.
    """

    name = "LoadImage"

    def __init__(self, minio_client):
        self.minio_client = minio_client

    @classmethod
    def get_name(cls):
        return cls.name

    def __call__(self, result):
        result['image'], result['pre_mask'] = self.read_image(result['path'])
        result['gray'] = cv2.cvtColor(result['image'], cv2.COLOR_BGR2GRAY)
        result['keypoint'] = self.get_keypoint(result['name'])
        result['img_shape'] = result['image'].shape
        result['ori_shape'] = result['image'].shape
        return result

    def read_image(self, image_path):
        """Read "bucket/object" from OSS; return (BGR image, alpha mask or None)."""
        image_mask = None
        bucket, object_name = image_path.split("/", 1)
        image = oss_get_image(oss_client=self.minio_client, bucket=bucket, object_name=object_name, data_type="cv2")
        if len(image.shape) == 2:
            image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB)
        if image.shape[2] == 4:  # 如果是四通道 mask — four channels: split off alpha as the mask
            image_mask = image[:, :, 3]
            image = image[:, :, :3]

        # BUG FIX: the original used `image.shape[:2] <= (50, 50)`, which compares
        # tuples lexicographically — e.g. a 49x1000 image passed the check and got
        # doubled. Require BOTH dimensions to be small before upscaling.
        if image.shape[0] <= 50 and image.shape[1] <= 50:
            # 计算新尺寸 / 调整大小 — double tiny images so downstream steps have pixels to work with
            new_size = (image.shape[1] * 2, image.shape[0] * 2)
            image = cv2.resize(image, new_size, interpolation=cv2.INTER_LINEAR)
        return image, image_mask

    @staticmethod
    def get_keypoint(name):
        """Map an item category to the anchor keypoint used for placement.

        Raises:
            KeyError: if *name* is not a known item category.
        """
        anchor_by_category = {
            'blouse': 'shoulder', 'outwear': 'shoulder', 'dress': 'shoulder', 'tops': 'shoulder',
            'trousers': 'waistband', 'skirt': 'waistband', 'bottoms': 'waistband',
            'bag': 'hand_point',
            'shoes': 'toe',
            'hairstyle': 'head_point',
            'earring': 'ear_point',
        }
        try:
            return anchor_by_category[name]
        except KeyError:
            raise KeyError(f"{name} does not belong to item category list: blouse, outwear, dress, trousers, skirt, "
                           f"bag, shoes, hairstyle, earring.")
+import numpy as np +from PIL import Image + +from app.service.utils.oss_client import oss_get_image + + +class PrintPainting: + def __init__(self, minio_client): + self.minio_client = minio_client + + def __call__(self, result): + single_print = result['print']['single'] + overall_print = result['print']['overall'] + element_print = result['print']['element'] + result['single_image'] = None + result['print_image'] = None + if overall_print['print_path_list']: + painting_dict = {'dim_image_h': result['pattern_image'].shape[0], 'dim_image_w': result['pattern_image'].shape[1]} + result['print_image'] = result['pattern_image'] + if "print_angle_list" in overall_print.keys() and overall_print['print_angle_list'][0] != 0: + painting_dict = self.painting_collection(painting_dict, overall_print, print_trigger=True) + painting_dict['tile_print'] = self.rotate_crop_image(img=painting_dict['tile_print'], angle=-overall_print['print_angle_list'][0], crop=True) + painting_dict['mask_inv_print'] = self.rotate_crop_image(img=painting_dict['mask_inv_print'], angle=-overall_print['print_angle_list'][0], crop=True) + + # resize 到sketch大小 + painting_dict['tile_print'] = self.resize_and_crop(img=painting_dict['tile_print'], target_width=painting_dict['dim_image_w'], target_height=painting_dict['dim_image_h']) + painting_dict['mask_inv_print'] = self.resize_and_crop(img=painting_dict['mask_inv_print'], target_width=painting_dict['dim_image_w'], target_height=painting_dict['dim_image_h']) + else: + painting_dict = self.painting_collection(painting_dict, overall_print, print_trigger=True, is_single=False) + result['print_image'] = self.printpaint(result, painting_dict, print_=True) + result['single_image'] = result['final_image'] = result['pattern_image'] = result['print_image'] + + if single_print['print_path_list']: + print_background = np.zeros((result['pattern_image'].shape[0], result['pattern_image'].shape[1], 3), dtype=np.uint8) + mask_background = 
np.zeros((result['pattern_image'].shape[0], result['pattern_image'].shape[1], 3), dtype=np.uint8) + for i in range(len(single_print['print_path_list'])): + image, image_mode = self.read_image(single_print['print_path_list'][i]) + if image_mode == "RGBA": + new_size = (int(image.width * single_print['print_scale_list'][i]), int(image.height * single_print['print_scale_list'][i])) + + mask = image.split()[3] + resized_source = image.resize(new_size) + resized_source_mask = mask.resize(new_size) + + rotated_resized_source = resized_source.rotate(-single_print['print_angle_list'][i]) + rotated_resized_source_mask = resized_source_mask.rotate(-single_print['print_angle_list'][i]) + + source_image_pil = Image.fromarray(cv2.cvtColor(print_background, cv2.COLOR_BGR2RGB)) + source_image_pil_mask = Image.fromarray(cv2.cvtColor(mask_background, cv2.COLOR_BGR2RGB)) + + source_image_pil.paste(rotated_resized_source, (int(single_print['location'][i][0]), int(single_print['location'][i][1])), rotated_resized_source) + source_image_pil_mask.paste(rotated_resized_source_mask, (int(single_print['location'][i][0]), int(single_print['location'][i][1])), rotated_resized_source_mask) + + print_background = cv2.cvtColor(np.array(source_image_pil), cv2.COLOR_RGBA2BGR) + mask_background = cv2.cvtColor(np.array(source_image_pil_mask), cv2.COLOR_RGBA2BGR) + ret, mask_background = cv2.threshold(mask_background, 124, 255, cv2.THRESH_BINARY) + else: + mask = self.get_mask_inv(image) + mask = np.expand_dims(mask, axis=2) + mask = cv2.cvtColor(mask, cv2.COLOR_GRAY2BGR) + mask = cv2.bitwise_not(mask) + # 旋转后的坐标需要重新算 + rotate_mask, _ = self.img_rotate(mask, single_print['print_angle_list'][i], single_print['print_scale_list'][i]) + rotate_image, rotated_new_size = self.img_rotate(image, single_print['print_angle_list'][i], single_print['print_scale_list'][i]) + # x, y = int(result['print']['location'][i][0] - rotated_new_size[0] - (rotate_mask.shape[0] - image.shape[0]) / 2), 
int(result['print']['location'][i][1] - rotated_new_size[1] - (rotate_mask.shape[1] - image.shape[1]) / 2) + x, y = int(single_print['location'][i][0] - rotated_new_size[0]), int(single_print['location'][i][1] - rotated_new_size[1]) + + image_x = print_background.shape[1] + image_y = print_background.shape[0] + print_x = rotate_image.shape[1] + print_y = rotate_image.shape[0] + + # 有bug + # if x + print_x > image_x: + # rotate_image = rotate_image[:, :x + print_x - image_x] + # rotate_mask = rotate_mask[:, :x + print_x - image_x] + # # + # if y + print_y > image_y: + # rotate_image = rotate_image[:y + print_y - image_y] + # rotate_mask = rotate_mask[:y + print_y - image_y] + + # 不能是并行 + # 当前第一轮的if (108以及115)是判断有没有过下界和右界。第二轮的是判断左上有没有超出。 如果这个样子的话,先裁了右边,再左移,region就会有问题 + # 先挪 再判断 最后裁剪 + + # 如果print旋转了 或者 print贴边了 则需要判断 判断左界和上界是否小于0 + if x <= 0: + rotate_image = rotate_image[:, -x:] + rotate_mask = rotate_mask[:, -x:] + start_x = x = 0 + else: + start_x = x + + if y <= 0: + rotate_image = rotate_image[-y:, :] + rotate_mask = rotate_mask[-y:, :] + start_y = y = 0 + else: + start_y = y + + # ------------------ + # 如果print-size大于image-size 则需要裁剪print + + if x + print_x > image_x: + rotate_image = rotate_image[:, :image_x - x] + rotate_mask = rotate_mask[:, :image_x - x] + + if y + print_y > image_y: + rotate_image = rotate_image[:image_y - y, :] + rotate_mask = rotate_mask[:image_y - y, :] + + # mask_background[start_y:y + rotate_mask.shape[0], start_x:x + rotate_mask.shape[1]] = cv2.bitwise_xor(mask_background[start_y:y + rotate_mask.shape[0], start_x:x + rotate_mask.shape[1]], rotate_mask) + # print_background[start_y:y + rotate_image.shape[0], start_x:x + rotate_image.shape[1]] = cv2.add(print_background[start_y:y + rotate_image.shape[0], start_x:x + rotate_image.shape[1]], rotate_image) + + # mask_background[start_y:y + rotate_mask.shape[0], start_x:x + rotate_mask.shape[1]] = rotate_mask + # print_background[start_y:y + rotate_image.shape[0], start_x:x + 
rotate_image.shape[1]] = rotate_image + mask_background = self.stack_prin(mask_background, result['pattern_image'], rotate_mask, start_y, y, start_x, x) + print_background = self.stack_prin(print_background, result['pattern_image'], rotate_image, start_y, y, start_x, x) + + # gray_image = cv2.cvtColor(mask_background, cv2.COLOR_BGR2GRAY) + # print_background = cv2.bitwise_and(print_background, print_background, mask=gray_image) + + print_mask = cv2.bitwise_and(result['mask'], cv2.cvtColor(mask_background, cv2.COLOR_BGR2GRAY)) + img_fg = cv2.bitwise_or(print_background, print_background, mask=print_mask) + img_bg = cv2.bitwise_and(result['pattern_image'], result['pattern_image'], mask=cv2.bitwise_not(print_mask)) + mask_mo = np.expand_dims(print_mask, axis=2).repeat(3, axis=2) + gray_mo = np.expand_dims(result['gray'], axis=2).repeat(3, axis=2) + img_fg = (img_fg * (mask_mo / 255) * (gray_mo / 255)).astype(np.uint8) + result['final_image'] = cv2.add(img_bg, img_fg) + canvas = np.full_like(result['final_image'], 255) + temp_bg = np.expand_dims(cv2.bitwise_not(result['mask']), axis=2).repeat(3, axis=2) + tmp1 = (canvas * (temp_bg / 255)).astype(np.uint8) + temp_fg = np.expand_dims(result['mask'], axis=2).repeat(3, axis=2) + tmp2 = (result['final_image'] * (temp_fg / 255)).astype(np.uint8) + result['single_image'] = cv2.add(tmp1, tmp2) + + if element_print['element_path_list']: + print_background = np.zeros((result['final_image'].shape[0], result['final_image'].shape[1], 3), dtype=np.uint8) + mask_background = np.zeros((result['final_image'].shape[0], result['final_image'].shape[1], 3), dtype=np.uint8) + for i in range(len(element_print['element_path_list'])): + image, image_mode = self.read_image(element_print['element_path_list'][i]) + if image_mode == "RGBA": + new_size = (int(image.width * element_print['element_scale_list'][i]), int(image.height * element_print['element_scale_list'][i])) + + mask = image.split()[3] + resized_source = image.resize(new_size) + 
resized_source_mask = mask.resize(new_size) + + rotated_resized_source = resized_source.rotate(-element_print['element_angle_list'][i]) + rotated_resized_source_mask = resized_source_mask.rotate(-element_print['element_angle_list'][i]) + + source_image_pil = Image.fromarray(cv2.cvtColor(print_background, cv2.COLOR_BGR2RGB)) + source_image_pil_mask = Image.fromarray(cv2.cvtColor(mask_background, cv2.COLOR_BGR2RGB)) + + source_image_pil.paste(rotated_resized_source, (int(element_print['location'][i][0]), int(element_print['location'][i][1])), rotated_resized_source) + source_image_pil_mask.paste(rotated_resized_source_mask, (int(element_print['location'][i][0]), int(element_print['location'][i][1])), rotated_resized_source_mask) + + print_background = cv2.cvtColor(np.array(source_image_pil), cv2.COLOR_RGBA2BGR) + mask_background = cv2.cvtColor(np.array(source_image_pil_mask), cv2.COLOR_RGBA2BGR) + else: + mask = self.get_mask_inv(image) + mask = np.expand_dims(mask, axis=2) + mask = cv2.cvtColor(mask, cv2.COLOR_GRAY2BGR) + mask = cv2.bitwise_not(mask) + # 旋转后的坐标需要重新算 + rotate_mask, _ = self.img_rotate(mask, element_print['element_angle_list'][i], element_print['element_scale_list'][i]) + rotate_image, rotated_new_size = self.img_rotate(image, element_print['element_angle_list'][i], element_print['element_scale_list'][i]) + # x, y = int(result['print']['location'][i][0] - rotated_new_size[0] - (rotate_mask.shape[0] - image.shape[0]) / 2), int(result['print']['location'][i][1] - rotated_new_size[1] - (rotate_mask.shape[1] - image.shape[1]) / 2) + x, y = int(element_print['location'][i][0] - rotated_new_size[0]), int(element_print['location'][i][1] - rotated_new_size[1]) + + image_x = print_background.shape[1] + image_y = print_background.shape[0] + print_x = rotate_image.shape[1] + print_y = rotate_image.shape[0] + + # 有bug + # if x + print_x > image_x: + # rotate_image = rotate_image[:, :x + print_x - image_x] + # rotate_mask = rotate_mask[:, :x + print_x - image_x] + 
# # + # if y + print_y > image_y: + # rotate_image = rotate_image[:y + print_y - image_y] + # rotate_mask = rotate_mask[:y + print_y - image_y] + + # 不能是并行 + # 当前第一轮的if (108以及115)是判断有没有过下界和右界。第二轮的是判断左上有没有超出。 如果这个样子的话,先裁了右边,再左移,region就会有问题 + # 先挪 再判断 最后裁剪 + + # 如果print旋转了 或者 print贴边了 则需要判断 判断左界和上界是否小于0 + if x <= 0: + rotate_image = rotate_image[:, -x:] + rotate_mask = rotate_mask[:, -x:] + start_x = x = 0 + else: + start_x = x + + if y <= 0: + rotate_image = rotate_image[-y:, :] + rotate_mask = rotate_mask[-y:, :] + start_y = y = 0 + else: + start_y = y + + # ------------------ + # 如果print-size大于image-size 则需要裁剪print + + if x + print_x > image_x: + rotate_image = rotate_image[:, :image_x - x] + rotate_mask = rotate_mask[:, :image_x - x] + + if y + print_y > image_y: + rotate_image = rotate_image[:image_y - y, :] + rotate_mask = rotate_mask[:image_y - y, :] + + # mask_background[start_y:y + rotate_mask.shape[0], start_x:x + rotate_mask.shape[1]] = cv2.bitwise_xor(mask_background[start_y:y + rotate_mask.shape[0], start_x:x + rotate_mask.shape[1]], rotate_mask) + # print_background[start_y:y + rotate_image.shape[0], start_x:x + rotate_image.shape[1]] = cv2.add(print_background[start_y:y + rotate_image.shape[0], start_x:x + rotate_image.shape[1]], rotate_image) + + # mask_background[start_y:y + rotate_mask.shape[0], start_x:x + rotate_mask.shape[1]] = rotate_mask + # print_background[start_y:y + rotate_image.shape[0], start_x:x + rotate_image.shape[1]] = rotate_image + mask_background = self.stack_prin(mask_background, result['pattern_image'], rotate_mask, start_y, y, start_x, x) + print_background = self.stack_prin(print_background, result['pattern_image'], rotate_image, start_y, y, start_x, x) + + # gray_image = cv2.cvtColor(mask_background, cv2.COLOR_BGR2GRAY) + # print_background = cv2.bitwise_and(print_background, print_background, mask=gray_image) + + print_mask = cv2.bitwise_and(result['mask'], cv2.cvtColor(mask_background, cv2.COLOR_BGR2GRAY)) + img_fg = 
cv2.bitwise_or(print_background, print_background, mask=print_mask) + # TODO element 丢失信息 + three_channel_image = cv2.merge([cv2.bitwise_not(print_mask), cv2.bitwise_not(print_mask), cv2.bitwise_not(print_mask)]) + img_bg = cv2.bitwise_and(result['final_image'], three_channel_image) + # mask_mo = np.expand_dims(print_mask, axis=2).repeat(3, axis=2) + # gray_mo = np.expand_dims(result['gray'], axis=2).repeat(3, axis=2) + # img_fg = (img_fg * (mask_mo / 255) * (gray_mo / 255)).astype(np.uint8) + result['final_image'] = cv2.add(img_bg, img_fg) + canvas = np.full_like(result['final_image'], 255) + temp_bg = np.expand_dims(cv2.bitwise_not(result['mask']), axis=2).repeat(3, axis=2) + tmp1 = (canvas * (temp_bg / 255)).astype(np.uint8) + temp_fg = np.expand_dims(result['mask'], axis=2).repeat(3, axis=2) + tmp2 = (result['final_image'] * (temp_fg / 255)).astype(np.uint8) + result['single_image'] = cv2.add(tmp1, tmp2) + return result + + @staticmethod + def stack_prin(print_background, pattern_image, rotate_image, start_y, y, start_x, x): + temp_print = np.zeros((pattern_image.shape[0], pattern_image.shape[1], 3), dtype=np.uint8) + temp_print[start_y:y + rotate_image.shape[0], start_x:x + rotate_image.shape[1]] = rotate_image + img2gray = cv2.cvtColor(temp_print, cv2.COLOR_BGR2GRAY) + ret, mask_ = cv2.threshold(img2gray, 1, 255, cv2.THRESH_BINARY) + mask_inv = cv2.bitwise_not(mask_) + img1_bg = cv2.bitwise_and(print_background, print_background, mask=mask_inv) + img2_fg = cv2.bitwise_and(temp_print, temp_print, mask=mask_) + print_background = img1_bg + img2_fg + return print_background + + def painting_collection(self, painting_dict, print_dict, print_trigger=False, is_single=False): + if print_trigger: + print_ = self.get_print(print_dict) + painting_dict['Trigger'] = not is_single + painting_dict['location'] = print_['location'] + single_mask_inv_print = self.get_mask_inv(print_['image']) + dim_max = max(painting_dict['dim_image_h'], painting_dict['dim_image_w']) + 
dim_pattern = (int(dim_max * print_['scale'] / 5), int(dim_max * print_['scale'] / 5)) + if not is_single: + self.random_seed = random.randint(0, 1000) + # 如果print 模式为overall 且 有角度的话 , 组合的print为正方形,方便裁剪 + if "print_angle_list" in print_dict.keys() and print_dict['print_angle_list'][0] != 0: + painting_dict['mask_inv_print'] = self.tile_image(single_mask_inv_print, dim_pattern, print_['scale'], dim_max, dim_max, painting_dict['location'], trigger=True) + painting_dict['tile_print'] = self.tile_image(print_['image'], dim_pattern, print_['scale'], dim_max, dim_max, painting_dict['location'], trigger=True) + else: + painting_dict['mask_inv_print'] = self.tile_image(single_mask_inv_print, dim_pattern, print_['scale'], painting_dict['dim_image_h'], painting_dict['dim_image_w'], painting_dict['location'], trigger=True) + painting_dict['tile_print'] = self.tile_image(print_['image'], dim_pattern, print_['scale'], painting_dict['dim_image_h'], painting_dict['dim_image_w'], painting_dict['location'], trigger=True) + else: + painting_dict['mask_inv_print'] = self.tile_image(single_mask_inv_print, dim_pattern, print_['scale'], painting_dict['dim_image_h'], painting_dict['dim_image_w'], painting_dict['location']) + painting_dict['tile_print'] = self.tile_image(print_['image'], dim_pattern, print_['scale'], painting_dict['dim_image_h'], painting_dict['dim_image_w'], painting_dict['location']) + painting_dict['dim_print_h'], painting_dict['dim_print_w'] = dim_pattern + return painting_dict + + def tile_image(self, pattern, dim, scale, dim_image_h, dim_image_w, location, trigger=False): + tile = None + if not trigger: + tile = cv2.resize(pattern, dim, interpolation=cv2.INTER_AREA) + else: + resize_pattern = cv2.resize(pattern, dim, interpolation=cv2.INTER_AREA) + if len(pattern.shape) == 2: + tile = np.tile(resize_pattern, (int((5 + 1) / scale) + 4, int((5 + 1) / scale) + 4)) + if len(pattern.shape) == 3: + tile = np.tile(resize_pattern, (int((5 + 1) / scale) + 4, int((5 + 1) / 
scale) + 4, 1)) + tile = self.crop_image(tile, dim_image_h, dim_image_w, location, resize_pattern.shape) + return tile + + def get_mask_inv(self, print_): + if print_[0][0][0] == 255 and print_[0][0][1] == 255 and print_[0][0][2] == 255: + bg_color = cv2.cvtColor(print_, cv2.COLOR_BGR2LAB)[0][0] + print_tile = cv2.cvtColor(print_, cv2.COLOR_BGR2LAB) + bg_l, bg_a, bg_b = bg_color[0], bg_color[1], bg_color[2] + bg_L_high, bg_L_low = self.get_low_high_lab(bg_l, L=True) + bg_a_high, bg_a_low = self.get_low_high_lab(bg_a) + bg_b_high, bg_b_low = self.get_low_high_lab(bg_b) + lower = np.array([bg_L_low, bg_a_low, bg_b_low]) + upper = np.array([bg_L_high, bg_a_high, bg_b_high]) + mask_inv = cv2.inRange(print_tile, lower, upper) + return mask_inv + else: + # bg_color = cv2.cvtColor(print_, cv2.COLOR_BGR2LAB)[0][0] + # print_tile = cv2.cvtColor(print_, cv2.COLOR_BGR2LAB) + # bg_l, bg_a, bg_b = bg_color[0], bg_color[1], bg_color[2] + # bg_L_high, bg_L_low = self.get_low_high_lab(bg_l, L=True) + # bg_a_high, bg_a_low = self.get_low_high_lab(bg_a) + # bg_b_high, bg_b_low = self.get_low_high_lab(bg_b) + # lower = np.array([bg_L_low, bg_a_low, bg_b_low]) + # upper = np.array([bg_L_high, bg_a_high, bg_b_high]) + + # print_tile = cv2.cvtColor(print_, cv2.COLOR_BGR2LAB) + # mask_inv = cv2.cvtColor(print_tile, cv2.COLOR_BGR2GRAY) + + # mask_inv = cv2.cvtColor(print_, cv2.COLOR_BGR2GRAY) + mask_inv = np.zeros(print_.shape[:2], dtype=np.uint8) + return mask_inv + + @staticmethod + def printpaint(result, painting_dict, print_=False): + + if print_ and painting_dict['Trigger']: + print_mask = cv2.bitwise_and(result['mask'], cv2.bitwise_not(painting_dict['mask_inv_print'])) + img_fg = cv2.bitwise_and(painting_dict['tile_print'], painting_dict['tile_print'], mask=print_mask) + else: + print_mask = result['mask'] + img_fg = result['final_image'] + if print_ and not painting_dict['Trigger']: + index_ = None + try: + index_ = len(painting_dict['location']) + except: + assert f'there must be 
parameter of location if choose IfSingle' + + for i in range(index_): + start_h, start_w = int(painting_dict['location'][i][1]), int(painting_dict['location'][i][0]) + + length_h = min(start_h + painting_dict['dim_print_h'], img_fg.shape[0]) + length_w = min(start_w + painting_dict['dim_print_w'], img_fg.shape[1]) + + change_region = img_fg[start_h: length_h, start_w: length_w, :] + # problem in change_mask + change_mask = print_mask[start_h: length_h, start_w: length_w] + # get real part into change mask + _, change_mask = cv2.threshold(change_mask, 220, 255, cv2.THRESH_BINARY) + mask = cv2.bitwise_not(painting_dict['mask_inv_print']) + img_fg[start_h:start_h + painting_dict['dim_print_h'], start_w:start_w + painting_dict['dim_print_w'], :] = change_region + + clothes_mask_print = cv2.bitwise_not(print_mask) + + img_bg = cv2.bitwise_and(result['pattern_image'], result['pattern_image'], mask=clothes_mask_print) + mask_mo = np.expand_dims(print_mask, axis=2).repeat(3, axis=2) + gray_mo = np.expand_dims(result['gray'], axis=2).repeat(3, axis=2) + img_fg = (img_fg * (mask_mo / 255) * (gray_mo / 255)).astype(np.uint8) + print_image = cv2.add(img_bg, img_fg) + return print_image + + def get_print(self, print_dict): + if 'print_scale_list' not in print_dict.keys() or print_dict['print_scale_list'][0] < 0.3: + print_dict['scale'] = 0.3 + else: + print_dict['scale'] = print_dict['print_scale_list'][0] + + bucket_name = print_dict['print_path_list'][0].split("/", 1)[0] + object_name = print_dict['print_path_list'][0].split("/", 1)[1] + image = oss_get_image(oss_client=self.minio_client, bucket=bucket_name, object_name=object_name, data_type="PIL") + # 判断图片格式,如果是RGBA 则贴在一张纯白图片上 防止透明转黑 + if image.mode == "RGBA": + new_background = Image.new('RGB', image.size, (255, 255, 255)) + new_background.paste(image, mask=image.split()[3]) + image = new_background + print_dict['image'] = cv2.cvtColor(np.asarray(image), cv2.COLOR_RGB2BGR) + return print_dict + + def crop_image(self, 
image, image_size_h, image_size_w, location, print_shape): + print_w = print_shape[1] + print_h = print_shape[0] + + random.seed(self.random_seed) + # logging.info(f'overall print location : {location}') + # x_offset = random.randint(0, image.shape[0] - image_size_h) + # y_offset = random.randint(0, image.shape[1] - image_size_w) + + # 1.拿到偏移量后和resize后的print宽高取余 得到真正偏移量 + x_offset = print_w - int(location[0][1] % print_w) + y_offset = print_w - int(location[0][0] % print_h) + + # y_offset = int(location[0][0]) + # x_offset = int(location[0][1]) + + if len(image.shape) == 2: + image = image[x_offset: x_offset + image_size_h, y_offset: y_offset + image_size_w] + elif len(image.shape) == 3: + image = image[x_offset: x_offset + image_size_h, y_offset: y_offset + image_size_w, :] + return image + + @staticmethod + def get_low_high_lab(Lab_value, L=False): + if L: + high = Lab_value + 30 if Lab_value + 30 < 255 else 255 + low = Lab_value - 30 if Lab_value - 30 > 0 else 0 + else: + high = Lab_value + 30 if Lab_value + 30 < 255 else 255 + low = Lab_value - 30 if Lab_value - 30 > 0 else 0 + return high, low + + @staticmethod + def img_rotate(image, angel, scale): + """顺时针旋转图像任意角度 + + Args: + image (np.array): [原始图像] + angel (float): [逆时针旋转的角度] + + Returns: + [array]: [旋转后的图像] + """ + + h, w = image.shape[:2] + center = (w // 2, h // 2) + # if type(angel) is not int: + # angel = 0 + M = cv2.getRotationMatrix2D(center, -angel, scale) + # 调整旋转后的图像长宽 + rotated_h = int((w * np.abs(M[0, 1]) + (h * np.abs(M[0, 0])))) + rotated_w = int((h * np.abs(M[0, 1]) + (w * np.abs(M[0, 0])))) + M[0, 2] += (rotated_w - w) // 2 + M[1, 2] += (rotated_h - h) // 2 + # 旋转图像 + rotated_img = cv2.warpAffine(image, M, (rotated_w, rotated_h)) + + return rotated_img, ((rotated_img.shape[1] - image.shape[1] * scale) // 2, (rotated_img.shape[0] - image.shape[0] * scale) // 2) + # return rotated_img, (0, 0) + + @staticmethod + def rotate_crop_image(img, angle, crop): + """ + angle: 旋转的角度 + crop: 
是否需要进行裁剪,布尔向量 + """ + crop_image = lambda img, x0, y0, w, h: img[y0:y0 + h, x0:x0 + w] + w, h = img.shape[:2] + # 旋转角度的周期是360° + angle %= 360 + # 计算仿射变换矩阵 + M_rotation = cv2.getRotationMatrix2D((w / 2, h / 2), angle, 1) + # 得到旋转后的图像 + img_rotated = cv2.warpAffine(img, M_rotation, (w, h)) + + # 如果需要去除黑边 + if crop: + # 裁剪角度的等效周期是180° + angle_crop = angle % 180 + if angle > 90: + angle_crop = 180 - angle_crop + # 转化角度为弧度 + theta = angle_crop * np.pi / 180 + # 计算高宽比 + hw_ratio = float(h) / float(w) + # 计算裁剪边长系数的分子项 + tan_theta = np.tan(theta) + numerator = np.cos(theta) + np.sin(theta) * np.tan(theta) + + # 计算分母中和高宽比相关的项 + r = hw_ratio if h > w else 1 / hw_ratio + # 计算分母项 + denominator = r * tan_theta + 1 + # 最终的边长系数 + crop_mult = numerator / denominator + + # 得到裁剪区域 + w_crop = int(crop_mult * w) + h_crop = int(crop_mult * h) + x0 = int((w - w_crop) / 2) + y0 = int((h - h_crop) / 2) + + img_rotated = crop_image(img_rotated, x0, y0, w_crop, h_crop) + + return img_rotated + + def read_image(self, image_url): + image = oss_get_image(oss_client=self.minio_client, bucket=image_url.split("/", 1)[0], object_name=image_url.split("/", 1)[1], data_type="cv2") + if image.shape[2] == 4: + image_rgb = cv2.cvtColor(image, cv2.COLOR_BGRA2RGBA) + image = Image.fromarray(image_rgb) + image_mode = "RGBA" + else: + image_mode = "RGB" + return image, image_mode + + @staticmethod + def resize_and_crop(img, target_width, target_height): + # 获取原始图像的尺寸 + original_height, original_width = img.shape[:2] + + # 计算目标尺寸的宽高比 + target_ratio = target_width / target_height + + # 计算原始图像的宽高比 + original_ratio = original_width / original_height + + # 调整尺寸 + if original_ratio > target_ratio: + # 原始图像更宽,按高度resize,然后裁剪宽度 + new_height = target_height + new_width = int(original_width * (target_height / original_height)) + resized_img = cv2.resize(img, (new_width, new_height)) + # 裁剪宽度 + start_x = (new_width - target_width) // 2 + cropped_img = resized_img[:, start_x:start_x + target_width] + else: + # 
原始图像更高,按宽度resize,然后裁剪高度 + new_width = target_width + new_height = int(original_height * (target_width / original_width)) + resized_img = cv2.resize(img, (new_width, new_height)) + # 裁剪高度 + start_y = (new_height - target_height) // 2 + cropped_img = resized_img[start_y:start_y + target_height, :] + + return cropped_img diff --git a/app/service/design_test/pipeline/scale.py b/app/service/design_test/pipeline/scale.py new file mode 100644 index 0000000..732fcd8 --- /dev/null +++ b/app/service/design_test/pipeline/scale.py @@ -0,0 +1,49 @@ +import math + +import cv2 + + +class Scaling: + def __call__(self, result): + if result['keypoint'] in ['waistband', 'shoulder', 'head_point']: + # milvus_db_keypoint_cache + distance_clo = math.sqrt( + (int(result['clothes_keypoint'][result['keypoint'] + '_left'][0]) - int(result['clothes_keypoint'][result['keypoint'] + '_right'][0])) ** 2 + + + (int(result['clothes_keypoint'][result['keypoint'] + '_left'][1]) - int(result['clothes_keypoint'][result['keypoint'] + '_right'][1])) ** 2 + ) + + distance_bdy = math.sqrt( + (int(result['body_point_test'][result['keypoint'] + '_left'][0]) + - + int(result['body_point_test'][result['keypoint'] + '_right'][0])) ** 2 + 1 + ) + + if distance_clo == 0: + result['scale'] = 1 + else: + result['scale'] = distance_bdy / distance_clo + elif result['keypoint'] == 'toe': + distance_bdy = math.sqrt( + (int(result['body_point_test']['foot_length'][0]) - int(result['body_point_test']['foot_length'][2])) ** 2 + + + (int(result['body_point_test']['foot_length'][1]) - int(result['body_point_test']['foot_length'][3])) ** 2 + ) + + Blur = cv2.GaussianBlur(result['gray'], (3, 3), 0) + Edge = cv2.Canny(Blur, 10, 200) + Edge = cv2.dilate(Edge, None) + Edge = cv2.erode(Edge, None) + Contour, _ = cv2.findContours(Edge, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) + Contours = sorted(Contour, key=cv2.contourArea, reverse=True) + + Max_contour = Contours[0] + x, y, w, h = cv2.boundingRect(Max_contour) + width = w + 
distance_clo = width + result['scale'] = distance_bdy / distance_clo + elif result['keypoint'] == 'hand_point': + result['scale'] = result['scale_bag'] + elif result['keypoint'] == 'ear_point': + result['scale'] = result['scale_earrings'] + return result diff --git a/app/service/design_test/pipeline/segmentation.py b/app/service/design_test/pipeline/segmentation.py new file mode 100644 index 0000000..156742f --- /dev/null +++ b/app/service/design_test/pipeline/segmentation.py @@ -0,0 +1,70 @@ +import logging +import os + +import cv2 +import numpy as np + +from app.core.config import SEG_CACHE_PATH +from app.service.design.utils.design_ensemble import get_seg_result +from app.service.utils.oss_client import oss_get_image + +logger = logging.getLogger() + + +class Segmentation: + def __init__(self, minio_client): + self.minio_client = minio_client + + def __call__(self, result): + if "seg_mask_url" in result.keys() and result['seg_mask_url'] != "": + seg_mask = oss_get_image(oss_client=self.minio_client, bucket=result['seg_mask_url'].split('/')[0], object_name=result['seg_mask_url'][result['seg_mask_url'].find('/') + 1:], data_type="cv2") + seg_mask = cv2.resize(seg_mask, (result['img_shape'][1], result['img_shape'][0]), interpolation=cv2.INTER_NEAREST) + # 转换颜色空间为 RGB(OpenCV 默认是 BGR) + image_rgb = cv2.cvtColor(seg_mask, cv2.COLOR_BGR2RGB) + + r, g, b = cv2.split(image_rgb) + red_mask = r > g + green_mask = g > r + + # 创建红色和绿色掩码 + result['front_mask'] = np.array(red_mask, dtype=np.uint8) * 255 + result['back_mask'] = np.array(green_mask, dtype=np.uint8) * 255 + result['mask'] = result['front_mask'] + result['back_mask'] + else: + # 本地查询seg 缓存是否存在 + _, seg_result = self.load_seg_result(result["image_id"]) + result['seg_result'] = seg_result + if not _: + # 推理获得seg 结果 + seg_result = get_seg_result(result["image_id"], result['image'])[0] + self.save_seg_result(seg_result, result['image_id']) + # 处理前片后片 + temp_front = seg_result == 1.0 + result['front_mask'] = (255 * 
(temp_front + 0).astype(np.uint8)) + temp_back = seg_result == 2.0 + result['back_mask'] = (255 * (temp_back + 0).astype(np.uint8)) + result['mask'] = result['front_mask'] + result['back_mask'] + return result + + @staticmethod + def save_seg_result(seg_result, image_id): + file_path = f"{SEG_CACHE_PATH}{image_id}.npy" + try: + np.save(file_path, seg_result) + print("保存成功", os.path.abspath(file_path)) + except Exception as e: + print(f"保存失败: {e}") + + @staticmethod + def load_seg_result(image_id): + file_path = f"{SEG_CACHE_PATH}{image_id}.npy" + logger.info(f"load seg file name is :{SEG_CACHE_PATH}{image_id}.npy") + try: + seg_result = np.load(file_path) + return True, seg_result + except FileNotFoundError: + print("文件不存在") + return False, None + except Exception as e: + print(f"加载失败: {e}") + return False, None diff --git a/app/service/design_test/pipeline/split.py b/app/service/design_test/pipeline/split.py new file mode 100644 index 0000000..1fa4215 --- /dev/null +++ b/app/service/design_test/pipeline/split.py @@ -0,0 +1,74 @@ +import io +import logging + +import cv2 +import numpy as np +from PIL import Image +from cv2 import cvtColor, COLOR_BGR2RGBA + +from app.core.config import AIDA_CLOTHING +from app.service.design.utils.conversion_image import rgb_to_rgba +from app.service.design.utils.upload_image import upload_png_mask +from app.service.utils.generate_uuid import generate_uuid +from app.service.utils.oss_client import oss_upload_image + + +class Split(object): + def __init__(self, minio_client): + self.minio_client = minio_client + + def __call__(self, result): + try: + + if result['name'] in ('outwear', 'dress', 'blouse', 'skirt', 'trousers', 'tops', 'bottoms'): + front_mask = result['front_mask'] + back_mask = result['back_mask'] + rgba_image = rgb_to_rgba(result['final_image'], front_mask + back_mask) + new_size = (int(rgba_image.shape[1] * result["scale"] * result["resize_scale"][0]), int(rgba_image.shape[0] * result["scale"] * 
result["resize_scale"][1])) + rgba_image = cv2.resize(rgba_image, new_size) + result_front_image = np.zeros_like(rgba_image) + front_mask = cv2.resize(front_mask, new_size) + result_front_image[front_mask != 0] = rgba_image[front_mask != 0] + result_front_image_pil = Image.fromarray(cvtColor(result_front_image, COLOR_BGR2RGBA)) + result['front_image'], result["front_image_url"], _ = upload_png_mask(self.minio_client, result_front_image_pil, f'{generate_uuid()}', mask=None) + + height, width = front_mask.shape + mask_image = np.zeros((height, width, 3)) + mask_image[front_mask != 0] = [0, 0, 255] + + if result["name"] in ('blouse', 'dress', 'outwear', 'tops'): + result_back_image = np.zeros_like(rgba_image) + back_mask = cv2.resize(back_mask, new_size) + result_back_image[back_mask != 0] = rgba_image[back_mask != 0] + result_back_image_pil = Image.fromarray(cvtColor(result_back_image, COLOR_BGR2RGBA)) + result['back_image'], result["back_image_url"], _ = upload_png_mask(self.minio_client, result_back_image_pil, f'{generate_uuid()}', mask=None) + mask_image[back_mask != 0] = [0, 255, 0] + + rbga_mask = rgb_to_rgba(mask_image, front_mask + back_mask) + mask_pil = Image.fromarray(cvtColor(rbga_mask.astype(np.uint8), COLOR_BGR2RGBA)) + image_data = io.BytesIO() + mask_pil.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + req = oss_upload_image(oss_client=self.minio_client, bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", image_bytes=image_bytes) + result['mask_url'] = req.bucket_name + "/" + req.object_name + else: + rbga_mask = rgb_to_rgba(mask_image, front_mask) + mask_pil = Image.fromarray(cvtColor(rbga_mask.astype(np.uint8), COLOR_BGR2RGBA)) + image_data = io.BytesIO() + mask_pil.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + req = oss_upload_image(oss_client=self.minio_client, bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", 
image_bytes=image_bytes) + result['mask_url'] = req.bucket_name + "/" + req.object_name + result['back_image'] = None + result["back_image_url"] = None + # result["back_mask_url"] = None + # result['back_mask_image'] = None + # 创建中间图层 + result_pattern_image_rgba = rgb_to_rgba(result['pattern_image'], result['mask']) + result_pattern_image_pil = Image.fromarray(cvtColor(result_pattern_image_rgba, COLOR_BGR2RGBA)) + result['pattern_image'], result['pattern_image_url'], _ = upload_png_mask(self.minio_client, result_pattern_image_pil, f'{generate_uuid()}') + return result + except Exception as e: + logging.warning(f"split runtime exception : {e} image_id : {result['image_id']}") diff --git a/app/service/design_test/utils/__init__.py b/app/service/design_test/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/service/design_test/utils/conversion_image.py b/app/service/design_test/utils/conversion_image.py new file mode 100644 index 0000000..11e39ae --- /dev/null +++ b/app/service/design_test/utils/conversion_image.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +""" +@Project :trinity_client +@File :conversion_image.py +@Author :周成融 +@Date :2023/8/21 10:40:29 +@detail : +""" +import numpy as np + + +# def rgb_to_rgba(rgb_size, rgb_image, mask): +# alpha_channel = np.full(rgb_size, 255, dtype=np.uint8) +# # 创建四通道的结果图像 +# rgba_image = np.dstack((rgb_image, alpha_channel)) +# alpha_channel = np.where(mask > 0, 255, 0) +# # 更新RGBA图像的透明度通道 +# rgba_image[:, :, 3] = alpha_channel +# return rgba_image + +def rgb_to_rgba(rgb_image, mask): + # 创建全透明的alpha通道 + alpha_channel = np.where(mask > 0, 255, 0).astype(np.uint8) + # 合并RGB图像和alpha通道 + rgba_image = np.dstack((rgb_image, alpha_channel)) + return rgba_image + + +if __name__ == '__main__': + image = open("") diff --git a/app/service/design_test/utils/design_ensemble.py b/app/service/design_test/utils/design_ensemble.py new file mode 100644 index 0000000..f4f6a34 --- /dev/null +++ 
b/app/service/design_test/utils/design_ensemble.py @@ -0,0 +1,143 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +""" +@Project :trinity_client +@File :design_ensemble.py +@Author :周成融 +@Date :2023/8/16 19:36:21 +@detail :发起请求 获取推理结果 +""" +import logging + +import cv2 +import mmcv +import numpy as np +import torch +import torch.nn.functional as F +import tritonclient.http as httpclient + +from app.core.config import * + +""" + keypoint + 预处理 推理 后处理 +""" + + +def keypoint_preprocess(img_path): + img = mmcv.imread(img_path) + img_scale = (256, 256) + h, w = img.shape[:2] + img = cv2.resize(img, img_scale) + w_scale = img_scale[0] / w + h_scale = img_scale[1] / h + img = mmcv.imnormalize(img, mean=np.array([123.675, 116.28, 103.53]), std=np.array([58.395, 57.12, 57.375]), to_rgb=True) + preprocessed_img = np.expand_dims(img.transpose(2, 0, 1), axis=0) + return preprocessed_img, (w_scale, h_scale) + + +# @ RunTime +# 推理 +def get_keypoint_result(image, site): + keypoint_result = None + try: + image, scale_factor = keypoint_preprocess(image) + client = httpclient.InferenceServerClient(url=DESIGN_MODEL_URL) + transformed_img = image.astype(np.float32) + inputs = [httpclient.InferInput(f"input", transformed_img.shape, datatype="FP32")] + inputs[0].set_data_from_numpy(transformed_img, binary_data=True) + outputs = [httpclient.InferRequestedOutput(f"output", binary_data=True)] + results = client.infer(model_name=f"keypoint_{site}_ocrnet_hr18", inputs=inputs, outputs=outputs) + inference_output = torch.from_numpy(results.as_numpy(f'output')) + keypoint_result = keypoint_postprocess(inference_output, scale_factor) + except Exception as e: + logging.warning(f"get_keypoint_result : {e}") + return keypoint_result + + +def keypoint_postprocess(output, scale_factor): + max_indices = torch.argmax(output.view(output.size(0), output.size(1), -1), dim=2).unsqueeze(dim=2) + max_coords = torch.cat((max_indices / output.size(3), max_indices % output.size(3)), dim=2) + segment_result = 
max_coords.numpy() + scale_factor = [1 / x for x in scale_factor[::-1]] + scale_matrix = np.diag(scale_factor) + nan = np.isinf(scale_matrix) + scale_matrix[nan] = 0 + return np.ceil(np.dot(segment_result, scale_matrix) * 4) + + +""" + seg + 预处理 推理 后处理 +""" + + +# KNet +def seg_preprocess(img_path): + img = mmcv.imread(img_path) + ori_shape = img.shape[:2] + img_scale_w, img_scale_h = ori_shape + if ori_shape[0] > 1024: + img_scale_w = 1024 + if ori_shape[1] > 1024: + img_scale_h = 1024 + # 如果图片size任意一边 大于 1024, 则会resize 成1024 + if ori_shape != (img_scale_w, img_scale_h): + # mmcv.imresize(img, img_scale_h, img_scale_w) # 老代码 引以为戒!哈哈哈~ h和w写反了 + img = cv2.resize(img, (img_scale_h, img_scale_w)) + img = mmcv.imnormalize(img, mean=np.array([123.675, 116.28, 103.53]), std=np.array([58.395, 57.12, 57.375]), to_rgb=True) + preprocessed_img = np.expand_dims(img.transpose(2, 0, 1), axis=0) + return preprocessed_img, ori_shape + + +# @ RunTime +def get_seg_result(image_id, image): + image, ori_shape = seg_preprocess(image) + client = httpclient.InferenceServerClient(url=f"{DESIGN_MODEL_URL}") + transformed_img = image.astype(np.float32) + # 输入集 + inputs = [ + httpclient.InferInput(SEGMENTATION['input'], transformed_img.shape, datatype="FP32") + ] + inputs[0].set_data_from_numpy(transformed_img, binary_data=True) + # 输出集 + outputs = [ + httpclient.InferRequestedOutput(SEGMENTATION['output'], binary_data=True), + ] + results = client.infer(model_name=SEGMENTATION['new_model_name'], inputs=inputs, outputs=outputs) + # 推理 + # 取结果 + inference_output1 = results.as_numpy(SEGMENTATION['output']) + seg_result = seg_postprocess(int(image_id), inference_output1, ori_shape) + return seg_result + + +# no cache +def seg_postprocess(image_id, output, ori_shape): + seg_logit = F.interpolate(torch.tensor(output).float(), size=ori_shape, scale_factor=None, mode='bilinear', align_corners=False) + seg_pred = seg_logit.cpu().numpy() + return seg_pred[0] + + +def key_point_show(image_path, 
key_point_result=None): + img = cv2.imread(image_path) + points_list = key_point_result + point_size = 1 + point_color = (0, 0, 255) # BGR + thickness = 4 # 可以为 0 、4、8 + for point in points_list: + cv2.circle(img, point[::-1], point_size, point_color, thickness) + cv2.imshow("0", img) + cv2.waitKey(0) + + +if __name__ == '__main__': + image = cv2.imread("9070101c-e5be-49b5-9602-4113a968969b.png") + a = get_keypoint_result(image, "up") + new_list = [] + print(list) + for i in a[0]: + new_list.append((int(i[0]), int(i[1]))) + key_point_show("9070101c-e5be-49b5-9602-4113a968969b.png", new_list) + # a = get_seg_result(1, image) + print(a) diff --git a/app/service/design_test/utils/redis_utils.py b/app/service/design_test/utils/redis_utils.py new file mode 100644 index 0000000..012fbe0 --- /dev/null +++ b/app/service/design_test/utils/redis_utils.py @@ -0,0 +1,99 @@ +import redis + +from app.core.config import REDIS_HOST, REDIS_PORT + + +class Redis(object): + """ + redis数据库操作 + """ + + @staticmethod + def _get_r(): + host = REDIS_HOST + port = REDIS_PORT + db = 0 + r = redis.StrictRedis(host, port, db) + return r + + @classmethod + def write(cls, key, value, expire=None): + """ + 写入键值对 + """ + # 判断是否有过期时间,没有就设置默认值 + if expire: + expire_in_seconds = expire + else: + expire_in_seconds = 100 + r = cls._get_r() + r.set(key, value, ex=expire_in_seconds) + + @classmethod + def read(cls, key): + """ + 读取键值对内容 + """ + r = cls._get_r() + value = r.get(key) + return value.decode('utf-8') if value else value + + @classmethod + def hset(cls, name, key, value): + """ + 写入hash表 + """ + r = cls._get_r() + r.hset(name, key, value) + + @classmethod + def hget(cls, name, key): + """ + 读取指定hash表的键值 + """ + r = cls._get_r() + value = r.hget(name, key) + return value.decode('utf-8') if value else value + + @classmethod + def hgetall(cls, name): + """ + 获取指定hash表所有的值 + """ + r = cls._get_r() + return r.hgetall(name) + + @classmethod + def delete(cls, *names): + """ + 删除一个或者多个 + """ + r = 
cls._get_r() + r.delete(*names) + + @classmethod + def hdel(cls, name, key): + """ + 删除指定hash表的键值 + """ + r = cls._get_r() + r.hdel(name, key) + + @classmethod + def expire(cls, name, expire=None): + """ + 设置过期时间 + """ + if expire: + expire_in_seconds = expire + else: + expire_in_seconds = 100 + r = cls._get_r() + r.expire(name, expire_in_seconds) + + +if __name__ == '__main__': + redis_client = Redis() + # print(redis_client.write(key="1230", value=0)) + redis_client.write(key="1230", value=10) + # print(redis_client.read(key="1230")) diff --git a/app/service/design_test/utils/synthesis_item.py b/app/service/design_test/utils/synthesis_item.py new file mode 100644 index 0000000..9527cd2 --- /dev/null +++ b/app/service/design_test/utils/synthesis_item.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +""" +@Project :trinity_client +@File :synthesis_item.py +@Author :周成融 +@Date :2023/8/26 14:13:04 +@detail : +""" +import io +import logging + +import cv2 +import numpy as np +from PIL import Image + +from app.service.utils.generate_uuid import generate_uuid +from app.service.utils.oss_client import oss_upload_image + + +def positioning(all_mask_shape, mask_shape, offset): + all_start = 0 + all_end = 0 + mask_start = 0 + mask_end = 0 + if offset == 0: + all_start = 0 + all_end = min(all_mask_shape, mask_shape) + + mask_start = 0 + mask_end = min(all_mask_shape, mask_shape) + elif offset > 0: + all_start = min(offset, all_mask_shape) + all_end = min(offset + mask_shape, all_mask_shape) + + mask_start = 0 + mask_end = 0 if offset > all_mask_shape else min(all_mask_shape - offset, mask_shape) + elif offset < 0: + if abs(offset) > mask_shape: + all_start = 0 + all_end = 0 + else: + all_start = 0 + if mask_shape - abs(offset) > all_mask_shape: + all_end = min(mask_shape - abs(offset), all_mask_shape) + else: + all_end = mask_shape - abs(offset) + + if abs(offset) > mask_shape: + mask_start = mask_shape + mask_end = mask_shape + else: + mask_start = 
abs(offset) + if mask_shape - abs(offset) >= all_mask_shape: + mask_end = all_mask_shape + abs(offset) + else: + mask_end = mask_shape + return all_start, all_end, mask_start, mask_end + + +# @RunTime +def synthesis(data, size, basic_info): + # 创建底图 + base_image = Image.new('RGBA', size, (0, 0, 0, 0)) + try: + all_mask_shape = (size[1], size[0]) + body_mask = None + for d in data: + if d['name'] == 'body' or d['name'] == 'mannequin': + # 创建一个新的宽高透明图像, 把模特贴上去获取mask + transparent_image = Image.new("RGBA", size, (0, 0, 0, 0)) + transparent_image.paste(d['image'], (d['adaptive_position'][1], d['adaptive_position'][0]), d['image']) # 此处可变数组会被paste篡改值,所以使用下标获取position + body_mask = np.array(transparent_image.split()[3]) + + # 根据新的坐标获取新的肩点 + left_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_left'], [d['adaptive_position'][1], d['adaptive_position'][0]])] + right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], [d['adaptive_position'][1], d['adaptive_position'][0]])] + body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 + _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) + top_outer_mask = np.array(binary_body_mask) + bottom_outer_mask = np.array(binary_body_mask) + + top = True + bottom = True + i = len(data) + while i: + i -= 1 + if top and data[i]['name'] in ["blouse_front", "outwear_front", "dress_front", "tops_front"]: + top = False + mask_shape = data[i]['mask'].shape + y_offset, x_offset = data[i]['adaptive_position'] + # 初始化叠加区域的起始和结束位置 + all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) + all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) + # 将叠加区域赋值为相应的像素值 + _, sketch_mask = cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) + background = 
np.zeros_like(top_outer_mask) + background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] + top_outer_mask = background + top_outer_mask + elif bottom and data[i]['name'] in ["trousers_front", "skirt_front", "bottoms_front", "dress_front"]: + bottom = False + mask_shape = data[i]['mask'].shape + y_offset, x_offset = data[i]['adaptive_position'] + # 初始化叠加区域的起始和结束位置 + all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) + all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) + # 将叠加区域赋值为相应的像素值 + _, sketch_mask = cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) + background = np.zeros_like(top_outer_mask) + background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] + bottom_outer_mask = background + bottom_outer_mask + elif bottom is False and top is False: + break + + all_mask = cv2.bitwise_or(top_outer_mask, bottom_outer_mask) + + for layer in data: + if layer['image'] is not None: + if layer['name'] != "body": + test_image = Image.new('RGBA', size, (0, 0, 0, 0)) + test_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) + mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) + mask_alpha = Image.fromarray(mask_data) + cropped_image = Image.composite(test_image, Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) + base_image.paste(test_image, (0, 0), cropped_image) # test_image 已经按照坐标贴到最大宽值的图片上 坐着这里坐标为00 + else: + base_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) + + result_image = base_image + + image_data = io.BytesIO() + result_image.save(image_data, format='PNG') + image_data.seek(0) + + # oss upload + image_bytes = 
image_data.read() + bucket_name = "aida-results" + object_name = f'result_{generate_uuid()}.png' + req = oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) + return f"{bucket_name}/{object_name}" + # return f"aida-results/{minio_client.put_object('aida-results', f'result_{generate_uuid()}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" + + # object_name = f'result_{generate_uuid()}.png' + # response = s3.put_object(Bucket="aida-results", Key=object_name, Body=data, ContentType='image/png') + # object_url = f"aida-results/{object_name}" + # if response['ResponseMetadata']['HTTPStatusCode'] == 200: + # return object_url + # else: + # return "" + + except Exception as e: + logging.warning(f"synthesis runtime exception : {e}") + + +def synthesis_single(front_image, back_image): + result_image = None + if front_image: + result_image = front_image + if back_image: + result_image.paste(back_image, (0, 0), back_image) + + # with io.BytesIO() as output: + # result_image.save(output, format='PNG') + # data = output.getvalue() + # object_name = f'result_{generate_uuid()}.png' + # response = s3.put_object(Bucket="aida-results", Key=object_name, Body=data, ContentType='image/png') + # object_url = f"aida-results/{object_name}" + # if response['ResponseMetadata']['HTTPStatusCode'] == 200: + # return object_url + # else: + # return "" + image_data = io.BytesIO() + result_image.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + # return f"aida-results/{minio_client.put_object('aida-results', f'result_{generate_uuid()}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" + # oss upload + bucket_name = 'aida-results' + object_name = f'result_{generate_uuid()}.png' + req = oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) + return f"{bucket_name}/{object_name}" diff --git 
a/app/service/design_test/utils/upload_image.py b/app/service/design_test/utils/upload_image.py new file mode 100644 index 0000000..9039ce7 --- /dev/null +++ b/app/service/design_test/utils/upload_image.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +""" +@Project :trinity_client +@File :upload_image.py +@Author :周成融 +@Date :2023/8/28 13:49:20 +@detail : +""" +import io +import logging + +import cv2 + +from app.core.config import * +from app.service.utils.decorator import RunTime +from app.service.utils.oss_client import oss_upload_image + + +# @RunTime +def upload_png_mask(front_image, object_name, mask=None): + try: + mask_url = None + if mask is not None: + mask_inverted = cv2.bitwise_not(mask) + # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 + rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) + rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] + # image_bytes = io.BytesIO() + # image_bytes.write(cv2.imencode('.png', rgba_image)[1].tobytes()) + # image_bytes.seek(0) + # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), content_type='image/png').object_name}" + # oss upload #################### + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) + mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" + + image_data = io.BytesIO() + front_image.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) + image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" + return front_image, image_url, mask_url + except Exception as e: + 
logging.warning(f"upload_png_mask runtime exception : {e}") + + +# @RunTime +# def upload_png_mask(front_image, object_name, mask=None): +# mask_url = None +# if mask is not None: +# mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" +# image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" +# return front_image, image_url, mask_url diff --git a/app/service/utils/oss_client.py b/app/service/utils/oss_client.py index 65ce3a2..28015e9 100644 --- a/app/service/utils/oss_client.py +++ b/app/service/utils/oss_client.py @@ -2,7 +2,6 @@ import io import logging from io import BytesIO -import boto3 import cv2 import numpy as np import urllib3 @@ -11,6 +10,8 @@ from minio import Minio from app.core.config import * +minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + # 自定义 Retry 类 class CustomRetry(urllib3.Retry): @@ -38,16 +39,11 @@ http_client = urllib3.PoolManager( # 获取图片 -def oss_get_image(bucket, object_name, data_type): +def oss_get_image(oss_client, bucket, object_name, data_type): # cv2 默认全通道读取 image_object = None try: - if OSS == "minio": - oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE, http_client=http_client) - image_data = oss_client.get_object(bucket_name=bucket, object_name=object_name) - else: - oss_client = boto3.client('s3', aws_access_key_id=S3_ACCESS_KEY, aws_secret_access_key=S3_AWS_SECRET_ACCESS_KEY, region_name=S3_REGION_NAME) - image_data = oss_client.get_object(Bucket=bucket, Key=object_name)['Body'] + image_data = oss_client.get_object(bucket_name=bucket, object_name=object_name) if data_type == "cv2": image_bytes = image_data.read() image_array = np.frombuffer(image_bytes, np.uint8) # 转成8位无符号整型 @@ -62,15 +58,10 @@ def oss_get_image(bucket, object_name, data_type): return image_object -def oss_upload_image(bucket, object_name, image_bytes): +def oss_upload_image(oss_client, bucket, object_name, image_bytes): req = None try: - if OSS 
== "minio": - oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) - req = oss_client.put_object(bucket_name=bucket, object_name=object_name, data=io.BytesIO(image_bytes), length=len(image_bytes), content_type='image/png') - else: - oss_client = boto3.client('s3', aws_access_key_id=S3_ACCESS_KEY, aws_secret_access_key=S3_AWS_SECRET_ACCESS_KEY, region_name=S3_REGION_NAME) - req = oss_client.put_object(Bucket=bucket, Key=object_name, Body=io.BytesIO(image_bytes), ContentType='image/png') + req = oss_client.put_object(bucket_name=bucket, object_name=object_name, data=io.BytesIO(image_bytes), length=len(image_bytes), content_type='image/png') except Exception as e: logger.warning(f"{OSS} | 上传图片出现异常 ######: {e}") return req @@ -88,13 +79,13 @@ if __name__ == '__main__': # url = "aida-users/89/sketchboard/female/Dress/e6724ab7-8d3f-4677-abe0-c3e42ab7af85.jpeg" # url = "aida-users/87/print/956614a2-7e75-4fbe-9ed0-c1831e37a2c9-4-87.png" # url = "aida-users/89/single_logo/123-89.png" - url = "aida-clothing/mask/mask_f354afb5-6423-11ef-8b08-0826ae3ad6b3.png" + url = "aida-users/31/sketchboard/female/dress/6edcbf92-7da9-4809-a0a8-a4b4f06dec1e0628000041.jpg" # url = "aida-collection-element/12148/Sketchboard/95ea577b-305b-4a62-b30a-39c0dd3ddb3f.png" read_type = "cv2" if read_type == "cv2": - img = oss_get_image(bucket=url.split('/')[0], object_name=url[url.find('/') + 1:], data_type=read_type) + img = oss_get_image(oss_client=minio_client, bucket=url.split('/')[0], object_name=url[url.find('/') + 1:], data_type=read_type) cv2.imshow("", img) cv2.waitKey(0) else: - img = oss_get_image(bucket=url.split('/')[0], object_name=url[url.find('/') + 1:], data_type=read_type) + img = oss_get_image(oss_client=minio_client, bucket=url.split('/')[0], object_name=url[url.find('/') + 1:], data_type=read_type) img.show() From 19dc9e2739e6247e00e2064eb5c237d0c23f55de Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 19 Sep 2024 14:26:55 
+0800 Subject: [PATCH 069/103] =?UTF-8?q?feat=20=20design=20=20=E6=8F=90?= =?UTF-8?q?=E9=80=9F=E6=B5=8B=E8=AF=95=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/core/config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/core/config.py b/app/core/config.py index 97098b0..35c12b7 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -20,7 +20,7 @@ class Settings(BaseSettings): OSS = "minio" -DEBUG = True +DEBUG = False if DEBUG: LOGS_PATH = "logs/" CATEGORY_PATH = "service/attribute/config/descriptor/category/category_dis.csv" @@ -50,7 +50,7 @@ S3_REGION_NAME = "ap-east-1" # redis 配置 REDIS_HOST = "10.1.1.240" REDIS_PORT = "6379" -REDIS_DB = "10" +REDIS_DB = "2" # rabbitmq config RABBITMQ_PARAMS = { From 2038cf6ba324f745b8be8408e3d836bf73038289 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 19 Sep 2024 14:28:33 +0800 Subject: [PATCH 070/103] =?UTF-8?q?feat=20=20design=20=20=E6=8F=90?= =?UTF-8?q?=E9=80=9F=E6=B5=8B=E8=AF=95=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- requirements.txt | Bin 1832 -> 1856 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/requirements.txt b/requirements.txt index 1bfec2b331173f5c059f68262d4358586f8f04d1..a0d17d948549739e7724a3932da8fcfe3ada08be 100644 GIT binary patch delta 36 ocmZ3%cYtrhELQPkhE#?eh9ZVa23sIBWzb_V0b+yAGg Date: Thu, 19 Sep 2024 14:30:20 +0800 Subject: [PATCH 071/103] =?UTF-8?q?feat=20=20design=20=20=E6=8F=90?= =?UTF-8?q?=E9=80=9F=E6=B5=8B=E8=AF=95=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- requirements.txt | Bin 1856 -> 1842 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/requirements.txt b/requirements.txt index a0d17d948549739e7724a3932da8fcfe3ada08be..21dbcb5b32cf478aad263cb15d648e0c87fc4c31 100644 GIT binary patch delta 12 TcmX@Ww~24V64uR|So;_OBBcc3 delta 26 
gcmdnQcYtrh5>{SY23rPG20aE7AU4>%n6;k~0A02QPXGV_ From fabe64785ef244054435346a4c75cb66696ec595 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 19 Sep 2024 15:10:50 +0800 Subject: [PATCH 072/103] =?UTF-8?q?feat=20=20design=20=20=E6=8F=90?= =?UTF-8?q?=E9=80=9F=E6=B5=8B=E8=AF=95=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_design.py | 3 +- app/service/design/utils/upload_image.py | 20 +--- app/service/design_test/batch_design.py | 8 +- app/service/design_test/pipeline/color.py | 2 +- app/service/design_test/pipeline/loading.py | 2 +- .../design_test/pipeline/print_painting.py | 2 +- .../design_test/pipeline/segmentation.py | 2 +- app/service/design_test/pipeline/split.py | 4 +- app/service/design_test/utils/upload_image.py | 24 +---- app/service/utils/new_oss_client.py | 91 ++++++++++++++++++ app/service/utils/oss_client.py | 27 ++++-- requirements.txt | Bin 1842 -> 1828 bytes 12 files changed, 129 insertions(+), 56 deletions(-) create mode 100644 app/service/utils/new_oss_client.py diff --git a/app/api/api_design.py b/app/api/api_design.py index 4db9fc2..ba8f04d 100644 --- a/app/api/api_design.py +++ b/app/api/api_design.py @@ -7,6 +7,7 @@ from fastapi import APIRouter, HTTPException, UploadFile, File, Form from app.schemas.design import DesignModel, DesignProgressModel, ModelProgressModel, DBGConfigModel from app.schemas.response_template import ResponseModel from app.service.design.model_process_service import model_transpose +from app.service.design.service import generate from app.service.design.service_design_batch_generate import start_design_batch_generate from app.service.design.utils.redis_utils import Redis from app.service.design_test.batch_design import design_generate @@ -183,7 +184,7 @@ def design(request_data: DesignModel): # logger.info(f"design request item is : @@@@@@:{json.dumps(request_data.dict())}") # data = generate(request_data=request_data) # logger.info(f"design 
response @@@@@@:{json.dumps(data)}") - + # logger.info(f"design request item is : @@@@@@:{json.dumps(request_data.dict())}") data = design_generate(request_data=request_data) logger.info(f"design response @@@@@@:{json.dumps(data)}") diff --git a/app/service/design/utils/upload_image.py b/app/service/design/utils/upload_image.py index 8d20061..388f8b8 100644 --- a/app/service/design/utils/upload_image.py +++ b/app/service/design/utils/upload_image.py @@ -17,7 +17,7 @@ from app.service.utils.oss_client import oss_upload_image # @RunTime -def upload_png_mask(minio_client, front_image, object_name, mask=None): +def upload_png_mask(front_image, object_name, mask=None): try: mask_url = None if mask is not None: @@ -25,29 +25,15 @@ def upload_png_mask(minio_client, front_image, object_name, mask=None): # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] - # image_bytes = io.BytesIO() - # image_bytes.write(cv2.imencode('.png', rgba_image)[1].tobytes()) - # image_bytes.seek(0) - # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), content_type='image/png').object_name}" - # oss upload #################### - req = oss_upload_image(oss_client=minio_client, bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" image_data = io.BytesIO() front_image.save(image_data, format='PNG') image_data.seek(0) image_bytes = image_data.read() - # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" - req = 
oss_upload_image(oss_client=minio_client, bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) + req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" return front_image, image_url, mask_url except Exception as e: logging.warning(f"upload_png_mask runtime exception : {e}") - -# @RunTime -# def upload_png_mask(front_image, object_name, mask=None): -# mask_url = None -# if mask is not None: -# mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" -# image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" -# return front_image, image_url, mask_url diff --git a/app/service/design_test/batch_design.py b/app/service/design_test/batch_design.py index 5a07429..27846cb 100644 --- a/app/service/design_test/batch_design.py +++ b/app/service/design_test/batch_design.py @@ -13,7 +13,8 @@ from minio import Minio from app.core.config import PRIORITY_DICT from app.service.design.utils.redis_utils import Redis from app.service.design_test.item import BodyItem, TopItem, BottomItem -from app.service.utils.oss_client import oss_upload_image +from app.service.utils.decorator import RunTime +from app.service.utils.new_oss_client import oss_upload_image id_lock = threading.Lock() @@ -298,10 +299,11 @@ def synthesis(data, size, basic_info): logging.warning(f"synthesis runtime exception : {e}") +@RunTime def design_generate(request_data): objects_data = request_data.dict()['objects'] process_id = request_data.dict()['process_id'] - object_response = [] + object_response = {} threads = [] active_threads = 0 lock = threading.Lock() @@ -362,7 +364,7 @@ def design_generate(request_data): update_progress(process_id, total) with lock: - object_response.append(items_response) + object_response[step] = items_response active_threads -= 1 for step, object in enumerate(objects_data): diff --git 
a/app/service/design_test/pipeline/color.py b/app/service/design_test/pipeline/color.py index d065aba..546c671 100644 --- a/app/service/design_test/pipeline/color.py +++ b/app/service/design_test/pipeline/color.py @@ -3,7 +3,7 @@ import logging import cv2 import numpy as np -from app.service.utils.oss_client import oss_get_image +from app.service.utils.new_oss_client import oss_get_image logger = logging.getLogger() diff --git a/app/service/design_test/pipeline/loading.py b/app/service/design_test/pipeline/loading.py index 7175881..0ce0dfa 100644 --- a/app/service/design_test/pipeline/loading.py +++ b/app/service/design_test/pipeline/loading.py @@ -5,7 +5,7 @@ import cv2 import numpy as np from PIL import Image -from app.service.utils.oss_client import oss_get_image +from app.service.utils.new_oss_client import oss_get_image logger = logging.getLogger() diff --git a/app/service/design_test/pipeline/print_painting.py b/app/service/design_test/pipeline/print_painting.py index 4a85399..6fe40d8 100644 --- a/app/service/design_test/pipeline/print_painting.py +++ b/app/service/design_test/pipeline/print_painting.py @@ -4,7 +4,7 @@ import cv2 import numpy as np from PIL import Image -from app.service.utils.oss_client import oss_get_image +from app.service.utils.new_oss_client import oss_get_image class PrintPainting: diff --git a/app/service/design_test/pipeline/segmentation.py b/app/service/design_test/pipeline/segmentation.py index 156742f..5c248b2 100644 --- a/app/service/design_test/pipeline/segmentation.py +++ b/app/service/design_test/pipeline/segmentation.py @@ -6,7 +6,7 @@ import numpy as np from app.core.config import SEG_CACHE_PATH from app.service.design.utils.design_ensemble import get_seg_result -from app.service.utils.oss_client import oss_get_image +from app.service.utils.new_oss_client import oss_get_image logger = logging.getLogger() diff --git a/app/service/design_test/pipeline/split.py b/app/service/design_test/pipeline/split.py index 1fa4215..50e167d 
100644 --- a/app/service/design_test/pipeline/split.py +++ b/app/service/design_test/pipeline/split.py @@ -8,9 +8,9 @@ from cv2 import cvtColor, COLOR_BGR2RGBA from app.core.config import AIDA_CLOTHING from app.service.design.utils.conversion_image import rgb_to_rgba -from app.service.design.utils.upload_image import upload_png_mask +from app.service.design_test.utils.upload_image import upload_png_mask from app.service.utils.generate_uuid import generate_uuid -from app.service.utils.oss_client import oss_upload_image +from app.service.utils.new_oss_client import oss_upload_image class Split(object): diff --git a/app/service/design_test/utils/upload_image.py b/app/service/design_test/utils/upload_image.py index 9039ce7..2c79f9f 100644 --- a/app/service/design_test/utils/upload_image.py +++ b/app/service/design_test/utils/upload_image.py @@ -13,12 +13,11 @@ import logging import cv2 from app.core.config import * -from app.service.utils.decorator import RunTime -from app.service.utils.oss_client import oss_upload_image +from app.service.utils.new_oss_client import oss_upload_image # @RunTime -def upload_png_mask(front_image, object_name, mask=None): +def upload_png_mask(minio_client, front_image, object_name, mask=None): try: mask_url = None if mask is not None: @@ -26,30 +25,15 @@ def upload_png_mask(front_image, object_name, mask=None): # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] - # image_bytes = io.BytesIO() - # image_bytes.write(cv2.imencode('.png', rgba_image)[1].tobytes()) - # image_bytes.seek(0) - # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), content_type='image/png').object_name}" - # oss upload #################### - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) + req = 
oss_upload_image(oss_client=minio_client, bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" image_data = io.BytesIO() front_image.save(image_data, format='PNG') image_data.seek(0) image_bytes = image_data.read() - # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) + req = oss_upload_image(oss_client=minio_client, bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" return front_image, image_url, mask_url except Exception as e: logging.warning(f"upload_png_mask runtime exception : {e}") - - -# @RunTime -# def upload_png_mask(front_image, object_name, mask=None): -# mask_url = None -# if mask is not None: -# mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" -# image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" -# return front_image, image_url, mask_url diff --git a/app/service/utils/new_oss_client.py b/app/service/utils/new_oss_client.py new file mode 100644 index 0000000..28015e9 --- /dev/null +++ b/app/service/utils/new_oss_client.py @@ -0,0 +1,91 @@ +import io +import logging +from io import BytesIO + +import cv2 +import numpy as np +import urllib3 +from PIL import Image +from minio import Minio + +from app.core.config import * + +minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + + +# 自定义 Retry 类 +class CustomRetry(urllib3.Retry): + def increment(self, method=None, url=None, response=None, error=None, **kwargs): + # 调用父类的 increment 方法 + new_retry = super(CustomRetry, self).increment(method, url, response, error, 
**kwargs) + # 打印重试信息 + logger.info(f"重试连接: {method} {url},错误: {error},重试次数: {self.total - new_retry.total}") + return new_retry + + +logger = logging.getLogger() +timeout = urllib3.Timeout(connect=1, read=10.0) # 连接超时 5 秒,读取超时 10 秒 +http_client = urllib3.PoolManager( + num_pools=10, # 设置连接池大小 + maxsize=10, + timeout=timeout, + cert_reqs='CERT_REQUIRED', # 需要证书验证 + retries=CustomRetry( + total=5, + backoff_factor=0.2, + status_forcelist=[500, 502, 503, 504], + ), +) + + +# 获取图片 +def oss_get_image(oss_client, bucket, object_name, data_type): + # cv2 默认全通道读取 + image_object = None + try: + image_data = oss_client.get_object(bucket_name=bucket, object_name=object_name) + if data_type == "cv2": + image_bytes = image_data.read() + image_array = np.frombuffer(image_bytes, np.uint8) # 转成8位无符号整型 + image_object = cv2.imdecode(image_array, cv2.IMREAD_UNCHANGED) + if image_object.dtype == np.uint16: + image_object = (image_object / 256).astype('uint8') + else: + data_bytes = BytesIO(image_data.read()) + image_object = Image.open(data_bytes) + except Exception as e: + logger.warning(f"{OSS} | 获取图片出现异常 ######: {e}") + return image_object + + +def oss_upload_image(oss_client, bucket, object_name, image_bytes): + req = None + try: + req = oss_client.put_object(bucket_name=bucket, object_name=object_name, data=io.BytesIO(image_bytes), length=len(image_bytes), content_type='image/png') + except Exception as e: + logger.warning(f"{OSS} | 上传图片出现异常 ######: {e}") + return req + + +if __name__ == '__main__': + # url = "aida-results/result_0002186a-e631-11ee-86a6-b48351119060.png" + # url = "aida-collection-element/11523/Moodboard/f60af0d2-94c2-48f9-90ff-74b8e8a481b5.jpg" + # url = "aida-sys-image/images/female/outwear/0628000054.jpg" + # url = "aida-users/89/product_image/string-89.png" + # url = "test/845046c7-4f62-4f54-a4a9-c26d49c6969335b5b3a9-d335-4871-a46c-3cc3caf07da259629dfd1f1f555a2e2a9def7e719366.png" + # url = 'aida-users/89/relight_image/123-89.png' + # url = 
'aida-users/89/relight_image/123-89.png' + # url = 'aida-users/89/relight_image/123-89.png' + # url = "aida-users/89/sketchboard/female/Dress/e6724ab7-8d3f-4677-abe0-c3e42ab7af85.jpeg" + # url = "aida-users/87/print/956614a2-7e75-4fbe-9ed0-c1831e37a2c9-4-87.png" + # url = "aida-users/89/single_logo/123-89.png" + url = "aida-users/31/sketchboard/female/dress/6edcbf92-7da9-4809-a0a8-a4b4f06dec1e0628000041.jpg" + # url = "aida-collection-element/12148/Sketchboard/95ea577b-305b-4a62-b30a-39c0dd3ddb3f.png" + read_type = "cv2" + if read_type == "cv2": + img = oss_get_image(oss_client=minio_client, bucket=url.split('/')[0], object_name=url[url.find('/') + 1:], data_type=read_type) + cv2.imshow("", img) + cv2.waitKey(0) + else: + img = oss_get_image(oss_client=minio_client, bucket=url.split('/')[0], object_name=url[url.find('/') + 1:], data_type=read_type) + img.show() diff --git a/app/service/utils/oss_client.py b/app/service/utils/oss_client.py index 28015e9..65ce3a2 100644 --- a/app/service/utils/oss_client.py +++ b/app/service/utils/oss_client.py @@ -2,6 +2,7 @@ import io import logging from io import BytesIO +import boto3 import cv2 import numpy as np import urllib3 @@ -10,8 +11,6 @@ from minio import Minio from app.core.config import * -minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) - # 自定义 Retry 类 class CustomRetry(urllib3.Retry): @@ -39,11 +38,16 @@ http_client = urllib3.PoolManager( # 获取图片 -def oss_get_image(oss_client, bucket, object_name, data_type): +def oss_get_image(bucket, object_name, data_type): # cv2 默认全通道读取 image_object = None try: - image_data = oss_client.get_object(bucket_name=bucket, object_name=object_name) + if OSS == "minio": + oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE, http_client=http_client) + image_data = oss_client.get_object(bucket_name=bucket, object_name=object_name) + else: + oss_client = boto3.client('s3', 
aws_access_key_id=S3_ACCESS_KEY, aws_secret_access_key=S3_AWS_SECRET_ACCESS_KEY, region_name=S3_REGION_NAME) + image_data = oss_client.get_object(Bucket=bucket, Key=object_name)['Body'] if data_type == "cv2": image_bytes = image_data.read() image_array = np.frombuffer(image_bytes, np.uint8) # 转成8位无符号整型 @@ -58,10 +62,15 @@ def oss_get_image(oss_client, bucket, object_name, data_type): return image_object -def oss_upload_image(oss_client, bucket, object_name, image_bytes): +def oss_upload_image(bucket, object_name, image_bytes): req = None try: - req = oss_client.put_object(bucket_name=bucket, object_name=object_name, data=io.BytesIO(image_bytes), length=len(image_bytes), content_type='image/png') + if OSS == "minio": + oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + req = oss_client.put_object(bucket_name=bucket, object_name=object_name, data=io.BytesIO(image_bytes), length=len(image_bytes), content_type='image/png') + else: + oss_client = boto3.client('s3', aws_access_key_id=S3_ACCESS_KEY, aws_secret_access_key=S3_AWS_SECRET_ACCESS_KEY, region_name=S3_REGION_NAME) + req = oss_client.put_object(Bucket=bucket, Key=object_name, Body=io.BytesIO(image_bytes), ContentType='image/png') except Exception as e: logger.warning(f"{OSS} | 上传图片出现异常 ######: {e}") return req @@ -79,13 +88,13 @@ if __name__ == '__main__': # url = "aida-users/89/sketchboard/female/Dress/e6724ab7-8d3f-4677-abe0-c3e42ab7af85.jpeg" # url = "aida-users/87/print/956614a2-7e75-4fbe-9ed0-c1831e37a2c9-4-87.png" # url = "aida-users/89/single_logo/123-89.png" - url = "aida-users/31/sketchboard/female/dress/6edcbf92-7da9-4809-a0a8-a4b4f06dec1e0628000041.jpg" + url = "aida-clothing/mask/mask_f354afb5-6423-11ef-8b08-0826ae3ad6b3.png" # url = "aida-collection-element/12148/Sketchboard/95ea577b-305b-4a62-b30a-39c0dd3ddb3f.png" read_type = "cv2" if read_type == "cv2": - img = oss_get_image(oss_client=minio_client, bucket=url.split('/')[0], 
object_name=url[url.find('/') + 1:], data_type=read_type) + img = oss_get_image(bucket=url.split('/')[0], object_name=url[url.find('/') + 1:], data_type=read_type) cv2.imshow("", img) cv2.waitKey(0) else: - img = oss_get_image(oss_client=minio_client, bucket=url.split('/')[0], object_name=url[url.find('/') + 1:], data_type=read_type) + img = oss_get_image(bucket=url.split('/')[0], object_name=url[url.find('/') + 1:], data_type=read_type) img.show() diff --git a/requirements.txt b/requirements.txt index 21dbcb5b32cf478aad263cb15d648e0c87fc4c31..6c9e38f1ded86de71e2126d5c357903ea0d08a05 100644 GIT binary patch delta 12 TcmdnQw}fxQEY{5{Si2YjAo~QZ delta 22 dcmZ3&w~24VELP5BhE#?eh9ZW_%~M(X839pz2FCyZ From ac6a21d8c080314587c9f08e4d3c1a7b8470971d Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 19 Sep 2024 15:37:56 +0800 Subject: [PATCH 073/103] =?UTF-8?q?feat=20=20design=20=20=E6=8F=90?= =?UTF-8?q?=E9=80=9F=E6=B5=8B=E8=AF=95=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_test/batch_design.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/service/design_test/batch_design.py b/app/service/design_test/batch_design.py index 27846cb..1d0fe99 100644 --- a/app/service/design_test/batch_design.py +++ b/app/service/design_test/batch_design.py @@ -327,7 +327,7 @@ def design_generate(request_data): for lay in layers: items_response['layers'].append({ - 'image_category': lay['name'], + 'image_category': "body" if lay['name'] == 'mannequin' else lay['name'], 'position': lay['position'], 'priority': lay.get("priority", None), 'resize_scale': lay['resize_scale'] if "resize_scale" in lay.keys() else None, From 1385fde9ced352085f0c7434acdc5a4d1474d045 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 20 Sep 2024 16:27:26 +0800 Subject: [PATCH 074/103] =?UTF-8?q?feat=20=20design=20=20=E6=8F=90?= =?UTF-8?q?=E9=80=9F=E6=B5=8B=E8=AF=95=20fix?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/items/pipelines/painting.py | 1 - app/service/design/items/pipelines/segmentation.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/app/service/design/items/pipelines/painting.py b/app/service/design/items/pipelines/painting.py index c3c496d..993697c 100644 --- a/app/service/design/items/pipelines/painting.py +++ b/app/service/design/items/pipelines/painting.py @@ -30,7 +30,6 @@ class Painting(object): resize_pattern = cv2.resize(pattern, (dim_image_w, dim_image_h), interpolation=cv2.INTER_AREA) closed_mo = np.expand_dims(result['mask'], axis=2).repeat(3, axis=2) gray_mo = np.expand_dims(result['gray'], axis=2).repeat(3, axis=2) - logger.info(f"image id is :{result['image_id']}") get_image_fir = resize_pattern * (closed_mo / 255) * (gray_mo / 255) result['pattern_image'] = get_image_fir.astype(np.uint8) result['final_image'] = result['pattern_image'] diff --git a/app/service/design/items/pipelines/segmentation.py b/app/service/design/items/pipelines/segmentation.py index e7f09ed..c6c7b15 100644 --- a/app/service/design/items/pipelines/segmentation.py +++ b/app/service/design/items/pipelines/segmentation.py @@ -5,6 +5,7 @@ import cv2 import numpy as np from app.core.config import SEG_CACHE_PATH +from app.service.utils.decorator import ClassCallRunTime from app.service.utils.oss_client import oss_get_image from ..builder import PIPELINES from ...utils.design_ensemble import get_seg_result @@ -15,7 +16,7 @@ logger = logging.getLogger() @PIPELINES.register_module() class Segmentation(object): - # @ClassCallRunTime + @ClassCallRunTime def __call__(self, result): if "seg_mask_url" in result.keys() and result['seg_mask_url'] != "": seg_mask = oss_get_image(bucket=result['seg_mask_url'].split('/')[0], object_name=result['seg_mask_url'][result['seg_mask_url'].find('/') + 1:], data_type="cv2") @@ -59,7 +60,6 @@ class Segmentation(object): @staticmethod def 
load_seg_result(image_id): file_path = f"{SEG_CACHE_PATH}{image_id}.npy" - logger.info(f"load seg file name is :{SEG_CACHE_PATH}{image_id}.npy") try: seg_result = np.load(file_path) return True, seg_result From 2e07bc2de948ecb003354308d7a9b2f0d0b4e872 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 20 Sep 2024 17:03:48 +0800 Subject: [PATCH 075/103] =?UTF-8?q?feat=20=20image2sketch=20=E6=96=B0?= =?UTF-8?q?=E5=A2=9E=E9=A3=8E=E6=A0=BC=E4=B8=8A=E4=BC=A0=20=E8=87=AA?= =?UTF-8?q?=E5=AE=9A=E4=B9=89=E9=A3=8E=E6=A0=BC=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_image2sketch.py | 10 +++++++--- app/core/config.py | 2 +- app/schemas/image2sketch.py | 2 ++ .../datasets/ref_unpair/testC/style_1.jpg | Bin 0 -> 103844 bytes ...{20180422151845_stEe4.jpeg => style_2.jpeg} | Bin .../datasets/ref_unpair/testC/style_3.png | Bin 0 -> 58049 bytes app/service/image2sketch/infer.py | 4 ++-- app/service/image2sketch/opt.py | 8 ++++++-- app/service/image2sketch/server.py | 11 ++++++++++- 9 files changed, 28 insertions(+), 9 deletions(-) create mode 100644 app/service/image2sketch/datasets/ref_unpair/testC/style_1.jpg rename app/service/image2sketch/datasets/ref_unpair/testC/{20180422151845_stEe4.jpeg => style_2.jpeg} (100%) create mode 100644 app/service/image2sketch/datasets/ref_unpair/testC/style_3.png diff --git a/app/api/api_image2sketch.py b/app/api/api_image2sketch.py index 5d15daa..cf8df13 100644 --- a/app/api/api_image2sketch.py +++ b/app/api/api_image2sketch.py @@ -17,14 +17,18 @@ def image2sketch(request_item: Image2SketchModel): """ 创建一个具有以下参数的请求体: - **image_url**: 提取图片url + - **style_image_url**: 被模仿sketch图片url + - **default_style**: 默认风格 粗1,、中2、细3 - **sketch_bucket**: sketch保存的bucket - **sketch_name**: sketch保存的object name 示例参数: { - "image_url": "test/real_Top_971fe3085a69f31f3e66c225eabb0eea.jpg_Img.jpg", - "sketch_bucket": "test", - "sketch_name": "12341556-89.jpg" + "image_url": 
"test/image2sketch/real_Dress_3200fecdc83d0c556c2bd96aedbd7fbf.jpg_Img.jpg", + "style_image_url": "test/image2sketch/style_3.png", + "default_style": "1", + "sketch_bucket": "test", + "sketch_name": "image2sketch/test.png" } """ try: diff --git a/app/core/config.py b/app/core/config.py index 35c12b7..2e4d7bd 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -20,7 +20,7 @@ class Settings(BaseSettings): OSS = "minio" -DEBUG = False +DEBUG = True if DEBUG: LOGS_PATH = "logs/" CATEGORY_PATH = "service/attribute/config/descriptor/category/category_dis.csv" diff --git a/app/schemas/image2sketch.py b/app/schemas/image2sketch.py index a124739..b4650b9 100644 --- a/app/schemas/image2sketch.py +++ b/app/schemas/image2sketch.py @@ -3,5 +3,7 @@ from pydantic import BaseModel class Image2SketchModel(BaseModel): image_url: str + style_image_url: str + default_style: str sketch_bucket: str sketch_name: str diff --git a/app/service/image2sketch/datasets/ref_unpair/testC/style_1.jpg b/app/service/image2sketch/datasets/ref_unpair/testC/style_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..3a66b7fe120e3c05793b4f99ab9281b51e4b1d21 GIT binary patch literal 103844 zcmc$`2UHVn)Gj*IPz}99R8UYvdM^P{iU^1Wkg6iRNe>bdihy(h6-6W{ARuC-OOep4 zO7As~8;UD+$c8fMn)nR(y6_wzhEr{kws!0QIg-30(l zO@Ye*05Ab`GztJMb&H2;3UTfLJ@uQ0Y6di10NuZA|MM4hD9FjHX~?N+$g7`Kkke35 z)R0pF0NPx-e?5ZED3|tMw`uMEIpTC4;J9gEYyi+uPeJp~Sx=_`eSnUZ_MZ=R&{IDQ zObiV4^b9N@5F-;C3mY3N3o9%887>a?Gn{8wSvmMPIJtRvd3o8u`~rMD0$e=2JpY`8 zhK_m+Jp(fX12YdhD?88s^f+w-IGN~-Xzl1|&I7cZG<2Lar)>a)dYue3|1$n@;GaQ5 zOT9)$5EC;CEA=0$H~?B2Iyzckx;LDs-~`?fBlAmp^@>;yZ7!}T3J7^aeCtH z;_Bw^5fB&@90Ch{7X3UXHtt1yLVCul*Kac4zRUWQmtXL?u&DS;c|~PabdFAiw8ewO5Z=ZNTIz0NvFBp5dH4BbS~9=#d}yd4)(Oo@;3z%bJFKcVfVIMavK``MGn?u=@^HqkW9Qyw48;5fe2p3;J!e@dR^{CXnI?KByoQ}(Ix2BiF;Gu-)C&e@YMRbv4IO6F`f)?ZLJ zCOe;YY|R9tu*IMay@fYsNOx4?-=Bn1NwewadmZKhS-{a`0;#^kv3$GJmyV=qJ**>) z7(mF4Y3Y0W42`cgUQ8KX6E1Xs=Sh@ASf61&3^@34@keR|vI@L;sg$fzLCDVG!k+>i 
z;k4=n6(glef@j_>uq!Mp#y`%P=zKGHRgYOfBKB^_8gRn&sS!a^K6ZsGlUhr4B0YA~rZ`rJcWr=pa`1DCy0I7olAYW^I?P zIl7FRWUEf%$@+xg1yOTi94W4dM2>rq*&-y~_#S)gvM2RkBD?s{gg}W1xoL%mrVJ-L zwcDifY%xcW6kfOv5!A9%6GBFoCNJLJFPNgH*Y8GP(>aW6n_e zav+&$&UtOhKi$v=VlHjcW3fq5XJGqBPE9!gzOyx%S;?2hO(-NT@P{$8=jSO2&gc%a z5xQkH`i$xEMjl&JItwd(lyS@kkvbXqLcn%*kvi+Aq!o0{g;TJ+LwkcbQyRo zba{Pn{#Rs)KUE-^hX7u9u z>T)41%om(}t%6&Xic_mgo|7ibP*6R8WvHO4fZeXh7tL~wk|N4Z zDG+c92qImPaY#sI_vcNno-^jmAR3L~M%4M?!sp_0E^WoZ7j(2xF1H z68P%6)QhV^p24g_I^ z=G<VV7uXmD+9@f)u0hPoj0s z4t|vEP?<9@wR9|r}cQMExgW;x20e+OG9#=t2 zT73afKB6C0W&XU8!MyNqspwb&JnRcO%P_i9fP8#B+Rge3;;$8@T9f}?pGR%yrzX*k zfY}r*v4s^9(}#(xe#R@SbF6{VQFYF!pyMY4)$6sfX1Xm=5RlUb+Ux{{be3f!v+&#G zhwvlh2(u--o;u2-Z+nqpR}Bb17Tcn8bfv&OCUJBL=oMmxb0&0T)aX-<%i% zMAGjJUQnB}EHkW&m91qTBxiD_-YzeR2#Wwe1NQ1-9UMjW{)9sdOm6`25kegC3ce

6RBqT#uDW@39hLACz~j9@W=OEZfSu${22%qQ#WiiV=oqF&fj-JE7|BT*> za=qhuQXWxd1R(-T*zG`8)WjRMhWl9VqTw=usj_iF0i8X>*7w2lV1=vq!l(Hpe8~JK zWe*9^#Pg&ef@*69S%y?45-P-3eDOHkB)!o&xo25Dnpe_eH~Y(Bq+kSSsPcXo_gj$N zL6=}(qZ;|g=H%D^_a~3_tmNpXmS$mJ@tW1>q)VnVx1GJitkL;c9=oNSYr6M4HZq#l zo;_TK8xk6$@pG*Qs$oKlNF$)Vuh&K{ClQF)6XeBhA z{hAIf3Cy<1Sg!sndkoia zVDA9{U4&Z69$;~U=if3K%Vql%9>`lDV71S@cys+XJmy%fhAf4ee7{iG9KsBaBVTXr zY~;UP&mlodAu>?b^1rYYJ~7Z<-*|Q?OC$}s;%`C02n~13ZIwKHpOs1JHMLj=oKfBR z*&SukF3Yp3PBwohGPf z3Hot13S3~HOPe5-OWV+g>uj0>!JpwX+{EMu@`g8Y=RIxTPPc|QtGatHJ7%fQ2V%=A zWAEH}=FCGw2zjm}2b&_^CgddI$8ainvM(BVzuu}w2#~UxO0IOi*x0|a;z@{9B|PCO zu)NDZp&~_SuGX(@R#)#kJO$XSAcSaaTpUG#1h&UJ#(VSa-W>Tl$;=b^w!7PyBZsc! za$Qb4jka_L=FuqtSyy%7Ab?xY!VckiopWs4R@WP|GwP0hNqGu4fzKK<(aG$pwEXTB z0*-I&ywc$&?!Dri~CT>Y8_y zDXLBpJpKXqb(G0=__<~<-5ja4JuBV)nM^`dU-F>YVNW|3q0!(hP%CPkPMhJVMphun zFZfRCcRn-k@J+U{T@ALBSt03T+YaSc<@WhiOH#8|n+=$BCz_PDDZN9l?@WI=8F>sEB2#L& zv))W+hzS?XXYUug{f8yVS6LyLl^LNgRvC3>4Ihp+8SyG8tB*d$_avoYUN{|EZtBZv zj)77bK@NFGbh;2}WU~(09O;Q)Z$mLR7zQ4QNs;Won&z5IU=Rzx)Wb9*QSJMs_FUd!n6Gq`);{{4#&D6mA72yL7OJ{UYzz98FU={ z)*SvhtnTj4`FH{4D1MmGAuc8;j4?$xUPJFG zMvDve3FB#U+5L2MpgqWQIgagMu?ujm>lqJJF&xtC*4gtzj3J+Ky&}bK#Xwfh4U5U%qAJkDqmU zhz!&8zaw?;70RS-X+8(>{|zZ0Gq{yw+*IcE^a3ClNOyK zzNky&`~?3-F(UY38N@U~HHy%QyZG+vd+)~`xTaZ}+JGf@i>JL|PxIkpraLdqr-jXE z?lIBuH>0BwvZNE-dvFR=SVpLQ!?q7^)moA~YZlMX;Inl*qi!>Hw3qhoxrA7zwD812 z@qG4%hi@d#FoJvlfj+x{u~M0+*AcNawptO$tC~+9;^r`ST_RYdKK-XmOO^PGr0z#( zo##&=Ykq#m-KK)SSn}yN3razAJy@ar`%&3lF+=fR2j4P?&ZKrCy&Nx& zJG?8Bf&noOOGpKz^ET;{u2%L9;-34b;JNVUP%J}ndO$y#dk@$E?Mr4#;=~T9KEOCCt7roM9j&S{=!mDW{@R|X0bLf5aOKJ(Kwm_0UHh{a4l$T21q#>U+ zYT!2{f%jowZv>~Ig&!xljohQ@cYPV6I?f^TQLR%O+P6aLr+@Wd7p;F5u7A&X7m!t{ zo03@cL^QUvGofnLGOUwPRMp6U*4_XoXx+5N^MntPP`hSLHwyP9{lxY~=Fp62xh|B< zmAOp@+AdDTz6nuhW-~IH5zmMbq$_^RYm9D!OvVx45*#-*2^zcYIub6`m3R8x>u%k$ z5!IQJpx1p?Bj|e8IUf6z2k=7*HeQ9HaB*$gwjDY$!@{>ms+d`y@a=p|mGrWpHMSm; zklI_%NQY$w#>tRq+EpS{8pIb$otm>|zLn*!Wj@m>c0c=ub&5IZ?)BNC9z8Mg?zGT} 
z@X5PVVE(&Cb*4;C3?Zv+U4`z6$%6fP?M%Nj?^m8blAx)d?&_uTjLgE8iwv}rKG#R6 zFQNz?i#5m+yGW|gj?}25Z}0QFFQ5|xlk2wQWFMY2^)8x%DDYiFY;9*L%shWZvq_w~ zsWNL2mhvp5*s~YuBAd*IfiyD1Ts!(V61I-OvKE!Ph7+<*Co^rf=D&b6>u*~(%X6Mv z75|@0^>6oFYg|?XT-7_EeZ0!>-D3wbau~t8Dx~_nf%{2NYND9Z%hepgp?=1lY}BlM z&l%k)-ox?iq?l`1%D#9bX_WRcPKd8XBK}U@)%! zBil##DZ=4P*Aw~PQ-FT`dt$Sg0I3Q;DANs&)(R~eA8$F{sWuab32aG_+vX{(B(PB1>A>_-bcod_Kwth9MFZ;T5E-3-G zJYHvdur=xKN9I(QK-OFy8?#I(7Q9an`29hms~Mg;iXiyisbEhd7pbOMDD};`vA~Hd zZG&tt3Z~wEKI%McC^m1EgJ^;B`089l{HddVHYfnUkFAx!kJeLBG=%2VHSx!%z-C?O z9o2@|2%XqZX<^6ziR{(dL3V6!oB8iS)+idu_pac9?(U3(ot!l`S?@x;7R~zRVX|h^ z8C8B3nm_$UD=E?GPt9+Volb$M*Ocj00_s5>$#p*5FeFlmIw^;?OCbj~zgsc}Dt7e* zJEq~~DiR`B?$InkYE(Bf!;bHf_Ebqff8*GAt|~w$b=)2 zM9Mf;H1;i=aySKAEatzr{=v@{i!0r%Ro{_Nm&;KDc)Qi__&I*ZR2Xi)$IlV72gWFz zAMTeblW<)2q%eyfhpD4YAno$;N9g9mCBxkcTLky0vg4Vx(X4MVMKkVo?O&TWUCUlC z{4m@YsPhN+M}@g&1IXK>F+%5^)a@vcST!=H+e~-YI}P|n-GmtwUqMP;qb<3x+eE~zAGPhUiyy^FMNtg z{?0_dFDXHR9|x>DupfKE&kj>Ak;TLMSQYhf+srdJ7R=f03=~cQubOF!`H$DC7d7~# zY-4!m5Pfs}O4}(YP=hS)ryPCsgUSc_&$;07&6l(!wK7D${521MC&H;K7W01uHsf{p z`y`h>I{Re9um2Q)_VLhk!yQS-z-fc%W{K* zEwTNi=xz&VHxV6;VU@kJ?l?|&qtr9{N>$wto*Kp5QUXij1gd160 zdwmvn_CrG%?jyNuuN|I-T{;C~XpTcYv!q=htmH>S(5lM)z$-6o(xXT5a z@#97gDKdpTA{$)T%LRYl)*$C#}PtflE>eO*j zoW`PtE4m^xy!+rNaL7u7f7FzWPR1m2g- zX}-YsnS)f)*(ln79+#}ans~@R#dh9q|F2Ol_z#OT(W)7Wr&iQ;S~n?Dgpl}#8~$IJ zOdK!vfW%@{Zq7EuiT+eTMBr7)++#!-UZQ>j!MAGjJOYfebq@kMMT6}$i&iIePu%DZ z4+kpk^&RU?WUjWX-)RWgnVM)BtPkh)N8UVuFd$k`m7q2bXgPw%6E7){VHp`XEdEX3 zPM~>x*aef@$4)ZQ$G?s&?+>(xUaJtGoieXZnU&wW4j^(~R|#N$GSA-zEnuQB^T`mw zAT0gt*)g~*7Up>JL^eQXht?;!6j8@$K=dPHmB?4sK39&brThD}?;BuNBHd zrooqwx@#$8W$0RNPj%S+{=C|6!vQQMMH&J@l4sc#$9!QCNfD|dSIzatQ1F1xpYYem z)}(qX(iARSljwt2UL3WmTQmv6c|T@+JgJlP)J9fMxuv?lkYolvV+75Gf3aNA?=c`(Z^KKJt?l5)R-@2C;zdnC;JCqI}cX@7@YDIhGibQs5>u!M20xKAY z{de=JOsisa_;2sAGqfEts@I3k+1hFv8TRtrlq<<8j2zwxOApn2MdPr@{Zare*jMN< z(^(1+z+Y27d5`gw6@rVt3eRkA)V3n{KDRXAm$!G~A9df{@nurGmjqcrSClOGO_gAC zN*BLmAz+d7N|5*s6?{aUD7*_kX&${H^0|eDiK7xPC7my 
zdwAdw>|~QcgdAB8KbIVM4OtOXqEwfnqV8smC#W79Vbd^^ckN&QyOqJeS2g_WK3*)Q zTDGHpI|>StRfd3DiG9MJRvdqI1j<97uvGOTjkJEa^{b}2E=bYr?$8V zK&za5(h-$xM@Hpn<5SuvB3~lV4s!YuSH%TYGG8Wr4cP2pS(KmD z@izg$Xt$y(U2=pWEs^~L@+qM6bM4u`$$94$84>Ed?VDaQ|B>&rBSc1o_6=!+monSt zNRneo4d)2v)}%yS>6sPc+x{T++e6rJd+2 zPXQ?w-rKmB;BqPSTZAxT1_N%eSm4E@lMG7mC7m8$(AYj<+eBuO-Pl@oLO|5pUI0dL`2d;stRNl3Ui?6Z2Sr}wg3X6Sgz3?l`+gA!7pnZo zQTil{Ecq2d&Cs~Gi7yFD$-k3}V>T#aFdr{uTX}Oi`(A>7BYZ7{uK(i&eXz9egr)8a zvn8p3B)&c~=$UuzfW}B$+9D(~S8$Qd54A!X9;Sjk_v_Lunwlk2N~XR+xnN?oCHHQF zI_4^DP!m(cWKtxaYc)+vz9f2qTE7{XjigLkN{6^aAD0<1-`hyoB;O}qsMacNie=yR z0jc-F$9^Ge;v$2b=4U_)W6!Y)q%myF-!ZSeJ9_mMUAF7`YvtEzv`>McnB$J0yI6Ee zrDJ>}!+_QU>=df%Z`SXxA=T+PHj7~qISL>B%ii%Q@Y#Nl$^T}CORwnPqqzHoLou}! zUV!Hul%2v5OQVqcHF7W;7{YG(!q5jtR0+#bwKLn(XHVzS7#q!Np!gVMvx(10qpi6R zT+7(-IOI*{dq0w4A5ItYKv(XoHW3CMS1#ZSJ zwahJ;Jzy!LW|M2hOOBDUm#*;Xh+l9%Yfs}4N?|$2vPR(}sbjmly*1mA+nh`5UAhSJQno&wDKsodlnv7%O;b6oms6saGzw^=$h;BOv4gQXsa=Qz%= z{Q~vyHNsA^P60QF7+H1*{s_O0O^pgdree;Jl|yCXVj3QOhE$w43NfkO`}}J%Fb3O(xgUnCQr-(H^cb)A{X?-)2T+;lE!Ckzoq zhA3A^4;-{@^-Un6Zd<sambJ0VOccYJKnKgKO_w>V+ww3T;BlFwKEsM1z$p4$a`^ zATiXfqS2lt6jKY`{bN0P{Y1cWKNPx=;j4_^sKv)>M%~J5u`$y#jY0M=w4_nGS}45h z|0WCguk0XAJK2n6INz6er0GUc#*d;F?6p$UrH`FowV3%2lr5`3XK$&BKC}A0etZ#b zLgJMFae|^;=ym?|QB#Wn_xLC_9dvN+EkuVw#L354V(TG*E~6sDFE>sB-wULWFM1#0 zA$Z0Le8#;j3*R%}dx^&z z_pwWU^tpbo(wHqpf^}wYn11_fDJEX_W2nkjINZeahiQh5eM^P@<=xkTUM1kiP@%)P zHq>{^80@A$mGfcLvMq&k5a@p|YrqhW(3`bB9btmtH@X{N?t|>Obs^RRj-u#J)Fk)3 zwLIC9ASx%hBXmLI~JRoV09L7notPef~YET{GAKlAY zQzwp_CZj&w*}BLG=JoLV&@y5Bfmr6l@Z!++a_@%$IrDgo@Hc8zJtt%b=OM`~aC>8~ z$6p|%rh4UHiFa#@c6GsRcT1_s90V^0vOC(5uizOY@%2p_vd_u#g*Wp5X5FKmogF-P zV0h=(vx>3{@A|I%vfgo{3p0v;SZwTjGXI(po zdU^_YD%5x!pKrW>@>2`)#a%}fKiAR6WGkF11edL&4D$_SD-+)?%IsGl%OA5X?z5yb zEF}ZshAvoaPxY2L`~-R{U`~De?idtLRo7)1wmCAoQKb5Pb>{Nch-D6w(@qR|L2QbD z4W1f4X3~nbXZYXBNqKi%H={847l{73@`KdZX6FcYvwZI#cKa3UKGqG7G=RpVzpgKy z0;EfD^FqX*>e;JFF6JOuhVfYc5l@|U#o;Yv>kr`Q+f2F0LuWH)Iv|_nO^N$%D6#jf 
zW;DF%D7xze+P&u0Pvan$r`7Zq~!89`}|GL#M!Clc#uV|uJkHmtRE6eSs%xEBbwbGCn5rLV9x^)c@u zCErwn%W;Yxzk4$E!4K;zh-eWaqQAm4b%&{QTKxWxbmDHShL`g`nAJ}HaH;YdG}Nmp zyHLzpP~uURF8I?wB1{fACrK2C6z*v<$y@a|@hUxVz@}#U6s}7f=sX|ma^%+HM&g|) z%mdWNKfgyaPZ&#C{N68~;M@h!^KfzYq3HI4KnRo@HGWxp#~ zWt}vg0!GZK4d+b11ROlMZOA4?x9-dO_f3uvo`}j@{p(aVw27;Xgw~6Aa2n3@fLp3A z3*WyaNWZ1Nm_oic{I9F#fAHT@J2Yg|^99+o(jc`rpmG=!V{T@Tl&NGd!IM$EmW^iK{`Ao$=I&qA2l zq!+u(a*txM$Mx{hU1k}!&_BG^@*>rWWr5K7Ptcmcxyq~ce)Eou_F72x!1{<=X;fL| zJGEW2P>zqsFJNlB0zS+&*pl&I0crKYx?U?|s9kd39U2em;}04{=@QMt>KyU6g*7h8 zFIT>N?|_=Gh4*3SHkSz7*!O6nVY{gR7BAh2@PnY?Pn}dM9pgbD3ut?;)Jf6pp|OQz zK2(Q>=LJj(nKj=N|FksZ)3mX(l-Sqw&=-rlL(bvhcoc;NiH9sa@3)$ zv@Adq{g9)rf=_fP+@qmL$Ee}OvtazTE@Zedu4vNZ$jfS!|E1&jG+2T4z5DgDuDFj@ z^s!?52uv0_8d6EE1tL`u$S5tRBDbfEa~ED@{Hl0GMUjm}l)PU8-FEX&4W#$<=fPw? zmtKn><%1L%(i8V9D^GIH_UreuVllR5uMwssvZi5Vc31>0EyT6QS z=y{(|&g!X)jXX=BBCkZvIw&1Q3bspp+V}zYeLzA-0&iE5e(OlPH+_KCyl?T=p8`yL zV$R6^{+sJ++5zj?VaWaC{cXpqlo=iFVYq4`=`OA?rZmf!hpbqqIwseY7rvD&*^BhC z#zRAPW1)dpek`s3?V}~tm=IQ;2co##GOryNj(F!aRQ|Atlra1Fidv}cDbdvb@1&#- z7mht)&T#wVOJwVzo7>{8A(<~QvoLYRT(XV)p;h%LG@;TOegk`AYh1fLWl`~FP><4^ zcj-hZf?DiMpxaicnzDn|WWW5~Cm8(x^BuLVOnguQx zig@?rE8NPrXliY;uo;zjhvteErci8}N*Mt`*hfM4|UySJ@&zFhQMAPhIV)jKni9Dp#d>D6|M(J_|Ub0=)#2s7u zc*h{!oXtVLXZ-giK{@r#OmWDobA7BAfbixUR^@lTXYWa#brLu+Uv0Pk4^%3b3wjou z!`sc|&Y0J(_RrJlvIf;WwPexjoZaq&eA9WJK>kR5jvpwq&?zgS%w}X<-HVhCW_C<` z)q9s2;v2}b9+kcsP`tU7GjmHE`vwMor*Enh!R}@SWh3(z#SoTDy(}qPd<=VGRu1nd zf=hQ)B73en7N=kYuJ#Hd92q)DV2jtDe2Aoa?HAEyf|xAz5Ukb9+G}Q?$Y^VIW;vc5zeFaV+b{6FeS?n z);xV#NL!k|jIc@Xmo0>8-l2U#ut3aAe9Eop7ZcrD-Yy8xg0jPqoB#*&%O8{Xzr!~7 zwI1%#`)fvk*KW2cwA_E;{nY@Q;<|*N7y6Gna8W@{ShvH)mbn6lX7T=`}f!kNk!MyVm>3 z&IITJ-|P9gNvC<&&Vxo<**J_yYD)wN_o z=TCw03T#l!4o{Y?U=;(15;{HoJ3tIN&n*)!{_oRWaKf=SVx$hbQHU?M_i%|?Z@0pnt zNSrZl$)@T414xYRHXmD32I}TE6$z=9gy7Fls>^1G-jb%SJrg$Pywg@b`aF1>!m7y` z83AbZbu>ziz`Ykfn70Wn?bYR!6`LKXhGeL$yfJuejWUDLa#`C#IRv4tsL7WYiVKP|oBPp!x##ySk@M)mc=qwv`-LlY0fHLa*DP-Bm1x&4Y5~C!f&8w6Ib~G0Vx8QR7A5PoFXzX68}jU}6km 
zcIY$w%9zY!$!$+&v7Ed){wwbUwiM?EF}a&`t#yvEU*Gf@4|ywNco5hlzUM9wGiTvRez8(?dA=_@VBj`^+md zlpT(@1IG$R4B!q^oai2)l2Qy{-hIFaD!uhVGD~V5E6iapYv)cX4t0hKIZe z_BrsR39*a-X+JiUnTODi8)L!jGyZq)*1EGg%v@BH`mr;7GAiJcJ2qCW)nc1t7P?Id zuiM|YTWOgC{yMUfne$2Jtr~&PEMxN9iD9-zozu6Cdm#(K5*6}G?-IF8p6mw#mOf~w zGX?DJr87V3Ds%W^t?gCwUreyYn6t4giX6=cm0p&j!#Pgy?KnoD__SP)l|6R(%LZ#; ziOs62XH4we`Om<+{6n4C3SO;jBlh-E8r!QL4lFv+6QXl81NP9F7+)~$I$w@3e!Bz7 zXnUBWL3-pL8Sj1QUH_Ch9R$tM-7;ZT0CsX%6>W4D99Q}{sVGAkHCFWu-0{#l&=SFC8-IpPo4=?+ysc7e2!%8YC_LI|vV)G8pv zMeakvTa;a!KeF_AU5+4zAA90isCf$55sZD0$ zE4`qcoamC=hs_;w`VW&A+_64p$g z0$UkUF1!IC;4sBDF|m`(D_a;>WU7sV16-!I;U&qWee#zM;$5i6jv!ah-`6L<2IL=$ zru17u&wo;Uim5WEVv5|@S!#2H9VtZgbNW$V@?ME&+GkRfXJV*~#OsEMv|GP;=$6>m zcRoMW7+aN1gv;c^I*scFgElYs+79~Mb#9OS=vC3Gu-htri9O}VrXwF^4ElYn1qq^d z=RpZp5e>oxB*S(a<#Ny9$*+h20in?w@1oc@zbm<)$KLbrF`vl=QJpK5v?#o}g0b!P8C5)TLK>-}NxQ)ww#oCnX1kr9wVZDVfEv5aSWqMi#hZ zrES;wtV`l}G|F4;A+u-b_1v3@FZCaC<#RA!tiU{m^3=40Q%-?OaGNQ(wXQmP-jQQw z)sjf?v`qAto}1Fe&%`7t()?i!$T84W2k17uStf=%c41iH@A^ z3d(N^yjm|ja4|DuR3|ET?WZxon25pt*_u zot{&Yl)$%6EHzcc_jOD*1yk`S zSQA%|Cog|YKS)&chm@~utRV5L`zSH8er=*7seWOhVqGIcDzJRZQJxAnffk!Xa-%L`5iB^zsp z(ZnrM_*Z+FsvfCy{_m|EP0}M+&^piE=uqe68e*KNrBSTW+LQ|VsvF(vd&hn2)#%mK zA@s^!D_MMJCl$aw1%ST}SNmOd_mVySNUys>VbZypwgZ`4-#6|>cPS75FY7@;J#+Xw zCmOyaXK$yr;CRe!{O97&p4Cmkux;@>Ncs?TVPN84Wm6sDDc4=^t$U^-s5whP(2n>H zTIr;ie1VEx*KBMOpaz_wgEJ)0+eY^Y?`||CL!jHwp)vPQQW`9&v*iticWu@eB~1$M zVP2f64;;8&W`yK7P@>sT`maK1-d?ydK3hiv4~=h7_NTL1x4Hl4y_~j+2~S!z67=nJ zZuZsvD+Zvyp_D+HT~{(Ux_yUKPt?b=h2&FLvcLVmKiRk7E?%sDw{Ray{B_+k8Si!q z#Cn`m?QesOB`3(-1V0JFnWUtp$JL`)3u2ev-_Q5jp6BFW$F$o(%deE+SBMcOpD4V& zFYZ7&3X=kdDg4p5Z@32|E4V+QhqG3xQ83OXE&;(Pz?o9gkrm^AnIJlx_k#X0MNX0=VT~Y%9@dtWMw2xtyGOkO7)Y z*mTPUr(&>)WOqEm*ryR7TaPq|x|KFUL9kcV=#Qu)m#AC4fR^)60CAq?CeqV4ehxIY zff7Zqt-Yx>rc|_3XlF{?tsy%D3uk+MvLTHRe%4uy^U}9oRq+2I;`DLlvpNsWGL!}C z4vvM2lzyUe@9KlftM;nzrM_v^w~3*BHp+{r@)+sihxF&Vw>t~54#ET{q7q4w@M+n$ zkDCNG73x*bJhr+e;}nKD-n_zAdQQ~fq)BYH5r!!TwL{`^(Jg(G)bQzC1bjo`Y>l+^ 
zn5as8hZ~mhPXPh3hJMhPl2y0q7sU81Thd9%HPR!(#mV!$wl^DUhh(2v44406yV;qN z%@T#2?8HIiA{}`VJ+m^^7mO^hn=WGy7~-zoq=`(=J~7AlgT7G)k(HoWj4PVyYHew+ zc1JU8cYP;1i6I1jvZU&q5JmodSYLuo)sHyy zp~nF-R1gt7Q&BD0`7Y{uN?~?+iCm~42|B^5OMUqwNgr{tNl5N_S%PeG(Yy&4vzsHH3pEf$GT2|YAtDe1+K6uHTm{gqbpUZr0Z1& zbldI+O<#Q_k2#HN=K9OULv!qSMKsu&J5 zl#Nv3-6`DooY#V>+}<>qPiy}Ii#8`HnVc+Rs&+3z4+dqIsMP#`#nj?;#e=%Etx)9) z|H5tY3P<skU^!oSL0_U@ws?8vrpPCA-Scz_M6{nKo9Xr4Di;4wywjVo*kb@b9sr;IG#k`q<fWskhv)0_+eT35tm^m%2=ol zI#IKLCSRS4M6eKA&l~DgeC||kvj`vlLHA@0{8YlKvs#L7=0O&!O;Wl`)J@>clC~8g-+QM5SXQFni#-W8WHU{S~waa6tX3 zlx`{&RzM{Ow-&`)CowOy>{VfV=4TwbnvQ*w8ABll{mU;fzV6Cu<1i!RBphBqddeo%On6HY@BLq#y=PQY@wV2MN=s(~B5?dWcBT}Q#yJ564>?OY)Vj*{6#q};!JvgrAECt_= z;%+%j;V>s9C-3)eF%FUqZe+F356=sUTMFGs6L>)c9gp?0yb}o1tv(=o{!FpR3>@FhhC&ux6vg@NwuG> z<~vF#_MI19OXPiT2zk{sJsG)uMYUaTmYaLl?1S|q$|8B4_zVl8%OT$1qSzGu1wFk~ z`eL5hk~2rPPL1{ZRo(98aL}xng+P0cr6ss;3&w?7Km&zJi%=`pj^g!c?a+-;W4yV% zL@gzhWLYgj)V*&oqLs@%co1@2q{BeHgRt}PYCXz-{Hau4Gb?1l0;3=nW#*WW9yZ4a zh?+dvB{pTA19%q+0ZV-&epE7x^E(-T&pxhDixNnds# znb7!Jru9$A!9@l2wwdIsZsR_X72X-x1p2+$z`{F0Ywqof54af{$p+wW!5dD-V8L7O>D2%Q`Oo; zbdFxuH^KVOa2^5`=>8r49eKDo*3~PfN{J>30hQAr&dNWmFr9Z?Zm_`jGrZ~h+{!`e zGa8bVb5s+`@0%zATF5YxUwGhHsdZk=k}vr{vEuy?)0xY$M2Ku*o7+M`P%jmAmIjO0mrWq=D7Y zr5q8m{P+Cqg|d04#hvhEH_#qF9#vb^ZW1AWw&o|&i|DS)VevJ_NDTW%eJk}>)Urzc zvjFaxpB1n6WblajzEur;YZFcV>uAc?U&&o zGZ0v1p`e)a0)eaPA^GM~jw<;^s7?#%&3tXeP0{ zbs6dW(Hmn#8_%(B)B9wPQE^B(?r@|Z|IH^t**B0F=Ql&2f@(?L8SDT@T>|&A@sF3I z1dGg0X)e!<@b8DbV?Jx(mlN`5Pb`S8p$kGL!ZzNM(>sfa-S}E?ykfrNxja%~rpHHd zztnFWL7bvKIf$}BKs0SrgmSuCDHLXw+WaG+@A%~`7wi$4DfYxgyH5ER?aB;Jc0OYT z_M_wjfO)W66p4KTo0@6gx|7-dft~1e%+%aRLb1)^u*BhSY8_}sz`ths(gg+9QTTXy z(W>KSeOMT5F2Njizwr#9NqGf4vUFwdo3?W}r$GNnz-YQjCHaj&{;va8*wndlNZShH3*`EY4!Q`kH{5 zaH>if+Tr2vm+h?6by^%+8-XX8!ous*ARBVqyogVR)M;4MHvAX;a8QK&m0h|K?NP>|ZYqbGZ)>Cc=?C}gk?wV+ESu4S$hOwHyQrUgTnb!Zg@QQ~hmfg0~Uo&9e9 zO$tW=VpeGzd71#kXLx`LK+A@e#x9D+=R3EBvct#4irV@mLIvfr zpD2vpHd$bRMFuUm;`uR2EnJ8*oh|{2r5`PAm$iNy^2E^Cq)55k^Zzfg{}vPAb-ta_ 
zsNa}I+OP|LCE0%agopw`5A44hCPCv>`a_=D8d1#wcid^ODJ&r;L2B2riPV3Xl2cWn zNJh7fU0@uoq4r=l9}rl0J0xb4z6pu@%Jv5eZbTEol)HdX0*Ra+O7G7=fh`TRkNQQ8 zMX{74jiAkcIlTD$kylAC-Vgzz+|{7Yt*0`H&4ioz->!> z{hRE0wpxl8dE^eKC;g0jxxG=r>uiJug)&!AD!qfrHlbx(I$Qo&3ADJsYMRB z4TX%(Q0&LbAD+=Np*}F6z7a2&7>1pqoi3yqkd$I{O6Gmw8@Y?z~`g~vf>X|h;)RXz?)Z&?Wuz@D%?qvnEq9W<%C|`QI)i65F z#y`ixg(2=A^euhw4hu?f6)a0Iu1ED=-h2%1f+U54DB905Lobn|k{65Pwv*0KYmJH+ zwRVVejpXD-&XCe&Dl7u<<8-ZC40dAT>)>2_y`#c9x}@&O4$TtNkW0&nQMbM%2c5p@RP9~M4nnB5E_OR@e{Uz&H-V~-5S{JTjl!`1Q6a4ymI%Si6 z-G4)b>3BNVs8TJMizYp+fl9PhOSCvw6eVAf(azFi<{n>nMnJS)_59*bpX{FuRGFT6 z@#8|Q8=CbJze2OYQ_R`aUTR2QY7Fe4U#{OD!i&61!f!5;+n}cq28Lw+{((1nmzw-N zOg2)cd%i%VWJ+EnONoa)058k5$-5yMeNqYaO1_U&%sVxXKZtOoNV z3L0`5j1#AQQvaBb)%_5ck6_hR84;_b_)$h{7unl5udSFGuQl_Omye zOwj}P^;;JheQ&-<8>}#MIeG zyOV3xj6R%7s9(4Jb`#`##L@v+>=E98gDy%vQNC!7%Kz$Kitd+E8&yt5x>gb3!@cTT zJjtmk8gY3**%!Q^2a7bh2|_4-r3%ov$&lD0^7(QSw&U=qtAE61P^WI=#!hv#^YeiT z`POXJ>}8e+_k=N^Va!G|JB|JtqbWFIkQ(ET-RpQ}_e=rB0`AY)D-uT%``t!^WB*WI zvbHxE0|<#4%~}MdWc2A~uY+)T%eGrH&uCF9T54B{-wtMPf?e$ANKCgN~e4;k@h{`txuQmWrf|( zHv1m&g6*uR!%N(9G5~2*M760aN>?4fC6KZ=2K^Q`QIAM_Wyi(_FLwO)c*7~?V`e+f z8{UA`ECK0$Tuc+eogeA$BPiscr!TE5qIMvwaEohSO@#;8mdUv+{}rO)V_1OKZMR!* z$D=_yS3UCn#1iIM4G3WH9d6`gEytr>DHS5!N;NtbwO&$zTM+E}3Q2;WL_&yt{5Rt* zE0bL9UhzsaFi8(392h;g2_$UH^X5*bMqHs4G0Nbj{y2nckbr4eTn_(nLZ}?giDFOu z@B-gX2U|ubHmhiC>(?}Cdn)AF8o&L0uT&(WKh_X(F_$|>6v+l$w=esJr&s`XmCKWE zhUt8Zk%^NJ|AN?Sm>>bE?i(CBLh%>n%Y4C@Mc}xsaNq3!?xNl0r~~wf&gl@6-@;*1 zZK81pRJxYToJn#AVg4?PQt;rq3R}yFNlty1A%V^&6`384{x04Q9!}y^#|P~ zgLVZsC5~x@!z8Fg|Jx0k)M8B69JR-ULvb*JXAPe1oA=jWl)4~7=$E0~ znJ;vN7qoMy6N-&wHnXCoBt&^m$^Q8hk^PgUkoNSrDwLnbKgyk6iU@NTznI$Gprf)l ziLKEPcK8d*34Msv$JM_zT?;bL1`E$a1sVPYeG^Oh zBkA=plSN3f5d&EDsG0!imCI2Y%0ha9&im2(tH1c9>imx@^qqhoiRlBgDXKm={dw>cwo(s;@;irO-uo3?~G9-D+KRKaA?zYO9j?Z(iv!U#_8E^zPKqpp)u{YlQk4nirhh{qr}%Z5(db7VFfL4$78xPdfQ zoj3;47b|95KO)l4A|H^F+o?P#AVbBsDlSy*_`UD*e3u7SxW{HQyB&HFi@pQoQ1C)Dhfygng+kKS8~H0pu2qE}4LfZM 
z+td0&es`#vn_lG5>%xE;0<`C9u+6I>La&BA&{k+1FwkN;71$r*_{nuV%Piqh#wF6U zWqaP)hQGo?UTd?^1RBBrV}({@_XA?o=L=Rk?b_xt~w5J=ZYqAFLognY=$Sagp6;Ym)B2%X1uE3veg5y<#9Wnt+aST9{k9 zZg~IlUhqYOLtEBxqLz;G`sTnt-@Vymw0dx!zu$rh#n5X|A-H@k!2HfdK<2cy!BRFs z{_`@&bpVqJ<}((mK(Uz%R9T(qL-h5E#&FajR`-Y#z$Xy>1}QM=oTkJMi)#+{1|Olh z5q4Y7GBZK?x>t_&F>a*#4KfMa=9}*V<3h^h8$pcZYuukBkG6Q6b+!6~6sqKXHq2N{ zTkk1@4ggmk5?_Z&Xx0k@X37qfPpGe3Esc)cX^A;kzFw9}rQg)Yo`Ja~26!F7H|7cu zz89Gv-SO+L+f5379sV$Zq^$40o^^q7X`9k-^YHvT!)6M0MrBa3GiId z2d9?-!co;k*CjzfnA!tMSj0s8(Mt>WP{9ULNcQlwvb=q&jS!pEThMo+tYJtahI8WZ;M)z+W|eQB{))XjT&N{%ryIC!n@4>F$gdP* zq7CdIg~VS|FM^F;peiM(2b(>a{++L8?Gd`!$q%19aD^(xK17A zjb@U+7v&bfMQaM0EFZqZWRcQdW@h}PPB1}v!Mxt>$R;ypuW#iq|Hr#aF^A%od8zN% zHjEe}`55+Tx9(rc@d-!UUfkzzIH3bFsMJYz`!CJ%e~NWukGsBma$h&(3VQXy&_7H( zATS52%>b(8>Ts$>mSp>ezzHT?8V*T^RlwMfx)l>ADgrUDp%;~CXR)OF$uC73{+Pry zpDT0Woc}l$xdkI{kGEiAKtkMMdwc(0VVE~CO38p|jnE}d)D;i`dI@>&C!GVqVnxQq zU{2!~~w`*cU`bQty89dgnT$^}0o*oNgO(58uzj>cg^lsMk6d5u^n zwx@2yhI`%DilQF*%(t{wN_0+2FMjb`-(LwKcl+tFJ;iP;7_$1b{RPRO2lv;WO~liO z0rQJGa0sKpHR9q}08OEDv&5iarHT9fS)9UL2eY)G{f-34u`=w|??zZ8zBY2cmV9<_ z5!Z>1w!#7oO*a*t>DwZSqvmy1MHzVor)>34=oE8X>JfE)T^L89nn+=iGtPu%mSW$1 zBq}oh?(A0b5238-Yu;6@#NSC*b4q)eFcK-D7n2Pst9i&L!zict?f5DeC_lEQVNBiY zOyJ4=;|JZkpP%nH$Smb3ODkMpUo^82^@zjX^dNuPlwn#%`(*twIG9gZf$_r}|AGXs zz4uapN?NbD!w&7RiTQ6(3)duI?4W&6xaFo z>ntz(TBL{ZUzKqyzARqPG7GaMLOxxZ^f13VWX-rb8v2SoD~w%MQ|QU0bP{OO){z{4 zkU;Suo$s7QEO%dViuk2z;I1CIRk*Y(suGnNrhBV|u?0|0+|{f{H}-;!Z7 zOG!@z_y&4+f9tE1VAwL@--*O-Dv_F4)D&Q5lq`(SZq-uJ;+F}S3W)4%=G5ZI) zj$}AdL|Sg&l(C{r5ekSKQ0_Xl>DD0TEK*|3tG}SWoAcVIRI+}=x4~)!ak0|CeW|)D z%e>WutG~k~=M+b=yCQlnjof1R9=)I9{4+f62-TLuK{^b7?oxy01fbu{Z+M_KN$)&U z^gdJ-hh12?!j6Bun+5uVUv%D3BPB0-7Ed>l))5_D53ckACJP(QgAcSS@E;c|Ju`UG zkGL!BN9;d%&Uo4s`K*r~^aA1RTRA}MU7<$M?7}k9F4PFNsF7bJF2)DkE;=aR^HvXY z*5-e&81mYwqf9iN5v^GG{ z=Fby;3Qf(Jvp5k&V>OWA>~cL3ey$8&d)VdVQNDl)o4$c``6 z>FHS_3nL3gyk(9AOsRk?D&2(y53hl7H*sNIsw)#7YmVDz+Hc);A3l9q3z%1=8AM^9 zL1HSH^NDZ}O`_6U%#L*M?tC>$XmAcyLtlq&%7}khg*cgvrnpaFk1tn-nK>gw>i 
zl*tHu;#i#L_4c8KPxy38{l_pJa4V`(sTC9L$(>5VbbQT!)&!K9ZR3D)+u8a#5eD59<|4!ah@Vs2ifWDZJgRiQl;;l?3#_Z#m(DVId z=K~uOH#YEMx~Vl}gSxq_bP)eS+3>OWb(Q2m7V4jso8GYcqDX%Tr)$n>%Jw1x=yn!A zEKhIQCHhV@-7f0;_A2R>EJRIaD_l}EYgdVK?spym&bS5_p`Su5;8@~lx`dNsPELPr z5PqwQSah>&^+ujE4S$m@$~iHc)t&5$aTRQOl;@J!6tdZzarh|Rw+bgANrZM8v8-JH zkpg1&wgoBn#uGL1dBFv18AG;Hvz!D&hnt8==WPBbz9H^TEk?%?8t?JUD|8*onTpxk z@lq1*(WOV#edEi<4RgA$i;h2XgwuO&9Sxj*eR=SK*81pA#+CmEU&J5S!H|p|rH`tF zC=-jq<6x7v&WE9R#+yeQn9rM&%1ED=J$pSYM-$&+qzAw5$Spvl3A;Pkmo%Gv_Xv}% zlZ`yfe?e!>$1e`G)PUChiC3*KCqO#EoHVtc&J)en<&zYYK^W+8I`Mm(NTm$B(2lYF z8FOjkCy>8c5A7BueGwCBEB3xqT;ZRCG?&V!w%ZRHE$*Y!c>MabKq;je+6dErPEr6aY5Ckmu2qS`VHR)Y63`aE#$(9+=r}p_1M`{ zN5pn(j-J@B!=>m$R&&^{!D;LQyn~A;>XZMXjYqR1K}yI{@Q&fx^HYv;hd}!ML{6xK z?s~;>G0f$TR|oytV7vWkp)r*ZqQ=DoL3@ zXSnd9nk?|mznp^aR601mM!4KR|BYZgd{^iO)Z=TPEQ=&xou*@U3b@QnPlhhiQ_b3LwT5d*sUCqqhJ708=J%vV;_ugCIsF^Xhd0O;?OFTygQrLGMtu8Xcz-H3Sad`t!m*s82 zq@_y%^^A_=mA>sE*VY?V2mXT89F{gA7Y0GxAeNYE7kJI#rh-p^Vtr}R26@2}attvU zzGg#7+T!MxMh99uEzx>wyDwE`I~>7*x@od>(Y&qys{eZ~Mk=a{`Ko-*d9r%+{n3KV z<_JqYp2pTQ`DTNKz9!h~iP4x5CVNuJg%g*xuK6UScdI-w*CHkht=KXSdDdch-Gflk zdoibkz5LI-iqMeAMs9d(lM`~rcBRqv2ud`{uYkr(2_Qh?4~NhSwJ7=3AW@}(>vOaA zzeSZFJMMM_fkOQLldINGm!<#2bA_@`_sS&F-18~k1))!B8?(=3K2D`J2r9`)9FOVB zb(~p%ZE}%6-T|7_P!)p(G09m^625)UJwO%}iTTypxY5dDds+8p0H8jh>@Y7;te&R# zf6{pDi`OMP>S`-W;du3Tzi7R&JNTjtg3z2?an~~XEPylH%0vZ{;0u{?NWUWMtI~Z0 zF(3WZLc?&2?Z`sy3T1}#8k4vk88qeUVsa9`POxL{G!8058Gwo-jBV>R*%pB|@{yc} ze|7K$g%CNN;@MMrv(S4g<1PJUAFhRCM40|FMYV<}fQ#0%b^%YigZiTikHqh<`vZf6 zdOilF4(#Nah2$`=_h#?gh}~CcuQa;riEK%LG_lXC{Ps21$eRRl=$k#8$mT~GB}}Ib zJUE}o-sFvZ2n1so!YO9|T`(ZuCyFb=aDjWQopzlvLpof%RYFE5J~!d&=}_q?#&>a6 z?V!fG`y0O=59x{R^Dl}(HmgZz;4h55;^+Be4yc}=#4ylqJM6N~Lb$%PwLKFeE%(-I@N~w9c+UH`j7(mxT5WreNk<1h4 zQs&oQ$SS>I=JzC?mkVQg`a7RE!^f~T`hQ><|1T=pzyC&3sUZjtnkiKhX;#hlTmFi4 zW*2IGF0vc7Blu@cS9<{+i(OQsTwOq)^4f*fs#|N%(OVoY;3x2FL9Z#e#e5<-a$e)- zBPE@+Y5vXdWNN#ibg~%Oh914*{sXpv4wZslI8F)`n16>H5_m0tJ=pr|)uf`LH^7f) 
z7~{&V2v2U`s|%c*#GENYHSKDtrAtPp;8Eg+h<{?4g3e)pDl0KoP|L}$Jv=Sc6Z-)f zQjD~w#8VyDCZG_CW}yE9{8in+cvewoS~{{&g7ZdoFWjo>2nOf;7Iwvz3ix^?4;;bJ)X(VFV-=yLB;=Ar}xA@Oxx$t zg~CflI5*Ir!GkV}0RhH0O)(`Xb@V&{fGFTCyyLa@k>nBD{bMbN568%z-q8>?_u^^X^2x-2D+Zf*Ds+9Q zbdqLH+DTJ|PRU)BTta?zWcNkgzQ@u7VR&wm2;+sS(t^H_4)XFTQk|N%7aFR`CP|)? zaJRaHVoL@24Fx}HL+6ulIeyLpCP@PS9yS^(L}NXU_3)&b6ELS{lKJqZM*c)#ZPkfg zORR)gpYSs+qRU10DF2xTmB$m2NCdflV}jg=8*zhjkep-bipU39OtHTx!RLMz@?GHh zHuiH$$fg0V>7H^~#^sMC2~1~uS~O2-M0$|DZhfgte0RQvbUry)_g5Jcvdv5JymMg8 z;DrP!rw0N25~sjTbav!p(l!nPvO@H=`#qjr(H#C!H)3P01MJOUSM%`Z*1XuZ1(Te) z+`-%??aku$XN2IjnMf*K_|%VirBLBOHi;ve)z*;U5#`Kz2w{UhNV)*8LOzT8v&-E> z-EuKh$fg!m-|*(_IJ_DY?)>;|`8$6w@2o!J4?1Qu-k-ZznGJd+c$ry*rLst5a>qf`aqdVj#h65gML`A9e*t|i zj&PPI@a%$P?#P6+ruTr2g?)-&vOYzZDoj(OXngLiuvi&Jq<5vk-fPA+8)?q$8>H;} z$%nDdgzri)B{lDaY(bjt`mzm*U0@`5iG{W`ok2p$nXzfb@-Z!9d^eCLw~VHoe^lrXpNw97UAD0)s+qAI>?zgP(x9gs#rGu0 zE)0qW(rt``C>Jg}U6zID&cs`H?JG&+`TjE05_dJ!EnsT&^n>i3Gr*|T$V&;20$a_} zc`;eo{5YVfiHJoMny}MxsMCef=Ly?)lkX(5m}_eCSf{|)?z&IYg()^VJfz(YThSP> zlHrI)nV{Eg1KwQR@vffOKNkw{zaem;?vVOlXGLL3rdJ=SB)l8r zswa(!R8Vz3pA7F%?a9q{J&X?+ndnvY+b}!|3Jad&Cy0ZOtLV&>?+ex|z#tf@yxa3o zS~Dxcs%L38(lRbXCrQP2&|b&#?_u))z97fUxZcpl!H3=> zQwe=Jx_6YKJ=XMR;-G_qFGuPOt3ysAc(|6kV4^>coTKw@%*%JfhLx6B!crvh=_z1o zesc-NSV}$leK$;`MRpCwMCW!(1nz(vAJ24$YKQ9#73fc~xw-{_ChqEwWDE63IFwwR zMqWIQrh}2%sy4=p7l5+*;+YV9OtTesI$(nL@I~NNy;%n87nP)C>H5X)cv9(s7{&By z9wNT`ihiQ}SB$@y>Eq_4GvDmUU!1a|Ja*E|*>hFcKXV&OlXvLzL&)nn#wOvw=v&8z z?7NUw`WXjMG7AKR7b`0Go=HV8aAcE!m57H5!3V(9aQc8Je zQd0NV;TP#Y0dB`eILYkSn1Ap-@IX5?MudrF!%I_%Bqo8OuOw^6fY%T0v&~(g+(1e} z#1O(uzhH@!CvD`aA^aAbJq~cDInvn7-};Ytzf`4<+Vigh`Vz_kaGo+%oW4iej?8Z) za|pNs+Xo@FYY()j!+|@#pPJHzKnJB)io z;=RRG``xkNE&1HM9y{;+z2LjMq2HM{Fv^Qle33FOY6H5m)^?mo2hKyGV=(N4@js*3 z;>J{Sgdkl2wy_KZ9X^6<216Ft+dLr0I?_&tsGYNUG+Cu7Mw|=*w$VZA=v0!-3jV-6 zE*A?XoA9za!V*%y1^t+`uc;uvf2~HMEPn(S_f~v-AZTx zd|UW(P{`H53rSbstR|W08i{frM-$|h2B#h>wGB>d5;NNhyIL}JXKl4eGzmadtDWcF zw1;$vn#$&iH)1gBOG?wdQWSgYeLB#2@DXpuacti8Vsz&{%#f=+{}bZ!>og1?`#o+u 
zWNQ=FYFkr#@bJ>)iQA}P>7-XFY`A(2jeohQ{=M4y@BY@GR2BEt6AkjGBQUWjJGdAH zwJ^9*Nr*@BuRl5xh$J0$WeotEX`V01pU}pt+yk*WpGUpEAaDGH+ZQ^O4&WJ-RrWRh z43dlwyoaG@>60jN4;^^%yj!R+DLOv0^F z*d8i--0W4ZbBA~gdq8q-ZE&!xK{56G*2eQhY7cMUYx~}j_UY_{fYS|HXT(ivYIJ3B z*M>LuHdTi?xS&G<9KpsGojzAZ!uWZZPZWbdxgh3~#)atz9tAu##6o7)9vowMHn_+> zMF9w(@h20+f;u|tG=rjw0+~M)P0iHycl{cPS&UQfbqWNtf|fEi(|?M9zG~7!islDq zpBf1-V=;R)_g~xb!0tl*$`T5}D-y!ESBes%>DyO77;`*Vlg!m^#BAKiG9L=Hh<*V2 z6T>>|2^?Aknaz@5STqzoZ$pt<6jdCbuJ7zsnAYw$WhzKnbu*M@V-kKFp^8_1JtH%W zW@tG{F$T1}K-WbjmenB(i+1CFE$!c5U!!v@3tCkTCxxGZl=;ng%=j3i{;_UPL1tn6 zQwZThDBVk(b`LOt1ePNDOil3BkU#dIzn}~r`^4B$hOg{=;ECne2)B z%XZjK0LHw`Wxlw)$wGCe2?1A;u0dyzBRxJ8?;5|z|8my#h53Qx6npQBswmfNNx5cz zVNM}%N>~UZ0XB2JRh(lC(u!q4$_0|GyY7qO^hvtScHKj<-Of$-m%1A-%jZQ9f8+Sm zpbW_8$9Wtkxno-dDZ{n^ zW4lhGtZFJ0xr?86zwZHkhY~trxP0be1fj*0X#6gwWKGqM02 z>VS!4cj0dO0hkeCaQEj~hbi>~@^)i(kdgLp^Zky~2@{{%K19pt9NF)c{6{X6^?dj4 z;L~G+MYPo)b2=LqfT5yZ_XeGQ>H*8&eZbz2mo?u}AYQ`}=o#K+F<6Ka-Yq zz`xy>wx{ymx54T(H&qrg>=ZCtHG={c7wNDXy+^;W+rNh>_gE%4@Y5OZ1ZRnbzR)+K z8`nYHYO~@YU%<>^&U|h3x(aN+^n7V?s^*2?c-%fv%g-FWzVb3BlXDfOctipajU@dM%fAcss=TxVQUPW0PL4? 
zNHwA`9w4jgn#iGF>{^GMnH0lSq-6nA2wK`+W{JhcjsvJZM;;v)!CWR>!(H!puAK4M~&U^*A^%Q8uBmSvN0^x92FlM1CdjSn?i$0C&uf&*Dl^XfSb zz!HH=$-tq$PPqduIxZznkTts!eN+-nx~)bJJ)6^Oq#aLt`s&Grfwia=8SiG3I2>JMWRwcryk^~5`XzH17zXpnT)A21Q1IM&M6 zt~R_k44YLnl6e+>T}gVUNMo_7XyXE&!?9r;80KIF-SAxd4YAoS=9$Cuh|t|}t-N%< zl4l@k)7R6640Tm&5Hb2>{bm(@5!I0>k!0n597^zqw%%aqXA7o2bK~5(T7={tIVBos zKH|2c1#L`D@cX&j1@ebiCvO6yV8Md%W;wz)QHl%!9d)Y{RGQZdNpl?@`CNZ%aSZDOKiS2Pgs5tTwIjNpBFzvzMRE0I+ z#-T(rt&q9f;mw`e2N_P+2J*z}{o<&0T{;bKJ>@lha%SxzMNeQQpvwOY!!W5a_SzP3 z=cWRT3p_D%r^yd!)hX@Acj z@Xlj8@RQmL&K=ZM4LKCUwEJ{p$c|f!RO-HQbPA*;^rxj+)sl6RJZWo#!3wb0uEh-+Z@vK+*ZJ z{})tf`=NB5tD`PT@V$vb298+E0bez6RE@$D$I7o8^pgOZ z)QZ_6#y(q=F`Jy6i^+ij8yQ&9-S{e0az?k>ae4l+m+i zo3+x9?Oig?At8ku=B98(Ac%0uzpoTC6>yt0{z)?3g5*?MOmU**3}5yAWw0okn%v&< z@&6nG{{MN+S)E|Ywm58&>9O1|aCBR<0#E{qQoem^00ftXZ*rX=Fo7zMb{Jd-p1l^6 z9x9ths;BXpFSL}E0LPFFFhEDOC(u2T@|L(2vyi^i0v3~ zD%U7&-v5M@9(|@Gxp;iuOC0%d;%c-JDKul@yFS#l!a>X8sqIEbw&)1~eLZeGaS#V% z0XmCUIcLd9W%3Hqz3%R(cx~{v5fL@Hrtof`1O83{f0uph{FTySXr|vxsYS~ zBuzDZeA$p<LQJ`LcDXL*kgmS9c%RSTNhXZt zaSqEG^gggu3r@f$*h)2%G&k@CR+P_#X%iV!N_2@cmBUEKJ3;NB(c70_Euo|%tP>P$ z08llirCRaT1K|SYgk$H8^jWkb&ba1ylb$DWnavAK&d%mFyk{A3iWS2E<)X|{;cwn| z3tmZX#k>O25*=59Gsxl?@Y4V}@Sfw7z+_kZUAaLk z(506?gv~&+-_aE(Yh>h}g^7Y*W&32?wM5owUGvK(zRbBSj$O_Ik>GsDQ>5s2$W%H( zGm;LW_)c7}P4BRZ+1a?CWgQ#ZCmWsi=-g8J@5Q%iJ_#fhrH$+UCzm%ONr~mtT2gTE zwEjF?Z|&>dki(>qUE7HgT3=1Kqy8C7Jx*4cl&i$loaP{6G3}U=xd4tuS3V^rN44-F z? 
zTpkhLgr7%R#vp-ARzx^Wf_!TuX#yuMGNvQxCxt3C506~Z{x&zzwudQS{{^93=udY+ zv(eN}>~12T zl3nIQCxIofm{9P{<5{3^%O_opTkcMqybYw%p25{TU*1-|$o1=U#15aQnM~^QeH5TC9F1HId(>iWP39e^vai9lfX7?*HJDc{= z(KYeq#t**LkEqXBOb5UJinDkcr7rv;P_+0S3t1f0tHsF2fO)FiP|q9o9g`@Jnn&NA9w+f^A#WDXjrj=*imS!e%XUGMznsLHBff zRr&ab+|{Ov*z*|ST>-kim;6o)nZ`c!X8y8W*~2%ohAbxoGfm`+M1oc^#6*@8qNr8_QXa2>kt1j3i9C zgpldLM)uZzF5CaqEC0V92M7%_E-m2zN9qarp_{Q4+pFU%f(={wO1xeUyeEZoqf_vlDC)1IMDFj*z3Z#}j^?h_(N)ldz7 zi`!SF(>UrGDd3rrIu?j)PeDv5KEUhU#fdayd7$!ZaNWbWeRU?Q<$_E1Q{xfoTM1A0 z+dpn5f^>G2USfHHFTZ}WCFQ$UC1TFFYYAx!yV-E_u`n`!4 zZ-Y>4Z-PZMYrB3#pl($SsRt|D*L@i&9~V0Nc$vKvr!^3qo<87iHFFFXX4K{Og{I=g9si62!K@zSR9&NL>{3+N+GS=Jv)N37}M=*P`(_7%J#Vl)B#DLR<4-ad4~ zQ{x-+p-H@$*;3??P;ivLI>Q77We#}#W9>g%!rr-tFa;=M4^u}~(LHfLF(;2-B3)W^ zk5gI%+`=*tH7`*x*Y918T0b(=az5zHm*Zo z6FwV~tGfS!%w9Z#+}f6HUXpvcC#=iT4=kQ)!#?qfDJLr(+@=H;55B=xeWCX>C(@7l z-MUX$&%JW{3o_tu51qV=6>QN)@>9+T?e49A-6d?F?))Zrw??Md2(_9$!VEu5c@X;D zRH+?3&2bP(G4mEUb90aVPH#u-h5j@Jv1y^{0b|%BFDx~Rx|EzCkW#xz08=y_I(!$t3mEf&i zzZm%i>b$6{Jpae@e0pCZpL2$N7|ST&dmO^o2mlr8{UMsdB7|v;4yA+>SRJl7W!go_ zSqxdo4f=bBTIAbwqz`ys=Cla(&)j%gL317@kOeno2!C|k1F(bjB>k7tbvfcA$)pOt zvTDuSYRa&7WNV{@IHn?W3dN|Sjom~G>WKe%c~XAg!c#a<>ZJXPHe#Wg1(t+|RrhJj zAs_t=_D{tL{{{K&RbV^t)z~%^SFcz`CTXTEqux9GEKWSp?Hq0n|J@T7AiU4=?YSn5 z70D(E^!BW_;XT@wg?a_R1nq+^PO3f#L~K@|!fCu@zXB1HE;yT!e8mm#JEhmXYo=9c$~USVOKN|r!qHT4t)*V;7EE(3JTFPqQ#lL%hi^L>t`h)aKd zr4fz3InS>+q->?AgtRp*hP9{YfsU@kIoxTq*jpm60weB!>ny-JfFGW$2z!ZV(Qzl5 z-^jWHHq6-(I!8_W{ux;V{XrF^pR^68vLh7%)6fQSv9b1|$aFSkvmJu1JVRh5o8mQ*HOm2cjW5n@yM?p4)y*~SgTL!k#MQ%}mXQ5>CJ!A5>21s9#n2G75 z8mHR0H8v%V+Apt#AIO;b2}yjpcC~}&C)Io!&j*CcGU*qG=@1L#4T^brQ5S>*K6SoG ztUv(eG}`sGkkspy$(zFZy@VV5{nS1PqPT(=r=LUkwR0`rf#B#;!!&h9<`MZ?%f&i& z+?z=uWYI_w+-e_7Ie)x#0I!>9UzM_}(w!R?1tOYSW7M3^{e8^4?bQ7D_A;;6F4Z8U zo-#!=Kmq))pDmfnP=}vkSv2|!DxO6hG5%!PGZye}JuF@7j;sLo{TUzBqA=wH zRh{ySyBAoFa5Jy>0-aTZ)m97mFNc^Y6#LQVAHOe&C!lc2{~u-V9o1yJu89W`X+jjF zm!MQZL_m58Hj0Q86%>@HpmdQY0YX9%kQxC2sX-ACk=~?+UPM4?p_fFYLqZ9GlyBZ| 
zpR@PbGryVNocSwRSu0s@-ts*6ece}i0Vj7M09_u7!tnAp0G~1aYI|N|PONcczxb(+ zu8o)DU!eJufGfpJ@cG3}D>2XOm7^XL&CN3=y0?PzKplIMG;=Gay#Ggk4kmjD)gAsA zUPr!9DJe{uMRU!~emrmrQ`MF`kWoHFO3o;>{snr073o<3PhP_(6v}LltR$^kvWQeE zPEu98HlF2jCzAt6SK*BdkPbbhjY|#yJk<(towq{4Jw{$i_;aO*?HC;YolJlUwp@Z2 z6Fj>VjTTVt3V(*5jf82$A$*u-E*k>~a8G`tuUiiYeV;hNl9&PIq{vc@;8DPX61<3w z5ErRT-6mwLPhmnYuXBrR*eE|#?e{;I6%jcn^t0-efsP5wy-EEEdZs>;3#6*2gVX$^ z%Ylk(;JXA|MoWz!rrOoN^6`Lf`tk*XR;jliu3WhCl*w&Dsdb1A!9?UMi)@n2=M-`5 ztFKS=pSAz-QaIo@L*uDxPKd|7_Aj`CZf_AyRz0TG>kg57vZ1&C>!B|m6l{nP8iKtr zMBN&TiS$Z3;g$9H==%pb8z(zn7Qg!W~P=|VJdWL!j!?cUwvGKN9#C{ z`1UV_wISiciI+jbfF~1e4`{*OO~T6-ah=fP@V`Ke$I%!ogre?Yv+SkyE=Q&EQ}a-q0C-~gp7EShtM9m z)5h{O(T~eEIN4r^%p~8=Z6(+7f~hu5;=~;xO%SBwz~=N+_wayx2Txp@Pc^tm*9PMT zt7dJV=Q245Y`#1Q>HM2nm2>N_%FBf$-wqa5Y{CZp6ZEAhWq7WsAv~IA-TrI?FoIIO z-;%nzV?@9F6q&Wx)9mei-ik?3Zx>e<2x26&ZrTx{7)sL|6U6)s&?P-^19xLxK)D7H*7J=?W{zxxH z$|3wb22l z=!!H;>)bx^aAl{KZ{M8zhtq5E-yI62!MR4%m zDVD+EL3NrzJS3%Tw-23sbZE~`q!Vs@?&l3r)7g8I|JkwV^n%3)v`6noI6S z7#wIOYcqi;!O_+e(`pg@^lZ>dV*TBzrX-oLuKO*| zRYB9<#dW{k6AHf(NMeDzSs8nOJFhcc;J$aCd30}IQ1XaCDw~EX zTWL8wqf>MG{`m2yGH=2+HdQdrT}T;&#t!DUI&XCzC)0}eMHpa#A^S=We-UmOxyFfSd zA2V9*dD9pIg&j~{UFgatLB~v;l|ka)?UQFf>H)WQpoC(nEr&DC(JqZbefgJfHrBD2 z4^Zn96Sm1HINQO1?8Xprh5}fBZUiV8cHAKup=%V8#^AZrUll;8!nBi~e}R~D!8o}y z@>!z4fhp1QvApb%mzV4$Uuo}2X8En%(4Tp7f46TNR{$+Q38#kWD^RvcrnV%R3c%?6 z3`s5G9nv!dfQ1%T6&?J5T=mM{q-ai2A^t`0Z_6y}b5KX~Q{XemA5e~ch*qf-fP;CP zDV#FBnfmJ!Cb^h90N;cHve_0*_FyOBHU#xV_ZMogC|v$No29?nINDc#D2Q8!Rw|Nn zELy>d!HzCfucw|t=(tF$47t|TdOKyeJla?b*8}%e6^_&ZsZ9T`I~q)yG3|{Orda^l z=ln*|(7Go3hNV3nV&&xOuk_#XJ$d|s+wU~DVKz709(hlQ{{?!>@Y}I}y^II!?0ve@ zDz!4rDIN>c9?Awnra{wxRIBK>hPUe+2}QY!-a+e)0U4!U@c3 zv8kS&xbaJ2pRZ_1$oU(9O`QR#dg;#sEpm@VFN|0mVKCeQtIYa#4}d z`+Y;Nuk7pQ#3=$)rB+b9M~G3ggQcB0V*5Z(pxKo$17e!1&BOZ&>^5I*I8i5E=ikLa zg^wVMY4M6kNrWyD7qd~Ws5YQJZS^Mb!mkS<0OD@JYWB~wZD|^pJOXlJO&;P?^nwWK z%K=mt7KSK5_Bf)2DN5d`a!W6ta4|opiK7?3{ixI!0}8)Ksb83eMf0P;7j>^W=3H*6 zcANGPd!OzY?f*MOFL8IH1U59Z8BGjDok$2#&T*-a(@QV^<}lz-Mjo*UG1iPf 
z2ak@eD@~!hu8|9BDD$7c{A+9dpQ(g;v$hTXWAIB@nUm=H|k-; z%PZY%%+1IU09zA-I`^+b`G8%HwZ^Xe$YW2%HAB8%jz0F?VK71k`3%koZjQSZgj>t0 zTWp4OO+7q%-u&r-TgKg7&zjK^!nepPKY6~gX|4AHX+%1xHeHmn5rsDm* z#FD{^CCiHovcKrh3x=Bt(3%y|*2z?HeX4wO6(!U*?>DKrlcy{|2gGiKI4JoQ<* zWy6ZTWwUAqciOC3B;c`=PWaOI6}p4ES^>U^uepU5uc?H9gMVEFz&rn}w@kEUai0mW zeY4xo7C{;ZX}c?y%l@D}2k3aRb&P9McaC;6Yp1X97!Fbk+$B7UQ_Q9_DTZ$31F|nr z3vyCA7H}O{!G2<4n~io((c-N7j>}+_hs3V!l*(pUzW)m#G$m_UR*HU_9$Pr&l6Nbn zon-`psfxXizY+0N7eGVKoEJUCswLFT1cLmyY$c`fdH1^1{a%%x#^+0{(k4B~D2c2C zAW?@4)h5dTx#oygVjHSR5AV#{8Tqby6?>%Wt$e)~l?sx#gIEMB$Ql7>#sHx@%uu?5 z_a||G2wHgm$1a7K-PM8@JWZ`@xM&h#-d~aIWzSi095<7r(%dxMBvNYclDIv(=`Ncz zr7XVjqe9j2L$E(nXbuIq1q3JxWJUU0&S;$K(1A^pFsDon51d7Hq2JXn&9W9lnA zPfyq9p|Oy*H-bFz*ccv)r_Cth!R0A*G~!O#TnFFqywC*p=nfxTNCR}Nav8vD-T&4o zgy%33ojN?DY{kPD6ztq!>|LiY=KRS*!@em^Lz*`q#AU{&0^~G828afbuEP4@Pj>;8 zIS+%?Il~d=JgVju^u&!{pH`;!)fu{7$hVCZL z6Dw9va~=PXZ%YHfl(5i=-r7E|T$;$R#?0*ilbWDk4RTS`^WvhYI>C~mDG^2LOGa;N z((P>lNakmNJM4I!nF0_E%h6jIero1Q`;4$`1Hl$0;$Sub%C*K2AKM(?zB%T;RNfeh z=D8;S&g(A_q5=~CcYAhx*=rFTVJ}HB3Kq3yoTd2SIh$pOhK;s=w4_~1wkjA?^$;S* z(f22*76C${fmzFk%rHsHS4b$f?jXvqYg!-*skBm-XTMvGa=RZ--q8ZuE1vSt`-%W_ zYAQvU>sya!q2mWhb z1a<}gjZ^D*90f;G5EyKE1O4YAl5ZO-rt1O-CANXFHO;Efu|Xz0 z03{;6DXOCV`QSnfRCsr2LZ@||NF`sPJjnNHc_gEE)2!p|!EGGi9CK?LwQ!lK2l;(| z=M}2v{WiVqBWjfwH?HuLZoXTV1a!HtwWBsAsdwDYR-9>(su&>`N~(`f>jSAUY+kVY z(Uv)JnXYopBRvK=Gj;1w&yLA{F1l_w>qPf_s zwULEKxQgOagBfMBa z$+;Ix3l#|_(hvO~8nYiMIyLnym~YZ0n;H>Hq%@P$9|rliWy5dK=vkI*kj$m%_pi+=PU*86Ux-8-$XYSKP ze_b>gsSeACY;i!aZ{5VmpM$Pu@KCmf&jL&+zoSqRs;3-rf#QUp7kYS&{2mCfUADT! 
z9IliiqG?V!4MM8X7FSpCusGl$nNl-pxD+KgOZ@n(vGV8`%4^TCOo3jDau@)U zt?6-1TrQ&OIEGRS>9Xu2oP))!5w-jgwvW#bU(4BWj%Z#mcz&)PMv=1V22%`BE&+)1 ze$bRc_=vfi1zS7kHzN!ugu<51+7uR|FYxVxWB-ex;920FpI74xW~K%rL}6jj8m0I> zPCHc9%7G1Zs^RJ9LzbFuM-I$JoT$E5!ngx|L3@|IBq?#*Rn_MW#E?LxTnpCb-?p>PTb|X_4(`dZ@fKlyJerE(1 z_|3i``Ok7^RSii}509FOTgZAt%?WgfB0d0Q4foyjB}R4G2>~r95n@F8UV~u-IO@U^ zx8~=@cJ6i3Z#rKdGq?c?(>bPU069z3q{kqok=;lY3UJU}4FzeA0|Z5#TaCk)55@6z zc5Y#$0jWwk_P25U5f}8mc3(j&$FIM?ZLYPV+`2h3%}Uz*iG0cU39w`VV+1jo-8K~1 zPw%E@ygvIF%`|^z3ZCwp$g&lG!A9OzrzSj{-MTLwO{|z{2CwW#9mVk`akKX_Fw5A$ ze{oa(MGv_@C--v`VfJy&RzEj?kA8L7^!zV7mZTf(e{X z2*dy>mLPyeGSgg!{lvU~`3;33r8A~YH3Ea{1@r~6PftsyynB%=h-~r+ov#>D`#r}`FGI4!8_n!yb?{}cb7Yy~{IxszN5E#kzW1*G1v+L4;8C6E zY5xUOh@!n4`c1>_=Ff7+=1qtL-(79ec#=C^L@&(iygad)m&F_A%-jQY$4+7wpdFCc z01n%7-js;DsekEPMUbAZq11V4df(1t-x{tZfw{$KU{TjJ_^C>RXXQvtJad{ zSGvYkw3cKLXTP@8%DrKi^9ua}pyS7tkTDcFjSZ5hW50AuJf8j8UEPnDc}E=af_M3Vw24#-$}2b_7K$$jSgf5jytMBBTiQt(U1`PrDOSux_bHY$pjtka(Sbtg*C6v zfNTa}L<=RRSqg?6Jc=;MlY^b1@caP_IPV_BxM}Po&^UJ|OxuTjvC^W{K&ieo&?4ORUqMSuN~ z83G_k9N%hg036IPf;5@2=PEFoI_U;EmUrM_sWL!!-K8b|kG@_=+1JM&z2<=F%}>I= z!EXOLEWQ-bAz?bX!qE^BBaD$Gcc_m6j9|cZKI7 z2D~oZImpr+-#B;E{6Ny`2THnU(EtKe-fjBAda$dMU--BBW!=UH3DUF|b$|;H!o;s=DjGX3lgW8m{be0B*984kVJ+SA z(XI4Z^m_+NlqTd_>p>UDYyO!21s9kok_Yb86R7&9$FABAt)Qj86Fm7uwoxi(#oYT- z0$VA$MT4FbLeECI42jdY8hpdsTcoW?Y2l|MdDPw}20ss{7MOC0b;fuFp>_p~+LU7O>$B>K_W>vK_T8!Xss6;^ZJV3&58(?pSJvcRib?s8<&V zK?gc@Ln4eOjjh1MK8v?EE#b!x-HTBZ^U_SXvT@w17a60l+h|CCjntxX{q#VpPj`JB za#%Fpq?z)xebz1WI__TOc3ZS2~yDdn2a;!JF?p3vL#gZyQO80PI}+ z!~pFs)8ahQ0v4_hrT`1^5Q?|dbIYSi8m};I4qnqPi9MU`p~RZ)AbU3D-lO~(W66VN zRoE5UIiSuDMsQNJ6!YgW3&p{n_o71|*>sxIKV0$(`}&6P-75dK(CPb+y=LHmK9uPZ z!jvATuf`bC*fE2~>I==yxg2>x*GnZ6teZw;Ut=Ul?_c( zr5W*lbz3FhdHLGWPXMMxc`kThej3YQuOq2;!9>)CAT;%nbEMm7ylJSvPhpqW84dnB zn@l^>kEINsd{g-y)_FQasJVy1o=4?^r;&Xd#u09$V}kBYPrlgFBXOoNmI@E-1dM#7 z-rQ7eGz{l3Hpt7WXWrw80ez^ycKrj4m6hrKW8R+I`YrF<)?q@(4u(<%MfEeVCi;^+ zI&E!YtHtcs4sK|8{4Q-Qe0X#C6L~fkF_d!qef#fd4tl-)6TqtO6~&80Zs$~jP;U2h 
zo;%}M_w8M^jrs!Sz0qYy;}D`TS9rn=P@rD4%zR>Xn0KKN1`Sh}=}YDMMMO6{zovY)26 z1X~^wbYH9)VNpXFpp5@rkCI#e=4RYiX2T8^<6fE%K;0|^Tswj~QI$h&^8)tGZ== z3TDY3eFJ2~$!{&?kdq<}WPMneik}e$7N4x;y|Q;u&rJphy=dN@dGA@; z;&zOL$JJ+=)A@6W9==%Og-&dIFwCP}B{}KSF_XdDEFd$BK~|{pZH&f~L04Bm@odig zg$4y~-2}v~pS!Uzaf*^T=@jeM;do`{rEvPk#_6515)afE!1P99VLfF8m>hDH1tCU# zjQoLAIs91H&^)HA93-K0Ew&i7=O;U5FaW!6M;{-0xZ4c{BYbn&o&TG*8S=8A3f7;; zz{qb!u0n4FrG4T~1p`!S#0PwcE+D*SktaUOLl7y@#m%kCPr+R|_?4-|{hi%I+(*Wg zy)?QVRhib+zZ{%SoL%}@ci@<#`uCtI)L3B<$=TTeIL9%QqsEx55YRkI(Dm0jjn6|% zMUh?^fJ1a3Y|}HO0UUbgAPK(QHKi1F&ns)I{sH#g8KzBCl~Y7!8?0)(!{fyDo#hdl zPGOr05SD$#QxHW9Qd{K{YdHQ!KK{#zV+6tf#nHYj9oQXmbF^EC2d$IqC!%Z@GbU^h|&fE%anf+3EnnvfhW< zJn!aA8?1(39a6aM1lZT!0YOFlZDQymsW|0M$-$9~jWAKkaM>RLS4J6}E$)mGP> ziQ*fx|KL@ZT^OD)8zj!Ar&Sh@JOibvm~ZM8(G(gioroj&>$`3_+yr-PVf2aJ+pJq( zYWd!R(^x=%t{FgFf8E>2yN~xMu2bw={-dWlq*pQYSib8YQY@QqX3JtoD>gkgJ!HzR z(WWF(vJ*y3UxfrzWj(N9d0kg@wSjLZ>4b*SU%^Z;Nd}A+;xj_<$^po~&8s3kGa<}+8i3+l& zmKt(;Zok!YA}cjAVSEwqs(?N4}KoroCLznp3VT3~Az?b!m{Mz_`SS)*%@~}yY?hI>jH0!(tU&~3^LA^n_ zVR&iC@r$|wRJGORyN)i8M;TJCslyjzKk=MVddoHdz-s#Uys8mj@8w+}Z94-20-j9v zYeY8~uirc`;`#XEIpcglJicppb!NH07R|<6G_i>Lqqa8$(Dzk4wAETt@&Mn-+i9*X zkD9YzE1W#7^zmHC7rhYX5>SBC#u8QA8CEqEzC5x~y5)yGsauH1UNk%oa=C76epR8igmCiXd|)lX8-B(`G4^?K>P)wBJ8FgB70$cEF&M! 
ztyHQUCT?3Tj^_3hJqGDj*@si1ZYW|B?Ib1W>x);~=%{MykMGo(!_;3brv6a10DsyU zb1O3?vKpmG)k9}*VxdFSvCm5wOlKQcqVc+Q)iv~N^gme)w!b5TU3hf!puHi)Axo{P zKHjLZOA0_@m;grc=Ys239>y&k{msYtJ5g~ho_7%%j_{qAheNuLWZ#IJZ=HK6n7ON} zWUsJR{P0b$76G?>4C4pC=_#hJW(I{VE&wC-fffr5dd>ktN3URNl6Go9M>@q`a z20=g+8*^fl(!_wD?DV5FdOp)nDw6@kZhF}*mD*Lmw}%JdNYy3LDYyw3@%0X_Q0YT0Z%0&gexck9|o4-nRx3Gw?!SEW=k2= zUbF~3^pv0H(Tc(fnic-Mqb-MjxCxqnRdKIbeq2GsIEr{p`1p+@P%hZr;>1_ZT3xTi z5c4u%=lB@HLBdScEtr*UvJe}VJ21{3N{?z1{q#5=M8}vGx}M7FkS|{{yge@YHg2NB zLvr#D3?@EVaX5@PYq;#b+jNGhBW zqHOsRm5Hr7@Ax?csRISK9!SLP?uzCZ1~M=ah;XU}#mR<_K}tP94vTYCWo!+zn1bKf zD!zMMTUY?%^EA-xB)2j*QnMdem^7|oS>c#lq{7I{1E?EKv6jPrI=+q7jUh|Y(W5Hm zM`2K5(B#MeG8DJ|Uy{25n%jOw|ND?NcezBpw_d*KW=RAO2-|4(Hn#eC1=@?;BEy zkx9@m_0RMda2_zpzq`vVDIzSAA&n4jOl+AXPIn%>vUZc5)Andnow|vdo4u%40kge& z>633;t9W?b>i-AV{jUIY`yf&kK`8>z$=`yfbsY#;H(dD^Gpd)Jg}d$O(Ph{>ns^ZS z7wDl)m>qIz?h@sFnG4Zw0Ue95{{hG8nhCEoSg>Z}AKXv(%6j)u)K^S}X@8S%x2_$` zjDVwr%VJs-XokbWy>=!%aLM+m$EKp?5SW^JIHPMb*{pd9C7wBaTqG|y}shqF&k17>b zWvL5iD7ohFJNLCdQm}h%;((>6_+?yS?1PY{(?0wL%vlTEOxs6)@;@(jLxiY3MMZMV ziIn0+EX9-|O2J;8#8PdH8Y+4YntGqzLJ`J(qlmZZ{tO<-M91Qz@y$Pa$|V4L@X#p< zLNA0}`Ej*EwtRspD+i^02xG9Pa;H--_b-NR3@oB}){f4foRqg({nQPOqI;U70zpSD z|GR4cfA?ALJD!abUZEpvfkC7DnWdR;_NFH=i$-6a6Q2rM6NuV7; z6Z_k$Tzp&wa99<4c^mvq{or^ODROTB`a@YO?FD{$%l7~h+It|8 zqZ-2G`p*-SoBd13^cCa5vBIxt9yfJ;kzvpMctao`y}UMQD_~|p%BxtXg3Q4k9GweJ z+0FxSzcHXp=dP-C;#~pM$GR^>mU@L-K3@9J+%otfifXK|9(y%XvOG*RM;mx@VBcGS zsy-}mB`vV5JsX8Dt$9Io!}9D?hj?x!?Mhj9UlXflroWA@M@_!98GCtgg4Auuq~G*3 zMC$LA<#Ya;>PUDNK~PQcinmIk;A(s!1=|N)oeGD~Gj0%~#k{~un!oCt^Gk$!oyp}t z&yyw}Y~Qb4VXR7<9LKzmY+a~%31z1}^Pq|{h6~AvF{LP)guT+0uW)gf-%ovumb1sr zE>-y&rf?c1ye_zQxumX!A~8=)V$1;q*vMk>5pHzBiuPr1F#dU6z(eFou9tuy#0nTO zW|(&Z7!Zvma_xb~MKv%_-(qKdt>qtsi}>$D2a(${#?}T{OHAf*<}DB1OCj6T&fkQ> zT_{@5#~O%on^YwfRN%`f#=s2x`u(p+c?$ShX$7@az%n#sM@=p=;K8Y4ErOe-rucJ( zRa12^=&>Mkul1T9`fn!6Kd2EF1AUfR-dZ#tT4>SdT<}}9>!=6_(Q;O0iFMK!9V^b0 zI+n=@bPe>Hfe>q_ute>ntrIg}edY^uH;;TMxbMK3`>hWq7Iwg5Pl1wX`lJNRXM3}m 
z`?mnG>^ETMlXMnlV;6EN#3vkGnET)F?G#b~Z%-f$xbF2niVq&SyP7VwZKxqa)OAkAs$YlVp~L9bHu(yNA;^1 zc`a85YuTf%mJ!x*DSSsQgI>DB67VUcUDnuf4>9Mz7oiA%HZ5QwvX}7wV zh*z4@>2UOLQ;HGJU+bn@DTMM$h|ga{2_Mz^Oo;Lrg0Y_KFj>ZrTxTqxhKGGDqCFb#m_!W?PRNj-R64^(ufEZwEh%_fI;SVp%l zwJvGqVcediPg^e@d-e6>+&-Z}6fg-ON>g zMY7@=05tF2q2O?q$9Qv^SD5zx8D+c!Ooq?g={y*RRQ(2Au^cDyN|3cz$WtSc+RdYx zj*vK003T>wU*kAbBGe+!B4;s7O@iTr)x_HDBKU|!-F`1s4OlB%pR?}fg7azYi>44} zxW;wjpEO8v!K%8el(H_spjDxvTy?=_+jnD>nTC$jM6z{-g2;?iBYxd#n$LVYw< zYlRM!2PPUq{DOK8e$#R`e$rFGBPyP+!jYE}kltk#+! zZT!}N1RmvyiVrOK;G7W|y=cxk0hhMK&x^G-42*q8TNNUlL_<1sg+agQ`MO*vyoW|R zKqqS(*N%##X*r1PH#Zk};UDqb*p;gC3Nw-0l!EthwX&oO=^ZRYb$PH7oH`VFyIX|5 z?e2M4E2&GgDs&owMPt}G&y_!Wc08mtyvPo0H5*CsGgY9qEE?c+oj%)zjr2=pvR-Eu zk@(g+pC$j+CqD5!i0U0vT#z-roR!?GhxVfQ(%1-s{Qr>sFn>q6!lF9xX&Y3MNP96` z&a&0d7fEAa8tvodx2WIO0Tm)O| zG{0I*QRyUbCIt&f0(yBG@8At0Hd0?6p(6WToRu_QR5;m2pFZ!%(~9B`kYipClvi*+ zM9w@uB4sfGik$x?Kl*1O{=dF{@fUqms&czsovgLL&%hAnl|)XSU|--3GZI?uk;>_4 z;RcxBG1zKIOp7`kXGt+3iZ3CkPN(1)yn35-!kg@q)?3&Mb=O572e-oy!&N{~0&bH9 z?+Kw@$z1SnJ-ps>@g?gU+9Sw-+hUNll+KAE4Jk+Y{f8kk+{V{*g!~I6tlfDN^Z%P)=o# z7d#uW-=sXh<(T8&{{FoO3cWnsG_euVY z%yYRMn5qh3ec>e!(erdq5}xIh`wj`Z{6fJ8AZfs}bU=}#DXi=GH0UNLXYp=nLO+)! 
z>nj@SvI6@92ap`%v60G9CTO?a5sQOCh?l5*+Sch9G>nu)}D<|g!GA@IXU#gBA zhxE&|SWvt%6F@a#ITWWJ^;;VkzL#wd3DP$`6dLl(4}PMAoduGbikF0b1lrKfkOU#; z{UEPNZymiA5F>1)1P7Z!&y`CRj~NKHt{H$xoN> zCQke4ZFKhQ+fmB5zJvYl6{D(w1*$1>c;c z)>Jka@i0zkK;uEdFwX-pGLRs&M_0BQIVWg!!dy1_KSW(@Gf%=FCuuU*AY!T3+B8Rc z9)q2BCKNg5;1M{dNjkUjy~oAbxYh{Lne@X}T`ZvkcF*l7KQ+8?cC8_0^i`l=9N5Rt z>-<#W^_U0Q=#QTrj>u)e*!fBGMFJjR1*5UfW-lg7wp%3Z4~5m52#QW0rz&EY!Vhjm zaQ02c02>$|DEMB0u6k;n>WuEoF5&7*6Il+vfIc$lBo3gy5^o2Y0048)-^H-f({l%S zrI>x_Y4^14p3_-fXJ9uj)#|+|?I9B=VMt-dcL=Xv^UjS6849aEwH=Ll_y38Md!JT=oh?vs#n%bag4?|v0)vr$+QygcjBfU2<sV@8rmQ^o=i`J1nf%CjyS#otXu5?-ZF*TZ`)(djngSBPc!@3iN2 zB=E*E^{UIu`v{|$wuOes>v!u&YB}t7>*ugLqsN`5-cMX{d?&uco`fwWWw=EY;b>5U}y#9boCzi z1Rt_ZEWmY*8gWD~e7)Y9zEOAQVTMC>8+ZzH5jaQ^b{)!RM8l5cz-uau;t97 zHUCQ#0df@aRe`Q5NygHruS*_K*v|s%1dodh34Z68x`DWb(txS&Y(Tb&UPep;&&{j_(LJ#*H$jo^FFAfY zI_*P$a2#qZhGBtO!NNzVurE0CI|&`(i{ZVRmrGR*ju@iv_3V_DD*T?0^TKcd7|qod zW;f-K!Y+MroUWci&ilG6{(?}VIzB|d>`mmu@WhFuku?Qx4pE)}#zk!Wf7sCdtLpq` z_4$w2du~zF5+W7d2BPA$IASne?%0425mD3 z7lmB{AM&YJTn=~$BM5x~J4r!xTheqWUInIr@mM~|xnz%v8$LdFLbT#rqi2#D$JZa1 zdN?#ZU>Q|S($sV7C?1+gA)HE1AhFcN@AqGo8EF!FT&WawIhFNu^bQWZE>(ZrQEZJb zgca&TFbBkkbt(f63+ZaG2rRRdgBUI*PDan6(~>zdp^w|F=>oTIyGo#kgo)r$+QN1_ zh6AwrQp7~)isjj@Waxe9jbuuee5+x+fpqc-O?_(8y20Yt;LWyqnvY+~;#lW5j@Sec zb7<)F4uS7^$l1)IZl+eP7w?DEcA_>^FvLVk4>_JxMeDGpLNWg#6(^1Pa#=gM)#9jSr^V+i#m9r?j#Eg!kDV7r0ZnDUTYhd)^(4zYCbt ze69yATsk!G$COcsP~k(R#akF+HPmw`c23PAY1RJ3&#wqxCzZV<7by)JDO8=v*0qiH zkPFE9t%eiHM2GHQkufkXgyHZ-nN(<#V!l&?BT#alusv#+*|L^R2=o{4Zm|Ux(Uesy z7^53<>Urpu#tMFcmzRV-1uMgpA>!x6m~|QSs() zxm?1BIz2rbfb%{q(z}aM2~_2QDOvRl{KJ57@xy^&2be4kphUlW`?uR*g$7W}{#;>R z>t+3pDueaReJUtJm1zGf*!%x4BB*;rECkSFW0IN zRwPtM*(A;+JJnO<5tmVC($(L3rO~bo$87!8bOPIJ;w^U||J!C&U481=YxP$aMs7|o zVozSW-e`5%;*5Mk2uSl-h{)wHiM@92uDQzoD|toGD|1*3EZSai6y=PWmNg|k`pqMq z7LPFTpuek=eBpj$d18SudO^p8u~UN_V9WqivgecsRAr!Mcv$V-@NUXwc;h&t_r=AB zcHNe!=+VBWKo(cp0X-MVdV}ajE~m#|K6~7hr3MQaZ(tOCMC&ZpB1$28eF1$mXJ+yV 
z=66G7nuA6+lk!4XteN9PAD?viiM;<`Gganv3L5W6^bAFqmnpXn`!>eny|Mzl8qxF< zP38>tu0_%!cA{!AxOTQgEv~Ml=Z@Ns%H6^D47Zz*4Tewt`)kEN&y!IBBQ8u z4VSn&Wu56JJ-hU~e)_m_*p60@CP!hR{yQdMKskWt+=c+A1!M$qY1(!jc52I~c(xrG z488t(yCHd^m$%;nj?g|RE7U!o>jM1XpUjUg>O~IutF^x#*iIR>UpeD^TGH|-%RS4U zd_)z^{-0}S#sT!7vGTFK9^WQ}GTyC<9qEpJn;9fA>lX9BW z46lc+)gwe4iWheOhl-6XNHlWqMfmOj zB9nL@3`VsX0AG0|eS5-h7HlxiueA04iTd)sD`088@C8*x$8txGSG zw&u?MF?;q0Z#; z>lu2bc@BPj`|&d-dd;KY7OlKzERVoiR%{Z#iEf51m+0AkW=7=als*(yo07y>LvhD_b z5*6C15DfXj*q)Yjqyx@{-&?K&i)A(p8rj0MlA~;Tw;9DosWQPmt$Q^kMo53lHQCGh zqisqx?4{9Vru~|F*ql76rrhX5H>zyi3O9hM!fyT|CX=H~gC4PV7Js6IUCo92!_IpI z+9ki~{_|k)66)2D3F-_{KcQB{K{AJZ9SwUScg*%5p(Vh4>z}XkLgiYzWugvRM_}6> zY&3wU_OxTeJv3-CNa!zLozEdkg?+x#O;3i-u}Jr>e=mH|WWic;S0ygT&_;9_yW9oP z;8}tA@1yMb$jd-a2IWFrA;^$Ul<}t(u4~VJ%CJs;wK>=;^lF&T*Ay0)in76H0`iK{u6 zGoFgqKiFM%wfx4##2K23=)KeNKt9F=rxq3^Fd4^*|p)UIYfNCWK;J=}iWLx#Y z4ZCWb@7pA#_bI*0S)U9dSy|mUjyM;9-385r;P`16zX0RB@63;pzeTP9YZJu&r0+@< z(*HP2YYK`-zwi;#^A3uGd=JqVAsw`P9tSMRi=d8;=1g$co{rQL>z({O(+{pYxRt52 z3H->omYR}Q`Y?4P&}Jr;76qslFd!#@6NGa8X};-6m0`kB9f0A$u;%(Y+Yz>c5f%ag z1azzbk=0U3a0YHlwjebL2fY~Er+gsNfVtuJ zKf~QVx{Ob`U-z9nX)4&WP_yf_=_r7QZt74qXex%g9t-16q?|%wz?D~S!$D>OOI14W zD~<;`+_Nb$S-->g7Q75Z3%k%_s{nOe9|9PD9~HBV?3ro1P5Qhwz9%KuW7WyRJQ_oq zp8zj1J}g2e=WpR^h~}}no_^5Sn&`~4Ie1cdk;I<9Mbt|Md2I>%LZ=qcTLbG6tZFP4 zcuGeF?vYMk3+Il1j(@-mS=efZr1l`VN1-gAivt^pHVu)r>O+Z82or2GGikSk6gNVUfO*2MN@Y zr=IH%U^N(82GF3wsJv=u`q^)Vj6T$k8}tNVbZYhsyB8@8+86Rl3VeKWVJ86JVu5`& z22AK09B;04m{RUt*!u1hm3()HjJX!Z9lXA_nx!D0Lx6q;m_K(YR^T_Ng%9)MepbLB z|Fu(xt#}NWj*xtW+NfsQ_rS6lDu>4fBK6j6UAc16fqws2zhb`hQrB0uYVl)q`lz+_ zt8%^0172*Js8_CZc4oPK{idzoPSoZyKHj`@%JVhgq-(wWon+M6KU6-`2T^qRWO!|E zCTC)DXA>HH>-~~5Gn6bdT&k}K%)A9Q`ATdrXo#yfctAVKA6~Q{x?byfQH`0RyKJ?? 
zy~Kx&X{oOfjQy<3&~K#zsTJ0+a9y2z%C+{R=b<0%6jKxP?Rp$s+|xTRb%&ll5wLi5 zzr7inKz1P zfo3BK1$!E`ke8!BI*FKuM%ir&)*C>P$D%Y+B1Jj9Sn~XjTI#d`sQ_JMj2>}L|L9IlRE7}c zK}S!*ncK?ofz7>fV9(G2JSh%x&E!X`9>y@dwHQJw5*JF z4cV~Vi8=EK`6K0{UtBFAH)nq$JTWm-D`>dWV<^4iezhR+g*Om z1JwEQ<7bW-9U#Zz707qh`nfC|cn zO|$%S$UCv(51^af7aa`~z_d5L0KtHSrpVqden#TGO&KC-3t0!D`6*lbNzb&-yBzDR zt*HC-9a5 zzxLYWI{UkPd5O9FYqOSZ4}&A1=(+&Zonk7w`7Mn3VmU7;!MJTmcXzLV(XRzeNL{g0 z9SHwYr~8 z`~gL6tP)cQsDlNt_m38D0*(h&=e0OZB@Xi8Up5$^gf z4F|}Wc3g<~3KIi*CGKVTir_xwX*!{CHLNlz$lfq5JOta}Sv$k^3?LJeCSF_W4dS6| zW*P|Eg&P9Qj9Jk`&JHZa8acDr^A+zw1%Gt3m#mi1>}0kU-Q4(BFPnb4dN>3KQ!S;p*8Zq(;V;;&6 zS{elGM&?#p!^h5o*4iiWjd7<=NTU8SL9{Qfl!$(=*X+J8FL=mRYTt-}-W*hAZ=~F{zI( z^n9+fKyLDq3MX1Miz{vGF=S5 z?>~C^@6*sJ1KEYme|evwvRZ7Oc|Eo24F zUN6-R?vVWqaS@!rj3L2se+p=iQJ7bT3HOvBJj>aS7Zer$JfJwJ9{!g0Xp=^Za91v#l)@E|0UmcK5X*2`BpYag;vpK4Ri# zOgZnygu`9DqYG;IwTIDVDHt+RjKMQM!h=q+5M{wm#lH+OI61&Rk6g%l=o;VC$@t*1e; zogI58sH>~vgGJT5;eH~Et&H@if=#Kz|$kP*jr(ZPI)H^RKNR8<2 zD6HQV^KH-v`bxaV*r>Qv{KIQYKr1;V9xT+kN!O*Q(H}0QaL3pT+36Qi%vvc0c-e5B6`-|8eT#L8XA~qX2P+(+g0evNP59fiZs63jR%LTBnAxuUAnG z-agZp&KlOb^ABY4a@)r+NJaSKKah#Thi!(yv28Feupg+R-u-As9AURJrO+;?nKd`uM0a&dHfJ^#uTnfoG|f`3Y}vk`=L1i z&9b#a52aAj(T&O^Vug<2y!t}1&&zOUBx%;~wAHw2_`pe~<-(dPg_I_&J0ck-l>cH@ zIr;&GHB!S@!#-STt7~5Li7nfY^|e;#JM};OX$kA=FKTORqbik^&xWR%iXIyt?t8<* zzAL;^7mNpkFaqIWNGAFj!nbv$&8V_m%SN`K`~VpdWifa+=NC>!ztwDu-(EZQW-+y} z_c_S%A<+67TB!08#=WAMwUI-kyO-`e>I>bQdwNbU@In2Ju;WLG{s))~z+xZxU%=p= zJ^)LCYQG}Dj@@)ysn-69yfBpDR&0_MGtUnblOg1<->J5D#&?JHcxOoqoOB6)9QLA(>N|` z6KO^DNn*G@lz31#-@Dy5&Fp3V3G0Evn%8~bwO7*UT~IJrIOkj+vB1JE?(;6n+FBf} zv6!YUkLU7RG|H~X{#hSEdY5?RmnmDqznU07xXGicip{(WDlR{FuvCx8WpJE!$<~_I zKB$>vhUY`jCA1UaB>kYhXzJkdhe?L?Qz0u|<)GPFX;bzQI(H1wQI396txFXs+0zBe z9`kk3EbZFR0U6( zL7RuUT*H1V+jsxUf;JR*DTV-zsF6Kw0Et5S*EhNBJqjzZu6Q4UhM}*(bq*!}Gj@>4 zh%7jQi&K$jz>}iaxhlyG6cLWy(xiB&)P-uC7_ssgRjh)O9Q#1r>=4_lFAlr#%TRE# zS=iZj19(*aZzdp=>et1;nLN_hCNb7>?@_DZqwu8nYxqswC&nAcA)%gZKjl8$^!a_) zNt0@lPr!jzvAsI1PQidMQn|m8g~P 
zA^Q@2J(6k@TX3(DM1BBvCD_Y=1trbUq~3hXep=78^z9!3gsF(Ck=Su%+sLHBy!LKp z`@I0lWI4@@Zi$?Q_cnzte@OBKUCMftqeUgRCwD9vD( z7QR@cjij`=J#Y!}fG>Z@XAa)C_y+=+W2d{X&4fLf1!hr)D^gPf3o?NKIM6vRNvSW4 z&KV&tMtJ>wW_qQy?WIu-^#$|4f_F|l<)-CXz?ke#9-3n4PkBATZhvXyY~Ha#-nzja z03oCr?&-TPj{s+F4{bN{-#_hJ@kHL8z|E2wAz5v&!OEBXimvdx!e)nCXLN1rml2B) zrMCL|yddOE3W15f%Djx5mZ~fYrd?id&j($df&niRmWl6D2JU#%QH(o_%z|JEPI!+O zz_A9pwjCey9|+vhG=C-s`yk&(7eFGi86t9BP4fhWV3LLTt;ynBb2oph6!${k-W_MC z+wZBqL>mGfv}+K?@2?I6bxL~!5|B_h>@wo^ zJ0N?~jT|Gx3W#{1fqs9bm9Y2JH?hhKh*eN27tdmR0jc6Q-9I8@%W zc$ksfYM{bDZ7v>tgJfIFDeZieACISq9_DesNxYxu;<}O?9cFOnC>9-Dv6>|*2b>~@ zqy>->(HX_*PS>zYGQ|T~HD9a>3~wm#yg4z5?w)#&nHJi43IokpJ>l1|z*TjTXFv0l znwy!YO4%B$oUCX_5RwRzYeg~N_k0weV{&~!!0Je%wJJ-F#TZe}+x3j+G~=wlMENxQ z>G@kLd-s8X3%YL&xF-4^d5Ksg9`P*RkOcHw4GsJNF|cr_0nStf!c#-^mp0BAH0d}qz8#yi4*=Zwi}F@zF1Hl*_r(&) z)V>D0aqu?5(C<@LdJr=)9T!~Hev09^-6o&&lzIEJ`1bgCd-JO?eW?-P{uBcbh~Z2s z-+ix8XWt-~+!?npRkgtFP@P6feT#GS(%|BMxf23>`qFOpMc;RI}tpC=JHYf20RQ{`=Q z_rd3llKY#-bI>KLZd@8B&z$%5gF#*@n|4-VbzdCgWP5=EAVPvc=?^G+|3L659Sn`T zD&3#4L5T4Kc{4F9;ZMP`^(1z<4=UsGXbYm`=Wm_K*KXy%8_o%Z=&lN(9y=6>oRCT> z?Ljw_aMOLafZG)gsJ=ZPDkoZRO4d((@rCleWy)41wh>&W530{FW3(zQt{(S;3XQ_2B0QcT3{E^r2*+Bl)7 zi@E$%^!8)y=qE4Jp&vp!{r;WxOT){LRlpvpUakuP(#JrRYM}tYv0~E%v{H&J(`Tj} z8;{p*9-HMXT{-W2tYP*cr!cD}6I<5o+goA!wtFss=Nn81yy`4j>Pud}7Y}}b7aKG1nraJ5=sZPL;(hM9o#J)7xcHkPG4?vq z&fygE(9TTmui$=b{)=0U!VsoI3z`E!?plpPLlM*e}#TVYDn-KLF=~)cdfdC`K z>=czYA=G|Xo8p*;M#iRfgqG+^Eallz=NVla-HpQ1X<>!&g`_-)DAPIssS!BaV-bJ4U9uQMl-R_G? 
zKoQZMnlrWiRuDH~Y;dmP!iIYl?i(l#$5Za%nVz}}Hl(z`DgY;HuDj#!DDR3O7p`vp z^mB4@t|l@Q$rhoS1pHhAL9ScS<_AL#z$go%D{c~gk9wO&y>@Y6?M-GNp(bXRaV0KPrC@7IZ$NIV3e%Y%cx1T9A;!agyX23Dn zyxpJ9e;_ptFU|llALnjNX16JhXKvCpKQ#94{0Uu;P{lXLE+soFLb&HJBa7ARXjSU* z?Ua@P()l)NwA}=e4+&)LoVixvJOF=E6U$`;mD8ei3%r=GGSpq1=_5?$f=adR7&PYx zn1+(F^x5Xevr~RGr?Q^x2Pyi{WjNTu7UARIcz~{wf<#l>1mZ_Sd|wSTz7#Ko`>Rqj6F=dh_$S^%baU!LJ{7qa+gvwM z_YKbPYQQ$y;VEM@qV4s8M3&1Q;qR=WKGwLCBDO118jidy7F9usll7zapkn<{BOHYXGa^0tRo-V!{$ThOOn})`ivY*!H^goSUbq3 zkfdnFWQYc)d3-)tezva~kRAk%4owwT#L?5>-LhcG7RFd`(niKCRwH^O5Zo&cuA^?x zJZQhn>xDj=ejkDoosuY;?Rd&31`nXn`PdqH^~a`%?A-SO@#~d~l(Srm6nP44F8%=- zAB%(-@Ljf^>n>19xrdL<|GFpYEAqhEOs~hTY;z0mKKTe=Mw*YjMmXP z<^e3BK*;O?O&Bd7ZxD0C4KeX6ZDg&tExb`7Efwlq(6Yoq0YO2IC;cK7)`?9C{H~V@2;9}Mqr|H& z*Cr5`ZZ7dS^j^<&%6&fo>aB8taKfev2?|;+xYh$7f}sHuo{wr<>L1>*5GX(I*p;cb zB)G&2iszN-zQDjIx<9peu{|a7F!hwm3G69gvisn(s}MieE9n3h{h`4f`LeHj%OV-S z!2k!YOVCI>)W47oaK7oNa&=6a&*(%)AVDZtn5oIcBg<lhdyUna^x)oWy`hYkO#epR z^4@%#d@-X#qS~l`g@Q}QKUpuHXwza$g0wOID@oFmQk3PEN`t%b8c!}?^ilYe%ceBQ z)O5|(&u6NQD-t0)3Z#@78BJ`B>89W(ffjcIC931$SIlnMhi~7ucm5SIf%Ra7n;W=i z!B@e$PFz9oy1Rd=$0HYNVcEaJstd`VaQP?i@^-3o(g9xc!gvUo&UkUMh~- z_&~PDRL*^>oH2xD+hj1)IVdsV4<0BsS-(2v$EtX?DNxnHEMr*IHZ|<|F)#&s&b%>U zukomYG*xkZk*)y1)p{N)TI6~nySd)foL~Lz(R^XCv*X%yO9mFg_N-X=3vn{pC)rIl~%ThEYr3h}g@RRe(8etL8|ax0LK)&nX`E?{4NT8#9$HQoRoP!M$Gf?$7&=ziXX# z)E7Yvl~h@V*!JPM+N9S0UDrjR(JmB3{B@`ZU5E1P3sK+Es^(W{!Pf^YgGbA#Kq5Xa zZLT$ROh-X=g`OCBr0|El*Amypfqnv%qu>F=e;|o4&cC_lVXdqcCBOgFdM(~nE#K-5 zJ9`uXA`p%qI%LBvHk+TbNBf1jS&{D7Nsx?M%zF1=W-{9fvuGL3@!$WoS;Cx)aJ#@9E)t zdsTxUb9qV$h3hn}Cl2k+d7{;kxd# z@0-5uVC!HrVNtn9Te_BPZZr$bP!?81*&oZlsyT*2kT7AzyHKIKVP_gbXLyHBNZ%FC zczOSs2<3!}U@u+t=u3-sXfZ0r4CpI$g>hk|P&S}nMa_0BfZIgt;g?ipqR&%_2EAL5 zYs^}N<;Y_X;8mapLaK5*0WV4SA)oYlU7N&l4b(0m@1&o2xsB*Sv<5CPA4M~mQLIW$ zww;C0pMt$Jbs-PRuAk8UDM}7zo40^qP-|fE1yV~@k807}IP@iO%qI)ucAijCpTqIE z3aRz`=N`P#*r{j=zJJLtgNak*e)09N*pv16O(HA-@A=KUeILu+2Bn*rT`Kk3tgLJ? 
zev@!Q`IfPO@v?U63afUtaQIhhUdG|zJ6D}2;ZK!z%Q-!D#>Hcmr0%AQGv&YGdB(P+ zVBrV4sVTXgixjiPS(%;;uh#GWDO|nNbIQ2#o-xQ$#g~_n11x-bkxu`{wgS?oab9VN z__lLyc3#kLk}1XJ!Ohc`l6VZ75dfy~has%6fEzsR>U7~B1?SZ_LwcNKUI)86ZE?Da zF^&ZYFKhBJEd58ewLItJ%NncDj+D7$Dm=d# zXDBMYb0Y~({NvL03HSUxeH3xZ(lXmBv!qAj*b}~f1%D6SK6Yjl^(net;y1CmJ8#V9 z?5dqQE* z&DS(#I$w247e-)e_wGdU{U2z);?}QU=M9iGCYCi2ZAEsnTycnL4=x-FWoUvDbkv{D zKm)mW!AzwbDm>IH-IG7~7MB^H=STDNES_v|`KiI@wp~y$Ognc_8OqeWkQCDqnE9c3 zOldO>ZT$8v{8E)pDeaC7q&koHVxscieV#Tlxe#0%q@?GAR7iy5C=j7Oq z*tU0D&rj-RtL=-HDv;>oPb-HKaBq{IxJA;ktvE8S;e!4LOs2+H>w+X66Ab zRUGfDm!h;tt|4{ET%eja@NWRkb2QYr{ytbARv6 z3C|O*a}fs4d3h_V;8-kg{O>nlrT37nYg07Ty*pt3;YN%$e`&0LfgF;-w%4Lq@az&W&q64mc!?tEZH|+L@vLvT@vJi4l!-Q zYrQu)iSU{AE}>a`?Dm;mO20|fyDYpr-TRQTl~=MKcu@oKZTOK4o+X`A9UoCxErAQCJK^~-Qb7DR3ELC* zb{cK=@k$&|p`_Wxd-f^mEXQ+M&|N-rb&iv537_wO&jUGGDGM%gi*u_w{)7k!^QT^E zy_JGivwb-{@bGmY=6(wy*=Md`VTla8Ugso8bEZFS29zifYh>fNO1eAl=hZi;C*KU{ zvq>H2`H=gn4#_kh@O5h@ltYv8^;pnY5%=|$HxIzyqsRX4I_txnRn7d+bNeP(MQn}q z)U*cq#Ip-(eZq7&PhnJW2}tC(8w3-?REUc#cB&)liC4G1+B=$>Q?nbhS4V~9;ZcDq zPyhbkSiS$v8;Jjp*+pT+dv3p7GzCP~hV))^{mS#Ds86W1q<~~^W<#vna!$S~xuvk0 zQx+s@F0VlEY9dP|lG&yYLtN@0elS*s%y0ZDj1b`5+hpwZc}h{b_3H2qkHJ5^i<>fD zCi}M?GtrM}3JxhVQ)EOE+{N+g>r1AARMT+TJc7BiTGp4k#0IYKW?#a@Qy)9G3+}1E zQs86Zi}1<&2jYa4JObAh?#--FjWuH*w~L@#$v+A7eOe}-RV$dmrDC6Mf{ipm zv&Wl@F)9hGOAm#E#R^hl#_p|>LF!IY_S07Lj?+I4m%|PUd66te2wCM3kr^m~ika>aUKN_9&f zlCwAd3a2XjOqerwRJ!kJ0`no%azb5ZU#w9bwKwuo4m56~@Yr4N&3-dZrrx_AzHA&0 zIBWIIeV>>#g99o4k0l~ndIG@8+M7LGvVnXNqaR!u&KLHScGLqdRQ|h{;KDD(PLKU% zN^>hsqszBn@`aZL(nilfLJN7@AN-WO`Fw0m%z91;(!Oyq*&x3yQ~f_GGxR*|KAnG( zZUu{JJ4e?`G^N_Ndrjq7-sCCs94JI!x8qJdvwSY3B@^NT4$E9XrlS+==uMmG?>1%1 zZl!eLg@t>F?!8m)DLS*(Yn3k;^48JD$3l;7U6fJYZo*&G`kj1mgn1)5K`rcqvaT&w zr*yX>$oMdiN)@``$NnM`EUI>id0*-uNdK)_#jr<-{G-fWE!2sQTJs~48h~;D2#!Vx(wH zgc&Pf$s=t|^@9Ch#({B}g$c_`82yu4q=^NTG)t%Q97{XO*9)89#i?F zmNQjKIN*>OJmGUF3wRbL*^ zueM(lpr4b~U*FKZ&w}Q9w3K3O2opr>14UFESYf`nG;$n_6*tQ0(s6w8Pyyvdf8Zw3 
z!Z}`3|82BNmcRS8@Rn?r?35|dd&am2CeRY_fQ*|xg{-A|m6d!2D=n7Q)j{IXX?)aI zt$Dq|O|!Jr8LnfShYO9~3GkCGXDA_E8K{h2yT7;?e-zudfPnL?mzfOixJ`hzfdI@y zruGA0hz$6v$e99g3K>bo#mQ+rH%WQ2ulDit!qT&}(%-GQPtOf=YUnUIDbR$_+LIYq zrWhB1ovD5*$i6MFolw4?Dlr>aaFSE(0z#f&ksqObzv9pNN|?#_7He7nYN~7D9920A z9@WO;n$3Ls$FBZ$JtyD;0do3FQ$lLis=e>q{s6D!q|80otju1)4+iIqy{o7%^d#B} zToCyYw@*y-PHEo$tSk9=SSn0&*s=e>GB5o-ga@`~fd$O4ZeWN?uVu=vLLr}9n~9! zV(5zIq-n9+fvlUk&A(c!!#DtN&)7_%(p_lh*P7}|eM(VIYZA9Kl48%EIy?K!X8@rb z_(bAD-pX5=eB*WkhBby{+l3THT@RcQYAu>~OI~vE&J}p`V2krtm;oEWriZU{>A5h zRFr`N$`3J0M^IL-O#Z!Hy87p}VnD2ywG#i9RB+;>`)_+W3QlcR!o&5{D$v$Pac#Vy z!~BK;q8A#a@yC9qH$G^-%XOQ(w@^Bz?M$?~JU+yyGxO)n2;f5VM$1rPoB24j1(hwn zg05X`PH=-4`cgn(em^pU*StnkzEPyyAFSH zdMSae#Xw=#WKLUL>FnB}_1eMY{^V`h??WX(=6GtCw#dLzfWG_F|LlJ`g-JXfcpJ6!>fH@`-G57CmABm*yYbBf z6S&WruJ6jc{;?uhbx!7+%qO3B%lWSjk$!nTprlU;ysY+GsI<&UhCBcxhBos!Qv##T zmZ_<{b?>|o+-tc0#UhWl^JqoOZNrdp#cNSIqvVi8V>~d@QIy$JXgi`6>_k*Id~LA86*US8F0_2 z<0AB6?@^CsEi*K7pT;S+V==ch;Ruo}Z7T3h=}KDfbw*y`8z}ElA>1t#PWF8|duA^q zh=7mPVgB@dKk~0f=*Mr#JiubOqEg=bFo>xoGZ#(T9`bnPbz+_%=;78)De)Ri*Lm3H za8#$KK*;6%CPxx%=+HHxVUiqQ%MO*ea6|MWt}e$eNvq*o?(LnGJ))}0*3sU(L&JdmDDB* zQhMYu-=4QDa8UIpYRqvcr>~JN{#S?JH>Ryp^{gu$K*MxPA#2+?m<2?|uPT+d>J@yGa z?o>x12Sb|I^+l!G$hJtImnxe>5Kaz&yd>n(XFrLjjvCS}4_`V%ZoT>!qBe0w z{ECr&{!R9N_1%R`7oNf=S@n}tyE#J^%Z_6#$(JCRq{vrqZu zhwE9+avlO>IV7|Q?G`w?1Rs044*_W7L&3WZq$m#cY3mo<7NkOWF=GPXm`%nauAOT| zJcrOQzgud4J>q`zsH(j;SX++^Vg}+Od9T3Tk+V-h`YyeH3eJsz9n8u_SY;;KC9M-2 z*W|^b9R#oF8{Cn3^(}a6Q;F)g_Y#OBYU#ns&|Fm0FtmEvHD@YM5rNi^HY@#kH(l^u z!`=83gRG~kgD$kw-mZ@u@L_%;Yx=@!El#yduhEDP#+1@W=eoe4MlTPilTK=i(Hb|> z*_F4mWU~%Hn9*m&JUxrv6I%MFTDy9T(5g)vg&aysa*HmxRcp;alq!@>(%1fcbuuGJ zV(1aJEzy+NH#YSy_WlJeSxH0%#NdAcl>Q%Ip@movScGT;m=F0|QkG)TF~&F#oDH04 zQ9)J9vp*(SxSS<1U)4Eh!t!zH?B33Pxqo2|wr^8rFA@l5#Xn}rv8iI|9dr0zDO@p} z`8-CzzF(O20C6Kq_gX)j)brN%0jVHiIW2uu2ktfI5@T3WdbCXjTt5E<&X>)i{77o0 z7WUVhjgMZFP^V;zzl*<*)VtBLD=zUK5~|CwX|p*%{#>T80*!#x!(zaeCmF#4)PG$e z;<{NrPJw5m1tJ@ z$C1qW 
z&fCuWIfBA8*;kxHsGG+k(n`6%fOqO|O7dHh3UHYr=XMy=ftSR=W+de^ZAJ+$$18A+ z@sDIBs?p5{Giiq6Q|&2nv)da~vMzLnOlm9ob$xva5bMo8ceV11d)}E(70pXc30od= zzuLvBo!tz&#>PLf`J74MzZW*JJFH*+jiF3GO~G!mQGSuTF!I%+^$DnVu3nzYb4s=O z&W#P(FqyZsie%r8aFftQo!@W~x(^Mucn`2Adt_r&0GBVGyV!V}Dm8Nt_7hpNwQgB9 z?aad~A?Nyi9aEV%n-mnfMCViD{LMY>&D-k7cjv|h<`!nYR(I)ns7gqf+1#EklS+Q^ zSEu&BP-J-O*M%x$t@uG@TVTd~sF)g>OZPsAGCN-jB_x+N7kB~yP5SRfNi zlJO5E0{86!#}7!?wfxxf z1?LaIXhtZpy>QBV(d`Jjsdss&pv4fgN03GZ-TabbP+w&Gu-~}QE#YnYHL6z%L4{?# zK$^;_XvdoAtiMdVrZ@7lw|E+Y`NMmmJT1M8P3jm^C;bK`CK8m^FNWo`#XK&baer>m z>cWUD|Giax^?3imyVobQZOo4G>yQ=Z%h~o6fFEUa1Yp?&PY5!ip6Y1eCTs#bALXC!24|;&6g=t^l2c`_lsgZq7q6n5%#>JG)9Ja8Qi+)OZ8XBHM zzTadYo104$yIpP6xy){&8=Dq|&|owZp%Bj7qDzGnw=y*{D3$wz-}QXnbAH*na&AX} z^+TJxZbQ~~3}nS?KHgZ(l5OlChy;cO+^tCLR!H|*Q4#Si7qWb>Ez_0C>mr9G3?Ju; z@T-V0i%2B1vn@-QGT3$9ok=u=0ANd6sB!;v?{i3Si|xBvRJe7xmS+ZwiIg)-GIYKV z5=twc@4`9@PPF`Ru)oPtWx*L^#=G-L_?Z!lc#2a=S%V==7QRtI#dmXgqHSr`;PBWy zBTj}g6|GonU(>7A7+b@mHLM$A`vJ{&^wL0re!Q_653-lkS7Ix}Y2LAQ_E|0CTXAbn zPkxWRX-RC_zU`A)5s;c?qWNWJi}#*UF2;wuId58u8Hx>oYx z;s^;2N`WknVqc?G$lkNC_7^%>XSz|-xF=a;HmjlPeU&u%^Tl*C@~=Ls?Z0u(owKlw zdD>l6B(b2l= z`TSM*=FLA`rK;Tg-qsaRb0M@+_RPLU_Q~)d%+!iP*|HVa3@z+ho zGep~$j#B;8Y33~4&4Cd=LPuF4>@|aXcpk}2SS0^^ive1QJ#?gAP7ciD-v)Zs6T2~WquN=6?AH}wvMA^3Ed0Qmjpsxa0V(ZgE5;n?{Zc1El^}7~vZ&PJ6 zakfV9f!Ql0ys1J5-21=s@YD`R; zr*z`wj+TUe-sJH2$P1=)`G~f$1`xk-K{dqLpafJW)BC5xS%ftqE?;KoO8%8 zI9oB7(f0r;LbtFv%7VKWLeZBSq^H`kCl_Ems8?zAVvg2KG{Ns9^YdT%G1k_Zxq$md z`R!NJe+Gtcnyx;#WSNK};ZM35@Qit@Ct+{Z|ERUsLeJ%2-h05sTY2+d-e=Bfzr+BNOjz3BnR5I=s?dyrfh+=*q zMgB^tkmNee)Pw!MxpW+&{fe_AA1TX3xE1|GlO$4df1N(KCm2-TSIfN4xjk3u9lQU> z4bH?61xdfl{mugGA4_h0-(1?_{`r+%rXbu`N5Bl$4t|J9WG^5CBqGHe8b>V>anY8- z;^&8%+Ok#4WFo`bQ+<#&s6b9mN6}I4i%Rdt$Z`A&y6Vudb8dY{zl`CA5f6e;y_gqp{Ol$hw|M)Q>g2za* z&bRH_QH-A3N8o8+50Nz*gtnDnyPI z>c1Em+?*KW+{i5R-UHwTUfK@$=*{&L{xBzz^^DR+F+1$GA?&XGaXxxWvVpe~D|Gpu zfgGhbg+x2=-x_qSYI1kxq<8JOXF3X*F(J;2LVY(zQQ@51v&zN$kWf2!)1(KGdGf;|q-^2yAA8#9XMaDazlRHxBb#+*q? 
zVkC=5k%*MT(1z}G?cZ4rJRjP=ybCjcgW}Ews%DqA9V{7syahpFd{yP6n-^8D>)iU2 zv}1mP^^6{YyYIu*kd@AnBnAW+xWC`VN=;59xfWebzq_FNvjz; zmF&V2y-=;~@5x0|IrJ?+mv#;i+e-v0z)E7+3QP1fx1fHSe=>_qnfhH5V<++}q!V_@Mlo+_6Ivz0bT4|xkC8uRRp)@2AbVBzSm^%xX2m7@+ zda5E!wcbGAm6m#DX2SfubW;xgfoLC&kN=Of<~o77;p_`$(aCpX&wVxQU=c_I7?~qzV<4D_jcYP?1!vI< z*(AsF(U_5sv&Ee<;$dsT67R7n-cWAzvfKtATbS1}9)MBdJ@^y9lqLLQU33bPVfjOQ zK}vtT$mMzW?_07>srRC-04u_F!XZo3sU$t!2PA#vtMEpAEc_+5C8_+0JuK;{y99! z6ny&DqpFVb$)Td|r{vyBUmbd?R$$K5hp>doQK#tqSxTaZD^`FE^>+6n*W;H5NyWKe zmK&53A!m}Wq?vLK2FIklk(8mH+Os=quC?IbH%#fXh4cE`ce{E?*HcBm{py{!X3Q*C zu_$vSCSKgI!&|kSqAGM^c`FLmI~U}Bpe;N;a#|EQ_4$Vd4w!&_PAGvGZ#EI6ztH-neXDj4C{myztYD-PcD_yZsNx#g5MK2uw#M0$fva)6Y|75Hz*~ zw0bd+jSxQ&t;n;zh;vAlylOlyU@uy=yP4rbhQ)zn8yQ1NR>5@6=9i&rO?q_{^sLFQ)HCr?wsg$QBA3Al zfX|lXh5Pgr&c!&bS3*;BpqK~S)tC92=)FBg^+kN;+3@L7Ez@mt0SZ4d#_)n@Zc1M} zQZPUl&a(;z(R0_wHa&Nv&aOW%P7BB~&+Y&5`u@&KlN(l>4;`YNR53jOCr#Pn1JH(y zUldYjp%(VcO?+MOm@iqi3ZSXK2<272`51Ol@h5$Bs}}&Bd6ZY!jw=Q{ZfY`OSWeV! z+bQxrY_fff&R(I~7lc-9(|J=ql$zKBA zlfzT=W^Iq}sol+DkG+P#jJJ;pQEKj%eMs;wt(&ZSy?Hs>PC;YYz3ktq5pvZw7H zA5}7!vPe3!+*?BNyWMyJW!`5i=kw#y6Ir*ZrAI8Vj`OnTtadhRcMRr7SM1Br{Yt)? 
zr40#4<1voVx^F5hla_qpc}^d-FUsFreATv1lFF0q{mC_(@=AH5vIr_(>enIR*T>?^ zbE2aIcBYRqMJn02r`-%VWiJY0xV(eUQUu3`0vtu4L-vT&GPNr$48n5U{R>;)q#!h> z_#J3Cwieg5Ju6_IzTKJg(|2+1taq(_(ehKf;9-stX7rlT*L2n9mzz%@7yp*~!~(zP z@<0XRzrwTQ#y-hR9o#$QDLt&>_{1+z!_QC#ya~2l`QE*4=UZt8xeq*U1QOdr20kx1 zw%n%-gYNI2SqFDgnRam9&=*c3N5xoAT%wcU|0Xenj@Cfs85vA&9eC86J`#5=JhNj1M-`s!}Owj6mY>ebe>Rm0Q*W7YJN zRaV}6(!gyTO0(T=C5i6C(L?1V3B6m8ntaf~{iNE*^GOhT&N*?{%Q`<>_S_k)J2t{@ zB<|t&HU+d+k~2_62%WNXjdi9ujbv!#%k5`fRXizataiV0PnP)?_$JicIfbu_i3f8B zarg2Wr+|ll7y^r2!1;Hao~3<$QY>c4qGE1FMgq@z3TxCuz#wl<1eTkAfl}Dxs63u; zODu`q2q?Z5EwZt2HDV3==y8Bj^1xr577$4->?+WrA~ptT*MM6-$7`DoD7LXmd1*@3 z3*|D#zVH|JL!a6up|28EMa*cNq{WESz-vlFFXm*LA!Gi>*6w!hyV^9Jvcrgjf6X=e zj5%AIHlAic1>6H6QuRwBTZ~THC5^ki)G;WEmCH7mdw2dHh$DBXpGQYp?666ckBPdyi4Q8Jdr@toXlj7h*MYy#lhfRO zj+5%g7n+o_x!XDQ%+lJPWOCs2_#(rW$5Ma_N+hXoqnyIP#$&{QF){*XL3|v&ew#89 z=WDUlbZdo^(le?UmuZ!{D`3U5FH0c1^l;kf?v#391E2Mr&-d zICrG3QR*4*P5ThZcBTQrHKYXQJRl95sp)HzYo5B)db>;2)zv$l>=~QvAbD9U5eZ$RC)M-beSRXLUDUTPZ9sJ^?er^yHi^ zCoayI6P*ex6w}uDTs3+~xw>Q(e2xZ)f6TKi2UVrC1l(JLMj(Loy**? zAsZXEkO41dXf}rxyCL7Lw$7^ilZ42$sY)}1@eBMN*=V2{8dKMai*vY5)&#PWG_e-X=gQBI zyopcT43PH?Un-^KkkQ#t*EGFRp$=o}2u2*^>H zJ7Be5aV`p)F^?U^dMGpHYL2Tx{15Uz5Wf&JGr#j_8X%(t|7InX+O0MDYi4PJD{=?< zZ^wID;;92-kgs3cQ}2`s%anF}l)OM$0K=boo-!mRHq-a7Pk72!wbTc;XNg#+ji>b= zH^aRG>75{LbIpm(Kptde-t$*(F{e<&Mz^1E@f5xN8e})5>A%x)_q8bpi#r_zJoN~Q z!`A+cz!%m*gb7j^v_+bs^?k}11B~;NmzL%~0Z&MM$SUl?uf2V%Y%IX_1Toz9u2Pq@ z7%2mUQ|5XY!Y_#d-z2wh$nNoX$)ZE(yG#v472a7?Lzmr?FZbs@)hW1cOL^Z(Gd@o@ zVxv|(J(yy!FO&ADB^y4J-nH8_nv!;H*~!1g?h12Zp?lpR8ui_O(UXrA6a};9y88wD zGOSzpL>{Gru|=hd2*Ppn04U!0{`ocjv4vcBw$H0NsL<2d0=$0-ZD+p!4dc8hthZHQ zuakNUPyw5ZicccwxCfN^jSB;X&5BieRt&G-cJS(v7pCD@7ihE{J3`qW&ND4b);^+! 
ziz4ahI>__P;D%mA7#!NB^ScB=U5xSvUAvd7PR856qUz)2j_wS`q{#mH9Pvyo^`cV- z=+=51KxrnIY?KOZ6YJ~Xz1SFy%_hWgAm@J5MBaU7M$~iD?d#!T$?v1Ytjz0)n{XyP zI1*!Gd>6`2AP&-Ul>*x{$LVUHE-TgP3JL$LG3-ud(!Y~AY~p(Y=kR}KUx8vg7-($< z36bvOiY=zz|AVWuj%qso+x`eaB_<&aLqe1g>1H5GhzJ7GFqM{;8ZtIQK)M7JNhuZS z5~LfIF3B+l(y);Ow*8*3_x;@WInVP4hkrQiZ0Gy=>>bzjx`fdY>Nhq=+70j}p@_H( zCo0fGZS~P?aC$Q(#!{cRgYV)ph2rM!cpi*r16mJ@MN+~u4RN%)A`A^V!rO0UQ88Ap z-yVfn(}HX&q}-%97A_z3>78ApM7B&gC9eFu{mBu&YP7ZSii0WHckl9XKAq|R6`-^J z|G!OTn0}~D1@kcFrXM!;x6kR=;r5e`{uzGgR7Kn52?Zf9yjElPw4)!5?dren?OiCO z{(?x72vXcCefA7oxSfU1OCm}xi)pvu6=M977_GlX9Ucc0%I*6M0Xg~TAxy3pxQ(p| zMQs2EFX{EswVxCyFhYs0$m;%|v%(8aE%D79G@NR_-U|*SoGe~I33fY6?VG%)>18MlQ5A z7Y?%4cMFRU`r{=Nb&|8_6qaN|h7; z0{gfn!`kZuqjw7~M0Q1r2nkc#&!$z`y|{qMpsl^|`@q3H8u*Z%$;Hc!!F0oLvcQ|y zyk^tByCVb8VYlt8z(3e0SVQWE-xb*s>2ZmTN$=!8u1qx&QLiueG7-H=WmOuatUCJ$ z;HFz!q0K?-=%p>d-VKIrv&bc z$CeZP%a~B^C>?r5L7ML z)gde%{KkDbXw3o>H}qllDU-R6L^i09kO=QbO@fedaBK3_HT?PtG)8s=3^(~Fd8s@u z*ItwGaA#gR(7PERvIf}{rRB*`y+xha8AaS|y?Ak^To`w|#-9Qo|-a zMMC}Bs`FmYy46`Vc^bV5d;04%bUS4NfBgUz^?09fdnBGyMxe4=|EW*_)QiW6cA5>n zb#9-l;~l(?|Kgewvb4Sp(4q6cV*|Ag)@e&2`Cm5j!IXrt3qFJ@VX&<7tz%saE;FG( zK;MgwUuV2YU|0k#NK{FvqqHE0T*;-o{*+{s~^%1>rhURDWv7G%uh2HJ$gG z9+Q*|xY>44DBREnTv7ImERP(6Ft$~80L>hASa&Tjn)sqSdw_l%WDr-3&`m}~Lk%c_S<-(?WlN9!f8;6r9fC+O)m z4%<#={60qh2X1&tN0+5%nmNH*M<`W2XoWK_fRQ!IYu~;c6yoN{k$4m2{{l5a2;kuT zlIc+&1sdUn)25+`%PHHIZCCKf7;HP(dOMeQTHJq^Z~t%je?SH~PT?AgXO^!RrNX7X zHF>M8>nP6tJx&4y&PpQxxT+pD57d~pJi=yYwFFP?Qna)mKX!4}i8GBcMdUioBpRB% z^6rg7kLNME1uO~S$a!Vf_B(TLm~6H6hM#m*4enijzPs5g8RsVkX5E-M=PPq$EG^zL z+w}-^R9?Fo8D?&*63$H>z_~q2Ou-v038fUIrI`ddkTHh9?L z&2+J&L&1^3-FolhF%6paoqOz^WEoNcnNNPrgRVbqD`0bu+dz}q;%P{GKtH~S6i){S z`T@Z5Ft}oY9Y4P1a za4LPT+1e0F%fDb(2_=8WdZS7mgM4}9GZk-&+!fEYTbvIBUnU3FN#Bz9|_e?HxI&9urU?{x$> z<6R|HijHIQQ+#RvBDyATS1=#pQxArOslJ#dbK&PYyR7wpu)R(T=t{N^P-71})D_za zzA3?-me=OfN2B%wOH7o~;1dCmV6>T7=WzG6(Y;I^poNOsqbbLPxGJX39jia*hMcY4 zvaQUeq2R|RrFyhUpsinK{L?r^%Eq-S7+2+ync5ThXd9AFKMSZZY;}tay=Mdj_G`;h 
zMz!Xf3?0UVXBOT*@z~D;r+?ezNo5FzKpe{o6M=+DkUNHf?dB82R=lDp;6L)>GBgQU zYSqR;`lxDe4-h>cRuK&DR7kcl6Lw+N1pA7h-(&fi>zR)Z!9oca*ulZ2rtY6#)iDi1 zg)H$IySx2I`Lx-4BktyaAhuyT&L8Mp+b)p`{kWZTv7G>I#)*(MN0He&x(9KOfb={I zkp8?uC!=s#Y`b{?e>2H(MNRrpXL47dZ9Zp(TWq)eUy;sfIMUf4gDl7Z(brlb~|H#^^B9<^f)JS(S0Abhub}3@m|s#1m=WHEEweGmnX@w@+}a;drg4zvj_dU6b$Mwi+aBR2 zP+hdZ%>co-W&IYY$M1)W*N38_tjm-PO3kM~M_6fX<~sVaH5S4Za7dtX=f zLD)CiYXiggb0?nQRkinfo6GAyr0I@PP?{bwKfV%w8_P+3Z8?9ld2p47BRP;4?xc+7uW_ z-1kh~@62S*oN7_cu1@Xz_~C^eKD$xt&@s2I6$g#Gq@sOG^h?f;dO`uW5iz&P5eXMU(F;ZGfqWq7VO9^Z!3SfN-eSrLcs#3uZEjNgFtxYoDWAN|loldd{z{*olXCgA= z_xsfl8*MbW*8R7YRFzrN)hl#JM*0cn>aJ2=%g_OUMR&?hWWG3&H1JxvQ-D|KK|4$l zp%^!;TfrdBri9mip;y*lX$>^JLBcm0x++1EZAjpSiEZbyBo!P0-gQab9O<^>Cq(<5 zKe|$E_15I!gb`H;!rtap^);E~{qHr^4j_^Qt`IYvfXl@Eja=Rb&(qlo>NMzE2yzI8 zP5lX_QfaXLNaZUdPa#V&F`xVtAMYiYRIU%4*8YoM$NvFw0Vf?zEJ9(yq?jy2a=hHJ z`yYCRkR`Bs&x^Jv1d9j%(z5iih~*svXg8Pew}SJGXAacC&#r>b`y`u@rDs_oI-nl- zejiFSHT3H6R3&D7CHG{{W%pb8inM`j&E;uXPVwLUp`djU(Cg)FN5Vy~m3df;+!uJX z-IRo%EsH^;>%L?qqd<%@uxAghysDfE;^a?C0Ct`zhVAdX%n? zD>E*3I2$}_ARM5J(t^$Cq5CGmt0F`*4G~D zQIBx`4Z5oTObT|5JlS4*y0clLg$dCDL^rrqE}Iv#vTFQSmd4COqv-qt3~h^j(`Km6 zfrLS0$tdN_Axs_^e%j~5lyWl+aVpJ)70zV_tbSpQOmV3HuqyGKw<)+ z-Foel7ToE|J5^V#CzPK&jHP%bQ{5h|w4niY`kGkgOjF<{S3cEZE z*IlntG9A%q6ma`WkE*(2;*#oIn(M!3@hreLp`=QG8<9Ywm z5m6<{iZHXB(3f)|g`5C31Z+!-^R9f9)3V2e=)(GiZQhFPgj-Q30z>srR6DMQdw<~& zLQGPem! 
z+>iebuCxSq0k#!XZ{?pwpmK;ogAu;Y*tm$eAy~(#i~Zw<_Ii3d-Q5!de!Zu9Qjr>p z9kf?DsE(Xy7Ro2Qq4H|4fyz1cJ1iW+RtHXm(AODq5nke!T$Qt}C*VEF-xc9Syg3}# z)oK(6R!>d;XOB#AFtWS5lAvXQONv&fBLsAfU-Y6__lm|P=8fL!T`JDj|7&MkbC`CO z^%-bDq6z76J;v<<45r=wzcm^_9qELj86jy)e@Zp2Y8- zx_g?mNzaEsaPyr!{D?K7ybyRfJEg|DDnib3g4TtYT_bQuha%mAzx$4)E8iAWT(bzJ zxfJY3N+>>#KWV(C6X>R@&B^b_n8ZoVDhe0XnA2Q7t<01tHvx3p#uvralq#s}2s!H< zJ(HqFw$nBn-#j1YH-N>+qHS{kXk}ff4g-6DH~|SH@3sPfs&RfR>FKvmLrC1L2PPOu z(ZdgpqjG-2)gISJWbpb907zG^Eg?ZsMv}t$se~`^a(0?nN0C_vvDl-S(Db} zb)j`HVaw>(1mr}2u4Kj@Di-e=VMsW|g=xp&1-pGH0Z!va>w^X7!kma_9r>x}Bqx-*XSbd}Ob@hjEk0wSM-s)I;*eLo> znX4QNI+HwE#x3r5=a_e*>zt#?g_@?xke@fEck&_Xr3JrxJq7v$;)2ox5nlt9)*gf? zVHkW3@z7{@x$>KPDkL$JNJ&6ytfW_vqE+~1?`jqQ=~mE?Ugaj66*(uU_+>z~-Dc*= zzN^27v{$0b!gasr8=8W=r@`hw^Qw`Rfw;KdTq#mE;(?4BI#Srfy6e3m)XGH(^Ba~9~4 z_Spa`;~?a)G9I;Dd~&UUZ=$oEacM(}^Puuhr986wZsoV~PNwB!4qmCbw#TH*!xi-0 z`bhykjI>*1m63^wGb3Z_8XUjFE_RcFiY;4^31||*pJK~jNpEL+bgG{AH~J3rf`U$W zY4J(-30H8xtnh737k|FpnD=nvDO<9s?vdXJ>Q-5HZd?+)tb3UEXMVY%X&FlIrmliN zq^pdzQ)x1=h5?(2Utd$mNe&S;U2iB`LVLSX3-k_Q43FCzymsXgx{n%Bm)lsj7~G1! 
zHW1`;;HYm+K!~4;H@oYWzMv#nIHb(;RYKq09&g{#<9ynCAPtIP#gPg{^1Jg}`Inub z@|P8tNrvvhGsA$>F>hy}o#{A4q+gw33f2j|UMD_SLdX~lxH6M>OATktMapkHW2f!qN zYRxtIA>yz70b~lW_HslR&`hz#by%54`Warcvt8IkaY(FL5b69e8^5zAXMP;q&>h`N zU5tC7ffkMCcU(1E1n&%87~2uhrl=?si=Qik0srw~gPx~fT(l+|;2_&&bAMAvY^kZZ zmyH$W2nBz+_4PzJi`hCp_wIa@3J<{5^c~!C|6krb1rX&x0TV*b3b$|qo&3oQLR7oB znceM=-R9tVr(*Csy5QUQbS7t}j}OtiGq@i{gz5_=LaqK7^#MG#i+WnNuJPVMNXguT z5BESy0hjTgRi$1jNAgpl%WW-fByrWWXXK_4tcm%1@^BuHACPE=kq^TfP#$o<-Oo2{ zu|2QZo@U3T5uXLmO$0Xv?g~Dc|HF7fL{XWq|)>BLN`DA1y$F(a91zoj*it~)C z=W@V#bM855UOSzytDODSefq}IweWY;dY{6rKdv@Z?{|~G(DlF9OuoY#le7n=b2ZCi zi*zlqu(WnmXUwV~qCp*HG= zY=1y7@(6W3B-p9oMn@;78Vz)0qVe(68l;B3yNK}gLK-29>*_qQEC;q3o}b9OzkjXU zAYTpMAiB-`_fkzsx$Uq$DmqOsd^Lw@iyq<{QwrC zwGHDyqG3EY(gp}h&nvYsf8>JMCJ}Bc{JkHwMz2pf^Me8nlzMJ`7mlOulosQsl`Mn* z&?l80x7x3ulH_E^piVH##Tr-nOvl3Lp3`JV*1m{`^I<`>Np$fWj5)TGZI@ln-aX#`>rz2i!A{E`l^~V$a^Yt-# z5p58rJP<-6`F2df13KQ5{K6Q{8e66s&@1VmfKEQE+beaRhF#TOJ#W1f!2Sn$btTPK zVlunvxZy33Wi=LUc&KSh;t(G$p5kyp(b>1(7@cRbgo{PV#({{h9#DZFh16F#xc`j)s^ z84UdvU#d2wcF^Y>)=D=D0#x-R)RRZo-<4IY310Ha9;N*$jp}@aa^8&SFND`V!9O7T z@Z}Y4SVfFTUKA`pP>Yg@PuYfX6JfZd-jTV(lN#LW>zPPdL;3~%e1or>z&m=IOvih} zQ@&sAp!=|8=+55*?lt4DkhLM?H+Y)OcncFIN5^uiB7fX3?o-^&fK<*S=p1)f)oPG*Vd)`-cO?G#C zW;B-#=l3xs?Uj;IM5pt-E)?4`)hEK%P~Ezq5OZq#945rYAu_f@^CWiD`of6!5v$Pl>(X&UZ``6CjOsKpUQTfia?$v>k zF@2)OWeTOr3cvZc73F3wH}&7z(CEG3FZ@D9eeJQf)bLBaq@i#|MX1R<-x0-G=Y=A9 zT>Uc6eAhAL6Txm}`#uiVGyj1PkOfaNI0s?!m`bjX>*}ePt-5m>KP|dJ%e2z2D^X=X zz%eM4^!hCGWSEpjl*9kPkeezjHby(~T6)!Pi*$C?37=Y7x)1}{h3xMyu7WzlgNonL zte&HGy72*MY-I=R5@Bnhf@IZcw|VVa_2Lbw1%7HAn`tumnN;5`s>%Nu7-7{#;Z*ks zWy`zHol7EBw))-Yt%cT$9a}-1!j^zJM|a_y^wV_RPO8A)cI@?}LSSB4u1YYT=mP9D z5F)L^N7+_0^wT)~Y4P2e$SA)wG5HS4ir)%|1$_OL zJgA2Rz=;t}=IlOMnNbz(-&tZLbW^!CTdliGxg~>-20C^eAL$wfNR@|#IECHd=ZQZC z#G3uv=yH&F|KCnp9>^DfmwaY@(m*gThr@cfbo$$)*-~xbr(NE%uN%@{&eOiFb5bd- z_wW22OQpw0Ar#f&%(Q#n_62>7!y#E}cwzmJS2<~9hLr;6+>!K_3?Z=KMKsm9n^}+} zQ1(6K3?{hl?8T^m#=^}ipK+n|sdQ78`ZTXabGg&r_lCFL8Rh48gHEvXiE7obTnMK+ 
zgA0@z;Z{zd>#6Ri1p=s%JN;h1T-OaR6XJU_%pBjS zK-?hY+%k+sQYQ$sHaQEqS)ur}3;B@F^u=1$YZe91%5OXXoyWErlbTf}J`*0OLh$?D zYzaOqkf>9b%VhZqrt(OSaee^&;_2gK$%Fhu3&fwwuM(Y1^x2KC;6fyo5YgOf50B6D zhW3FTyldq>60Fk`eY`gl!sy@dT^5YURRSI7Usi>zNXYaqaZboBvh8}+B|M%}nb&_e zJo^Hu>*>$SsE$Qc|26<&BY*ZzwwIH7!Fe+hv>oMg*1imDZ|C z3sjxbZ=tR(L!aewR|2q12S{Xdz7pK;&4j~<@yWO8-~Eg6D(%ZptyhwEt8x7p^xM3y zlg7*?W&CGh^DZ)DoyWbS#(O*!f=N`VoRguNohxlmNXcYj1Y%;DkN`M-Ff(0?SAWiH z;~%W86Q9l%TFa+q8mw!%;O+%Bj9w8&nxra6B!ed&Mm?fcFQU)W00`uEZI6M*&;I1s z3~T*Jf;uzYk@kk4t@O$6YkFUwc@Fo);{gN==e4V{EE$QE!#h=9Xm0eOACBEQri-JK zq_JN8=A#m%CwNy0aox=GO|}uOBOs#rLu*42UPRyOx#MNjwQjaQdhkE z)#&=9=04p}dByC1XI+uPlubCr)FBIBISUlQz*y`JxPR#`p&eg}3GDGu=*hh+RbJAP z%bSYT8~1ZGnm!r*S){y9k@phOYnnc&1R_xq6mVttfZW3Qdw_B{-$mAgZ_xvDNO@M1 zTPYr?L8Hf0TMeSL!HLyXUL)872ox4V&9g>adruC{-vdm;gVF_q6S2(aeQZ}wbVI6I zrR=6WXYVn-Dy4-!I?uB^0wkrThLNxOg`UwnE3Ka7(>zvpKTIfc@JT8-t^{y_F8=|| z#X_R2+A^2DF1tz-)>qnoycs8om3~);@rQgm(CWEcA@d#QJ=;}QgGIQJN`R87-xwiG zxZw(pQ5SeOT>7nXs1)ub@bGL#;?znr+>Ea@v8i(xI__>M;nNvdtU6z0voS zVOA`MstlbfsHqGiw3tK)yNlL?@3a(&-XxtFA~_vsW85y37ZH!6_3$%co-~p8v@XG2 zJKkyg8>+`!9x{GKS>tXv>b&VK1kcW zMlRcfY*p#oWM*Fg^Q>-mb!!WxRuUV#5S|c`@;!bX3Y$slD$b|;9|w0e;z_wHnRDXK zaf0=eIQ{W&&t3QS_KGZ)W90^&Qs&p5f|v~(?fQm`5H;g$ruPkg?~Z{6g$_3u85KLX zv(0;ILuya{1H!Ks2xm>4yuuZ4n(O}gf-fc-WP3xg<2UEySLLtJeNW=2dP88qH|l;o zS!-L_hq?GF6h=dp7?Fb*y92sS6ZMrElgX=ayz|MeM3rdpS{*C zKKG&QmYQoI=YU_#AWrD5r(L0pc|fXIMr7gKW|sdqGv$zMw@a->Oqa3Yav&j>xPz}> zmoM3YK#b{yz=pzn5RH41Du<40yM$vqOyY&3AB;LB(5#Gd{vMYyTzPt6bm7!iU6}@b zsn#Y)ssyT!B)SHnNI3>)>s*;X^HzfQ(5Fi-R9o8tOOZ8~MFe@J{;Z?R^`CTy^G2z^ z07EqYJNk{*Aj2_p)XcUp$Q@W z-b#INMQS8;rW6ayg5e&cc}4)I#w1~CW&6{?JiMY0Yd*eewovYr71r=pq5112$X-1t zWD`iPOpL@?WAfetS;CyZmCX`dW{gFh5kq;4T{$$TuRgaVT35)xKIT9)KMKa+QqhdZ;%x}Inc%$#- zX79$4s3>RzGAaxMut&%uD(%5Ip@g*XS&Ah7XZ{1!A>hho@h5gV7x$54fHpq&h)!kk zx#zK(o5?$?jU~Qudzme+@;=nmgKMi{DH+gP!!qCVpMh;Ywab%LkYmt##t3NC4p0+z zErIanu@zSUhF*y!%5_VA8nCJOf3_K*m+EDv+}n^(%}ywjU&=f2X(g4bDTa_-27FPGZj#iSnc>-vh)896ukK 
z!wWAvP0+fq+6qR_;sN5p6RiV)Xhl#GVZKmI2?YU^lW*H}y<~w2gm9tV%k$|%x1x=c zs@g@nrQgZjlzt1qDC*W~Qfns(JVb}1JIQWM`P54KQoaLU-%+QiadOyC9Y^FD-S!*# zBX)o0c-{cEvY&)_iqAS2YngWgUt4%v_nn8&(mjnX+dd-w7%*_aA^jr>d;+90e_ke{ z8g9Q&U4-=hs2nhh@wbgWd+xttDc~fUa^J#0I6uY2=SlXSP}x^3T2<-ajEa=~hAm4q zR}yu_=6*^~vsvk9wo_hETiZx^>se#G z4j2E%bsdm>71eyasnt?KsasV@rfF5fq(>65ra98_QLeJQ4(cK3V4smZ-A+x^;c@B* z^uZ2+9Tp$08)J=iY4?;E&=bc)es;G;+aL(P6DWW7n(|!>B4N z{p@cU%IU{VCR^0Lpv&76^&oqL3p!h(Y;N#=D2x@Mg)9H{0UTWcyD+)$wEr>aV8EKE z;jdyJEl&r!uCX5)20+yZaYz)iE0eSO9fHG92u+7lkodw`N?1bghm@eMFZ@%zz_{cU zLG0lxRS7&hkVT+z5tmPBwm8Y{pw(9F(-EnsJ&`A*d>Y{HhSr07^esBLwVNf5Vb6cP z+0pS@*Wjzgp-$ctZ1GDJBH^lzB`;4OAZ8|h=RRGvFOc-QIYad|E$K;GEPiCxNTD+i zXkJjAdNLlxdV!FH76nLg&Jx?25wj1E%q7h|>*SxHO> z3NeL9qW>q1yWB61y&mhvI%a$K^*7< z=C-14QQt{L>Q_2XTGfxVQ7cWjA1uu*@hu~bYwl}PBito{jwG$h9H=`u?7TF|Z;_=R zNe!zZTHv%A3i53Q->}CFd^F4yqD}Na({KkZ>Y95m^L@ZUu;6H<3h~aEsuw0W&8K+F zXYXKj#m$u#OkF6#7=JZaS8+hRNNRuk5QSmy%aO#0Bpd+e*m_g;CJ&c`GNf^5*kB04@Omd~O8pWzv z*DF`AMITytB+;|(Cz~R9acJP-pO4nA!9!vYj{pfi68thnqBkk3MGT(yvbnXEKB~FZ z=IvC3FBjEh*=I+3HOXb2osuQyX-OgquY7`Dq3vz;go>V;evgx%bEhwo%SmOr_})I3G1ZIR6en(4hYVe>D(kX*GUh{kOLF#KWGJZ_18?o>JKqcD&2Cpcrc4)W#Bx z#vtwixym@t4=cjdmo$Zo@}7%=HXhb0U}2VXyG$6pXN=O1^-iJd<84NyQVzJ_Ne}4* z;$=(&-Nlf8SLQfsMAP^3g(#VM|0N(+pjr5)+bM)cmg_>OFiD#Ig8bDa1a5(RT&OI26}2N2@@hOwR$Ts8H74`)JGv9n zi&YDdl=g|b&xHX6v#R|CV$p6R_d1Y81G#|eNHwn>fFy38Ol15QETrh%0SV+PYwTcuy$U zh;Ru9F#SOYI@)MekrGlW(~aOCO`#ck=f+6a3SqLs3u!}*yZNAe^U|(BG3Q)EwFHE- z+r>TNLsGiGn+NwvXuTH;fi2KFSTfd%;RBPl<&^15v$EgxzB$_|IbT0%{k?mB*v&N* z{kBd;&0N2|9?13lGYyv%cd~r;frFAy!C2E|@|vL)X!29+Ey^o>mak;cRFys{XR*MZ zGn0U-0CrV4(x)~aYqj3q7JYCMLc#Nct=A(PRI%tPg^wrAazCeC(JgN`6ckU*B^WGU~9s4f$!at-;pCFPyh%3W< zQGeQ>P#TUE6V;wovtB!gFj6c*Q*IsFD@#W*TQX>aR;4a}H(< z7p!56uRQlW{b_of%&wNN z_CTj@{JG5OsVDqVsnRFPv$Gm{7KKm34i)Yg8V_X%rS@c}9ZGgiFaftzBy#L_85>ubOB+P_&VK6&7#Ow` zouPI}h0fTl^Et{#K|*Z%jU~Y=p{m;_u>=oD??^asrTJ>}+1hJ=Z)_C8x?ku;9+H2D zaWxtKL9K)m>M!oXSuEeBe3}1Kk@-a%>sGwjxaWAMz00EXLy{iFzTLc1cYIDEFHn;o 
z5Vu#MZw?`|hbJTW#_pzf)^OO0pL6dLuO_)d?E%fA_9ANDJKSU^P6P6MCL0kH8Xmy>mMfmHD zayOjIF1Q_xOTzG;e=zlQ@vpqLt7uI7bI>C;*u<`_hZN+J0@hh7;s5!?C@O3x7=7LH;} zKZuT002~6H4~apt5a8*KmN%<~=vf{9esV^o0kOOfqNn`=Xx&1j@7~KkC+B=Q`ln!_ zScTSC_GsZMfr#>ymA8MdEdU|abZ#R4P49w?kgt?yI;>DJH%L)rejs|M=J41TcXOPZ zSK-&fc~q17H*jhwV(4NvXV(1hpYlW>m@)A3n(ZsqH3Xn;wf+s`flv2w6QHdn^DJs( zL@6es7}ZHQ?q)#D$Dwgdh zE24*C$=aEI>d?8LJHh^Bi7NU(#6VWs z8`7u;fH_z45y5}imac0~9O^vo7ktaz!(NDusV_|Pg`&7ms*bVVp4(r}d{_dILcf;u z0XX#b-CJZk^#@bOAl;6=OEuwYT*~KrqaF#-A^l3L>)Ysg0Q{`>&t3li{{%sHEpcDL z2`~`RcX^f=8BDsPU7xYprFH!`swG^<{`B6!TfHNZ2Dcfpb?E&13hyakU^IBu{Xl%_ zCf>b@V4@v~Jjzr^T1#%Td+i*Dt4{<^3&O>U2%~k8|_aH9^&owcv578E59&&Ul@4ejju-0$yloweFU1^V}&m?g_fgT89Tsr z8=u9yFDD*0#fYf4uM^KdZ2T9mp)#%A%Yf^&vmuv;u$^z!+V=zZ%a}bdUM}J2n4^y*c9VMRAr+(gjPb22SLnp0Z(ClwxGu!9NBbI7 zjcN7l?#a3yRb6Z=eDdM5ChubTB%s%S4CwU}FN}qcLGg5oJZ%A1#|2z+~ZOVDS76;uq~!WtS$H2Lz-InEe2`oJP%MCgs^wHQ`5`asa{>L#c! ze5AYCJdUXLWkY%K-Rs=+FLQd%fsRrFN}rNh9TeAvm}!)@V~I;sNKSZV=hDq8!kw(h z9vE}WH3z&%XFHprKKPS=lSc13{68MeI>%0dv4eHU)F$3Rh3{z}>h@E4 z3)yoj4xXV-#tC$o%E(th`f4p90SNjt@A+8%0&T)|sjpJK@bh4)PW!IO{m%gWfq!0* zDm)q#x~4ut%7VIbd%Z=~8pBU}0|gDHRpq>V-B$j*za(!S5c=`cPet2K>QKfw(Zr?- znG*(_E&6Sp8VcJUyDJYzuNd@XrilNfay@_8HZS&c1SjiK0c~kBKXsw%@&EV>MTxi? zfu-$gpG#ZO+H7om@$;7!p4A5)8(emxXsfJZ87r-y(-CeRc}Ia>CTOo5j|MP=GrPPv zXfKLOo%Pq}p<*CPe8cQDQ!L%DLm$e1Dbbpr^jjYvc3owo|DmI#KGzN+-Y!IS8h-nI z_ZMS=FXjWnpDi_%o24vf=;I>+=q-=vA@t;cW~Do+xs4a{7Frt^duOL8W2Q%)HIB^e zA^0S2Eo=JVH)r7e?&P(n6Jn!HY8v^}IK$m}oNq5|F5@&;Ag$wkI>AfmJUY7Nxu@U! z#nT5%%Y8(ZNaJ|{C(p+kz=!e}H(ZGCUU^)7@etnJWewYv4Uy@XbDZ-Ck9ClRw zmU-#JVFm?fcuwjqAg;;)2a6%P79gky#rXZ+LA=TG@V6HqEn6e*Md%kMb}|kx|{(<>2cbhI6ov_By@~@Fng^k%sBIzb?my;FqJM zJd+6rhA7JdFRg>UdnWv!m5hP@vktOLfiavt?ZUeuZ?>c%h6Sp|-QWER`l6pqZHAA! 
zg#ODz8q0fs!q2PIi;4lhZgemzXjVd_Rl}&Cn9!oy;xDG7`&7c&dCjqOQXEv@&kVJ2 zsCcO7qV`Nr9<%JOP6vP!D|lyENZ`|!g2AKhz4Tk}Q#C8b%ne@6)n8Sj;4=WOevqm) zP>lEWuaxdmxHODeGbu!pIURta{JyfjUlCY+GioCIyUCQ!KuSnNX~qE~Hk-#M8(9bq z8&-3nLHN@6v=om~U;d^08la~j5Gy@JFYHq+=;zFlo5Aa0 zH->vl+>7cf@=+_}TT((Y+8t$rU#PEjzU2?UEW`x6v5lfu^|;{Q%F2sF($3ltYB6h_ z3ReAHU;euH4%>y?9{#J-F&HfpaQAw5 z*CRnFrZb(_1VgMB@j4wan0S*+5Lq^lu0V>xe>%H(?Cm~F*%|8>PGOPsO(@*kNlKpo zLk|LfYN5NZMTp=VyEc*hIx82pV)tvr7z-F{MXhZ%50mv8vv_BjQ!igu@DzL7ooM*m zP8=xDLy20X0-$7AM$HCyW-5Oxr1QagJZ5N^20neRKCJt*a9!!t*#V|ib2VHeUZc7<{+cGI1>W?~lkjQeQgTm7(^cD~3a-7gI3AgE&>`s|OWV(*Rk+{&G8!#Zb2ze-}T~hi0}R+Ts`Tb$@+R zN;3>=CJjnE%`t+KsUe3W1rSfyyS*L`Rhn0=yTN+AL`uW%6+`mnUCsws5RSa$~xV? zks&1@-JnTd`ZXn42XWZvhHVcEG`zTh@c+2{xJuO1-~Ul+y975oYsBb4yigz({7 zVCAP>s>{H{y)`$4q#W4$!0#upt>ghI^$R6Shn$0LfIbd}b>8&=1a3S17$_4$)~f+N zvtkjgi_3ZM)21?8lUkeBMan+=_=i|9riCbJtvvA~h!v56B!|mKlGLvBo`O~J^+Vh1 zzgwDon_aVWUkzJVYKG0VDU#yJ)SJ@C>2@~2FsiRM(~SnX1L1GWsf$mPI#REr(CV=&&WN`6|FWg< z#L~i2a%!Qs9Y+*wq3WIS zAVxLUDW0olsw`<_UtkYr^!Q`WYk#WE>%iXNd!)QRX_tFX=66rX zW=na!wGg=9#v(PUdIyDjj|e1{s^3tbVAzinz_tGH>v`=?$tJdg=q>&C3Yiyz%Qqvi zwk7g7SiCHX$rBLGI^IDPo_+XCwKXsA#Jsl!Ij7n}m5#T#{hz+RJF2NJ+A|1(fEap_ z5~?Ug1f)j-q7+e4snUW1(tDSf&_Q|;P+Cw>M0y9s(3?n+UK9vLR0t%L5J++E_q}=Z z-n=zy{>Tbh3s`XPIp^-Ze~s8pnpW#XMHYz>F6Rrh5#&DPdB?h!e=h4Nc!^~x+x3$E zDx)E;q%~WAI}sZ#{Ne67&9w};6va%Rv`O}(sNofqLQz-@S~^9=bwCkQBDPoQsGVu> z^}{oP_w--e!|<5hP82JR899{4_*(LOOERlHY1q68@l`JUcy_nrz4Dil^U8Shvo28{ zpGhunew_ezLTj>%>hhilL3~IzuQt!b8(mtO&(%LJA$sHdSQDxEVj%jUfvf`8CCC!v zNymhW2d^v*CU*@Y4;~h5IMmmkGA^gv<<8>H(r&AfBe|>!v?~Ij368W(NIjNpn$Z>; zsrJY(g|x?3J9%h1L$=Gs;v2@9>VPY!)%bIk#v59)`gvP{7s%!4J&8{^^Wvu=uKg%+ ziH`PV`|-6ROl@TFK&sLe=EF6fO-KVQ%Alua{s=o3N2wF~7#!(*c8>ptT4tOD#kP*( zG4UUzJ1|cAw}k)S@28ljZPN#2bQsRH{=^uO@*AeQQch8ZU!-*!OZOU^B*KJzB*c9ZmLy+o z3(!atRmxtjRXeJK+7STZr1>xC4;d*82vQf@vhEW?w!ghuEY7mbU(LyW-(5KJxi)(T zyduE%%tWalG$fh;DGq^Fu$wtMHePiqx;4(UJX69#HI!sG@ryK_9jw9I5MM`)?BY;v@|VeGak}Ea 
z)rwde5_@YsU%t&YdLetRvLv#w7*UByAzwxyr;w_yNU_OwlQ#;nz_`Hpo|j3cHlF#W&bIC-OZLF~Yks>H7mI;# zD7UwOV4I0-NKqgy)7d;@wN4L}?b|k8=iOHfH!g#8O1x0r+$A^CWXPu$sSkzs0*XEd zkb-J70VO`i9FPTiZ4E-d9Oj}g&f_G`YXGl;aW(HHzg8(5=Mfjbn5{%<{}vdOUBbYqH5dU%ti5b2>36Yfy@xf=NG{Q!w}VyJyHe$|K&1Z1 z1;elSbDF2t^sBsr(AQEPa#-0Lw}lZCzO)ztO=MD`cQNIa?-!&@cYz* z=FXPUJIF|Y?lHRHHndB~S4jD*Uq9Y#jP3%8gg`zc?Xe@druXOO^C8_ATHN@HD%a7+ z&hQfS*{e2iHi`xwljihX_#OJ~!a{c)b^JMtOe8Ksb{}Yn%BV>QK483bi*S@06h2}) zfaZ~?iykB7e0m0LxS9sN|K*#$)BO2t7nXCYd=N$cn(?vIsKK&Q2lmsYqG#9XHma|L zp8%I~-=G(j`%jula*gTAh#92p72JTdeA9#@)D%8(9;V^bc#OLZUs@KtygUGVi&!H- zXb<%8(X`$lHh)2SHRT(kKyGL#&FL>l2FM>L-p1R)g-|Pb_QLq=-CWHrZOGKs*f0~0 z!Ilt?8$FYb;sqyfX3?}Rdm#c7j!!KHqlmc4NF3x&X)B2OdT}x35l6{4?)^ zV^xIBipQ(v;->GHUZS(O-n+xVtc^&o7l=5?THVAcVohRDtOxM(9=S^OfABpQa!VG( zhfwdo{7IHyN%FnCtdw?BU9yO$op+3AOfbPcX|ab=i!XaO8ZUlQD&oVZW(CO^W386Y z=5n!{eclPK8=TMXvVE-HslD3RnN%HKkRAplmRD>5jykfdN6&Y55JrGYpxrE*pwL;J zB!3X&XfrwY7gPej@N^@%;!x~MP7g0VoESlEpa~(z_ArGXmWNltd|tsx-6x`ae7y{E zXN!evFMlumYB1aur{fMf+Im%z3NNf(^rxfv8%$y}LvV0m+C&H56cz`g&)mH|4m9Sm z&sXQ7#TqBx!Rr=bM=osw_Y2E5biM|!tVP4+fb*I2FUSo{y7;axtT^=6G(rkb6O4=6 z{+;Wc_ZZ|m6o4rHymcUA_i?*NBJenV4%VPX;p)NGY;SutJ@o3psuGg*WUJkC{&3E> z4mGtfluDEjKFP}3KMy)a=}`fCoSVtwU(o%AS(?&T1LYdYb5Cw&3DO+X`vi`4t(?m$ zGTE&*R@(h?$)0IxnGSGqr~#}D`=d@!kx(_SvSGQ$vY#LLEG^lu>^*rEM;tniiePuy z0I|!?cnJo&WQGmE#*ZND`=<98e4q6k?kEDu(*KVMU1$f+5-*H$g~WwepZ>f|G)eF* zO*6;S>SHtSXg!F?i82o|jj+N!)?gWFbYTd;sjIEVHUK)ia(44Qt&wgOQXhgl1z6!@ z_$wEtYbOw4ufGPOxLhYL&o)d+XxSzD?P&QR3*L-^OQ8CNR6qhxvi0VAv ztbnI=Lj(CTC1HMPcScW9A9`&1EEwFFf;~}4o`E--#H|*Yu>KLbn@xZiHws2CQ0G)s z(jZsUv!c5^9nLbQ=R;cFH39HH7Ti+?240!c@VWjanZg^>bFn9{WFV7kDE?osS zkH_jVPv}bnwt-ZNP?2m$6GwcwnAD%XcZ+gmKhHFO5HNF4Pv<>naSgMHhcN+UJL(&L zMTZ52_|&yz^!JW3Nbc5DA6et!%@4>$R3oN}+qF5*OdB%pdA)in<#fK0_~~u>4=cgH zEs&?Y+Nhsk?yuH6Jp_GN7B4In+5Fx!L;+2S1+Wrb?vRd(XfHvCyz^uPt~@& zt=ga%NjA2#yNp|Qm%BZV4agi4h68)nKWYk4E3b>LWObqIA-xqz{>XV$+2srt@Mp1) zujJ<~Y4oyO2gNK9{GA%sVJxuCxw>X&qczJC?6>ScXVIP-`T1@G=MfS>8LG>OSp0iJ 
z?lxdMb6X;K8EglaL9d28tDPzN{xbHdWjFft0DcQw77M*}Hp;5;+J(v62Eu3JxRWHd z8UR8B0U7PJT650hywt*Tc@@D+UeedqoF(-w)_%Ftr&7`F=6^xV9i8xaq?poiTKO+t zKJTwT1XlEYm-WBHSdMd7;EAzCHDnirn?vT1bEVzyTEo8}CH}yC=>?`Y={>MpJHP{> z=#q#OH61GD2*ng5*!)E2pIA1|(m}pY z5{62U*%6|o8%Y}|pFc|i_{zGqSCVdy*+WP2tcut+>5ZLB$cc1))Ji^twOxbbtLEq2 zn8tQs>hpC`bC=kwM;Li(fNLKsM*tBxf#%Z62&0B6-{n7p%|;M35#y6fCstB*Ye9q$v5NGU=1E#eb` z+r)Dpc2bl&MXj_Gjr7PL*z-l>ll_BUzaHcUaOE z?BTsYL^X^K$-47U?r8&Aqq=)~6*(EbOEU;Q zzNPPjj_XId7Ay=1a3;hp(k5PQt*A|;A`D-SltJ8?aR^VLw`}W}QZclo^QvN<_l{fO z(YJeNF**azF+bLUSv-;~S06Ymn~gw@!W*;4`nGntLg9Ty0=Zp4f`y{or4O!gzibC2 zA6)oV?U13vIUvfAe3otyXcyWS>YO~+G&Y5=g%&fv2gYe}2uODPSzzjT>zK7OA4yMJ zLMq{-y`p*D)tpnSSEn7mx72|x0;&6C?wZ=I4Ik1))kwb*gf_mTk1D>16A>lB zs}{HFWX`q|?}!hCd!exg$?=ZA&lH2->LOl#Y%-tr(XQXYN7$o65! z*eMSPd?`W3R*ti4oVK3}?$+7pxk*NQy6k=kyDceZV0+;Q8=UBa-|hAhf+NSCzSzHs z&sz1Y-x4`HWxLacU+Q2`jqpJDQ;V4f6iCuOeQ`5Yd^ZT2 z@iU8t1l3saY!AWp_J}L7ixSfUYC?#9Oj$_=)5Ii4%5Z-0{*8Ik=mxoZ|dVt z5C{{%Kp5pu15!uIZGi!B(F?RQfv>k!U#i2mMD^)Cb%BlO+h^{*et4@Ur#`}o5=Jcr zQbXi{<1;eU#R`bjS&6?j-p&)`CjL<3$M9)NivjOWtt?OQO(r)15dC+{w}3rQ8uk>k zw^RcmS@UT&Y=JY>ep|P;g_C!&5VHxnHBwLT*&ZXG{!K)lZ%*x>?FU10n)AFerl2>G zUyYx*m|tey^xYTWDv&AeP5}e#1A#wZE+ejyl_~c-CSU13Y_d_8laek%lm&YRc1eE* z`2!C!a0=!W?9?_z2(TJz?(nMoqkS{^sQmQwlu3}}bD7?C#d8q#fvPb3x4|=zx=mc2 zge(X!t)=J*`omH^R0OtCt&mJDOZGiL#xg4MFNj^?aXb)CUqCU&Z{bqKe0a93MQvPv zG&BldpKt%319jl|QOjr$O`Ba}>Y!%P82{w)^>pw8RACo9eD!T-?#R@W#IQiBj**wz zg2bm}aHHfmoBAXAC&s)QzTkf-->`W{iv};bzM^}5@=@K)AcN& zS!ctb-)kRt8E+I)lh#6TkQK=M(T%9xex!J`$;U#Y#);|nMe<~USKN=VfJ~+%T~SWu z4D55l*y8Kdqq$8@><1%y?K^F7i1%Bh+T-s4p*=uKFa= zE&Y%~2u(S;y!zN7~Yw77F_Bs_swn!*bn0WBBHzve1wl zEfWX%TH$P(-w-e5tHi9{q!`>ZyT_w*a1XRj*XQv+on-&VXU9;qQ9Xh2RZ!_43qSYs zUKFrbh==d%H#tnFo%Bt+7*x+d*-h-@_ZzS#-6(E~I<*ZcNRgVV$+yC*MN}TvaFeW8 zA6Wa;+V?jP`CRg5j&k-*4!fpfE>m)lXD-+xkJmpy)<0Fc+=$S5e-3tgu{YB28ZsXPm%ezQ$R{{r~zwn`elXwHlMs)J~FXjPuK zs!G%Fvp^fK55@os*`7k*qVKs*XbwK1%$EZX%dQRidGr)wuBSo$d(*d|P^uuc3 zz070k0m4p9VzTpwS4|Rd>$E=~2TrA9=2&e*x`TjvHcG>t?kTEUuo7LfyHkOoroz_m 
zQ$GL@zy}hr@pab%enIl5CaM1lS(cXTOO4+Ja(t&t-({LWAASJ60q&3DqXVZ?$&>Y0Q(aOWE^{ zWZqq9n8k)4A-#*0unGJXCK9)v9hL<(D7=57_Sk1zS*e%I#R z+~Z6=5+y1^JBXSwN#lhNEH6l`_9lCaWx-9D+`=(`Y<~ZG^8mg5Wk>K2+cWr4$$D5k zy&LUFgX#GE@h**deq|F#uL{Bcyfj5^Fm24TYR#Y>R0S!IRxXi+nZ{X&sOKWe^#Co# zhcmR|-h$kLo$9me{^6zfKLJ;@mEubO( zJn(Ti@-N6sc5Cno7t0kUU^1VA02(TWZM@xB9L)Kj=Hw+?O=y338n1gm+A*M5IZgn| zOmWB;OinQvMjFPK^}xtR9G3TRXYMLm36qQ2i!nnAagNQ-IE3@ z?aSj=|AM&P)bjM}%DO7tENk<_w!PnQ_Wc85#XO3CFD3Gt zn%0xXN|BT3?zvfuf4kFkxpo1AmhUm)>0guz(O2U0-RW(9TH6C@!H6Iymacu%mIojM zbU$!Aig;$)P2E%m;+v{C_Z~J<^@D_98d=jb!-kH|u90Nw$f zYHFjgxMw7kOb=8I}h9(~T(Uv9zzrFlgC}2qWvf(}GwtgEnIAW;rJ=0cSOq|uL1PN&g z>1*+T&;oi?p>n$9Imet3rB@_ia3$@orr+OJmNe>m$R`LdwuXF1HL~?DwTI z7RG0>wg3nMvaa##dEFllSiBx{!Ic4*D;U9|Y83#*fu;&oai^G6(qAQc68G5^g(`8o zw>eg%oLpK{m{BXbuF6tac?I~+C}#wq2!7j+?|)nFA=mYDrUs#l^m&r*5LVfJFYb0S zzLgN9zv;FX>h4j|Rpkf=G^04Q-XR3Iv%(LH^B~D`;r!kK` zim5N#CDHY>I5TOXGP;EOK(-;iVW>JyFO;bUj#e=W%n0{y#u-^S$6Y?@g8`~e*TSs5qGWcy`b zP|{Hfius0GDMfQ*1X0~8Zts6zALzLcNuY^MY^dUoxwzBa1QWC!!-mA{zMqziSiHL& zL_qc0nOe_I5<{tZUUGp~$Hi=(q5A5|RYmxtUpGf2AGaij&==qwLY$A<5wj`c4I~aM zgpKwEaIT_o_N@U0oP_qFrM07B;E8v;1 z)A2FTOAc}e8Swtk1zh^+tF z|KXtmr@O@Hw9onzsUo+ruewki{Dsq@hV;Cs*O#Yfg&cx`+Ux0N!NfSDMMvp^N3a^N z8WI2Fp`HGtBG!c7j9~#F1$c z+L`=keQyhh-=aobJCq|uv+oTYg?OF4&dzm6-={f>#{S?vvY7UHG#NO|FS7cR_y;b- zFMiK>a3rHqCdz1V?KAQoZQ`Cia(a9L0iSj#8t6L7O*mMlC$V(n!|nulAL)E|zny%F z@kPmd1`wC8Cr)1!Vj|sxehz7F$9+KDYnj&OM+htX)Z@98ubyviPDDNoy2QuGeGQRL zM_+!-YjASZg*^Wa0l|Kf=j&N;u1+bL7foF1ALU8BXco-U4z}0aglv-)I&48@vM}m#ekoc?hsbLPOpWluNL7)xcLir(MTS!XrlH_>zksm|g(+z)Nkmh=?3b@J%pom9^|cy< zoNu+2_a9x+D+Y^LBfQ=XYC6hQVLs4Ja_G_fzKT!}n`x&9^-Ifl&JPRKa}5hf%Yihy zKfGUI0g?A0tzk@ZVG!%g*NqJNi*fwvChJXu_vaI5ii{k7GK{HO|q{u5qUaX!^xqcWN9(3m^#WZpY&j$N2Z5Ft_l- z{V7|QzaVkTA;3%z+;`^zcyXARgtz;;*H`R*veXdd!iuU;Yi=nQiiIug6>#DBYd+D= zxk~iFm=SCxjwvv`x~744UwtN8l*hhlw{PEy$->TZKcQNFl|-l}Jqd*L2N|2)9IJJ9 z)PYp8r)4MnGSXg-->10WN@V;FNsSi@{G}Og)<}2ui$H{gXlhZZ2&@WGNWP3CwBR0+ z?~=ANmkj9>O9l|8L!J`egsimor%!`nY4H96 
zTpWG*fbJ}X@W6}@r>Az(*c%+@#T4AmvFPV6pa>sGql!!Y-A& zYTlVHG9n-4-0=|O-}K9IrWZaTRu2Vf^o0~N8;p zy3yeGum|VP9Bj^cA(Yj+QPCNpsLqce#O#6iMpKtJZnKkKWaF-LgA$p#5v=v^tk#k< zlAS;|2}f&b(LmhcHOl=pdAqC*JU zfi7!nAq=LS2310h^cqsYrOF%QaNMf>(KMCLrv$D(ziPspO|*-j1p2OLM=QQAMjoQPj#FM^@p4Z z)zTlYir_tQE>OMiMK36xa^Ejar#KJ?*FbgYH$C6Mu(|@;!@#Y$=!c}VRe81UnNjr9 zt2tPE!2B@ev6qhv&!K#|lx?%}C(>R0D~czMls)7F(ja^QF#H61JQJ2MeGi2*p(s>_XrW-@dqC`G|qlSicevnS0&(Q5onLS^!_fvv2g|3(PCOifEDy%DwaG07Pq0t1;5SAN5J52{R! z(@f)gkO~vxjoIVx5bt-zw32&vAXh&yZM4dr1umOO?=`KJ8h})+MLM#Wyxeq$cT|Dn zp?l7WS8Lu+n~wIjBpe}s#uiRoFsQ;qy;emk)S{Ij zJWb2iM&#$V)EdNx4*q&r`!wadWi-MHc(^?SwmK{AvU}c9G=_BvVL%G|FmLl0)LU_j z;PlaXC(XAWNs`VpYD>Mz63pp0_8r7Y_Zz}aXzam%A9OZV)?;NRs3%G=*S z+hW?xbF4&eN_#RlnP=+&!7yXPTOFLJN8p$u75v+(q9p7hJf0>xK^%G;n%}Lu<@n4+ zyjd@r7rLa>niY0DbXI5w0&F5Rj50zK-;zv0t=NRWf@*;in$S4Zxvm;5$%g3jH|3+R zruKgC5|43m{CHOwX>0lGxpVXN2_x|{$4bK&K>X`j<7+~jX#-d3@Rr+kYbZKezZ!%G z%0+Oiq-d=DkXiRoTT*BZd*Z`m^`%$WH+iEpSpRX=sr4PHIO4wHp&)9}biiIJHpqmL zHiFS6FvTW3L^U=kpUa1S6A9_@K2~vtodg z#X=TXW=+#>(!PB={2@FeZ`1R1qpoSHan^C~xQ?q+U0pPz>*r_szVn4e(Qobld`x}K znETFZ(I_M+L^+`(PSPSwGDJGo{Pc4P`tnglxCm_?!#)euA{!z22mtkpOCh4&Wp!Qw zD)p?AmBT;Frtz`W*Yt+d{;2{H&>X<8pv~@b6`9Rr&&@Op(iqa$Kl?VJJl}MBV&7OH zy4A?R(JWJ(@vL)qy06jb8K!ukWqF5CAsd87eL4zTHT3}yFOGfyKiQG|Gup}k`+R0mgAPh9_U+3dGR-Ckz>)Jvx`wqgkMNTdqI%F}R()Z~>3)2MMX>0`Tq897m*Q|2>FQbjzF3;@kJR19+!OF`J zu+Ia15&@;}>fl1SkrfbT^(1FPc<*eH0^aAjlOa@!Kz~2+YN)sBe%W&UVFqE(us7$L zu5ALXw_ZOL1j!j1>Q^3{ngx#Q`op&^I7>306_7=LTalFWbGn*xt#U+B$#8P<*0&y| zHilO>kT%cH40>{E431M@X^V%Vv1Pp)9WJ|xnpy5kKvL43%@~R??8^!5MaM<(R=^oS z+kiIn{DAF!A^NSSGJ>~{4Wywf0YwEUJ=6aqtpC@|^B+gg|NgRfBkLeM@w=Un_^0S( zsBz%qhGv}{Vg{$TLUrnG)$wBT=sHK_H$CM?90_C8*C^K-X4;shRMG2I*;n2Qhs?Ze z3VOAADzq}c*3*e+^cWu-gYI5YqLa+FAfRL5bnVUrpH&ewpLx z9~vx2|I;K^#B2VV4Zpqsi{klSx=&tcJFKa@MKqy!y;PL~$K&|`>S_fDrz#4s3iTcgfhbSW( zDCX0cFRc`+?WCCWXmd6yp~j$}lYjzWsHP6)$|g(pSwZ!tKt+6@D(@GmKbD^^nE(C@ zI#m;gx!?|8SjHQH<0t=u#?%A(kwnz=QXd3+qs2oIlu+fQvZS+mt*tt+cH)s`0lt!Fyvc$n4-ymlY;%>?g8Bjq@^qshK19H 
z;9Q5?-@Uy9<0p!(Xvg6#Hs%t)o-j~lzhO?3zyNa5)xi*YBbL)OPo*&Clcs<~EYBj1 zp(~$IbDH`QwpveU**?PeaStL4)?Ay6TRS$JQVZh3V=0ATEZnm5wcc8Co%&x8485kq zX{Ovf!3?{&PGt-*1sE4O%Pj-(hd^GtO^-c5pVXh!z6gkFv)+5Als95`aYw# zaw+-(CWG4iPP3&KHlN#L7$85;nCGO%rOFk|+SLgkQT~ zZ9>xoWVi1WW!`&X+G$p0v$A-=x?!y&&YqK?R5)DuTQx618xtriO&hT@2mKcb;U{{u1O9aYtO!^&P5{d3ggmeI8v~vp z49(M-DyNeqR3UUpE@DRM*(*>RyrTgg01Lb)CB~wEn=F%*pp)Mva2pVfDtQ36iAU#v;Nk%3q0BZ?xkKI%U0PR z&g-8zL#0ge1g=v`pG|~$pD8|PNt~NO7!@NY(G84mSH@Vu{Gs&%2z}3k=L&1}iJ=Q$ zcnaF9t6Cl_B7lQT=k!a*pXgH*uGx3gQm6sGV>PGj8Q!F8305TRJ=`V2)drhe*oqPN zu&PduxzVGZDQQ=e&gWngg!-T@Op;y$pp*r@ZWJf-0_oH+_T8%$I~n5at;xqTkEe=G z_I~jfnY)q_75N@3r>i^YFY8llF>L5$7$}IW#iK|n(@)+)LOwqaxYqpI>y^YsR4m#j z$D}kxM~G<~?N@9gLOt$$Zq~Nzi&`!{x?lU}yyO!E)l!8J(mONR;jSQv9Kxt# zW>l{68Iod4;J-w~+Qu)BG`teim3{b1_osB5QARwIdx6ja*#&uwoI7^cq(7~ZCk?A4L^K<+bVPJl!K6#Oow9bCxB~_5AEbr{Nvs~FZXfR~*xG+J zWaZYky3Z_`yb%WwHdfBh6fsiYcDS};CRfM!Z7B5kf)(gfrdadp!_9=Oy^*Kn8%lJ^ zSM{LS^egU19Vu`|r~=_rXGjq%K|e9~qttigy^FjWUipRfopVycPuI`-zK*;;y@sks z_kp=WVSOJvI!&G*)w|m(;Wt__l_{(8qL~seqwcySAaVUhU-lkmyo?A_EyJ@+ z5Pmv0#F>7mb&L!Z$xJGYnEe-1>5AzA75$iWtLpWwjQ_FD)&7*Y?bit+}j`h zHYuz=YSgbzuhEA%A_wTMFj3KmMo6(goWB+440H3Y9TvQrdNh>`DPkR$19+Kf3|mVanX3zzL!aM;{f z-&5(n8lW1@BxSt*bI;?c%Hd4XTep^Pm(2GZnq<0kB_l>HtCj8*T=s)I1BTqTD zyL_wT7dnPw`kFM^L;N@!e;Cf@bEozG`jXqET775bMwM`I_oP|vr}`;* zaZK&U$l}(hK#NqvMVT+1hY!G_ghWaSNdpff2H>Tmky15%KHNV-Zyj|PDNa$;PH zBe32lJ+7`fJ!KCr+>VKXELY+b3Zxd=l<3=|^zX8{Qh2EOG!{fK{_6l1Jy4Svg5*j_ z*g;5@h1MIp%BK_7hdd-tnP2AuHPf-E)}4w_);@=v0T_2o+Fot)dZU^MxmQNS%y}SDOYt_DEoX|my73noOJXFLF7N^Z_*U|@no|`X!@IzKN zapA>kUU=hwl?U70rAo!jUv3`2>7+{Csp6|Mv9hi zH^VhoDq0^q7p6kHjgEc&%OtiG;$W;$S$tY|Fh&?37wd?{$;>l&__&GunB)JxDG;L0 z@X4ueJ!>XHTXh`vcvl!XVt=llu-m=My9B4)kD`CJI>LFPpLLsH8~e4vV55M)^ua@k zbkJQ?d;r%p!Y&5}Bq}_?`}CvZP8s_^NOp_8A|jT@Z*3BcY<{TfroD3Fcp=-FUh1^L zEWK?s3i+w7mnsxV*EmXE!*k`C6#CV8Vgw;%)Mh|NeO?O9+3b zrC9Z3Gd~(vXOVc3{-CVKx|@{S^Hjf#|BL;iUN8%F`IR{dmVy|1Bl`=5H-+$8N0SR( zu(8dh-=bT&&Xue)QW$qf0Sy#2;ugt!cFmwCUKD(WqBB*}5OLPC{#A}Ecr^swkExWa 
zy)YyA;;hS}1gspm7snccG+q-5cX^k#b8Zic0ZevzZ$CUl@%lbH%f9AD9|1PGb@4xA z`f!?OsP!FA%`JkveMbpV@BGNpCME z)7&13C;~%zH__d|)tCa9B}F2#+E=J4NPhdqB#Fh}yJfM$zMDLGDY$OC79`rrq z1C&7jsDb|Xi>{xdN2*%sh$SqJ0mT8&FASe?`VEDOKOEM_t)jyDbKVlZD^F+H04aM8 z-lQ`Hd}m5!Yi2q~*xD@Ga6B-O&pNI`qL!noU6k@?9-XaP#mUJ<@zvn>yE7PDV8j+Uc#<2b zE$@-6q%!vWBk@iBCe_vDxtb#hHJy>>4*W>PKTOX5+($CZqx?ag{l3c4Mc`inz>+-< zsX)M{+7+;{RQS1ety5*swOL4`%c~~QSf6aRnBviyL9I6fKQ~a%3r{1{C>Kddr4(yY z5cV%fz}5fwqeYWzZo1P(`jrjHx9R#yEu{?Lcg_HU+2)q+Yc72vyx(XNq35rlxwM79 zqr6;4KlJ|SE~dU4>fRfYb4aNeawrky9l8k$8`$o`|L$>%S2`pC_ z(R;EI$w8a%5Jn33J@P62jgf4eL^!@)w9%1BH1>~UZNG^1)1r%{PIzn!`dUbx)w?2; zt_Z9PB}#89^8oC+2exBwd1W>ldWSGfj3v;@u@!7Kz0EL=CP6_qi$4k)Rpz%a;ugpE z8^ojrMh1leS`3Kn1KLcE(1r9JbBwKe06BdXE5!U2pfFh$LC}FI;xO*^s}i5>91U6d zr?C4aTsOlYm#&60KelpuImKz_1sq9u6M&P#7z9pU<_&rVTkYswPAGzq4joG`RJ_?b z(Qi(AgnIk+?$EX)S@Y%U+2A4Oek1h-V>#3BcXSYB8E_uBE5ztz4s~s>4e@P zQUZh~O`0G3?0xpx`~CCHpC?yxU2El8vu5U=duIO5{9OUuSCUth2jJbxUA(&hz~6a* zEC576KuCZOA|xcd3j*CGen3o2L_|zQexLLKEfpOt4HXSFJ=2S)^bF4!sA-saSe~(S zaB*|dJ>?VRh}S7Kp^lAK9Jxa0=fgj1Ka`P6HpL7c)~05@TsOb=#eWkUvOMD!H0Fl7`WtR*$8S^aUO$5M#*%9jd!+*3EHMiX;AYz zQe95uxh}?Dr^MUJ{yz&om6Zi2Uw*A5%)C+?IU>f^LE6_Qr$*2bIuJ5KQDLIGa#aEwZ-Cn_#kle*;N z_bab9W*b;Mt-C%6#pfUg(WI08i(<+EWUIAICtm1ouHe(sB05{9J)j| zb0Oid^}m4DW9OVJtAzO0{EP4=< zl1I2;O!c*f{qE^No-a4mwqY-Z&N{4$#K$|Nzga~ad+|jzkDbx!``4J=M!xadtcV9& z3x1&>pe;D)*f2#*<ZyNx@` zv|@`Gn0PzK$NtTi#iuZhd$PRvkdQ?4`YUt>um56Q$uV(?dMaQT z`J`3&HJKDoJJ_sXvo_+XpcXzayt+bwQcc{4e8=K9-2p8}kT`=k_Q>GVkO%;_G zpAq%#>v2WZT`09ZH4or|?QUdbF=w30`;4+;s%{CD{CU?9W)MZ_ehm)!<(6IDr&U;2+u+l63@nMcBaENdjkzua9w44Lt}F9hGE zu};k;ZnHDF9wn$bJgjhyj>oHARv?7K4DJ29=%QbKCfmS?nbP%6%dbwBfg`mw|L^oD zC@gl&MI-dOl?3BX(2D!dg(-C3?eomq7@m~ z0^S;PQAN~gx;6^;G_?4?X1c-nx^7dhJ$(@`r_B-0E>1{tNQWo6kQOuM<}W@J-C0iq z?5BPSC6lP2ePqC~U0Ty?S7f`L?Iir>?#BvGh-)Vb+*MVOA{u$tvs+()o(CtvbO&oS zZ>EJMQj1%p?n-&c8ZF?B!t`EbPmI=i(}*!_S;qu3OGw<^lSmS^+uVON33s4;E(Y*0 zVgk>3T{WN)PBCdsQIet+lmfh@~`F5o)F7R?dj(qphf`@V6wMqN-i-sG2iG+~Q 
z`-kNZmFMpE$ZorA2BbbRQ70d?{ey)JDLP2QwpS>-{>$yMr{a!6Rp&BIO}YEJQ|V;o zO_)bW?F~g(F36TTUsoAD%kWroI*6I$tKZELkCLZ2qeE$lW_Isn%jpZMs%p2rn|nLD zPkL+OgASeM#KZii}(u&*>bdM(z;A!fu_7=LoJv=E-Y>s`*n>a8RqOtvdXg) zt42%+g+^qA27}ychg`V0PPz0>mBi91>EleCR2!Aok`#D?UcL5nv?X1M+v)YEEPnA@ z>xU+V#aJz-zMi`87;>!9eje6%yVVYM24b`3T=1N%VH+Kmd;u^R~ZT7R!t zcC+wHzq%q|QDA>}swPk}SaU`IxAfaDf6^!KoRXqP4xA(G6cx$1JAAfi|7XbdASp8}f%$gAUyCtNTbKV?pE~`CwU`y_L3J zr-E0lo?$`Zm#!FJae0yRVc?ulh6n~bYlo6}ynSmR7=ZvTv9+bGfpB5|5DC;6w7lNy z7^liR0Yk)x=%4d0nyrpCzy+%Wv#2&Rd5#dG9EI##0ZPxcw}q1NP?46=^R4ywEA&eR zi)xpU8DFP#G_XIR!K^B(QasYX9X|)HxOF1{^k(H6**PKe&IMWdP-sYo&<{G0^e7ML z*Q31u)r&4|)W(i(ilTY+xQ=3Ec67S(2#`_kw*_L^rnXa7Rls?U&qK`BH-_bKTer~H zWb}sd;$>7Myk_K6dy9}i-pbAA3V<*$C%??Y-LhCH-1X6j$PQe*(JUTfAN8_JYTI z_oo%^#}xUKe2?)2<9R5{`$w&lmi+}Ncp@#!mTmLm)%UGRGn9%mNzCHO0e5!IEXXa} zV*fa? zV-T^JiHWXT(z@BFq5N3?WU}spv%bs9i!H$pjaF7)x43vaX8XNi=~>Fq$K z^0&Q}!KDBJ5`ZY6?Vqpf26KoDi?KIG&mgFTumvlXOS$cxPS@G^7T=POj4xbc_MbX4 zt&lhZC3tFU5@072aTDWxvu0l{%bisMr!86keP&kfq+H5-r)O1qDE#K4vpuGsBZ!-H zuOg6PV2|su?W4V@9A*vrl{2TK5b2K9h83Te+1WKSVU?d><9%Om7*-Hj+2ly8vn#ek z(aHVv8sxOUd1p;SE%Q7^SRB`}8TQ+LGEVi~rR7hl*s|!(RC2u8%Z7{7-9WkbztasK zdLmvdc?1qGxs=BJ`w!iIkS#AoEX)5{nwFv$5<2m17R;SEHKpofH@KHlZ(E#{)Vm%f|G52f2@S1t?>52i5@P9&C13f9zGI0$b;JsgeLOMzw|U5AOzg)c z%(=t=0zSFfNXa~kxZomUZx$FTEr?&4Y9T6!AHv}CcpIUzdjlymCWVxSgx6-!ZeBSdaq_A;dyYlCjh1GXZ%HK~|UvUD9t_C&q(eySgpk~Ne9-P~I^ zzx;BwRBD~9y8myWU0w z8NLO@XpH3pyKY4uK>**^nEiT}?yY^9Egye~7DJz15twdn_DMnmbEPfg08xaxIu}8& zzl6>Q+qWz^Ut($s#PLub*XW%fy@dbO2GxgUMZs-$-)Mi?6ylaQMVJl;XjXr(A}T<1 zRWV7~(Avx%o8hu{^V~~BoB_d@B=>Pz7;^k4Wg6e9?1kT^((`q9+UpHplUh5gw5Cu{JAgo&afVTHAb1Mt$D4{LOGX1$HBNX9IB#_XmZwuJhkPD+&zT)oN%3=PDWd=#i&$V{hnBS7LjE z(4%BVH3vMUW4E`b99f6<8`5bb0R&-rTo%qCTM{F9iU@h-`@ZzkydC!ym=H6jGqiZRfN zG01FEcg=+me-M2+@h_!nme{l5CirzH#TD?m7S0%g2vWv9XUk|Uzi7;{t8lsXJX16r zW0pnSW%+^|Ga|aoeNX0|FxxR;pwS(Jbb&C+ZKU*Kb)nNL6RHKv>dI=Y0>Ae@Epd|s zgq6{m_KG1E*p6u}=eFxk^gV-!P5e0wR2IX+Vz@e)Gdjr9EJ~%in|%w9-RU+nlz#T( z9Tr;zoV}4@`rvCu{ujVu_Rwf 
z8VDD_I{}m{r6}18Hc_uSXjD*jlLW#Zc-g<&H`1jfAeA#-dYctJF11CdzC|g7$o8W$ zouhQQ&5SlMx>|3C3Wf$~eg7ELwug+pjF&a~(m@om=qOA)J3*O0;$j{Z{xJ4w97wH_W&&^90=vr{LxHT4o$})w)?$6J%gw1ax4RNAMVvd0oTa+fO z#Xi@scYgG~yca9nm0Q5wXSV+YEX)Ad0};zd7IC|ywYH5|`PFB1+pNT0&puiXUE}Gw zb{Xh$rnXcO{(*Ebi{CH<540%s=JqrfR5haWGdVQ2Jwtd0TrbSvfof6xZgKF$=O|{` znZpqBSLD?1Je4V(yNxjK{i-IgqF8*_Z;Y9mb_>HI04_sy*gCixTC+vpDneSOT@LkeZnU43zF*jL-JpTShwQ))VJDxM*~Xb@O-FfbgQh!wLY-!R1G#y7CZi z54wCV0zD(J$`}q_4_3V%C&j(P~ zPy=KU0a;pJ!NKBv8y4P0L*h?>8@t8-jRF67YHaTu8@j2gn=oGqyV9F(ij$!V-lcz) z09quXkv%8To;bSCH%_G^PGo0zLGpw=0DKP-6KGk~VQIQDFei~iz;|6j`FVih)TMdw z@_CZu+o=&N%ho^hCYqGIsDq?CtLSV%nBOfU8&GW2Ql)MrXBX-gV3RbsdRb9T7g483 zYO+`V!eI)-YjxI&i=vhxT^;US6pVc) zR$mE>>y@C~<@=!tvpO9%Yi3Tc zcMW=(vsUkC=@NmC+NU3BB3WF1Tn{tR@^~ynvHBM!e z{DF%#F)Yy8+!I#Q;lBXF6h&deU0V-znV?GAW+i3!b9Z%he3jzjthj{34x_~3jrCzG z-h~GsuuIEc4py_YbO^1ddZ4HwY5e-V=c#-=nmK=AtRlP~a7Q6~>v4{-s?l};r}o5< z(yWPfZSS0Cd;C3b;7$;k1kqB?fATJ!J3fqaqSJEY>Kq2nfBLQBzTF(jD>MmVkbGqR z4Mv|BW^&vXJSuZpE&!EMM6_1pllr#QucwW|R5e|<4K28=S7-?(5))UIbtT+^Sz=GZ zWm#G+@M7VKxjtua?R zJ~=cNWZt@j={A(;U-isCyit4kZ634B@N77>a#*5z?2XP1Q;W`s%1mEw@DhFg(3%^< zaQOg%C)wg*;BTsQtyl&-LC_ep*{}+p43Z2O(-QV(_5{Z$gw9&th^w3W7NO@D2Je-Z zCn{koXE-fXTy1)J`-erYhd+-BOpY2e*G;wJ_)HuthdshEzP#;m)0Z8*qvketJZyQ- zodC}>09||4!u_@{1g$bl3QHW>zKxIZ765;^&CyR;*Yh53yHI`Fb%1mF%BINQWe$;` zq)b$w)kVL}CxtQ()_8T0N2cL`2*VFNV&*owgvgvCe->d{^0OVco}t@&OED1^=g02cz*^gfs*rZX)vkCPJ?Sf4(4gmrVvq2`MwQg;mgGygdY<< z*~~$Q;je$f4!M}~FnYK7@5f&Pn{^h8px;u1$Zb4u1rruzRGqr-KKawbN+E(;ba25l? 
zApNXME+;oQ1V{$8baYfay44wk-+0km1EB1dByMtYbaoljBk(?>juAg!PN^UEKUAnv z1Jdfwqs!ZM`C>(d0niv`-zElL6cFi8wG2; zqXqoB;B1fP!sw!Zuy8qopJCraKP$!t0j*FP3rRIl^lid#mxI+f8=@@!)}yL*&vzab zc7Ju4-o=+ZFz2EuBSK|Hn4BmjF_EWlZWBlEGWF}?d)b%Z6su0JN&VTU2%kS-eX;Ha zsFZt9rdr15#E)oQLD6tbS^4*&am=Ye+@?!@9;18d$_Jl#QAD~@oaKG&wW4MLElQ43 zP+>%q1TK27YP!aUa8RAz6*2JP6x6%H@X_3*J}K3m_sgxHzSmrdh7b=+it6?|jCs3%&VlwwP#R>qNVR^Ldmb||>& ze2^Lg{UfGO-E_Kc`aLlVS(t)$%gYPeWYzy%qS-zfrU%4 zu}JGy$G=@7en$&(iWv2OOe9i->M@*>zuqsb>y`oqjh@x`(*Fg>75ocL8<0m5n$Dhj znOkiRdFID#2_Gqpaw-2h2s#p(M*wb(E{oDcux^ZU+~|v~I8pb&@-w3w`rqqUiTWD0 zMD>eS<@K6%>y#ED2F!oT@djO190Nq~AI5Esig7Hm6%;$_Qcg}HqQB@yVh|||2C5J7 z<~nxoV3HoFe`vyPVd75;IhcJjuA~8@J`^w!5z^s!y0mL zsjDAr;XJDo;|G%nKGXGq8XH+u`F$a&GFf>UbLy5xvcB@A2G$2AMgUNjlHGZBX6-|T z&XoHljVi=X;jWaN)@QV0acIx2(tY8EQi{sxPm}Inz_D6CqK_DVf#|;kQBsUe3bx-b zolDsi*Afaz7(y^f{u=IsVAn~pX-A5yj*mejk3I-J-4|=(aql=i)<{(p>|%1_v}Yc8 z$Cms755(*iv4rd8MY)?G*TXU1Ez@07wXwrohbtbR_n~#d5QHccocJXt3$4lbrnp?q z?Qo|>Plv3+21&gF?l;Ut{*-S2JZ5J zabcCn!l<%D(MX2r9~`nXtD_+htqsyk2k5xt|v@~>7 z^*Sl#9F?~-sW-b9;lkSh8vXxB3nQZhO;lhy{5nHDF7g%ugkHesF*(1z+oWs?3rP+K z3^;=?{Rm$P$Ecm77+N0_`EE3xe||$+1rhL1U(Fj~$x&*gR@VavRde-e09vkc+>%a)#k!z-z8n zHa~sQKE|>2YK0rE(Q&Ce%M&jy;;ppb<`;eSPkSK8(H0$7c8m+tFM}=WZZ9etS;JgS zx%i_}?!8l%_mLE)7)I6qRFLiU(lfNykSXHCl@>UMF~2j<0Euyj=pAf`EdLtR}>fBqCy*t%c_+kHvl1bngr0 zB)&0rVKH}7aDcm1&)H3Ei`900*Qrydm+wy|RJJmkTQgZ^HvbFA{=B^f%?OcyvrtWQ z)yUYdv4lG_WZzh-xik=%`JoH!Gb^#7cMOzo@sJ?GI8M_#Ys=|7eH$)F;ei%smj(5l zcVfux!+CreqUsQbl97#k%OBEf4XwfllPvxx@)(p%vS@7&a3-iYeEp?N4V@!?o%(@%8W2>z4%3w09Nef9f;Y4S z0203DI-1G_PK0l5Oy$q2>3Sfxn1V$Vjn=jed%dI~``Hh+UK*xlMsQ{4wXVf+l2~b1 zeBvs;N^XIqz>k(XPZ9?JLl8q&p@*@Zs-{cBL~U;hTFaJ=d5M1cKDl@@_)o|26`|0J z|1{9g65gY8c7FlSKe<(ntG~QPE|%dghdOsD9Ri7t)7(`vMA>F*%;eegNwC4|i!(5T z%rFl_!%P1)tgQHPLzAwy_M63W@?Zq%=Y4@=TnN>#nPS7~#1!@jarmbdSAqtVi1+3e zXiTPYaTA^J=sqp2h*epvwpQnx?v^wsZxn#Hl!ha%3EA{GGC%(jUw?Sd&q8IxP1lYu zUgnqr=24C(&$3EBih?v>p5aNSF+3xG4))t8fy|25B}TlXySHu>LeLu6o+_8W~07h21b? 
zM{J#8NtIrb*vqACZJ>;MYsiV5;Hik_aN2-ilWo~m1J+Wgq3J6*ZMUsqlRxz?I`yQE zd5gsC^4VEYZyCgey@HOsdPQ*(WMV!X(ISO5f?PGa8b7C@5!FnI4CxI_m=MSl|2NCC z(;Mn=*?0Kr#}5R&rFi2yb{{o=58)8BACd3+ecR%qx3I^FoJa}yO#VPm(g83QG}UA5 zh8-TQ;=$U{?e~JUE^Fzm!$p-|0`|FF1}w;W7wBC>XKgX(fyi%aOjLticyuJ=6ruA; zu+y!And9|(H7&`MNnln13&rE8O~HoU(KT6xF{6Lx7lS(~*1FhFPYM#fN@QnQUk2gI z4{2BAC0Z47imlT8FHY1o{h@ZnS#U^avp`MIv5mDtpUkgJZU81Y<=QbI)x*z!-1j6o)j1WxD`e`--sHQa$SX!mHHj;PV!H9YQI*O)2h zcGp;Rf5g+2^MFzL%?Q6OnS_tXO#w9+L?kz@F%SJNB=dYwU!8{WC7p5wy4P6xNp3y6 zRYq*Pn$co-OobVy06=hcau{+bt9|$ZNjH&1VPB&q2guFy=zMz?LtQ)mFkE=g8j!|= zhsa&K?d-FOnqz-9e|#EWXUJyWfe>Lh6Aazzh2Idb?feDQNESvRWz{Y@?|0VsgT>=% z$i(PZ1Wvc4HP+4-i%J-Q+vP8jk@9yfCEAlV!HT{{aiMS$b058L=ie+^7i?`-Zqe*y z-{J=C&u29Zpd?5KYC561#V~h5fX~QXH3hM1$Mkb9d9hmzSo0@ER`IvMjWXCWx9Hsc z&49ZcZ2FoU_Do1m&1Ilf9mypN)tA0W?-+o)%X8`61 z(-IR&1a_N@xQ^bKBn=hFVr|;&K(;&f>jn^F~bZ{NnVOP6p>6C^cS#Z zd|VUAUsNkN0}`y7K)oJ@aD^rJVUU~cM}{^3bMNIC%8<0|`Xl^xDC*5W1hSx^I;uqu zCTASm&2VC7jr1|2@dtG?-mb>{d_+j5eZcRucV~YA4`WzSHx2Zhlkp0=C=;(_fvX`u zu1}4UZ-VU&X52QOqp9_E&~=%blbWJ_O*XUI^hNt!O?eqv#rIWBM+hm)_BrQmq- z0E=tdCst>Z?|W;!Xs}+W`nbMe^#ZrLn_bCW&)~}n9{(va5M*bs{6EQJ3JS0G9^!@R zk;udy)+RP?in$iWWo3{g=V~rpg_r#Y5QN#SL-N~zevi_%lvn!#JN0~7ZsSC!jG_{R zkILHZ?SjiF7Qm>s?jso#s$o>iI*d4jxjTH({$a%F9Dq^hzxhpv04gxy^#U~Z=8i&N z+bd%@Y)jZlp)0)SkM`W;pW2AHI5%L@$osYQcHY+Z)YLvI3mMb2OH-!Qo_~?ZJJ!8N z&JhHtIYdh&#!q{oJq(_GXB6~oB{cu-S9F-MU?G?n#0!T>ZZ|2mJ-3f}z^*5k>xgi5 z{>1F3l+-KT2@bx15_=!MN=pD{Tl=6oPt_wmGaKeI8@n(aMLy}tnKjN(I5io_4HKvHp4b2_($xOnn+X`BVU zIKg~aVo6r5hT*(2SdN*+4}Qq%#{UGGz7Zg~<3cFKDv2#PPGFrHLYBHH%eggEZ?P-S zD6hndJ8=UL_kckPph8o9RmOs2+@|t8_3Tb=am$d55$1Sq2tEJ|?v~#@k>EXET$VcZ ztr}o1aY*3KDl%h*lQ?zxIysXvk%a=c^RC#M7HiPoas+iA$h;jI$tiR_(h)A3%GAWp zZ?)1U_AAS?glD|S&G{uiDYyJ-S`TyN6Krw7n^^&P z0f}`@-Lh-6M3Jkyl$=~NFXK9M!`6Me-)+sMM2X=NuU}uwIQ#{8TG12eIQ4GPyAaZg zKJ1W8D;-MJ95v?CP+F9yxm<{h#nGWltG8l>GIak*On`zPGi+hr>8Ik zIT_FIOl9173*jM#0?orJ-xgo1%ze&xY691Z6nit07^E|{=) zDXeyzO{W*k5Xyt5=_39lq$6e=sk~maD5z}wYTX1RL6RVcd%E%e!*smtlBN+i*Luz9 
z)pR~$A84(8{}!UR1|Ir@WcHdgaE*F&u{2Q2|VRnoE)w7n~$ z?$Cg;Egchr$JBR|27p{mUe%N$(C*0GxsgOSabEcD>?6xudJiJiIlj3l(V4fKgVWUE z;NS>^uupVnNqF7?2ASQoXKy5@(XV{?Wi^uxxD#7G_Fv}ek{;tPHcNN)xG_&~1z3Rz z4Mh428xh` zkY(@W(Qan9nB5deg*V*@2_R)y)25=^ zALRI(en~A)ZROe_kLh|E1}ztPA9{+d5<%QhqCe25%q^`G%+YHE{}r7}gAqR{g85fT z&}iFML2#6bTe)U8Y}wYC^{ykP-Qz!V24ss%^)FRz-7xM>({`9+3jP>tl7s>)xU~K_=r6BUqUOE`N_PvdwE>r#%#ArFCcdM%WSFE#cdS7;z1QE zAC_P3w-{Y6Jdj9q3RM~@31Avq+L#9;er_tNod%vHsCm0BEgt}bGnY^dOBE$MTmr(( zcqRE3!v%3wU6}4%w>L~Ra0+z2W+x`htU*WnhrS}KK)0V(MJ4V8-z;q7l=$}z!#0)8 z{NBdAHOuTy`q{g|-w$J?jbcIdb(GJ_&ohPy3QtJe;^WW@9G0Q$bRA}HInK$OK$`!oYc=&j= z!k~q1gO!@yJaZd_u$c=P!}1|LgFMla-9Q+5H>tU`dsTR13flLff{A>`(~pUQ4RN{Q zwdW9V^KL`~hdz?EN=E#Q_8#r|-mueL=yab@q{)?-cXvSXv&ug+~>n$zT*c~O`l1@if&Ea(Ooj<$kmvJr6977Ot~%Vj+Si? zt;)<zyZ^ki$2Yvl*CCa&s!ERI;-WY56HX2#40Ok>z>c4| zmi)M_i<7unkRpBM=^DUUh>yM;hWFa6=2fMx^Hc*1G>E?bXI2{7poVJ8kbnIbK(PLA zJ0wE>$hYC-Q`ZJXYntfBB!4gR_>+%P$p8Q=0C-ys+hU;scH!L*Pem*R8@zTUkT|t(g{)y!(U$pL zTrb#(F)#C?SSsRK@-6Z_(*DVMSf22pwe)%+yuRFTcZ5-eE%QMkbJG^>+&x+$buKa9 zzktixn<)AEobtkP-7kjp8;dp=L4mMn{BP8Jw1iT=bgp~3TgpSMr{PuWa@Ui_gGC@$ z<`EKapd^CK7D&ytRm&Y+CI9xcc90pLqLU%{t-s=d?;ATGl2 zx+q0p6Itl~E-`CzE!W-v|DP*N11u%LPa8=*ntTH)eJ|PJ^^m=p3R_Qri3; z{3f2e$!`EzttjB{-lDd)&uUnI-aK1q#fSo{95%Y3urOR-Ra5USUFu^IXOOs=Ffgnh zJ#`NIk&)B%Yip%2lPrmT7b?#j-9RH|)BzOVq|r!6(tY1g+&Rt4ITx|}S(qS;tg(0ZSg%R>&8UsCuCBkk z_OsGBq+t^lKyCA@h9NuX^sUm6tdUbms*#?Y_V35>Y`as_M{frmKmWYf>EWlv+Y8v% z&)+JQ;7&tVq9+G1(}TY?^@ZHp^xZopf;t#F|IvSTD>^16cJFyLHGZTNr-ip%&!mX9 zT#McI#t@4s>Z==g-;OEm#w>&J9xm*>9jouz9Ym0PglF8f0r+xU$_988=WOHVEib9@ zP!u&Mj2q%NczVk(t2DWP@a;F%!6m`3xpdd19%4;M( z+~)a{%;mv%EN=ahEtuIXu7sD5Q#LN@p2JR=FJ7h^b(*Ne6)T{UuD1jubSJ_m%N`>qkDgs1kQogJ-Pcme$tB9 zR`Rn5Nj);}aqQj2`w|P;!QStr6yOTK`;Ekrngl8h@C8%_lHInm*a`xyFh^%|efK0s z`mT7SbuXGi+M=4~%f8pF*_HSja};4#C%;IyHzr?Wm!;O86%Dpg~ctw48P?RcWDCtoXg9 z$oIypbVvww4)6!L!}0SB*E`Cyo4Fur&ue zBEK+0DbdUD-AAz>hsmJf7T4g6%9!rwG5G%zq_@3Z(4Wpf^}0ET1r{&` 
zdG$_(&JTi~`0)J?)MDQmnoB|1rL+~zLnC1)5-E};&lOw?o8%i*)y0Hi8NFl1)9Lyle%oHf9yTFL6F`kLc+7W9U5FgWQ779*!8F8Tw2I4A$^v z#0pk9I!f?(s?Kx=vZ_l@>)G``jWXTUi~NWW8{DIDYd=WySE_3JY35&wV}Kgb`}y3L zxq58XxI0m1cC)9Xfx*V^l8=^Y)@W@vhQ7C7FaGbB|l%DG#ze4w@@)f%7m1Q)>-l)SBr6EdSs&L5L zjS(zAZAGd|=xi<8mxXvb+v4v<4|;XvFgz)9gxl-Bv+| zbM~!;rQ)ipGA(nMz*H{u>06RKTlS>x0Qd`4TyPXq&q`(o z4m6hL)xBcNh5CRp+P_FsYm_|}QqVQ>iJQ%>mOsxi!}O#DkssUOHuPw38>UndLOL@(pLHDDPg@s<|ZB$4O%}CrPpr%CaR5uo^l~ZCulFfct=Uku*fw zRPgpPyut3iYWgqo?e#7Dc63RaKj~_;UFPOQ5|tR2n#mk<1T2yWdgrR_d>*ZFF`taA zGu*2M)G3#`9nE0#i`#@+5zyY--qTY9U(2=(tc`or%9%P>3X(U;aY6R3pE^pMYQ=4u zK=(=?UE|p*sJ0#9^#DC3{WQISP-36rM5=iM{}|g9jdXf=%ZG&}DUdFGIss#YT!d{;jMzB%6=$<3GlzhH4Yza zKXV;1Zc~uDov79O{P^&WH+xa3p;f}=^R~%48e&CTOMda+5HpH1X)AJ2U{2MfCsB&B zvB!?{W8%*@dTG@$Am*XxW1*S-Y87t_VTBapX8?$kGu*cOnw^c5bMs0M#|mRFx0!U|~g} z?gWYa^msUG-@eU#^3^?2i;Gf)Ph2r76;2LGwAJg_&2N7Jv5+eW5w-Qm@(TyoK2y?E zOI4#&vo}}ZAZSeM2J@`_Bvm=%Rk`kSbz+N9B*aDH+@wU_Y3f@_hryb-HJUdqkn?5p zpu+(hGBBa$gttEd#46*RTL>qD&a zhFIQ?rD*||z)8;fv7d38Z8;9v4hA>O&ZcQIJ0+TM=EPz;%pyi-pv^FGdlTCV`!vX zvRj1wC=VF^023l@7#51=Z`awmY|9e4PIVxhblzoH*&V)O)F%|Ut%mA;g_V6jPnCFG zW}jQ9A>Rj`KV#%bZ|u0h!O7C8ivFnqDy16}uD#;NnWUw)O7C5%cFbEh)^4?PGULyes$j_9IGxSdzLXHjH5I59D7;Se7^?BZ>Mxhi#% z_*!cD{e{4!s;i8$KVL(e#{R)x%~kDC5> zhd$pD4mRlfs{t)od9YtgAAqN`yf-yhBBx8T7D#A=VBu+a?ZO{@4y?lIGuy*Xs@BLq z_WcUUyA%#m_@$daPB<~Q{oW1O2|wH;d{x%oFo88qiG;MaJ=&dFh>H+hAkT?=b%c{~ zx7upBf3V!bhdyi1`F|#SjjcmyQ#?C0A3h7iklmIh-yAa?R8w5>jq9B752@QYvSl(b zh3NO7pjTWjHdg=xm&KQ^)6tbijuT&}79nSP)BW_02=e4^j}Uo0uKhGjf#tJ5*4gLP z4YC}JcJ*~K_=`rYicX`hqcy-?A9LgR9iuV4hfvI8w_567nQS?mKD5VC;t*+Q10$>Wgq{lZ)s`c= z;LDE)Q8P?f&A5*|v18R=0J5Z7Uzsy2qQyItIM?2gxH+zxW;Y*unRn{5f%RQD@ay5a z6Ylbc@;-@RYW)L0l>=XfMZHdn$7|PeKi@(N3;g&hBt>WqH%^)0_uFi-YxyBBn8IQ^ zAHH@)(a9S22H$2?01$xS>y$oL(k@kTjy;w1bi2Fq(lg3NVL71#hI!K1!F|EOVItTp zKgXpfuUJLz*P_Op)aQ*`h*!_j)pb*Tf1az%pEBm$rY-bE{Er>;!p7z8j4&@suJ)gy zMs8j7rq0CC(<7?#mLD5$j43T#N2cci*KcQ`Km|*TVJFS2+;Mp|C^=2*ka8Q;Cnm8< zZ1X;JTMuZk#Rar1k4i*YG)|b#7(M?vyPfw?j^-~Qa!oc=8rQM=c(Hl4tfkSfxw!m& 
zc%-~^)N0f$2LHYkjI;yx|8e!!VNHPD-}rEZlA_WZpaYbYt}#MdN~EL&9NkEFNr*5? zkZu^AqZPp+Bc!{#yBpv6yx;fwKEL1n$Jjqy7Z=yL&pG$`#1X-DAsU`H%gxU=`7y&( zMDC2rBzzixPeLhNLq)52>D6uI73anKc^`F zv6iLY#fsZ*Y-i`ib#_@R5Yf&~r`+ocHzi&xMzA^ukb#Wvtar(&)^l$EyAIzsF3IICtpcu5av{dtk^{g$1hTGKCWFPRn`Xkd8h?X~r|8?Ag7l-31VwQB)?Yi!KM2&7Xw^-cJhFUj3dH;6u?1D3W5jk z)G$!{fps*uE%y?sXPMguQ#zNq7(3w*7pLp!p9t6w3M6_zYNd}erTBSO_7e5>*<~&a zXkoni{eT{y%;%#Ha?4K0ek>7(DE)CYE~?fkw4uX;v34#DD7-}}q~IgnIPFS&rKb0L zK}UnH9Z`R7#6bO$5$x0sen-F^%>Klh4_aaC`v380SPcUA|~bm%q+X2;};0 zsCSxg1!=YH-LH)3749dDm*2EWOuPuuXLeO~-D!I3QC{kNKVg{QGIu4D(fMb!?XuhD-N8( zTjRjHbPAGV&G-DDoPJ$-2;d&2X`qSH!Ah8BJq&O<-Wr#nAcoq*g5Q3 z+Oc=!-TM^Vk9D({{7-2xbM0NKo;5ck$#nW|kilByh;wj!p4TCXmX|Q#N`Wd$<#XB_ zPs4nr9%+q!G=v@^6uTJR?xYNKW@x0WGHP$1MSIXAL1gU*HKJS?ik6`AQ`HVl1Fnig zQ#ZE&iBGPreQIFEGwMu@kH!6g2Ah^1Gk%7Lrle_mOv^KXmc_&-4?}gEI(ap&+valI zFj&j)=5U(LNwwOU7Ke6Za#Tch<~_?WMlbT#PFUPDS@+18G*k9|qJ`?<9WZcsnp)i= za=^NfO~1BMM}9#;)N{IrtVKTI?ahmFU;BuI`4}sQaC? zd`)8~7*2PnBDWh0YTMwlY*$z$^7af4mJ+k5Tn;GOS8LwzPJi<4 z#n#>#t)lzDxJK(RjGPgLezEm|Q+N)50S>$nJzjL7Ezi6y9ZcYnbxaQ#3iV9ZoJ*o! zhJZyMZ+xGY^wBB3aMyrUuiF=yLGpkrM&AWV8p&L|10C1XoG;+)vuvb^7xnoob}jYJ z!3q)ik(2y1%aH+YP-;fUMa(db`KaEvMOlWCGz?E{M=PseKJ*9`#TU3Hc*ASJ>+8sKQ!DWEJ_=C`+!%r^n5jw<1{1UF^1w(E%I0FcGS;FL*8OT`jNDOMUjR68o7J3)i0X zkyCMWrTV^k2K!oOoO%4yVjuOz2|$uj*XaVBew)YJ*RSv*+)mqiv+QT(Mkcg4srS!i z5%23x$j9{O=`BMQctKj1$VcTeib`OWG z8oh3!sHHZ+^hy;vn>=FEc*S#*XbAI+E2ASo+r=&7Vx!1vwc!HmPy|1)dPgfvP3^Q! 
zeZ~-UT~I%1>yW4|_yQU8=#t5A6TIEkg#i=LcmERYG4r4~C#3{T%d$4$OP=6oWG5_z0D84SV^Rk}Xn$Fj-$_Ddw3GV=O zBaFW2d99LeYYNfDf2pheI^y8ABp!@Nzvd|QQr$izT)%eL8rB+^d^Y>MbM)}3jTg5J zK`fHS>ABMasfghn<>tAeEBJkFQ9c3zE=ZY{=}5f!4hg+<3Q+QF?bq7gO1X4!HRRWL z+cB;)(8&#=zv^74!vgQJbb-j$Pm%0{E?V)<>L^n>1DC?yBC}7`~&j1 zDw`lX63Z)`R^3^PEL**`>7)AB77yTSv`LPWITDL*u&Ur+cg`c)B=CbUowwRUBNs9E zr$NrO`^3d0_F`xz{?G+@;4t9yeXuF^ZniwrH8L43n@deeTj@z*mK?+B|u{ z74p-EW7cm-t5&Gl=hFjLy^dsD(-mRk&<5f&`Ww>JS*k;>;o_yBXMV4nY#YXxPxu6@9;0eIJ!JN|O zaXOU;4ba=M%-%OZ=kN=>8UOn$*Y|3K*lf(B_^hY31iRV3Jt+SW8@QVY`>u@dD0sW1 zCjv^S3ANl}T~49GvYByuokjj313lIMl6Kd0&&ox*o48%fkW62iXQiCPv+{z%r~>P* zFS4AE4R)~YA+X^N7~N}rB5OL%&i_rGCRa!|uZT;;J+yb4y5kTL3=^I~eF%-(vrLHZ z$K2TM5!5qY`pdF?)!=2E(r%{~o~8!ioIFrhS#HNNSZmvQAyo{g(K&DX*sLaBz;pYx z#f(vi`!`SMqQJ^p3@E!&cropaBKd;y;DALH_?wc7tbj6BGPct7r&1WOks+tx2qr6T zv)o8FyZBaKWQ>)CDHLtN$*iVZJzCFJ!w%21f0As2orml7l1-iBtS2J_nmJ82-2MW> z5RclPs18y-C5A%GoZSc_;g5vfFbnmT|9gwD-?C=xpSXD&oSxPQ((0MhFnKotB(?#5 zgx{2vILAaB1h~Z$bIMt$v*K}14q_97ppQpRcAzg(o)uv3x(4Q!gAx<1au4z|ZG7Y# zKo8e(;uL8xSFA#rkKYd8VIp)bH;VhiICjm!eE38YeNupWPA<#}{SBO7-EDlnYK{C!$&d=d`~|4Jf|3tcAN+7Qz<?pTZ( z%V@q)8zCMMEzq4c$@AdncQEf{xo#IJT@D~}dP)N$s0%F*s-8Y%SF+YtY$Dvku`MGx zr%T#5`Ht8&RIx`^&zbsyRD$Tz%Hzzr@oPt^um1vc1R8`v%P_62m5W>t5^vgt`;6on z@{l7#b*e?2-nI>G9r;&NN@n&cMPD`POe>JYu}ktkHQxvA&nDcKZ-k9RX&-Q$)J}cW z3A^h~i}=XvU-R>2_#^MY#_x`TW!C>99J7h+H7u+((-80d$v>r#iE@NT-t%o`VpCe( ztAE?#E0S+Ww|hd~0!?=!Wu&YR|KQllOTS*7ZaY5aVMp0)m%-{l@fX0#aK;80GNF^a zmW~+%c!+Fnpm=z14aU0oEA>)id{}UxRU3)`;7Rwou6to^x5`j3zh`SnaBn3Dr?H{) z91>7oR1jK_9yi=M33ojDQQisJ%I@}ig&O$_Xw4+~cEhzX?#mP4k7rtvUoW)QYsS~| zK5x1G>G#(J7Nj?hWa?LJHhVo{9!{g}TZ$7jF~?(k89dQKizk}?xT*Y^gqs${ z(=7#7Ye)5#fbWNaftQZ??|7sSI%d`1oU2u5#|3A2W=5~o|9_%A0uKwjo7eUl>a&|5 z0~?(rVgRIhCjeJ5y!6{xu4)%oohB=*-AIl&I@>aumo_y`%8$q-0h^Jx+ChTh`@GTD z9w}>wH_r(!5i=a}=n~u%8})hHQ?5gO;>hAW=W12VYTn6o{NmY)FXdic%t*CR`x-`! 
zYxePB8hz3gdE0Bzjp7>xOq|Dk)2fV8;pGEZ^s9oLXU{n7>jq-B3h`w5{{om@kp!)o z>E)dr1u)3GzGdhbV_ zFSZXwZPG8x%@QZnzR;=mKV1_}5xuO6^TpY4{F%3i96}vbhpRR&EX=EpH`x>!M*~Mh5QbeB`&=Y1!ZAQrf;GJx}H#qG#*W*)I6q)Ot+w z>fXaHwU?lKe?-bpHk+eWzi5FX6B*A$%KveGunO0vf05e_mDlv?g+Kj)rG4qYy-Lo4 z<10D>P!fe$h1Qp&b44|*DIwI^PgV3sU+U^~Jv!jyaqK{xWPg!HskXOVQXXO!UMrZA z+z0(bLfv`0-jLRCwuc=vk^24;eDpN*y(;y1Vw&4Jb^pqf(8g+kUQW!1;lBVw3aQGQ zPvu25ltW@Mrbz63m<0;Y^~-uW<$cRvO-q-?P_w0g34w-A;w*RXAqWlG=^Qw7uB9I= zJ(82g7hm&jwx|auLxajD1j-w%8B;;rS}FmJ4tlRzYm zbAJ{g!A_u|k($wa-YW-w{>vkoH~KVMKrqe5MP|Sn{meZNNbF95 zrpDvsA~elgeA**XM6+7`(aujRr1#WVKTVJUab;R`+br3wBeqMGl~pe;MWw{ifoiLG zTePY0EkYOW0e^mq?Ib833q{ z9C!vV5?+orM!8cGVtKFf{Nh4Xii{ZJ88RdjTNEAp5*54HQIo|R)pwvfMUzde=bR4Z zGuZ-R!xsrBzXuwwYk&XjWapQWC0v&2NvQJ@5i3hbbtlDCF-QkhjSKbrU3l|XwkX;s zD)!7w7)bFAr-}SHj-77NDo<>veoZLCdRaH#&XNzyd0y(Te6rxh`TAez_G`$4%}c-2 zB>n1S%|;U3aLuff+4t;zd;QuIZU!4m@mR56le?5ULhO3!W!0@F+m^8RYb>UFiZU`SSM3s@Ro2_|3SD}S`nQfygN^7Np%XgNi^!CMM& z?ZsAQQJ5EaIHc)ua*6k~RxChx0a}Q0cvbr1xl3$g+ygo$3tWW(QmD&9v|~YqK*NN- z;bC#yGLYKuJ-8Nz4{(S%?iuTFd9exontxPaM4xqcHvp7BK)C^rH3mPXP^ze27@jj(SzVEb z=)z~V^>|^&X)qdWlHIRc%{XJfJL%|3y`lMwr z&lc+R;pnhDOe`vnh8BdWkCEDVwXr6!JGFL@`v9!gSV20)6TCZIadmuZXq-YDe*Ww1 z#}DD%-Q5$zL8{}kf(xS}HgSlWM~dzmQXr^6#7c-tiQaFybwdwe3AzW3WM@}#Z||N` z#FFT2#D2YMqgzA^y5J?_l|qVk2Ti!fU~2M3cGyRMA-0BPnMNb@s+dxxd=N8x!ZQGp zt_9LGxW6pkPnC`979`TAqM|q%_4TCoy#uNGs*RV~1a6$@L#mpNkFEYGWd(DtcrtOJ zI}^`>>4Wk90*ZilvXW~(wihKdpEFHdKh-Id;@I#$*u8GJ(K?OO5x2>+x1T4hZS@Ws zYrZORk&)5mWPLsDS2&7_O}xKafj|l5#SzDFLq(`p|1g=z47t6qEIa>!byBb@vYut; zhDXQIh#l@eZ&sBrcr`H1hD2EC+kn{J?;l4-|>JL<6X;vD{b^kpoOZb|HLyu-47Xms3-sobsk|i6$*6 z*-5bQE3b#&*T!8+FEl<<+N_vNz1J~MToJZ|eqKa4n9N+cZVqEbSn)DlAG}RJFHkmy zF$NDrIz@7YWcQ*|V>gP6%iapiaT2d3FUMCka$%&p+Q60m_nvsv60zomW^B?Gu6-qj zp7VON?aifMoUk1tY?44jL~5@@O)LaPkJgd=-)qkz7&1z10!-;1ko8|WzRDl`3!rj2 zLly*ytlY*Lu}I|?J5oPwMu1y8L`}(ZepZZ&>Wz%w?B&!+!tIyPZ)PXXEJY*&jQWiz z{1|!v0z881m9T`AVu`PD@~Dm-zL(5-=YjGYJ(ASb6_Dd{;m84y^LNb+%{LiBp0 
z)i%mZLPb3wp)C0Fbkfv=0|0J<1uBCO+Tu92PBB(qXl?I8B_ae_MMxrxt98v-)?=OY zAi^WPY64`I=OT6_%Edu-MbJR5@-e{ikMGwX(D+p&yYE9?DSB5{n+!2L5Ypj09e#ME z>oVbDFiFQwMQz&X>092R9$$9%QA2uGIIu?HxOs!__Q@r4LDf_?GNkcfnOurUKkkMY z0JzoBH-r>f-dy{5D#pE?OYJx6K`tP3{$F@Md&Wf}z@t8-sLod>|FBEE?Nz-LWF@X8 zKh+|&hnms+#kJ7!%3r`n8FpT)IXb-_kvFcNq0f4EMCIEhc_%**a)g<#V*iP#z)5GnyjUigool z9`lO|)#nCohCFplNnr~$*Ayoa>9bi|b5fMLyan&%CW`SOsaqLOu5eBb zHfuS`;jH^-qN&bvOGA=}SCc2=|5+TLxn*K+n2 zl(}T_{g=FhHM@jbKKV!$?QyCAp1?H>5ad`jPxE58Vq{8nUc`)qTtI-=!#J9}mzV^v zj6kFdOoqGYeiHYrb-=&mufAoqT!Qp@R8;}5YTJzMya|6dt60WTH?>)4_2KYkZMA3$o8`=G z+kyq`Vzci)&6y$Rm67_0Tz7L_Zd^+JFXM-U{2sQB*VAbWO2-?5ffKA>-J5@xi+cGg z%Jd7xjH{a+tY^*45C};qTZCl8H%p3=ne0sup^1%%&YSy2#NI`E9m?wxm0trd2;j&g z4RWf)!C8L?pAU_rn8(Enm2%y*a+1P_^g^%KGm^mgFv_cW8*bIDrY=A0s!8c2W#^{wKptlY%nVr^+$&X;lWY4|8t=C{)%vNvrm-+W;Jab^yhHL2@r32^v| zP(6ce4)%9t4IS%KVwjQ1Bd0vKT?A^mVXq|O)*TvYjE-C*?O@LGJZByZ-}}{fC*fPA zLMlL~l-+zQLtPU;v+^qW^CYiiR(VNZ_yWk}`!ek=w20HIco5v(uiag2ahQ1U4(p2S zpxxj_VILm;eeW>8sJ5W7ph=#8iO$xKAe?@fa(5D{Kkg^ougr;eF01E|=wWF@j`U@B z8yb;rkD({k^UHb!U3@bwfQME8<0p+h;?-q#y41@66dDU#c~FB6V<%1*-`k{{DCL|a zXf2s6DG-FIJoe=4mXFP)N1kYUefYFtNM%?gKvZwUC8;$7#%62}1pcy^(9_@w5tyho ze|q60e=P~{{G!bK(no$z9Ma`|xxdD2h|Lk0H$!4K22G<6k~U)G+GtGaG>k#5lrsxL8xHHGVmvK#HijN87Amji)=YLC3hPu@eSU>N_`+V@1v zJg`sqFr-9^K*)&OM!7D1AAu^m@`@`$z)srgML~L>4X*9;=g8_lAF*0Zy^~xcU8*o0 zeg1^EH2_Knr-?Zj4a%GI!~bT;XVDrucoghg*5;-U<#^Y>2Mmapu*xsnd|pj#NjTWN zFmve9==?SOPI7e!bs7oHb>xdoLs0hqL%1kh#|H7HlH^%7R>Jopv>i8QL@5j1w$aO_ zX2n};(vH)KE0Xa@CcaU8MboO~N ze#i-12GklRn~qcJo}8lhZqR;X2xT~41t{{2T6jCJv*0JDC&0)CVlSt$JG~yRZ~3`-i8&cTG$2MK zJUy!KWk0iGCDz&f568}|R-tIrI-V>HTrC{Vy=t%d>{WGQbonhWa(@dwZ@O`Gb-7E`87dNABL>1HF}Z{h!~8>f=%5ke}6iU9i%+ z*%Q;%V=WSZA5lW=&Rd(QNBfsvNg|lLVIhPvSz-?844u4o*AX6r39G ziL7lb%K|SBGnYE?Kf*19m#c&cSO3M%{~RBY-YA4K0qQN6#FF}UC2j+(t?nx&-PE|C!HN-g&xIbatunrHg3MB>~k zU*Az+U3b6IAcB#JC4s6uIJUrFwQs2>hf+J3eQK$QyIBZuSdq{CQhg zQef-kxrOXWqiJQtk9hzI#Ar-5gef1Kk7yyz4D#4A}Vf!r^Bd_`0slhT@c 
zYJogx_~XnbS{zQBsy0BH+rah%whyi;O15)g_ul9S-*;jI5ie~H#Pn4tS@9WW>-P|^}WjPvVSHolm+*+)JbZZOot@iKWN#|fM9#9JvSreFzdpxp&nhE;pb~&bI!b55B_Q-++_NWV^5eZ3Q!>tzPcO3D&{VFPA z!!m;)epKWma281eNhjchzPP~7yEOSDNA^&xPMg|ev!ViofhPY$v8<24P)9va;RQgQ zmxGpmtt>}Da?Nl*&Lj+M;P>Pp^RUAUIB_Bow9P3tmJC*xrpwP4>roJm_P|tJc+q$J zf?Wk%RDJo&V)z#jAz=fEY^8tgfvmokSypL!!pmSg84J!t2?%D1XLHt1aFoM?xJf&b z_~TmhWv#qx!JdbGc0#(-ZOTh!(%D1{bt^k{c1w2S=HK3ele+5H;>F3ag6={w(y0OT zP*Y*D(PAO#V~)Eux1bOHQn8)F66zOhy(&r?J{MNr*>RNO3znuP0mjkP ze4LmnM)J)`jX}M=69!VQ^X~3*16LB)YWXmMpJoJ;@gQ}~4d=25Zo^8ZEL7Q$)qcc{ z&6zz@Usk2sT(x!5H1c=g3ri+DH=JddV~O?4)4HEuwEiH%-xsNXl&x~3Dl!5CZgW-f z6}ZrKx7<`&gW7HS%9EROpkD+d-sBV4tP1}^(yB1wY7UEmKn%uYDOkU7Eud;|0;gl- z7aGPM?SHN-5Ji0?^eHGSG@+iY&r(B)fd_!*LAUv9@>%~D%{*g1N<$>aYkk+v-|cBn zA3l`W%T32guJePfI*KO}rwo_{44~5d-_}o;?6YW`3SSOev6Vh#d;PZn$f>fiKVcRC za06tveQ?SAMc7orXEs`^>QbZ79jr(rCZPXq$=~v=eiO=}J?+fh;k%ysx!m_Im-FPkA$oFkDn8EG7s&|7Z^kKIpExcO5QBOFGB)gc&;xI0j9hVWWnz1!v4 z0%tr~Y*qq>nu%aSp^p?6t+1tx(xj@F>Ni2m+64t=XprD>lL$g&GxD{^ujseqHA|;I zmgIKg2Kmj!Z(gG_k!BTI?lgFu9kV>nDNJ97V*u;UiZo}q?6Ms>=?VN;$cGP9#sQg# zd?D(m2)6hfzk9gwhu7}ZYWgNXJ3Mz(Q(j(P082^LeXP_aMhqxA@rCAA-Jn+z_DT~* z8SKteGXOW-sQX0%%4Ozh#Yhh4*>~vIV|+Bs%MM^DJCPx-cC!{IF2k&b%tY!5kWi0} z&}dh5sd0~C&!tM9;akCxQsZC1SI7Srv{FKr>}=0^U^$CGQ~&N+tGw7M)FXHo88zeG zCQO7#U;pik4K_Aq`xq&@yotW@);@xtpciK^K55f(14A!9h4~xy9fR(eb)S_rPg+)d zCCe-O;(cG(S#_K{HWGyhF;D*3ywUshJW^*eNgCd;*ItD>InTsDMZF6{z#8jh27uW66GQ(Dz4YMZzDdHuuiUte}tZ_zyhD{z)>t({_8*VKJ4Bn zfq#3SNPUB{P;V%^oPJV6d|f?`00ie8{hwy85o(QRT0+yQ+qNfy4QIU`Ma7|<`B@$o z`(011+HgwA#GKRca*&Pp_bO^f9&OUlF`1@%aEbt+gOesF1l93+=KTGk>AQ9^xwaM; z-u5@5Cuds35#<*@XU$JSHew!G_4^1z^>C@^~l1|0vu+sKPh~JhYiAh z@Q_(^S5&p%?}$)v3$w2@Yb(@o!4uLO*vE}}GQgXdNDaA;<(3l_(1B$}IPz7AeZ9Kxg_3(2(pm4)E&;GO!?PrG zk5|+<#I*pQKPrpiFCaLq#oy6=uPm1QgKoaPsQIP($Kki1Us%tX6{EWMK*wn<&odjA zG2ixXg@4~Ro!d9{z(80l&HMMPb@gj^Iv26;D{p%Fg+3A?nqLw*5rsS&@Em*$G7O)rDxxR-K zZB@VcV>k0|jGsyXT@F|mPh%IIPOlE_XI{&B7sbh;`VB+vMoR#7x-|BsV9q^rtEtr1 
zu9KIu7miSSa1ob0csqtg({|j{RKG71k|sCIFqbvV3W%v?-G)8Lb=g1C=nbP%oqf*31d?@7m-Ctsn8%8ck%U$Um~0qW!fl$N~2b<>|!aR1OF!g=Dx}_R2{$ zsH0NYVtPwU1K_wWbkk|%fgP~1?xs1m8xiVtS@j3A*i}rBnyS&3ZdlGXvl5+((PJwv zVtNlDN2iW8FVtNG*2&o1;XR39&2aqpUEUUJlg)T?M~2JSF_S*NjvF_`5w98c-EN1@ zZwGwQ*u`3__jVaRo0S>c7$z|U$jQ$xBfjJ}>Gas-CrX8NJqGO;0SA?glKKfVN>qpX zIrxUftsg92Bam)wk?W+E3(>YOQ}pv?sI>L=IQ~>~x6p?2*)U_Ym(zqvd!k&Ti^jCK z1O-{21_|}Qa9j-oa7X{PRHu|!yf!PX9h9HsX?4UmrMeU zVu0#XuY%W2HFm@Ne;P*W>9&ZflvE|!4@a&9eh!*rww>b zWjtG(9w-bD_yJf)(x?3wHs076B)pp*->r``4697aOa{*ol zfYTimuR3q8oo}_dG=G4wLMsuogL?A_`OL?>A#IQ5XXzPusso1PcVSr0poaRt`sKWu zsdGq@U~&D$4wysla1r+-Z?R!#B246V$3$n&j<|GOP*e@*`y*y#q#u}`^aT?-ar9?7 z$PmE|qsX@lYfY?PHM_$&CpXX6(^-QEaCXI_XS~Jgb%?c`5-oa;^ zUUd<y@!ZZxV^`s`b4zbBw5yn6EG>4mW9bO3jpVdZHU7j-Qy&`V;gSM zl?i+vhX!rT?(S|uLyo2{S@r3-IBZ{fU_R3;d!F>GW6C9DPo3&zU6A(l@Z=;HKFoQp zIA-7+{3}}8lcjcSI~b4C_&W2i@cV3|4K+H}?GD>T<&8)gGv+zVO!X^13Bj}d$K0yon~Lq5%= zZ*ZfO{E6O_+}KdlqjSs*EDZ0H%Tu!x-QtqQ*=?KXC~j}7NhnQi98YfL=_!%v7l8_U z(z=bNa~UV~w}Xlg{sKUGa%A+${R@ryk_94>QVRD`VMfE0b!Z`q|`RH z!MF;|7@bqXt)&MNm!s^JLRd|sbIvV3NT4y*snU)G4KnK*KcODZq9WLrOZSH;4zXkt zv{@_YgwYLh(r-pGs@|KzHf6q(M*T;Xu8sQmo+zbTziK-kB0J*eVuA}lk2K6Ly2$UT zN6%`l@ZEMzPxc8+f8ea|$jife*`}s*O@t2X=PQMk9%c>Cm?0a!xccZDJwJbp@?>sZ z@ee@{BXb(CEwvGITUH|m@PabDE!4W@C-iyLUjP?SAKUfd9e|rP?MSb`rt=KH{0kU9 z(f-1hzpMVPTxjaP;Zub5un`@@V2&}?27J2RreBp8Id}7-=vj&1*-hS`c%F6PPF$yH zg4jXq1{JI!i%kG01eZQZGl#GT*4~tiD6ISAF3u4(vUh5{H$3CEgB%&e4k0F(Y)p-z zx!zvbMkvokol|RAut3SxV_J*q++pLn#Rpg`2+_StZrs8rj5(-VeuEK-(@-R?`tj-B z_yhgm_+Mj_g}?FQ;u5Mb3?YxDKfy_65l+#r#yJL{i?W25QH|cuoGj0QA>EyS0guqH zZhPIVmRGO@FYWsgBx7Y?DZQ}nmlM-ZWL8c~B94g5;uW5K+x7&1uWy^&l>; zfv*}}bbD0nWnEPkw9$_KR+=v3Hi`Ze2bn+=V;(Wz>uOAmKN)xXB%Sm-9m4$us$6 ztNEi-iT1GrkJcICnR{45@VB)nieD_lShf`U>6~I%iT`kMt$K0cVp;I7^wiW#@)Jl- zwr#DRYhTdn^Ip9)H_RKq6VFy+kIm>qmoq{!r*s;ejL&gnR+px{;(}18EGyh^m98$H zTZ27bDEf#iIrVu?U|X7^itsF`viXP;f+fPpb`0yXv$z(wI5E3$&i}+|;qV2vf?;G$ z_1M-;$3LOBM&&WM?sZfp-_{Y`t>%RZA_HIg;UvgdpbQ*9jU$=O({kzT3_lk771_Ko 
zvwYBv(sCu?p5Isp_V6(PvAQol|5!p5Ux-avEq^qO4mV)T*rwmu zaDJ_QNrG>g`V^xoDdx=@US&iDK^TPyb>{bB+Q%G4y1XSZ^d;s15`g2qEAU$YW%mStQuiLyT- z<;d8616fAd9V_?)D3c>?ZHeKIwA_FlswobY7J1FFEt~7j8J_1mQ}UEY8_Ua_80}<( z8(+$nl=QEtnBkf|nSsF*AW_q^ywP~^R29u$I=%IFxCa>x2v)rN|56wBCxv;=vXUXx z-}KvGKxIX`;O^Dik=yAS#RvV$m-lLAWJ#Om)trzAY&0?VofQ_U(77qz4|CKPDj;Jr`ZW+=|I8=Eh&EZTSbcs8!L zdac0Z5XRw29mh($m*&q!S;wl&uM#Ijz=O@6vV-L0S~{}FLSI~r4i>r6(RGqj^PLIS z#r!uJFtDtVbwo1#{82EB!x(ETln>w*8*oafFEbzR)0MBZm~Z6 z{hpWO=ujvN+5!2wVXAWqU)1r5m4&a0bP=n8q&e;oLXb@EFW}3iUxb+$`B3YG30#+s zfP`qDQCJA_JnM?%`y--?_U<%ilDOE?HZV(ST68X5^krfhh27Btr#5ZVVm-Hv!OTgY zSnlz!U@3dbYuAh+>ai9^rf=xJ*4KNR$RBqJ^9c(% z&c7HEbbNN;UxhyNN3VeV2(h9b`NF)Q^7MhejxM@phRw|jj;>IolBOcxy2}YJKd+)u z4^Jz+Kf4du?7nEZQS!6``RJE#8XBGCdw=IuoSc4$taCOMb*6Qxcj{8+qm=Amc+y$S zT4WISVt^b^3@ie@-C$2isek$dazohTvs3V0^gj3GW>3J&$v6cNd64Sx2ps5O>fSwL zUAF7~S?R@Pll(da-g4|0I17syz>51L02V(DtcgA+RCw-45AKtGU!|0Z0Fi!W`0z$aQIw&J zlc1oYv>g6CYRP>$5VIFOKEbZQh{&M)ja|qjltmO`rqkU)-4WKyI{m~C$>S?GXU7 z;s2_XoLt?CcT87>&#-ybnmKqgEJFfNq5i*VhvI#X{q0K^V`u#oT{$&?C*kez;C+|g zYQ}>@Fo((vbTPn9+cG@7u}QFt20k^;9r;im*UQAh$h4$>T$;1muqyfm*}Cx#xD zvyB&6T|Rc7A8?(-nMAW9X?^DK3eYgO=3KmsB477ak@P&7=;y1s!gC2P-r;iSQUv;6 zS$}>8875`&`}Fclun+raweQNk^|=Z}_&q~w*rU}F9% ze|I7o_y+!yPr993`Yh!Qny+`HZ1ZhRW~=w1$9n*aTbH`JUyNB%*?r!b>&&wsJ;ft5 zg3aJbo&-S@wptWVG;V%qI`~K&Y|-)sBBFuW)Y}6kEU;+%cMFRkJ~EOqKsk z_+<%;lj7J|r{ zDL*yN4W_NbQ~Ea*gB0KwPrSoICwH8l`VB(Gp7&JO;)G9+2NrdB6>qLUH9 zlIFV;w|wc%?joBq)|xIpRvYCZz~ev=7_i_^8fc^`$;O)bz$Z40*gk67K>U-7fhFhH zx(PQ8*&wg)X^rzb^siICEbiDO>8-7h2f@D0QR?nd%s;TEmD1v_oc z`yWVw`PJnV8Nn1!G;QVDZocVZ(XIYnUK1FdttO~YXbyica&!$djHvVJIU?wyqwn37Q%CB;Y2M5p<2 zVFry#{i14`6);*>h3?^OB`hTog$oaqW}B`40v_Iaa|S)jYw`_Q8hrbHjQjP58Yq1S zs>hAx;&rJsVb{@Qlqb0+KIiuaIsUZMi1?zyk)!sQN9g4pB$8oNzQWOUZu<3yISf^| z>P+YkkU>?Aa8z>=WFEJu-esG>Bs6RzX^Fi)v6L1@p=?? 
z|IFXy6F=$BZ0jv;sk)XXTKDx|0R3Et!h=6w_n_|U&4z=>6Y*{>3Kxn&Ds*E$k`&%C z&W&hyGpe;~CykA|RtR+Vb^(ir<$MNHn#QzGFxQU}cM&Czq}}tq4=IG)QXTjG9B`#H ziV&RU4AQxYx-##9vD1B*rjE_WSQN0>SYyg&1fCZP@svxdvk&z^dc5Xp7R)rrbM#a{))r7*h1Cz_M0IWXzhJoX~nE&&pl# z$7jeo5Dx!twfOiIUjV zY{sAY*>6=V4$YXDwVg;uZ;LcPFhvAMUA9y4NFtqqxto2`r~=$v(iSdm-}*T8FW?V1 zivMuOrDAK7-d`1P6BHAkr9G{y4zYgf9;ic&W_CaxWUBA9*d+{&O0ipKJ=Buit>y+r zTL?`czWe}q+l}=4&)cdHYzKS~R{PT{i?w&5(7Mw|(StMNHQ(q%IAhV^nkD#T&(wc4 zCgc__rsgUAFi6UnPp!eu#9RQ?H?o1UNB`pBW!!*%P)njom0!&O;)R8|7D^i6iPn>oOH`8F_YqHETEYZ8x)R0*LiHg2(LFzb9-Kci1|FcirXhwqce=Pc7-04ZEFD-$r(E{fYDd zcO|v|hpqRFYwFv#y@LUwbVNEKDj)*VdkIClfPge<0)~$C&^t(nARtN&y%(i-q)F(# zcL-g2@Bb&B{hWKxxqHtyN!BNMk+tTUYtH!_<2weTkw_7p->W=UG^0xYG<)1`p3eftL?J%hlZxNY53^ zvJjDaQx|^IcA+&_;73P#Z91%u7~oQjtOI61$3za` z-PEAV;I5--_=_+MYC8hOs%b8 zE-Bs!DTTbUrtfUCA=?_E7mKwap=cBL7w(`?A5jyVSYL++T~>NSwDL7;TWKu^AFhb{ zMSHJm_^#YhAs{x)+mK&XwCwx1uReG3bB{cViiaTH!L5!hE zpM0pQo`)0tsD97dBsgnrlU;Vy_gP(;YS;MG=3#gDUK<@9=XNCd$oEJw^1a|;_w2-_ zsH#&G9c=i5>hB!?zgwiJq-oxi;Cp=VisqPoK`Y9O*t_5>#(f|)Bn+MV*W4AUuh9F)`!o(_I+8dj z=Z;|!ycGFZyis+=mUg$M!liic3P$XTRvSEj{*r%ki*401-r0z>AA{PIbBwC`2QaK| zS^Of)8&0Vgmaca2N~5}k*8G%e3ykPE)XvS^Wq0TnBPlc^ud!;fywI9UJrL9PK1EN| z?Tlbz2n;Qm0;USr@)P3Zb?}*J{W`wI--a$@RK-a;N_keCPY9RPrFc^+W5k@~HpHBu z60$9|?;`LG*`KzO$aG&WI{>sWMwO!dGfv;V3G)}%xIY107__-$vn>TaW>Bm@9<`uP zd4tXi($8g=N}6VQ*5x5IOVS{dWIVO zOIGIx2ag^f(_$HJ^GO(iBQD_W#Y%Lm_!`IQu#_TuSs$(b9U@g>n6|p{{RpjtFzN&7 z6I-Y3rNTp8%AG4mLVF3>mFtb<7@b?)f?92j25V8n>df_ioKlYXfw`ayVbC?FNtGVU zC++Sn%j6YoA3?hGp=k%GKi=>EyH+HH*}27FDx>7PRd%inb?O5Y4C99*@R*=m-f>tc#jpx=+K-4ds7%-Qh|HJJK?b+VW z1*L3`NxL)nP&9G~_5G|p{k2(MrN-@_6tpbR8-^6bjh-(MQT98Y5KC-JY|%LQPIjBH zih&NQ@)n*}_bQz4lZa9a(M7SGs@;|nJVD<-v9(wO7-^`GixDU5)CjHO>@#NooUd?@ zMa_#gj5GOrj(xn4tKdHD-TwXjgyKk~T61d!rPNJS$Bbe**;oi;reb~}=tdd*oV%AO(a-=Tkm@3)} z5)wK^nTde0@;>Hs)l89^b8r0Fo;rLp0Qy)|+mMW>8u#TBT%%RNn=|I77KqF~+pqKt z+_*{m^JbetRHd!olPfP4`m8d*qBR@-qT8SFXt~FtfvAccN+?7^iFEz|(n}!FMt;QEQ^Tyw}B@GPl&Ca#b}{!Ub_&A@hQb2VP;e5TA{ws!=nYOtne?L4AIdrKr1lo 
zrsvd1+-?Apejj-#6t$iPrQAQau-nC)h&>NPZuRecU-lV(*sQY4A`^BQEJ%Qi{yn~A z-!X$P^k|DRL*Fbg3pB1VID`K7+70-Hd%gAAd%}tMHXdO{RD@!4w zUS1rviZT?Q^$IIr>d=Q%NhV%b>PJwxS<<=STD1VG}V$=dvt0n!8 zif%HTijL;|@jl=c$L+V|IibSpv3}ttGS#iztmm&biYnw?YJW3_GWWj|?F?(*i?N55 zFQkH=o}=FO?FY8h#0IiO`EjdMiLiYZ3)e?f@cDE^If>=pG6Z|j&M*L{H|pp=!wpGE zEvi%dv&PEm6&r0^EQ@|}4^BnJbUIW4u^oQ>otB!MF`mu7Ua=~vLNT6S|M?7e(Ns2< zJmSHQe(J%sYwAF(jG$QYh4lp|j|$A3-txQZ?xbtbX|(9c_e3w^5ANk$%&yA$Mk-@w zb3Yi0kP}iWviiqb8uQOSX}EuQX%k|=v&bV>M2=DH6=%dw4oK&3&*|3M@*9FB;pEsv zZ#zP}Kwx=4GaLG}w2=uF_R%c7ohiNnb~e3{km$@3n+|{aP3AG`_V0@MmIdoPRpUKs z8(|A#3B;V3jO@*TT>$=Z4cqQpx80ox76ww2k|J2e=6T<&JLI@v{5!is_r_hONs#v^ zoRGK@Xa${<*Z{VdL!DIdj$wdCu6@;@#09|@q6NK^+Xl9m2Zs$HMSdnNHvQDgBX<~Z zVH*F-gtoS^;``}TMJg|aHXw4IbwSUHA2+P1pk}glbM%{*v^xEa7>Fx$ADAZz3O4$f z>H4)!>OJECQHB?e40m1P(;{}}&KM{)Ge&%4d``Qjxfed6mpj^MA0++o6YT!zPIY-0 zblbb4F`-GT)h?K*R9J+hW|*G8v|r}?B1(^(XLFFE#;bwsRWI^p5Jse?6dSMyCHZFZprl3Fns29OB1tOP1psC?Z6-ww&ed704bH=I{Hwv79HVO zt=3Be{c(_>J-9-Y6%sP#vEM)wozP@pdsBK=x>Q$V`4(k*{RQ}I zxbMC5HKtf`3Vdj6H~rDnR%|62;2XI;^hy1DWA9{Cx-q$UdgKNz|LptJo?>F^o#)sg z1g>Z#`Re}|aGswuGGW(9L|)KXKELmC$m9J&CuvNCO)~1>J;V@jr;~;GZj6>9jsZQP zj7n@OIDZ`8UYe{_TB8PwJWrq(#%g`ru{SK_E324aQoBI?)BT-rwqt}`bpKu-hNstV z!5Qmm%mw9egG2_ z&WMVtEqFIc^xMHFe|6yI?YK0UPlqZAo_RP8G<*7RgYm*wTIiHozZ26Pw9XHiyw5zk z6K?d^yQr34LXdB=BUae`ep7Eh!z`i4?B>`m=)&{73}_LFGbF+&YBh1z;i(BLJ?7aR zS`{4?r+jbOE1-v4NL#BA?V3dmMP8q??c#u{-gN&9?dYS_4MDiG2jI|si;}rT$>&@Q z_cqNU;UB>AS0du!fS3a9tw_>>KLd@c;5bE$B-*Ygkn?5_WA&#*FehKQI0HvzFVE{w&1F(&IsJWx9xXo&{( zT_OB#Uta8lTa^sKpHf2@9ip#LX?qsNs(z{?+TPON+!_h=0)Yz=9b7hmGI5l~!5o&%5JC)zCW`02OvbX zCgS>$4THhnclmeu1Hk@Uf0sx9r@d;)3?83NQJr5UO-mrk`Qq^CQ*>(^>LYJ(fq_}V z*I{Z#*U1?BElND41a_M>#3S&kZ1hNTFv47(UI(E0*F=p)x%2!-NtL?J7mJ`h+9(AS zg%~V9sZPuyU_>sqmJP1zB7Ak(Mio8%DWJ&$8w(;3B3`BGfw?hd?gL@K6YI)m#ltdD`PAVJV`T(23F+->7oY%g{ z%a=9`ZlOkYUo}TGxt4>XDArtHDQ)0~d+q7Ar9N_(tR{!7A8-1Hh`^~G3;CxQ^Ajnl z&O%hk;-ztX2hy)1x}$)(?wZICXKB~+rN^<9Dr9X~V#EY);H-WtKi*&9mBuuQOy0m1 
zUw)y`;q32j^PYRg@K!p+15P;d)#X8QX^H9AQO`G1)gv*#TXAfkn&B_#K;qKwt^B({ zm;V5!W)w>TAp8|AWwaoAN@ITrCVR!@aMW@_$EzdmuSD@WionkHPf zF&+qcjm?ML zP`6%ds$4rK94#^>|2Fh1cCh1&Tm-u_!2pk@u0_-tE&Zr?nkZY|_&ilt_2hZe;iFF( zM5$kh?M?X;Y|V{q;_Y@#;(n%jF2`PZ`o4*nDTTYpu{|xz&GD-Jos_bi&dfo#?2xPf z!}iw@BnX(IV8$bP;%^$A>YlhmW8^Qi=Jd-&bV*&`yz(b6dBO+Ab~zCLaHn7$Y&^~1 zp$fykChRIghQywcm6xFawmr;0={P8gK&@BBtbU6-+;xs&$>M)-MTw`5wF~}x@8Iui zvE9*|8=HRG&#@HU!(%ZGQYiH1t{{X*Hmu@wSGk0C@j(`U8D2Vtyx8YB*aV?uA{?}VQE;%_MZ$&NjWlpWYN20x>M z)(N-Yh149V`}e_EbZ@oDFgo59OK5XD$Y`1djME^uIS~G#pV*#d`;v7*zHHZ10M~c% z2ZCjd2=4eAC00k&is=mj3904QrBd*`!rU=Et<&M;{5q|9GeQ_}(Ewdu-$Q+qxwQ2| zH+!FwDmO;WXtOXvo_~p5Qv993GKmzsXt%4!8rvV0h#Awo!;oq|dwsT*x%_=L+h%*JVW)XhFdqzKX8ydxj`ON+k7 zJOIAlle_esi3$otCBmS!j&%Wu1uXg%#|EVn{_)AYuBq^ftA7A(m?e;gKV@sK#f=^7 zXCj+9sTKDWxv8KR(#}&15LKR-vh!$SXRoD|3Zag^+AvHokvMED;Oul0&o#XOv6=VM ze0ndgb-v=QsxryDX{LY#OSdMcrdtS?WR4VQn-`E#`;4Z zW$GmA764M1o`?5II@0HZvTDX$9L8s592jOi@6H?SG*xvR~IHk}g)vQm<>lWJ!AvUxF%#dK@ zi1D$Y&D+#YgK&fgS(P9*@WkMa#h@!rBp!w2RAYXtR_cW8=-yY z(d?V~&)!+uoVaixG8_2TRGNReQN8j?igN9m$ild{9gDmP5O}T|Ss8m4*&sKFF{Sd@ ztZkBWjnvyg#XqPsR{ppMh3DVO3jAzJ+`>68LHj?wuuu~^i@9+rx>hsTZ@H8$@?vj( zQmqc`b|0jD#@bIO$%x2asmVf^^Y2n@uXQw-m2iNe5Ah^AShwtq2tQ4H;cZ7)DkTrB z-^ii6RKDRkI_yTEVHnUed3~|<^ z(Ld$9+kOajES!6=?-H-?lKiG4sd$+@Dc4FfjPZT~IQf%m&%G6YDP>#j4SjNG16vF! 
zGUzf%^V&!|xQPI=eV=9Dpm0k2(!0*nEDTZf3TrJ#7p^1rtaaiCou*y zN?*)<*fQG-<%ViJ_kLAY9H)?Y?yrl_{X`mA8%nEiT}a{1mRHs?AngQmba0ljr)Lw2 zPIYD9EF+I{nVNOLc;Tr-Jy>3BL6ev9u6>KD$GE)L;zr22dRClOkJ}S)uBX%-!QRy1 z$nx96dewqb&AopBJ`wV#pB#mto!0;fruIN&*(NVrnPVc%CTP1*6t%U95n#<-_$BQ8 zuY2lwX58{imtZJ%@C}EW@QCC35kr?G>h*h!3WK#pZO67M!EOmEN`IG|61EAdkvF?g zkyTY_?{(KD5gHts0s_UGSlCh)rxQP1;Ff1IA^}+1za3qA5ER3%7WVzZi5B_s5?+w+ zc(VKieEPW@OWY7Eu{70dA`t={_8j4iszXl`?>;^UnQbO7nDM^5Ry49{=A33F^Jm}RXZgl!`baTWNGfceoQ!jM&>;K}(JfrRbjd88e8a*y|AnzWM^7o_5^y1EgV!j5RAPE>P!_iI-WYy7?O57ikh0dER z6sMR*SI^r@)UvnmM!5)VZe$1_a6E0_wOc!?I;9J?;Qak)=G8sX=LzSIM}&*B1e9cB zHi|uTPM$=RRKD!W8%m4~K%FrZ8QrFc9$+M3jM{;&@YIj2z&gi9!g$H5LYP-Q1ebLB zstzi2s;>I`nlL^#&xMOy6y`S9{n%xVMD;u3S5}PaWjOjB&$@1|3vE$er;&-ZR|?gs zRhR2g>gjf?VOcuq-s8#9k@3+5tEULn$2qF@UyQNpGit4l)3Sw`lpxeyNS1v=o)6bD|EV03Hs!!z2``-m;^=TjgCp)o(Ai zUx@;5N}<1)LVkIx{!}00-;~$nS$F*UaL4MFMQ^5+%BBIUzRC5b$+0~>b5Dn+5Ro-b zI;rUGj=|y8bv}qzKqDQdun>vvL|9=fr@DOk(%soxkb%O-9*yEIN#COqR>M> zz1u82L}22$g{x&77s4v)4tESu)I@7=&tiGUti>blTKtIu^qzC@j!CHvRJQh{Y@G*6 zXlcT8|v3YAAqa$1jqOEJ={dN*te*oj zq*L$IZ%~A+RFB4oO@ntG&|O>YczZL#y>uLWM99QP#DR(NocH|3MZDIS^}lbou3O0hfmtu;oFid& z9ctA%pt`j-evTP<)IL@IKdN=IcwW&^zP)^oC3WV7;MBT%aA_{|~@vSI&>J%OGW<3~wypZPqHg=L|}LvsJ3>X7*L$S{th@hFYo8}$k{!xm(0 ztaGE~pkuA@8AG5~VZ{O{az*3yHUK+m=o@N%XN^^o&bQ1vN!P; zLltaK==%d&jXsCeaxCKzELaB3cfFmVo7{gcApw?vli#SgWR^IZlB+|P(RTZO97Wcg zagx+faZW|#)y1E)r1VPO7hCwbL-R^9!X)#ziZV~RWF~fLeIRfK~gQYPbY$D zT`LKJmZ$VV!FcCNh0pSBYFhHLgiymm!b?|rT120Xi%ZA9H<#TnIC(2O9?&s#*g@-r z+Q)TdwoA`Qqf{I4dPJXI6^Bc1qw;`-y{ zxOid<-Ra_xs5sI*ChI3gYc#oC?0sYb+n$NSV8E~0v}p3X4h6<+kHD?W*`P8}^oZjp zxNG;VB1Rf6fi>DK3I9yA8FfdA*4NO`XnanCi9ZeP_W1|!nP_@*E4K|>b0lmfy8NfB zWvjZ4-K*ShLJZMkov<$&Vte}Dw-#KLEhzvev6;CLtC#VdH$3b zY8c#bB*C?6sg7sE18`ZDVF$>h^E*+N1~(;j{m3#7=i+^Sp}05oDI0^_YgEUzWuRYK zDMmhdc}sLW+{cv%bXN@YWfH?qo;VMxtE(pqnR9|REo|_!$O&hFVQU^dkGhxXBf18= zceYsm&~NrJpsANyO@P?A2-+YN-hy0laYzL4)nJDwfeZ6b;Dn|?_!*fP%&g;jPR$7<3#@-aW=Apk#WgIj|DvArP=n^LFHt#c6b8-k2!vR{egpcoBYE>OMw4kc~V 
zJ-By%qtfdmnWizp-E^Ov#F48O_w40Tr|hg1GL=;37jbsF%(vm z@`~G**|u6z6Bv48_khKLYgA8O8}S*2sMn$yw@wy)pBtuv2ch+_AIjJx?gOCbCCiowLBwa6PmMRJS|MGf~E;%ArQ zK9)20>DI58KXf>;dz{Bk=4K}!tj?fQi5#Q&HUox$S!s+PJ*UxB9EdS^7{lDJ-f__t#RAP8FQPWdoC^p%!U8v{`BpTkW4CoU0hNjXR1ZX|K zAxlYoS)6AzINIwR6!Y4Fdn;R?-6a9T5UwuU{B!`L1=AX}h!_w#4hwf84G|r5><_XV zmt{e``TsEiK~UVxI9+v0}7LvG<>iAPLbf)JBwqK=BhrXekB z>%QAF0WU(B75izYyrHAHQe>?yS0@)1Gqx#xljDRXyem0lY;yS@0M^&x$~}g#Dj%fe ziP#xDv@vGbfl@!%MN8)NXxE#sGOfH-2#HbK+(rQl|D57Id4kAj8ACZ7t`m5HvDGVg zS3vAkR6CFuL9I%y^JN@a-=+E!I@oRW&`zEb+q3Xdg^luPYQ|G69a&ZMxcA-rWBGpW zMyfE_fsg9e6DfwGB9r92)XS8foII#MW=VXDsfG2SeB*QT>v~A%v;dC4-F{(vWWZk_smy58Qh;80I2==LGGxS zx0GJZ&WRgp8a>NHYy^leg5pnLfxECmj0RX=D>*rjYKJM`Dj2auz|m$UagI3i`(@YR zUlmbPF+whr&P}$UqD>nz1To44(?Mfaf}a?!*XqbC!p$d@I<-DgMR)rC`4zmBxi|9R zRX>TQd_kJxvrgqe3#lq&-1-5kq1x>jWez{WqNfFywjX^2Nd*$h(F>R`P+%RKW+Z!A z8Btbwn>^4>bABf)CphWpYeU1IrycLUfUGTftkGSkDY3~xI7E!$^yylT znn%0?TE>T4EZy9^O+&j1r`rKbTe))uU{%Hh7iJ#=4Djn5=EdFO@0+W!)R9(LIuK*& zVR_;Pu^c+pbE}hU--KHZtrgh1T6;FaU0e@_4+X;$5A zMhIJH!y=j778V*o%$W>$edlUz5K*l$x+c*UPZIg93_aU}0>qmuHfx!Lx59+4I`qU! 
z0U-zg^>u=X{)c4(t%)z+Hox_y_lq(Ff3S6Xe{5QiTW=RW-2Eb|qdq_5cs+k?|HlGU zM~6IVc;ej~rksS+dwI}4;82dci+k?SsAAE!zxf?>v6 zN(|ujuKJFyu$km}dCX8z*_6*UHj9vFN4${2{}`PaWemWY#+jh(n4bDsy@ySY%F6fJ zVJAO~IVe5peva~VgES7gVg$|(dCD6CM~Z1TX-}eQ<-cF3N_@xXKkTJ0fH$iYne(St zmXY)A&fN?g;kUJJUo?2KXT1_#4Y@lq%X%NQJT~5l?sF&WLr86R&`2Dyx4%5oQXf%Q z(cqqY!kq*@1u}UowcEfaPClF^v}2qIZl3ixT(NWWy~YlJZtMpuW55suW#0du5C7LE zPO-~)6?Ln!N*fxAx)ibLQUY_O?J&__SVLoO6?4Na-^wTqSm=zqiNt3F z5aN$9GHFk=?i{o935aE#DPD`CNe0#4{G#oM2bNo5iWvgrI0)%a zQy0Ys(O-DVqs#Ig?P$Cdu+ACTJ<;l7@UQWz%%tt3C#OI*-VF9LuIapbgOA=iwAIo( zd8TQwo(}3$LLmP8$VW#UXhv;48(G1N3+fceH?CVQ=L;`Hx<8a+Fe0rA&ZSYCqfGEu z5&Q40ZV20j?;y}T6!nyyVY);-urLmjNu14E*!dv%AbIiJZjN2p)pad?!wM?#ISQvm zM(tHU_ly;={>6*mfOqb-DoN-hk^*j zoz+MylFB`LFQK4y`!g|D{MCq;GY%zRs$0Zj=n#!SIg#^g#{}G<_DD^wHW^;v>Jc+) z0bwi&9DXbk<*ES-J~lGcsQlz8y&P)n(HF){-+|?$F~3^Wpgqj}%Sw)6(T%{POa86A ztOmK{82OL9qB(Gp+kXD{Do#hHem5$@A=T^O)xAw4qm)uq$jrSPs!*NuQ#vz;%n-0} zeos+)4-ow7IClKZamBeP1;XT&Z!xp`z*v9d&;V=bgFq_TUV_C5m-mnU%Tl<|Am%oy zdBQHC%19B5mS5?474x-njDwN54{w6>iD!}d#Je6~xTSlS{k*X*I!-f$A$$*jhxIs&5xJ+fHoBrRaQ?H3Dd2?3)2Hc@9jS9q8sXC z$ZL7t_gR^^wII)d!}tyru)V#N?f!LwKQr!9H6-VwK$G(-uP_)>Suy->?IU5ATS!jY zdO$)zc--9T0}O{`RcYBat1CJ@fgfCjhX{sUkyascPEmLxGKCrWCBQE8z2=9GR*&R=Y$wS+$TLg?*b5dYoZydg;kAuzc|^P6qDbpyOhFKT4L8!HpuzXP=i|D|pd#{$ ztRa@$%uwSb|I%=zDy#m?>=gqlq4Ckt!<$m{&mH-n>d~{w6Hb9Ny6Nmtb;~PhvVhNz zozB-R=u2-Cvf%@n@!|S%!_u|5p`ykNG4*M`Lb%E>S4Ts8v~@Q8IWnayr=A!$!*}1Y z%G8r|{q{lUcJbOQrA9PqL0P$y3Y)hQzw+8aoLKA%6cs(t1yb$9Qmr)1oRFZ{fBa5N zRW`pcn9UTHydh#G>`t{M34k2`kd)Q&{b5_wrmKCyA05Hlv&Gwb`=IZZr6Piz`$qlH zop=6z%AT6^eiIWNxLW(~!w%+11x?_bbM=B;&QH|IV?JD+Va$|QS-QzLj;JHEOT6|x zvYPyO0;fGtyHAy?8;UAc*p+`R%7(2an4(bMM!Wigbymmy>lhI}7?FfpfIi%pz>y}u zI#wlL)@VrVNaXl>UXXA+HyTfwxU4id z!9h!=3;JumhsagP!9faCba@x4uE7g2gd@#)@1LlUVhTrz1e-IuWACKo-#*I=#`wPZ zgMb*CyN=-xJ_;J3u2X?6ROne_+KwmS!GWxyl4Gu!3b~9Hi9}AUkZ}GHUZnm`ukEw^ zB}3liv5{Zbm<6`yo0_uqTuYl`zvjZIH#`HM`Htsf^0zR;-M>DX$ul9X3CbH4TDt4V zgjb*Jv6JL*p*|GiH%Fo?d$Kk9E-o8*-PZ5lZ)L%9=Al2ii0QmeKiGOrO|r{o{rTh> 
zF~@(sZjX>waePu1keTZ4EIil-8;XBwma}Xc*bG}y2}pKcNy|zonPL1HkyJ~hv?xe- zCj^nK32VaC*Im5lUY+CV$f6h7XmL?4&Yq0{gTt-6$a(e&elhs?$XMq>dFKeNdA;oRfC=TPmq5x4B-xfEUL7ncj` zPY*Fv)9QiUm3g)&@6!yG|8Pe7gs*%%UZ{OoV+T&2aS+Ao2z|Zi_~l8OC?6$0^R6zv z(n=JS8Ruj7*!#6;1_UwHUKB^NQy?zoAG0z8C4W#74UwrzUWUb{GOUc4^WKP?z0P&*v76z4INe zr6!)XpZkV%7hc{(_=uMDU+>#T(5x; z0+PhxP28GUr{$7FQK3iNWDLv;LXz1}db{6)7QOs|Ct4V%hHNMg@tpQQfC?eM^KTx+ znVZ4*clBdP^WX3gH-f8YeLX0XfT#<>$y zmx8KZnbkjI6HC#*ok3SaU}yqv+Q?TOuZ_&S`uQ@KnA(Yl`1g(-?r>W_cB|Gd2LRqR zJ6pypj3%Fv;=B@K$(nzp`jvEK9||(2B-Ph&c$GII;HoCK5(NJiUf$ky6xgc3*$qyX ze;bq#Hlb&)+NNPYTvIsb(DI>p`cJ9B!4hATF;Cr_dnG3jArOFta@jkr>d(mzL2~Y6 zcCl*z0ZUFj5ey`hP87PYUNasK^L_4kOHj#VB_ zkDWl0pM?SA%>m3i9t~L(%-oJTYhQnnAGfg9IA@W^0I_;_^jzY^5s@ZR_;W`|EMRkU zHA)VcwhHzKj>QMeydB_o;Y~T$Xk+uB6aHZZiN$^;o-*^v#U&3Zxy`qyYZ*p5z&49f z+Cs6GU7uNTXp$2&j?NcK=aN2{YUL8Nh+gtXLu4`X4vKs77B&LCt`YU)S~g{0#{R+K zb0Y8hEvZe2fwO<5(ZhYCo59O`9F&@-JoB!5?eNHVdOG2GZcI;~!q`^vubr}zY3h`D z%YT^c>YQ+PL7(`74fK7+T+*4D7Iict6fUtT)&~Lbr`HZV+=z&ae*or%pPr0Q;aO

-LpOz$Q8E?+lB(@#p?c9~f*N5LQ?enM6^;-SB+ zJbTLQOG+9idyNR$YtqL0-iiP*acp(n^z-cE(XNd@4L-ZtK(9AHMY(?zxy;Uv6LSI% zDvHq~z}&&xNAu#$Iy}$7aVSeih^4hPSUo6qml)bS{}@eOoNj1yR|_DJrQqGTqf(ZmSG!6 zW}6S(eTmC-L_%GX^buwi$&Cv7-cZFHTP8VHSBFZA}nq}TodvSyXpi9r{&@XMk#mHoAcv2NJg0}_S}dK(XQUs_&O;2o5F`2I*W8~Wc*Ub| zAQuqlev0LB(KaA;KQa5Gb~A=wYf^1sggb^Lq^fhu_SEVie!rRNKi?gs+LQ0uwK~Z8 zNHA(s4vDzqp~zWgWqrOIPVN~2xX1$9jEo3a_>h5Po9AVj znaKxEFcrCN57@)LkCuP@i6yokEP`$9Oub%Oa@1l$zIvCE%`bwENNBHr2wVo}l$_n! z3ZzzBHKsKzG%0CbA-lnh#xV_sv6s$4!5~fVm(5Z}@_r)}xHFtjn0_1^06SOwjfHan z1`fP~#NL~1ewO){(eR9{f;xk}$sk5$@2aDBmoZW3Z)r2sw_4Dt6~aDsZ|R5|&+*YA zB}IIF6*~2Y^F&TkT>yG?{9ds5-l3qvha7V9@=5O#PpyjFc7sjfo$(6R6O zIc_uTBZ`W0Do|`YJL!(MW3T-0HYx48G!wm9Gn|;i_WbN} zT(*RS@o3GOW}BqdiTCu3-M9};D!J>n-aCM|xSV&3HpY@|)C^eJUZyeC90E+MpQ!Sk zy<#Rhsm+c79`GHF_$HnCrVqssh#f{hix-)Mr`qun$y%L~+n07fGQR7T6ja+032+bsOW-scta+J4A4BKf zk6`}Q`-6~g7bw$`W=#Suqn)%5w`T-T1q4pPG$1N8n|0)%ui)Z~>RYaoyxjPX!Ujb! zwndc5=0I#vo3X>KmUdi)j`yo(^-)c29xw3GZd44uRetew7oP)ns4YtF^!!83f1E1* zb>bB-*BkEGPB!X%#^&eNn)rTtIK8Ylq+ExNk@WeFpYrmLo^gwnCG&~xW57t9#Al2N z{)QE;XcCy2&g9u>bzQN#+`-##&z#N+`9?Zan~!%K=wm3?zKt0KjjT@A;?g!ADPTm; zIH`U~y7C29Xd7|YM_cp<3rVS-#-#1#`6YQJF+O~|iGBgCh4^d!8}<2_D`r(t&GJ!$ zS1$GoI+lgrGRPm{oh{71>_WGcU|om8v5P*lgL4-%7ohGd{?~Dwq=%*tdISREMm9mW zt>^7Ssz!&S2N;$Td$szcDK+pVYZfzesF2(+mtz}LXToqwdS>4&cABF?dBcM5)R{!@ z;T}pgYu3ucw4ho9MyO1p^bAfq!B-)I+_do1;{XI0WbNA*>)GqAZJ4HiyNqlQ|Zgf4T2iS0o z0K72lkYnY=H)*3_W~(|d&-y62$lK=v*>TH|K_79XGi08fb}Z3;nDfh>7G^XbY);GS(n0Cl-*bxp(6ox=LVKQH(3y zf1|niAse-E9gk|{by@HjZ=a&fR?_fF%Tr&dBnJQ;kjnno$9CJw-d_q>Fb?jsD zIHY#hwQj-HbgZ?5Sr|b9?I@nx5cQ}2%pTtk{g$ZDgt|zD!uyFLGJmZxsg9D>^oLzWcc)vj?{- zZCbmRUdCZvn3R&d!}7}Zl7u>x>Y~<(_scQ<1YGMQ#~*Itt60$XPyF+6LI-f*ZN(e7 z%Qd^8I%8Qz(NMP{Lj{%mEfh63^-HQZtq=~S&#W7ppV5HgjDE!;R|aJaI!K}C7(mCG zH4%3{dDbl&Y9sOmw$ZI-K7jjSsNnA|zja8sUNKQYl{Hn`jd%eyZ1s%?4J?@l(CrKk z_O7||}k?u&TNc$X?5{eZxp6cNIrDm%<{fWIq zxi5IZB6>Zpg_1|`;RV6+GW|T-^5PjLCe$qD?X6&i{)_1H0YY(8<9p`E==lqYvk@Co z^={d$Q^uRB?;u>#+7Oj#OajbUzLxucJi}pyrN=e3YC4)IT5ln$S(B%3?;debVXA*w 
znBK$sr}|q6CSe%t{lM}qaV9n#lZ=R$2bJ*5v`LaG$o2MX=&k#l<7KyuJ~WETnm`A+ zWw=S*A6y|tuU*yIs#LTyy?Kn-&^DQ_dX}pMzDp~Ma;`Tfnme?6n=4n*2L*B4)(O*( z$E(P&P=uUzBBwoPRC@2LKRIg1^0$Y0zF)Udh%Y9RbKi2jWYhj&J7nkU^a6gfKR5L! zxaT@#h|J$*^gH7o;LX?|@7&DmNd4_?;?B7jjr)}*?up)3<$cK1Qm<4y#M^FU@aQ7dY)Z7l(8CHS|RcA^K17VB04OOe_S^>P6R( zXp9@Zfgexlzvg!20tMA$3#Yk*fyI4KhJ`5ouz=4x@?T@!J^}NgYNfY_mgZD(4>Duy ze-+iZM2x&pDuvti^xROKK>}}WBgT6=2Wm#cX}G4`Rl4be5E3{G=d;v~tLm+{@P6r- z5jVukRlDiwNj_H~*1PvvacmnIpW^HXG{BIQ{hytK(E8romH^kw-276N=n83Dgi>mU z5CGi~btff8tbEPh%NZ(MUOxCQCd|mgn>x;BK!fMr*>mgHl6%4~Iy7hEK zm1kk_CK|~~3D8BV9xra}%OP&E?XjSt5kn$R&cC*!e`}h86|DP{eL4yKankBsL(iht zn>;Cv+1CE`MFZ<+M}nOYT?J~VdU@Jga}j&D5qL&b?c*^D7PWr$&Y(Gmj^=a@fyJ*y4)a%j)@`coN4kdeTtoqdrG&?NVUz8X6CB~)^pItS+cO=rIxECI_ zU9;-54K&zKJ2ww`Gr%8DcBxz|@OibtQf(f#7P!Fy82V?c`eo4%c>Ivei>%d(ua{r0>0IoG5l@PJ zU~GGbdOIH_Y!=r?jlYeYqMi&waE*CsZkr0%~uZP*Rx(9pQR zw1FqJcoeb*!Y3VIx#LBV2MYIwl$C$oe6nkvJ$ZLWDCHSf+5fMt^9*ZZ>-Kn%5|k!N zuPS&z1nHd!h!m+(LPV?}u~l zd-sQVX1>hqXZGG}?XuSIFD_1^lJgBALLB{iek*=DE$;n%_u&`g(vM=h=ayAQ5~hmc z^(D#UM_)ZTgtt9TNsB8R^vwiNx=@vwTgfeK@~E-6g=kresU($`sL9Eb1AXZ&!B8GK zCiL>yC6N@C`3M#Od+&5FO;?A@I$SZO(W`sAXemlo;JYoe1b}c|ye_@0C)Wc3>NWrJ z|2wsgEUipDk>G2HaDK=Pikrba$t$2sda7T$qQq(i-Vpi(_=P_u%x`p4W;5hm_BZI4 zdLK^xt)tfmZCaN*SO$bOqfnyfsCUO><#nHak*&R&_6Fx{_=Xnuv*)$XvHtx{Qk*M< zXOYndxd)u#S?6v>jsj{niE0H&66{ze2T+W=6eT4E>c_P-?59uZxzD)5^0&{3lGi2I zp{AsFeG2MEhD>jxRPWZa%m7_hw`k`a?klyq zACDt8ebM=Z&12l_#BBEr^T|Z+IH0KT$`thrl)@tHB%nMPUyEYAPa zzko+&rQ$bnh1rF+^U3tps9r=0^D5WLeN34dwmj-cU^7M7qlQ=x^M= z-3VVQEtC$o1%Hll4@&w3yyNM%)}lk1TbUqRNV5NN_pyXdMAgO8XnJfjQ8U zz*vyW>&w?STkZ|DzA=5r>pd|9tVtSiq5(=AHuLqe3bIi|`kF1v-*cDRFmF90-H800 zj^E`E;KQ24qcA60JlPNS*O8j*yHA6e;lm6O`H1(^i_o-sY6;>_w9g_7IO@?g1mcR)?29znsmgjTA!F5n>3{w>BurC=HCqFz{YrVPZ=00lafr?410N~ zSU;e~g6vB2CuGn=WgF&@k|M~Sg$6~V=T#7aK;R0~SEd7Es@V1E%5UNd)oh~FW{#3< zzNAKAcSPl0PkOD#IOFiQ0gWHRe!a0ZsBTl51o7P0puuJGrmAB-#(feFbPTH+D!1pG*zh{tXIuC-!X2 z;stpPjB@JO*nJXsG>}(rI5tX&LRD9|om8g=c>5q|a$A*GLs;=^5*f-VWbb|%Rw?Xg 
zdh{VeHroE0zgk$2>Q*F^N}sqo@Rg$k#=p>3FR_y?D3*DX8&HAd76=wz7IL*~pe8ba zl}{+`awGiZD}sun zfF~GOP}As)o6fpU`bu!7`ri8vP4xLzTT+NW$ekEo?e%pwqJQSM!I%3WMpVs zOGttRhdwE;NW4Z~ad(T-b8f4G_VbRMGtG*HM>m0wMdj(3q6Xt!s6^kxm(MM0zn7wp z(o16t+E0}Il!O{Ha=DE&hjlYcDpr7tNmUVkx_)S*U z*j6%8U(iV)WhpCM$%XEv%Q6GCOwc%L#k0VK($(HXzo{ z{;75<3gcLO`qPD~PX+od&;&Qj6@Jhf(|AML!w~J5_8pNwIKXPk>+nWj&&}WG>jF7H z^9FIA6ouxWX5`Fgo32yIDHQ0*dTPDelkjeK4^CprXJ>PlZHnesg37btmp1-c-EZ^T z%-tIL$$%|~)Iyi?gKc57P&^xzu;(aqLA=NI1Y>V=@@I!@Fa_9=Ol*Q(H7m$WY%^3#QbE-o zHu{CCW89=@D#_pn%aePKOV+#ULMIC%?M#@W zht?&EPkFn#?s52tmoqu31VIp`)`0UfLDZJ<&bL$&qGZ4aTb;g@TQC4Go$%Zao8%2tli}LbQ3J4y!bY5Op3L&jhMxNyP zc}HXF778HkV%68|kL4}qZ&QvSEZ8oujjGOIsvTs#+Zmi`9|25ck4!aUzC&7t)-R`J z00X|kGbdOSC53rBp%a*{J%^(+ zQMa=j1+#F-v8tS|sVT)uA(&QM0UMV_VCsju{)Drv%DzcP>3{CZ_!5dA9?sVJtg#?+x?-aIojzkVW1Wauq^8kI=-~k#k);4M3Ld}gyJb2DfvKvqkv zZcoGfz+2aM7wfENAbACnoveY|4t12GWWXdS()U$HUX z8)f`4yJ@gFu@I-Ez3mskQrJd3JmS;juBt9c4ZKn;7MKV{yuHIy-Ea8IXDe(}$5H01 z!>ep66R^rP^#uorius*BRIT0UPYMPjR?c%=|>}K$J}5nF1BIkc6ga>A8zna$kXy zeS>M=gYQT6EIBO?Z-XpVK2Szb2Q>@MSL0@5DxG!2yeA^qU*F-$yehX#Q+26+iUgFr zf7lept7`s1TPmmDQ90LqzB{|rKB;mkn!takDFhjU*%K~CkOAgiLhfnK!1#?Ikd`c9}jG^ z_V4HS`^kKGEi?QOl89x`J#4XRt(}V49*7%(5CMHA@kh$p7JIQvK>7gLkP{xxJ}8wQ zm;qp%S!69h3hWBlWIQ%y_iqZ$KgR*1v^DLcQyP`fkh{==TPalXnvN1_1@od`4-wHF z2}How)-W!ieJ3OfIOc47->;SD7~vpa+s?D##LP1@TWpyP@|wdg%kHt$*KL10n@>&> zJatOueMr)VaBFot6nr;UxfbB7iY24>w2q8-)EBXRhuh#!oIj*VsEu%J_Z!u%O`lJ-ks-@9I|?1ggV$| zsG}KoE8bjom3HV5vzi1Hc`MC*TdDFlrs1~s#QjKad#pU7B4PZ^)fI*bg{0Ui=}=D6 zYZYE@ODmZWsHiUA8~PNA$mTDkt4XZfTuSs=efJv4!&3x-2m#OooO%=9iy%-od*IbeK!LUvY^56BZG+^a zS^!i2uky5_Y|nYB8o3yTJ#a8jG1h{FFJAo(iqK+}|LY~dTN=gbGZ!}Nu(}U8t@*CL zx?qIR>tJTMOA#ysrFff?cO3cCoY`zXFy+iPl$I-Er+(1wM!~IzmJD%@mh2thz+C%B zX863qNkiVXoRS{vD=>d77Tj=lJs`lnWu_vgSvNKEn@INo-B_bVA7*yHi(_yYuvq2> zkK|o&gYB90Zg1o|`W8{SlY){bX~u&Y^&hLQpAQ@G#aP~2lebHekji}7dc@qh*^n|Q z>{dIW8(X-VYi&wN4dNWvRE|A)=OGqgj8d7fE=#DUYKzbd@VW*Ns-IHWX61b%c8I&( zNV6>0a>xgK(?$$#89A42WlE{<*=dhgrP>;Yjc?iJH%TIV+ojAtw}adbzeYwzHSgGe 
zYJ+Df8-A;vzSR(+f*Bu%L~d5YUNoC=mGU|J4#qUEVZ;S=?VWf$VN5SPE-)ZFo}%Mt zF~B6~UnOVG=W?iIPOhC|A5veDB!8Ku^&51O@h3<$caD44CHV%}ng8tA&1$U4hiJ-&$^YzFUzooL6q8G&4ygs*W1l+h!1IicCqmS{!mn^9G6rP~ryJ?Y~!c{Qs4l>qzluTBjN7}r)4{HgsH!C!OK;6NWUdsFTPR9lPB zd$fv-)iw1yIa4E6UQ>^062?w`S<={TTOV$964`qHc&nPf$-uNS57ieF4qb?_U~Sx{ z$t65i?#s>OYqsl>W#!hRs(8Mk2xT~C4SHS!g-rAvrB1Xhb})7F^E7c@lu$+Ge0L(C zZC~t=*G|IcRKQ`K!ibbC)HDELy_5aDN+O8f%PRd??2+}Cpuk4={*?mO42RkuTj~+w zWl^bX+ykARuGmqVgcFEMIu(l>eYrzrnGcad=rhundqNrKR_MAI(R?!T1B=LxH+XDO zUg#Spr;VSGy`Z`92n+sKPX<^XJKBLhtgn}u6KW?IknX5AHJbQnQj(Eh6Z$IH)qcac zIV_+d#b5-JQXg^w18hi#5~n~Q9*p?XqR||ah$$B^XhBaq4DnFyv{lS^{s!^F=^jHR zhfGuXDD~BEmA^TuJqRK0Cvd&qnlTt)^$yPptzkG%Mu+NI3bLU_M%Yl7Hvsh}xWmt? zR#@88jaKGqO??0$Y{%utbpc7Pxx*KvV=yQn;E}eu2M%4^JmhvEj+?m;-s8YycQa>Q z!pT3HAWj2}i{D~qy0}8A$v{|0gf~v&jHx^Y4ZG%1&tjZG0nUSgIJ5P}^&QoZ;AP@a zYM=QsF8Q7v0&cUag%g3--MiUjt8Uzthzk5or|r(5sBb_ zhvTnlFi3(Vv~&Ml6R#RBT2Gvjg`0ur{+Xo=nHQA^B=0Sj_qEqNer8iS3eU#VY(N%yZ-%%PYlWK)j5L!|=9D)Y5Mk9M+0~vyeQIeD(nee)Z z({ePh?QZmSOT1*cl8K=u=lWzEDC&6+C&7K`0$gLtXN^j4Pj?@^Wc9hjTqWOchbMLX+o&t^nkRDV>p<3{s+gNoi=^+4HKhfIyYS$z6G=*g4w^82#o2p5 z`7DWsiC^7+?FS-SMK^o}dV1G5O;k(BGOH>EN#|ooFY-{p=n=8XK=F#*GPxANi1$y2 zjpp0<4Vuv?Ewt}VEdA9v-YfP2fawFaKa)8-}`Ogd3=sWMLqYXKb_t6SRu_ z(+%|&+4*{mN{^=JhZ+^+?ad%*BNn|)H6rw7EwC2b{EyH=>xK%lNp0^u1tvenIK_;M=Hy$V-k z<2wX5MWYKuNnEKOutrKdpvS<(JHN_Pdji04{stKnNjH~?QrR}iQ$!|(M_3JDC1SIj zf*l{_53S9uK@8Tx}7zv`1){XR%j;wpzoKn(7BiOCC-vdz#Ti3L>;P3HF z2PvT-P^M=mGe{sgPG3AURR6oW{XA|e#~6^gi%&98MznW$xR~jmi@=f9F)p@{Dn|K; z(Zt;Fn(EEn8Sb2fhH1)6c?1D4Dvyl;sl3-PB@d^(@I=fJl-Wn*5+rqq8?|V+7@Ul7vZZ0kY59m1@EI>MrU(8Sax*<`Tp!+ScAw@;b+o$vqp zv@Gz%>N)QI8u3p|Y3<+RADWsWaCdu3fYj9As>vzBc<$lPj1ASxkMuU7dvH>d>c#3` zkA#n)M2$9Z@D6p?LXJbiKj-GO{>Ob)I~PBUc(kQX7boG>QX@!cCX82Kq02zlnf`aXhsB^?; z8KL?8u@sJV`w&;DpLj5ZTM&9cATg7?+=>jcPyfHG&Ej@r2f@jhs60>Zuw`Q#JL Date: Fri, 20 Sep 2024 17:04:22 +0800 Subject: [PATCH 076/103] =?UTF-8?q?feat=20=20image2sketch=20=E6=96=B0?= =?UTF-8?q?=E5=A2=9E=E9=A3=8E=E6=A0=BC=E4=B8=8A=E4=BC=A0=20=E8=87=AA?= 
=?UTF-8?q?=E5=AE=9A=E4=B9=89=E9=A3=8E=E6=A0=BC=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 2 -- 1 file changed, 2 deletions(-) diff --git a/.gitignore b/.gitignore index fe14af2..8fd7817 100644 --- a/.gitignore +++ b/.gitignore @@ -124,7 +124,6 @@ seg_cache logs seg_result/ seg_result -*.png uwsgi *.yaml *.yml @@ -134,7 +133,6 @@ Dockerfile app/logs app/logs/* *.log -*.jpg /qodana.yaml .pth .pytorch \ No newline at end of file From 1c1efeaeb2640bbab16505e175a66c8d79adee1b Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 20 Sep 2024 17:06:26 +0800 Subject: [PATCH 077/103] =?UTF-8?q?feat=20=20image2sketch=20=E6=96=B0?= =?UTF-8?q?=E5=A2=9E=E9=A3=8E=E6=A0=BC=E4=B8=8A=E4=BC=A0=20=E8=87=AA?= =?UTF-8?q?=E5=AE=9A=E4=B9=89=E9=A3=8E=E6=A0=BC=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/core/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/core/config.py b/app/core/config.py index 2e4d7bd..35c12b7 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -20,7 +20,7 @@ class Settings(BaseSettings): OSS = "minio" -DEBUG = True +DEBUG = False if DEBUG: LOGS_PATH = "logs/" CATEGORY_PATH = "service/attribute/config/descriptor/category/category_dis.csv" From 9b940c9cf8466eb1e4e69e37de0f1aeb67eecd4c Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 23 Sep 2024 10:47:34 +0800 Subject: [PATCH 078/103] =?UTF-8?q?feat=20=20api=20=E8=B0=83=E7=94=A8?= =?UTF-8?q?=E6=AC=A1=E6=95=B0=E8=AE=B0=E5=BD=95=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/core/record_api_count.py | 44 ++++++++++++++++++++++++++++++++++++ app/main.py | 2 ++ 2 files changed, 46 insertions(+) create mode 100644 app/core/record_api_count.py diff --git a/app/core/record_api_count.py b/app/core/record_api_count.py new file mode 100644 index 0000000..c93a642 --- /dev/null +++ 
b/app/core/record_api_count.py @@ -0,0 +1,44 @@ +from fastapi import Request +from sqlalchemy import Column, Integer, String +from sqlalchemy import create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker + +# 创建数据库引擎 +DATABASE_URL = "sqlite:///./api_count.db" +engine = create_engine(DATABASE_URL) + +# 创建数据库会话 +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +# 创建数据库模型基类 +Base = declarative_base() + + +# 定义存储调用次数的数据库模型 +class CallCount(Base): + __tablename__ = "call_count" + id = Column(Integer, primary_key=True, index=True) + service_name = Column(String, nullable=False) + call_count = Column(Integer, default=0) + + +# 创建数据库表(如果不存在) +Base.metadata.create_all(bind=engine) + + +# 定义中间件函数,用于记录接口调用次数 + +def count_api_calls(request: Request, call_next): + db = SessionLocal() + service_name = request.url.path + call_record = db.query(CallCount).filter_by(service_name=service_name).first() + if call_record is None: + call_record = CallCount(service_name=service_name, call_count=1) + db.add(call_record) + else: + call_record.call_count += 1 + db.commit() + db.refresh(call_record) + response = call_next(request) + return response diff --git a/app/main.py b/app/main.py index b085d7d..95c666a 100644 --- a/app/main.py +++ b/app/main.py @@ -8,6 +8,7 @@ from fastapi import FastAPI from app.api.api_route import router from app.core.config import settings +from app.core.record_api_count import count_api_calls from app.schemas.response_template import ResponseModel from logging_env import LOGGER_CONFIG_DICT @@ -34,6 +35,7 @@ def get_application() -> FastAPI: allow_methods=["*"], allow_headers=["*"], ) + application.middleware("http")(count_api_calls) application.include_router(router=router, prefix=settings.API_PREFIX) return application From e62b35a721af67aa42997e8e24a2bbe96672c3a9 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 25 Sep 2024 11:13:25 +0800 Subject: [PATCH 079/103] 
=?UTF-8?q?feat=20=20=E4=BB=A3=E7=A0=81=E6=95=B4?= =?UTF-8?q?=E7=90=86=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_design.py | 18 +++++++----------- .../items/pipeline/segmentation.py | 8 ++++---- .../design/items/pipelines/segmentation.py | 8 ++++---- .../design_test/pipeline/segmentation.py | 8 ++++---- 4 files changed, 19 insertions(+), 23 deletions(-) diff --git a/app/api/api_design.py b/app/api/api_design.py index ba8f04d..68f2a44 100644 --- a/app/api/api_design.py +++ b/app/api/api_design.py @@ -7,7 +7,6 @@ from fastapi import APIRouter, HTTPException, UploadFile, File, Form from app.schemas.design import DesignModel, DesignProgressModel, ModelProgressModel, DBGConfigModel from app.schemas.response_template import ResponseModel from app.service.design.model_process_service import model_transpose -from app.service.design.service import generate from app.service.design.service_design_batch_generate import start_design_batch_generate from app.service.design.utils.redis_utils import Redis from app.service.design_test.batch_design import design_generate @@ -185,17 +184,14 @@ def design(request_data: DesignModel): # data = generate(request_data=request_data) # logger.info(f"design response @@@@@@:{json.dumps(data)}") # - logger.info(f"design request item is : @@@@@@:{json.dumps(request_data.dict())}") - data = design_generate(request_data=request_data) - logger.info(f"design response @@@@@@:{json.dumps(data)}") - # try: - # logger.info(f"design request item is : @@@@@@:{json.dumps(request_data.dict())}") - # data = generate(request_data=request_data) - # logger.info(f"design response @@@@@@:{json.dumps(data)}") - # except Exception as e: - # logger.warning(f"design Run Exception @@@@@@:{e}") - # raise HTTPException(status_code=404, detail=str(e)) + try: + logger.info(f"design request item is : @@@@@@:{json.dumps(request_data.dict())}") + data = design_generate(request_data=request_data) + 
logger.info(f"design response @@@@@@:{json.dumps(data)}") + except Exception as e: + logger.warning(f"design Run Exception @@@@@@:{e}") + raise HTTPException(status_code=404, detail=str(e)) return ResponseModel(data=data) diff --git a/app/service/design/design_batch/items/pipeline/segmentation.py b/app/service/design/design_batch/items/pipeline/segmentation.py index d8aa6d2..2a6d760 100644 --- a/app/service/design/design_batch/items/pipeline/segmentation.py +++ b/app/service/design/design_batch/items/pipeline/segmentation.py @@ -48,9 +48,9 @@ class Segmentation: file_path = f"{SEG_CACHE_PATH}{image_id}.npy" try: np.save(file_path, seg_result) - print("保存成功", os.path.abspath(file_path)) + logging.info("保存成功", os.path.abspath(file_path)) except Exception as e: - print(f"保存失败: {e}") + logger.error(f"保存失败: {e}") @staticmethod def load_seg_result(image_id): @@ -60,8 +60,8 @@ class Segmentation: seg_result = np.load(file_path) return True, seg_result except FileNotFoundError: - print("文件不存在") + logger.warning("文件不存在") return False, None except Exception as e: - print(f"加载失败: {e}") + logger.error(f"加载失败: {e}") return False, None diff --git a/app/service/design/items/pipelines/segmentation.py b/app/service/design/items/pipelines/segmentation.py index c6c7b15..0fe028e 100644 --- a/app/service/design/items/pipelines/segmentation.py +++ b/app/service/design/items/pipelines/segmentation.py @@ -53,9 +53,9 @@ class Segmentation(object): file_path = f"{SEG_CACHE_PATH}{image_id}.npy" try: np.save(file_path, seg_result) - print("保存成功", os.path.abspath(file_path)) + logger.info("保存成功", os.path.abspath(file_path)) except Exception as e: - print(f"保存失败: {e}") + logger.error(f"保存失败: {e}") @staticmethod def load_seg_result(image_id): @@ -64,8 +64,8 @@ class Segmentation(object): seg_result = np.load(file_path) return True, seg_result except FileNotFoundError: - print("文件不存在") + logger.warning("文件不存在") return False, None except Exception as e: - print(f"加载失败: {e}") + logger.error(f"加载失败: 
{e}") return False, None diff --git a/app/service/design_test/pipeline/segmentation.py b/app/service/design_test/pipeline/segmentation.py index 5c248b2..3897129 100644 --- a/app/service/design_test/pipeline/segmentation.py +++ b/app/service/design_test/pipeline/segmentation.py @@ -51,9 +51,9 @@ class Segmentation: file_path = f"{SEG_CACHE_PATH}{image_id}.npy" try: np.save(file_path, seg_result) - print("保存成功", os.path.abspath(file_path)) + logger.info("保存成功", os.path.abspath(file_path)) except Exception as e: - print(f"保存失败: {e}") + logger.error(f"保存失败: {e}") @staticmethod def load_seg_result(image_id): @@ -63,8 +63,8 @@ class Segmentation: seg_result = np.load(file_path) return True, seg_result except FileNotFoundError: - print("文件不存在") + logger.warning("文件不存在") return False, None except Exception as e: - print(f"加载失败: {e}") + logger.error(f"加载失败: {e}") return False, None From 60bf85bf8841e6b97309e76887881c8e2390c99e Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 25 Sep 2024 11:40:11 +0800 Subject: [PATCH 080/103] =?UTF-8?q?feat=20=20=E4=BB=A3=E7=A0=81=E6=95=B4?= =?UTF-8?q?=E7=90=86=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_design.py | 4 +- .../{design_test => design_fast}/__init__.py | 0 .../design_generate.py} | 294 +----------------- app/service/design_fast/item.py | 61 ++++ .../pipeline/__init__.py | 0 .../pipeline/color.py | 0 .../pipeline/contour_detection.py | 0 .../pipeline/keypoint.py | 0 .../pipeline/loading.py | 0 .../pipeline/print_painting.py | 0 .../pipeline/scale.py | 0 .../pipeline/segmentation.py | 0 .../pipeline/split.py | 2 +- .../utils/__init__.py | 0 .../utils/conversion_image.py | 0 .../utils/design_ensemble.py | 0 app/service/design_fast/utils/organize.py | 77 +++++ app/service/design_fast/utils/progress.py | 30 ++ .../utils/redis_utils.py | 0 .../utils/synthesis_item.py | 16 + .../utils/upload_image.py | 0 app/service/design_test/item.py | 281 ----------------- 
22 files changed, 194 insertions(+), 571 deletions(-) rename app/service/{design_test => design_fast}/__init__.py (100%) rename app/service/{design_test/batch_design.py => design_fast/design_generate.py} (81%) create mode 100644 app/service/design_fast/item.py rename app/service/{design_test => design_fast}/pipeline/__init__.py (100%) rename app/service/{design_test => design_fast}/pipeline/color.py (100%) rename app/service/{design_test => design_fast}/pipeline/contour_detection.py (100%) rename app/service/{design_test => design_fast}/pipeline/keypoint.py (100%) rename app/service/{design_test => design_fast}/pipeline/loading.py (100%) rename app/service/{design_test => design_fast}/pipeline/print_painting.py (100%) rename app/service/{design_test => design_fast}/pipeline/scale.py (100%) rename app/service/{design_test => design_fast}/pipeline/segmentation.py (100%) rename app/service/{design_test => design_fast}/pipeline/split.py (98%) rename app/service/{design_test => design_fast}/utils/__init__.py (100%) rename app/service/{design_test => design_fast}/utils/conversion_image.py (100%) rename app/service/{design_test => design_fast}/utils/design_ensemble.py (100%) create mode 100644 app/service/design_fast/utils/organize.py create mode 100644 app/service/design_fast/utils/progress.py rename app/service/{design_test => design_fast}/utils/redis_utils.py (100%) rename app/service/{design_test => design_fast}/utils/synthesis_item.py (94%) rename app/service/{design_test => design_fast}/utils/upload_image.py (100%) delete mode 100644 app/service/design_test/item.py diff --git a/app/api/api_design.py b/app/api/api_design.py index 68f2a44..2720e36 100644 --- a/app/api/api_design.py +++ b/app/api/api_design.py @@ -8,8 +8,8 @@ from app.schemas.design import DesignModel, DesignProgressModel, ModelProgressMo from app.schemas.response_template import ResponseModel from app.service.design.model_process_service import model_transpose from 
app.service.design.service_design_batch_generate import start_design_batch_generate -from app.service.design.utils.redis_utils import Redis -from app.service.design_test.batch_design import design_generate +from app.service.design_fast.design_generate import design_generate +from app.service.design_fast.utils.redis_utils import Redis router = APIRouter() logger = logging.getLogger() diff --git a/app/service/design_test/__init__.py b/app/service/design_fast/__init__.py similarity index 100% rename from app/service/design_test/__init__.py rename to app/service/design_fast/__init__.py diff --git a/app/service/design_test/batch_design.py b/app/service/design_fast/design_generate.py similarity index 81% rename from app/service/design_test/batch_design.py rename to app/service/design_fast/design_generate.py index 1d0fe99..ac1f79c 100644 --- a/app/service/design_test/batch_design.py +++ b/app/service/design_fast/design_generate.py @@ -1,35 +1,25 @@ -import io -import json import logging import threading import time -import uuid -import cv2 -import numpy as np -from PIL import Image from minio import Minio -from app.core.config import PRIORITY_DICT -from app.service.design.utils.redis_utils import Redis -from app.service.design_test.item import BodyItem, TopItem, BottomItem +from app.core.config import * +from app.service.design_fast.item import BodyItem, TopItem, BottomItem +from app.service.design_fast.utils.organize import organize_body, organize_clothing +from app.service.design_fast.utils.progress import final_progress, update_progress +from app.service.design_fast.utils.synthesis_item import synthesis, synthesis_single, update_base_size_priority from app.service.utils.decorator import RunTime -from app.service.utils.new_oss_client import oss_upload_image id_lock = threading.Lock() logger = logging.getLogger() -# minio 配置 -MINIO_URL = "www.minio.aida.com.hk:12024" -MINIO_ACCESS = 'vXKFLSJkYeEq2DrSZvkB' -MINIO_SECRET = 'uKTZT3x7C43WvPN9QTc99DiRkwddWZrG9Uh3JVlR' 
-MINIO_SECURE = True - minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) def process_item(item, basic): + # 处理project中单个item if item['type'] == "Body": body_server = BodyItem(data=item, basic=basic, minio_client=minio_client) item_data = body_server.process() @@ -43,6 +33,7 @@ def process_item(item, basic): def process_layer(item, layers): + # item处理结束后 对图层数据组装 if item['name'] == "mannequin": body_layer = organize_body(item) layers.append(body_layer) @@ -53,252 +44,6 @@ def process_layer(item, layers): layers.append(back_layer) -def organize_body(layer): - body_layer = dict(priority=0, - name=layer["name"].lower(), - image=layer['body_image'], - image_url=layer['body_path'], - mask_image=None, - mask_url=None, - sacle=1, - # mask=layer['body_mask'], - position=(0, 0)) - return body_layer - - -def organize_clothing(layer): - # 起始坐标 - start_point = calculate_start_point(layer['keypoint'], layer['scale'], layer['clothes_keypoint'], layer['body_point_test'], layer["offset"], layer["resize_scale"]) - # 前片数据 - front_layer = dict(priority=layer['priority'] if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_front', None), - name=f'{layer["name"].lower()}_front', - image=layer["front_image"], - # mask_image=layer['front_mask_image'], - image_url=layer['front_image_url'], - mask_url=layer['mask_url'], - sacle=layer['scale'], - clothes_keypoint=layer['clothes_keypoint'], - position=start_point, - resize_scale=layer["resize_scale"], - mask=cv2.resize(layer['mask'], layer["front_image"].size), - gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", - pattern_image_url=layer['pattern_image_url'], - pattern_image=layer['pattern_image'] - - ) - # 后片数据 - back_layer = dict(priority=-layer.get("priority", 0) if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_back', None), - name=f'{layer["name"].lower()}_back', - 
image=layer["back_image"], - # mask_image=layer['back_mask_image'], - image_url=layer['back_image_url'], - mask_url=layer['mask_url'], - sacle=layer['scale'], - clothes_keypoint=layer['clothes_keypoint'], - position=start_point, - resize_scale=layer["resize_scale"], - mask=cv2.resize(layer['mask'], layer["front_image"].size), - gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", - pattern_image_url=layer['pattern_image_url'], - ) - return front_layer, back_layer - - -def calculate_start_point(keypoint_type, scale, clothes_point, body_point, offset, resize_scale): - """ - Align left - Args: - keypoint_type: string, "waistband" | "shoulder" | "ear_point" - scale: float - clothes_point: dict{'left': [x1, y1, z1], 'right': [x2, y2, z2]} - body_point: dict, containing keypoint data of body figure - - Returns: - start_point: tuple (x', y') - x' = y_body - y1 * scale + offset - y' = x_body - x1 * scale + offset - - """ - side_indicator = f'{keypoint_type}_left' - start_point = ( - int(body_point[side_indicator][1] + offset[1] - int(clothes_point[side_indicator][0]) * scale), # y - int(body_point[side_indicator][0] + offset[0] - int(clothes_point[side_indicator][1]) * scale) # x - ) - return start_point - - -def update_base_size_priority(layers, size): - # 计算透明背景图片的宽度 - min_x = min(info['position'][1] for info in layers) - x_list = [] - for info in layers: - if info['image'] is not None: - x_list.append(info['position'][1] + info['image'].width) - max_x = max(x_list) - new_width = max_x - min_x - new_height = 700 - # 更新坐标 - for info in layers: - info['adaptive_position'] = (info['position'][0], info['position'][1] - min_x) - return layers, (new_width, new_height) - - -def synthesis_single(front_image, back_image): - result_image = None - if front_image: - result_image = front_image - if back_image: - result_image.paste(back_image, (0, 0), back_image) - image_data = io.BytesIO() - result_image.save(image_data, format='PNG') - 
image_data.seek(0) - image_bytes = image_data.read() - bucket_name = 'aida-results' - object_name = f'result_{generate_uuid()}.png' - oss_upload_image(oss_client=minio_client, bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) - return f"{bucket_name}/{object_name}" - - -def oss_upload_json(json_data, object_name): - try: - with open(f"app/service/design/design_batch/response_json/{object_name}", 'w') as file: - json.dump(json_data, file, indent=4) - - oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) - oss_client.fput_object("test", object_name, f"app/service/design/design_batch/response_json/{object_name}") - except Exception as e: - logger.warning(str(e)) - - -def generate_uuid(): - with id_lock: - unique_id = str(uuid.uuid1()) - return unique_id - - -def positioning(all_mask_shape, mask_shape, offset): - all_start = 0 - all_end = 0 - mask_start = 0 - mask_end = 0 - if offset == 0: - all_start = 0 - all_end = min(all_mask_shape, mask_shape) - - mask_start = 0 - mask_end = min(all_mask_shape, mask_shape) - elif offset > 0: - all_start = min(offset, all_mask_shape) - all_end = min(offset + mask_shape, all_mask_shape) - - mask_start = 0 - mask_end = 0 if offset > all_mask_shape else min(all_mask_shape - offset, mask_shape) - elif offset < 0: - if abs(offset) > mask_shape: - all_start = 0 - all_end = 0 - else: - all_start = 0 - if mask_shape - abs(offset) > all_mask_shape: - all_end = min(mask_shape - abs(offset), all_mask_shape) - else: - all_end = mask_shape - abs(offset) - - if abs(offset) > mask_shape: - mask_start = mask_shape - mask_end = mask_shape - else: - mask_start = abs(offset) - if mask_shape - abs(offset) >= all_mask_shape: - mask_end = all_mask_shape + abs(offset) - else: - mask_end = mask_shape - return all_start, all_end, mask_start, mask_end - - -def synthesis(data, size, basic_info): - # 创建底图 - base_image = Image.new('RGBA', size, (0, 0, 0, 0)) - try: - all_mask_shape = 
(size[1], size[0]) - body_mask = None - for d in data: - if d['name'] == 'body' or d['name'] == 'mannequin': - # 创建一个新的宽高透明图像, 把模特贴上去获取mask - transparent_image = Image.new("RGBA", size, (0, 0, 0, 0)) - transparent_image.paste(d['image'], (d['adaptive_position'][1], d['adaptive_position'][0]), d['image']) # 此处可变数组会被paste篡改值,所以使用下标获取position - body_mask = np.array(transparent_image.split()[3]) - - # 根据新的坐标获取新的肩点 - left_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_left'], [d['adaptive_position'][1], d['adaptive_position'][0]])] - right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], [d['adaptive_position'][1], d['adaptive_position'][0]])] - body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 - _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) - top_outer_mask = np.array(binary_body_mask) - bottom_outer_mask = np.array(binary_body_mask) - - top = True - bottom = True - i = len(data) - while i: - i -= 1 - if top and data[i]['name'] in ["blouse_front", "outwear_front", "dress_front", "tops_front"]: - top = False - mask_shape = data[i]['mask'].shape - y_offset, x_offset = data[i]['adaptive_position'] - # 初始化叠加区域的起始和结束位置 - all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) - all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) - # 将叠加区域赋值为相应的像素值 - _, sketch_mask = cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) - background = np.zeros_like(top_outer_mask) - background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] - top_outer_mask = background + top_outer_mask - elif bottom and data[i]['name'] in ["trousers_front", "skirt_front", "bottoms_front", "dress_front"]: - bottom = False - mask_shape = 
data[i]['mask'].shape - y_offset, x_offset = data[i]['adaptive_position'] - # 初始化叠加区域的起始和结束位置 - all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) - all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) - # 将叠加区域赋值为相应的像素值 - _, sketch_mask = cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) - background = np.zeros_like(top_outer_mask) - background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] - bottom_outer_mask = background + bottom_outer_mask - elif bottom is False and top is False: - break - - all_mask = cv2.bitwise_or(top_outer_mask, bottom_outer_mask) - - for layer in data: - if layer['image'] is not None: - if layer['name'] != "body": - test_image = Image.new('RGBA', size, (0, 0, 0, 0)) - test_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) - mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) - mask_alpha = Image.fromarray(mask_data) - cropped_image = Image.composite(test_image, Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) - base_image.paste(test_image, (0, 0), cropped_image) # test_image 已经按照坐标贴到最大宽值的图片上 坐着这里坐标为00 - else: - base_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) - - result_image = base_image - - image_data = io.BytesIO() - result_image.save(image_data, format='PNG') - image_data.seek(0) - - # oss upload - image_bytes = image_data.read() - bucket_name = "aida-results" - object_name = f'result_{generate_uuid()}.png' - oss_upload_image(oss_client=minio_client, bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) - return f"{bucket_name}/{object_name}" - except Exception as e: - logging.warning(f"synthesis runtime exception : {e}") - - 
@RunTime def design_generate(request_data): objects_data = request_data.dict()['objects'] @@ -380,31 +125,6 @@ def design_generate(request_data): return object_response -def update_progress(process_id, total): - logger.info(f"{process_id} , {total}") - r = Redis() - progress = r.read(key=process_id) - if progress and total != 1: - if int(progress) <= 100: - r.write(key=process_id, value=int(progress) + int(100 / total)) - else: - r.write(key=process_id, value=99) - return progress - elif total == 1: - r.write(key=process_id, value=100) - return progress - else: - r.write(key=process_id, value=int(100 / total)) - return progress - - -def final_progress(process_id): - r = Redis() - progress = r.read(key=process_id) - r.write(key=process_id, value=100) - return progress - - if __name__ == '__main__': object_data = { "objects": [ diff --git a/app/service/design_fast/item.py b/app/service/design_fast/item.py new file mode 100644 index 0000000..e10320d --- /dev/null +++ b/app/service/design_fast/item.py @@ -0,0 +1,61 @@ +from app.service.design_fast.pipeline import LoadImage, KeyPoint, Segmentation, Color, PrintPainting, Scaling, Split, LoadBodyImage, ContourDetection + + +class BaseItem: + def __init__(self, data, basic): + self.result = data.copy() + self.result['name'] = data['type'].lower() + self.result.pop("type") + self.result.update(basic) + + +class TopItem(BaseItem): + def __init__(self, data, basic, minio_client): + super().__init__(data, basic) + self.top_pipeline = [ + LoadImage(minio_client), + KeyPoint(), + Segmentation(minio_client), + Color(minio_client), + PrintPainting(minio_client), + Scaling(), + Split(minio_client) + ] + + def process(self): + for item in self.top_pipeline: + self.result = item(self.result) + return self.result + + +class BottomItem(BaseItem): + def __init__(self, data, basic, minio_client): + super().__init__(data, basic) + self.bottom_pipeline = [ + LoadImage(minio_client), + KeyPoint(), + ContourDetection(), + # Segmentation(), + 
Color(minio_client), + PrintPainting(minio_client), + Scaling(), + Split(minio_client) + ] + + def process(self): + for item in self.bottom_pipeline: + self.result = item(self.result) + return self.result + + +class BodyItem(BaseItem): + def __init__(self, data, basic, minio_client): + super().__init__(data, basic) + self.top_pipeline = [ + LoadBodyImage(minio_client), + ] + + def process(self): + for item in self.top_pipeline: + self.result = item(self.result) + return self.result diff --git a/app/service/design_test/pipeline/__init__.py b/app/service/design_fast/pipeline/__init__.py similarity index 100% rename from app/service/design_test/pipeline/__init__.py rename to app/service/design_fast/pipeline/__init__.py diff --git a/app/service/design_test/pipeline/color.py b/app/service/design_fast/pipeline/color.py similarity index 100% rename from app/service/design_test/pipeline/color.py rename to app/service/design_fast/pipeline/color.py diff --git a/app/service/design_test/pipeline/contour_detection.py b/app/service/design_fast/pipeline/contour_detection.py similarity index 100% rename from app/service/design_test/pipeline/contour_detection.py rename to app/service/design_fast/pipeline/contour_detection.py diff --git a/app/service/design_test/pipeline/keypoint.py b/app/service/design_fast/pipeline/keypoint.py similarity index 100% rename from app/service/design_test/pipeline/keypoint.py rename to app/service/design_fast/pipeline/keypoint.py diff --git a/app/service/design_test/pipeline/loading.py b/app/service/design_fast/pipeline/loading.py similarity index 100% rename from app/service/design_test/pipeline/loading.py rename to app/service/design_fast/pipeline/loading.py diff --git a/app/service/design_test/pipeline/print_painting.py b/app/service/design_fast/pipeline/print_painting.py similarity index 100% rename from app/service/design_test/pipeline/print_painting.py rename to app/service/design_fast/pipeline/print_painting.py diff --git 
a/app/service/design_test/pipeline/scale.py b/app/service/design_fast/pipeline/scale.py similarity index 100% rename from app/service/design_test/pipeline/scale.py rename to app/service/design_fast/pipeline/scale.py diff --git a/app/service/design_test/pipeline/segmentation.py b/app/service/design_fast/pipeline/segmentation.py similarity index 100% rename from app/service/design_test/pipeline/segmentation.py rename to app/service/design_fast/pipeline/segmentation.py diff --git a/app/service/design_test/pipeline/split.py b/app/service/design_fast/pipeline/split.py similarity index 98% rename from app/service/design_test/pipeline/split.py rename to app/service/design_fast/pipeline/split.py index 50e167d..35605b8 100644 --- a/app/service/design_test/pipeline/split.py +++ b/app/service/design_fast/pipeline/split.py @@ -8,7 +8,7 @@ from cv2 import cvtColor, COLOR_BGR2RGBA from app.core.config import AIDA_CLOTHING from app.service.design.utils.conversion_image import rgb_to_rgba -from app.service.design_test.utils.upload_image import upload_png_mask +from app.service.design_fast.utils.upload_image import upload_png_mask from app.service.utils.generate_uuid import generate_uuid from app.service.utils.new_oss_client import oss_upload_image diff --git a/app/service/design_test/utils/__init__.py b/app/service/design_fast/utils/__init__.py similarity index 100% rename from app/service/design_test/utils/__init__.py rename to app/service/design_fast/utils/__init__.py diff --git a/app/service/design_test/utils/conversion_image.py b/app/service/design_fast/utils/conversion_image.py similarity index 100% rename from app/service/design_test/utils/conversion_image.py rename to app/service/design_fast/utils/conversion_image.py diff --git a/app/service/design_test/utils/design_ensemble.py b/app/service/design_fast/utils/design_ensemble.py similarity index 100% rename from app/service/design_test/utils/design_ensemble.py rename to app/service/design_fast/utils/design_ensemble.py diff 
--git a/app/service/design_fast/utils/organize.py b/app/service/design_fast/utils/organize.py new file mode 100644 index 0000000..8190de0 --- /dev/null +++ b/app/service/design_fast/utils/organize.py @@ -0,0 +1,77 @@ +import cv2 + +from app.core.config import PRIORITY_DICT + + +def organize_body(layer): + body_layer = dict(priority=0, + name=layer["name"].lower(), + image=layer['body_image'], + image_url=layer['body_path'], + mask_image=None, + mask_url=None, + sacle=1, + # mask=layer['body_mask'], + position=(0, 0)) + return body_layer + + +def organize_clothing(layer): + # 起始坐标 + start_point = calculate_start_point(layer['keypoint'], layer['scale'], layer['clothes_keypoint'], layer['body_point_test'], layer["offset"], layer["resize_scale"]) + # 前片数据 + front_layer = dict(priority=layer['priority'] if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_front', None), + name=f'{layer["name"].lower()}_front', + image=layer["front_image"], + # mask_image=layer['front_mask_image'], + image_url=layer['front_image_url'], + mask_url=layer['mask_url'], + sacle=layer['scale'], + clothes_keypoint=layer['clothes_keypoint'], + position=start_point, + resize_scale=layer["resize_scale"], + mask=cv2.resize(layer['mask'], layer["front_image"].size), + gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", + pattern_image_url=layer['pattern_image_url'], + pattern_image=layer['pattern_image'] + + ) + # 后片数据 + back_layer = dict(priority=-layer.get("priority", 0) if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_back', None), + name=f'{layer["name"].lower()}_back', + image=layer["back_image"], + # mask_image=layer['back_mask_image'], + image_url=layer['back_image_url'], + mask_url=layer['mask_url'], + sacle=layer['scale'], + clothes_keypoint=layer['clothes_keypoint'], + position=start_point, + resize_scale=layer["resize_scale"], + mask=cv2.resize(layer['mask'], layer["front_image"].size), + 
gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", + pattern_image_url=layer['pattern_image_url'], + ) + return front_layer, back_layer + + +def calculate_start_point(keypoint_type, scale, clothes_point, body_point, offset, resize_scale): + """ + Align left + Args: + keypoint_type: string, "waistband" | "shoulder" | "ear_point" + scale: float + clothes_point: dict{'left': [x1, y1, z1], 'right': [x2, y2, z2]} + body_point: dict, containing keypoint data of body figure + + Returns: + start_point: tuple (x', y') + x' = y_body - y1 * scale + offset + y' = x_body - x1 * scale + offset + + """ + side_indicator = f'{keypoint_type}_left' + start_point = ( + int(body_point[side_indicator][1] + offset[1] - int(clothes_point[side_indicator][0]) * scale), # y + int(body_point[side_indicator][0] + offset[0] - int(clothes_point[side_indicator][1]) * scale) # x + ) + return start_point diff --git a/app/service/design_fast/utils/progress.py b/app/service/design_fast/utils/progress.py new file mode 100644 index 0000000..c6a5d74 --- /dev/null +++ b/app/service/design_fast/utils/progress.py @@ -0,0 +1,30 @@ +import logging + +from app.service.design_fast.utils.redis_utils import Redis + +logger = logging.getLogger(__name__) + + +def update_progress(process_id, total): + logger.info(f"{process_id} , {total}") + r = Redis() + progress = r.read(key=process_id) + if progress and total != 1: + if int(progress) <= 100: + r.write(key=process_id, value=int(progress) + int(100 / total)) + else: + r.write(key=process_id, value=99) + return progress + elif total == 1: + r.write(key=process_id, value=100) + return progress + else: + r.write(key=process_id, value=int(100 / total)) + return progress + + +def final_progress(process_id): + r = Redis() + progress = r.read(key=process_id) + r.write(key=process_id, value=100) + return progress diff --git a/app/service/design_test/utils/redis_utils.py b/app/service/design_fast/utils/redis_utils.py similarity index 
100% rename from app/service/design_test/utils/redis_utils.py rename to app/service/design_fast/utils/redis_utils.py diff --git a/app/service/design_test/utils/synthesis_item.py b/app/service/design_fast/utils/synthesis_item.py similarity index 94% rename from app/service/design_test/utils/synthesis_item.py rename to app/service/design_fast/utils/synthesis_item.py index 9527cd2..272ab23 100644 --- a/app/service/design_test/utils/synthesis_item.py +++ b/app/service/design_fast/utils/synthesis_item.py @@ -179,3 +179,19 @@ def synthesis_single(front_image, back_image): object_name = f'result_{generate_uuid()}.png' req = oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) return f"{bucket_name}/{object_name}" + + +def update_base_size_priority(layers, size): + # 计算透明背景图片的宽度 + min_x = min(info['position'][1] for info in layers) + x_list = [] + for info in layers: + if info['image'] is not None: + x_list.append(info['position'][1] + info['image'].width) + max_x = max(x_list) + new_width = max_x - min_x + new_height = 700 + # 更新坐标 + for info in layers: + info['adaptive_position'] = (info['position'][0], info['position'][1] - min_x) + return layers, (new_width, new_height) diff --git a/app/service/design_test/utils/upload_image.py b/app/service/design_fast/utils/upload_image.py similarity index 100% rename from app/service/design_test/utils/upload_image.py rename to app/service/design_fast/utils/upload_image.py diff --git a/app/service/design_test/item.py b/app/service/design_test/item.py deleted file mode 100644 index 5a4667c..0000000 --- a/app/service/design_test/item.py +++ /dev/null @@ -1,281 +0,0 @@ -import time -from concurrent.futures import ThreadPoolExecutor -from pprint import pprint - -import cv2 - -from app.core.config import PRIORITY_DICT -from app.service.design.utils.synthesis_item import synthesis, synthesis_single -from app.service.design_test.pipeline import LoadImage, KeyPoint, Segmentation, Color, PrintPainting, 
Scaling, Split, LoadBodyImage, ContourDetection - - -class BaseItem: - def __init__(self, data, basic): - self.result = data.copy() - self.result['name'] = data['type'].lower() - self.result.pop("type") - self.result.update(basic) - - -class TopItem(BaseItem): - def __init__(self, data, basic, minio_client): - super().__init__(data, basic) - self.top_pipeline = [ - LoadImage(minio_client), - KeyPoint(), - Segmentation(minio_client), - Color(minio_client), - PrintPainting(minio_client), - Scaling(), - Split(minio_client) - ] - - def process(self): - for item in self.top_pipeline: - self.result = item(self.result) - return self.result - - -class BottomItem(BaseItem): - def __init__(self, data, basic, minio_client): - super().__init__(data, basic) - self.bottom_pipeline = [ - LoadImage(minio_client), - KeyPoint(), - ContourDetection(), - # Segmentation(), - Color(minio_client), - PrintPainting(minio_client), - Scaling(), - Split(minio_client) - ] - - def process(self): - for item in self.bottom_pipeline: - self.result = item(self.result) - return self.result - - -class BodyItem(BaseItem): - def __init__(self, data, basic, minio_client): - super().__init__(data, basic) - self.top_pipeline = [ - LoadBodyImage(minio_client), - ] - - def process(self): - for item in self.top_pipeline: - self.result = item(self.result) - return self.result - - -def process_item(item, basic, minio_client): - if item['type'] == "Body": - body_server = BodyItem(data=item, basic=basic, minio_client=minio_client) - item_data = body_server.process() - elif item['type'].lower() in ['blouse', 'outwear', 'dress', 'tops']: - top_server = TopItem(data=item, basic=basic, minio_client=minio_client) - item_data = top_server.process() - else: - bottom_server = BottomItem(data=item, basic=basic, minio_client=minio_client) - item_data = bottom_server.process() - return item_data - - -def calculate_start_point(keypoint_type, scale, clothes_point, body_point, offset, resize_scale): - """ - Align left - Args: 
- keypoint_type: string, "waistband" | "shoulder" | "ear_point" - scale: float - clothes_point: dict{'left': [x1, y1, z1], 'right': [x2, y2, z2]} - body_point: dict, containing keypoint data of body figure - - Returns: - start_point: tuple (x', y') - x' = y_body - y1 * scale + offset - y' = x_body - x1 * scale + offset - - """ - side_indicator = f'{keypoint_type}_left' - start_point = ( - int(body_point[side_indicator][1] + offset[1] - int(clothes_point[side_indicator][0]) * scale), # y - int(body_point[side_indicator][0] + offset[0] - int(clothes_point[side_indicator][1]) * scale) # x - ) - return start_point - - -# 服装图层给数据组装 -def organize_clothing(layer): - # 起始坐标 - start_point = calculate_start_point(layer['keypoint'], layer['scale'], layer['clothes_keypoint'], layer['body_point_test'], layer["offset"], layer["resize_scale"]) - # 前片数据 - front_layer = dict(priority=layer['priority'] if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_front', None), - name=f'{layer["name"].lower()}_front', - image=layer["front_image"], - # mask_image=layer['front_mask_image'], - image_url=layer['front_image_url'], - mask_url=layer['mask_url'], - sacle=layer['scale'], - clothes_keypoint=layer['clothes_keypoint'], - position=start_point, - resize_scale=layer["resize_scale"], - mask=cv2.resize(layer['mask'], layer["front_image"].size), - gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", - pattern_image_url=layer['pattern_image_url'], - pattern_image=layer['pattern_image'] - - ) - # 后片数据 - back_layer = dict(priority=-layer.get("priority", 0) if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_back', None), - name=f'{layer["name"].lower()}_back', - image=layer["back_image"], - # mask_image=layer['back_mask_image'], - image_url=layer['back_image_url'], - mask_url=layer['mask_url'], - sacle=layer['scale'], - clothes_keypoint=layer['clothes_keypoint'], - position=start_point, - 
resize_scale=layer["resize_scale"], - mask=cv2.resize(layer['mask'], layer["front_image"].size), - gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", - pattern_image_url=layer['pattern_image_url'], - ) - return front_layer, back_layer - - -# 模特图层给数据组装 -def organize_body(layer): - body_layer = dict(priority=0, - name=layer["name"].lower(), - image=layer['body_image'], - image_url=layer['body_path'], - mask_image=None, - mask_url=None, - sacle=1, - # mask=layer['body_mask'], - position=(0, 0)) - return body_layer - - -def process_layer(item, layers): - if item['name'] == "mannequin": - body_layer = organize_body(item) - layers.append(body_layer) - return item['body_image'].size - else: - front_layer, back_layer = organize_clothing(item) - layers.append(front_layer) - layers.append(back_layer) - - -def process_object(object_data): - basic = object_data['basic'] - items_response = {'layers': []} - - if basic['single_overall'] == "overall": - item_results = [process_item(item, basic) for item in object_data['items']] - layers = [] - futures = [] - body_size = None - for item in item_results: - futures = [process_layer(item, layers)] - for future in futures: - if future is not None: - body_size = future - layers = sorted(layers, key=lambda s: s.get("priority", float('inf'))) - - layers, new_size = update_base_size_priority(layers, body_size) - - for lay in layers: - items_response['layers'].append({ - 'image_category': lay['name'], - 'position': lay['position'], - 'priority': lay.get("priority", None), - 'resize_scale': lay['resize_scale'] if "resize_scale" in lay.keys() else None, - 'image_size': lay['image'] if lay['image'] is None else lay['image'].size, - 'gradient_string': lay['gradient_string'] if 'gradient_string' in lay.keys() else "", - 'mask_url': lay['mask_url'], - 'image_url': lay['image_url'] if 'image_url' in lay.keys() else None, - 'pattern_image_url': lay['pattern_image_url'] if 'pattern_image_url' in lay.keys() else 
None, - - # 'image': lay['image'], - # 'mask_image': lay['mask_image'], - }) - items_response['synthesis_url'] = synthesis(layers, new_size, basic) - else: - item_results = process_item(object_data['items'][0], basic) - items_response['layers'].append({ - 'image_category': f"{item_results['name']}_front", - 'image_size': item_results['back_image'].size if item_results['back_image'] else None, - 'position': None, - 'priority': 0, - 'image_url': item_results['front_image_url'], - 'mask_url': item_results['mask_url'], - "gradient_string": item_results['gradient_string'] if 'gradient_string' in item_results.keys() else "", - 'pattern_image_url': item_results['pattern_image_url'] if 'pattern_image_url' in item_results.keys() else None, - - }) - items_response['layers'].append({ - 'image_category': f"{item_results['name']}_back", - 'image_size': item_results['front_image'].size if item_results['front_image'] else None, - 'position': None, - 'priority': 0, - 'image_url': item_results['back_image_url'], - 'mask_url': item_results['mask_url'], - "gradient_string": item_results['gradient_string'] if 'gradient_string' in item_results.keys() else "", - 'pattern_image_url': item_results['pattern_image_url'] if 'pattern_image_url' in item_results.keys() else None, - - }) - items_response['synthesis_url'] = synthesis_single(item_results['front_image'], item_results['back_image']) - return items_response - - -def update_base_size_priority(layers, size): - # 计算透明背景图片的宽度 - min_x = min(info['position'][1] for info in layers) - x_list = [] - for info in layers: - if info['image'] is not None: - x_list.append(info['position'][1] + info['image'].width) - max_x = max(x_list) - new_width = max_x - min_x - new_height = 700 - # 更新坐标 - for info in layers: - info['adaptive_position'] = (info['position'][0], info['position'][1] - min_x) - return layers, (new_width, new_height) - - -def run(): - object = {"objects": [{"basic": {"body_point_test": {"waistband_right": [199, 239], 
"hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 116441, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/outwear_p3139.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 81518, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0628000071.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, 
"scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 65687, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/outwear_746.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 90051, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0628000864.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", 
"image_id": 67420, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0825001648.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 90354, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0628001300.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 67420, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0825001648.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], 
"element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 101477, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/903000063.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}], "process_id": "3615898424593104"} - - object_result = {} - with ThreadPoolExecutor() as executor: - results = list(executor.map(process_object, object['objects'])) - for i, result in enumerate(results): - object_result[i] = result - - pprint(object_result) - - -if __name__ == '__main__': - start_time = time.time() - run() - print(time.time() - start_time) From 63e70add266bfaa4abf5c703c936d49c46ab4b8a Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 25 Sep 2024 15:38:44 +0800 Subject: [PATCH 081/103] =?UTF-8?q?feat=20=20=E4=BB=A3=E7=A0=81=E6=95=B4?= =?UTF-8?q?=E7=90=86=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design/design_batch/items/pipeline/segmentation.py | 2 +- 
app/service/design/items/pipelines/segmentation.py | 2 +- app/service/design_fast/pipeline/segmentation.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/app/service/design/design_batch/items/pipeline/segmentation.py b/app/service/design/design_batch/items/pipeline/segmentation.py index 2a6d760..6fc81e3 100644 --- a/app/service/design/design_batch/items/pipeline/segmentation.py +++ b/app/service/design/design_batch/items/pipeline/segmentation.py @@ -48,7 +48,7 @@ class Segmentation: file_path = f"{SEG_CACHE_PATH}{image_id}.npy" try: np.save(file_path, seg_result) - logging.info("保存成功", os.path.abspath(file_path)) + logger.info(f"保存成功 :{os.path.abspath(file_path)}") except Exception as e: logger.error(f"保存失败: {e}") diff --git a/app/service/design/items/pipelines/segmentation.py b/app/service/design/items/pipelines/segmentation.py index 0fe028e..19eb1fd 100644 --- a/app/service/design/items/pipelines/segmentation.py +++ b/app/service/design/items/pipelines/segmentation.py @@ -53,7 +53,7 @@ class Segmentation(object): file_path = f"{SEG_CACHE_PATH}{image_id}.npy" try: np.save(file_path, seg_result) - logger.info("保存成功", os.path.abspath(file_path)) + logger.info(f"保存成功 :{os.path.abspath(file_path)}") except Exception as e: logger.error(f"保存失败: {e}") diff --git a/app/service/design_fast/pipeline/segmentation.py b/app/service/design_fast/pipeline/segmentation.py index 3897129..802487f 100644 --- a/app/service/design_fast/pipeline/segmentation.py +++ b/app/service/design_fast/pipeline/segmentation.py @@ -51,7 +51,7 @@ class Segmentation: file_path = f"{SEG_CACHE_PATH}{image_id}.npy" try: np.save(file_path, seg_result) - logger.info("保存成功", os.path.abspath(file_path)) + logger.info(f"保存成功 :{os.path.abspath(file_path)}") except Exception as e: logger.error(f"保存失败: {e}") From a53992320710fb2bb82f8dde9210a9dff42f4548 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Wed, 25 Sep 2024 15:54:39 +0800 Subject: [PATCH 082/103] 
=?UTF-8?q?feat=20=20=E4=BB=A3=E7=A0=81=E6=95=B4?= =?UTF-8?q?=E7=90=86=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_fast/utils/progress.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/service/design_fast/utils/progress.py b/app/service/design_fast/utils/progress.py index c6a5d74..0f2c9cf 100644 --- a/app/service/design_fast/utils/progress.py +++ b/app/service/design_fast/utils/progress.py @@ -6,7 +6,7 @@ logger = logging.getLogger(__name__) def update_progress(process_id, total): - logger.info(f"{process_id} , {total}") + # logger.info(f"{process_id} , {total}") r = Redis() progress = r.read(key=process_id) if progress and total != 1: From 04b15aa200d017d8dabf14bffaf7db52d5002bd7 Mon Sep 17 00:00:00 2001 From: alab Date: Thu, 26 Sep 2024 06:09:05 +0000 Subject: [PATCH 083/103] =?UTF-8?q?design=20=20batch=20=20=E4=BB=A3?= =?UTF-8?q?=E7=A0=81=E6=95=B4=E7=90=86?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_design.py | 13 +- app/schemas/design.py | 2 +- .../design/design_batch/items/__init__.py | 0 .../design_batch/items/utils/__init__.py | 0 app/service/design/utils/__init__.py | 0 .../design_batch/design_batch_celery.py | 126 ++++++++++++++ app/service/design_batch/item.py | 61 +++++++ .../pipeline/__init__.py | 0 .../items => design_batch}/pipeline/color.py | 10 +- .../pipeline/contour_detection.py | 0 .../pipeline/keypoint.py | 2 +- .../pipeline/loading.py | 24 ++- .../pipeline/print_painting.py | 15 +- .../items => design_batch}/pipeline/scale.py | 0 .../pipeline/segmentation.py | 13 +- .../items => design_batch}/pipeline/split.py | 19 +- app/service/design_batch/service.py | 12 ++ app/service/design_batch/test.py | 162 ++++++++++++++++++ app/service/design_batch/utils/MQ.py | 17 ++ .../core => design_batch/utils}/__init__.py | 0 .../utils/conversion_image.py | 0 .../utils/design_ensemble.py | 0 
app/service/design_batch/utils/organize.py | 77 +++++++++ app/service/design_batch/utils/progress.py | 30 ++++ .../utils/redis_utils.py | 0 app/service/design_batch/utils/save_json.py | 13 ++ .../utils/synthesis_item.py | 16 ++ .../utils/upload_image.py | 8 +- app/service/design_fast/pipeline/keypoint.py | 2 +- .../design_fast/pipeline/segmentation.py | 2 +- app/service/design_fast/pipeline/split.py | 2 +- app/service/design_pre_processing/service.py | 2 +- app/service/utils/oss_client.py | 18 +- 33 files changed, 585 insertions(+), 61 deletions(-) delete mode 100644 app/service/design/design_batch/items/__init__.py delete mode 100644 app/service/design/design_batch/items/utils/__init__.py delete mode 100644 app/service/design/utils/__init__.py create mode 100644 app/service/design_batch/design_batch_celery.py create mode 100644 app/service/design_batch/item.py rename app/service/{design/design_batch/items => design_batch}/pipeline/__init__.py (100%) rename app/service/{design/design_batch/items => design_batch}/pipeline/color.py (88%) rename app/service/{design/design_batch/items => design_batch}/pipeline/contour_detection.py (100%) rename app/service/{design/design_batch/items => design_batch}/pipeline/keypoint.py (98%) rename app/service/{design/design_batch/items => design_batch}/pipeline/loading.py (74%) rename app/service/{design/design_batch/items => design_batch}/pipeline/print_painting.py (98%) rename app/service/{design/design_batch/items => design_batch}/pipeline/scale.py (100%) rename app/service/{design/design_batch/items => design_batch}/pipeline/segmentation.py (82%) rename app/service/{design/design_batch/items => design_batch}/pipeline/split.py (79%) create mode 100644 app/service/design_batch/service.py create mode 100644 app/service/design_batch/test.py create mode 100644 app/service/design_batch/utils/MQ.py rename app/service/{design/core => design_batch/utils}/__init__.py (100%) rename app/service/{design/design_batch/items => 
design_batch}/utils/conversion_image.py (100%) rename app/service/{design/design_batch/items => design_batch}/utils/design_ensemble.py (100%) create mode 100644 app/service/design_batch/utils/organize.py create mode 100644 app/service/design_batch/utils/progress.py rename app/service/{design/design_batch/items => design_batch}/utils/redis_utils.py (100%) create mode 100644 app/service/design_batch/utils/save_json.py rename app/service/{design/design_batch/items => design_batch}/utils/synthesis_item.py (94%) rename app/service/{design => design_batch}/utils/upload_image.py (69%) diff --git a/app/api/api_design.py b/app/api/api_design.py index 2720e36..389e43e 100644 --- a/app/api/api_design.py +++ b/app/api/api_design.py @@ -6,8 +6,9 @@ from fastapi import APIRouter, HTTPException, UploadFile, File, Form from app.schemas.design import DesignModel, DesignProgressModel, ModelProgressModel, DBGConfigModel from app.schemas.response_template import ResponseModel -from app.service.design.model_process_service import model_transpose -from app.service.design.service_design_batch_generate import start_design_batch_generate +from app.service.design_batch.service import start_design_batch_generate +# from app.service.design.model_process_service import model_transpose +# from app.service.design.service_design_batch_generate import start_design_batch_generate from app.service.design_fast.design_generate import design_generate from app.service.design_fast.utils.redis_utils import Redis @@ -236,7 +237,7 @@ def model_process(request_data: ModelProgressModel): try: logger.info(f"model_process request item is : @@@@@@:{json.dumps(request_data.dict())}") - data = model_transpose(image_path=request_data.model_path) + # data = model_transpose(image_path=request_data.model_path) logger.info(f"model_process response @@@@@@:{json.dumps(data)}") except Exception as e: logger.warning(f"model_process Run Exception @@@@@@:{e}") @@ -251,20 +252,18 @@ def model_process(request_data: 
ModelProgressModel): async def design(file: UploadFile = File(...), tasks_id: str = Form(...), user_id: str = Form(...), - priority: int = Form(...), + file_name: str = Form(...), total: int = Form(...) ): - # file_content = await file.read() dbg_config = DBGConfigModel( tasks_id=tasks_id, user_id=user_id, - priority=priority, + file_name=file_name, total=total ) contents = await file.read() file_name = file.filename await save_request_file(contents, file_name) - return await start_design_batch_generate(dbg_config, contents) diff --git a/app/schemas/design.py b/app/schemas/design.py index 763e0a0..7ebd8e6 100644 --- a/app/schemas/design.py +++ b/app/schemas/design.py @@ -17,5 +17,5 @@ class ModelProgressModel(BaseModel): class DBGConfigModel(BaseModel): tasks_id: str user_id: str - priority: int + file_name: str total: int diff --git a/app/service/design/design_batch/items/__init__.py b/app/service/design/design_batch/items/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/app/service/design/design_batch/items/utils/__init__.py b/app/service/design/design_batch/items/utils/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/app/service/design/utils/__init__.py b/app/service/design/utils/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/app/service/design_batch/design_batch_celery.py b/app/service/design_batch/design_batch_celery.py new file mode 100644 index 0000000..3f12862 --- /dev/null +++ b/app/service/design_batch/design_batch_celery.py @@ -0,0 +1,126 @@ +import logging +import threading + +from celery import Celery +from minio import Minio + +from app.core.config import * +from app.service.design_batch.item import BodyItem, TopItem, BottomItem +from app.service.design_batch.utils.MQ import publish_status +from app.service.design_batch.utils.organize import organize_body, organize_clothing +from app.service.design_batch.utils.save_json import oss_upload_json +from 
app.service.design_batch.utils.synthesis_item import update_base_size_priority, synthesis, synthesis_single + +id_lock = threading.Lock() +celery_app = Celery('tasks', broker='amqp://guest:guest@10.1.2.213:5672//', backend='rpc://') +celery_app.conf.worker_log_format = '%(asctime)s %(filename)s [line:%(lineno)d] %(levelname)s %(message)s' +celery_app.conf.worker_hijack_root_logger = False +logging.getLogger('pika').setLevel(logging.WARNING) +logger = logging.getLogger() +minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + + +def process_item(item, basic): + # 处理project中单个item + if item['type'] == "Body": + body_server = BodyItem(data=item, basic=basic, minio_client=minio_client) + item_data = body_server.process() + elif item['type'].lower() in ['blouse', 'outwear', 'dress', 'tops']: + top_server = TopItem(data=item, basic=basic, minio_client=minio_client) + item_data = top_server.process() + else: + bottom_server = BottomItem(data=item, basic=basic, minio_client=minio_client) + item_data = bottom_server.process() + return item_data + + +def process_layer(item, layers): + # item处理结束后 对图层数据组装 + if item['name'] == "mannequin": + body_layer = organize_body(item) + layers.append(body_layer) + return item['body_image'].size + else: + front_layer, back_layer = organize_clothing(item) + layers.append(front_layer) + layers.append(back_layer) + + +@celery_app.task +def batch_design(objects_data, tasks_id, json_name): + object_response = [] + threads = [] + active_threads = 0 + lock = threading.Lock() + + def process_object(step, object): + nonlocal active_threads + basic = object['basic'] + items_response = {'layers': []} + if basic['single_overall'] == "overall": + item_results = [] + for item in object['items']: + item_results.append(process_item(item, basic)) + layers = [] + body_size = None + for item in item_results: + body_size = process_layer(item, layers) + layers = sorted(layers, key=lambda s: s.get("priority", 
float('inf'))) + + layers, new_size = update_base_size_priority(layers, body_size) + + for lay in layers: + items_response['layers'].append({ + 'image_category': lay['name'], + 'position': lay['position'], + 'priority': lay.get("priority", None), + 'resize_scale': lay['resize_scale'] if "resize_scale" in lay.keys() else None, + 'image_size': lay['image'] if lay['image'] is None else lay['image'].size, + 'gradient_string': lay['gradient_string'] if 'gradient_string' in lay.keys() else "", + 'mask_url': lay['mask_url'], + 'image_url': lay['image_url'] if 'image_url' in lay.keys() else None, + 'pattern_image_url': lay['pattern_image_url'] if 'pattern_image_url' in lay.keys() else None, + }) + items_response['synthesis_url'] = synthesis(layers, new_size, basic) + else: + item_result = process_item(object['items'][0], basic) + items_response['layers'].append({ + 'image_category': f"{item_result['name']}_front", + 'image_size': item_result['back_image'].size if item_result['back_image'] else None, + 'position': None, + 'priority': 0, + 'image_url': item_result['front_image_url'], + 'mask_url': item_result['mask_url'], + "gradient_string": item_result['gradient_string'] if 'gradient_string' in item_result.keys() else "", + 'pattern_image_url': item_result['pattern_image_url'] if 'pattern_image_url' in item_result.keys() else None, + }) + items_response['layers'].append({ + 'image_category': f"{item_result['name']}_back", + 'image_size': item_result['front_image'].size if item_result['front_image'] else None, + 'position': None, + 'priority': 0, + 'image_url': item_result['back_image_url'], + 'mask_url': item_result['mask_url'], + "gradient_string": item_result['gradient_string'] if 'gradient_string' in item_result.keys() else "", + 'pattern_image_url': item_result['pattern_image_url'] if 'pattern_image_url' in item_result.keys() else None, + }) + items_response['synthesis_url'] = synthesis_single(item_result['front_image'], item_result['back_image']) + + with lock: + 
object_response.append(items_response) + publish_status(tasks_id, step + 1, items_response) + active_threads -= 1 + + for step, object in enumerate(objects_data): + t = threading.Thread(target=process_object, args=(step, object)) + threads.append(t) + t.start() + with lock: + active_threads += 1 + + for t in threads: + t.join() + + oss_upload_json(minio_client, object_response, json_name) + publish_status(tasks_id, "ok", json_name) + return object_response diff --git a/app/service/design_batch/item.py b/app/service/design_batch/item.py new file mode 100644 index 0000000..cad1488 --- /dev/null +++ b/app/service/design_batch/item.py @@ -0,0 +1,61 @@ +from app.service.design_batch.pipeline import * + + +class BaseItem: + def __init__(self, data, basic): + self.result = data.copy() + self.result['name'] = data['type'].lower() + self.result.pop("type") + self.result.update(basic) + + +class TopItem(BaseItem): + def __init__(self, data, basic, minio_client): + super().__init__(data, basic) + self.top_pipeline = [ + LoadImage(minio_client), + KeyPoint(), + Segmentation(minio_client), + Color(minio_client), + PrintPainting(minio_client), + Scaling(), + Split(minio_client) + ] + + def process(self): + for item in self.top_pipeline: + self.result = item(self.result) + return self.result + + +class BottomItem(BaseItem): + def __init__(self, data, basic, minio_client): + super().__init__(data, basic) + self.bottom_pipeline = [ + LoadImage(minio_client), + KeyPoint(), + ContourDetection(), + # Segmentation(), + Color(minio_client), + PrintPainting(minio_client), + Scaling(), + Split(minio_client) + ] + + def process(self): + for item in self.bottom_pipeline: + self.result = item(self.result) + return self.result + + +class BodyItem(BaseItem): + def __init__(self, data, basic, minio_client): + super().__init__(data, basic) + self.top_pipeline = [ + LoadBodyImage(minio_client), + ] + + def process(self): + for item in self.top_pipeline: + self.result = item(self.result) + return 
self.result diff --git a/app/service/design/design_batch/items/pipeline/__init__.py b/app/service/design_batch/pipeline/__init__.py similarity index 100% rename from app/service/design/design_batch/items/pipeline/__init__.py rename to app/service/design_batch/pipeline/__init__.py diff --git a/app/service/design/design_batch/items/pipeline/color.py b/app/service/design_batch/pipeline/color.py similarity index 88% rename from app/service/design/design_batch/items/pipeline/color.py rename to app/service/design_batch/pipeline/color.py index bc3676f..546c671 100644 --- a/app/service/design/design_batch/items/pipeline/color.py +++ b/app/service/design_batch/pipeline/color.py @@ -3,12 +3,15 @@ import logging import cv2 import numpy as np -from app.service.utils.oss_client import oss_get_image +from app.service.utils.new_oss_client import oss_get_image logger = logging.getLogger() class Color: + def __init__(self, minio_client): + self.minio_client = minio_client + def __call__(self, result): dim_image_h, dim_image_w = result['image'].shape[0:2] if "gradient" in result.keys() and result['gradient'] != "": @@ -33,10 +36,9 @@ class Color: result['alpha'] = 100 / 255.0 return result - @staticmethod - def get_gradient(bucket_name, object_name): + def get_gradient(self, bucket_name, object_name): # 获取渐变色图案 - image = oss_get_image(bucket=bucket_name, object_name=object_name, data_type="cv2") + image = oss_get_image(oss_client=self.minio_client, bucket=bucket_name, object_name=object_name, data_type="cv2") if image.shape[2] == 4: image = cv2.cvtColor(image, cv2.COLOR_BGRA2BGR) return image diff --git a/app/service/design/design_batch/items/pipeline/contour_detection.py b/app/service/design_batch/pipeline/contour_detection.py similarity index 100% rename from app/service/design/design_batch/items/pipeline/contour_detection.py rename to app/service/design_batch/pipeline/contour_detection.py diff --git a/app/service/design/design_batch/items/pipeline/keypoint.py 
b/app/service/design_batch/pipeline/keypoint.py similarity index 98% rename from app/service/design/design_batch/items/pipeline/keypoint.py rename to app/service/design_batch/pipeline/keypoint.py index 243cf4e..313a613 100644 --- a/app/service/design/design_batch/items/pipeline/keypoint.py +++ b/app/service/design_batch/pipeline/keypoint.py @@ -4,7 +4,7 @@ import numpy as np from pymilvus import MilvusClient from app.core.config import * -from app.service.design.utils.design_ensemble import get_keypoint_result +from app.service.design_batch.utils.design_ensemble import get_keypoint_result logger = logging.getLogger(__name__) diff --git a/app/service/design/design_batch/items/pipeline/loading.py b/app/service/design_batch/pipeline/loading.py similarity index 74% rename from app/service/design/design_batch/items/pipeline/loading.py rename to app/service/design_batch/pipeline/loading.py index 8786db0..0ce0dfa 100644 --- a/app/service/design/design_batch/items/pipeline/loading.py +++ b/app/service/design_batch/pipeline/loading.py @@ -1,24 +1,37 @@ -import cv2 +import io +import logging -from app.service.utils.oss_client import oss_get_image +import cv2 +import numpy as np +from PIL import Image + +from app.service.utils.new_oss_client import oss_get_image + +logger = logging.getLogger() class LoadBodyImage: name = "LoadBodyImage" + def __init__(self, minio_client): + self.minio_client = minio_client + @classmethod def get_name(cls): return cls.name def __call__(self, result): result["name"] = "mannequin" - result['body_image'] = oss_get_image(bucket=result['body_path'].split("/", 1)[0], object_name=result['body_path'].split("/", 1)[1], data_type="PIL") + result['body_image'] = oss_get_image(oss_client=self.minio_client, bucket=result['body_path'].split("/", 1)[0], object_name=result['body_path'].split("/", 1)[1], data_type="PIL") return result class LoadImage: name = "LoadImage" + def __init__(self, minio_client): + self.minio_client = minio_client + @classmethod def 
get_name(cls): return cls.name @@ -31,10 +44,9 @@ class LoadImage: result['ori_shape'] = result['image'].shape return result - @staticmethod - def read_image(image_path): + def read_image(self, image_path): image_mask = None - image = oss_get_image(bucket=image_path.split("/", 1)[0], object_name=image_path.split("/", 1)[1], data_type="cv2") + image = oss_get_image(oss_client=self.minio_client, bucket=image_path.split("/", 1)[0], object_name=image_path.split("/", 1)[1], data_type="cv2") if len(image.shape) == 2: image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB) if image.shape[2] == 4: # 如果是四通道 mask diff --git a/app/service/design/design_batch/items/pipeline/print_painting.py b/app/service/design_batch/pipeline/print_painting.py similarity index 98% rename from app/service/design/design_batch/items/pipeline/print_painting.py rename to app/service/design_batch/pipeline/print_painting.py index a620872..6fe40d8 100644 --- a/app/service/design/design_batch/items/pipeline/print_painting.py +++ b/app/service/design_batch/pipeline/print_painting.py @@ -4,10 +4,13 @@ import cv2 import numpy as np from PIL import Image -from app.service.utils.oss_client import oss_get_image +from app.service.utils.new_oss_client import oss_get_image class PrintPainting: + def __init__(self, minio_client): + self.minio_client = minio_client + def __call__(self, result): single_print = result['print']['single'] overall_print = result['print']['overall'] @@ -356,8 +359,7 @@ class PrintPainting: print_image = cv2.add(img_bg, img_fg) return print_image - @staticmethod - def get_print(print_dict): + def get_print(self, print_dict): if 'print_scale_list' not in print_dict.keys() or print_dict['print_scale_list'][0] < 0.3: print_dict['scale'] = 0.3 else: @@ -365,7 +367,7 @@ class PrintPainting: bucket_name = print_dict['print_path_list'][0].split("/", 1)[0] object_name = print_dict['print_path_list'][0].split("/", 1)[1] - image = oss_get_image(bucket=bucket_name, object_name=object_name, 
data_type="PIL") + image = oss_get_image(oss_client=self.minio_client, bucket=bucket_name, object_name=object_name, data_type="PIL") # 判断图片格式,如果是RGBA 则贴在一张纯白图片上 防止透明转黑 if image.mode == "RGBA": new_background = Image.new('RGB', image.size, (255, 255, 255)) @@ -480,9 +482,8 @@ class PrintPainting: return img_rotated - @staticmethod - def read_image(image_url): - image = oss_get_image(bucket=image_url.split("/", 1)[0], object_name=image_url.split("/", 1)[1], data_type="cv2") + def read_image(self, image_url): + image = oss_get_image(oss_client=self.minio_client, bucket=image_url.split("/", 1)[0], object_name=image_url.split("/", 1)[1], data_type="cv2") if image.shape[2] == 4: image_rgb = cv2.cvtColor(image, cv2.COLOR_BGRA2RGBA) image = Image.fromarray(image_rgb) diff --git a/app/service/design/design_batch/items/pipeline/scale.py b/app/service/design_batch/pipeline/scale.py similarity index 100% rename from app/service/design/design_batch/items/pipeline/scale.py rename to app/service/design_batch/pipeline/scale.py diff --git a/app/service/design/design_batch/items/pipeline/segmentation.py b/app/service/design_batch/pipeline/segmentation.py similarity index 82% rename from app/service/design/design_batch/items/pipeline/segmentation.py rename to app/service/design_batch/pipeline/segmentation.py index 6fc81e3..cba3446 100644 --- a/app/service/design/design_batch/items/pipeline/segmentation.py +++ b/app/service/design_batch/pipeline/segmentation.py @@ -5,16 +5,19 @@ import cv2 import numpy as np from app.core.config import SEG_CACHE_PATH -from app.service.design.utils.design_ensemble import get_seg_result -from app.service.utils.oss_client import oss_get_image +from app.service.design_batch.utils.design_ensemble import get_seg_result +from app.service.utils.new_oss_client import oss_get_image logger = logging.getLogger() class Segmentation: + def __init__(self, minio_client): + self.minio_client = minio_client + def __call__(self, result): if "seg_mask_url" in 
result.keys() and result['seg_mask_url'] != "": - seg_mask = oss_get_image(bucket=result['seg_mask_url'].split('/')[0], object_name=result['seg_mask_url'][result['seg_mask_url'].find('/') + 1:], data_type="cv2") + seg_mask = oss_get_image(oss_client=self.minio_client, bucket=result['seg_mask_url'].split('/')[0], object_name=result['seg_mask_url'][result['seg_mask_url'].find('/') + 1:], data_type="cv2") seg_mask = cv2.resize(seg_mask, (result['img_shape'][1], result['img_shape'][0]), interpolation=cv2.INTER_NEAREST) # 转换颜色空间为 RGB(OpenCV 默认是 BGR) image_rgb = cv2.cvtColor(seg_mask, cv2.COLOR_BGR2RGB) @@ -45,7 +48,7 @@ class Segmentation: @staticmethod def save_seg_result(seg_result, image_id): - file_path = f"{SEG_CACHE_PATH}{image_id}.npy" + file_path = f"seg_cache/{image_id}.npy" try: np.save(file_path, seg_result) logger.info(f"保存成功 :{os.path.abspath(file_path)}") @@ -54,7 +57,7 @@ class Segmentation: @staticmethod def load_seg_result(image_id): - file_path = f"{SEG_CACHE_PATH}{image_id}.npy" + file_path = f"seg_cache/{image_id}.npy" logger.info(f"load seg file name is :{SEG_CACHE_PATH}{image_id}.npy") try: seg_result = np.load(file_path) diff --git a/app/service/design/design_batch/items/pipeline/split.py b/app/service/design_batch/pipeline/split.py similarity index 79% rename from app/service/design/design_batch/items/pipeline/split.py rename to app/service/design_batch/pipeline/split.py index 2fba315..5dbcef5 100644 --- a/app/service/design/design_batch/items/pipeline/split.py +++ b/app/service/design_batch/pipeline/split.py @@ -7,13 +7,16 @@ from PIL import Image from cv2 import cvtColor, COLOR_BGR2RGBA from app.core.config import AIDA_CLOTHING -from app.service.design.utils.conversion_image import rgb_to_rgba -from app.service.design.utils.upload_image import upload_png_mask +from app.service.design_batch.utils.conversion_image import rgb_to_rgba +from app.service.design_batch.utils.upload_image import upload_png_mask from app.service.utils.generate_uuid 
import generate_uuid -from app.service.utils.oss_client import oss_upload_image +from app.service.utils.new_oss_client import oss_upload_image class Split(object): + def __init__(self, minio_client): + self.minio_client = minio_client + def __call__(self, result): try: @@ -27,7 +30,7 @@ class Split(object): front_mask = cv2.resize(front_mask, new_size) result_front_image[front_mask != 0] = rgba_image[front_mask != 0] result_front_image_pil = Image.fromarray(cvtColor(result_front_image, COLOR_BGR2RGBA)) - result['front_image'], result["front_image_url"], _ = upload_png_mask(result_front_image_pil, f'{generate_uuid()}', mask=None) + result['front_image'], result["front_image_url"], _ = upload_png_mask(self.minio_client, result_front_image_pil, f'{generate_uuid()}', mask=None) height, width = front_mask.shape mask_image = np.zeros((height, width, 3)) @@ -38,7 +41,7 @@ class Split(object): back_mask = cv2.resize(back_mask, new_size) result_back_image[back_mask != 0] = rgba_image[back_mask != 0] result_back_image_pil = Image.fromarray(cvtColor(result_back_image, COLOR_BGR2RGBA)) - result['back_image'], result["back_image_url"], _ = upload_png_mask(result_back_image_pil, f'{generate_uuid()}', mask=None) + result['back_image'], result["back_image_url"], _ = upload_png_mask(self.minio_client, result_back_image_pil, f'{generate_uuid()}', mask=None) mask_image[back_mask != 0] = [0, 255, 0] rbga_mask = rgb_to_rgba(mask_image, front_mask + back_mask) @@ -47,7 +50,7 @@ class Split(object): mask_pil.save(image_data, format='PNG') image_data.seek(0) image_bytes = image_data.read() - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", image_bytes=image_bytes) + req = oss_upload_image(oss_client=self.minio_client, bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", image_bytes=image_bytes) result['mask_url'] = req.bucket_name + "/" + req.object_name else: rbga_mask = rgb_to_rgba(mask_image, front_mask) @@ -56,7 +59,7 @@ 
class Split(object): mask_pil.save(image_data, format='PNG') image_data.seek(0) image_bytes = image_data.read() - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", image_bytes=image_bytes) + req = oss_upload_image(oss_client=self.minio_client, bucket=AIDA_CLOTHING, object_name=f"mask/mask_{generate_uuid()}.png", image_bytes=image_bytes) result['mask_url'] = req.bucket_name + "/" + req.object_name result['back_image'] = None result["back_image_url"] = None @@ -65,7 +68,7 @@ class Split(object): # 创建中间图层 result_pattern_image_rgba = rgb_to_rgba(result['pattern_image'], result['mask']) result_pattern_image_pil = Image.fromarray(cvtColor(result_pattern_image_rgba, COLOR_BGR2RGBA)) - result['pattern_image'], result['pattern_image_url'], _ = upload_png_mask(result_pattern_image_pil, f'{generate_uuid()}') + result['pattern_image'], result['pattern_image_url'], _ = upload_png_mask(self.minio_client, result_pattern_image_pil, f'{generate_uuid()}') return result except Exception as e: logging.warning(f"split runtime exception : {e} image_id : {result['image_id']}") diff --git a/app/service/design_batch/service.py b/app/service/design_batch/service.py new file mode 100644 index 0000000..db8246e --- /dev/null +++ b/app/service/design_batch/service.py @@ -0,0 +1,12 @@ +import json + +import pika +from app.service.design_batch.design_batch_celery import batch_design +from app.service.design_batch.utils.MQ import publish_status + + +async def start_design_batch_generate(data, file): + generate_clothes_task = batch_design.delay(json.loads(file.decode())['objects'], data.total, data.tasks_id) + print(generate_clothes_task) + publish_status(data.tasks_id, "0/100", "") + return {"task_id": data.tasks_id} diff --git a/app/service/design_batch/test.py b/app/service/design_batch/test.py new file mode 100644 index 0000000..6b94bc6 --- /dev/null +++ b/app/service/design_batch/test.py @@ -0,0 +1,162 @@ +from 
app.service.design_batch.design_batch_celery import batch_design + +if __name__ == '__main__': + data = { + "objects": [ + { + "basic": { + "body_point_test": { + "waistband_right": [ + 200, + 241 + ], + "hand_point_right": [ + 223, + 297 + ], + "waistband_left": [ + 112, + 241 + ], + "hand_point_left": [ + 92, + 305 + ], + "shoulder_left": [ + 99, + 116 + ], + "shoulder_right": [ + 215, + 116 + ] + }, + "layer_order": True, + "scale_bag": 0.7, + "scale_earrings": 0.16, + "self_template": True, + "single_overall": "overall", + "switch_category": "" + }, + "items": [ + { + "businessId": 270372, + "color": "30 28 28", + "image_id": 69780, + "offset": [ + 0, + 0 + ], + "path": "aida-sys-image/images/female/trousers/0825000630.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "priority": 10, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Trousers" + }, + { + "businessId": 270373, + "color": "30 28 28", + "image_id": 98243, + "offset": [ + 0, + 0 + ], + "path": "aida-sys-image/images/female/blouse/0902003811.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "priority": 11, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Blouse" + }, + { + "businessId": 270374, + "color": "172 68 68", + "image_id": 98244, + "offset": [ + 0, + 0 + ], + "path": "aida-sys-image/images/female/outwear/0825000410.jpg", + "print": { + "element": { + 
"element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "priority": 12, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Outwear" + }, + { + "body_path": "aida-sys-image/models/female/5bdfe7ca-64eb-44e4-b03d-8e517520c795.png", + "image_id": 96090, + "type": "Body" + } + ] + } + ], + "process_id": "83" + } + task_id = 1 + json_name = "test.json" + batch_design.delay(data['objects'], task_id, json_name) diff --git a/app/service/design_batch/utils/MQ.py b/app/service/design_batch/utils/MQ.py new file mode 100644 index 0000000..50e98c2 --- /dev/null +++ b/app/service/design_batch/utils/MQ.py @@ -0,0 +1,17 @@ +import json + +import pika + + +def publish_status(task_id, progress, result): + connection = pika.BlockingConnection(pika.ConnectionParameters('10.1.2.213')) + channel = connection.channel() + channel.queue_declare(queue='DesignBatch', durable=True) + message = {'task_id': task_id, 'progress': progress, "result": result} + channel.basic_publish(exchange='', + routing_key='DesignBatch', + body=json.dumps(message), + properties=pika.BasicProperties( + delivery_mode=2, + )) + connection.close() diff --git a/app/service/design/core/__init__.py b/app/service/design_batch/utils/__init__.py similarity index 100% rename from app/service/design/core/__init__.py rename to app/service/design_batch/utils/__init__.py diff --git a/app/service/design/design_batch/items/utils/conversion_image.py b/app/service/design_batch/utils/conversion_image.py similarity index 100% rename from app/service/design/design_batch/items/utils/conversion_image.py rename to app/service/design_batch/utils/conversion_image.py diff --git a/app/service/design/design_batch/items/utils/design_ensemble.py 
b/app/service/design_batch/utils/design_ensemble.py similarity index 100% rename from app/service/design/design_batch/items/utils/design_ensemble.py rename to app/service/design_batch/utils/design_ensemble.py diff --git a/app/service/design_batch/utils/organize.py b/app/service/design_batch/utils/organize.py new file mode 100644 index 0000000..8190de0 --- /dev/null +++ b/app/service/design_batch/utils/organize.py @@ -0,0 +1,77 @@ +import cv2 + +from app.core.config import PRIORITY_DICT + + +def organize_body(layer): + body_layer = dict(priority=0, + name=layer["name"].lower(), + image=layer['body_image'], + image_url=layer['body_path'], + mask_image=None, + mask_url=None, + sacle=1, + # mask=layer['body_mask'], + position=(0, 0)) + return body_layer + + +def organize_clothing(layer): + # 起始坐标 + start_point = calculate_start_point(layer['keypoint'], layer['scale'], layer['clothes_keypoint'], layer['body_point_test'], layer["offset"], layer["resize_scale"]) + # 前片数据 + front_layer = dict(priority=layer['priority'] if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_front', None), + name=f'{layer["name"].lower()}_front', + image=layer["front_image"], + # mask_image=layer['front_mask_image'], + image_url=layer['front_image_url'], + mask_url=layer['mask_url'], + sacle=layer['scale'], + clothes_keypoint=layer['clothes_keypoint'], + position=start_point, + resize_scale=layer["resize_scale"], + mask=cv2.resize(layer['mask'], layer["front_image"].size), + gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", + pattern_image_url=layer['pattern_image_url'], + pattern_image=layer['pattern_image'] + + ) + # 后片数据 + back_layer = dict(priority=-layer.get("priority", 0) if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_back', None), + name=f'{layer["name"].lower()}_back', + image=layer["back_image"], + # mask_image=layer['back_mask_image'], + image_url=layer['back_image_url'], + 
mask_url=layer['mask_url'], + sacle=layer['scale'], + clothes_keypoint=layer['clothes_keypoint'], + position=start_point, + resize_scale=layer["resize_scale"], + mask=cv2.resize(layer['mask'], layer["front_image"].size), + gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", + pattern_image_url=layer['pattern_image_url'], + ) + return front_layer, back_layer + + +def calculate_start_point(keypoint_type, scale, clothes_point, body_point, offset, resize_scale): + """ + Align left + Args: + keypoint_type: string, "waistband" | "shoulder" | "ear_point" + scale: float + clothes_point: dict{'left': [x1, y1, z1], 'right': [x2, y2, z2]} + body_point: dict, containing keypoint data of body figure + + Returns: + start_point: tuple (x', y') + x' = y_body - y1 * scale + offset + y' = x_body - x1 * scale + offset + + """ + side_indicator = f'{keypoint_type}_left' + start_point = ( + int(body_point[side_indicator][1] + offset[1] - int(clothes_point[side_indicator][0]) * scale), # y + int(body_point[side_indicator][0] + offset[0] - int(clothes_point[side_indicator][1]) * scale) # x + ) + return start_point diff --git a/app/service/design_batch/utils/progress.py b/app/service/design_batch/utils/progress.py new file mode 100644 index 0000000..0f2c9cf --- /dev/null +++ b/app/service/design_batch/utils/progress.py @@ -0,0 +1,30 @@ +import logging + +from app.service.design_fast.utils.redis_utils import Redis + +logger = logging.getLogger(__name__) + + +def update_progress(process_id, total): + # logger.info(f"{process_id} , {total}") + r = Redis() + progress = r.read(key=process_id) + if progress and total != 1: + if int(progress) <= 100: + r.write(key=process_id, value=int(progress) + int(100 / total)) + else: + r.write(key=process_id, value=99) + return progress + elif total == 1: + r.write(key=process_id, value=100) + return progress + else: + r.write(key=process_id, value=int(100 / total)) + return progress + + +def final_progress(process_id): + 
r = Redis() + progress = r.read(key=process_id) + r.write(key=process_id, value=100) + return progress diff --git a/app/service/design/design_batch/items/utils/redis_utils.py b/app/service/design_batch/utils/redis_utils.py similarity index 100% rename from app/service/design/design_batch/items/utils/redis_utils.py rename to app/service/design_batch/utils/redis_utils.py diff --git a/app/service/design_batch/utils/save_json.py b/app/service/design_batch/utils/save_json.py new file mode 100644 index 0000000..9acd916 --- /dev/null +++ b/app/service/design_batch/utils/save_json.py @@ -0,0 +1,13 @@ +import json +import logging + +logger = logging.getLogger() + + +def oss_upload_json(oss_client, json_data, object_name): + try: + with open(f"app/service/design_batch/response_json/{object_name}", 'w') as file: + json.dump(json_data, file, indent=4) + oss_client.fput_object("test", object_name, f"app/service/design_batch/response_json/{object_name}") + except Exception as e: + logger.warning(str(e)) diff --git a/app/service/design/design_batch/items/utils/synthesis_item.py b/app/service/design_batch/utils/synthesis_item.py similarity index 94% rename from app/service/design/design_batch/items/utils/synthesis_item.py rename to app/service/design_batch/utils/synthesis_item.py index 9527cd2..272ab23 100644 --- a/app/service/design/design_batch/items/utils/synthesis_item.py +++ b/app/service/design_batch/utils/synthesis_item.py @@ -179,3 +179,19 @@ def synthesis_single(front_image, back_image): object_name = f'result_{generate_uuid()}.png' req = oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) return f"{bucket_name}/{object_name}" + + +def update_base_size_priority(layers, size): + # 计算透明背景图片的宽度 + min_x = min(info['position'][1] for info in layers) + x_list = [] + for info in layers: + if info['image'] is not None: + x_list.append(info['position'][1] + info['image'].width) + max_x = max(x_list) + new_width = max_x - min_x + new_height = 700 
+ # 更新坐标 + for info in layers: + info['adaptive_position'] = (info['position'][0], info['position'][1] - min_x) + return layers, (new_width, new_height) diff --git a/app/service/design/utils/upload_image.py b/app/service/design_batch/utils/upload_image.py similarity index 69% rename from app/service/design/utils/upload_image.py rename to app/service/design_batch/utils/upload_image.py index 388f8b8..2c79f9f 100644 --- a/app/service/design/utils/upload_image.py +++ b/app/service/design_batch/utils/upload_image.py @@ -13,11 +13,11 @@ import logging import cv2 from app.core.config import * -from app.service.utils.oss_client import oss_upload_image +from app.service.utils.new_oss_client import oss_upload_image # @RunTime -def upload_png_mask(front_image, object_name, mask=None): +def upload_png_mask(minio_client, front_image, object_name, mask=None): try: mask_url = None if mask is not None: @@ -25,14 +25,14 @@ def upload_png_mask(front_image, object_name, mask=None): # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) + req = oss_upload_image(oss_client=minio_client, bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" image_data = io.BytesIO() front_image.save(image_data, format='PNG') image_data.seek(0) image_bytes = image_data.read() - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) + req = oss_upload_image(oss_client=minio_client, bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" return front_image, image_url, mask_url except Exception as e: diff 
--git a/app/service/design_fast/pipeline/keypoint.py b/app/service/design_fast/pipeline/keypoint.py index 243cf4e..45debc2 100644 --- a/app/service/design_fast/pipeline/keypoint.py +++ b/app/service/design_fast/pipeline/keypoint.py @@ -4,7 +4,7 @@ import numpy as np from pymilvus import MilvusClient from app.core.config import * -from app.service.design.utils.design_ensemble import get_keypoint_result +from app.service.design_fast.utils.design_ensemble import get_keypoint_result logger = logging.getLogger(__name__) diff --git a/app/service/design_fast/pipeline/segmentation.py b/app/service/design_fast/pipeline/segmentation.py index 802487f..686e7b5 100644 --- a/app/service/design_fast/pipeline/segmentation.py +++ b/app/service/design_fast/pipeline/segmentation.py @@ -5,7 +5,7 @@ import cv2 import numpy as np from app.core.config import SEG_CACHE_PATH -from app.service.design.utils.design_ensemble import get_seg_result +from app.service.design_fast.utils.design_ensemble import get_seg_result from app.service.utils.new_oss_client import oss_get_image logger = logging.getLogger() diff --git a/app/service/design_fast/pipeline/split.py b/app/service/design_fast/pipeline/split.py index 35605b8..737b50e 100644 --- a/app/service/design_fast/pipeline/split.py +++ b/app/service/design_fast/pipeline/split.py @@ -7,7 +7,7 @@ from PIL import Image from cv2 import cvtColor, COLOR_BGR2RGBA from app.core.config import AIDA_CLOTHING -from app.service.design.utils.conversion_image import rgb_to_rgba +from app.service.design_fast.utils.conversion_image import rgb_to_rgba from app.service.design_fast.utils.upload_image import upload_png_mask from app.service.utils.generate_uuid import generate_uuid from app.service.utils.new_oss_client import oss_upload_image diff --git a/app/service/design_pre_processing/service.py b/app/service/design_pre_processing/service.py index a5b3a40..16ca870 100644 --- a/app/service/design_pre_processing/service.py +++ 
b/app/service/design_pre_processing/service.py @@ -10,7 +10,7 @@ from urllib3.exceptions import ResponseError from app.core.config import * from app.schemas.pre_processing import DesignPreProcessingModel -from app.service.design.utils.design_ensemble import get_keypoint_result, get_seg_result +from app.service.design_fast.utils.design_ensemble import get_seg_result, get_keypoint_result from app.service.utils.oss_client import oss_get_image, oss_upload_image logger = logging.getLogger() diff --git a/app/service/utils/oss_client.py b/app/service/utils/oss_client.py index 65ce3a2..5704ced 100644 --- a/app/service/utils/oss_client.py +++ b/app/service/utils/oss_client.py @@ -1,8 +1,6 @@ import io import logging from io import BytesIO - -import boto3 import cv2 import numpy as np import urllib3 @@ -42,12 +40,8 @@ def oss_get_image(bucket, object_name, data_type): # cv2 默认全通道读取 image_object = None try: - if OSS == "minio": - oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE, http_client=http_client) - image_data = oss_client.get_object(bucket_name=bucket, object_name=object_name) - else: - oss_client = boto3.client('s3', aws_access_key_id=S3_ACCESS_KEY, aws_secret_access_key=S3_AWS_SECRET_ACCESS_KEY, region_name=S3_REGION_NAME) - image_data = oss_client.get_object(Bucket=bucket, Key=object_name)['Body'] + oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE, http_client=http_client) + image_data = oss_client.get_object(bucket_name=bucket, object_name=object_name) if data_type == "cv2": image_bytes = image_data.read() image_array = np.frombuffer(image_bytes, np.uint8) # 转成8位无符号整型 @@ -65,12 +59,8 @@ def oss_get_image(bucket, object_name, data_type): def oss_upload_image(bucket, object_name, image_bytes): req = None try: - if OSS == "minio": - oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) - req = 
oss_client.put_object(bucket_name=bucket, object_name=object_name, data=io.BytesIO(image_bytes), length=len(image_bytes), content_type='image/png') - else: - oss_client = boto3.client('s3', aws_access_key_id=S3_ACCESS_KEY, aws_secret_access_key=S3_AWS_SECRET_ACCESS_KEY, region_name=S3_REGION_NAME) - req = oss_client.put_object(Bucket=bucket, Key=object_name, Body=io.BytesIO(image_bytes), ContentType='image/png') + oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + req = oss_client.put_object(bucket_name=bucket, object_name=object_name, data=io.BytesIO(image_bytes), length=len(image_bytes), content_type='image/png') except Exception as e: logger.warning(f"{OSS} | 上传图片出现异常 ######: {e}") return req From dbc90ea350c62da96e2ddccc1aaa41e9320edaa4 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 26 Sep 2024 14:12:17 +0800 Subject: [PATCH 084/103] =?UTF-8?q?feat=20=20=E4=BB=A3=E7=A0=81=E6=95=B4?= =?UTF-8?q?=E7=90=86=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../design/service_design_batch_generate.py | 91 +-- app/service/design_batch/design_batch.py | 613 ++++++++++++++++++ app/service/design_batch/item.py | 2 +- app/service/design_batch/pipeline/keypoint.py | 2 +- .../design_batch/pipeline/segmentation.py | 6 +- app/service/design_batch/pipeline/split.py | 4 +- app/service/design_batch/service.py | 12 - 7 files changed, 622 insertions(+), 108 deletions(-) create mode 100644 app/service/design_batch/design_batch.py diff --git a/app/service/design/service_design_batch_generate.py b/app/service/design/service_design_batch_generate.py index 0696176..a7b976f 100644 --- a/app/service/design/service_design_batch_generate.py +++ b/app/service/design/service_design_batch_generate.py @@ -1,80 +1,8 @@ import json import pika -from celery import Celery -from app.service.design.design_batch.items.item import process_layer, process_item, update_base_size_priority -from 
app.service.design.utils.synthesis_item import synthesis_single, synthesis - -celery_app = Celery('clothes_generation', broker='amqp://guest:guest@localhost:5672//') - - - -@celery_app.task -def design_batch_generate(design_objects, total_steps, task_id): - objects_response = [] - for step, object in enumerate(design_objects): - basic = object['basic'] - items_response = {'layers': []} - if basic['single_overall'] == "overall": - item_results = [process_item(item, basic) for item in object['items']] - layers = [] - futures = [] - body_size = None - for item in item_results: - futures = [process_layer(item, layers)] - for future in futures: - if future is not None: - body_size = future - layers = sorted(layers, key=lambda s: s.get("priority", float('inf'))) - - layers, new_size = update_base_size_priority(layers, body_size) - - for lay in layers: - items_response['layers'].append({ - 'image_category': lay['name'], - 'position': lay['position'], - 'priority': lay.get("priority", None), - 'resize_scale': lay['resize_scale'] if "resize_scale" in lay.keys() else None, - 'image_size': lay['image'] if lay['image'] is None else lay['image'].size, - 'gradient_string': lay['gradient_string'] if 'gradient_string' in lay.keys() else "", - 'mask_url': lay['mask_url'], - 'image_url': lay['image_url'] if 'image_url' in lay.keys() else None, - 'pattern_image_url': lay['pattern_image_url'] if 'pattern_image_url' in lay.keys() else None, - - # 'image': lay['image'], - # 'mask_image': lay['mask_image'], - }) - items_response['synthesis_url'] = synthesis(layers, new_size, basic) - else: - item_results = process_item(object['items'][0], basic) - items_response['layers'].append({ - 'image_category': f"{item_results['name']}_front", - 'image_size': item_results['back_image'].size if item_results['back_image'] else None, - 'position': None, - 'priority': 0, - 'image_url': item_results['front_image_url'], - 'mask_url': item_results['mask_url'], - "gradient_string": 
item_results['gradient_string'] if 'gradient_string' in item_results.keys() else "", - 'pattern_image_url': item_results['pattern_image_url'] if 'pattern_image_url' in item_results.keys() else None, - - }) - items_response['layers'].append({ - 'image_category': f"{item_results['name']}_back", - 'image_size': item_results['front_image'].size if item_results['front_image'] else None, - 'position': None, - 'priority': 0, - 'image_url': item_results['back_image_url'], - 'mask_url': item_results['mask_url'], - "gradient_string": item_results['gradient_string'] if 'gradient_string' in item_results.keys() else "", - 'pattern_image_url': item_results['pattern_image_url'] if 'pattern_image_url' in item_results.keys() else None, - - }) - items_response['synthesis_url'] = synthesis_single(item_results['front_image'], item_results['back_image']) - objects_response.append(items_response) - publish_status(task_id, f"{step + 1}/{total_steps}", objects_response) - print(objects_response) - return objects_response +from app.service.design_batch.design_batch import batch_design def publish_status(task_id, progress, result): @@ -93,22 +21,7 @@ def publish_status(task_id, progress, result): async def start_design_batch_generate(data, file): - generate_clothes_task = design_batch_generate.delay(json.loads(file.decode())['objects'], data.total, data.tasks_id) + generate_clothes_task = batch_design.delay(json.loads(file.decode())['objects'], data.total, data.tasks_id) print(generate_clothes_task) publish_status(data.tasks_id, "0/100", "") return {"task_id": data.tasks_id} -# -# -# if __name__ == '__main__': -# data = {"objects": [{"basic": {"body_point_test": {"waistband_right": [200, 241], "hand_point_right": [223, 297], "waistband_left": [112, 241], "hand_point_left": [92, 305], "shoulder_left": [99, 116], "shoulder_right": [215, 116]}, "layer_order": True, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "overall", "switch_category": ""}, "items": [ 
-# {"businessId": 270372, "color": "30 28 28", "image_id": 69780, "offset": [0, 0], "path": "aida-sys-image/images/female/trousers/0825000630.jpg", "seg_mask_url": "test/result.png", -# "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, "priority": 10, "resize_scale": [1.0, 1.0], "type": "Trousers"}, -# {"businessId": 270373, "color": "30 28 28", "image_id": 98243, "offset": [0, 0], "path": "aida-sys-image/images/female/blouse/0902003811.jpg", "seg_mask_url": "test/result.png", -# "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, "priority": 11, "resize_scale": [1.0, 1.0], "type": "Blouse"}, -# {"businessId": 270374, "color": "172 68 68", "image_id": 98244, "offset": [0, 0], "path": "aida-sys-image/images/female/outwear/0825000410.jpg", "seg_mask_url": "test/result.png", -# "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, "priority": 12, "resize_scale": [1.0, 1.0], "type": "Outwear"}, -# {"body_path": "aida-sys-image/models/female/5bdfe7ca-64eb-44e4-b03d-8e517520c795.png", "image_id": 96090, "type": "Body"}]}], "process_id": "83"} -# total_steps = 1 -# task_id = 1 -# design_batch_generate(data['objects'], total_steps, task_id) -# # publish_status(task_id="0/100", progress=100) diff 
--git a/app/service/design_batch/design_batch.py b/app/service/design_batch/design_batch.py new file mode 100644 index 0000000..4a13c4e --- /dev/null +++ b/app/service/design_batch/design_batch.py @@ -0,0 +1,613 @@ +import io +import json +import logging.config +import threading +import uuid + +import cv2 +import numpy as np +import urllib3 +from PIL import Image +from celery import Celery +from minio import Minio + +from app.core.config import * +from app.service.design_batch.item import BodyItem, TopItem, BottomItem + +id_lock = threading.Lock() +celery_app = Celery('tasks', broker='amqp://guest:guest@10.1.2.213:5672//', backend='rpc://') +celery_app.conf.worker_log_format = '%(asctime)s %(filename)s [line:%(lineno)d] %(levelname)s %(message)s' +celery_app.conf.worker_hijack_root_logger = False +logging.getLogger('pika').setLevel(logging.WARNING) +logger = logging.getLogger() + +timeout = urllib3.Timeout(connect=1, read=10.0) # 连接超时 5 秒,读取超时 10 秒 + + +# 自定义 Retry 类 +class CustomRetry(urllib3.Retry): + def increment(self, method=None, url=None, response=None, error=None, **kwargs): + # 调用父类的 increment 方法 + new_retry = super(CustomRetry, self).increment(method, url, response, error, **kwargs) + # 打印重试信息 + logger.info(f"重试连接: {method} {url},错误: {error},重试次数: {self.total - new_retry.total}") + return new_retry + + +http_client = urllib3.PoolManager( + num_pools=50, # 设置连接池大小 + maxsize=50, + timeout=timeout, + cert_reqs='CERT_REQUIRED', # 需要证书验证 + retries=CustomRetry( + total=5, + backoff_factor=0.2, + status_forcelist=[500, 502, 503, 504], + ), +) +minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE, http_client=http_client) + + +def oss_upload_image(bucket, object_name, image_bytes): + req = None + try: + oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + req = oss_client.put_object(bucket_name=bucket, object_name=object_name, data=io.BytesIO(image_bytes), 
length=len(image_bytes), content_type='image/png') + except Exception as e: + logger.warning(f" 上传图片出现异常 ######: {e}") + return req + + +# 优先级 +priority_dict = { + 'earring_front': 99, + 'bag_front': 98, + 'hairstyle_front': 97, + 'outwear_front': 20, + 'tops_front': 19, + 'dress_front': 18, + 'blouse_front': 17, + 'skirt_front': 16, + 'trousers_front': 15, + 'bottoms_front': 14, + 'shoes_right': 1, + 'shoes_left': 1, + 'body': 0, + 'bottoms_back': -14, + 'trousers_back': -15, + 'skirt_back': -16, + 'blouse_back': -17, + 'dress_back': -18, + 'tops_back': -19, + 'outwear_back': -20, + 'hairstyle_back': -97, + 'bag_back': -98, + 'earring_back': -99, +} + + +def process_item(item, basic): + if item['type'] == "Body": + body_server = BodyItem(data=item, basic=basic, minio_client=minio_client) + item_data = body_server.process() + elif item['type'].lower() in ['blouse', 'outwear', 'dress', 'tops']: + top_server = TopItem(data=item, basic=basic, minio_client=minio_client) + item_data = top_server.process() + else: + bottom_server = BottomItem(data=item, basic=basic, minio_client=minio_client) + item_data = bottom_server.process() + return item_data + + +def process_layer(item, layers): + if item['name'] == "mannequin": + body_layer = organize_body(item) + layers.append(body_layer) + return item['body_image'].size + else: + front_layer, back_layer = organize_clothing(item) + layers.append(front_layer) + layers.append(back_layer) + + +def organize_body(layer): + body_layer = dict(priority=0, + name=layer["name"].lower(), + image=layer['body_image'], + image_url=layer['body_path'], + mask_image=None, + mask_url=None, + sacle=1, + # mask=layer['body_mask'], + position=(0, 0)) + return body_layer + + +def organize_clothing(layer): + # 起始坐标 + start_point = calculate_start_point(layer['keypoint'], layer['scale'], layer['clothes_keypoint'], layer['body_point_test'], layer["offset"], layer["resize_scale"]) + # 前片数据 + front_layer = dict(priority=layer['priority'] if 
layer.get("layer_order", False) else priority_dict.get(f'{layer["name"].lower()}_front', None), + name=f'{layer["name"].lower()}_front', + image=layer["front_image"], + # mask_image=layer['front_mask_image'], + image_url=layer['front_image_url'], + mask_url=layer['mask_url'], + sacle=layer['scale'], + clothes_keypoint=layer['clothes_keypoint'], + position=start_point, + resize_scale=layer["resize_scale"], + mask=cv2.resize(layer['mask'], layer["front_image"].size), + gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", + pattern_image_url=layer['pattern_image_url'], + pattern_image=layer['pattern_image'] + + ) + # 后片数据 + back_layer = dict(priority=-layer.get("priority", 0) if layer.get("layer_order", False) else priority_dict.get(f'{layer["name"].lower()}_back', None), + name=f'{layer["name"].lower()}_back', + image=layer["back_image"], + # mask_image=layer['back_mask_image'], + image_url=layer['back_image_url'], + mask_url=layer['mask_url'], + sacle=layer['scale'], + clothes_keypoint=layer['clothes_keypoint'], + position=start_point, + resize_scale=layer["resize_scale"], + mask=cv2.resize(layer['mask'], layer["front_image"].size), + gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", + pattern_image_url=layer['pattern_image_url'], + ) + return front_layer, back_layer + + +def calculate_start_point(keypoint_type, scale, clothes_point, body_point, offset, resize_scale): + """ + Align left + Args: + keypoint_type: string, "waistband" | "shoulder" | "ear_point" + scale: float + clothes_point: dict{'left': [x1, y1, z1], 'right': [x2, y2, z2]} + body_point: dict, containing keypoint data of body figure + + Returns: + start_point: tuple (x', y') + x' = y_body - y1 * scale + offset + y' = x_body - x1 * scale + offset + + """ + side_indicator = f'{keypoint_type}_left' + start_point = ( + int(body_point[side_indicator][1] + offset[1] - int(clothes_point[side_indicator][0]) * scale), # y + 
int(body_point[side_indicator][0] + offset[0] - int(clothes_point[side_indicator][1]) * scale) # x + ) + return start_point + + +def update_base_size_priority(layers, size): + # 计算透明背景图片的宽度 + min_x = min(info['position'][1] for info in layers) + x_list = [] + for info in layers: + if info['image'] is not None: + x_list.append(info['position'][1] + info['image'].width) + max_x = max(x_list) + new_width = max_x - min_x + new_height = 700 + # 更新坐标 + for info in layers: + info['adaptive_position'] = (info['position'][0], info['position'][1] - min_x) + return layers, (new_width, new_height) + + +def synthesis_single(front_image, back_image): + result_image = None + if front_image: + result_image = front_image + if back_image: + result_image.paste(back_image, (0, 0), back_image) + image_data = io.BytesIO() + result_image.save(image_data, format='PNG') + image_data.seek(0) + image_bytes = image_data.read() + bucket_name = 'aida-results' + object_name = f'result_{generate_uuid()}.png' + oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) + return f"{bucket_name}/{object_name}" + + +def oss_upload_json(json_data, object_name): + try: + with open(f"app/service/design/design_batch/response_json/{object_name}", 'w') as file: + json.dump(json_data, file, indent=4) + + oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + oss_client.fput_object("test", object_name, f"app/service/design/design_batch/response_json/{object_name}") + except Exception as e: + logger.warning(str(e)) + + +def generate_uuid(): + with id_lock: + unique_id = str(uuid.uuid1()) + return unique_id + + +def positioning(all_mask_shape, mask_shape, offset): + all_start = 0 + all_end = 0 + mask_start = 0 + mask_end = 0 + if offset == 0: + all_start = 0 + all_end = min(all_mask_shape, mask_shape) + + mask_start = 0 + mask_end = min(all_mask_shape, mask_shape) + elif offset > 0: + all_start = min(offset, all_mask_shape) + all_end = 
min(offset + mask_shape, all_mask_shape) + + mask_start = 0 + mask_end = 0 if offset > all_mask_shape else min(all_mask_shape - offset, mask_shape) + elif offset < 0: + if abs(offset) > mask_shape: + all_start = 0 + all_end = 0 + else: + all_start = 0 + if mask_shape - abs(offset) > all_mask_shape: + all_end = min(mask_shape - abs(offset), all_mask_shape) + else: + all_end = mask_shape - abs(offset) + + if abs(offset) > mask_shape: + mask_start = mask_shape + mask_end = mask_shape + else: + mask_start = abs(offset) + if mask_shape - abs(offset) >= all_mask_shape: + mask_end = all_mask_shape + abs(offset) + else: + mask_end = mask_shape + return all_start, all_end, mask_start, mask_end + + +def synthesis(data, size, basic_info): + # 创建底图 + base_image = Image.new('RGBA', size, (0, 0, 0, 0)) + try: + all_mask_shape = (size[1], size[0]) + body_mask = None + for d in data: + if d['name'] == 'body' or d['name'] == 'mannequin': + # 创建一个新的宽高透明图像, 把模特贴上去获取mask + transparent_image = Image.new("RGBA", size, (0, 0, 0, 0)) + transparent_image.paste(d['image'], (d['adaptive_position'][1], d['adaptive_position'][0]), d['image']) # 此处可变数组会被paste篡改值,所以使用下标获取position + body_mask = np.array(transparent_image.split()[3]) + + # 根据新的坐标获取新的肩点 + left_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_left'], [d['adaptive_position'][1], d['adaptive_position'][0]])] + right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], [d['adaptive_position'][1], d['adaptive_position'][0]])] + body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 + _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) + top_outer_mask = np.array(binary_body_mask) + bottom_outer_mask = np.array(binary_body_mask) + + top = True + bottom = True + i = len(data) + while i: + i -= 1 + if top and data[i]['name'] in ["blouse_front", "outwear_front", "dress_front", "tops_front"]: + top = False + mask_shape = 
data[i]['mask'].shape + y_offset, x_offset = data[i]['adaptive_position'] + # 初始化叠加区域的起始和结束位置 + all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) + all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) + # 将叠加区域赋值为相应的像素值 + _, sketch_mask = cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) + background = np.zeros_like(top_outer_mask) + background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] + top_outer_mask = background + top_outer_mask + elif bottom and data[i]['name'] in ["trousers_front", "skirt_front", "bottoms_front", "dress_front"]: + bottom = False + mask_shape = data[i]['mask'].shape + y_offset, x_offset = data[i]['adaptive_position'] + # 初始化叠加区域的起始和结束位置 + all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) + all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) + # 将叠加区域赋值为相应的像素值 + _, sketch_mask = cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) + background = np.zeros_like(top_outer_mask) + background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] + bottom_outer_mask = background + bottom_outer_mask + elif bottom is False and top is False: + break + + all_mask = cv2.bitwise_or(top_outer_mask, bottom_outer_mask) + + for layer in data: + if layer['image'] is not None: + if layer['name'] != "body": + test_image = Image.new('RGBA', size, (0, 0, 0, 0)) + test_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) + mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) + mask_alpha = Image.fromarray(mask_data) + 
cropped_image = Image.composite(test_image, Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) + base_image.paste(test_image, (0, 0), cropped_image) # test_image 已经按照坐标贴到最大宽值的图片上 坐着这里坐标为00 + else: + base_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) + + result_image = base_image + + image_data = io.BytesIO() + result_image.save(image_data, format='PNG') + image_data.seek(0) + + # oss upload + image_bytes = image_data.read() + bucket_name = "aida-results" + object_name = f'result_{generate_uuid()}.png' + oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) + return f"{bucket_name}/{object_name}" + # return f"aida-results/{minio_client.put_object('aida-results', f'result_{generate_uuid()}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" + + # object_name = f'result_{generate_uuid()}.png' + # response = s3.put_object(Bucket="aida-results", Key=object_name, Body=data, ContentType='image/png') + # object_url = f"aida-results/{object_name}" + # if response['ResponseMetadata']['HTTPStatusCode'] == 200: + # return object_url + # else: + # return "" + + except Exception as e: + logging.warning(f"synthesis runtime exception : {e}") + + +def publish_status(task_id, progress, result): + connection = pika.BlockingConnection(pika.ConnectionParameters('10.1.2.213')) + channel = connection.channel() + channel.queue_declare(queue='DesignBatch', durable=True) + message = {'task_id': task_id, 'progress': progress, "result": result} + channel.basic_publish(exchange='', + routing_key='DesignBatch', + body=json.dumps(message), + properties=pika.BasicProperties( + delivery_mode=2, + )) + connection.close() + + +@celery_app.task +def batch_design(objects_data, tasks_id, json_name): + object_response = [] + threads = [] + active_threads = 0 + lock = threading.Lock() + + def process_object(step, object): + nonlocal active_threads + basic = 
object['basic'] + items_response = {'layers': []} + if basic['single_overall'] == "overall": + item_results = [] + for item in object['items']: + item_results.append(process_item(item, basic)) + layers = [] + body_size = None + for item in item_results: + body_size = process_layer(item, layers) + layers = sorted(layers, key=lambda s: s.get("priority", float('inf'))) + + layers, new_size = update_base_size_priority(layers, body_size) + + for lay in layers: + items_response['layers'].append({ + 'image_category': lay['name'], + 'position': lay['position'], + 'priority': lay.get("priority", None), + 'resize_scale': lay['resize_scale'] if "resize_scale" in lay.keys() else None, + 'image_size': lay['image'] if lay['image'] is None else lay['image'].size, + 'gradient_string': lay['gradient_string'] if 'gradient_string' in lay.keys() else "", + 'mask_url': lay['mask_url'], + 'image_url': lay['image_url'] if 'image_url' in lay.keys() else None, + 'pattern_image_url': lay['pattern_image_url'] if 'pattern_image_url' in lay.keys() else None, + }) + items_response['synthesis_url'] = synthesis(layers, new_size, basic) + else: + item_result = process_item(object['items'][0], basic) + items_response['layers'].append({ + 'image_category': f"{item_result['name']}_front", + 'image_size': item_result['back_image'].size if item_result['back_image'] else None, + 'position': None, + 'priority': 0, + 'image_url': item_result['front_image_url'], + 'mask_url': item_result['mask_url'], + "gradient_string": item_result['gradient_string'] if 'gradient_string' in item_result.keys() else "", + 'pattern_image_url': item_result['pattern_image_url'] if 'pattern_image_url' in item_result.keys() else None, + }) + items_response['layers'].append({ + 'image_category': f"{item_result['name']}_back", + 'image_size': item_result['front_image'].size if item_result['front_image'] else None, + 'position': None, + 'priority': 0, + 'image_url': item_result['back_image_url'], + 'mask_url': 
item_result['mask_url'], + "gradient_string": item_result['gradient_string'] if 'gradient_string' in item_result.keys() else "", + 'pattern_image_url': item_result['pattern_image_url'] if 'pattern_image_url' in item_result.keys() else None, + }) + items_response['synthesis_url'] = synthesis_single(item_result['front_image'], item_result['back_image']) + + with lock: + object_response.append(items_response) + publish_status(tasks_id, step + 1, items_response) + active_threads -= 1 + + for step, object in enumerate(objects_data): + t = threading.Thread(target=process_object, args=(step, object)) + threads.append(t) + t.start() + with lock: + active_threads += 1 + + for t in threads: + t.join() + + oss_upload_json(object_response, json_name) + publish_status(tasks_id, "ok", json_name) + return object_response + + +if __name__ == '__main__': + object_data = { + "objects": [ + { + "basic": { + "body_point_test": { + "waistband_right": [ + 199, + 239 + ], + "hand_point_right": [ + 220, + 308 + ], + "waistband_left": [ + 113, + 239 + ], + "hand_point_left": [ + 92, + 310 + ], + "shoulder_left": [ + 99, + 111 + ], + "shoulder_right": [ + 214, + 111 + ] + }, + "layer_order": False, + "scale_bag": 0.7, + "scale_earrings": 0.16, + "self_template": True, + "single_overall": "overall", + "switch_category": "" + }, + "items": [ + { + "color": "195 195 196", + "icon": "none", + "image_id": 116207, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/trousers/trousers_973.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Trousers" + }, + { + 
"color": "203 204 202", + "icon": "none", + "image_id": 95825, + "offset": [ + 1, + 1 + ], + "path": "aida-sys-image/images/female/blouse/0902003606.jpg", + "print": { + "element": { + "element_angle_list": [], + "element_path_list": [], + "element_scale_list": [], + "location": [] + }, + "overall": { + "location": [ + [ + 0.0, + 0.0 + ] + ], + "print_angle_list": [ + 0.0, + 0.0 + ], + "print_path_list": [], + "print_scale_list": [ + 0.0, + 0.0 + ] + }, + "single": { + "location": [], + "print_angle_list": [], + "print_path_list": [], + "print_scale_list": [] + } + }, + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Blouse" + }, + { + "body_path": "aida-sys-image/models/female/23ecb158-7b70-4468-a9d1-bac3ded9da62.png", + "image_id": 116612, + "offset": [ + 1, + 1 + ], + "resize_scale": [ + 1.0, + 1.0 + ], + "type": "Body" + } + ] + } + ], + "process_id": "9062885798571902" + } + X = batch_design(object_data['objects'], "123", "test.json") + print(X) diff --git a/app/service/design_batch/item.py b/app/service/design_batch/item.py index cad1488..e10320d 100644 --- a/app/service/design_batch/item.py +++ b/app/service/design_batch/item.py @@ -1,4 +1,4 @@ -from app.service.design_batch.pipeline import * +from app.service.design_fast.pipeline import LoadImage, KeyPoint, Segmentation, Color, PrintPainting, Scaling, Split, LoadBodyImage, ContourDetection class BaseItem: diff --git a/app/service/design_batch/pipeline/keypoint.py b/app/service/design_batch/pipeline/keypoint.py index 313a613..243cf4e 100644 --- a/app/service/design_batch/pipeline/keypoint.py +++ b/app/service/design_batch/pipeline/keypoint.py @@ -4,7 +4,7 @@ import numpy as np from pymilvus import MilvusClient from app.core.config import * -from app.service.design_batch.utils.design_ensemble import get_keypoint_result +from app.service.design.utils.design_ensemble import get_keypoint_result logger = logging.getLogger(__name__) diff --git a/app/service/design_batch/pipeline/segmentation.py 
b/app/service/design_batch/pipeline/segmentation.py index cba3446..802487f 100644 --- a/app/service/design_batch/pipeline/segmentation.py +++ b/app/service/design_batch/pipeline/segmentation.py @@ -5,7 +5,7 @@ import cv2 import numpy as np from app.core.config import SEG_CACHE_PATH -from app.service.design_batch.utils.design_ensemble import get_seg_result +from app.service.design.utils.design_ensemble import get_seg_result from app.service.utils.new_oss_client import oss_get_image logger = logging.getLogger() @@ -48,7 +48,7 @@ class Segmentation: @staticmethod def save_seg_result(seg_result, image_id): - file_path = f"seg_cache/{image_id}.npy" + file_path = f"{SEG_CACHE_PATH}{image_id}.npy" try: np.save(file_path, seg_result) logger.info(f"保存成功 :{os.path.abspath(file_path)}") @@ -57,7 +57,7 @@ class Segmentation: @staticmethod def load_seg_result(image_id): - file_path = f"seg_cache/{image_id}.npy" + file_path = f"{SEG_CACHE_PATH}{image_id}.npy" logger.info(f"load seg file name is :{SEG_CACHE_PATH}{image_id}.npy") try: seg_result = np.load(file_path) diff --git a/app/service/design_batch/pipeline/split.py b/app/service/design_batch/pipeline/split.py index 5dbcef5..35605b8 100644 --- a/app/service/design_batch/pipeline/split.py +++ b/app/service/design_batch/pipeline/split.py @@ -7,8 +7,8 @@ from PIL import Image from cv2 import cvtColor, COLOR_BGR2RGBA from app.core.config import AIDA_CLOTHING -from app.service.design_batch.utils.conversion_image import rgb_to_rgba -from app.service.design_batch.utils.upload_image import upload_png_mask +from app.service.design.utils.conversion_image import rgb_to_rgba +from app.service.design_fast.utils.upload_image import upload_png_mask from app.service.utils.generate_uuid import generate_uuid from app.service.utils.new_oss_client import oss_upload_image diff --git a/app/service/design_batch/service.py b/app/service/design_batch/service.py index db8246e..e69de29 100644 --- a/app/service/design_batch/service.py +++ 
b/app/service/design_batch/service.py @@ -1,12 +0,0 @@ -import json - -import pika -from app.service.design_batch.design_batch_celery import batch_design -from app.service.design_batch.utils.MQ import publish_status - - -async def start_design_batch_generate(data, file): - generate_clothes_task = batch_design.delay(json.loads(file.decode())['objects'], data.total, data.tasks_id) - print(generate_clothes_task) - publish_status(data.tasks_id, "0/100", "") - return {"task_id": data.tasks_id} From 396a62677c233c7c87cc1364ee3e34054fbb10c8 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 26 Sep 2024 14:12:26 +0800 Subject: [PATCH 085/103] =?UTF-8?q?feat=20=20=E4=BB=A3=E7=A0=81=E6=95=B4?= =?UTF-8?q?=E7=90=86=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_fast/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 app/service/design_fast/__init__.py diff --git a/app/service/design_fast/__init__.py b/app/service/design_fast/__init__.py deleted file mode 100644 index e69de29..0000000 From 6a93f89f0da84ea176bff1499702b4bf97babf83 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 26 Sep 2024 14:15:35 +0800 Subject: [PATCH 086/103] =?UTF-8?q?feat=20=20=E4=BB=A3=E7=A0=81=E6=95=B4?= =?UTF-8?q?=E7=90=86=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_batch/design_batch.py | 613 ------------------ app/service/design_batch/item.py | 2 +- app/service/design_batch/pipeline/keypoint.py | 2 +- app/service/design_batch/pipeline/loading.py | 3 - app/service/design_batch/pipeline/scale.py | 2 +- .../design_batch/pipeline/segmentation.py | 6 +- app/service/design_batch/pipeline/split.py | 4 +- app/service/design_batch/service.py | 11 + 8 files changed, 19 insertions(+), 624 deletions(-) delete mode 100644 app/service/design_batch/design_batch.py diff --git a/app/service/design_batch/design_batch.py 
b/app/service/design_batch/design_batch.py deleted file mode 100644 index 4a13c4e..0000000 --- a/app/service/design_batch/design_batch.py +++ /dev/null @@ -1,613 +0,0 @@ -import io -import json -import logging.config -import threading -import uuid - -import cv2 -import numpy as np -import urllib3 -from PIL import Image -from celery import Celery -from minio import Minio - -from app.core.config import * -from app.service.design_batch.item import BodyItem, TopItem, BottomItem - -id_lock = threading.Lock() -celery_app = Celery('tasks', broker='amqp://guest:guest@10.1.2.213:5672//', backend='rpc://') -celery_app.conf.worker_log_format = '%(asctime)s %(filename)s [line:%(lineno)d] %(levelname)s %(message)s' -celery_app.conf.worker_hijack_root_logger = False -logging.getLogger('pika').setLevel(logging.WARNING) -logger = logging.getLogger() - -timeout = urllib3.Timeout(connect=1, read=10.0) # 连接超时 5 秒,读取超时 10 秒 - - -# 自定义 Retry 类 -class CustomRetry(urllib3.Retry): - def increment(self, method=None, url=None, response=None, error=None, **kwargs): - # 调用父类的 increment 方法 - new_retry = super(CustomRetry, self).increment(method, url, response, error, **kwargs) - # 打印重试信息 - logger.info(f"重试连接: {method} {url},错误: {error},重试次数: {self.total - new_retry.total}") - return new_retry - - -http_client = urllib3.PoolManager( - num_pools=50, # 设置连接池大小 - maxsize=50, - timeout=timeout, - cert_reqs='CERT_REQUIRED', # 需要证书验证 - retries=CustomRetry( - total=5, - backoff_factor=0.2, - status_forcelist=[500, 502, 503, 504], - ), -) -minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE, http_client=http_client) - - -def oss_upload_image(bucket, object_name, image_bytes): - req = None - try: - oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) - req = oss_client.put_object(bucket_name=bucket, object_name=object_name, data=io.BytesIO(image_bytes), length=len(image_bytes), content_type='image/png') - 
except Exception as e: - logger.warning(f" 上传图片出现异常 ######: {e}") - return req - - -# 优先级 -priority_dict = { - 'earring_front': 99, - 'bag_front': 98, - 'hairstyle_front': 97, - 'outwear_front': 20, - 'tops_front': 19, - 'dress_front': 18, - 'blouse_front': 17, - 'skirt_front': 16, - 'trousers_front': 15, - 'bottoms_front': 14, - 'shoes_right': 1, - 'shoes_left': 1, - 'body': 0, - 'bottoms_back': -14, - 'trousers_back': -15, - 'skirt_back': -16, - 'blouse_back': -17, - 'dress_back': -18, - 'tops_back': -19, - 'outwear_back': -20, - 'hairstyle_back': -97, - 'bag_back': -98, - 'earring_back': -99, -} - - -def process_item(item, basic): - if item['type'] == "Body": - body_server = BodyItem(data=item, basic=basic, minio_client=minio_client) - item_data = body_server.process() - elif item['type'].lower() in ['blouse', 'outwear', 'dress', 'tops']: - top_server = TopItem(data=item, basic=basic, minio_client=minio_client) - item_data = top_server.process() - else: - bottom_server = BottomItem(data=item, basic=basic, minio_client=minio_client) - item_data = bottom_server.process() - return item_data - - -def process_layer(item, layers): - if item['name'] == "mannequin": - body_layer = organize_body(item) - layers.append(body_layer) - return item['body_image'].size - else: - front_layer, back_layer = organize_clothing(item) - layers.append(front_layer) - layers.append(back_layer) - - -def organize_body(layer): - body_layer = dict(priority=0, - name=layer["name"].lower(), - image=layer['body_image'], - image_url=layer['body_path'], - mask_image=None, - mask_url=None, - sacle=1, - # mask=layer['body_mask'], - position=(0, 0)) - return body_layer - - -def organize_clothing(layer): - # 起始坐标 - start_point = calculate_start_point(layer['keypoint'], layer['scale'], layer['clothes_keypoint'], layer['body_point_test'], layer["offset"], layer["resize_scale"]) - # 前片数据 - front_layer = dict(priority=layer['priority'] if layer.get("layer_order", False) else 
priority_dict.get(f'{layer["name"].lower()}_front', None), - name=f'{layer["name"].lower()}_front', - image=layer["front_image"], - # mask_image=layer['front_mask_image'], - image_url=layer['front_image_url'], - mask_url=layer['mask_url'], - sacle=layer['scale'], - clothes_keypoint=layer['clothes_keypoint'], - position=start_point, - resize_scale=layer["resize_scale"], - mask=cv2.resize(layer['mask'], layer["front_image"].size), - gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", - pattern_image_url=layer['pattern_image_url'], - pattern_image=layer['pattern_image'] - - ) - # 后片数据 - back_layer = dict(priority=-layer.get("priority", 0) if layer.get("layer_order", False) else priority_dict.get(f'{layer["name"].lower()}_back', None), - name=f'{layer["name"].lower()}_back', - image=layer["back_image"], - # mask_image=layer['back_mask_image'], - image_url=layer['back_image_url'], - mask_url=layer['mask_url'], - sacle=layer['scale'], - clothes_keypoint=layer['clothes_keypoint'], - position=start_point, - resize_scale=layer["resize_scale"], - mask=cv2.resize(layer['mask'], layer["front_image"].size), - gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", - pattern_image_url=layer['pattern_image_url'], - ) - return front_layer, back_layer - - -def calculate_start_point(keypoint_type, scale, clothes_point, body_point, offset, resize_scale): - """ - Align left - Args: - keypoint_type: string, "waistband" | "shoulder" | "ear_point" - scale: float - clothes_point: dict{'left': [x1, y1, z1], 'right': [x2, y2, z2]} - body_point: dict, containing keypoint data of body figure - - Returns: - start_point: tuple (x', y') - x' = y_body - y1 * scale + offset - y' = x_body - x1 * scale + offset - - """ - side_indicator = f'{keypoint_type}_left' - start_point = ( - int(body_point[side_indicator][1] + offset[1] - int(clothes_point[side_indicator][0]) * scale), # y - int(body_point[side_indicator][0] + offset[0] - 
int(clothes_point[side_indicator][1]) * scale) # x - ) - return start_point - - -def update_base_size_priority(layers, size): - # 计算透明背景图片的宽度 - min_x = min(info['position'][1] for info in layers) - x_list = [] - for info in layers: - if info['image'] is not None: - x_list.append(info['position'][1] + info['image'].width) - max_x = max(x_list) - new_width = max_x - min_x - new_height = 700 - # 更新坐标 - for info in layers: - info['adaptive_position'] = (info['position'][0], info['position'][1] - min_x) - return layers, (new_width, new_height) - - -def synthesis_single(front_image, back_image): - result_image = None - if front_image: - result_image = front_image - if back_image: - result_image.paste(back_image, (0, 0), back_image) - image_data = io.BytesIO() - result_image.save(image_data, format='PNG') - image_data.seek(0) - image_bytes = image_data.read() - bucket_name = 'aida-results' - object_name = f'result_{generate_uuid()}.png' - oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) - return f"{bucket_name}/{object_name}" - - -def oss_upload_json(json_data, object_name): - try: - with open(f"app/service/design/design_batch/response_json/{object_name}", 'w') as file: - json.dump(json_data, file, indent=4) - - oss_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) - oss_client.fput_object("test", object_name, f"app/service/design/design_batch/response_json/{object_name}") - except Exception as e: - logger.warning(str(e)) - - -def generate_uuid(): - with id_lock: - unique_id = str(uuid.uuid1()) - return unique_id - - -def positioning(all_mask_shape, mask_shape, offset): - all_start = 0 - all_end = 0 - mask_start = 0 - mask_end = 0 - if offset == 0: - all_start = 0 - all_end = min(all_mask_shape, mask_shape) - - mask_start = 0 - mask_end = min(all_mask_shape, mask_shape) - elif offset > 0: - all_start = min(offset, all_mask_shape) - all_end = min(offset + mask_shape, all_mask_shape) - - 
mask_start = 0 - mask_end = 0 if offset > all_mask_shape else min(all_mask_shape - offset, mask_shape) - elif offset < 0: - if abs(offset) > mask_shape: - all_start = 0 - all_end = 0 - else: - all_start = 0 - if mask_shape - abs(offset) > all_mask_shape: - all_end = min(mask_shape - abs(offset), all_mask_shape) - else: - all_end = mask_shape - abs(offset) - - if abs(offset) > mask_shape: - mask_start = mask_shape - mask_end = mask_shape - else: - mask_start = abs(offset) - if mask_shape - abs(offset) >= all_mask_shape: - mask_end = all_mask_shape + abs(offset) - else: - mask_end = mask_shape - return all_start, all_end, mask_start, mask_end - - -def synthesis(data, size, basic_info): - # 创建底图 - base_image = Image.new('RGBA', size, (0, 0, 0, 0)) - try: - all_mask_shape = (size[1], size[0]) - body_mask = None - for d in data: - if d['name'] == 'body' or d['name'] == 'mannequin': - # 创建一个新的宽高透明图像, 把模特贴上去获取mask - transparent_image = Image.new("RGBA", size, (0, 0, 0, 0)) - transparent_image.paste(d['image'], (d['adaptive_position'][1], d['adaptive_position'][0]), d['image']) # 此处可变数组会被paste篡改值,所以使用下标获取position - body_mask = np.array(transparent_image.split()[3]) - - # 根据新的坐标获取新的肩点 - left_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_left'], [d['adaptive_position'][1], d['adaptive_position'][0]])] - right_shoulder = [x + y for x, y in zip(basic_info['body_point_test']['shoulder_right'], [d['adaptive_position'][1], d['adaptive_position'][0]])] - body_mask[:min(left_shoulder[1], right_shoulder[1]), left_shoulder[0]:right_shoulder[0]] = 255 - _, binary_body_mask = cv2.threshold(body_mask, 127, 255, cv2.THRESH_BINARY) - top_outer_mask = np.array(binary_body_mask) - bottom_outer_mask = np.array(binary_body_mask) - - top = True - bottom = True - i = len(data) - while i: - i -= 1 - if top and data[i]['name'] in ["blouse_front", "outwear_front", "dress_front", "tops_front"]: - top = False - mask_shape = data[i]['mask'].shape - y_offset, x_offset = 
data[i]['adaptive_position'] - # 初始化叠加区域的起始和结束位置 - all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) - all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) - # 将叠加区域赋值为相应的像素值 - _, sketch_mask = cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) - background = np.zeros_like(top_outer_mask) - background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] - top_outer_mask = background + top_outer_mask - elif bottom and data[i]['name'] in ["trousers_front", "skirt_front", "bottoms_front", "dress_front"]: - bottom = False - mask_shape = data[i]['mask'].shape - y_offset, x_offset = data[i]['adaptive_position'] - # 初始化叠加区域的起始和结束位置 - all_y_start, all_y_end, mask_y_start, mask_y_end = positioning(all_mask_shape=all_mask_shape[0], mask_shape=mask_shape[0], offset=y_offset) - all_x_start, all_x_end, mask_x_start, mask_x_end = positioning(all_mask_shape=all_mask_shape[1], mask_shape=mask_shape[1], offset=x_offset) - # 将叠加区域赋值为相应的像素值 - _, sketch_mask = cv2.threshold(data[i]['mask'], 127, 255, cv2.THRESH_BINARY) - background = np.zeros_like(top_outer_mask) - background[all_y_start:all_y_end, all_x_start:all_x_end] = sketch_mask[mask_y_start:mask_y_end, mask_x_start:mask_x_end] - bottom_outer_mask = background + bottom_outer_mask - elif bottom is False and top is False: - break - - all_mask = cv2.bitwise_or(top_outer_mask, bottom_outer_mask) - - for layer in data: - if layer['image'] is not None: - if layer['name'] != "body": - test_image = Image.new('RGBA', size, (0, 0, 0, 0)) - test_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) - mask_data = np.where(all_mask > 0, 255, 0).astype(np.uint8) - mask_alpha = Image.fromarray(mask_data) - cropped_image = Image.composite(test_image, 
Image.new("RGBA", test_image.size, (255, 255, 255, 0)), mask_alpha) - base_image.paste(test_image, (0, 0), cropped_image) # test_image 已经按照坐标贴到最大宽值的图片上 坐着这里坐标为00 - else: - base_image.paste(layer['image'], (layer['adaptive_position'][1], layer['adaptive_position'][0]), layer['image']) - - result_image = base_image - - image_data = io.BytesIO() - result_image.save(image_data, format='PNG') - image_data.seek(0) - - # oss upload - image_bytes = image_data.read() - bucket_name = "aida-results" - object_name = f'result_{generate_uuid()}.png' - oss_upload_image(bucket=bucket_name, object_name=object_name, image_bytes=image_bytes) - return f"{bucket_name}/{object_name}" - # return f"aida-results/{minio_client.put_object('aida-results', f'result_{generate_uuid()}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" - - # object_name = f'result_{generate_uuid()}.png' - # response = s3.put_object(Bucket="aida-results", Key=object_name, Body=data, ContentType='image/png') - # object_url = f"aida-results/{object_name}" - # if response['ResponseMetadata']['HTTPStatusCode'] == 200: - # return object_url - # else: - # return "" - - except Exception as e: - logging.warning(f"synthesis runtime exception : {e}") - - -def publish_status(task_id, progress, result): - connection = pika.BlockingConnection(pika.ConnectionParameters('10.1.2.213')) - channel = connection.channel() - channel.queue_declare(queue='DesignBatch', durable=True) - message = {'task_id': task_id, 'progress': progress, "result": result} - channel.basic_publish(exchange='', - routing_key='DesignBatch', - body=json.dumps(message), - properties=pika.BasicProperties( - delivery_mode=2, - )) - connection.close() - - -@celery_app.task -def batch_design(objects_data, tasks_id, json_name): - object_response = [] - threads = [] - active_threads = 0 - lock = threading.Lock() - - def process_object(step, object): - nonlocal active_threads - basic = object['basic'] - items_response = 
{'layers': []} - if basic['single_overall'] == "overall": - item_results = [] - for item in object['items']: - item_results.append(process_item(item, basic)) - layers = [] - body_size = None - for item in item_results: - body_size = process_layer(item, layers) - layers = sorted(layers, key=lambda s: s.get("priority", float('inf'))) - - layers, new_size = update_base_size_priority(layers, body_size) - - for lay in layers: - items_response['layers'].append({ - 'image_category': lay['name'], - 'position': lay['position'], - 'priority': lay.get("priority", None), - 'resize_scale': lay['resize_scale'] if "resize_scale" in lay.keys() else None, - 'image_size': lay['image'] if lay['image'] is None else lay['image'].size, - 'gradient_string': lay['gradient_string'] if 'gradient_string' in lay.keys() else "", - 'mask_url': lay['mask_url'], - 'image_url': lay['image_url'] if 'image_url' in lay.keys() else None, - 'pattern_image_url': lay['pattern_image_url'] if 'pattern_image_url' in lay.keys() else None, - }) - items_response['synthesis_url'] = synthesis(layers, new_size, basic) - else: - item_result = process_item(object['items'][0], basic) - items_response['layers'].append({ - 'image_category': f"{item_result['name']}_front", - 'image_size': item_result['back_image'].size if item_result['back_image'] else None, - 'position': None, - 'priority': 0, - 'image_url': item_result['front_image_url'], - 'mask_url': item_result['mask_url'], - "gradient_string": item_result['gradient_string'] if 'gradient_string' in item_result.keys() else "", - 'pattern_image_url': item_result['pattern_image_url'] if 'pattern_image_url' in item_result.keys() else None, - }) - items_response['layers'].append({ - 'image_category': f"{item_result['name']}_back", - 'image_size': item_result['front_image'].size if item_result['front_image'] else None, - 'position': None, - 'priority': 0, - 'image_url': item_result['back_image_url'], - 'mask_url': item_result['mask_url'], - "gradient_string": 
item_result['gradient_string'] if 'gradient_string' in item_result.keys() else "", - 'pattern_image_url': item_result['pattern_image_url'] if 'pattern_image_url' in item_result.keys() else None, - }) - items_response['synthesis_url'] = synthesis_single(item_result['front_image'], item_result['back_image']) - - with lock: - object_response.append(items_response) - publish_status(tasks_id, step + 1, items_response) - active_threads -= 1 - - for step, object in enumerate(objects_data): - t = threading.Thread(target=process_object, args=(step, object)) - threads.append(t) - t.start() - with lock: - active_threads += 1 - - for t in threads: - t.join() - - oss_upload_json(object_response, json_name) - publish_status(tasks_id, "ok", json_name) - return object_response - - -if __name__ == '__main__': - object_data = { - "objects": [ - { - "basic": { - "body_point_test": { - "waistband_right": [ - 199, - 239 - ], - "hand_point_right": [ - 220, - 308 - ], - "waistband_left": [ - 113, - 239 - ], - "hand_point_left": [ - 92, - 310 - ], - "shoulder_left": [ - 99, - 111 - ], - "shoulder_right": [ - 214, - 111 - ] - }, - "layer_order": False, - "scale_bag": 0.7, - "scale_earrings": 0.16, - "self_template": True, - "single_overall": "overall", - "switch_category": "" - }, - "items": [ - { - "color": "195 195 196", - "icon": "none", - "image_id": 116207, - "offset": [ - 1, - 1 - ], - "path": "aida-sys-image/images/female/trousers/trousers_973.jpg", - "print": { - "element": { - "element_angle_list": [], - "element_path_list": [], - "element_scale_list": [], - "location": [] - }, - "overall": { - "location": [ - [ - 0.0, - 0.0 - ] - ], - "print_angle_list": [ - 0.0, - 0.0 - ], - "print_path_list": [], - "print_scale_list": [ - 0.0, - 0.0 - ] - }, - "single": { - "location": [], - "print_angle_list": [], - "print_path_list": [], - "print_scale_list": [] - } - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Trousers" - }, - { - "color": "203 204 202", - "icon": "none", - 
"image_id": 95825, - "offset": [ - 1, - 1 - ], - "path": "aida-sys-image/images/female/blouse/0902003606.jpg", - "print": { - "element": { - "element_angle_list": [], - "element_path_list": [], - "element_scale_list": [], - "location": [] - }, - "overall": { - "location": [ - [ - 0.0, - 0.0 - ] - ], - "print_angle_list": [ - 0.0, - 0.0 - ], - "print_path_list": [], - "print_scale_list": [ - 0.0, - 0.0 - ] - }, - "single": { - "location": [], - "print_angle_list": [], - "print_path_list": [], - "print_scale_list": [] - } - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Blouse" - }, - { - "body_path": "aida-sys-image/models/female/23ecb158-7b70-4468-a9d1-bac3ded9da62.png", - "image_id": 116612, - "offset": [ - 1, - 1 - ], - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Body" - } - ] - } - ], - "process_id": "9062885798571902" - } - X = batch_design(object_data['objects'], "123", "test.json") - print(X) diff --git a/app/service/design_batch/item.py b/app/service/design_batch/item.py index e10320d..cad1488 100644 --- a/app/service/design_batch/item.py +++ b/app/service/design_batch/item.py @@ -1,4 +1,4 @@ -from app.service.design_fast.pipeline import LoadImage, KeyPoint, Segmentation, Color, PrintPainting, Scaling, Split, LoadBodyImage, ContourDetection +from app.service.design_batch.pipeline import * class BaseItem: diff --git a/app/service/design_batch/pipeline/keypoint.py b/app/service/design_batch/pipeline/keypoint.py index 243cf4e..313a613 100644 --- a/app/service/design_batch/pipeline/keypoint.py +++ b/app/service/design_batch/pipeline/keypoint.py @@ -4,7 +4,7 @@ import numpy as np from pymilvus import MilvusClient from app.core.config import * -from app.service.design.utils.design_ensemble import get_keypoint_result +from app.service.design_batch.utils.design_ensemble import get_keypoint_result logger = logging.getLogger(__name__) diff --git a/app/service/design_batch/pipeline/loading.py b/app/service/design_batch/pipeline/loading.py index 0ce0dfa..8f02378 
100644 --- a/app/service/design_batch/pipeline/loading.py +++ b/app/service/design_batch/pipeline/loading.py @@ -1,9 +1,6 @@ -import io import logging import cv2 -import numpy as np -from PIL import Image from app.service.utils.new_oss_client import oss_get_image diff --git a/app/service/design_batch/pipeline/scale.py b/app/service/design_batch/pipeline/scale.py index 732fcd8..1908a9c 100644 --- a/app/service/design_batch/pipeline/scale.py +++ b/app/service/design_batch/pipeline/scale.py @@ -18,7 +18,7 @@ class Scaling: - int(result['body_point_test'][result['keypoint'] + '_right'][0])) ** 2 + 1 ) - + if distance_clo == 0: result['scale'] = 1 else: diff --git a/app/service/design_batch/pipeline/segmentation.py b/app/service/design_batch/pipeline/segmentation.py index 802487f..cba3446 100644 --- a/app/service/design_batch/pipeline/segmentation.py +++ b/app/service/design_batch/pipeline/segmentation.py @@ -5,7 +5,7 @@ import cv2 import numpy as np from app.core.config import SEG_CACHE_PATH -from app.service.design.utils.design_ensemble import get_seg_result +from app.service.design_batch.utils.design_ensemble import get_seg_result from app.service.utils.new_oss_client import oss_get_image logger = logging.getLogger() @@ -48,7 +48,7 @@ class Segmentation: @staticmethod def save_seg_result(seg_result, image_id): - file_path = f"{SEG_CACHE_PATH}{image_id}.npy" + file_path = f"seg_cache/{image_id}.npy" try: np.save(file_path, seg_result) logger.info(f"保存成功 :{os.path.abspath(file_path)}") @@ -57,7 +57,7 @@ class Segmentation: @staticmethod def load_seg_result(image_id): - file_path = f"{SEG_CACHE_PATH}{image_id}.npy" + file_path = f"seg_cache/{image_id}.npy" logger.info(f"load seg file name is :{SEG_CACHE_PATH}{image_id}.npy") try: seg_result = np.load(file_path) diff --git a/app/service/design_batch/pipeline/split.py b/app/service/design_batch/pipeline/split.py index 35605b8..5dbcef5 100644 --- a/app/service/design_batch/pipeline/split.py +++ 
b/app/service/design_batch/pipeline/split.py @@ -7,8 +7,8 @@ from PIL import Image from cv2 import cvtColor, COLOR_BGR2RGBA from app.core.config import AIDA_CLOTHING -from app.service.design.utils.conversion_image import rgb_to_rgba -from app.service.design_fast.utils.upload_image import upload_png_mask +from app.service.design_batch.utils.conversion_image import rgb_to_rgba +from app.service.design_batch.utils.upload_image import upload_png_mask from app.service.utils.generate_uuid import generate_uuid from app.service.utils.new_oss_client import oss_upload_image diff --git a/app/service/design_batch/service.py b/app/service/design_batch/service.py index e69de29..ca6908e 100644 --- a/app/service/design_batch/service.py +++ b/app/service/design_batch/service.py @@ -0,0 +1,11 @@ +import json + +from app.service.design_batch.design_batch_celery import batch_design +from app.service.design_batch.utils.MQ import publish_status + + +async def start_design_batch_generate(data, file): + generate_clothes_task = batch_design.delay(json.loads(file.decode())['objects'], data.total, data.tasks_id) + print(generate_clothes_task) + publish_status(data.tasks_id, "0/100", "") + return {"task_id": data.tasks_id} From 84d207087b7ea356887f1239c63a9048450f5ae2 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 26 Sep 2024 14:19:32 +0800 Subject: [PATCH 087/103] =?UTF-8?q?feat=20=20=E4=BB=A3=E7=A0=81=E6=95=B4?= =?UTF-8?q?=E7=90=86=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_design.py | 5 +- app/service/design/design_batch/items/item.py | 281 ------- .../design_batch/items/utils/upload_image.py | 55 -- app/service/design/fastapi_request.json | 771 ------------------ .../design/service_design_batch_generate.py | 27 - app/service/design/test.py | 15 - 6 files changed, 2 insertions(+), 1152 deletions(-) delete mode 100644 app/service/design/design_batch/items/item.py delete mode 100644 
app/service/design/design_batch/items/utils/upload_image.py delete mode 100644 app/service/design/fastapi_request.json delete mode 100644 app/service/design/service_design_batch_generate.py delete mode 100644 app/service/design/test.py diff --git a/app/api/api_design.py b/app/api/api_design.py index 389e43e..aa9fe43 100644 --- a/app/api/api_design.py +++ b/app/api/api_design.py @@ -6,9 +6,8 @@ from fastapi import APIRouter, HTTPException, UploadFile, File, Form from app.schemas.design import DesignModel, DesignProgressModel, ModelProgressModel, DBGConfigModel from app.schemas.response_template import ResponseModel +from app.service.design.model_process_service import model_transpose from app.service.design_batch.service import start_design_batch_generate -# from app.service.design.model_process_service import model_transpose -# from app.service.design.service_design_batch_generate import start_design_batch_generate from app.service.design_fast.design_generate import design_generate from app.service.design_fast.utils.redis_utils import Redis @@ -237,7 +236,7 @@ def model_process(request_data: ModelProgressModel): try: logger.info(f"model_process request item is : @@@@@@:{json.dumps(request_data.dict())}") - # data = model_transpose(image_path=request_data.model_path) + data = model_transpose(image_path=request_data.model_path) logger.info(f"model_process response @@@@@@:{json.dumps(data)}") except Exception as e: logger.warning(f"model_process Run Exception @@@@@@:{e}") diff --git a/app/service/design/design_batch/items/item.py b/app/service/design/design_batch/items/item.py deleted file mode 100644 index 1e51fe8..0000000 --- a/app/service/design/design_batch/items/item.py +++ /dev/null @@ -1,281 +0,0 @@ -import time -from concurrent.futures import ThreadPoolExecutor -from pprint import pprint - -import cv2 - -from app.core.config import PRIORITY_DICT -from app.service.design.design_batch.items.pipeline import LoadImage, KeyPoint, Segmentation, Color, PrintPainting, 
Scaling, Split, ContourDetection, LoadBodyImage -from app.service.design.utils.synthesis_item import synthesis, synthesis_single - - -class BaseItem: - def __init__(self, data, basic): - self.result = data.copy() - self.result['name'] = data['type'].lower() - self.result.pop("type") - self.result.update(basic) - - -class TopItem(BaseItem): - def __init__(self, data, basic): - super().__init__(data, basic) - self.top_pipeline = [ - LoadImage(), - KeyPoint(), - Segmentation(), - Color(), - PrintPainting(), - Scaling(), - Split() - ] - - def process(self): - for item in self.top_pipeline: - self.result = item(self.result) - return self.result - - -class BottomItem(BaseItem): - def __init__(self, data, basic): - super().__init__(data, basic) - self.bottom_pipeline = [ - LoadImage(), - KeyPoint(), - ContourDetection(), - # Segmentation(), - Color(), - PrintPainting(), - Scaling(), - Split() - ] - - def process(self): - for item in self.bottom_pipeline: - self.result = item(self.result) - return self.result - - -class BodyItem(BaseItem): - def __init__(self, data, basic): - super().__init__(data, basic) - self.top_pipeline = [ - LoadBodyImage(), - ] - - def process(self): - for item in self.top_pipeline: - self.result = item(self.result) - return self.result - - -def process_item(item, basic): - if item['type'] == "Body": - body_server = BodyItem(data=item, basic=basic) - item_data = body_server.process() - elif item['type'].lower() in ['blouse', 'outwear', 'dress', 'tops']: - top_server = TopItem(data=item, basic=basic) - item_data = top_server.process() - else: - bottom_server = BottomItem(data=item, basic=basic) - item_data = bottom_server.process() - return item_data - - -def calculate_start_point(keypoint_type, scale, clothes_point, body_point, offset, resize_scale): - """ - Align left - Args: - keypoint_type: string, "waistband" | "shoulder" | "ear_point" - scale: float - clothes_point: dict{'left': [x1, y1, z1], 'right': [x2, y2, z2]} - body_point: dict, 
containing keypoint data of body figure - - Returns: - start_point: tuple (x', y') - x' = y_body - y1 * scale + offset - y' = x_body - x1 * scale + offset - - """ - side_indicator = f'{keypoint_type}_left' - start_point = ( - int(body_point[side_indicator][1] + offset[1] - int(clothes_point[side_indicator][0]) * scale), # y - int(body_point[side_indicator][0] + offset[0] - int(clothes_point[side_indicator][1]) * scale) # x - ) - return start_point - - -# 服装图层给数据组装 -def organize_clothing(layer): - # 起始坐标 - start_point = calculate_start_point(layer['keypoint'], layer['scale'], layer['clothes_keypoint'], layer['body_point_test'], layer["offset"], layer["resize_scale"]) - # 前片数据 - front_layer = dict(priority=layer['priority'] if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_front', None), - name=f'{layer["name"].lower()}_front', - image=layer["front_image"], - # mask_image=layer['front_mask_image'], - image_url=layer['front_image_url'], - mask_url=layer['mask_url'], - sacle=layer['scale'], - clothes_keypoint=layer['clothes_keypoint'], - position=start_point, - resize_scale=layer["resize_scale"], - mask=cv2.resize(layer['mask'], layer["front_image"].size), - gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() else "", - pattern_image_url=layer['pattern_image_url'], - pattern_image=layer['pattern_image'] - - ) - # 后片数据 - back_layer = dict(priority=-layer.get("priority", 0) if layer.get("layer_order", False) else PRIORITY_DICT.get(f'{layer["name"].lower()}_back', None), - name=f'{layer["name"].lower()}_back', - image=layer["back_image"], - # mask_image=layer['back_mask_image'], - image_url=layer['back_image_url'], - mask_url=layer['mask_url'], - sacle=layer['scale'], - clothes_keypoint=layer['clothes_keypoint'], - position=start_point, - resize_scale=layer["resize_scale"], - mask=cv2.resize(layer['mask'], layer["front_image"].size), - gradient_string=layer['gradient_string'] if 'gradient_string' in layer.keys() 
else "", - pattern_image_url=layer['pattern_image_url'], - ) - return front_layer, back_layer - - -# 模特图层给数据组装 -def organize_body(layer): - body_layer = dict(priority=0, - name=layer["name"].lower(), - image=layer['body_image'], - image_url=layer['body_path'], - mask_image=None, - mask_url=None, - sacle=1, - # mask=layer['body_mask'], - position=(0, 0)) - return body_layer - - -def process_layer(item, layers): - if item['name'] == "mannequin": - body_layer = organize_body(item) - layers.append(body_layer) - return item['body_image'].size - else: - front_layer, back_layer = organize_clothing(item) - layers.append(front_layer) - layers.append(back_layer) - - -def process_object(object_data): - basic = object_data['basic'] - items_response = {'layers': []} - - if basic['single_overall'] == "overall": - item_results = [process_item(item, basic) for item in object_data['items']] - layers = [] - futures = [] - body_size = None - for item in item_results: - futures = [process_layer(item, layers)] - for future in futures: - if future is not None: - body_size = future - layers = sorted(layers, key=lambda s: s.get("priority", float('inf'))) - - layers, new_size = update_base_size_priority(layers, body_size) - - for lay in layers: - items_response['layers'].append({ - 'image_category': lay['name'], - 'position': lay['position'], - 'priority': lay.get("priority", None), - 'resize_scale': lay['resize_scale'] if "resize_scale" in lay.keys() else None, - 'image_size': lay['image'] if lay['image'] is None else lay['image'].size, - 'gradient_string': lay['gradient_string'] if 'gradient_string' in lay.keys() else "", - 'mask_url': lay['mask_url'], - 'image_url': lay['image_url'] if 'image_url' in lay.keys() else None, - 'pattern_image_url': lay['pattern_image_url'] if 'pattern_image_url' in lay.keys() else None, - - # 'image': lay['image'], - # 'mask_image': lay['mask_image'], - }) - items_response['synthesis_url'] = synthesis(layers, new_size, basic) - else: - item_results = 
process_item(object_data['items'][0], basic) - items_response['layers'].append({ - 'image_category': f"{item_results['name']}_front", - 'image_size': item_results['back_image'].size if item_results['back_image'] else None, - 'position': None, - 'priority': 0, - 'image_url': item_results['front_image_url'], - 'mask_url': item_results['mask_url'], - "gradient_string": item_results['gradient_string'] if 'gradient_string' in item_results.keys() else "", - 'pattern_image_url': item_results['pattern_image_url'] if 'pattern_image_url' in item_results.keys() else None, - - }) - items_response['layers'].append({ - 'image_category': f"{item_results['name']}_back", - 'image_size': item_results['front_image'].size if item_results['front_image'] else None, - 'position': None, - 'priority': 0, - 'image_url': item_results['back_image_url'], - 'mask_url': item_results['mask_url'], - "gradient_string": item_results['gradient_string'] if 'gradient_string' in item_results.keys() else "", - 'pattern_image_url': item_results['pattern_image_url'] if 'pattern_image_url' in item_results.keys() else None, - - }) - items_response['synthesis_url'] = synthesis_single(item_results['front_image'], item_results['back_image']) - return items_response - - -def update_base_size_priority(layers, size): - # 计算透明背景图片的宽度 - min_x = min(info['position'][1] for info in layers) - x_list = [] - for info in layers: - if info['image'] is not None: - x_list.append(info['position'][1] + info['image'].width) - max_x = max(x_list) - new_width = max_x - min_x - new_height = 700 - # 更新坐标 - for info in layers: - info['adaptive_position'] = (info['position'][0], info['position'][1] - min_x) - return layers, (new_width, new_height) - - -def run(): - object = {"objects": [{"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 
0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 116441, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/outwear_p3139.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 81518, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0628000071.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 
65687, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/outwear_746.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 90051, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0628000864.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 67420, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0825001648.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": 
[], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 90354, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0628001300.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 67420, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/0825001648.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], 
"print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}, {"basic": {"body_point_test": {"waistband_right": [199, 239], "hand_point_right": [220, 308], "waistband_left": [113, 239], "hand_point_left": [92, 310], "shoulder_left": [99, 111], "shoulder_right": [214, 111]}, "layer_order": False, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "single", "switch_category": "Outwear"}, "items": [ - {"color": "189 112 112", "icon": "none", "image_id": 101477, "offset": [1, 1], "path": "aida-sys-image/images/female/outwear/903000063.jpg", "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [[0.0, 0.0]], "print_angle_list": [0.0, 0.0], "print_path_list": [], "print_scale_list": [0.0, 0.0]}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, - "resize_scale": [1.0, 1.0], "type": "Outwear"}]}], "process_id": "3615898424593104"} - - object_result = {} - with ThreadPoolExecutor() as executor: - results = list(executor.map(process_object, object['objects'])) - for i, result in enumerate(results): - object_result[i] = result - - pprint(object_result) - - -if __name__ == '__main__': - start_time = time.time() - run() - print(time.time() - start_time) diff --git a/app/service/design/design_batch/items/utils/upload_image.py b/app/service/design/design_batch/items/utils/upload_image.py deleted file mode 100644 index 9039ce7..0000000 --- a/app/service/design/design_batch/items/utils/upload_image.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python -# -*- coding: UTF-8 -*- -""" -@Project :trinity_client -@File :upload_image.py -@Author :周成融 -@Date :2023/8/28 13:49:20 -@detail : -""" -import io -import logging - -import cv2 - -from app.core.config import * -from app.service.utils.decorator import RunTime -from app.service.utils.oss_client import 
oss_upload_image - - -# @RunTime -def upload_png_mask(front_image, object_name, mask=None): - try: - mask_url = None - if mask is not None: - mask_inverted = cv2.bitwise_not(mask) - # 将掩模的3通道转换为4通道,白色部分不透明,黑色部分透明 - rgba_image = cv2.cvtColor(mask_inverted, cv2.COLOR_BGR2BGRA) - rgba_image[rgba_image[:, :, 0] == 0] = [0, 0, 0, 0] - # image_bytes = io.BytesIO() - # image_bytes.write(cv2.imencode('.png', rgba_image)[1].tobytes()) - # image_bytes.seek(0) - # mask_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'mask/mask_{object_name}.png', image_bytes, len(image_bytes.getvalue()), content_type='image/png').object_name}" - # oss upload #################### - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"mask/mask_{object_name}.png", image_bytes=cv2.imencode('.png', rgba_image)[1]) - mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" - - image_data = io.BytesIO() - front_image.save(image_data, format='PNG') - image_data.seek(0) - image_bytes = image_data.read() - # image_url = f"{AIDA_CLOTHING}/{minio_client.put_object('aida-clothing', f'image/image_{object_name}.png', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" - req = oss_upload_image(bucket=AIDA_CLOTHING, object_name=f"image/image_{object_name}.png", image_bytes=image_bytes) - image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" - return front_image, image_url, mask_url - except Exception as e: - logging.warning(f"upload_png_mask runtime exception : {e}") - - -# @RunTime -# def upload_png_mask(front_image, object_name, mask=None): -# mask_url = None -# if mask is not None: -# mask_url = f"{AIDA_CLOTHING}/mask/mask_{object_name}.png" -# image_url = f"{AIDA_CLOTHING}/image/image_{object_name}.png" -# return front_image, image_url, mask_url diff --git a/app/service/design/fastapi_request.json b/app/service/design/fastapi_request.json deleted file mode 100644 index 8c27a56..0000000 --- a/app/service/design/fastapi_request.json +++ 
/dev/null @@ -1,771 +0,0 @@ -{ - "objects": [ - { - "basic": { - "body_point_test": { - "waistband_right": [ - 336, - 264 - ], - "hand_point_right": [ - 350, - 303 - ], - "waistband_left": [ - 245, - 274 - ], - "hand_point_left": [ - 219, - 315 - ], - "shoulder_left": [ - 227, - 155 - ], - "shoulder_right": [ - 338, - 149 - ] - }, - "layer_order": false, - "scale_bag": 0.7, - "scale_earrings": 0.16, - "self_template": true, - "single_overall": "overall", - "switch_category": "" - }, - "items": [ - { - "businessId": 493827, - "color": "127 61 21", - "elementId": 493827, - "icon": "none", - "image_id": 110201, - "offset": [ - 1, - 1 - ], - "path": "aida-users/31/sketch/62302527-2910-4740-808d-2cb8221daa34-3-31.png", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Dress" - }, - { - "body_path": "aida-users/31/models/female/845046c7-4f62-4f54-a4a9-c26d49c6969335b5b3a9-d335-4871-a46c-3cc3caf07da259629dfd1f1f555a2e2a9def7e719366.png", - "image_id": 82966, - "offset": [ - 1, - 1 - ], - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Body" - } - ] - }, - { - "basic": { - "body_point_test": { - "waistband_right": [ - 336, - 264 - ], - "hand_point_right": [ - 350, - 303 - ], - "waistband_left": [ - 245, - 274 - ], - "hand_point_left": [ - 219, - 315 - ], - "shoulder_left": [ - 227, - 155 - ], - "shoulder_right": [ - 338, - 149 - ] - }, - "layer_order": false, - "scale_bag": 0.7, - "scale_earrings": 0.16, - "self_template": true, - "single_overall": "overall", - "switch_category": "" - }, - "items": [ - { - "color": "27 25 23", - "icon": "none", - "image_id": 110202, - "offset": [ - 1, - 1 - ], - "path": "aida-sys-image/images/female/skirt/0916000602.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Skirt" - }, - { - "businessId": 493825, - "color": "229 214 200", - "elementId": 493825, - "icon": "none", - "image_id": 107101, - "offset": [ - 1, - 
1 - ], - "path": "aida-users/31/sketchboard/female/Blouse/de8f5656-d7ae-4642-bc90-f7f9d85da09b.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Blouse" - }, - { - "businessId": 493824, - "color": "76 124 124", - "elementId": 493824, - "icon": "none", - "image_id": 104522, - "offset": [ - 1, - 1 - ], - "path": "aida-users/31/sketch/3e82214a-0191-11ef-96d2-b48351119060_1.png", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Outwear" - }, - { - "body_path": "aida-users/31/models/female/845046c7-4f62-4f54-a4a9-c26d49c6969335b5b3a9-d335-4871-a46c-3cc3caf07da259629dfd1f1f555a2e2a9def7e719366.png", - "image_id": 82966, - "offset": [ - 1, - 1 - ], - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Body" - } - ] - }, - { - "basic": { - "body_point_test": { - "waistband_right": [ - 336, - 264 - ], - "hand_point_right": [ - 350, - 303 - ], - "waistband_left": [ - 245, - 274 - ], - "hand_point_left": [ - 219, - 315 - ], - "shoulder_left": [ - 227, - 155 - ], - "shoulder_right": [ - 338, - 149 - ] - }, - "layer_order": false, - "scale_bag": 0.7, - "scale_earrings": 0.16, - "self_template": true, - "single_overall": "overall", - "switch_category": "" - }, - "items": [ - { - "color": "229 214 200", - "icon": "none", - "image_id": 110203, - "offset": [ - 1, - 1 - ], - "path": "aida-sys-image/images/female/blouse/0825001576.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Blouse" - }, - { - "color": "76 124 124", - "icon": "none", - "image_id": 96071, - "offset": [ - 1, - 1 - ], - "path": "aida-sys-image/images/female/skirt/903000097.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Skirt" - }, - { - "color": "209 125 29", - "icon": "none", - "image_id": 93798, - "offset": [ - 1, - 1 - ], - "path": 
"aida-sys-image/images/female/outwear/outwear_p4_561.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Outwear" - }, - { - "body_path": "aida-users/31/models/female/845046c7-4f62-4f54-a4a9-c26d49c6969335b5b3a9-d335-4871-a46c-3cc3caf07da259629dfd1f1f555a2e2a9def7e719366.png", - "image_id": 82966, - "offset": [ - 1, - 1 - ], - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Body" - } - ] - }, - { - "basic": { - "body_point_test": { - "waistband_right": [ - 336, - 264 - ], - "hand_point_right": [ - 350, - 303 - ], - "waistband_left": [ - 245, - 274 - ], - "hand_point_left": [ - 219, - 315 - ], - "shoulder_left": [ - 227, - 155 - ], - "shoulder_right": [ - 338, - 149 - ] - }, - "layer_order": false, - "scale_bag": 0.7, - "scale_earrings": 0.16, - "self_template": true, - "single_overall": "overall", - "switch_category": "" - }, - "items": [ - { - "businessId": 493824, - "color": "209 125 29", - "elementId": 493824, - "icon": "none", - "image_id": 104522, - "offset": [ - 1, - 1 - ], - "path": "aida-users/31/sketch/3e82214a-0191-11ef-96d2-b48351119060_1.png", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Outwear" - }, - { - "color": "118 123 115", - "icon": "none", - "image_id": 110204, - "offset": [ - 1, - 1 - ], - "path": "aida-sys-image/images/female/blouse/0902000457.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Blouse" - }, - { - "color": "118 123 115", - "icon": "none", - "image_id": 79259, - "offset": [ - 1, - 1 - ], - "path": "aida-sys-image/images/female/trousers/826000094.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Trousers" - }, - { - "body_path": "aida-users/31/models/female/845046c7-4f62-4f54-a4a9-c26d49c6969335b5b3a9-d335-4871-a46c-3cc3caf07da259629dfd1f1f555a2e2a9def7e719366.png", - 
"image_id": 82966, - "offset": [ - 1, - 1 - ], - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Body" - } - ] - }, - { - "basic": { - "body_point_test": { - "waistband_right": [ - 336, - 264 - ], - "hand_point_right": [ - 350, - 303 - ], - "waistband_left": [ - 245, - 274 - ], - "hand_point_left": [ - 219, - 315 - ], - "shoulder_left": [ - 227, - 155 - ], - "shoulder_right": [ - 338, - 149 - ] - }, - "layer_order": false, - "scale_bag": 0.7, - "scale_earrings": 0.16, - "self_template": true, - "single_overall": "overall", - "switch_category": "" - }, - "items": [ - { - "color": "127 61 21", - "icon": "none", - "image_id": 96038, - "offset": [ - 1, - 1 - ], - "path": "aida-sys-image/images/female/dress/0902003549.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Dress" - }, - { - "body_path": "aida-users/31/models/female/845046c7-4f62-4f54-a4a9-c26d49c6969335b5b3a9-d335-4871-a46c-3cc3caf07da259629dfd1f1f555a2e2a9def7e719366.png", - "image_id": 82966, - "offset": [ - 1, - 1 - ], - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Body" - } - ] - }, - { - "basic": { - "body_point_test": { - "waistband_right": [ - 336, - 264 - ], - "hand_point_right": [ - 350, - 303 - ], - "waistband_left": [ - 245, - 274 - ], - "hand_point_left": [ - 219, - 315 - ], - "shoulder_left": [ - 227, - 155 - ], - "shoulder_right": [ - 338, - 149 - ] - }, - "layer_order": false, - "scale_bag": 0.7, - "scale_earrings": 0.16, - "self_template": true, - "single_overall": "overall", - "switch_category": "" - }, - "items": [ - { - "businessId": 493822, - "color": "127 61 21", - "elementId": 493822, - "icon": "none", - "image_id": 62309, - "offset": [ - 1, - 1 - ], - "path": "aida-users/31/sketchboard/female/trousers/c37c2ea6-8955-4b40-8339-c737e672ca3d.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Trousers" - }, - { - "businessId": 493825, - "color": "118 123 
115", - "elementId": 493825, - "icon": "none", - "image_id": 107101, - "offset": [ - 1, - 1 - ], - "path": "aida-users/31/sketchboard/female/Blouse/de8f5656-d7ae-4642-bc90-f7f9d85da09b.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Blouse" - }, - { - "body_path": "aida-users/31/models/female/845046c7-4f62-4f54-a4a9-c26d49c6969335b5b3a9-d335-4871-a46c-3cc3caf07da259629dfd1f1f555a2e2a9def7e719366.png", - "image_id": 82966, - "offset": [ - 1, - 1 - ], - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Body" - } - ] - }, - { - "basic": { - "body_point_test": { - "waistband_right": [ - 336, - 264 - ], - "hand_point_right": [ - 350, - 303 - ], - "waistband_left": [ - 245, - 274 - ], - "hand_point_left": [ - 219, - 315 - ], - "shoulder_left": [ - 227, - 155 - ], - "shoulder_right": [ - 338, - 149 - ] - }, - "layer_order": false, - "scale_bag": 0.7, - "scale_earrings": 0.16, - "self_template": true, - "single_overall": "overall", - "switch_category": "" - }, - "items": [ - { - "businessId": 493826, - "color": "127 61 21", - "elementId": 493826, - "icon": "none", - "image_id": 107105, - "offset": [ - 1, - 1 - ], - "path": "aida-users/31/sketchboard/female/Skirt/58710352-6301-450d-b69a-fb2922b5429a.png", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Skirt" - }, - { - "color": "118 123 115", - "icon": "none", - "image_id": 79114, - "offset": [ - 1, - 1 - ], - "path": "aida-sys-image/images/female/blouse/903000169.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Blouse" - }, - { - "color": "229 214 200", - "icon": "none", - "image_id": 90573, - "offset": [ - 1, - 1 - ], - "path": "aida-sys-image/images/female/outwear/0628000541.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Outwear" - }, - { - "body_path": 
"aida-users/31/models/female/845046c7-4f62-4f54-a4a9-c26d49c6969335b5b3a9-d335-4871-a46c-3cc3caf07da259629dfd1f1f555a2e2a9def7e719366.png", - "image_id": 82966, - "offset": [ - 1, - 1 - ], - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Body" - } - ] - }, - { - "basic": { - "body_point_test": { - "waistband_right": [ - 336, - 264 - ], - "hand_point_right": [ - 350, - 303 - ], - "waistband_left": [ - 245, - 274 - ], - "hand_point_left": [ - 219, - 315 - ], - "shoulder_left": [ - 227, - 155 - ], - "shoulder_right": [ - 338, - 149 - ] - }, - "layer_order": false, - "scale_bag": 0.7, - "scale_earrings": 0.16, - "self_template": true, - "single_overall": "overall", - "switch_category": "" - }, - "items": [ - { - "color": "229 214 200", - "icon": "none", - "image_id": 110205, - "offset": [ - 1, - 1 - ], - "path": "aida-sys-image/images/female/trousers/0916000217.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Trousers" - }, - { - "businessId": 493825, - "color": "209 125 29", - "elementId": 493825, - "icon": "none", - "image_id": 107101, - "offset": [ - 1, - 1 - ], - "path": "aida-users/31/sketchboard/female/Blouse/de8f5656-d7ae-4642-bc90-f7f9d85da09b.jpg", - "print": { - "IfSingle": false, - "print_path_list": [] - }, - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Blouse" - }, - { - "body_path": "aida-users/31/models/female/845046c7-4f62-4f54-a4a9-c26d49c6969335b5b3a9-d335-4871-a46c-3cc3caf07da259629dfd1f1f555a2e2a9def7e719366.png", - "image_id": 82966, - "offset": [ - 1, - 1 - ], - "resize_scale": [ - 1.0, - 1.0 - ], - "type": "Body" - } - ] - } - ], - "process_id": "6878547032381675" -} \ No newline at end of file diff --git a/app/service/design/service_design_batch_generate.py b/app/service/design/service_design_batch_generate.py deleted file mode 100644 index a7b976f..0000000 --- a/app/service/design/service_design_batch_generate.py +++ /dev/null @@ -1,27 +0,0 @@ -import json - -import pika - 
-from app.service.design_batch.design_batch import batch_design - - -def publish_status(task_id, progress, result): - connection = pika.BlockingConnection(pika.ConnectionParameters('localhost')) - channel = connection.channel() - channel.queue_declare(queue='DesignBatch', durable=True) - message = {'task_id': task_id, 'progress': progress, "result": result} - print(message) - channel.basic_publish(exchange='', - routing_key='DesignBatch', - body=json.dumps(message), - properties=pika.BasicProperties( - delivery_mode=2, - )) - connection.close() - - -async def start_design_batch_generate(data, file): - generate_clothes_task = batch_design.delay(json.loads(file.decode())['objects'], data.total, data.tasks_id) - print(generate_clothes_task) - publish_status(data.tasks_id, "0/100", "") - return {"task_id": data.tasks_id} diff --git a/app/service/design/test.py b/app/service/design/test.py deleted file mode 100644 index 0235f0e..0000000 --- a/app/service/design/test.py +++ /dev/null @@ -1,15 +0,0 @@ -from app.service.design.service_design_batch_generate import design_batch_generate - -if __name__ == '__main__': - data = {"objects": [{"basic": {"body_point_test": {"waistband_right": [200, 241], "hand_point_right": [223, 297], "waistband_left": [112, 241], "hand_point_left": [92, 305], "shoulder_left": [99, 116], "shoulder_right": [215, 116]}, "layer_order": True, "scale_bag": 0.7, "scale_earrings": 0.16, "self_template": True, "single_overall": "overall", "switch_category": ""}, "items": [ - {"businessId": 270372, "color": "30 28 28", "image_id": 69780, "offset": [0, 0], "path": "aida-sys-image/images/female/trousers/0825000630.jpg", "seg_mask_url": "test/result.png", - "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], 
"print_scale_list": []}}, "priority": 10, "resize_scale": [1.0, 1.0], "type": "Trousers"}, - {"businessId": 270373, "color": "30 28 28", "image_id": 98243, "offset": [0, 0], "path": "aida-sys-image/images/female/blouse/0902003811.jpg", "seg_mask_url": "test/result.png", - "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, "priority": 11, "resize_scale": [1.0, 1.0], "type": "Blouse"}, - {"businessId": 270374, "color": "172 68 68", "image_id": 98244, "offset": [0, 0], "path": "aida-sys-image/images/female/outwear/0825000410.jpg", "seg_mask_url": "test/result.png", - "print": {"element": {"element_angle_list": [], "element_path_list": [], "element_scale_list": [], "location": []}, "overall": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}, "single": {"location": [], "print_angle_list": [], "print_path_list": [], "print_scale_list": []}}, "priority": 12, "resize_scale": [1.0, 1.0], "type": "Outwear"}, - {"body_path": "aida-sys-image/models/female/5bdfe7ca-64eb-44e4-b03d-8e517520c795.png", "image_id": 96090, "type": "Body"}]}], "process_id": "83"} - total_steps = 1 - task_id = 1 - design_batch_generate.delay(data['objects'], total_steps, task_id) - # publish_status(task_id="0/100", progress=100) From d965352c205c35ced3c05891506a27bc43d9131d Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 26 Sep 2024 14:23:54 +0800 Subject: [PATCH 088/103] =?UTF-8?q?feat=20=20=E4=BB=A3=E7=A0=81=E6=95=B4?= =?UTF-8?q?=E7=90=86=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_fast/pipeline/keypoint.py | 2 ++ app/service/design_fast/pipeline/segmentation.py | 2 ++ 2 files changed, 4 insertions(+) diff --git 
a/app/service/design_fast/pipeline/keypoint.py b/app/service/design_fast/pipeline/keypoint.py index 45debc2..dd2ebe5 100644 --- a/app/service/design_fast/pipeline/keypoint.py +++ b/app/service/design_fast/pipeline/keypoint.py @@ -5,6 +5,7 @@ from pymilvus import MilvusClient from app.core.config import * from app.service.design_fast.utils.design_ensemble import get_keypoint_result +from app.service.utils.decorator import ClassCallRunTime logger = logging.getLogger(__name__) @@ -16,6 +17,7 @@ class KeyPoint: def get_name(cls): return cls.name + @ClassCallRunTime def __call__(self, result): if result['name'] in ['blouse', 'skirt', 'dress', 'outwear', 'trousers', 'tops', 'bottoms']: # 查询是否有数据 且类别相同 相同则直接读 不同则推理后更新 # result['clothes_keypoint'] = self.infer_keypoint_result(result) diff --git a/app/service/design_fast/pipeline/segmentation.py b/app/service/design_fast/pipeline/segmentation.py index 686e7b5..3884a48 100644 --- a/app/service/design_fast/pipeline/segmentation.py +++ b/app/service/design_fast/pipeline/segmentation.py @@ -6,6 +6,7 @@ import numpy as np from app.core.config import SEG_CACHE_PATH from app.service.design_fast.utils.design_ensemble import get_seg_result +from app.service.utils.decorator import ClassCallRunTime from app.service.utils.new_oss_client import oss_get_image logger = logging.getLogger() @@ -15,6 +16,7 @@ class Segmentation: def __init__(self, minio_client): self.minio_client = minio_client + @ClassCallRunTime def __call__(self, result): if "seg_mask_url" in result.keys() and result['seg_mask_url'] != "": seg_mask = oss_get_image(oss_client=self.minio_client, bucket=result['seg_mask_url'].split('/')[0], object_name=result['seg_mask_url'][result['seg_mask_url'].find('/') + 1:], data_type="cv2") From 3f880a69a58692012accf0ee31e6579c84721ff4 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 26 Sep 2024 14:26:30 +0800 Subject: [PATCH 089/103] =?UTF-8?q?feat=20=20=E4=BB=A3=E7=A0=81=E6=95=B4?= =?UTF-8?q?=E7=90=86=20fix?= MIME-Version: 
1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_fast/pipeline/keypoint.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/service/design_fast/pipeline/keypoint.py b/app/service/design_fast/pipeline/keypoint.py index dd2ebe5..d46eb33 100644 --- a/app/service/design_fast/pipeline/keypoint.py +++ b/app/service/design_fast/pipeline/keypoint.py @@ -5,7 +5,7 @@ from pymilvus import MilvusClient from app.core.config import * from app.service.design_fast.utils.design_ensemble import get_keypoint_result -from app.service.utils.decorator import ClassCallRunTime +from app.service.utils.decorator import ClassCallRunTime, RunTime logger = logging.getLogger(__name__) @@ -89,7 +89,7 @@ class KeyPoint: logger.info(f"save keypoint cache milvus error : {e}") return dict(zip(KEYPOINT_RESULT_TABLE_FIELD_SET, result.reshape(12, 2).astype(int).tolist())) - # @ RunTime + @RunTime def keypoint_cache(self, result, site): try: client = MilvusClient(uri=MILVUS_URL, token=MILVUS_TOKEN, db_name=MILVUS_ALIAS) From 8968e914c9eee27dd7898d6ebc17bdd736f4b671 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 26 Sep 2024 14:28:35 +0800 Subject: [PATCH 090/103] =?UTF-8?q?feat=20=20=E4=BB=A3=E7=A0=81=E6=95=B4?= =?UTF-8?q?=E7=90=86=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_fast/pipeline/keypoint.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/service/design_fast/pipeline/keypoint.py b/app/service/design_fast/pipeline/keypoint.py index d46eb33..7ef48a0 100644 --- a/app/service/design_fast/pipeline/keypoint.py +++ b/app/service/design_fast/pipeline/keypoint.py @@ -23,7 +23,8 @@ class KeyPoint: # result['clothes_keypoint'] = self.infer_keypoint_result(result) site = 'up' if result['name'] in ['blouse', 'outwear', 'dress', 'tops'] else 'down' # keypoint_cache = search_keypoint_cache(result["image_id"], site) - 
keypoint_cache = self.keypoint_cache(result, site) + # keypoint_cache = self.keypoint_cache(result, site) + keypoint_cache = False # 取消向量查询 直接过模型推理 # keypoint_cache = False if keypoint_cache is False: From 404cb079a9a95bb2e93d26d56a7470f64f21747f Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 26 Sep 2024 14:35:37 +0800 Subject: [PATCH 091/103] =?UTF-8?q?feat=20=20=E4=BB=A3=E7=A0=81=E6=95=B4?= =?UTF-8?q?=E7=90=86=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_fast/pipeline/keypoint.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/app/service/design_fast/pipeline/keypoint.py b/app/service/design_fast/pipeline/keypoint.py index 7ef48a0..7c4d37c 100644 --- a/app/service/design_fast/pipeline/keypoint.py +++ b/app/service/design_fast/pipeline/keypoint.py @@ -23,10 +23,9 @@ class KeyPoint: # result['clothes_keypoint'] = self.infer_keypoint_result(result) site = 'up' if result['name'] in ['blouse', 'outwear', 'dress', 'tops'] else 'down' # keypoint_cache = search_keypoint_cache(result["image_id"], site) - # keypoint_cache = self.keypoint_cache(result, site) - keypoint_cache = False - # 取消向量查询 直接过模型推理 + keypoint_cache = self.keypoint_cache(result, site) # keypoint_cache = False + # 取消向量查询 直接过模型推理 if keypoint_cache is False: keypoint_infer_result, site = self.infer_keypoint_result(result) result['clothes_keypoint'] = self.save_keypoint_cache(result["image_id"], keypoint_infer_result, site) From 19fa165a1e2e76be855416f6ea2fae65794e99b6 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Thu, 26 Sep 2024 14:36:42 +0800 Subject: [PATCH 092/103] =?UTF-8?q?feat=20=20=E4=BB=A3=E7=A0=81=E6=95=B4?= =?UTF-8?q?=E7=90=86=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_fast/pipeline/keypoint.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/service/design_fast/pipeline/keypoint.py 
b/app/service/design_fast/pipeline/keypoint.py index 7c4d37c..73d7586 100644 --- a/app/service/design_fast/pipeline/keypoint.py +++ b/app/service/design_fast/pipeline/keypoint.py @@ -23,8 +23,8 @@ class KeyPoint: # result['clothes_keypoint'] = self.infer_keypoint_result(result) site = 'up' if result['name'] in ['blouse', 'outwear', 'dress', 'tops'] else 'down' # keypoint_cache = search_keypoint_cache(result["image_id"], site) - keypoint_cache = self.keypoint_cache(result, site) - # keypoint_cache = False + # keypoint_cache = self.keypoint_cache(result, site) + keypoint_cache = False # 取消向量查询 直接过模型推理 if keypoint_cache is False: keypoint_infer_result, site = self.infer_keypoint_result(result) From 85942167f3f65db4caede62e4945a4d5b60cfecb Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 30 Sep 2024 10:57:12 +0800 Subject: [PATCH 093/103] =?UTF-8?q?feat=20=20image2sketch=20=E5=8F=98?= =?UTF-8?q?=E6=9B=B4=E6=A8=A1=E5=9E=8B=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 3 +- app/api/api_image2sketch.py | 13 +- app/schemas/image2sketch.py | 1 - .../image2sketch_2/download_checkpoints.py | 45 ++++++ app/service/image2sketch_2/server.py | 142 ++++++++++++++++++ app/service/utils/new_oss_client.py | 3 + 6 files changed, 200 insertions(+), 7 deletions(-) create mode 100644 app/service/image2sketch_2/download_checkpoints.py create mode 100644 app/service/image2sketch_2/server.py diff --git a/.gitignore b/.gitignore index 8fd7817..3f9e525 100644 --- a/.gitignore +++ b/.gitignore @@ -135,4 +135,5 @@ app/logs/* *.log /qodana.yaml .pth -.pytorch \ No newline at end of file +.pytorch +*.png \ No newline at end of file diff --git a/app/api/api_image2sketch.py b/app/api/api_image2sketch.py index cf8df13..24acf46 100644 --- a/app/api/api_image2sketch.py +++ b/app/api/api_image2sketch.py @@ -6,7 +6,7 @@ from fastapi import APIRouter, HTTPException from app.schemas.image2sketch import Image2SketchModel from 
app.schemas.response_template import ResponseModel -from app.service.image2sketch.server import Image2SketchServer +from app.service.image2sketch_2.server import processing_pipeline router = APIRouter() logger = logging.getLogger() @@ -25,8 +25,7 @@ def image2sketch(request_item: Image2SketchModel): 示例参数: { "image_url": "test/image2sketch/real_Dress_3200fecdc83d0c556c2bd96aedbd7fbf.jpg_Img.jpg", - "style_image_url": "test/image2sketch/style_3.png", - "default_style": "1", + "default_style": 0, "sketch_bucket": "test", "sketch_name": "image2sketch/test.png" } @@ -34,8 +33,12 @@ def image2sketch(request_item: Image2SketchModel): try: start_time = time.time() logger.info(f"image2sketch request item is : @@@@@@:{json.dumps(request_item.dict())}") - service = Image2SketchServer(request_item) - sketch_url = service.get_result() + sketch_url = processing_pipeline( + image_url=request_item.image_url, + thickness=request_item.default_style, + sketch_bucket=request_item.sketch_bucket, + sketch_name=request_item.sketch_name + ) logger.info(f"run time is : {time.time() - start_time}") except Exception as e: logger.warning(f"image2sketch Run Exception @@@@@@:{e}") diff --git a/app/schemas/image2sketch.py b/app/schemas/image2sketch.py index b4650b9..dbbbbb5 100644 --- a/app/schemas/image2sketch.py +++ b/app/schemas/image2sketch.py @@ -3,7 +3,6 @@ from pydantic import BaseModel class Image2SketchModel(BaseModel): image_url: str - style_image_url: str default_style: str sketch_bucket: str sketch_name: str diff --git a/app/service/image2sketch_2/download_checkpoints.py b/app/service/image2sketch_2/download_checkpoints.py new file mode 100644 index 0000000..9048c34 --- /dev/null +++ b/app/service/image2sketch_2/download_checkpoints.py @@ -0,0 +1,45 @@ +import os + +from minio import Minio +from minio.error import S3Error + +MINIO_URL = "www.minio.aida.com.hk:12024" +MINIO_ACCESS = 'vXKFLSJkYeEq2DrSZvkB' +MINIO_SECRET = 'uKTZT3x7C43WvPN9QTc99DiRkwddWZrG9Uh3JVlR' +MINIO_SECURE = True 
+# 配置MinIO客户端 +minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + + +# 下载函数 +def download_folder(bucket_name, folder_name, local_dir): + try: + # 确保本地目录存在 + if not os.path.exists(local_dir): + os.makedirs(local_dir) + + # 遍历MinIO中的文件 + objects = minio_client.list_objects(bucket_name, prefix=folder_name, recursive=True) + for obj in objects: + # 构造本地文件路径 + local_file_path = os.path.join(local_dir, obj.object_name[len(folder_name):]) + local_file_dir = os.path.dirname(local_file_path) + + # 确保本地目录存在 + if not os.path.exists(local_file_dir): + os.makedirs(local_file_dir) + + # 下载文件 + minio_client.fget_object(bucket_name, obj.object_name, local_file_path) + print(f"Downloaded {obj.object_name} to {local_file_path}") + + except S3Error as e: + print(f"Error occurred: {e}") + + +# 使用示例 +bucket_name = "test" # 替换成你的bucket名称 +folder_name = "checkpoints/lineart/" # 权重文件夹的路径 +local_dir = "app/service/image2sketch_2" # 替换成你希望保存到的本地目录 + +download_folder(bucket_name, folder_name, local_dir) diff --git a/app/service/image2sketch_2/server.py b/app/service/image2sketch_2/server.py new file mode 100644 index 0000000..93c9574 --- /dev/null +++ b/app/service/image2sketch_2/server.py @@ -0,0 +1,142 @@ +import cv2 +import numpy +import numpy as np +import torch +import torch.nn as nn +import torchvision.transforms as transforms +from PIL import Image + +from app.service.utils.oss_client import oss_get_image, oss_upload_image + +norm_layer = nn.InstanceNorm2d + +weights = [(0.7, 0.3), (0.5, 0.5), (0.3, 0.7), (0.1, 0.9), (0, 1)] +kernel = np.ones((3, 3), np.uint8) + + +class ResidualBlock(nn.Module): + def __init__(self, in_features): + super(ResidualBlock, self).__init__() + + conv_block = [nn.ReflectionPad2d(1), + nn.Conv2d(in_features, in_features, 3), + norm_layer(in_features), + nn.ReLU(inplace=True), + nn.ReflectionPad2d(1), + nn.Conv2d(in_features, in_features, 3), + norm_layer(in_features) + ] + + self.conv_block = 
nn.Sequential(*conv_block) + + def forward(self, x): + return x + self.conv_block(x) + + +class Generator(nn.Module): + def __init__(self, input_nc, output_nc, n_residual_blocks=9, sigmoid=True): + super(Generator, self).__init__() + + # Initial convolution block + model0 = [nn.ReflectionPad2d(3), + nn.Conv2d(input_nc, 64, 7), + norm_layer(64), + nn.ReLU(inplace=True)] + self.model0 = nn.Sequential(*model0) + + # Downsampling + model1 = [] + in_features = 64 + out_features = in_features * 2 + for _ in range(2): + model1 += [nn.Conv2d(in_features, out_features, 3, stride=2, padding=1), + norm_layer(out_features), + nn.ReLU(inplace=True)] + in_features = out_features + out_features = in_features * 2 + self.model1 = nn.Sequential(*model1) + + model2 = [] + # Residual blocks + for _ in range(n_residual_blocks): + model2 += [ResidualBlock(in_features)] + self.model2 = nn.Sequential(*model2) + + # Upsampling + model3 = [] + out_features = in_features // 2 + for _ in range(2): + model3 += [nn.ConvTranspose2d(in_features, out_features, 3, stride=2, padding=1, output_padding=1), + norm_layer(out_features), + nn.ReLU(inplace=True)] + in_features = out_features + out_features = in_features // 2 + self.model3 = nn.Sequential(*model3) + + # Output layer + model4 = [nn.ReflectionPad2d(3), + nn.Conv2d(64, output_nc, 7)] + if sigmoid: + model4 += [nn.Sigmoid()] + + self.model4 = nn.Sequential(*model4) + + def forward(self, x, cond=None): + out = self.model0(x) + out = self.model1(out) + out = self.model2(out) + out = self.model3(out) + out = self.model4(out) + + return out + + +model1 = Generator(3, 1, 3) +model1.load_state_dict(torch.load('service/image2sketch_2/model.pth', map_location=torch.device('cpu'))) +model1.eval() + + +def predict(input_img, width): + transform = transforms.Compose([transforms.Resize(width, Image.BICUBIC), transforms.ToTensor()]) + input_img = transform(input_img) + input_img = torch.unsqueeze(input_img, 0) + + with torch.no_grad(): + drawing = 
model1(input_img)[0].detach() + + drawing = transforms.ToPILImage()(drawing) + + # 转ndarray + drawing = numpy.array(drawing) + return drawing + + +def get_image(image_url): + image = oss_get_image(bucket=image_url.split('/')[0], object_name=image_url[image_url.find('/') + 1:], data_type="PIL") + image = image.convert('RGB') + width = image.size[0] + height = image.size[1] + return image, width, height + + +def processing_pipeline(image_url, thickness, sketch_bucket, sketch_name): + thickness = int(thickness) + # 提取sketch + image, width, height = get_image(image_url) + sketch_image = predict(image, width) + + # 设定线条粗细 + if thickness != 0: + dilated = cv2.erode(sketch_image, kernel, iterations=1) + # 将原图与膨胀后的图像进行混合,使用不同的权重 + sketch_image = cv2.addWeighted(sketch_image, weights[thickness][0], dilated, weights[thickness][1], 0) + + # 上传minio + image_bytes = cv2.imencode(".jpg", sketch_image)[1].tobytes() + req = oss_upload_image(bucket=sketch_bucket, object_name=sketch_name, image_bytes=image_bytes) + return f"{req.bucket_name}/{req.object_name}" + + +if __name__ == '__main__': + result_url = processing_pipeline("aida-users/89/relight_image/d5f0d967-f8e8-424d-98f9-a8ad8313deec-0-89.png", 1, "test", "test123.jpg") + print(result_url) diff --git a/app/service/utils/new_oss_client.py b/app/service/utils/new_oss_client.py index 28015e9..95a0fbf 100644 --- a/app/service/utils/new_oss_client.py +++ b/app/service/utils/new_oss_client.py @@ -9,6 +9,7 @@ from PIL import Image from minio import Minio from app.core.config import * +from app.service.utils.decorator import RunTime minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) @@ -39,6 +40,7 @@ http_client = urllib3.PoolManager( # 获取图片 +@RunTime def oss_get_image(oss_client, bucket, object_name, data_type): # cv2 默认全通道读取 image_object = None @@ -58,6 +60,7 @@ def oss_get_image(oss_client, bucket, object_name, data_type): return image_object +@RunTime def 
oss_upload_image(oss_client, bucket, object_name, image_bytes): req = None try: From 2d9b74a3987f9af60720edb466f544c9cd7bacbb Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 30 Sep 2024 11:03:32 +0800 Subject: [PATCH 094/103] =?UTF-8?q?feat=20=20image2sketch=20=E5=8F=98?= =?UTF-8?q?=E6=9B=B4=E6=A8=A1=E5=9E=8B=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_image2sketch.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/app/api/api_image2sketch.py b/app/api/api_image2sketch.py index 24acf46..d562bee 100644 --- a/app/api/api_image2sketch.py +++ b/app/api/api_image2sketch.py @@ -17,8 +17,7 @@ def image2sketch(request_item: Image2SketchModel): """ 创建一个具有以下参数的请求体: - **image_url**: 提取图片url - - **style_image_url**: 被模仿sketch图片url - - **default_style**: 默认风格 粗1,、中2、细3 + - **default_style**: 原始、 1、2、3、4、5 - **sketch_bucket**: sketch保存的bucket - **sketch_name**: sketch保存的object name From 5a5bb07f3bb4ac5a39762ce14beaed095654dbc0 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 30 Sep 2024 11:15:43 +0800 Subject: [PATCH 095/103] =?UTF-8?q?feat=20=20image2sketch=20=E5=8F=98?= =?UTF-8?q?=E6=9B=B4=E6=A8=A1=E5=9E=8B=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/image2sketch_2/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/service/image2sketch_2/server.py b/app/service/image2sketch_2/server.py index 93c9574..41c0278 100644 --- a/app/service/image2sketch_2/server.py +++ b/app/service/image2sketch_2/server.py @@ -92,7 +92,7 @@ class Generator(nn.Module): model1 = Generator(3, 1, 3) -model1.load_state_dict(torch.load('service/image2sketch_2/model.pth', map_location=torch.device('cpu'))) +model1.load_state_dict(torch.load('app/service/image2sketch_2/model.pth', map_location=torch.device('cpu'))) model1.eval() From 9b415fc502998a1401f9dcdab01a1f298ca16cf9 Mon Sep 17 00:00:00 2001 From: 
zhouchengrong Date: Thu, 3 Oct 2024 14:51:21 +0800 Subject: [PATCH 096/103] =?UTF-8?q?feat=20=20image2sketch=20=20triton?= =?UTF-8?q?=E9=83=A8=E7=BD=B2=20fix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_image2sketch.py | 17 ++---- app/service/lineart/service.py | 94 ++++++++++++++++++++++++++++++++++ 2 files changed, 98 insertions(+), 13 deletions(-) create mode 100644 app/service/lineart/service.py diff --git a/app/api/api_image2sketch.py b/app/api/api_image2sketch.py index d562bee..f630194 100644 --- a/app/api/api_image2sketch.py +++ b/app/api/api_image2sketch.py @@ -1,12 +1,10 @@ -import json import logging -import time from fastapi import APIRouter, HTTPException from app.schemas.image2sketch import Image2SketchModel from app.schemas.response_template import ResponseModel -from app.service.image2sketch_2.server import processing_pipeline +from app.service.lineart.service import LineArtService router = APIRouter() logger = logging.getLogger() @@ -30,16 +28,9 @@ def image2sketch(request_item: Image2SketchModel): } """ try: - start_time = time.time() - logger.info(f"image2sketch request item is : @@@@@@:{json.dumps(request_item.dict())}") - sketch_url = processing_pipeline( - image_url=request_item.image_url, - thickness=request_item.default_style, - sketch_bucket=request_item.sketch_bucket, - sketch_name=request_item.sketch_name - ) - logger.info(f"run time is : {time.time() - start_time}") + service = LineArtService(request_item) + result_url = service.get_result() except Exception as e: logger.warning(f"image2sketch Run Exception @@@@@@:{e}") raise HTTPException(status_code=404, detail=str(e)) - return ResponseModel(data=sketch_url) + return ResponseModel(data=result_url) diff --git a/app/service/lineart/service.py b/app/service/lineart/service.py new file mode 100644 index 0000000..e8fc78f --- /dev/null +++ b/app/service/lineart/service.py @@ -0,0 +1,94 @@ +import logging + +import cv2 +import mmcv 
+import numpy as np +import torch +import torch.nn.functional as F +import tritonclient.http as httpclient + +from app.core.config import DESIGN_MODEL_URL +from app.schemas.image2sketch import Image2SketchModel +from app.service.utils.oss_client import oss_get_image, oss_upload_image + +logger = logging.getLogger() + + +class LineArtService: + def __init__(self, request_item): + self.line_style = int(request_item.default_style) + self.image_url = request_item.image_url + self.sketch_bucket = request_item.sketch_bucket + self.sketch_name = request_item.sketch_name + self.weights = [(0.7, 0.3), (0.5, 0.5), (0.3, 0.7), (0.1, 0.9), (0, 1)] + + def get_result(self): + client = httpclient.InferenceServerClient(url=DESIGN_MODEL_URL) + input_image = self.get_image() + input_img, ori_shape = self.line_art_preprocess(input_image) + transformed_img = input_img.astype(np.float32) + + inputs = [httpclient.InferInput(f"input__0", transformed_img.shape, datatype="FP32")] + inputs[0].set_data_from_numpy(transformed_img, binary_data=True) + outputs = [httpclient.InferRequestedOutput(f"output__0", binary_data=True)] + results = client.infer(model_name=f"lineart", inputs=inputs, outputs=outputs) + inference_output1 = results.as_numpy("output__0") + line_art_result = self.line_art_postprocess(inference_output1, ori_shape) + + line_art_result = (line_art_result[0] * 255.0).round().astype(np.uint8) + if self.line_style != 0: + logger.info(self.line_style) + kernel = np.ones((3, 3), np.uint8) + dilated = cv2.erode(line_art_result, kernel, iterations=1) + # 将原图与膨胀后的图像进行混合,使用不同的权重 + line_art_result = cv2.addWeighted(line_art_result, self.weights[self.line_style][0], dilated, self.weights[self.line_style][1], 0) + # cv2.imshow("", line_art_result) + # cv2.waitKey(0) + return self.put_image(line_art_result) + + def get_image(self): + image = oss_get_image(bucket=self.image_url.split('/')[0], object_name=self.image_url[self.image_url.find('/') + 1:], data_type="cv2") + return image + + def 
put_image(self, image): + try: + image_bytes = cv2.imencode('.jpg', image)[1].tobytes() + oss_upload_image(bucket=self.sketch_bucket, object_name=self.sketch_name, image_bytes=image_bytes) + return f"{self.sketch_bucket}/{self.sketch_name}" + except Exception as e: + logger.warning(e) + + @staticmethod + def line_art_preprocess(image): + img = mmcv.imread(image) + ori_shape = img.shape[:2] + img_scale_w, img_scale_h = ori_shape + if ori_shape[0] > 1024: + img_scale_w = 1024 + if ori_shape[1] > 1024: + img_scale_h = 1024 + # 如果图片size任意一边 大于 1024, 则会resize 成1024 + if ori_shape != (img_scale_w, img_scale_h): + # mmcv.imresize(img, img_scale_h, img_scale_w) # 老代码 引以为戒!哈哈哈~ h和w写反了 + img = cv2.resize(img, (img_scale_h, img_scale_w)) + img = mmcv.imnormalize(img, mean=np.array([123.675, 116.28, 103.53]), std=np.array([58.395, 57.12, 57.375]), to_rgb=True) + preprocessed_img = np.expand_dims(img.transpose(2, 0, 1), axis=0) + return preprocessed_img, ori_shape + + @staticmethod + def line_art_postprocess(output, ori_shape): + seg_logit = F.interpolate(torch.tensor(output).float(), size=ori_shape, scale_factor=None, mode='bilinear', align_corners=False) + seg_pred = seg_logit.cpu().numpy() + return seg_pred[0] + + +if __name__ == '__main__': + request_item = Image2SketchModel( + image_url="aida-users/89/relight_image/d5f0d967-f8e8-424d-98f9-a8ad8313deec-0-89.png", + default_style="4", + sketch_bucket="test", + sketch_name="test123.jpg" + ) + service = LineArtService(request_item) + result_url = service.get_result() + print(result_url) From 571a84ac6882bb99485149e9eb55512623945f21 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 4 Oct 2024 17:37:42 +0800 Subject: [PATCH 097/103] =?UTF-8?q?feat=20fix=20=20=20=20=20=20seg=20?= =?UTF-8?q?=E7=BC=93=E5=AD=98=E9=80=BB=E8=BE=91=E6=96=B0=E5=A2=9E=20size?= =?UTF-8?q?=E5=88=A4=E6=96=AD=20=E9=81=BF=E5=85=8D=E5=87=BA=E7=8E=B0seg?= =?UTF-8?q?=E7=BC=93=E5=AD=98=E4=B8=8E=E5=9B=BE=E7=89=87=E5=A4=A7=E5=B0=8F?= 
=?UTF-8?q?=E4=B8=8D=E4=B8=80=E8=87=B4=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_fast/pipeline/segmentation.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/service/design_fast/pipeline/segmentation.py b/app/service/design_fast/pipeline/segmentation.py index 3884a48..8447514 100644 --- a/app/service/design_fast/pipeline/segmentation.py +++ b/app/service/design_fast/pipeline/segmentation.py @@ -36,7 +36,8 @@ class Segmentation: # 本地查询seg 缓存是否存在 _, seg_result = self.load_seg_result(result["image_id"]) result['seg_result'] = seg_result - if not _: + # 判断缓存和实际图片size是否相同 + if not _ or result["image"].shape[:2] != seg_result.shape: # 推理获得seg 结果 seg_result = get_seg_result(result["image_id"], result['image'])[0] self.save_seg_result(seg_result, result['image_id']) From 4e420f8ae8bb45325540550954f1f8938e320ec4 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 4 Oct 2024 17:43:08 +0800 Subject: [PATCH 098/103] =?UTF-8?q?feat=20fix=20=20=20=20=20=20sketch=20?= =?UTF-8?q?=E6=8F=90=E5=8F=96=E4=BF=AE=E5=A4=8D=E6=B2=A1=E6=9C=89=E6=96=87?= =?UTF-8?q?=E4=BB=B6=E5=90=8E=E7=BC=80=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/lineart/service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/service/lineart/service.py b/app/service/lineart/service.py index e8fc78f..34f29dc 100644 --- a/app/service/lineart/service.py +++ b/app/service/lineart/service.py @@ -53,8 +53,8 @@ class LineArtService: def put_image(self, image): try: image_bytes = cv2.imencode('.jpg', image)[1].tobytes() - oss_upload_image(bucket=self.sketch_bucket, object_name=self.sketch_name, image_bytes=image_bytes) - return f"{self.sketch_bucket}/{self.sketch_name}" + oss_upload_image(bucket=self.sketch_bucket, object_name=f"{self.sketch_name}.jpg", image_bytes=image_bytes) + return 
f"{self.sketch_bucket}/{self.sketch_name}.jpg" except Exception as e: logger.warning(e) From 102dd53d68da96ab0915819c0f56630c6daa3699 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 7 Oct 2024 10:24:01 +0800 Subject: [PATCH 099/103] =?UTF-8?q?feat=20fix=20=20=20=20=20=20sketch=20?= =?UTF-8?q?=E6=8F=90=E5=8F=96=E4=BF=AE=E5=A4=8D=E6=B2=A1=E6=9C=89=E6=96=87?= =?UTF-8?q?=E4=BB=B6=E5=90=8E=E7=BC=80=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/lineart/service.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/app/service/lineart/service.py b/app/service/lineart/service.py index 34f29dc..d822dfa 100644 --- a/app/service/lineart/service.py +++ b/app/service/lineart/service.py @@ -48,6 +48,11 @@ class LineArtService: def get_image(self): image = oss_get_image(bucket=self.image_url.split('/')[0], object_name=self.image_url[self.image_url.find('/') + 1:], data_type="cv2") + # 将其转换为彩色图像 + if len(image.shape) == 3 and image.shape[2] == 4: + image = cv2.cvtColor(image, cv2.COLOR_BGRA2BGR) + elif len(image.shape) == 2: + image = cv2.cvtColor(image, cv2.COLOR_GRAY2BGR) return image def put_image(self, image): @@ -84,10 +89,10 @@ class LineArtService: if __name__ == '__main__': request_item = Image2SketchModel( - image_url="aida-users/89/relight_image/d5f0d967-f8e8-424d-98f9-a8ad8313deec-0-89.png", + image_url="aida-collection-element/87/Sketchboard/555a443f-fd6b-4cd7-8147-b92d55513af0.png", default_style="4", sketch_bucket="test", - sketch_name="test123.jpg" + sketch_name="test123" ) service = LineArtService(request_item) result_url = service.get_result() From df5e01ea06c1910b56702e71cbd70f5ab252b7b3 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Mon, 7 Oct 2024 10:27:25 +0800 Subject: [PATCH 100/103] =?UTF-8?q?feat=20fix=20=20=20=20=20=20img2sketch?= =?UTF-8?q?=20=E6=97=A5=E5=BF=97=E4=BC=98=E5=8C=96?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit --- app/api/api_image2sketch.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/app/api/api_image2sketch.py b/app/api/api_image2sketch.py index f630194..8a9b4d9 100644 --- a/app/api/api_image2sketch.py +++ b/app/api/api_image2sketch.py @@ -1,3 +1,4 @@ +import json import logging from fastapi import APIRouter, HTTPException @@ -28,6 +29,7 @@ def image2sketch(request_item: Image2SketchModel): } """ try: + logger.info(f"image2sketch request item is : @@@@@@:{json.dumps(request_item.dict())}") service = LineArtService(request_item) result_url = service.get_result() except Exception as e: From e1677feb02acbced0791e07e1ab7fb3c48bbe2a5 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Tue, 8 Oct 2024 15:41:07 +0800 Subject: [PATCH 101/103] =?UTF-8?q?feat=20fix=20=20=20=20=20seg=20?= =?UTF-8?q?=E6=96=B0=E5=A2=9Epreview=5Fsubmit=E5=88=A4=E6=96=AD=20?= =?UTF-8?q?=E5=BD=93preview=E6=97=B6=E4=B8=8D=E4=BF=9D=E5=AD=98seg?= =?UTF-8?q?=E7=BC=93=E5=AD=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../design_fast/pipeline/segmentation.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/app/service/design_fast/pipeline/segmentation.py b/app/service/design_fast/pipeline/segmentation.py index 8447514..786cf03 100644 --- a/app/service/design_fast/pipeline/segmentation.py +++ b/app/service/design_fast/pipeline/segmentation.py @@ -33,14 +33,20 @@ class Segmentation: result['back_mask'] = np.array(green_mask, dtype=np.uint8) * 255 result['mask'] = result['front_mask'] + result['back_mask'] else: - # 本地查询seg 缓存是否存在 - _, seg_result = self.load_seg_result(result["image_id"]) - result['seg_result'] = seg_result - # 判断缓存和实际图片size是否相同 - if not _ or result["image"].shape[:2] != seg_result.shape: + # design信号判断 preview 不保存seg缓存 + if "preview_submit" in result.keys() and result['preview_submit'] == "preview": # 推理获得seg 结果 seg_result = get_seg_result(result["image_id"], 
result['image'])[0] - self.save_seg_result(seg_result, result['image_id']) + else: + # 本地查询seg 缓存是否存在 + _, seg_result = self.load_seg_result(result["image_id"]) + # 判断缓存和实际图片size是否相同 + if not _ or result["image"].shape[:2] != seg_result.shape: + # 推理获得seg 结果 + seg_result = get_seg_result(result["image_id"], result['image'])[0] + self.save_seg_result(seg_result, result['image_id']) + result['seg_result'] = seg_result + # 处理前片后片 temp_front = seg_result == 1.0 result['front_mask'] = (255 * (temp_front + 0).astype(np.uint8)) From 5333bb6d6932ce2a1d1656b3fd324858e998a203 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Tue, 8 Oct 2024 16:01:08 +0800 Subject: [PATCH 102/103] =?UTF-8?q?feat=20fix=20=20=20=20=20seg=20?= =?UTF-8?q?=E6=96=B0=E5=A2=9Epreview=5Fsubmit=E5=88=A4=E6=96=AD=20?= =?UTF-8?q?=E5=BD=93preview=E6=97=B6=E4=B8=8D=E4=BF=9D=E5=AD=98seg?= =?UTF-8?q?=E7=BC=93=E5=AD=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/service/design_fast/pipeline/segmentation.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/app/service/design_fast/pipeline/segmentation.py b/app/service/design_fast/pipeline/segmentation.py index 786cf03..ebf02b4 100644 --- a/app/service/design_fast/pipeline/segmentation.py +++ b/app/service/design_fast/pipeline/segmentation.py @@ -33,10 +33,16 @@ class Segmentation: result['back_mask'] = np.array(green_mask, dtype=np.uint8) * 255 result['mask'] = result['front_mask'] + result['back_mask'] else: - # design信号判断 preview 不保存seg缓存 + # preview 过模型 不缓存 if "preview_submit" in result.keys() and result['preview_submit'] == "preview": # 推理获得seg 结果 seg_result = get_seg_result(result["image_id"], result['image'])[0] + # submit 过模型 缓存 + elif "preview_submit" in result.keys() and result['preview_submit'] == "submit": + # 推理获得seg 结果 + seg_result = get_seg_result(result["image_id"], result['image'])[0] + self.save_seg_result(seg_result, result['image_id']) + # null 正常流程 加载本地缓存 
无缓存则过模型 else: # 本地查询seg 缓存是否存在 _, seg_result = self.load_seg_result(result["image_id"]) From 5e3a57060a2fd04a67b2a0350a7741053fa03ef2 Mon Sep 17 00:00:00 2001 From: zhouchengrong Date: Fri, 18 Oct 2024 15:53:40 +0800 Subject: [PATCH 103/103] =?UTF-8?q?feat=20fix=20=20=20=20=20=E5=AE=BD?= =?UTF-8?q?=E5=BA=A6=E8=87=AA=E9=80=82=E5=BA=94=E4=BC=9A=E9=BB=98=E8=AE=A4?= =?UTF-8?q?=E6=A8=A1=E7=89=B9=E9=AB=98=E5=BA=A6=E4=B8=BA700=20=E5=AF=BC?= =?UTF-8?q?=E8=87=B4=E7=BB=93=E6=9E=9C=E5=9B=BE=E9=AB=98=E5=BA=A6=E9=94=81?= =?UTF-8?q?=E5=AE=9A=E5=9C=A8700=20=E4=BF=AE=E5=A4=8Dbug=20=E7=8E=B0?= =?UTF-8?q?=E9=AB=98=E5=BA=A6=E7=94=B1=E6=A8=A1=E7=89=B9=E9=AB=98=E5=BA=A6?= =?UTF-8?q?=E6=9D=A5=E5=AE=9A?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/api/api_image2sketch.py | 2 +- app/service/design_fast/utils/synthesis_item.py | 4 +++- app/service/utils/oss_client.py | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/app/api/api_image2sketch.py b/app/api/api_image2sketch.py index 8a9b4d9..cac7652 100644 --- a/app/api/api_image2sketch.py +++ b/app/api/api_image2sketch.py @@ -25,7 +25,7 @@ def image2sketch(request_item: Image2SketchModel): "image_url": "test/image2sketch/real_Dress_3200fecdc83d0c556c2bd96aedbd7fbf.jpg_Img.jpg", "default_style": 0, "sketch_bucket": "test", - "sketch_name": "image2sketch/test.png" + "sketch_name": "image2sketch/area_fill_img.png" } """ try: diff --git a/app/service/design_fast/utils/synthesis_item.py b/app/service/design_fast/utils/synthesis_item.py index 272ab23..08bf4ec 100644 --- a/app/service/design_fast/utils/synthesis_item.py +++ b/app/service/design_fast/utils/synthesis_item.py @@ -185,12 +185,14 @@ def update_base_size_priority(layers, size): # 计算透明背景图片的宽度 min_x = min(info['position'][1] for info in layers) x_list = [] + new_height = 700 for info in layers: if info['image'] is not None: x_list.append(info['position'][1] + info['image'].width) + if info['name'] == 'mannequin': + 
new_height = info['image'].height max_x = max(x_list) new_width = max_x - min_x - new_height = 700 # 更新坐标 for info in layers: info['adaptive_position'] = (info['position'][0], info['position'][1] - min_x) diff --git a/app/service/utils/oss_client.py b/app/service/utils/oss_client.py index 5704ced..0bd9853 100644 --- a/app/service/utils/oss_client.py +++ b/app/service/utils/oss_client.py @@ -78,7 +78,7 @@ if __name__ == '__main__': # url = "aida-users/89/sketchboard/female/Dress/e6724ab7-8d3f-4677-abe0-c3e42ab7af85.jpeg" # url = "aida-users/87/print/956614a2-7e75-4fbe-9ed0-c1831e37a2c9-4-87.png" # url = "aida-users/89/single_logo/123-89.png" - url = "aida-clothing/mask/mask_f354afb5-6423-11ef-8b08-0826ae3ad6b3.png" + url = "aida-results/result_e2673d92-8d25-11ef-be24-0826ae3ad6b3.png" # url = "aida-collection-element/12148/Sketchboard/95ea577b-305b-4a62-b30a-39c0dd3ddb3f.png" read_type = "cv2" if read_type == "cv2":