diff --git a/.gitignore b/.gitignore index e2493c6..1bf82fb 100644 --- a/.gitignore +++ b/.gitignore @@ -125,10 +125,13 @@ seg_result/ seg_result *.png uwsgi -#*.yaml +*.yaml +*.yml +Dockerfile .conf app/logs app/logs/* *.log -*.jpg \ No newline at end of file +*.jpg +/qodana.yaml diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index fa30e10..0000000 --- a/Dockerfile +++ /dev/null @@ -1,22 +0,0 @@ -FROM python:3.9 -ENV TZ=Asia/Shanghai -RUN apt update -RUN apt install -y vim -RUN apt install -y libgl1-mesa-glx -COPY ./requirements.txt /requirements.txt -RUN pip install --upgrade pip -RUN pip install -r requirements.txt -RUN pip install gunicorn -RUN pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118 -RUN #pip install mmcv==1.4.2 -f https://download.openmmlab.com/mmcv/dist/cu117/torch1.13/index.html - -WORKDIR /app -COPY . . -ENV FLASK_APP=manage.py -LABEL maintainer="zchengrong@yeah.net" \ - description="My Python 3.9 - trinity aida " \ - version="1.0" \ - name="trinity_aida" - - -CMD ["gunicorn", "-c", "gunicorn_config.py", "app.main:app" , "-e", "RABBITMQ_QUEUES=SuperResolution-dev"] \ No newline at end of file diff --git a/app/api/api_attribute_retrieve.py b/app/api/api_attribute_retrieve.py new file mode 100644 index 0000000..fdecfa8 --- /dev/null +++ b/app/api/api_attribute_retrieve.py @@ -0,0 +1,45 @@ +import json +import logging +from fastapi import APIRouter + +from app.schemas.attribute_retrieve import * +from app.service.attribute.config import const +from app.service.attribute.service_att_recognition import AttributeRecognition +from app.service.attribute.service_category_recognition import CategoryRecognition + +router = APIRouter() +logger = logging.getLogger() + + +# 属性识别 +@router.post("/attribute_recognition") +def attribute_recognition(request_item: list[AttributeRecognitionModel]): + try: + service = AttributeRecognition(const=const, request_data=request_item) + data = service.get_result() + 
code = 200 + message = "access" + logger.info(f"attribute_recognition response @@@@@@:{json.dumps(data, indent=4)}") + except Exception as e: + code = 400 + message = str(e) + data = str(e) + logger.warning(f"attribute_recognition Run Exception @@@@@@:{e}") + return {"code": code, "message": message, "data": data} + + +# 类别识别 +@router.post("/category_recognition") +def category_recognition(request_item: list[CategoryRecognitionModel]): + try: + service = CategoryRecognition(request_data=request_item) + data = service.get_result() + code = 200 + message = "access" + logger.info(f"category_recognition response @@@@@@:{json.dumps(data, indent=4)}") + except Exception as e: + code = 400 + message = str(e) + data = str(e) + logger.warning(f"category_recognition Run Exception @@@@@@:{e}") + return {"code": code, "message": message, "data": data} diff --git a/app/api/api_generate_image.py b/app/api/api_generate_image.py new file mode 100644 index 0000000..a9aecec --- /dev/null +++ b/app/api/api_generate_image.py @@ -0,0 +1,27 @@ +import logging +from fastapi import APIRouter, BackgroundTasks +from app.schemas.generate_image import GenerateImageModel +from app.service.generate_image.service import GenerateImage, infer_cancel + +router = APIRouter() +logger = logging.getLogger() + + +@router.post("/generate_image") +def generate_image(request_item: GenerateImageModel, background_tasks: BackgroundTasks): + try: + service = GenerateImage(request_item) + background_tasks.add_task(service.get_result) + code = 200 + message = "access" + except Exception as e: + code = 400 + message = str(e) + logger.warning(e) + return {"code": code, "message": message} + + +@router.get("/generate_cancel/{tasks_id}") +def generate_cancel(tasks_id): + result = infer_cancel(tasks_id) + return {"code": 200, "message": result['message'], "data": result['data']} diff --git a/app/api/api_route.py b/app/api/api_route.py index fd299ee..2513204 100644 --- a/app/api/api_route.py +++ b/app/api/api_route.py @@ -2,8 +2,12 @@ from 
fastapi import APIRouter from app.api import api_test from app.api import api_super_resolution +from app.api import api_generate_image +from app.api import api_attribute_retrieve router = APIRouter() router.include_router(api_test.router, tags=["test"], prefix="/test") -router.include_router(api_super_resolution.router, tags=["api_super_resolution"], prefix="/api") +router.include_router(api_super_resolution.router, tags=["super_resolution"], prefix="/api") +router.include_router(api_generate_image.router, tags=["generate_image"], prefix="/api") +router.include_router(api_attribute_retrieve.router, tags=["attribute_retrieve"], prefix="/api") diff --git a/app/api/api_super_resolution.py b/app/api/api_super_resolution.py index a92d206..63f4498 100644 --- a/app/api/api_super_resolution.py +++ b/app/api/api_super_resolution.py @@ -18,7 +18,7 @@ def super_resolution(request_item: SuperResolutionModel, background_tasks: Backg code = 200 message = "access" except Exception as e: - code = 000 + code = 400 message = e logger.warning(e) return {"code": code, "message": message} diff --git a/app/api/api_test.py b/app/api/api_test.py index d6e7dcc..63ef1aa 100644 --- a/app/api/api_test.py +++ b/app/api/api_test.py @@ -1,8 +1,6 @@ import logging - from fastapi import APIRouter - -from app.core.config import RABBITMQ_QUEUES +from app.core.config import SR_RABBITMQ_QUEUES, GI_RABBITMQ_QUEUES logger = logging.getLogger() router = APIRouter() @@ -10,6 +8,6 @@ router = APIRouter() @router.get("") def test(): - logger.info(RABBITMQ_QUEUES) + logger.info(SR_RABBITMQ_QUEUES) logger.info("test") - return {"message": RABBITMQ_QUEUES} + return {"SR_RABBITMQ_QUEUES message": SR_RABBITMQ_QUEUES, "GI_RABBITMQ_QUEUES": GI_RABBITMQ_QUEUES} diff --git a/app/core/config.py b/app/core/config.py index 8b8ce60..70410a4 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -19,50 +19,32 @@ class Settings(BaseSettings): LOGGING_CONFIG_FILE = os.path.join(BASE_DIR, 'logging_env.py') +DEBUG = 
False +if DEBUG: + LOGS_PATH = "logs/" + CATEGORY_PATH = "service/attribute/config/descriptor/category/category_dis.csv" +else: + LOGS_PATH = "app/logs/" + CATEGORY_PATH = "app/service/attribute/config/descriptor/category/category_dis.csv" + +# RABBITMQ_ENV = "" # 生产环境 +# RABBITMQ_ENV = "-dev" # 开发环境 +RABBITMQ_ENV = "-local" # 本地测试环境 + settings = Settings() -ckpt = 'service/super_resolution_ccsr/weights/real-world_ccsr.ckpt' -config = 'service/super_resolution_ccsr/configs/model/ccsr_stage2.yaml' -steps = 45 -sr_scale = 4 -repeat_times = 1 -tiled = False -tile_size = 512 -tile_stride = 256 -color_fix_type = "adain" -t_max = 0.6667 -t_min = 0.3333 -show_lq = False -skip_if_exist = False -seed = 233 -device = "cuda" -tile_diffusion = False # -tile_diffusion_size = 512 -tile_diffusion_stride = 256 -tile_vae = True -vae_decoder_tile_size = 224 -vae_encoder_tile_size = 1024 -strength = 1 # minio 配置 -sr_bucket = "test" -MINIO_IP = "www.minio.aida.com.hk" -MINIO_PORT = 9000 +MINIO_URL = "www.minio.aida.com.hk:9000" MINIO_ACCESS = 'vXKFLSJkYeEq2DrSZvkB' MINIO_SECRET = 'uKTZT3x7C43WvPN9QTc99DiRkwddWZrG9Uh3JVlR' +MINIO_SECURE = True # redis 配置 REDIS_HOST = "10.1.1.240" REDIS_PORT = "6379" REDIS_DB = "2" -MINIO_SECURE = True -LOGS_PATH = "app/logs/errors.log" -# LOGS_PATH = "logs/errors.log" - -SR_MODEL_NAME = "super_resolution" - # rabbitmq config - RABBITMQ_PARAMS = { "host": "18.167.251.121", "port": 5672, @@ -70,4 +52,26 @@ RABBITMQ_PARAMS = { "virtual_host": "/" } -RABBITMQ_QUEUES = os.getenv("RABBITMQ_QUEUES", "SuperResolution-local") +# attribute service config +ATT_TRITON_URL = "10.1.1.240:8020" + +# SR service config +SR_MODEL_NAME = "super_resolution" +SR_TRITON_URL = "10.1.1.240:10031" +SR_MINIO_BUCKET = "aida-users" +SR_RABBITMQ_QUEUES = os.getenv("SR_RABBITMQ_QUEUES", f"SuperResolution{RABBITMQ_ENV}") + +# GenerateImage service config +GI_MODEL_NAME = 'stable_diffusion_xl_lcm' +GI_MODEL_URL = '10.1.1.150:8001' +GI_MINIO_BUCKET = "aida-users" +GI_RABBITMQ_QUEUES = 
os.getenv("GI_RABBITMQ_QUEUES", f"GenerateImage{RABBITMQ_ENV}") +GI_SYS_IMAGE_URL = "aida-sys-image/generate_image/white_image.jpg" + +# SEG service config +SEG_MODEL_URL = '10.1.1.240:10000' +SEGMENTATION = { + "name": "seg_ocrnet_hr18", + "input": "seg_input__0", + "output": "seg_output__0", +} diff --git a/app/main.py b/app/main.py index ad48d7d..07bd258 100644 --- a/app/main.py +++ b/app/main.py @@ -7,6 +7,7 @@ from app.api.api_route import router from app.core.config import settings from logging_env import LOGGER_CONFIG_DICT + logging.config.dictConfig(LOGGER_CONFIG_DICT) from starlette.middleware.cors import CORSMiddleware diff --git a/app/schemas/attribute_retrieve.py b/app/schemas/attribute_retrieve.py new file mode 100644 index 0000000..c22298e --- /dev/null +++ b/app/schemas/attribute_retrieve.py @@ -0,0 +1,12 @@ +from pydantic import BaseModel + + +class CategoryRecognitionModel(BaseModel): + colony: str + sketch_img_url: str + + +class AttributeRecognitionModel(BaseModel): + category: str + colony: str + sketch_img_url: str diff --git a/app/schemas/generate_image.py b/app/schemas/generate_image.py new file mode 100644 index 0000000..1e7617c --- /dev/null +++ b/app/schemas/generate_image.py @@ -0,0 +1,9 @@ +from pydantic import BaseModel + + +class GenerateImageModel(BaseModel): + tasks_id: str + prompt: str + image_url: str + mode: str + category: str diff --git a/app/service/attribute/config/const.py b/app/service/attribute/config/const.py new file mode 100644 index 0000000..24d9412 --- /dev/null +++ b/app/service/attribute/config/const.py @@ -0,0 +1,78 @@ +top_description_list = ['service/attribute/config/descriptor/top/length.csv', + 'service/attribute/config/descriptor/top/type.csv', + 'service/attribute/config/descriptor/top/sleeve_length.csv', + 'service/attribute/config/descriptor/top/sleeve_shape.csv', + 'service/attribute/config/descriptor/top/sleeve_shoulder.csv', + 'service/attribute/config/descriptor/top/neckline.csv', + 
'service/attribute/config/descriptor/top/design.csv', + 'service/attribute/config/descriptor/top/opening_type.csv', + 'service/attribute/config/descriptor/top/silhouette.csv', + 'service/attribute/config/descriptor/top/collar.csv'] + +top_model_list = ['attr_retrieve_T_length', + 'attr_retrieve_T_type', + 'attr_retrieve_T_sleeve_length', + 'attr_retrieve_T_sleeve_shape', + 'attr_retrieve_T_sleeve_shoulder', + 'attr_retrieve_T_neckline', + 'attr_retrieve_T_design', + 'attr_retrieve_T_optype', + 'attr_retrieve_T_silhouette', + 'attr_retrieve_T_collar' + ] + +bottom_description_list = [ + 'service/attribute/config/descriptor/bottom/subtype.csv', + 'service/attribute/config/descriptor/bottom/length.csv', + 'service/attribute/config/descriptor/bottom/silhouette.csv', + 'service/attribute/config/descriptor/bottom/opening_type.csv', + 'service/attribute/config/descriptor/bottom/design.csv'] + +bottom_model_list = [ + 'attr_retrieve_B_subtype', + 'attr_retrieve_B_length', + 'attr_retrieve_B_silhouette', + 'attr_recong_B_optype', + 'attr_retrieve_B_design'] + +outwear_description_list = ['service/attribute/config/descriptor/outwear/length.csv', + 'service/attribute/config/descriptor/outwear/sleeve_length.csv', + 'service/attribute/config/descriptor/outwear/sleeve_shape.csv', + 'service/attribute/config/descriptor/outwear/sleeve_shoulder.csv', + 'service/attribute/config/descriptor/outwear/collar.csv', + 'service/attribute/config/descriptor/outwear/design.csv', + 'service/attribute/config/descriptor/outwear/opening_type.csv', + 'service/attribute/config/descriptor/outwear/silhouette.csv', ] + +outwear_model_list = ['attr_recong_O_length', + 'attr_retrieve_O_sleeve_length', + 'attr_retrieve_O_sleeve_shape', + 'attr_retrieve_O_sleeve_shoulder', + 'attr_retrieve_O_collar', + 'attr_retrieve_O_design', + 'attr_recong_O_optype', + 'attr_retrieve_O_silhouette'] + +dress_description_list = [ # 'service/attribute/config/descriptor/dress/D_length.csv', + 
'service/attribute/config/descriptor/dress/sleeve_length.csv', + 'service/attribute/config/descriptor/dress/sleeve_shape.csv', + # 'service/attribute/config/descriptor/dress/D_sleeve_shoulder.csv', + 'service/attribute/config/descriptor/dress/neckline.csv', + 'service/attribute/config/descriptor/dress/collar.csv', + 'service/attribute/config/descriptor/dress/design.csv', + 'service/attribute/config/descriptor/dress/silhouette.csv', + 'service/attribute/config/descriptor/dress/type.csv'] + +dress_model_list = [ # 'attr_recong_D_length', + 'attr_retrieve_D_sleeve_length', + 'attr_retrieve_D_sleeve_shape', + # 'attr_retrieve_D_sleeve_shoulder', + 'attr_retrieve_D_neckline', + 'attr_retrieve_D_collar', + 'attr_retrieve_D_design', + 'attr_retrieve_D_silohouette', + 'attr_retrieve_D_type' +] + +category_description = './description/category/category_dis.csv' +category_model = 'attr_retrieve_category' diff --git a/app/service/attribute/config/descriptor/bottom/B_material.csv b/app/service/attribute/config/descriptor/bottom/B_material.csv new file mode 100644 index 0000000..8a447c9 --- /dev/null +++ b/app/service/attribute/config/descriptor/bottom/B_material.csv @@ -0,0 +1,28 @@ +labelName,join_attr,taskName,taskId +Canvas,attr_Material_O_1,Material_B,8 +Chambray,attr_Material_O_2,Material_B,8 +Chenille,attr_Material_O_3,Material_B,8 +Chiffon,attr_Material_O_4,Material_B,8 +Corduroy,attr_Material_O_5,Material_B,8 +Crepe,attr_Material_O_6,Material_B,8 +Denim,attr_Material_O_7,Material_B,8 +Faux_fur,attr_Material_O_8,Material_B,8 +Faux_leather,attr_Material_O_9,Material_B,8 +Flannel,attr_Material_O_10,Material_B,8 +Fleece,attr_Material_O_11,Material_B,8 +Gingham,attr_Material_O_12,Material_B,8 +Jersey,attr_Material_O_13,Material_B,8 +Knit,attr_Material_O_14,Material_B,8 +Lace,attr_Material_O_15,Material_B,8 +Lawn,attr_Material_O_16,Material_B,8 +Neoprene,attr_Material_O_17,Material_B,8 +Organza,attr_Material_O_18,Material_B,8 +Plush,attr_Material_O_19,Material_B,8 
+Satin,attr_Material_O_20,Material_B,8 +Serge,attr_Material_O_21,Material_B,8 +Taffeta,attr_Material_O_22,Material_B,8 +Tulle,attr_Material_O_23,Material_B,8 +Tweed,attr_Material_O_24,Material_B,8 +Twill,attr_Material_O_25,Material_B,8 +Velvet,attr_Material_O_26,Material_B,8 +Vinyl,attr_Material_O_27,Material_B,8 diff --git a/app/service/attribute/config/descriptor/bottom/B_print.csv b/app/service/attribute/config/descriptor/bottom/B_print.csv new file mode 100644 index 0000000..a661a18 --- /dev/null +++ b/app/service/attribute/config/descriptor/bottom/B_print.csv @@ -0,0 +1,16 @@ +labelName,join_attr,taskName,taskId +Abstract,attr_Print_O_1,Print_B,7 +Allover,attr_Print_O_2,Print_B,7 +Animal,attr_Print_O_3,Print_B,7 +Camouflage,attr_Print_O_4,Print_B,7 +Checks,attr_Print_O_5,Print_B,7 +Color_block,attr_Print_O_6,Print_B,7 +Disty print,attr_Print_O_7,Print_B,7 +Dotted,attr_Print_O_8,Print_B,7 +Floral,attr_Print_O_9,Print_B,7 +Graphic print,attr_Print_O_10,Print_B,7 +Logo and slogan,attr_Print_O_11,Print_B,7 +Patchwork,attr_Print_O_12,Print_B,7 +Plain,attr_Print_O_13,Print_B,7 +Plain_dnim,attr_Print_O_14,Print_B,7 +Stripe,attr_Print_O_15,Print_B,7 diff --git a/app/service/attribute/config/descriptor/bottom/B_softness.csv b/app/service/attribute/config/descriptor/bottom/B_softness.csv new file mode 100644 index 0000000..137443e --- /dev/null +++ b/app/service/attribute/config/descriptor/bottom/B_softness.csv @@ -0,0 +1,4 @@ +labelName,join_attr,taskName,taskId +B_softness_Soft,attr_Softness_B_1,Softness_B,5 +B_softness_Medium,attr_Softness_B_2,Softness_B,5 +B_softness_Hard,attr_Softness_B_3,Softness_B,5 diff --git a/app/service/attribute/config/descriptor/bottom/design.csv b/app/service/attribute/config/descriptor/bottom/design.csv new file mode 100644 index 0000000..0207a01 --- /dev/null +++ b/app/service/attribute/config/descriptor/bottom/design.csv @@ -0,0 +1,17 @@ +labelName,join_attr,taskName,taskId +Asymmetrical,attr_Design_B_1,Design_B,6 
+Tiered,attr_Design_B_2,Design_B,6 +Tassel,attr_Design_B_3,Design_B,6 +Ruffle,attr_Design_B_4,Design_B,6 +Pleated,attr_Design_B_5,Design_B,6 +Wrap,attr_Design_B_6,Design_B,6 +Ripped,attr_Design_B_7,Design_B,6 +Cut_out,attr_Design_B_8,Design_B,6 +Eyelet,attr_Design_B_9,Design_B,6 +Folded,attr_Design_B_10,Design_B,6 +Tied,attr_Design_B_11,Design_B,6 +Drapes,attr_Design_B_12,Design_B,6 +Ribbon,attr_Design_B_13,Design_B,6 +Button,attr_Design_B_14,Design_B,6 +Split,attr_Design_B_15,Design_B,6 +Fishtail,attr_Design_B_16,Design_B,6 diff --git a/app/service/attribute/config/descriptor/bottom/length.csv b/app/service/attribute/config/descriptor/bottom/length.csv new file mode 100644 index 0000000..dede302 --- /dev/null +++ b/app/service/attribute/config/descriptor/bottom/length.csv @@ -0,0 +1,6 @@ +labelName,join_attr,taskName,taskId +Short,attr_BTM_length_1,BTM_length,3 +Middle,attr_BTM_length_2,BTM_length,3 +Seven,attr_BTM_length_3,BTM_length,3 +Nine,attr_BTM_length_4,BTM_length,3 +Long,attr_BTM_length_5,BTM_length,3 diff --git a/app/service/attribute/config/descriptor/bottom/opening_type.csv b/app/service/attribute/config/descriptor/bottom/opening_type.csv new file mode 100644 index 0000000..c779051 --- /dev/null +++ b/app/service/attribute/config/descriptor/bottom/opening_type.csv @@ -0,0 +1,6 @@ +labelName,join_attr,taskName,taskId +Button,attr_OPType_B_1,OPType_B,7 +Zipper,attr_OPType_B_2,OPType_B,7 +Thread,attr_OPType_B_3,OPType_B,7 +Hook,attr_OPType_B_4,OPType_B,7 +Elastic,attr_OPType_B_5,OPType_B,7 diff --git a/app/service/attribute/config/descriptor/bottom/silhouette.csv b/app/service/attribute/config/descriptor/bottom/silhouette.csv new file mode 100644 index 0000000..c5efde5 --- /dev/null +++ b/app/service/attribute/config/descriptor/bottom/silhouette.csv @@ -0,0 +1,6 @@ +labelName,join_attr,taskName,taskId +A_Line,attr_Silhouette_B_1,Silhouette_B,8 +H_Shape,attr_Silhouette_B_2,Silhouette_B,8 +Slim,attr_Silhouette_B_3,Silhouette_B,8 
+Peg_leg,attr_Silhouette_B_4,Silhouette_B,8 +Peplum,attr_Silhouette_B_5,Silhouette_B,8 diff --git a/app/service/attribute/config/descriptor/bottom/subtype.csv b/app/service/attribute/config/descriptor/bottom/subtype.csv new file mode 100644 index 0000000..3cf89ab --- /dev/null +++ b/app/service/attribute/config/descriptor/bottom/subtype.csv @@ -0,0 +1,21 @@ +labelName,join_attr,taskName,taskId +A_Line_Skirt,bottom_Sub-Type_1,BTM_Sub-Type,2 +Bodycon_Skirt,bottom_Sub-Type_2,BTM_Sub-Type,2 +Boot_Cut,bottom_Sub-Type_3,BTM_Sub-Type,2 +Bubble_skirt,bottom_Sub-Type_4,BTM_Sub-Type,2 +Cargo_Pants,bottom_Sub-Type_5,BTM_Sub-Type,2 +Culottes,bottom_Sub-Type_6,BTM_Sub-Type,2 +Handkerchief_Skirt,bottom_Sub-Type_7,BTM_Sub-Type,2 +Jeans,bottom_Sub-Type_8,BTM_Sub-Type,2 +Joggers,bottom_Sub-Type_9,BTM_Sub-Type,2 +Leather_pants,bottom_Sub-Type_10,BTM_Sub-Type,2 +Leggings,bottom_Sub-Type_11,BTM_Sub-Type,2 +Mermaid,bottom_Sub-Type_12,BTM_Sub-Type,2 +Pattened_pants,bottom_Sub-Type_13,BTM_Sub-Type,2 +Peg_leg_Pants_Cigarette_Pants,bottom_Sub-Type_14,BTM_Sub-Type,2 +Pencil_Skirt,bottom_Sub-Type_15,BTM_Sub-Type,2 +Pleated_Skirt,bottom_Sub-Type_16,BTM_Sub-Type,2 +Shorts,bottom_Sub-Type_17,BTM_Sub-Type,2 +Skater_Skirt,bottom_Sub-Type_18,BTM_Sub-Type,2 +Suit_Trousers,bottom_Sub-Type_19,BTM_Sub-Type,2 +Tier_Skirt,bottom_Sub-Type_20,BTM_Sub-Type,2 diff --git a/app/service/attribute/config/descriptor/category/category_dis.csv b/app/service/attribute/config/descriptor/category/category_dis.csv new file mode 100644 index 0000000..724edb2 --- /dev/null +++ b/app/service/attribute/config/descriptor/category/category_dis.csv @@ -0,0 +1,7 @@ +labelName,join_attr,taskName,taskId +top,attr_top,category,1 +pants,attr_pants,category,1 +skirt,attr_skirt,category,1 +dress,attr_dress,category,1 +outwear,attr_outwear,category,1 +jumpsuit,attr_jumpsuit,category,1 diff --git a/app/service/attribute/config/descriptor/dress/D_Print.csv b/app/service/attribute/config/descriptor/dress/D_Print.csv new file mode 
100644 index 0000000..d5a740f --- /dev/null +++ b/app/service/attribute/config/descriptor/dress/D_Print.csv @@ -0,0 +1,16 @@ +labelName,join_attr,taskName,taskId +Abstract,attr_Print_O_1,Print_D,7 +Allover,attr_Print_O_2,Print_D,7 +Animal,attr_Print_O_3,Print_D,7 +Camouflage,attr_Print_O_4,Print_D,7 +Checks,attr_Print_O_5,Print_D,7 +Color_block,attr_Print_O_6,Print_D,7 +Disty print,attr_Print_O_7,Print_D,7 +Dotted,attr_Print_O_8,Print_D,7 +Floral,attr_Print_O_9,Print_D,7 +Graphic print,attr_Print_O_10,Print_D,7 +Logo and slogan,attr_Print_O_11,Print_D,7 +Patchwork,attr_Print_O_12,Print_D,7 +Plain,attr_Print_O_13,Print_D,7 +Plain_dnim,attr_Print_O_14,Print_D,7 +Stripe,attr_Print_O_15,Print_D,7 diff --git a/app/service/attribute/config/descriptor/dress/D_material.csv b/app/service/attribute/config/descriptor/dress/D_material.csv new file mode 100644 index 0000000..f5e4b51 --- /dev/null +++ b/app/service/attribute/config/descriptor/dress/D_material.csv @@ -0,0 +1,28 @@ +labelName,join_attr,taskName,taskId +Canvas,attr_Material_O_1,Material_D,8 +Chambray,attr_Material_O_2,Material_D,8 +Chenille,attr_Material_O_3,Material_D,8 +Chiffon,attr_Material_O_4,Material_D,8 +Corduroy,attr_Material_O_5,Material_D,8 +Crepe,attr_Material_O_6,Material_D,8 +Denim,attr_Material_O_7,Material_D,8 +Faux_fur,attr_Material_O_8,Material_D,8 +Faux_leather,attr_Material_O_9,Material_D,8 +Flannel,attr_Material_O_10,Material_D,8 +Fleece,attr_Material_O_11,Material_D,8 +Gingham,attr_Material_O_12,Material_D,8 +Jersey,attr_Material_O_13,Material_D,8 +Knit,attr_Material_O_14,Material_D,8 +Lace,attr_Material_O_15,Material_D,8 +Lawn,attr_Material_O_16,Material_D,8 +Neoprene,attr_Material_O_17,Material_D,8 +Organza,attr_Material_O_18,Material_D,8 +Plush,attr_Material_O_19,Material_D,8 +Satin,attr_Material_O_20,Material_D,8 +Serge,attr_Material_O_21,Material_D,8 +Taffeta,attr_Material_O_22,Material_D,8 +Tulle,attr_Material_O_23,Material_D,8 +Tweed,attr_Material_O_24,Material_D,8 
+Twill,attr_Material_O_25,Material_D,8 +Velvet,attr_Material_O_26,Material_D,8 +Vinyl,attr_Material_O_27,Material_D,8 diff --git a/app/service/attribute/config/descriptor/dress/D_softness.csv b/app/service/attribute/config/descriptor/dress/D_softness.csv new file mode 100644 index 0000000..ea612ab --- /dev/null +++ b/app/service/attribute/config/descriptor/dress/D_softness.csv @@ -0,0 +1,4 @@ +labelName,join_attr,taskName,taskId +Soft,attr_Softness_U_1,Softness_D,9 +Medium,attr_Softness_U_2,Softness_D,9 +Hard,attr_Softness_U_3,Softness_D,9 diff --git a/app/service/attribute/config/descriptor/dress/collar.csv b/app/service/attribute/config/descriptor/dress/collar.csv new file mode 100644 index 0000000..a30c014 --- /dev/null +++ b/app/service/attribute/config/descriptor/dress/collar.csv @@ -0,0 +1,11 @@ +labelName,join_attr,taskName,taskId +Peterpan,attr_Collar_1,Collar,7 +Shirt,attr_Collar_2,Collar,7 +Rib,attr_Collar_3,Collar,7 +Turtle,attr_Collar_4,Collar,7 +Lapel,attr_Collar_5,Collar,7 +Hoodie,attr_Collar_6,Collar,7 +Mandarin,attr_Collar_7,Collar,7 +Tie,attr_Collar_8,Collar,7 +Ruffle,attr_Collar_9,Collar,7 +Cowl,attr_Collar_10,Collar,7 diff --git a/app/service/attribute/config/descriptor/dress/design.csv b/app/service/attribute/config/descriptor/dress/design.csv new file mode 100644 index 0000000..f2fdd97 --- /dev/null +++ b/app/service/attribute/config/descriptor/dress/design.csv @@ -0,0 +1,19 @@ +labelName,join_attr,taskName,taskId +Asymmetrical,attr_Design_U_1,Design_D,9 +Tiered,attr_Design_U_2,Design_D,9 +Tassel,attr_Design_U_3,Design_D,9 +Ruffle,attr_Design_U_4,Design_D,9 +Pleated,attr_Design_U_5,Design_D,9 +Wrap,attr_Design_U_6,Design_D,9 +Ripped,attr_Design_U_7,Design_D,9 +Cut_out,attr_Design_U_8,Design_D,9 +Eyelet,attr_Design_U_9,Design_D,9 +Folded,attr_Design_U_10,Design_D,9 +Tied,attr_Design_U_11,Design_D,9 +Drapes,attr_Design_U_12,Design_D,9 +Ribbon,attr_Design_U_13,Design_D,9 +Button,attr_Design_U_14,Design_D,9 +Split,attr_Design_U_15,Design_D,9 
+Fishtail,attr_Design_U_16,Design_D,9 +Cami_dress,attr_Design_U_17,Design_D,9 +Gathering,attr_Design_U_18,Design_D,9 diff --git a/app/service/attribute/config/descriptor/dress/length.csv b/app/service/attribute/config/descriptor/dress/length.csv new file mode 100644 index 0000000..0fd7183 --- /dev/null +++ b/app/service/attribute/config/descriptor/dress/length.csv @@ -0,0 +1,6 @@ +labelName,join_attr,taskName,taskId +Maxi,attr_Dress_length_1,Dress_length,1 +Midi,attr_Dress_length_2,Dress_length,1 +Mini,attr_Dress_length_3,Dress_length,1 +Over_the_knee,attr_Dress_length_4,Dress_length,1 +Floor_Length,attr_Dress_length_5,Dress_length,1 diff --git a/app/service/attribute/config/descriptor/dress/neckline.csv b/app/service/attribute/config/descriptor/dress/neckline.csv new file mode 100644 index 0000000..bd61dc8 --- /dev/null +++ b/app/service/attribute/config/descriptor/dress/neckline.csv @@ -0,0 +1,17 @@ +labelName,join_attr,taskName,taskId +Round,attr_Neckline_D_1,Neckline_D,5 +V,attr_Neckline_D_2,Neckline_D,5 +Square,attr_Neckline_D_3,Neckline_D,5 +One_shoulder,attr_Neckline_D_4,Neckline_D,5 +Off_shoulder,attr_Neckline_D_5,Neckline_D,5 +Strapless,attr_Neckline_D_6,Neckline_D,5 +Turtle,attr_Neckline_D_7,Neckline_D,5 +Boat,attr_Neckline_D_8,Neckline_D,5 +Halter,attr_Neckline_D_9,Neckline_D,5 +Spaghetti_Strap,attr_Neckline_D_10,Neckline_D,5 +Sweetheart,attr_Neckline_D_11,Neckline_D,5 +U,attr_Neckline_D_12,Neckline_D,5 +Choker,attr_Neckline_D_13,Neckline_D,5 +Cowl,attr_Neckline_D_14,Neckline_D,5 +Keyhole,attr_Neckline_D_15,Neckline_D,5 +Split,attr_Neckline_D_16,Neckline_D,5 diff --git a/app/service/attribute/config/descriptor/dress/opening_type.csv b/app/service/attribute/config/descriptor/dress/opening_type.csv new file mode 100644 index 0000000..2223bf2 --- /dev/null +++ b/app/service/attribute/config/descriptor/dress/opening_type.csv @@ -0,0 +1,5 @@ +labelName,join_attr,taskName,taskId +Button,attr_OPType_U_1,OPType_D,11 +Zipper,attr_OPType_U_2,OPType_D,11 
+Thread,attr_OPType_U_3,OPType_D,11 +Hook,attr_OPType_U_4,OPType_D,11 diff --git a/app/service/attribute/config/descriptor/dress/silhouette.csv b/app/service/attribute/config/descriptor/dress/silhouette.csv new file mode 100644 index 0000000..43e5220 --- /dev/null +++ b/app/service/attribute/config/descriptor/dress/silhouette.csv @@ -0,0 +1,11 @@ +labelName,join_attr,taskName,taskId +A_Line,attr_Silhouette_U_1,Silhouette_D,11 +H_Shape,attr_Silhouette_U_2,Silhouette_D,11 +Slim,attr_Silhouette_U_3,Silhouette_D,11 +Oversized,attr_Silhouette_U_4,Silhouette_D,11 +Cacoon,attr_Silhouette_U_5,Silhouette_D,11 +Empire,attr_Silhouette_U_6,Silhouette_D,11 +Hourglass,attr_Silhouette_U_7,Silhouette_D,11 +Mermaid,attr_Silhouette_U_8,Silhouette_D,11 +Sheath,attr_Silhouette_U_9,Silhouette_D,11 +Tent,attr_Silhouette_U_10,Silhouette_D,11 diff --git a/app/service/attribute/config/descriptor/dress/sleeve_length.csv b/app/service/attribute/config/descriptor/dress/sleeve_length.csv new file mode 100644 index 0000000..8338fbd --- /dev/null +++ b/app/service/attribute/config/descriptor/dress/sleeve_length.csv @@ -0,0 +1,6 @@ +labelName,join_attr,taskName,taskId +Sleeveless,attr_Sleeve_length_1,Sleeve_length,3 +Short,attr_Sleeve_length_2,Sleeve_length,3 +Middle,attr_Sleeve_length_3,Sleeve_length,3 +Seven,attr_Sleeve_length_4,Sleeve_length,3 +Long,attr_Sleeve_length_5,Sleeve_length,3 diff --git a/app/service/attribute/config/descriptor/dress/sleeve_shape.csv b/app/service/attribute/config/descriptor/dress/sleeve_shape.csv new file mode 100644 index 0000000..ecc7cc8 --- /dev/null +++ b/app/service/attribute/config/descriptor/dress/sleeve_shape.csv @@ -0,0 +1,9 @@ +labelName,join_attr,taskName,taskId +Regular,attr_Sleeve_shape_1,Sleeve_shape,4 +Slim,attr_Sleeve_shape_2,Sleeve_shape,4 +Puff,attr_Sleeve_shape_3,Sleeve_shape,4 +Bell,attr_Sleeve_shape_4,Sleeve_shape,4 +Batwing,attr_Sleeve_shape_5,Sleeve_shape,4 +Shirt,attr_Sleeve_shape_6,Sleeve_shape,4 +Rib,attr_Sleeve_shape_7,Sleeve_shape,4 
+Raglan,attr_Sleeve_shape_8,Sleeve_shape,4 diff --git a/app/service/attribute/config/descriptor/dress/sleeve_shoulder.csv b/app/service/attribute/config/descriptor/dress/sleeve_shoulder.csv new file mode 100644 index 0000000..dc7dcff --- /dev/null +++ b/app/service/attribute/config/descriptor/dress/sleeve_shoulder.csv @@ -0,0 +1,5 @@ +labelName,join_attr,taskName,taskId +Regular,attr_Sleeve_shoulder_1,Sleeve_shoulder,5 +Cold,attr_Sleeve_shoulder_2,Sleeve_shoulder,5 +Tucked,attr_Sleeve_shoulder_3,Sleeve_shoulder,5 +Balmain,attr_Sleeve_shoulder_4,Sleeve_shoulder,5 diff --git a/app/service/attribute/config/descriptor/dress/type.csv b/app/service/attribute/config/descriptor/dress/type.csv new file mode 100644 index 0000000..feaf9d6 --- /dev/null +++ b/app/service/attribute/config/descriptor/dress/type.csv @@ -0,0 +1,20 @@ +labelName,join_attr,taskName,taskId +Evening_gown,attr_dresstype_1,Dress_Type,12 +Shirt_dress,attr_dresstype_2,Dress_Type,12 +Coat_dress,attr_dresstype_3,Dress_Type,12 +Handkerchief_dress,attr_dresstype_4,Dress_Type,12 +Jumper_dress,attr_dresstype_5,Dress_Type,12 +Dungaree_dress,attr_dresstype_6,Dress_Type,12 +Skater_dress,attr_dresstype_7,Dress_Type,12 +Tea_dress,attr_dresstype_8,Dress_Type,12 +Mermaid_dress,attr_dresstype_9,Dress_Type,12 +Cocktail_dress,attr_dresstype_10,Dress_Type,12 +A_Line_dress,attr_dresstype_11,Dress_Type,12 +Bodycon_dress,attr_dresstype_12,Dress_Type,12 +Maxi_dress,attr_dresstype_13,Dress_Type,12 +Office_dress,attr_dresstype_14,Dress_Type,12 +Pencil_dress,attr_dresstype_15,Dress_Type,12 +Sheer_dress,attr_dresstype_16,Dress_Type,12 +Shift_dress,attr_dresstype_17,Dress_Type,12 +Slip_dress,attr_dresstype_18,Dress_Type,12 +T_shirt_dress,attr_dresstype_19,Dress_Type,12 diff --git a/app/service/attribute/config/descriptor/outwear/O_material.csv b/app/service/attribute/config/descriptor/outwear/O_material.csv new file mode 100644 index 0000000..acabec5 --- /dev/null +++ 
b/app/service/attribute/config/descriptor/outwear/O_material.csv @@ -0,0 +1,28 @@ +labelName,join_attr,taskName,taskId +Canvas,attr_Material_O_1,Material_O,8 +Chambray,attr_Material_O_2,Material_O,8 +Chenille,attr_Material_O_3,Material_O,8 +Chiffon,attr_Material_O_4,Material_O,8 +Corduroy,attr_Material_O_5,Material_O,8 +Crepe,attr_Material_O_6,Material_O,8 +Denim,attr_Material_O_7,Material_O,8 +Faux_fur,attr_Material_O_8,Material_O,8 +Faux_leather,attr_Material_O_9,Material_O,8 +Flannel,attr_Material_O_10,Material_O,8 +Fleece,attr_Material_O_11,Material_O,8 +Gingham,attr_Material_O_12,Material_O,8 +Jersey,attr_Material_O_13,Material_O,8 +Knit,attr_Material_O_14,Material_O,8 +Lace,attr_Material_O_15,Material_O,8 +Lawn,attr_Material_O_16,Material_O,8 +Neoprene,attr_Material_O_17,Material_O,8 +Organza,attr_Material_O_18,Material_O,8 +Plush,attr_Material_O_19,Material_O,8 +Satin,attr_Material_O_20,Material_O,8 +Serge,attr_Material_O_21,Material_O,8 +Taffeta,attr_Material_O_22,Material_O,8 +Tulle,attr_Material_O_23,Material_O,8 +Tweed,attr_Material_O_24,Material_O,8 +Twill,attr_Material_O_25,Material_O,8 +Velvet,attr_Material_O_26,Material_O,8 +Vinyl,attr_Material_O_27,Material_O,8 diff --git a/app/service/attribute/config/descriptor/outwear/O_print.csv b/app/service/attribute/config/descriptor/outwear/O_print.csv new file mode 100644 index 0000000..4968479 --- /dev/null +++ b/app/service/attribute/config/descriptor/outwear/O_print.csv @@ -0,0 +1,16 @@ +labelName,join_attr,taskName,taskId +Abstract,attr_Print_O_1,Print_O,7 +Allover,attr_Print_O_2,Print_O,7 +Animal,attr_Print_O_3,Print_O,7 +Camouflage,attr_Print_O_4,Print_O,7 +Checks,attr_Print_O_5,Print_O,7 +Color_block,attr_Print_O_6,Print_O,7 +Disty print,attr_Print_O_7,Print_O,7 +Dotted,attr_Print_O_8,Print_O,7 +Floral,attr_Print_O_9,Print_O,7 +Graphic print,attr_Print_O_10,Print_O,7 +Logo and slogan,attr_Print_O_11,Print_O,7 +Patchwork,attr_Print_O_12,Print_O,7 +Plain,attr_Print_O_13,Print_O,7 
+Plain_dnim,attr_Print_O_14,Print_O,7 +Stripe,attr_Print_O_15,Print_O,7 diff --git a/app/service/attribute/config/descriptor/outwear/O_softness.csv b/app/service/attribute/config/descriptor/outwear/O_softness.csv new file mode 100644 index 0000000..7fa1380 --- /dev/null +++ b/app/service/attribute/config/descriptor/outwear/O_softness.csv @@ -0,0 +1,4 @@ +labelName,join_attr,taskName,taskId +O_softness_Soft,attr_Softness_O_1,Softness_O,9 +O_softness_Medium,attr_Softness_O_2,Softness_O,9 +O_softness_Hard,attr_Softness_O_3,Softness_O,9 diff --git a/app/service/attribute/config/descriptor/outwear/collar.csv b/app/service/attribute/config/descriptor/outwear/collar.csv new file mode 100644 index 0000000..e261e15 --- /dev/null +++ b/app/service/attribute/config/descriptor/outwear/collar.csv @@ -0,0 +1,10 @@ +labelName,join_attr,taskName,taskId +Peterpan,attr_Callar_O_1,Collar,6 +Shirt,attr_Callar_O_2,Collar,6 +Rib,attr_Callar_O_3,Collar,6 +Turtle,attr_Callar_O_4,Collar,6 +Lapel,attr_Callar_O_5,Collar,6 +Hoodie,attr_Callar_O_6,Collar,6 +Mandarin,attr_Callar_O_7,Collar,6 +Ruffle,attr_Callar_O_8,Collar,6 +Jewel,attr_Callar_O_9,Collar,6 diff --git a/app/service/attribute/config/descriptor/outwear/design.csv b/app/service/attribute/config/descriptor/outwear/design.csv new file mode 100644 index 0000000..e9c7869 --- /dev/null +++ b/app/service/attribute/config/descriptor/outwear/design.csv @@ -0,0 +1,19 @@ +labelName,join_attr,taskName,taskId +Asymmetrical,attr_Design_O_1,Design_O,10 +Tiered,attr_Design_O_2,Design_O,10 +Tassel,attr_Design_O_3,Design_O,10 +Ruffles,attr_Design_O_4,Design_O,10 +Pleated,attr_Design_O_5,Design_O,10 +Wrap,attr_Design_O_6,Design_O,10 +Ripped,attr_Design_O_7,Design_O,10 +Cut_out,attr_Design_O_8,Design_O,10 +Eyelet,attr_Design_O_9,Design_O,10 +Folded,attr_Design_O_10,Design_O,10 +Tied,attr_Design_O_11,Design_O,10 +Drapes,attr_Design_O_12,Design_O,10 +Ribbon,attr_Design_O_13,Design_O,10 +Button,attr_Design_O_14,Design_O,10 
+Crossed_over_zipper,attr_Design_O_15,Design_O,10 +Crossed_over_button,attr_Design_O_16,Design_O,10 +Single_breasted,attr_Design_O_17,Design_O,10 +Double_breasted,attr_Design_O_18,Design_O,10 diff --git a/app/service/attribute/config/descriptor/outwear/length.csv b/app/service/attribute/config/descriptor/outwear/length.csv new file mode 100644 index 0000000..68546a0 --- /dev/null +++ b/app/service/attribute/config/descriptor/outwear/length.csv @@ -0,0 +1,4 @@ +labelName,join_attr,taskName,taskId +Short,attr_Outer_length_1,Outer_length,1 +Regular,attr_Outer_length_2,Outer_length,1 +Long,attr_Outer_length_3,Outer_length,1 diff --git a/app/service/attribute/config/descriptor/outwear/opening.csv b/app/service/attribute/config/descriptor/outwear/opening.csv new file mode 100644 index 0000000..292e480 --- /dev/null +++ b/app/service/attribute/config/descriptor/outwear/opening.csv @@ -0,0 +1,3 @@ +labelName,join_attr,taskName,taskId +Full,attr_Opening_O_1,Opening_O,11 +Half,attr_Opening_O_2,Opening_O,11 diff --git a/app/service/attribute/config/descriptor/outwear/opening_type.csv b/app/service/attribute/config/descriptor/outwear/opening_type.csv new file mode 100644 index 0000000..310c996 --- /dev/null +++ b/app/service/attribute/config/descriptor/outwear/opening_type.csv @@ -0,0 +1,5 @@ +labelName,join_attr,taskName,taskId +Button,attr_OPType_O_1,OPType_O,12 +Zipper,attr_OPType_O_2,OPType_O,12 +Thread,attr_OPType_O_3,OPType_O,12 +Hook,attr_OPType_O_4,OPType_O,12 diff --git a/app/service/attribute/config/descriptor/outwear/silhouette.csv b/app/service/attribute/config/descriptor/outwear/silhouette.csv new file mode 100644 index 0000000..4bd7451 --- /dev/null +++ b/app/service/attribute/config/descriptor/outwear/silhouette.csv @@ -0,0 +1,7 @@ +labelName,join_attr,taskName,taskId +A_Line,attr_Silhouette_O_1,Silhouette_O,13 +H_Shape,attr_Silhouette_O_2,Silhouette_O,13 +Slim,attr_Silhouette_O_3,Silhouette_O,13 +Flyman,attr_Silhouette_O_4,Silhouette_O,13 
+Peplum,attr_Silhouette_O_5,Silhouette_O,13 +Oversize,attr_Silhouette_O_6,Silhouette_O,13 diff --git a/app/service/attribute/config/descriptor/outwear/sleeve_length.csv b/app/service/attribute/config/descriptor/outwear/sleeve_length.csv new file mode 100644 index 0000000..3021778 --- /dev/null +++ b/app/service/attribute/config/descriptor/outwear/sleeve_length.csv @@ -0,0 +1,6 @@ +labelName,join_attr,taskName,taskId +Sleeveless,attr_Sleeve_length_O_1,Sleeve_length_O,3 +Short,attr_Sleeve_length_O_2,Sleeve_length_O,3 +Middle,attr_Sleeve_length_O_3,Sleeve_length_O,3 +Seven,attr_Sleeve_length_O_4,Sleeve_length_O,3 +Long,attr_Sleeve_length_O_5,Sleeve_length_O,3 diff --git a/app/service/attribute/config/descriptor/outwear/sleeve_shape.csv b/app/service/attribute/config/descriptor/outwear/sleeve_shape.csv new file mode 100644 index 0000000..bdf6ccc --- /dev/null +++ b/app/service/attribute/config/descriptor/outwear/sleeve_shape.csv @@ -0,0 +1,9 @@ +labelName,join_attr,taskName,taskId +Regular,attr_Sleeve_shape_O_1,Sleeve_shape,4 +Slim,attr_Sleeve_shape_O_2,Sleeve_shape,4 +Puff,attr_Sleeve_shape_O_3,Sleeve_shape,4 +Bell,attr_Sleeve_shape_O_4,Sleeve_shape,4 +Batwing,attr_Sleeve_shape_O_5,Sleeve_shape,4 +Shirt,attr_Sleeve_shape_O_6,Sleeve_shape,4 +Rib,attr_Sleeve_shape_O_7,Sleeve_shape,4 +Raglan,attr_Sleeve_shape_O_8,Sleeve_shape,4 diff --git a/app/service/attribute/config/descriptor/outwear/sleeve_shoulder.csv b/app/service/attribute/config/descriptor/outwear/sleeve_shoulder.csv new file mode 100644 index 0000000..403a9a2 --- /dev/null +++ b/app/service/attribute/config/descriptor/outwear/sleeve_shoulder.csv @@ -0,0 +1,6 @@ +labelName,join_attr,taskName,taskId +Regular,attr_Sleeve_shoulder_O_1,Sleeve_shoulder,5 +Cold,attr_Sleeve_shoulder_O_2,Sleeve_shoulder,5 +Tucked,attr_Sleeve_shoulder_O_3,Sleeve_shoulder,5 +Balmain,attr_Sleeve_shoulder_O_4,Sleeve_shoulder,5 +Drop_Shoulder,attr_Sleeve_shoulder_O_5,Sleeve_shoulder,5 diff --git 
a/app/service/attribute/config/descriptor/outwear/type.csv b/app/service/attribute/config/descriptor/outwear/type.csv new file mode 100644 index 0000000..13c8264 --- /dev/null +++ b/app/service/attribute/config/descriptor/outwear/type.csv @@ -0,0 +1,18 @@ +labelName,join_attr,taskName,taskId +Coat,attr_Outer_type_1,Outer_Type,2 +Trench,attr_Outer_type_2,Outer_Type,2 +Baseball_jacket,attr_Outer_type_3,Outer_Type,2 +Hoodie_jacket,attr_Outer_type_4,Outer_Type,2 +Active_jacket,attr_Outer_type_5,Outer_Type,2 +Jacket,attr_Outer_type_6,Outer_Type,2 +Blazer,attr_Outer_type_7,Outer_Type,2 +Cardigan,attr_Outer_type_8,Outer_Type,2 +Capes,attr_Outer_type_9,Outer_Type,2 +Fleeces Jacket,attr_Outer_type_10,Outer_Type,2 +Gilets/Puffer,attr_Outer_type_11,Outer_Type,2 +Aviator jacket,attr_Outer_type_12,Outer_Type,2 +Biker jacket,attr_Outer_type_13,Outer_Type,2 +Pea coat,attr_Outer_type_14,Outer_Type,2 +Shacket,attr_Outer_type_15,Outer_Type,2 +Denim jacket,attr_Outer_type_16,Outer_Type,2 +Raincoat,attr_Outer_type_17,Outer_Type,2 diff --git a/app/service/attribute/config/descriptor/top/T_print.csv b/app/service/attribute/config/descriptor/top/T_print.csv new file mode 100644 index 0000000..d315df5 --- /dev/null +++ b/app/service/attribute/config/descriptor/top/T_print.csv @@ -0,0 +1,16 @@ +labelName,join_attr,taskName,taskId +Abstract,attr_Print_O_1,Print_U,7 +Allover,attr_Print_O_2,Print_U,7 +Animal,attr_Print_O_3,Print_U,7 +Camouflage,attr_Print_O_4,Print_U,7 +Checks,attr_Print_O_5,Print_U,7 +Color_block,attr_Print_O_6,Print_U,7 +Disty print,attr_Print_O_7,Print_U,7 +Dotted,attr_Print_O_8,Print_U,7 +Floral,attr_Print_O_9,Print_U,7 +Graphic print,attr_Print_O_10,Print_U,7 +Logo and slogan,attr_Print_O_11,Print_U,7 +Patchwork,attr_Print_O_12,Print_U,7 +Plain,attr_Print_O_13,Print_U,7 +Plain_dnim,attr_Print_O_14,Print_U,7 +Stripe,attr_Print_O_15,Print_U,7 diff --git a/app/service/attribute/config/descriptor/top/T_softness.csv b/app/service/attribute/config/descriptor/top/T_softness.csv 
new file mode 100644 index 0000000..6ec82a1 --- /dev/null +++ b/app/service/attribute/config/descriptor/top/T_softness.csv @@ -0,0 +1,4 @@ +labelName,join_attr,taskName,taskId +T_softness_Soft,attr_Softness_U_1,Softness_U,9 +T_softness_Medium,attr_Softness_U_2,Softness_U,9 +T_softness_Hard,attr_Softness_U_3,Softness_U,9 diff --git a/app/service/attribute/config/descriptor/top/collar.csv b/app/service/attribute/config/descriptor/top/collar.csv new file mode 100644 index 0000000..a30c014 --- /dev/null +++ b/app/service/attribute/config/descriptor/top/collar.csv @@ -0,0 +1,11 @@ +labelName,join_attr,taskName,taskId +Peterpan,attr_Collar_1,Collar,7 +Shirt,attr_Collar_2,Collar,7 +Rib,attr_Collar_3,Collar,7 +Turtle,attr_Collar_4,Collar,7 +Lapel,attr_Collar_5,Collar,7 +Hoodie,attr_Collar_6,Collar,7 +Mandarin,attr_Collar_7,Collar,7 +Tie,attr_Collar_8,Collar,7 +Ruffle,attr_Collar_9,Collar,7 +Cowl,attr_Collar_10,Collar,7 diff --git a/app/service/attribute/config/descriptor/top/design.csv b/app/service/attribute/config/descriptor/top/design.csv new file mode 100644 index 0000000..c429858 --- /dev/null +++ b/app/service/attribute/config/descriptor/top/design.csv @@ -0,0 +1,15 @@ +labelName,join_attr,taskName,taskId +Asymmetrical,attr_Design_U_1,Design_U,10 +Tiered,attr_Design_U_2,Design_U,10 +Tassel,attr_Design_U_3,Design_U,10 +Ruffle,attr_Design_U_4,Design_U,10 +Pleated,attr_Design_U_5,Design_U,10 +Wrap,attr_Design_U_6,Design_U,10 +Ripped,attr_Design_U_7,Design_U,10 +Cut_out,attr_Design_U_8,Design_U,10 +Eyelet,attr_Design_U_9,Design_U,10 +Folded,attr_Design_U_10,Design_U,10 +Tied,attr_Design_U_11,Design_U,10 +Drapes,attr_Design_U_12,Design_U,10 +Ribbon,attr_Design_U_13,Design_U,10 +Button,attr_Design_U_14,Design_U,10 diff --git a/app/service/attribute/config/descriptor/top/length.csv b/app/service/attribute/config/descriptor/top/length.csv new file mode 100644 index 0000000..fc8baba --- /dev/null +++ b/app/service/attribute/config/descriptor/top/length.csv @@ -0,0 +1,4 @@ 
+labelName,join_attr,taskName,taskId +Short,attr_Top_length_1,Top_length,1 +Regular,attr_Top_length_2,Top_length,1 +Long,attr_Top_length_3,Top_length,1 diff --git a/app/service/attribute/config/descriptor/top/neckline.csv b/app/service/attribute/config/descriptor/top/neckline.csv new file mode 100644 index 0000000..508fe93 --- /dev/null +++ b/app/service/attribute/config/descriptor/top/neckline.csv @@ -0,0 +1,7 @@ +labelName,join_attr,taskName,taskId +Round,attr_Neckline_1,Neckline,6 +V,attr_Neckline_2,Neckline,6 +Square,attr_Neckline_3,Neckline,6 +One_shoulder,attr_Neckline_4,Neckline,6 +Off_shoulder,attr_Neckline_5,Neckline,6 +Strapless,attr_Neckline_6,Neckline,6 diff --git a/app/service/attribute/config/descriptor/top/opening_type.csv b/app/service/attribute/config/descriptor/top/opening_type.csv new file mode 100644 index 0000000..e44be59 --- /dev/null +++ b/app/service/attribute/config/descriptor/top/opening_type.csv @@ -0,0 +1,5 @@ +labelName,join_attr,taskName,taskId +Button,attr_OPType_U_1,OPType_U,11 +Zipper,attr_OPType_U_2,OPType_U,11 +Thread,attr_OPType_U_3,OPType_U,11 +Hook,attr_OPType_U_4,OPType_U,11 diff --git a/app/service/attribute/config/descriptor/top/silhouette.csv b/app/service/attribute/config/descriptor/top/silhouette.csv new file mode 100644 index 0000000..ed19732 --- /dev/null +++ b/app/service/attribute/config/descriptor/top/silhouette.csv @@ -0,0 +1,7 @@ +labelName,join_attr,taskName,taskId +T_silhouette_A_Line,attr_Silhouette_U_1,Silhouette_U,12 +T_silhouette_H_Shape,attr_Silhouette_U_2,Silhouette_U,12 +T_silhouette_Slim,attr_Silhouette_U_3,Silhouette_U,12 +T_silhouette_Flyman,attr_Silhouette_U_4,Silhouette_U,12 +T_silhouette_Peplum,attr_Silhouette_U_5,Silhouette_U,12 +T_silhouette_Oversize,attr_Silhouette_U_6,Silhouette_U,12 diff --git a/app/service/attribute/config/descriptor/top/sleeve_length.csv b/app/service/attribute/config/descriptor/top/sleeve_length.csv new file mode 100644 index 0000000..8338fbd --- /dev/null +++ 
b/app/service/attribute/config/descriptor/top/sleeve_length.csv @@ -0,0 +1,6 @@ +labelName,join_attr,taskName,taskId +Sleeveless,attr_Sleeve_length_1,Sleeve_length,3 +Short,attr_Sleeve_length_2,Sleeve_length,3 +Middle,attr_Sleeve_length_3,Sleeve_length,3 +Seven,attr_Sleeve_length_4,Sleeve_length,3 +Long,attr_Sleeve_length_5,Sleeve_length,3 diff --git a/app/service/attribute/config/descriptor/top/sleeve_shape.csv b/app/service/attribute/config/descriptor/top/sleeve_shape.csv new file mode 100644 index 0000000..ecc7cc8 --- /dev/null +++ b/app/service/attribute/config/descriptor/top/sleeve_shape.csv @@ -0,0 +1,9 @@ +labelName,join_attr,taskName,taskId +Regular,attr_Sleeve_shape_1,Sleeve_shape,4 +Slim,attr_Sleeve_shape_2,Sleeve_shape,4 +Puff,attr_Sleeve_shape_3,Sleeve_shape,4 +Bell,attr_Sleeve_shape_4,Sleeve_shape,4 +Batwing,attr_Sleeve_shape_5,Sleeve_shape,4 +Shirt,attr_Sleeve_shape_6,Sleeve_shape,4 +Rib,attr_Sleeve_shape_7,Sleeve_shape,4 +Raglan,attr_Sleeve_shape_8,Sleeve_shape,4 diff --git a/app/service/attribute/config/descriptor/top/sleeve_shoulder.csv b/app/service/attribute/config/descriptor/top/sleeve_shoulder.csv new file mode 100644 index 0000000..dc7dcff --- /dev/null +++ b/app/service/attribute/config/descriptor/top/sleeve_shoulder.csv @@ -0,0 +1,5 @@ +labelName,join_attr,taskName,taskId +Regular,attr_Sleeve_shoulder_1,Sleeve_shoulder,5 +Cold,attr_Sleeve_shoulder_2,Sleeve_shoulder,5 +Tucked,attr_Sleeve_shoulder_3,Sleeve_shoulder,5 +Balmain,attr_Sleeve_shoulder_4,Sleeve_shoulder,5 diff --git a/app/service/attribute/config/descriptor/top/type.csv b/app/service/attribute/config/descriptor/top/type.csv new file mode 100644 index 0000000..6bdb9c6 --- /dev/null +++ b/app/service/attribute/config/descriptor/top/type.csv @@ -0,0 +1,15 @@ +labelName,join_attr,taskName,taskId +Bandeau,attr_toptype_1,Top_Type,2 +Blouse,attr_toptype_2,Top_Type,2 +Bodysuit,attr_toptype_3,Top_Type,2 +Bralets,attr_toptype_4,Top_Type,2 +Camisole,attr_toptype_5,Top_Type,2 
+Crop_Top,attr_toptype_6,Top_Type,2 +Hoodie,attr_toptype_7,Top_Type,2 +Pullover,attr_toptype_8,Top_Type,2 +Polo_shirt,attr_toptype_9,Top_Type,2 +Shirt,attr_toptype_10,Top_Type,2 +Strapeless,attr_toptype_11,Top_Type,2 +Sweater,attr_toptype_12,Top_Type,2 +Tank_Top,attr_toptype_13,Top_Type,2 +T_shirt,attr_toptype_14,Top_Type,2 diff --git a/app/service/attribute/config/local_debug_const.py b/app/service/attribute/config/local_debug_const.py new file mode 100644 index 0000000..2721fee --- /dev/null +++ b/app/service/attribute/config/local_debug_const.py @@ -0,0 +1,78 @@ +top_description_list = [r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\top/length.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\top/type.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\top/sleeve_length.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\top/sleeve_shape.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\top/sleeve_shoulder.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\top/neckline.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\top/design.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\top/opening_type.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\top/silhouette.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\top/collar.csv'] + +top_model_list = ['attr_retrieve_T_length', + 'attr_retrieve_T_type', + 'attr_retrieve_T_sleeve_length', + 'attr_retrieve_T_sleeve_shape', + 'attr_retrieve_T_sleeve_shoulder', + 'attr_retrieve_T_neckline', + 'attr_retrieve_T_design', + 'attr_retrieve_T_optype', + 'attr_retrieve_T_silhouette', + 'attr_retrieve_T_collar' + ] + +bottom_description_list = [ + 
r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\bottom/subtype.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\bottom/length.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\bottom/silhouette.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\bottom/opening_type.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\bottom/design.csv'] + +bottom_model_list = [ + 'attr_retrieve_B_subtype', + 'attr_retrieve_B_length', + 'attr_retrieve_B_silhouette', + 'attr_recong_B_optype', + 'attr_retrieve_B_design'] + +outwear_description_list = [r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\outwear/length.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\outwear/sleeve_length.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\outwear/sleeve_shape.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\outwear/sleeve_shoulder.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\outwear/collar.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\outwear/design.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\outwear/opening_type.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\outwear/silhouette.csv', ] + +outwear_model_list = ['attr_recong_O_length', + 'attr_retrieve_O_sleeve_length', + 'attr_retrieve_O_sleeve_shape', + 'attr_retrieve_O_sleeve_shoulder', + 'attr_retrieve_O_collar', + 'attr_retrieve_O_design', + 'attr_recong_O_optype', + 'attr_retrieve_O_silhouette'] + +dress_description_list = [ # r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\dress/D_length.csv', + 
r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\dress/sleeve_length.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\dress/sleeve_shape.csv', + # r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\dress/D_sleeve_shoulder.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\dress/neckline.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\dress/collar.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\dress/design.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\dress/silhouette.csv', + r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor\dress/type.csv'] + +dress_model_list = [ # 'attr_recong_D_length', + 'attr_retrieve_D_sleeve_length', + 'attr_retrieve_D_sleeve_shape', + # 'attr_retrieve_D_sleeve_shoulder', + 'attr_retrieve_D_neckline', + 'attr_retrieve_D_collar', + 'attr_retrieve_D_design', + 'attr_retrieve_D_silohouette', + 'attr_retrieve_D_type' +] + +category_description = './description/category/category_dis.csv' +category_model = 'attr_retrieve_category' diff --git a/app/service/attribute/service_att_recognition.py b/app/service/attribute/service_att_recognition.py new file mode 100644 index 0000000..da71c16 --- /dev/null +++ b/app/service/attribute/service_att_recognition.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +import logging +from pprint import pprint +import torch +import cv2 +import mmcv +import numpy as np +import pandas as pd +from minio import Minio +import tritonclient.http as httpclient +from app.core.config import * +from app.schemas.attribute_retrieve import AttributeRecognitionModel + + +class AttributeRecognition: + def __init__(self, const, request_data): + self.minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + 
logging.info("实例化完成") + self.request_data = [] + for i, sketch in enumerate(request_data): + self.request_data.append( + { + 'obj': self.preprocess(self.get_image(sketch.sketch_img_url)), + 'category': sketch.category, + 'colony': sketch.colony, + 'sketch_img_url': sketch.sketch_img_url, + } + ) + self.const = const + self.triton_client = httpclient.InferenceServerClient(url=f"{ATT_TRITON_URL}") + + def get_result(self): + for sketch in self.request_data: + if sketch['category'] == "Tops" or sketch['category'] == "Blouse": + attr_dict = {} + for i in range(len(self.const.top_description_list)): + attr_description = self.const.top_description_list[i] + attr_model_path = self.const.top_model_list[i] + present_dict = self.get_attribute(attr_model_path, attr_description, sketch['obj']) + attr_dict = self.merge(attr_dict, present_dict) + + elif sketch['category'] == 'Trousers' or sketch['category'] == "Skirt" or sketch['category'] == "Bottoms": + attr_dict = {} + for i in range(len(self.const.bottom_description_list)): + attr_description = self.const.bottom_description_list[i] + attr_model_path = self.const.bottom_model_list[i] + present_dict = self.get_attribute(attr_model_path, attr_description, sketch['obj']) + attr_dict = self.merge(attr_dict, present_dict) + + elif sketch['category'] == 'Dress': + attr_dict = {} + for i in range(len(self.const.dress_description_list)): + attr_description = self.const.dress_description_list[i] + attr_model_path = self.const.dress_model_list[i] + present_dict = self.get_attribute(attr_model_path, attr_description, sketch['obj']) + attr_dict = self.merge(attr_dict, present_dict) + + elif sketch['category'] == 'Outwear': + attr_dict = {} + for i in range(len(self.const.outwear_description_list)): + attr_description = self.const.outwear_description_list[i] + attr_model_path = self.const.outwear_model_list[i] + present_dict = self.get_attribute(attr_model_path, attr_description, sketch['obj']) + attr_dict = self.merge(attr_dict, 
present_dict) + + else: + attr_dict = {} + sketch['attr_dict'] = attr_dict + del sketch['obj'] + return self.request_data + + def get_attribute(self, model_name, description, image): + attr_type = pd.read_csv(description) + inputs = [ + httpclient.InferInput("input__0", image.shape, datatype="FP32") + ] + inputs[0].set_data_from_numpy(image, binary_data=True) + results = self.triton_client.infer(model_name=model_name, inputs=inputs) + inference_output = torch.from_numpy(results.as_numpy(f"output__0")) + scores = inference_output.detach().numpy() + colattr = list(attr_type['labelName']) + task = description.split('/')[-1][:-4] + maxsc = np.max(scores[0][:5]) + indexs = np.argwhere(scores == maxsc)[:, 1] + attr = { + task: [] + } + for i in range(len(indexs)): + atr = colattr[indexs[i]] + attr[task].append(atr) + return attr + + @staticmethod + def merge(dict1, dict2): + res = {**dict1, **dict2} + return res + + def get_image(self, url): + response = self.minio_client.get_object(url.split("/", 1)[0], url.split("/", 1)[1]) + img = np.frombuffer(response.data, np.uint8) # 转成8位无符号整型 + img = cv2.imdecode(img, cv2.IMREAD_COLOR) # 解码 + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + return img + + @staticmethod + def preprocess(img): + img = mmcv.imread(img) + ori_shape = img.shape[:2] + img_scale = (224, 224) + scale_factor = [] + img, x, y = mmcv.imresize(img, img_scale, return_scale=True) + scale_factor.append(x) + scale_factor.append(y) + img = mmcv.imnormalize(img, mean=np.array([123.675, 116.28, 103.53]), std=np.array([58.395, 57.12, 57.375]), to_rgb=True) + preprocessed_img = np.expand_dims(img.transpose(2, 0, 1), axis=0) + return preprocessed_img + + +if __name__ == '__main__': + data = [ + { + "category": "Dress", + "colony": "Female", + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/ae976103-d7ec-4eed-b5d1-3e5f04d8be26.jpg" + }, + { + "category": "Dress", + "colony": "Female", + "sketch_img_url": 
"aida-users/89/sketchboard/female/Dress/6d7d97a7-5a7d-48bd-9e14-b51119b48620.jpg" + }, + { + "category": "Dress", + "colony": "Female", + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/f2437141-1104-40a5-bcb9-f436088698bb.jpg" + }, + { + "category": "Dress", + "colony": "Female", + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/07af8613-eb2e-44fd-97cb-a97249a5754c.jpg" + }, + { + "category": "Blouse", + "colony": "Female", + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/bac9fb15-6860-4112-ac97-f0dea079da75.jpg" + }, + { + "category": "Dress", + "colony": "Female", + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/11d59844-effa-4590-82f9-9ea382c76126.jpg" + }, + { + "category": "Dress", + "colony": "Female", + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/849bf94c-66b8-42f5-8c2e-c1c1f4c8d0e0.jpg" + }, + { + "category": "Dress", + "colony": "Female", + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/e6724ab7-8d3f-4677-abe0-c3e42ab7af85.jpeg" + } + ] + from app.service.attribute.config import local_debug_const + + rq_data = [AttributeRecognitionModel(category=d['category'], colony=d['colony'], sketch_img_url=d['sketch_img_url']) for d in data] + server = AttributeRecognition(local_debug_const, rq_data) + pprint(server.get_result()) diff --git a/app/service/attribute/service_category_recognition.py b/app/service/attribute/service_category_recognition.py new file mode 100644 index 0000000..18ee043 --- /dev/null +++ b/app/service/attribute/service_category_recognition.py @@ -0,0 +1,171 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +""" +@Project :trinity_client +@File :server.py +@Author :周成融 +@Date :2023/9/16 18:31:08 +@detail : +""" +from skimage import transform +import cv2 +import mmcv +import numpy as np +import pandas as pd +from minio import Minio +import tritonclient.http as httpclient +import torch + +from app.core.config import * +from app.schemas.attribute_retrieve import 
CategoryRecognitionModel + + +class CategoryRecognition: + def __init__(self, request_data): + self.attr_type = pd.read_csv(CATEGORY_PATH) + self.minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) + self.request_data = [] + self.triton_client = httpclient.InferenceServerClient(url=ATT_TRITON_URL) + for sketch in request_data: + self.request_data.append( + { + 'obj': self.preprocess(self.get_image(sketch.sketch_img_url)), + 'colony': sketch.colony, + 'sketch_img_url': sketch.sketch_img_url, + } + ) + + @staticmethod + def preprocess(img): + img = mmcv.imread(img) + # ori_shape = img.shape[:2] + img_scale = (224, 224) + scale_factor = [] + img, x, y = mmcv.imresize(img, img_scale, return_scale=True) + scale_factor.append(x) + scale_factor.append(y) + img = mmcv.imnormalize(img, mean=np.array([123.675, 116.28, 103.53]), std=np.array([58.395, 57.12, 57.375]), to_rgb=True) + preprocessed_img = np.expand_dims(img.transpose(2, 0, 1), axis=0) + return preprocessed_img + + def get_image(self, url): + # Get data of an object. + # Read data from response. 
+ response = self.minio_client.get_object(url.split("/", 1)[0], url.split("/", 1)[1]) + img = np.frombuffer(response.data, np.uint8) # 转成8位无符号整型 + img = cv2.imdecode(img, cv2.IMREAD_COLOR) # 解码 + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + return img + + def get_category(self, image): + inputs = [ + httpclient.InferInput("input__0", image.shape, datatype="FP32") + ] + inputs[0].set_data_from_numpy(image, binary_data=True) + results = self.triton_client.infer(model_name="attr_retrieve_category", inputs=inputs) + inference_output = torch.from_numpy(results.as_numpy(f'output__0')) + + scores = inference_output.detach().numpy() + + colattr = list(self.attr_type['labelName']) + + task = self.attr_type['taskName'][0] + + maxsc = np.max(scores[0][:5]) + indexs = np.argwhere(scores == maxsc)[:, 1] + + # attr = {} + # attr[task] = [] + # for i in range(len(indexs)): + # atr = colattr[indexs[i]] + # attr[task].append(atr) + + return colattr[indexs[0]] + + def get_result(self): + for sketch in self.request_data: + if sketch['colony'] == "Female": + sketch['category'] = self.get_category(sketch["obj"]) + elif sketch['colony'] == "Male": + category = self.get_category(sketch["obj"]) + if category == 'Trousers' or category == 'Skirt': + sketch['category'] = 'Bottoms' + elif category == 'Blouse' or category == 'Dress': + sketch['category'] = 'Tops' + else: + sketch['category'] = 'Outwear' + # 删除图片对象 + del sketch["obj"] + return self.request_data + + +class Rescale(object): + """Rescale the image in a sample to a given size. + + Args: + output_size (tuple or int): Desired output size. If tuple, output is + matched to output_size. If int, smaller of image edges is matched + to output_size keeping aspect ratio the same. 
+ """ + + def __init__(self, output_size): + assert isinstance(output_size, (int, tuple)) + self.output_size = output_size + + def __call__(self, image, landmarks): + h, w = image.shape[:2] + if isinstance(self.output_size, int): + if h > w: + new_h, new_w = self.output_size * h / w, self.output_size + else: + new_h, new_w = self.output_size, self.output_size * w / h + else: + new_h, new_w = self.output_size + + new_h, new_w = int(new_h), int(new_w) + img = transform.resize(image, (new_h, new_w), mode='constant') + # h and w are swapped for landmarks because for images, + # x and y axes are axis 1 and 0 respectively + landmarks = landmarks * [new_w / w, new_h / h] + return img, landmarks + + +if __name__ == '__main__': + data = [ + { + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/ae976103-d7ec-4eed-b5d1-3e5f04d8be26.jpg", + "colony": "Female" + }, + { + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/6d7d97a7-5a7d-48bd-9e14-b51119b48620.jpg", + "colony": "Female" + }, + { + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/f2437141-1104-40a5-bcb9-f436088698bb.jpg", + "colony": "Female" + }, + { + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/07af8613-eb2e-44fd-97cb-a97249a5754c.jpg", + "colony": "Female" + }, + { + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/bac9fb15-6860-4112-ac97-f0dea079da75.jpg", + "colony": "Female" + }, + { + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/11d59844-effa-4590-82f9-9ea382c76126.jpg", + "colony": "Female" + }, + { + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/849bf94c-66b8-42f5-8c2e-c1c1f4c8d0e0.jpg", + "colony": "Female" + }, + { + "sketch_img_url": "aida-users/89/sketchboard/female/Dress/e6724ab7-8d3f-4677-abe0-c3e42ab7af85.jpeg", + "colony": "Female" + } + ] + rq_data = [CategoryRecognitionModel(sketch_img_url=d['sketch_img_url'], colony=d['colony']) for d in data] + server = CategoryRecognition(rq_data) + + print(server.get_result()) diff --git 
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
@Project :trinity_client
@File    :service.py
@Author  :周成融
@Date    :2023/7/26 12:01:05
@detail  :Image-generation service: feeds a prompt (and, for img2img, a
          source image) to a Triton-hosted model, tracks task status in
          Redis and publishes the final record to RabbitMQ when not DEBUG.
"""
import json
import logging
import time
from io import BytesIO

import cv2
import minio
import redis
import tritonclient.grpc as grpcclient
import numpy as np
from minio import Minio
from tritonclient.utils import np_to_triton_dtype

from app.core.config import *
from app.schemas.generate_image import GenerateImageModel
from app.service.generate_image.utils.image_processing import remove_background, stain_detection
from app.service.generate_image.utils.upload_sd_image import upload_png_sd

logger = logging.getLogger()


class GenerateImage:
    """One txt2img / img2img generation task against the Triton model server.

    Task state ('PENDING' / 'SUCCESS' / 'FAILURE' / 'REVOKED') lives in Redis
    under the task id; the finished payload is additionally published to
    RabbitMQ unless DEBUG is set.
    """

    def __init__(self, request_data):
        # NOTE(review): `pika` is not in this module's explicit imports — it
        # presumably arrives via `from app.core.config import *`; confirm.
        # Fix: the original opened a second, unconditional (and leaked)
        # RabbitMQ connection right after this guard; the channel is only
        # ever used when DEBUG is False, so one guarded connection suffices.
        if DEBUG is False:
            self.connection = pika.BlockingConnection(pika.ConnectionParameters(**RABBITMQ_PARAMS))
            self.channel = self.connection.channel()
        self.minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE)
        self.grpc_client = grpcclient.InferenceServerClient(url=GI_MODEL_URL)
        self.redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True)
        if request_data.mode == "img2img":
            self.image = self.get_image(request_data.image_url)
        else:
            # txt2img: the model still expects an image tensor; feed noise.
            self.image = np.random.randint(0, 256, (1024, 1024, 3), dtype=np.uint8)
        self.prompt = request_data.prompt
        self.tasks_id = request_data.tasks_id
        # Task ids look like "<id>-<user_id>"; the user id is the suffix
        # after the last '-'. TODO(review): confirm with the id producer.
        self.user_id = self.tasks_id[self.tasks_id.rfind('-') + 1:]
        self.mode = request_data.mode
        self.batch_size = 1
        self.category = request_data.category
        self.index = 0
        self.generate_data = {'tasks_id': self.tasks_id, 'status': 'PENDING', 'message': "pending", 'data': ''}
        self.redis_client.set(self.tasks_id, json.dumps(self.generate_data))
        self.redis_client.expire(self.tasks_id, 600)  # auto-expire stale task records after 10 min

    def get_image(self, image_url):
        """Fetch "bucket/key" from MinIO, decode and resize to 1024x1024 BGR.

        Falls back to random noise when the object is missing so the
        pipeline can still run end to end.
        """
        try:
            response = self.minio_client.get_object(image_url.split('/')[0], image_url[image_url.find('/') + 1:])
            image_file = BytesIO(response.data)
            image_array = np.asarray(bytearray(image_file.read()), dtype=np.uint8)
            image_cv2 = cv2.imdecode(image_array, cv2.IMREAD_COLOR)
            image = cv2.resize(image_cv2, (1024, 1024))
        except minio.error.S3Error:
            image = np.random.randint(0, 256, (1024, 1024, 3), dtype=np.uint8)
        return image

    def callback(self, result, error):
        """Triton async-infer completion callback: record FAILURE, or
        post-process / upload the generated image and record SUCCESS."""
        if error:
            self.generate_data['status'] = "FAILURE"
            self.generate_data['message'] = str(error)
            self.generate_data['data'] = str(error)
            self.redis_client.set(self.tasks_id, json.dumps(self.generate_data))
        else:
            image_result = result.as_numpy("generated_image")[0]
            if self.category == "sketch":
                # Strip the background, then check the corners for smudges.
                remove_bg_image = remove_background(np.asarray(image_result))
                # stain_detection returns (clean?, image): False when a
                # corner contains non-white pixels.
                is_smudge, not_smudge_image = stain_detection(remove_bg_image)
                if is_smudge is False:
                    # Dirty output: point the caller at the stock image.
                    # NOTE(review): the upload below still overwrites 'data'
                    # afterwards — possibly a missing early return; confirm.
                    self.generate_data['status'] = "SUCCESS"
                    self.generate_data['message'] = "success"
                    self.generate_data['data'] = GI_SYS_IMAGE_URL
                    self.redis_client.set(self.tasks_id, json.dumps(self.generate_data))
                else:
                    image_result = not_smudge_image
            image_url = upload_png_sd(image_result, user_id=self.user_id, category=f"{self.category}", object_name=f"{self.tasks_id}.png")
            self.generate_data['status'] = "SUCCESS"
            self.generate_data['message'] = "success"
            self.generate_data['data'] = str(image_url)
            self.redis_client.set(self.tasks_id, json.dumps(self.generate_data))

    def read_tasks_status(self):
        """Return the task's Redis record as (parsed dict, raw JSON string)."""
        status_data = self.redis_client.get(self.tasks_id)
        return json.loads(status_data), status_data

    def infer(self, inputs):
        """Kick off non-blocking inference; completion lands in callback()."""
        return self.grpc_client.async_infer(
            model_name=GI_MODEL_NAME,
            inputs=inputs,
            callback=self.callback
        )

    def get_result(self):
        """Build Triton inputs, run async inference and poll Redis for the
        outcome; always publishes the final record to RabbitMQ (non-DEBUG).

        Returns the final task dict, or re-raises on setup failure (after
        recording FAILURE in Redis).
        """
        try:
            prompts = [self.prompt] * self.batch_size
            modes = [self.mode] * self.batch_size
            images = [self.image.astype(np.float16)] * self.batch_size

            text_obj = np.array(prompts, dtype="object").reshape((-1, 1))
            mode_obj = np.array(modes, dtype="object").reshape((-1, 1))
            image_obj = np.array(images, dtype=np.float16).reshape((-1, 1024, 1024, 3))

            input_text = grpcclient.InferInput("prompt", text_obj.shape, np_to_triton_dtype(text_obj.dtype))
            input_image = grpcclient.InferInput("input_image", image_obj.shape, "FP16")
            input_mode = grpcclient.InferInput("mode", mode_obj.shape, np_to_triton_dtype(text_obj.dtype))

            input_text.set_data_from_numpy(text_obj)
            input_image.set_data_from_numpy(image_obj)
            input_mode.set_data_from_numpy(mode_obj)

            inputs = [input_text, input_image, input_mode]
            ctx = self.infer(inputs)
            # NOTE(review): 60 iterations x 0.1s ≈ 6 seconds of polling —
            # probably meant to be a 60-second budget; confirm before changing.
            time_out = 60
            generate_data = None
            while time_out > 0:
                generate_data, _ = self.read_tasks_status()
                if generate_data['status'] in ["REVOKED", "FAILURE"]:
                    ctx.cancel()
                    break
                elif generate_data['status'] == "SUCCESS":
                    break
                time_out -= 1
                time.sleep(0.1)
            return generate_data
        except Exception as e:
            self.generate_data['status'] = "FAILURE"
            self.generate_data['message'] = "failure"
            self.generate_data['data'] = str(e)
            self.redis_client.set(self.tasks_id, json.dumps(self.generate_data))
            # Fix: re-raise the original exception (was `raise Exception(str(e))`,
            # which destroyed the exception type and traceback).
            raise
        finally:
            dict_generate_data, str_generate_data = self.read_tasks_status()
            if DEBUG is False:
                self.channel.basic_publish(exchange='', routing_key=GI_RABBITMQ_QUEUES, body=str_generate_data)
                logger.info(f" [x] Sent {json.dumps(dict_generate_data, indent=4)}")


def infer_cancel(tasks_id):
    """Mark `tasks_id` REVOKED in Redis so the polling loop cancels inference."""
    redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True)
    data = {'tasks_id': tasks_id, 'status': 'REVOKED', 'message': "revoked", 'data': 'revoked'}
    redis_client.set(tasks_id, json.dumps(data))
    return data
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
@Project :trinity_client
@File    :test.py
@Author  :周成融
@Date    :2023/7/26 12:01:05
@detail  :Scratch client for an older PROMPT/SAMPLES/STEPS-style Triton
          image-generation model; kept for manual experiments only.
"""
import json
import logging

import minio
import numpy as np
import random
import redis
import tritonclient
import tritonclient.grpc as grpc_client
from minio import Minio
import cv2
from PIL import Image
import time
from app.core.config import *
from app.schemas.generate_image import GenerateImageModel
from app.service.generate_image.utils.image_processing import remove_background
from app.service.generate_image.utils.upload_sd_image import upload_png_sd
from app.service.utils.decorator import RunTime
from app.service.utils.generate_uuid import generate_uuid

logger = logging.getLogger()


class GenerateImage:
    """Experimental generation task: uploads each produced image to MinIO,
    records status in Redis and publishes the outcome to RabbitMQ."""

    def __init__(self, request_data):
        self.tasks_id = request_data.tasks_id
        self.model = request_data.model
        self.request_count = request_data.request_count
        self.prompt = request_data.prompt
        self.image = request_data.image
        self.mode = request_data.mode
        self.image_url = request_data.image_url
        self.user_id = request_data.user_id
        self.content = request_data.content
        self.category = request_data.category
        # Model name is the category prefixed onto the configured base name.
        self.model_name = f"{self.category}{GI_MODEL_NAME}"
        self.version = request_data.version
        # TODO(review): "1" is a placeholder URL — point at the real endpoint.
        self.triton_client = grpc_client.InferenceServerClient(url="1")
        self.redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True)
        self.connection = pika.BlockingConnection(pika.ConnectionParameters(**RABBITMQ_PARAMS))
        self.channel = self.connection.channel()
        self.minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE)
        self.samples = 4  # no. of images to generate
        self.steps = 24
        self.guidance_scale = 7
        self.seed = random.randint(0, 2000000000)
        # Fix: `self.batch_size` was first set from request_data and then
        # immediately overwritten with 1, and `self.mode` was assigned twice;
        # only the effective assignments are kept.
        self.batch_size = 1
        self.generate_data = json.dumps({'status': 'PENDING', 'message': "pending", 'data': ''})
        self.redis_client.set(self.tasks_id, self.generate_data)

    @staticmethod
    def image_grid(imgs, rows, cols):
        """Paste rows*cols equally-sized PIL images into one grid image."""
        assert len(imgs) == rows * cols

        w, h = imgs[0].size
        grid = Image.new('RGB', size=(cols * w, rows * h))

        for i, img in enumerate(imgs):
            grid.paste(img, box=(i % cols * w, i // cols * h))
        return grid

    @staticmethod
    def preprocess_image(image, category):
        """Crop (print/moodboard) or white-pad (sketch) to a square, then resize to 512x512."""
        height, width, _ = image.shape

        if category == "print" or category == "moodboard":
            # Centre-crop the largest inscribed square.
            square_size = min(height, width)
            start_x = (width - square_size) // 2
            start_y = (height - square_size) // 2
            cropped = image[start_y: start_y + square_size, start_x: start_x + square_size]
            resized_image = cv2.resize(cropped, (512, 512))

        elif category == "sketch":
            # Pad onto a white square so nothing is cropped away.
            max_dimension = max(height, width)
            square_image = np.ones((max_dimension, max_dimension, 3), dtype=np.uint8) * 255
            start_h = (max_dimension - height) // 2
            start_w = (max_dimension - width) // 2
            square_image[start_h:start_h + height, start_w:start_w + width] = image
            resized_image = cv2.resize(square_image, (512, 512))

        else:
            raise ValueError(f"wrong category {category}, only in moodboard, print and sketch!")

        return resized_image

    def get_image(self):
        """Fetch and preprocess the source image from MinIO; random noise on miss."""
        try:
            response = self.minio_client.get_object(self.image_url.split('/')[0], self.image_url[self.image_url.find('/') + 1:])
            img = np.frombuffer(response.data, np.uint8)  # raw bytes -> uint8 buffer
            img = cv2.imdecode(img, cv2.IMREAD_COLOR)  # decode
            img = self.preprocess_image(img, self.category)
            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        except minio.error.S3Error:
            img = np.random.randn(512, 512, 3)
        return img

    def callback(self, result, error):
        """Async-infer completion: upload each generated image, then record
        and publish the outcome."""
        if error:
            generate_data = json.dumps({'status': 'FAILURE', 'message': f"{error}", 'data': f"{error}"})
            self.redis_client.set(self.tasks_id, generate_data)
        else:
            images = result.as_numpy("IMAGES")
            if images.ndim == 3:
                # Single image came back without a batch axis; add one.
                images = images[None, ...]
            images = (images * 255).round().astype("uint8")
            pil_images = [Image.fromarray(image) for image in images]
            url_list = []
            for i, image in enumerate(pil_images):
                if self.category == "sketch":
                    image = remove_background(np.asarray(image))
                image_url = upload_png_sd(image, user_id=self.user_id, category=f"{self.category}", object_name=f"{generate_uuid()}_{i}.png", )
                url_list.append(image_url)
            generate_data = json.dumps({'status': 'SUCCESS', 'message': 'success', 'data': f'{url_list}'})
            self.channel.basic_publish(exchange='', routing_key=GI_RABBITMQ_QUEUES, body=generate_data)
            logger.info(f" [x] Sent {generate_data}")
            self.redis_client.set(self.tasks_id, generate_data)

    def read_tasks_status(self):
        """Return the task's current Redis record as a dict."""
        status_data = json.loads(self.redis_client.get(self.tasks_id))
        logging.info(f"{self.tasks_id} ===> {status_data}")
        return status_data

    def get_result(self):
        """Build the full input set, run async inference and poll Redis until
        a terminal status or ~60 seconds elapse.

        Fix: an earlier dead `def get_result(self): pass` stub — silently
        shadowed by this definition — was removed.
        """
        self.triton_client.get_model_metadata(model_name=self.model_name, model_version=self.version)
        self.triton_client.get_model_config(model_name=self.model_name, model_version=self.version)

        image = self.get_image()

        # Input placeholders
        prompt_in = tritonclient.grpc.InferInput(name="PROMPT", shape=(self.batch_size,), datatype="BYTES")
        samples_in = tritonclient.grpc.InferInput("SAMPLES", (self.batch_size,), "INT32")
        steps_in = tritonclient.grpc.InferInput("STEPS", (self.batch_size,), "INT32")
        guidance_scale_in = tritonclient.grpc.InferInput("GUIDANCE_SCALE", (self.batch_size,), "FP32")
        seed_in = tritonclient.grpc.InferInput("SEED", (self.batch_size,), "INT64")
        input_images_in = tritonclient.grpc.InferInput("INPUT_IMAGES", image.shape, "FP16")
        images = tritonclient.grpc.InferRequestedOutput(name="IMAGES",
                                                        # binary_data=False
                                                        )
        mode_in = tritonclient.grpc.InferInput("MODE", (self.batch_size,), "INT32")

        # Setting inputs
        prompt_in.set_data_from_numpy(np.asarray([self.content] * self.batch_size, dtype=object))
        samples_in.set_data_from_numpy(np.asarray([self.samples], dtype=np.int32))
        steps_in.set_data_from_numpy(np.asarray([self.steps], dtype=np.int32))
        guidance_scale_in.set_data_from_numpy(np.asarray([self.guidance_scale], dtype=np.float32))
        seed_in.set_data_from_numpy(np.asarray([self.seed], dtype=np.int64))
        input_images_in.set_data_from_numpy(image.astype(np.float16))
        mode_in.set_data_from_numpy(np.asarray([self.mode], dtype=np.int32))

        # inference
        def infer():
            return self.triton_client.async_infer(
                model_name=self.model_name,
                model_version=self.version,
                inputs=[prompt_in, samples_in, steps_in, guidance_scale_in, seed_in, input_images_in, mode_in],
                outputs=[images],
                callback=self.callback
            )

        ctx = infer()
        time_out = 60
        while time_out > 0:
            generate_data = self.read_tasks_status()
            if generate_data['status'] in ["REVOKED", "FAILURE"]:
                ctx.cancel()
                self.channel.basic_publish(exchange='', routing_key=GI_RABBITMQ_QUEUES, body=json.dumps(generate_data))
                logger.info(f" [x] Sent {generate_data}")
                break
            elif generate_data['status'] == "SUCCESS":
                break
            time_out -= 1
            time.sleep(1)
        return self.read_tasks_status()


def infer_cancel(tasks_id):
    """Overwrite the task record with REVOKED so the polling loop cancels."""
    redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True)
    # Fix: the REVOKED payload was duplicated as two identical dict literals;
    # serialize the single dict instead.
    data = {'status': 'REVOKED', 'message': "revoked", 'data': 'revoked'}
    redis_client.set(tasks_id, json.dumps(data))
    return data


if __name__ == '__main__':
    rd = GenerateImageModel(
        mode=1,
        content='a blouse',
        gender='',
        user_id=89,
        image_url='test/微信图片_20231206133428.jpg',
        category='sketch',
        version='1',
        tasks_id='123456'
    )
    server = GenerateImage(rd)
    server.get_result()
    # print(infer_cancel(123456))
logger = logging.getLogger()


def seg_preprocess(img_path):
    """Load an image, resize to 224x224, ImageNet-normalize and add a batch dim.

    Returns (1x3x224x224 float array, original (H, W)).
    """
    img = mmcv.imread(img_path)
    ori_shape = img.shape[:2]
    img_scale = (224, 224)
    scale_factor = []
    img, x, y = mmcv.imresize(img, img_scale, return_scale=True)
    scale_factor.append(x)
    scale_factor.append(y)
    img = mmcv.imnormalize(img, mean=np.array([123.675, 116.28, 103.53]), std=np.array([58.395, 57.12, 57.375]), to_rgb=True)
    preprocessed_img = np.expand_dims(img.transpose(2, 0, 1), axis=0)
    return preprocessed_img, ori_shape


def get_mask(image_obj):
    """Return (3-channel image, uint8 mask of the dominant outline).

    A 4th (alpha) channel, if present, is split off and intersected with the
    contour-derived mask; when no contour is found the mask is all-255.
    """
    pre_mask = None
    if len(image_obj.shape) == 2:
        image_obj = cv2.cvtColor(image_obj, cv2.COLOR_GRAY2RGB)
    if image_obj.shape[2] == 4:  # 4-channel image: treat the alpha plane as a mask
        pre_mask = image_obj[:, :, 3]
        image_obj = image_obj[:, :, :3]

    Contour = get_contours(image_obj)
    Mask = np.zeros(image_obj.shape[:2], np.uint8)
    if len(Contour):
        # Fill a simplified polygon of the largest contour.
        Max_contour = Contour[0]
        Epsilon = 0.001 * cv2.arcLength(Max_contour, True)
        Approx = cv2.approxPolyDP(Max_contour, Epsilon, True)
        cv2.drawContours(Mask, [Approx], -1, 255, -1)
    else:
        Mask = np.ones(image_obj.shape[:2], np.uint8) * 255

    if pre_mask is None:
        mask = Mask
    else:
        mask = cv2.bitwise_and(Mask, pre_mask)
    return image_obj, mask


def get_contours(image):
    """Canny + morphological close, then contours sorted by area (largest first)."""
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    Edge = cv2.Canny(gray, 10, 150)
    kernel = np.ones((5, 5), np.uint8)
    Edge = cv2.dilate(Edge, kernel=kernel, iterations=1)
    Edge = cv2.erode(Edge, kernel=kernel, iterations=1)
    Contour, _ = cv2.findContours(Edge, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    Contour = sorted(Contour, key=cv2.contourArea, reverse=True)
    return Contour


def seg_infer_image(image_obj):
    """Run the segmentation model on Triton (HTTP) and return the per-pixel class map."""
    image, ori_shape = seg_preprocess(image_obj)
    client = httpclient.InferenceServerClient(url=f"{SEG_MODEL_URL}")
    transformed_img = image.astype(np.float32)
    # Input set
    inputs = [
        httpclient.InferInput(SEGMENTATION['input'], transformed_img.shape, datatype="FP32")
    ]
    inputs[0].set_data_from_numpy(transformed_img, binary_data=True)
    # Output set
    outputs = [
        httpclient.InferRequestedOutput(SEGMENTATION['output'], binary_data=True),
    ]
    # Inference, then collect the result.
    results = client.infer(model_name=SEGMENTATION['name'], inputs=inputs, outputs=outputs)
    inference_output1 = torch.from_numpy(results.as_numpy(SEGMENTATION['output']))
    seg_result = seg_postprocess(inference_output1, ori_shape)
    return seg_result


def seg_postprocess(output, ori_shape):
    """Upscale NxCxhxw logits to `ori_shape`, softmax over classes and argmax.

    Returns an integer ndarray of shape (N, *ori_shape).
    """
    seg_logit = F.interpolate(output, size=ori_shape, scale_factor=None, mode='bilinear', align_corners=False)
    seg_logit = F.softmax(seg_logit, dim=1)
    seg_pred = seg_logit.argmax(dim=1)
    seg_pred = seg_pred.cpu().numpy()
    return seg_pred


def remove_background(image):
    """White out everything outside the segmented classes 1 and 2.

    NOTE(review): class 1/2 presumably mean garment front/back — confirm
    against the segmentation model's label map.
    """
    image_obj, mask = get_mask(image)
    seg_result = seg_infer_image(image_obj)

    temp_front = seg_result == 1
    front_mask = (mask * (temp_front + 0).astype(np.uint8))
    temp_back = seg_result == 2
    back_mask = (mask * (temp_back + 0).astype(np.uint8))

    # Drop the leading batch axis when present (the original's redundant
    # `else: x = x` no-op branches were removed).
    if len(front_mask.shape) > 2:
        front_mask = front_mask[0]
    if len(back_mask.shape) > 2:
        back_mask = back_mask[0]

    result_mask = front_mask + back_mask
    white_background = np.ones_like(image_obj) * 255
    remove_bg_image = np.where(result_mask[:, :, None].astype(bool), image_obj, white_background)
    return remove_bg_image


def bounding_box(image):
    """Crop `image` to the union bounding box of all Canny contours."""
    edges = cv2.Canny(image, 50, 150)
    # Find contours.
    contours, _ = cv2.findContours(edges, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    # Fix: with no contours the original left x_min/y_min at +inf and
    # crashed when slicing; return the image unchanged instead.
    if not contours:
        return image
    # Track the rectangle enclosing every contour's bounding box.
    x_min, y_min, x_max, y_max = float('inf'), float('inf'), -1, -1
    for contour in contours:
        x, y, w, h = cv2.boundingRect(contour)
        x_min = min(x_min, x)
        y_min = min(y_min, y)
        x_max = max(x_max, x + w)
        y_max = max(y_max, y + h)

    # Crop the original image to the enclosing rectangle.
    result_image = image[y_min:y_max, x_min:x_max]
    return result_image


def stain_detection(image, spot_size=200):
    """Check the four spot_size x spot_size corners for non-white pixels.

    Returns (True, image) when every corner is pure white ("clean"); returns
    (False, None) as soon as any corner contains other pixels.
    """
    height, width, _ = image.shape

    corners = [
        image[0:spot_size, 0:spot_size],  # top left
        image[0:spot_size, width - spot_size:width],  # top right
        image[height - spot_size:height, 0:spot_size],  # bottom left
        image[height - spot_size:height, width - spot_size:width]  # bottom right
    ]

    for index, corner in enumerate(corners):
        num_white_pixels = (corner == [255, 255, 255]).all(axis=2).sum()
        if num_white_pixels != spot_size * spot_size:
            logger.info(f"第{index + 1}发现了污点")
            return False, None

    if DEBUG:
        # Fix: cv2.rectangle takes (x, y) points; the original passed
        # (row, col) pairs, drawing the debug boxes in the wrong corners.
        for x0, y0 in [(0, 0), (width - spot_size, 0), (0, height - spot_size), (width - spot_size, height - spot_size)]:
            cv2.rectangle(image, (x0, y0), (x0 + spot_size, y0 + spot_size), (0, 0, 255), 2)

    return True, image
def upload_png_sd(image, user_id, category, object_name):
    """Encode `image` (ndarray) as JPEG and upload it to MinIO under
    "<user_id>/<category>/<object_name>".

    Returns the public path "aida-users/<object path>" on success, or None
    when the upload fails (best-effort: the error is logged, not raised).

    NOTE(review): callers pass "*.png" object names but the payload is
    JPEG-encoded with content_type image/jpeg — confirm which is intended.
    """
    try:
        image_file = io.BytesIO()
        image = Image.fromarray(image)
        image.save(image_file, format='JPEG')
        image_file.seek(0)
        minio_req = minio_client.put_object(
            GI_MINIO_BUCKET,
            f'{user_id}/{category}/{object_name}',
            image_file,
            len(image_file.getvalue()),
            content_type='image/jpeg'
        )
        image_url = f"aida-users/{minio_req.object_name}"
        return image_url
    except Exception as e:
        # Fix: the log line named the wrong function ("upload_png_mask"),
        # and the failure path fell off the end; return None explicitly.
        logging.warning(f"upload_png_sd runtime exception : {e}")
        return None
self.tasks_id = data.sr_tasks_id + self.user_id = self.tasks_id[self.tasks_id.rfind('-') + 1:] self.sr_image_url = data.sr_image_url self.sr_xn = data.sr_xn - self.minio_client = Minio( - f"{MINIO_IP}:{MINIO_PORT}", - access_key=MINIO_ACCESS, - secret_key=MINIO_SECRET, - secure=MINIO_SECURE) + self.minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE) self.redis_client.set(self.tasks_id, json.dumps({'status': 'PENDING', 'message': "pending", 'data': ''})) + self.redis_client.expire(self.tasks_id, 600) + self.connection = pika.BlockingConnection(pika.ConnectionParameters(**RABBITMQ_PARAMS)) + self.channel = self.connection.channel() - @RunTime + # @RunTime def read_image(self): try: image_data = self.minio_client.get_object(self.sr_image_url.split("/", 1)[0], self.sr_image_url.split("/", 1)[1]) except minio.error.S3Error as e: sr_data = json.dumps({'tasks_id': self.tasks_id, 'status': 'ERROR', 'message': f'{e}'}) - publish_message(sr_data) + self.channel.basic_publish(exchange='', routing_key=SR_RABBITMQ_QUEUES, body=sr_data) + logger.info(f" [x] Sent {sr_data}") raise FileNotFoundError(f"Image '{self.sr_image_url.split('/', 1)[1]}' not found in bucket '{self.sr_image_url.split('/', 1)[0]}'") img = np.frombuffer(image_data.data, np.uint8) # 转成8位无符号整型 img = cv2.imdecode(img, cv2.IMREAD_COLOR).astype(np.float32) / 255. 
# 解码 @@ -57,7 +48,7 @@ class SuperResolution: logging.info(f"{self.tasks_id} ===> {status_data}") return status_data - @RunTime + # @RunTime def infer(self, inputs): return self.triton_client.async_infer( model_name=SR_MODEL_NAME, @@ -65,9 +56,13 @@ class SuperResolution: callback=self.callback ) - @RunTime + # @RunTime def sr_result(self): sample = self.read_image() + if self.sr_xn == 2: + new_shape = (sample.shape[0] // self.sr_xn, sample.shape[1] // self.sr_xn) + sample = cv2.resize(sample, new_shape) + print(new_shape) sample = np.transpose(sample if sample.shape[2] == 1 else sample[:, :, [2, 1, 0]], (2, 0, 1)) sample = torch.from_numpy(sample).float().unsqueeze(0).numpy() inputs = [ @@ -78,10 +73,17 @@ class SuperResolution: ) ctx = self.infer(inputs) - time_out = 120 - while self.read_tasks_status()['status'] == "PENDING" and time_out > 0: - if self.read_tasks_status()['status'] == "REVOKED": + time_out = 60 + while time_out > 0: + generate_data = self.read_tasks_status() + if generate_data['status'] in ["REVOKED", "FAILURE"]: ctx.cancel() + # noinspection PyTypeChecker + self.channel.basic_publish(exchange='', routing_key=SR_RABBITMQ_QUEUES, body=json.dumps(generate_data)) + logger.info(f" [x] Sent {generate_data}") + break + elif generate_data['status'] == "SUCCESS": + break time_out -= 1 time.sleep(1) return self.read_tasks_status() @@ -96,11 +98,11 @@ class SuperResolution: # output_url = self.upload_img_sr(output, generate_uuid()) # return output_url - def upload_img_sr(self, image, object_name): + def upload_img_sr(self, image): try: image_bytes = cv2.imencode('.jpg', image)[1].tobytes() - image_url = f"test/{self.minio_client.put_object(f'test', f'{object_name}.jpg', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}" - + res = self.minio_client.put_object(f'{SR_MINIO_BUCKET}', f'{self.user_id}/sr/output/{self.tasks_id}.jpg', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png') + image_url = 
f"aida-users/{res.object_name}" return image_url except Exception as e: logger.warning(f"upload_png_mask runtime exception : {e}") @@ -117,9 +119,10 @@ class SuperResolution: if output.ndim == 3: output = np.transpose(output[[2, 1, 0], :, :], (1, 2, 0)) # CHW-RGB to HCW-BGR output = (output * 255.0).round().astype(np.uint8) - output_url = self.upload_img_sr(output, generate_uuid()) + output_url = self.upload_img_sr(output) sr_data = json.dumps({'tasks_id': self.tasks_id, 'status': 'SUCCESS', 'message': 'success', 'data': f'{output_url}'}) - publish_message(sr_data) + self.channel.basic_publish(exchange='', routing_key=SR_RABBITMQ_QUEUES, body=sr_data) + logger.info(f" [x] Sent {sr_data}") self.redis_client.set(self.tasks_id, sr_data) @@ -127,20 +130,10 @@ def infer_cancel(tasks_id): redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True) data = {'tasks': tasks_id, 'status': 'REVOKED', 'message': "revoked", 'data': 'revoked'} sr_data = json.dumps({'status': 'REVOKED', 'message': "revoked", 'data': 'revoked'}) - publish_message(sr_data) redis_client.set(tasks_id, sr_data) return data -def publish_message(sr_data): - connection = pika.BlockingConnection(pika.ConnectionParameters(**RABBITMQ_PARAMS)) - channel = connection.channel() - # 发布消息,并设置回调函数 - channel.basic_publish(exchange='', routing_key=RABBITMQ_QUEUES, body=sr_data) - logger.info(f" [x] Sent {sr_data}") - connection.close() - - if __name__ == '__main__': request_data = SuperResolutionModel(sr_image_url="test/512_image/15.png", sr_xn=2, sr_tasks_id="123") service = SuperResolution(request_data) diff --git a/app/service/super_resolution/test.py b/app/service/super_resolution/test.py deleted file mode 100644 index 14675a7..0000000 --- a/app/service/super_resolution/test.py +++ /dev/null @@ -1,31 +0,0 @@ -import time - -import cv2 -import numpy as np -import torch -import tritonclient.http as httpclient -import tritonclient.grpc as grpcclient - -from PIL import 
Image - -triton_client = grpcclient.InferenceServerClient(url=f"10.1.1.150:7001") - -sample = cv2.imread("1709713346.806274.png", cv2.IMREAD_COLOR).astype(np.float32) / 255. -sample = np.transpose(sample if sample.shape[2] == 1 else sample[:, :, [2, 1, 0]], (2, 0, 1)) -sample = torch.from_numpy(sample).float().unsqueeze(0).numpy() -inputs = [ - grpcclient.InferInput("input", sample.shape, datatype="FP32") -] -inputs[0].set_data_from_numpy(sample - # , binary_data=True - ) -start_time = time.time() -results = triton_client.async_infer(model_name="super_resolution", inputs=inputs) -print(time.time() - start_time) -sr_output = torch.from_numpy(results.as_numpy(f"output")) -output = sr_output.data.squeeze().float().cpu().clamp_(0, 1).numpy() -if output.ndim == 3: - output = np.transpose(output[[2, 1, 0], :, :], (1, 2, 0)) # CHW-RGB to HCW-BGR -output = (output * 255.0).round().astype(np.uint8) -cv2.imshow("", output) -cv2.waitKey(0) diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index dacfbab..0000000 --- a/docker-compose.yml +++ /dev/null @@ -1,9 +0,0 @@ -version: "3" -services: - trinity_aida_dev: - image: "trinity_client_aida_dev:latest" - container_name: trinity_aida_dev - volumes: - - ./trinity_client_aida:/trinity - ports: - - "10200:4562" \ No newline at end of file diff --git a/logging_env.py b/logging_env.py index ce4e9da..d1ac9bc 100644 --- a/logging_env.py +++ b/logging_env.py @@ -17,7 +17,7 @@ LOGGER_CONFIG_DICT = { "class": "logging.handlers.RotatingFileHandler", "level": "INFO", "formatter": "simple", - "filename": LOGS_PATH, + "filename": f"{LOGS_PATH}info.log", "maxBytes": 10485760, "backupCount": 50, "encoding": "utf8", @@ -26,7 +26,7 @@ LOGGER_CONFIG_DICT = { "class": "logging.handlers.RotatingFileHandler", "level": "ERROR", "formatter": "simple", - "filename": LOGS_PATH, + "filename": f"{LOGS_PATH}error.log", "maxBytes": 10485760, "backupCount": 20, "encoding": "utf8", @@ -35,7 +35,7 @@ LOGGER_CONFIG_DICT = { "class": 
"logging.handlers.RotatingFileHandler", "level": "DEBUG", "formatter": "simple", - "filename": LOGS_PATH, + "filename": f"{LOGS_PATH}debug.log", "maxBytes": 10485760, "backupCount": 50, "encoding": "utf8", diff --git a/requirements.txt b/requirements.txt index dc39ccd..1529082 100644 Binary files a/requirements.txt and b/requirements.txt differ