feat generate 升级 attribute retrieve 迁移
This commit is contained in:
45
app/api/api_attribute_retrieve.py
Normal file
45
app/api/api_attribute_retrieve.py
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
import json
|
||||||
|
import logging
|
||||||
|
from fastapi import APIRouter
|
||||||
|
|
||||||
|
from app.schemas.attribute_retrieve import *
|
||||||
|
from app.service.attribute.config import const
|
||||||
|
from app.service.attribute.service_att_recognition import AttributeRecognition
|
||||||
|
from app.service.attribute.service_category_recognition import CategoryRecognition
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
logger = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
# 属性识别
|
||||||
|
@router.post("/attribute_recognition")
|
||||||
|
def attribute_recognition(request_item: list[AttributeRecognitionModel]):
|
||||||
|
try:
|
||||||
|
service = AttributeRecognition(const=const, request_data=request_item)
|
||||||
|
data = service.get_result()
|
||||||
|
code = 200
|
||||||
|
message = "access"
|
||||||
|
logger.info(f"attribute_recognition response @@@@@@:{json.dumps(data, indent=4)}")
|
||||||
|
except Exception as e:
|
||||||
|
code = 400
|
||||||
|
message = e
|
||||||
|
data = e
|
||||||
|
logger.warning(f"attribute_recognition Run Exception @@@@@@:{e}")
|
||||||
|
return {"code": code, "message": message, "data": data}
|
||||||
|
|
||||||
|
|
||||||
|
# 类别识别
|
||||||
|
@router.post("/category_recognition")
|
||||||
|
def category_recognition(request_item: list[CategoryRecognitionModel]):
|
||||||
|
try:
|
||||||
|
service = CategoryRecognition(request_data=request_item)
|
||||||
|
data = service.get_result()
|
||||||
|
code = 200
|
||||||
|
message = "access"
|
||||||
|
logger.info(f"category_recognition response @@@@@@:{json.dumps(data, indent=4)}")
|
||||||
|
except Exception as e:
|
||||||
|
code = 400
|
||||||
|
message = e
|
||||||
|
data = e
|
||||||
|
logger.warning(f"category_recognition Run Exception @@@@@@:{e}")
|
||||||
|
return {"code": code, "message": message, "data": data}
|
||||||
@@ -3,9 +3,11 @@ from fastapi import APIRouter
|
|||||||
from app.api import api_test
|
from app.api import api_test
|
||||||
from app.api import api_super_resolution
|
from app.api import api_super_resolution
|
||||||
from app.api import api_generate_image
|
from app.api import api_generate_image
|
||||||
|
from app.api import api_attribute_retrieve
|
||||||
|
|
||||||
router = APIRouter()
|
router = APIRouter()
|
||||||
|
|
||||||
router.include_router(api_test.router, tags=["test"], prefix="/test")
|
router.include_router(api_test.router, tags=["test"], prefix="/test")
|
||||||
router.include_router(api_super_resolution.router, tags=["super_resolution"], prefix="/api")
|
router.include_router(api_super_resolution.router, tags=["super_resolution"], prefix="/api")
|
||||||
router.include_router(api_generate_image.router, tags=["generate_image"], prefix="/api")
|
router.include_router(api_generate_image.router, tags=["generate_image"], prefix="/api")
|
||||||
|
router.include_router(api_attribute_retrieve.router, tags=["attribute_retrieve"], prefix="/api")
|
||||||
|
|||||||
@@ -19,11 +19,13 @@ class Settings(BaseSettings):
|
|||||||
LOGGING_CONFIG_FILE = os.path.join(BASE_DIR, 'logging_env.py')
|
LOGGING_CONFIG_FILE = os.path.join(BASE_DIR, 'logging_env.py')
|
||||||
|
|
||||||
|
|
||||||
DEBUG = False
|
DEBUG = True
|
||||||
if DEBUG:
|
if DEBUG:
|
||||||
LOGS_PATH = "logs/errors.log"
|
LOGS_PATH = "logs/"
|
||||||
|
CATEGORY_PATH = "service/attribute/config/descriptor/category/category_dis.csv"
|
||||||
else:
|
else:
|
||||||
LOGS_PATH = "app/logs/errors.log"
|
LOGS_PATH = "app/logs/"
|
||||||
|
CATEGORY_PATH = "app/service/attribute/config/descriptor/category/category_dis.csv"
|
||||||
|
|
||||||
# RABBITMQ_ENV = "" # 生产环境
|
# RABBITMQ_ENV = "" # 生产环境
|
||||||
RABBITMQ_ENV = "-dev" # 开发环境
|
RABBITMQ_ENV = "-dev" # 开发环境
|
||||||
@@ -32,8 +34,7 @@ RABBITMQ_ENV = "-dev" # 开发环境
|
|||||||
settings = Settings()
|
settings = Settings()
|
||||||
|
|
||||||
# minio 配置
|
# minio 配置
|
||||||
MINIO_IP = "www.minio.aida.com.hk"
|
MINIO_URL = "www.minio.aida.com.hk:9000"
|
||||||
MINIO_PORT = 9000
|
|
||||||
MINIO_ACCESS = 'vXKFLSJkYeEq2DrSZvkB'
|
MINIO_ACCESS = 'vXKFLSJkYeEq2DrSZvkB'
|
||||||
MINIO_SECRET = 'uKTZT3x7C43WvPN9QTc99DiRkwddWZrG9Uh3JVlR'
|
MINIO_SECRET = 'uKTZT3x7C43WvPN9QTc99DiRkwddWZrG9Uh3JVlR'
|
||||||
MINIO_SECURE = True
|
MINIO_SECURE = True
|
||||||
@@ -51,15 +52,18 @@ RABBITMQ_PARAMS = {
|
|||||||
"virtual_host": "/"
|
"virtual_host": "/"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# attribute service config
|
||||||
|
ATT_TRITON_URL = "10.1.1.240:8020"
|
||||||
|
|
||||||
# SR service config
|
# SR service config
|
||||||
SR_MODEL_NAME = "super_resolution"
|
SR_MODEL_NAME = "super_resolution"
|
||||||
SR_TRITON_URL = "10.1.1.240:10031"
|
SR_TRITON_URL = "10.1.1.240:10031"
|
||||||
SR_MINIO_BUCKET = "aida-users"
|
SR_MINIO_BUCKET = "aida-users"
|
||||||
SR_RABBITMQ_QUEUES = os.getenv("SR_RABBITMQ_QUEUES", "SuperResolution-local")
|
SR_RABBITMQ_QUEUES = os.getenv("SR_RABBITMQ_QUEUES", f"SuperResolution{RABBITMQ_ENV}")
|
||||||
|
|
||||||
# GenerateImage service config
|
# GenerateImage service config
|
||||||
GI_MODEL_NAME = '_stable_diffusion'
|
GI_MODEL_NAME = 'stable_diffusion_xl_lcm'
|
||||||
GI_MODEL_URL = '10.1.1.240:7001'
|
GI_MODEL_URL = '10.1.1.150:8001'
|
||||||
GI_MINIO_BUCKET = "aida-users"
|
GI_MINIO_BUCKET = "aida-users"
|
||||||
GI_RABBITMQ_QUEUES = os.getenv("GI_RABBITMQ_QUEUES", f"GenerateImage{RABBITMQ_ENV}")
|
GI_RABBITMQ_QUEUES = os.getenv("GI_RABBITMQ_QUEUES", f"GenerateImage{RABBITMQ_ENV}")
|
||||||
|
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ from app.api.api_route import router
|
|||||||
from app.core.config import settings
|
from app.core.config import settings
|
||||||
from logging_env import LOGGER_CONFIG_DICT
|
from logging_env import LOGGER_CONFIG_DICT
|
||||||
|
|
||||||
|
|
||||||
logging.config.dictConfig(LOGGER_CONFIG_DICT)
|
logging.config.dictConfig(LOGGER_CONFIG_DICT)
|
||||||
|
|
||||||
from starlette.middleware.cors import CORSMiddleware
|
from starlette.middleware.cors import CORSMiddleware
|
||||||
|
|||||||
12
app/schemas/attribute_retrieve.py
Normal file
12
app/schemas/attribute_retrieve.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class CategoryRecognitionModel(BaseModel):
|
||||||
|
colony: str
|
||||||
|
sketch_img_url: str
|
||||||
|
|
||||||
|
|
||||||
|
class AttributeRecognitionModel(BaseModel):
|
||||||
|
category: str
|
||||||
|
colony: str
|
||||||
|
sketch_img_url: str
|
||||||
@@ -2,11 +2,8 @@ from pydantic import BaseModel
|
|||||||
|
|
||||||
|
|
||||||
class GenerateImageModel(BaseModel):
|
class GenerateImageModel(BaseModel):
|
||||||
category: str
|
|
||||||
content: str
|
|
||||||
gender: str
|
|
||||||
image_url: str
|
|
||||||
mode: int
|
|
||||||
tasks_id: str
|
tasks_id: str
|
||||||
user_id: int
|
prompt: str
|
||||||
version: str
|
image_url: str
|
||||||
|
mode: str
|
||||||
|
category: str
|
||||||
|
|||||||
78
app/service/attribute/config/const.py
Normal file
78
app/service/attribute/config/const.py
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
top_description_list = ['service/attribute/config/descriptor/top/length.csv',
|
||||||
|
'service/attribute/config/descriptor/top/type.csv',
|
||||||
|
'service/attribute/config/descriptor/top/sleeve_length.csv',
|
||||||
|
'service/attribute/config/descriptor/top/sleeve_shape.csv',
|
||||||
|
'service/attribute/config/descriptor/top/sleeve_shoulder.csv',
|
||||||
|
'service/attribute/config/descriptor/top/neckline.csv',
|
||||||
|
'service/attribute/config/descriptor/top/design.csv',
|
||||||
|
'service/attribute/config/descriptor/top/opening_type.csv',
|
||||||
|
'service/attribute/config/descriptor/top/silhouette.csv',
|
||||||
|
'service/attribute/config/descriptor/top/collar.csv']
|
||||||
|
|
||||||
|
top_model_list = ['attr_retrieve_T_length',
|
||||||
|
'attr_retrieve_T_type',
|
||||||
|
'attr_retrieve_T_sleeve_length',
|
||||||
|
'attr_retrieve_T_sleeve_shape',
|
||||||
|
'attr_retrieve_T_sleeve_shoulder',
|
||||||
|
'attr_retrieve_T_neckline',
|
||||||
|
'attr_retrieve_T_design',
|
||||||
|
'attr_retrieve_T_optype',
|
||||||
|
'attr_retrieve_T_silhouette',
|
||||||
|
'attr_retrieve_T_collar'
|
||||||
|
]
|
||||||
|
|
||||||
|
bottom_description_list = [
|
||||||
|
'service/attribute/config/descriptor/bottom/subtype.csv',
|
||||||
|
'service/attribute/config/descriptor/bottom/length.csv',
|
||||||
|
'service/attribute/config/descriptor/bottom/silhouette.csv',
|
||||||
|
'service/attribute/config/descriptor/bottom/opening_type.csv',
|
||||||
|
'service/attribute/config/descriptor/bottom/design.csv']
|
||||||
|
|
||||||
|
bottom_model_list = [
|
||||||
|
'attr_retrieve_B_subtype',
|
||||||
|
'attr_retrieve_B_length',
|
||||||
|
'attr_retrieve_B_silhouette',
|
||||||
|
'attr_recong_B_optype',
|
||||||
|
'attr_retrieve_B_design']
|
||||||
|
|
||||||
|
outwear_description_list = ['service/attribute/config/descriptor/outwear/length.csv',
|
||||||
|
'service/attribute/config/descriptor/outwear/sleeve_length.csv',
|
||||||
|
'service/attribute/config/descriptor/outwear/sleeve_shape.csv',
|
||||||
|
'service/attribute/config/descriptor/outwear/sleeve_shoulder.csv',
|
||||||
|
'service/attribute/config/descriptor/outwear/collar.csv',
|
||||||
|
'service/attribute/config/descriptor/outwear/design.csv',
|
||||||
|
'service/attribute/config/descriptor/outwear/opening_type.csv',
|
||||||
|
'service/attribute/config/descriptor/outwear/silhouette.csv', ]
|
||||||
|
|
||||||
|
outwear_model_list = ['attr_recong_O_length',
|
||||||
|
'attr_retrieve_O_sleeve_length',
|
||||||
|
'attr_retrieve_O_sleeve_shape',
|
||||||
|
'attr_retrieve_O_sleeve_shoulder',
|
||||||
|
'attr_retrieve_O_collar',
|
||||||
|
'attr_retrieve_O_design',
|
||||||
|
'attr_recong_O_optype',
|
||||||
|
'attr_retrieve_O_silhouette']
|
||||||
|
|
||||||
|
dress_description_list = [ # 'service/attribute/config/descriptor/dress/D_length.csv',
|
||||||
|
'service/attribute/config/descriptor/dress/sleeve_length.csv',
|
||||||
|
'service/attribute/config/descriptor/dress/sleeve_shape.csv',
|
||||||
|
# 'service/attribute/config/descriptor/dress/D_sleeve_shoulder.csv',
|
||||||
|
'service/attribute/config/descriptor/dress/neckline.csv',
|
||||||
|
'service/attribute/config/descriptor/dress/collar.csv',
|
||||||
|
'service/attribute/config/descriptor/dress/design.csv',
|
||||||
|
'service/attribute/config/descriptor/dress/silhouette.csv',
|
||||||
|
'service/attribute/config/descriptor/dress/type.csv']
|
||||||
|
|
||||||
|
dress_model_list = [ # 'attr_recong_D_length',
|
||||||
|
'attr_retrieve_D_sleeve_length',
|
||||||
|
'attr_retrieve_D_sleeve_shape',
|
||||||
|
# 'attr_retrieve_D_sleeve_shoulder',
|
||||||
|
'attr_retrieve_D_neckline',
|
||||||
|
'attr_retrieve_D_collar',
|
||||||
|
'attr_retrieve_D_design',
|
||||||
|
'attr_retrieve_D_silohouette',
|
||||||
|
'attr_retrieve_D_type'
|
||||||
|
]
|
||||||
|
|
||||||
|
category_description = './description/category/category_dis.csv'
|
||||||
|
category_model = 'attr_retrieve_category'
|
||||||
@@ -0,0 +1,28 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Canvas,attr_Material_O_1,Material_B,8
|
||||||
|
Chambray,attr_Material_O_2,Material_B,8
|
||||||
|
Chenille,attr_Material_O_3,Material_B,8
|
||||||
|
Chiffon,attr_Material_O_4,Material_B,8
|
||||||
|
Corduroy,attr_Material_O_5,Material_B,8
|
||||||
|
Crepe,attr_Material_O_6,Material_B,8
|
||||||
|
Denim,attr_Material_O_7,Material_B,8
|
||||||
|
Faux_fur,attr_Material_O_8,Material_B,8
|
||||||
|
Faux_leather,attr_Material_O_9,Material_B,8
|
||||||
|
Flannel,attr_Material_O_10,Material_B,8
|
||||||
|
Fleece,attr_Material_O_11,Material_B,8
|
||||||
|
Gingham,attr_Material_O_12,Material_B,8
|
||||||
|
Jersey,attr_Material_O_13,Material_B,8
|
||||||
|
Knit,attr_Material_O_14,Material_B,8
|
||||||
|
Lace,attr_Material_O_15,Material_B,8
|
||||||
|
Lawn,attr_Material_O_16,Material_B,8
|
||||||
|
Neoprene,attr_Material_O_17,Material_B,8
|
||||||
|
Organza,attr_Material_O_18,Material_B,8
|
||||||
|
Plush,attr_Material_O_19,Material_B,8
|
||||||
|
Satin,attr_Material_O_20,Material_B,8
|
||||||
|
Serge,attr_Material_O_21,Material_B,8
|
||||||
|
Taffeta,attr_Material_O_22,Material_B,8
|
||||||
|
Tulle,attr_Material_O_23,Material_B,8
|
||||||
|
Tweed,attr_Material_O_24,Material_B,8
|
||||||
|
Twill,attr_Material_O_25,Material_B,8
|
||||||
|
Velvet,attr_Material_O_26,Material_B,8
|
||||||
|
Vinyl,attr_Material_O_27,Material_B,8
|
||||||
|
16
app/service/attribute/config/descriptor/bottom/B_print.csv
Normal file
16
app/service/attribute/config/descriptor/bottom/B_print.csv
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Abstract,attr_Print_O_1,Print_B,7
|
||||||
|
Allover,attr_Print_O_2,Print_B,7
|
||||||
|
Animal,attr_Print_O_3,Print_B,7
|
||||||
|
Camouflage,attr_Print_O_4,Print_B,7
|
||||||
|
Checks,attr_Print_O_5,Print_B,7
|
||||||
|
Color_block,attr_Print_O_6,Print_B,7
|
||||||
|
Disty print,attr_Print_O_7,Print_B,7
|
||||||
|
Dotted,attr_Print_O_8,Print_B,7
|
||||||
|
Floral,attr_Print_O_9,Print_B,7
|
||||||
|
Graphic print,attr_Print_O_10,Print_B,7
|
||||||
|
Logo and slogan,attr_Print_O_11,Print_B,7
|
||||||
|
Patchwork,attr_Print_O_12,Print_B,7
|
||||||
|
Plain,attr_Print_O_13,Print_B,7
|
||||||
|
Plain_dnim,attr_Print_O_14,Print_B,7
|
||||||
|
Stripe,attr_Print_O_15,Print_B,7
|
||||||
|
@@ -0,0 +1,4 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
B_softness_Soft,attr_Softness_B_1,Softness_B,5
|
||||||
|
B_softness_Medium,attr_Softness_B_2,Softness_B,5
|
||||||
|
B_softness_Hard,attr_Softness_B_3,Softness_B,5
|
||||||
|
17
app/service/attribute/config/descriptor/bottom/design.csv
Normal file
17
app/service/attribute/config/descriptor/bottom/design.csv
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Asymmetrical,attr_Design_B_1,Design_B,6
|
||||||
|
Tiered,attr_Design_B_2,Design_B,6
|
||||||
|
Tassel,attr_Design_B_3,Design_B,6
|
||||||
|
Ruffle,attr_Design_B_4,Design_B,6
|
||||||
|
Pleated,attr_Design_B_5,Design_B,6
|
||||||
|
Wrap,attr_Design_B_6,Design_B,6
|
||||||
|
Ripped,attr_Design_B_7,Design_B,6
|
||||||
|
Cut_out,attr_Design_B_8,Design_B,6
|
||||||
|
Eyelet,attr_Design_B_9,Design_B,6
|
||||||
|
Folded,attr_Design_B_10,Design_B,6
|
||||||
|
Tied,attr_Design_B_11,Design_B,6
|
||||||
|
Drapes,attr_Design_B_12,Design_B,6
|
||||||
|
Ribbon,attr_Design_B_13,Design_B,6
|
||||||
|
Button,attr_Design_B_14,Design_B,6
|
||||||
|
Split,attr_Design_B_15,Design_B,6
|
||||||
|
Fishtail,attr_Design_B_16,Design_B,6
|
||||||
|
@@ -0,0 +1,6 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Short,attr_BTM_length_1,BTM_length,3
|
||||||
|
Middle,attr_BTM_length_2,BTM_length,3
|
||||||
|
Seven,attr_BTM_length_3,BTM_length,3
|
||||||
|
Nine,attr_BTM_length_4,BTM_length,3
|
||||||
|
Long,attr_BTM_length_5,BTM_length,3
|
||||||
|
@@ -0,0 +1,6 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Button,attr_OPType_B_1,OPType_B,7
|
||||||
|
Zipper,attr_OPType_B_2,OPType_B,7
|
||||||
|
Thread,attr_OPType_B_3,OPType_B,7
|
||||||
|
Hook,attr_OPType_B_4,OPType_B,7
|
||||||
|
Elastic,attr_OPType_B_5,OPType_B,7
|
||||||
|
@@ -0,0 +1,6 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
A_Line,attr_Silhouette_B_1,Silhouette_B,8
|
||||||
|
H_Shape,attr_Silhouette_B_2,Silhouette_B,8
|
||||||
|
Slim,attr_Silhouette_B_3,Silhouette_B,8
|
||||||
|
Peg_leg,attr_Silhouette_B_4,Silhouette_B,8
|
||||||
|
Peplum,attr_Silhouette_B_5,Silhouette_B,8
|
||||||
|
21
app/service/attribute/config/descriptor/bottom/subtype.csv
Normal file
21
app/service/attribute/config/descriptor/bottom/subtype.csv
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
A_Line_Skirt,bottom_Sub-Type_1,BTM_Sub-Type,2
|
||||||
|
Bodycon_Skirt,bottom_Sub-Type_2,BTM_Sub-Type,2
|
||||||
|
Boot_Cut,bottom_Sub-Type_3,BTM_Sub-Type,2
|
||||||
|
Bubble_skirt,bottom_Sub-Type_4,BTM_Sub-Type,2
|
||||||
|
Cargo_Pants,bottom_Sub-Type_5,BTM_Sub-Type,2
|
||||||
|
Culottes,bottom_Sub-Type_6,BTM_Sub-Type,2
|
||||||
|
Handkerchief_Skirt,bottom_Sub-Type_7,BTM_Sub-Type,2
|
||||||
|
Jeans,bottom_Sub-Type_8,BTM_Sub-Type,2
|
||||||
|
Joggers,bottom_Sub-Type_9,BTM_Sub-Type,2
|
||||||
|
Leather_pants,bottom_Sub-Type_10,BTM_Sub-Type,2
|
||||||
|
Leggings,bottom_Sub-Type_11,BTM_Sub-Type,2
|
||||||
|
Mermaid,bottom_Sub-Type_12,BTM_Sub-Type,2
|
||||||
|
Pattened_pants,bottom_Sub-Type_13,BTM_Sub-Type,2
|
||||||
|
Peg_leg_Pants_Cigarette_Pants,bottom_Sub-Type_14,BTM_Sub-Type,2
|
||||||
|
Pencil_Skirt,bottom_Sub-Type_15,BTM_Sub-Type,2
|
||||||
|
Pleated_Skirt,bottom_Sub-Type_16,BTM_Sub-Type,2
|
||||||
|
Shorts,bottom_Sub-Type_17,BTM_Sub-Type,2
|
||||||
|
Skater_Skirt,bottom_Sub-Type_18,BTM_Sub-Type,2
|
||||||
|
Suit_Trousers,bottom_Sub-Type_19,BTM_Sub-Type,2
|
||||||
|
Tier_Skirt,bottom_Sub-Type_20,BTM_Sub-Type,2
|
||||||
|
@@ -0,0 +1,7 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
top,attr_top,category,1
|
||||||
|
pants,attr_pants,category,1
|
||||||
|
skirt,attr_skirt,category,1
|
||||||
|
dress,attr_dress,category,1
|
||||||
|
outwear,attr_outwear,category,1
|
||||||
|
jumpsuit,attr_jumpsuit,category,1
|
||||||
|
16
app/service/attribute/config/descriptor/dress/D_Print.csv
Normal file
16
app/service/attribute/config/descriptor/dress/D_Print.csv
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Abstract,attr_Print_O_1,Print_D,7
|
||||||
|
Allover,attr_Print_O_2,Print_D,7
|
||||||
|
Animal,attr_Print_O_3,Print_D,7
|
||||||
|
Camouflage,attr_Print_O_4,Print_D,7
|
||||||
|
Checks,attr_Print_O_5,Print_D,7
|
||||||
|
Color_block,attr_Print_O_6,Print_D,7
|
||||||
|
Disty print,attr_Print_O_7,Print_D,7
|
||||||
|
Dotted,attr_Print_O_8,Print_D,7
|
||||||
|
Floral,attr_Print_O_9,Print_D,7
|
||||||
|
Graphic print,attr_Print_O_10,Print_D,7
|
||||||
|
Logo and slogan,attr_Print_O_11,Print_D,7
|
||||||
|
Patchwork,attr_Print_O_12,Print_D,7
|
||||||
|
Plain,attr_Print_O_13,Print_D,7
|
||||||
|
Plain_dnim,attr_Print_O_14,Print_D,7
|
||||||
|
Stripe,attr_Print_O_15,Print_D,7
|
||||||
|
28
app/service/attribute/config/descriptor/dress/D_material.csv
Normal file
28
app/service/attribute/config/descriptor/dress/D_material.csv
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Canvas,attr_Material_O_1,Material_D,8
|
||||||
|
Chambray,attr_Material_O_2,Material_D,8
|
||||||
|
Chenille,attr_Material_O_3,Material_D,8
|
||||||
|
Chiffon,attr_Material_O_4,Material_D,8
|
||||||
|
Corduroy,attr_Material_O_5,Material_D,8
|
||||||
|
Crepe,attr_Material_O_6,Material_D,8
|
||||||
|
Denim,attr_Material_O_7,Material_D,8
|
||||||
|
Faux_fur,attr_Material_O_8,Material_D,8
|
||||||
|
Faux_leather,attr_Material_O_9,Material_D,8
|
||||||
|
Flannel,attr_Material_O_10,Material_D,8
|
||||||
|
Fleece,attr_Material_O_11,Material_D,8
|
||||||
|
Gingham,attr_Material_O_12,Material_D,8
|
||||||
|
Jersey,attr_Material_O_13,Material_D,8
|
||||||
|
Knit,attr_Material_O_14,Material_D,8
|
||||||
|
Lace,attr_Material_O_15,Material_D,8
|
||||||
|
Lawn,attr_Material_O_16,Material_D,8
|
||||||
|
Neoprene,attr_Material_O_17,Material_D,8
|
||||||
|
Organza,attr_Material_O_18,Material_D,8
|
||||||
|
Plush,attr_Material_O_19,Material_D,8
|
||||||
|
Satin,attr_Material_O_20,Material_D,8
|
||||||
|
Serge,attr_Material_O_21,Material_D,8
|
||||||
|
Taffeta,attr_Material_O_22,Material_D,8
|
||||||
|
Tulle,attr_Material_O_23,Material_D,8
|
||||||
|
Tweed,attr_Material_O_24,Material_D,8
|
||||||
|
Twill,attr_Material_O_25,Material_D,8
|
||||||
|
Velvet,attr_Material_O_26,Material_D,8
|
||||||
|
Vinyl,attr_Material_O_27,Material_D,8
|
||||||
|
@@ -0,0 +1,4 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Soft,attr_Softness_U_1,Softness_D,9
|
||||||
|
Medium,attr_Softness_U_2,Softness_D,9
|
||||||
|
Hard,attr_Softness_U_3,Softness_D,9
|
||||||
|
11
app/service/attribute/config/descriptor/dress/collar.csv
Normal file
11
app/service/attribute/config/descriptor/dress/collar.csv
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Peterpan,attr_Collar_1,Collar,7
|
||||||
|
Shirt,attr_Collar_2,Collar,7
|
||||||
|
Rib,attr_Collar_3,Collar,7
|
||||||
|
Turtle,attr_Collar_4,Collar,7
|
||||||
|
Lapel,attr_Collar_5,Collar,7
|
||||||
|
Hoodie,attr_Collar_6,Collar,7
|
||||||
|
Mandarin,attr_Collar_7,Collar,7
|
||||||
|
Tie,attr_Collar_8,Collar,7
|
||||||
|
Ruffle,attr_Collar_9,Collar,7
|
||||||
|
Cowl,attr_Collar_10,Collar,7
|
||||||
|
19
app/service/attribute/config/descriptor/dress/design.csv
Normal file
19
app/service/attribute/config/descriptor/dress/design.csv
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Asymmetrical,attr_Design_U_1,Design_D,9
|
||||||
|
Tiered,attr_Design_U_2,Design_D,9
|
||||||
|
Tassel,attr_Design_U_3,Design_D,9
|
||||||
|
Ruffle,attr_Design_U_4,Design_D,9
|
||||||
|
Pleated,attr_Design_U_5,Design_D,9
|
||||||
|
Wrap,attr_Design_U_6,Design_D,9
|
||||||
|
Ripped,attr_Design_U_7,Design_D,9
|
||||||
|
Cut_out,attr_Design_U_8,Design_D,9
|
||||||
|
Eyelet,attr_Design_U_9,Design_D,9
|
||||||
|
Folded,attr_Design_U_10,Design_D,9
|
||||||
|
Tied,attr_Design_U_11,Design_D,9
|
||||||
|
Drapes,attr_Design_U_12,Design_D,9
|
||||||
|
Ribbon,attr_Design_U_13,Design_D,9
|
||||||
|
Button,attr_Design_U_14,Design_D,9
|
||||||
|
Split,attr_Design_U_15,Design_D,9
|
||||||
|
Fishtail,attr_Design_U_16,Design_D,9
|
||||||
|
Cami_dress,attr_Design_U_17,Design_D,9
|
||||||
|
Gathering,attr_Design_U_18,Design_D,9
|
||||||
|
6
app/service/attribute/config/descriptor/dress/length.csv
Normal file
6
app/service/attribute/config/descriptor/dress/length.csv
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Maxi,attr_Dress_length_1,Dress_length,1
|
||||||
|
Midi,attr_Dress_length_2,Dress_length,1
|
||||||
|
Mini,attr_Dress_length_3,Dress_length,1
|
||||||
|
Over_the_knee,attr_Dress_length_4,Dress_length,1
|
||||||
|
Floor_Length,attr_Dress_length_5,Dress_length,1
|
||||||
|
17
app/service/attribute/config/descriptor/dress/neckline.csv
Normal file
17
app/service/attribute/config/descriptor/dress/neckline.csv
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Round,attr_Neckline_D_1,Neckline_D,5
|
||||||
|
V,attr_Neckline_D_2,Neckline_D,5
|
||||||
|
Square,attr_Neckline_D_3,Neckline_D,5
|
||||||
|
One_shoulder,attr_Neckline_D_4,Neckline_D,5
|
||||||
|
Off_shoulder,attr_Neckline_D_5,Neckline_D,5
|
||||||
|
Strapless,attr_Neckline_D_6,Neckline_D,5
|
||||||
|
Turtle,attr_Neckline_D_7,Neckline_D,5
|
||||||
|
Boat,attr_Neckline_D_8,Neckline_D,5
|
||||||
|
Halter,attr_Neckline_D_9,Neckline_D,5
|
||||||
|
Spaghetti_Strap,attr_Neckline_D_10,Neckline_D,5
|
||||||
|
Sweetheart,attr_Neckline_D_11,Neckline_D,5
|
||||||
|
U,attr_Neckline_D_12,Neckline_D,5
|
||||||
|
Choker,attr_Neckline_D_13,Neckline_D,5
|
||||||
|
Cowl,attr_Neckline_D_14,Neckline_D,5
|
||||||
|
Keyhole,attr_Neckline_D_15,Neckline_D,5
|
||||||
|
Split,attr_Neckline_D_16,Neckline_D,5
|
||||||
|
@@ -0,0 +1,5 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Button,attr_OPType_U_1,OPType_D,11
|
||||||
|
Zipper,attr_OPType_U_2,OPType_D,11
|
||||||
|
Thread,attr_OPType_U_3,OPType_D,11
|
||||||
|
Hook,attr_OPType_U_4,OPType_D,11
|
||||||
|
11
app/service/attribute/config/descriptor/dress/silhouette.csv
Normal file
11
app/service/attribute/config/descriptor/dress/silhouette.csv
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
A_Line,attr_Silhouette_U_1,Silhouette_D,11
|
||||||
|
H_Shape,attr_Silhouette_U_2,Silhouette_D,11
|
||||||
|
Slim,attr_Silhouette_U_3,Silhouette_D,11
|
||||||
|
Oversized,attr_Silhouette_U_4,Silhouette_D,11
|
||||||
|
Cacoon,attr_Silhouette_U_5,Silhouette_D,11
|
||||||
|
Empire,attr_Silhouette_U_6,Silhouette_D,11
|
||||||
|
Hourglass,attr_Silhouette_U_7,Silhouette_D,11
|
||||||
|
Mermaid,attr_Silhouette_U_8,Silhouette_D,11
|
||||||
|
Sheath,attr_Silhouette_U_9,Silhouette_D,11
|
||||||
|
Tent,attr_Silhouette_U_10,Silhouette_D,11
|
||||||
|
@@ -0,0 +1,6 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Sleeveless,attr_Sleeve_length_1,Sleeve_length,3
|
||||||
|
Short,attr_Sleeve_length_2,Sleeve_length,3
|
||||||
|
Middle,attr_Sleeve_length_3,Sleeve_length,3
|
||||||
|
Seven,attr_Sleeve_length_4,Sleeve_length,3
|
||||||
|
Long,attr_Sleeve_length_5,Sleeve_length,3
|
||||||
|
@@ -0,0 +1,9 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Regular,attr_Sleeve_shape_1,Sleeve_shape,4
|
||||||
|
Slim,attr_Sleeve_shape_2,Sleeve_shape,4
|
||||||
|
Puff,attr_Sleeve_shape_3,Sleeve_shape,4
|
||||||
|
Bell,attr_Sleeve_shape_4,Sleeve_shape,4
|
||||||
|
Batwing,attr_Sleeve_shape_5,Sleeve_shape,4
|
||||||
|
Shirt,attr_Sleeve_shape_6,Sleeve_shape,4
|
||||||
|
Rib,attr_Sleeve_shape_7,Sleeve_shape,4
|
||||||
|
Raglan,attr_Sleeve_shape_8,Sleeve_shape,4
|
||||||
|
@@ -0,0 +1,5 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Regular,attr_Sleeve_shoulder_1,Sleeve_shoulder,5
|
||||||
|
Cold,attr_Sleeve_shoulder_2,Sleeve_shoulder,5
|
||||||
|
Tucked,attr_Sleeve_shoulder_3,Sleeve_shoulder,5
|
||||||
|
Balmain,attr_Sleeve_shoulder_4,Sleeve_shoulder,5
|
||||||
|
20
app/service/attribute/config/descriptor/dress/type.csv
Normal file
20
app/service/attribute/config/descriptor/dress/type.csv
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Evening_gown,attr_dresstype_1,Dress_Type,12
|
||||||
|
Shirt_dress,attr_dresstype_2,Dress_Type,12
|
||||||
|
Coat_dress,attr_dresstype_3,Dress_Type,12
|
||||||
|
Handkerchief_dress,attr_dresstype_4,Dress_Type,12
|
||||||
|
Jumper_dress,attr_dresstype_5,Dress_Type,12
|
||||||
|
Dungaree_dress,attr_dresstype_6,Dress_Type,12
|
||||||
|
Skater_dress,attr_dresstype_7,Dress_Type,12
|
||||||
|
Tea_dress,attr_dresstype_8,Dress_Type,12
|
||||||
|
Mermaid_dress,attr_dresstype_9,Dress_Type,12
|
||||||
|
Cocktail_dress,attr_dresstype_10,Dress_Type,12
|
||||||
|
A_Line_dress,attr_dresstype_11,Dress_Type,12
|
||||||
|
Bodycon_dress,attr_dresstype_12,Dress_Type,12
|
||||||
|
Maxi_dress,attr_dresstype_13,Dress_Type,12
|
||||||
|
Office_dress,attr_dresstype_14,Dress_Type,12
|
||||||
|
Pencil_dress,attr_dresstype_15,Dress_Type,12
|
||||||
|
Sheer_dress,attr_dresstype_16,Dress_Type,12
|
||||||
|
Shift_dress,attr_dresstype_17,Dress_Type,12
|
||||||
|
Slip_dress,attr_dresstype_18,Dress_Type,12
|
||||||
|
T_shirt_dress,attr_dresstype_19,Dress_Type,12
|
||||||
|
@@ -0,0 +1,28 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Canvas,attr_Material_O_1,Material_O,8
|
||||||
|
Chambray,attr_Material_O_2,Material_O,8
|
||||||
|
Chenille,attr_Material_O_3,Material_O,8
|
||||||
|
Chiffon,attr_Material_O_4,Material_O,8
|
||||||
|
Corduroy,attr_Material_O_5,Material_O,8
|
||||||
|
Crepe,attr_Material_O_6,Material_O,8
|
||||||
|
Denim,attr_Material_O_7,Material_O,8
|
||||||
|
Faux_fur,attr_Material_O_8,Material_O,8
|
||||||
|
Faux_leather,attr_Material_O_9,Material_O,8
|
||||||
|
Flannel,attr_Material_O_10,Material_O,8
|
||||||
|
Fleece,attr_Material_O_11,Material_O,8
|
||||||
|
Gingham,attr_Material_O_12,Material_O,8
|
||||||
|
Jersey,attr_Material_O_13,Material_O,8
|
||||||
|
Knit,attr_Material_O_14,Material_O,8
|
||||||
|
Lace,attr_Material_O_15,Material_O,8
|
||||||
|
Lawn,attr_Material_O_16,Material_O,8
|
||||||
|
Neoprene,attr_Material_O_17,Material_O,8
|
||||||
|
Organza,attr_Material_O_18,Material_O,8
|
||||||
|
Plush,attr_Material_O_19,Material_O,8
|
||||||
|
Satin,attr_Material_O_20,Material_O,8
|
||||||
|
Serge,attr_Material_O_21,Material_O,8
|
||||||
|
Taffeta,attr_Material_O_22,Material_O,8
|
||||||
|
Tulle,attr_Material_O_23,Material_O,8
|
||||||
|
Tweed,attr_Material_O_24,Material_O,8
|
||||||
|
Twill,attr_Material_O_25,Material_O,8
|
||||||
|
Velvet,attr_Material_O_26,Material_O,8
|
||||||
|
Vinyl,attr_Material_O_27,Material_O,8
|
||||||
|
16
app/service/attribute/config/descriptor/outwear/O_print.csv
Normal file
16
app/service/attribute/config/descriptor/outwear/O_print.csv
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Abstract,attr_Print_O_1,Print_O,7
|
||||||
|
Allover,attr_Print_O_2,Print_O,7
|
||||||
|
Animal,attr_Print_O_3,Print_O,7
|
||||||
|
Camouflage,attr_Print_O_4,Print_O,7
|
||||||
|
Checks,attr_Print_O_5,Print_O,7
|
||||||
|
Color_block,attr_Print_O_6,Print_O,7
|
||||||
|
Disty print,attr_Print_O_7,Print_O,7
|
||||||
|
Dotted,attr_Print_O_8,Print_O,7
|
||||||
|
Floral,attr_Print_O_9,Print_O,7
|
||||||
|
Graphic print,attr_Print_O_10,Print_O,7
|
||||||
|
Logo and slogan,attr_Print_O_11,Print_O,7
|
||||||
|
Patchwork,attr_Print_O_12,Print_O,7
|
||||||
|
Plain,attr_Print_O_13,Print_O,7
|
||||||
|
Plain_dnim,attr_Print_O_14,Print_O,7
|
||||||
|
Stripe,attr_Print_O_15,Print_O,7
|
||||||
|
@@ -0,0 +1,4 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
O_softness_Soft,attr_Softness_O_1,Softness_O,9
|
||||||
|
O_softness_Medium,attr_Softness_O_2,Softness_O,9
|
||||||
|
O_softness_Hard,attr_Softness_O_3,Softness_O,9
|
||||||
|
10
app/service/attribute/config/descriptor/outwear/collar.csv
Normal file
10
app/service/attribute/config/descriptor/outwear/collar.csv
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Peterpan,attr_Callar_O_1,Collar,6
|
||||||
|
Shirt,attr_Callar_O_2,Collar,6
|
||||||
|
Rib,attr_Callar_O_3,Collar,6
|
||||||
|
Turtle,attr_Callar_O_4,Collar,6
|
||||||
|
Lapel,attr_Callar_O_5,Collar,6
|
||||||
|
Hoodie,attr_Callar_O_6,Collar,6
|
||||||
|
Mandarin,attr_Callar_O_7,Collar,6
|
||||||
|
Ruffle,attr_Callar_O_8,Collar,6
|
||||||
|
Jewel,attr_Callar_O_9,Collar,6
|
||||||
|
19
app/service/attribute/config/descriptor/outwear/design.csv
Normal file
19
app/service/attribute/config/descriptor/outwear/design.csv
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Asymmetrical,attr_Design_O_1,Design_O,10
|
||||||
|
Tiered,attr_Design_O_2,Design_O,10
|
||||||
|
Tassel,attr_Design_O_3,Design_O,10
|
||||||
|
Ruffles,attr_Design_O_4,Design_O,10
|
||||||
|
Pleated,attr_Design_O_5,Design_O,10
|
||||||
|
Wrap,attr_Design_O_6,Design_O,10
|
||||||
|
Ripped,attr_Design_O_7,Design_O,10
|
||||||
|
Cut_out,attr_Design_O_8,Design_O,10
|
||||||
|
Eyelet,attr_Design_O_9,Design_O,10
|
||||||
|
Folded,attr_Design_O_10,Design_O,10
|
||||||
|
Tied,attr_Design_O_11,Design_O,10
|
||||||
|
Drapes,attr_Design_O_12,Design_O,10
|
||||||
|
Ribbon,attr_Design_O_13,Design_O,10
|
||||||
|
Button,attr_Design_O_14,Design_O,10
|
||||||
|
Crossed_over_zipper,attr_Design_O_15,Design_O,10
|
||||||
|
Crossed_over_button,attr_Design_O_16,Design_O,10
|
||||||
|
Single_breasted,attr_Design_O_17,Design_O,10
|
||||||
|
Double_breasted,attr_Design_O_18,Design_O,10
|
||||||
|
@@ -0,0 +1,4 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Short,attr_Outer_length_1,Outer_length,1
|
||||||
|
Regular,attr_Outer_length_2,Outer_length,1
|
||||||
|
Long,attr_Outer_length_3,Outer_length,1
|
||||||
|
@@ -0,0 +1,3 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Full,attr_Opening_O_1,Opening_O,11
|
||||||
|
Half,attr_Opening_O_2,Opening_O,11
|
||||||
|
@@ -0,0 +1,5 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Button,attr_OPType_O_1,OPType_O,12
|
||||||
|
Zipper,attr_OPType_O_2,OPType_O,12
|
||||||
|
Thread,attr_OPType_O_3,OPType_O,12
|
||||||
|
Hook,attr_OPType_O_4,OPType_O,12
|
||||||
|
@@ -0,0 +1,7 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
A_Line,attr_Silhouette_O_1,Silhouette_O,13
|
||||||
|
H_Shape,attr_Silhouette_O_2,Silhouette_O,13
|
||||||
|
Slim,attr_Silhouette_O_3,Silhouette_O,13
|
||||||
|
Flyman,attr_Silhouette_O_4,Silhouette_O,13
|
||||||
|
Peplum,attr_Silhouette_O_5,Silhouette_O,13
|
||||||
|
Oversize,attr_Silhouette_O_6,Silhouette_O,13
|
||||||
|
@@ -0,0 +1,6 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Sleeveless,attr_Sleeve_length_O_1,Sleeve_length_O,3
|
||||||
|
Short,attr_Sleeve_length_O_2,Sleeve_length_O,3
|
||||||
|
Middle,attr_Sleeve_length_O_3,Sleeve_length_O,3
|
||||||
|
Seven,attr_Sleeve_length_O_4,Sleeve_length_O,3
|
||||||
|
Long,attr_Sleeve_length_O_5,Sleeve_length_O,3
|
||||||
|
@@ -0,0 +1,9 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Regular,attr_Sleeve_shape_O_1,Sleeve_shape,4
|
||||||
|
Slim,attr_Sleeve_shape_O_2,Sleeve_shape,4
|
||||||
|
Puff,attr_Sleeve_shape_O_3,Sleeve_shape,4
|
||||||
|
Bell,attr_Sleeve_shape_O_4,Sleeve_shape,4
|
||||||
|
Batwing,attr_Sleeve_shape_O_5,Sleeve_shape,4
|
||||||
|
Shirt,attr_Sleeve_shape_O_6,Sleeve_shape,4
|
||||||
|
Rib,attr_Sleeve_shape_O_7,Sleeve_shape,4
|
||||||
|
Raglan,attr_Sleeve_shape_O_8,Sleeve_shape,4
|
||||||
|
@@ -0,0 +1,6 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Regular,attr_Sleeve_shoulder_O_1,Sleeve_shoulder,5
|
||||||
|
Cold,attr_Sleeve_shoulder_O_2,Sleeve_shoulder,5
|
||||||
|
Tucked,attr_Sleeve_shoulder_O_3,Sleeve_shoulder,5
|
||||||
|
Balmain,attr_Sleeve_shoulder_O_4,Sleeve_shoulder,5
|
||||||
|
Drop_Shoulder,attr_Sleeve_shoulder_O_5,Sleeve_shoulder,5
|
||||||
|
18
app/service/attribute/config/descriptor/outwear/type.csv
Normal file
18
app/service/attribute/config/descriptor/outwear/type.csv
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Coat,attr_Outer_type_1,Outer_Type,2
|
||||||
|
Trench,attr_Outer_type_2,Outer_Type,2
|
||||||
|
Baseball_jacket,attr_Outer_type_3,Outer_Type,2
|
||||||
|
Hoodie_jacket,attr_Outer_type_4,Outer_Type,2
|
||||||
|
Active_jacket,attr_Outer_type_5,Outer_Type,2
|
||||||
|
Jacket,attr_Outer_type_6,Outer_Type,2
|
||||||
|
Blazer,attr_Outer_type_7,Outer_Type,2
|
||||||
|
Cardigan,attr_Outer_type_8,Outer_Type,2
|
||||||
|
Capes,attr_Outer_type_9,Outer_Type,2
|
||||||
|
Fleeces Jacket,attr_Outer_type_10,Outer_Type,2
|
||||||
|
Gilets/Puffer,attr_Outer_type_11,Outer_Type,2
|
||||||
|
Aviator jacket,attr_Outer_type_12,Outer_Type,2
|
||||||
|
Biker jacket,attr_Outer_type_13,Outer_Type,2
|
||||||
|
Pea coat,attr_Outer_type_14,Outer_Type,2
|
||||||
|
Shacket,attr_Outer_type_15,Outer_Type,2
|
||||||
|
Denim jacket,attr_Outer_type_16,Outer_Type,2
|
||||||
|
Raincoat,attr_Outer_type_17,Outer_Type,2
|
||||||
|
16
app/service/attribute/config/descriptor/top/T_print.csv
Normal file
16
app/service/attribute/config/descriptor/top/T_print.csv
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Abstract,attr_Print_O_1,Print_U,7
|
||||||
|
Allover,attr_Print_O_2,Print_U,7
|
||||||
|
Animal,attr_Print_O_3,Print_U,7
|
||||||
|
Camouflage,attr_Print_O_4,Print_U,7
|
||||||
|
Checks,attr_Print_O_5,Print_U,7
|
||||||
|
Color_block,attr_Print_O_6,Print_U,7
|
||||||
|
Disty print,attr_Print_O_7,Print_U,7
|
||||||
|
Dotted,attr_Print_O_8,Print_U,7
|
||||||
|
Floral,attr_Print_O_9,Print_U,7
|
||||||
|
Graphic print,attr_Print_O_10,Print_U,7
|
||||||
|
Logo and slogan,attr_Print_O_11,Print_U,7
|
||||||
|
Patchwork,attr_Print_O_12,Print_U,7
|
||||||
|
Plain,attr_Print_O_13,Print_U,7
|
||||||
|
Plain_dnim,attr_Print_O_14,Print_U,7
|
||||||
|
Stripe,attr_Print_O_15,Print_U,7
|
||||||
|
@@ -0,0 +1,4 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
T_softness_Soft,attr_Softness_U_1,Softness_U,9
|
||||||
|
T_softness_Medium,attr_Softness_U_2,Softness_U,9
|
||||||
|
T_softness_Hard,attr_Softness_U_3,Softness_U,9
|
||||||
|
11
app/service/attribute/config/descriptor/top/collar.csv
Normal file
11
app/service/attribute/config/descriptor/top/collar.csv
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Peterpan,attr_Collar_1,Collar,7
|
||||||
|
Shirt,attr_Collar_2,Collar,7
|
||||||
|
Rib,attr_Collar_3,Collar,7
|
||||||
|
Turtle,attr_Collar_4,Collar,7
|
||||||
|
Lapel,attr_Collar_5,Collar,7
|
||||||
|
Hoodie,attr_Collar_6,Collar,7
|
||||||
|
Mandarin,attr_Collar_7,Collar,7
|
||||||
|
Tie,attr_Collar_8,Collar,7
|
||||||
|
Ruffle,attr_Collar_9,Collar,7
|
||||||
|
Cowl,attr_Collar_10,Collar,7
|
||||||
|
15
app/service/attribute/config/descriptor/top/design.csv
Normal file
15
app/service/attribute/config/descriptor/top/design.csv
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Asymmetrical,attr_Design_U_1,Design_U,10
|
||||||
|
Tiered,attr_Design_U_2,Design_U,10
|
||||||
|
Tassel,attr_Design_U_3,Design_U,10
|
||||||
|
Ruffle,attr_Design_U_4,Design_U,10
|
||||||
|
Pleated,attr_Design_U_5,Design_U,10
|
||||||
|
Wrap,attr_Design_U_6,Design_U,10
|
||||||
|
Ripped,attr_Design_U_7,Design_U,10
|
||||||
|
Cut_out,attr_Design_U_8,Design_U,10
|
||||||
|
Eyelet,attr_Design_U_9,Design_U,10
|
||||||
|
Folded,attr_Design_U_10,Design_U,10
|
||||||
|
Tied,attr_Design_U_11,Design_U,10
|
||||||
|
Drapes,attr_Design_U_12,Design_U,10
|
||||||
|
Ribbon,attr_Design_U_13,Design_U,10
|
||||||
|
Button,attr_Design_U_14,Design_U,10
|
||||||
|
4
app/service/attribute/config/descriptor/top/length.csv
Normal file
4
app/service/attribute/config/descriptor/top/length.csv
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Short,attr_Top_length_1,Top_length,1
|
||||||
|
Regular,attr_Top_length_2,Top_length,1
|
||||||
|
Long,attr_Top_length_3,Top_length,1
|
||||||
|
7
app/service/attribute/config/descriptor/top/neckline.csv
Normal file
7
app/service/attribute/config/descriptor/top/neckline.csv
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Round,attr_Neckline_1,Neckline,6
|
||||||
|
V,attr_Neckline_2,Neckline,6
|
||||||
|
Square,attr_Neckline_3,Neckline,6
|
||||||
|
One_shoulder,attr_Neckline_4,Neckline,6
|
||||||
|
Off_shoulder,attr_Neckline_5,Neckline,6
|
||||||
|
Strapless,attr_Neckline_6,Neckline,6
|
||||||
|
@@ -0,0 +1,5 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Button,attr_OPType_U_1,OPType_U,11
|
||||||
|
Zipper,attr_OPType_U_2,OPType_U,11
|
||||||
|
Thread,attr_OPType_U_3,OPType_U,11
|
||||||
|
Hook,attr_OPType_U_4,OPType_U,11
|
||||||
|
@@ -0,0 +1,7 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
T_silhouette_A_Line,attr_Silhouette_U_1,Silhouette_U,12
|
||||||
|
T_silhouette_H_Shape,attr_Silhouette_U_2,Silhouette_U,12
|
||||||
|
T_silhouette_Slim,attr_Silhouette_U_3,Silhouette_U,12
|
||||||
|
T_silhouette_Flyman,attr_Silhouette_U_4,Silhouette_U,12
|
||||||
|
T_silhouette_Peplum,attr_Silhouette_U_5,Silhouette_U,12
|
||||||
|
T_silhouette_Oversize,attr_Silhouette_U_6,Silhouette_U,12
|
||||||
|
@@ -0,0 +1,6 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Sleeveless,attr_Sleeve_length_1,Sleeve_length,3
|
||||||
|
Short,attr_Sleeve_length_2,Sleeve_length,3
|
||||||
|
Middle,attr_Sleeve_length_3,Sleeve_length,3
|
||||||
|
Seven,attr_Sleeve_length_4,Sleeve_length,3
|
||||||
|
Long,attr_Sleeve_length_5,Sleeve_length,3
|
||||||
|
@@ -0,0 +1,9 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Regular,attr_Sleeve_shape_1,Sleeve_shape,4
|
||||||
|
Slim,attr_Sleeve_shape_2,Sleeve_shape,4
|
||||||
|
Puff,attr_Sleeve_shape_3,Sleeve_shape,4
|
||||||
|
Bell,attr_Sleeve_shape_4,Sleeve_shape,4
|
||||||
|
Batwing,attr_Sleeve_shape_5,Sleeve_shape,4
|
||||||
|
Shirt,attr_Sleeve_shape_6,Sleeve_shape,4
|
||||||
|
Rib,attr_Sleeve_shape_7,Sleeve_shape,4
|
||||||
|
Raglan,attr_Sleeve_shape_8,Sleeve_shape,4
|
||||||
|
@@ -0,0 +1,5 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Regular,attr_Sleeve_shoulder_1,Sleeve_shoulder,5
|
||||||
|
Cold,attr_Sleeve_shoulder_2,Sleeve_shoulder,5
|
||||||
|
Tucked,attr_Sleeve_shoulder_3,Sleeve_shoulder,5
|
||||||
|
Balmain,attr_Sleeve_shoulder_4,Sleeve_shoulder,5
|
||||||
|
15
app/service/attribute/config/descriptor/top/type.csv
Normal file
15
app/service/attribute/config/descriptor/top/type.csv
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
labelName,join_attr,taskName,taskId
|
||||||
|
Bandeau,attr_toptype_1,Top_Type,2
|
||||||
|
Blouse,attr_toptype_2,Top_Type,2
|
||||||
|
Bodysuit,attr_toptype_3,Top_Type,2
|
||||||
|
Bralets,attr_toptype_4,Top_Type,2
|
||||||
|
Camisole,attr_toptype_5,Top_Type,2
|
||||||
|
Crop_Top,attr_toptype_6,Top_Type,2
|
||||||
|
Hoodie,attr_toptype_7,Top_Type,2
|
||||||
|
Pullover,attr_toptype_8,Top_Type,2
|
||||||
|
Polo_shirt,attr_toptype_9,Top_Type,2
|
||||||
|
Shirt,attr_toptype_10,Top_Type,2
|
||||||
|
Strapeless,attr_toptype_11,Top_Type,2
|
||||||
|
Sweater,attr_toptype_12,Top_Type,2
|
||||||
|
Tank_Top,attr_toptype_13,Top_Type,2
|
||||||
|
T_shirt,attr_toptype_14,Top_Type,2
|
||||||
|
78
app/service/attribute/config/local_debug_const.py
Normal file
78
app/service/attribute/config/local_debug_const.py
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
# Local-debug configuration: descriptor CSV paths and Triton model names,
# grouped per garment category.  The absolute paths point at a developer
# workstation checkout and are intended for local debugging only.

_DESCRIPTOR_ROOT = r'E:\workspace\trinity_client_aida\app\service\attribute\config\descriptor'


def _descriptor(subdir, filename):
    # Reproduce the original mixed-separator form: backslash before the
    # category sub-directory, forward slash before the file name.
    return _DESCRIPTOR_ROOT + '\\' + subdir + '/' + filename


top_description_list = [
    _descriptor('top', name) for name in (
        'length.csv',
        'type.csv',
        'sleeve_length.csv',
        'sleeve_shape.csv',
        'sleeve_shoulder.csv',
        'neckline.csv',
        'design.csv',
        'opening_type.csv',
        'silhouette.csv',
        'collar.csv',
    )
]

top_model_list = [
    'attr_retrieve_T_length',
    'attr_retrieve_T_type',
    'attr_retrieve_T_sleeve_length',
    'attr_retrieve_T_sleeve_shape',
    'attr_retrieve_T_sleeve_shoulder',
    'attr_retrieve_T_neckline',
    'attr_retrieve_T_design',
    'attr_retrieve_T_optype',
    'attr_retrieve_T_silhouette',
    'attr_retrieve_T_collar',
]

bottom_description_list = [
    _descriptor('bottom', name) for name in (
        'subtype.csv',
        'length.csv',
        'silhouette.csv',
        'opening_type.csv',
        'design.csv',
    )
]

bottom_model_list = [
    'attr_retrieve_B_subtype',
    'attr_retrieve_B_length',
    'attr_retrieve_B_silhouette',
    'attr_recong_B_optype',  # NOTE(review): "recong" looks like a typo but must match the deployed model name
    'attr_retrieve_B_design',
]

outwear_description_list = [
    _descriptor('outwear', name) for name in (
        'length.csv',
        'sleeve_length.csv',
        'sleeve_shape.csv',
        'sleeve_shoulder.csv',
        'collar.csv',
        'design.csv',
        'opening_type.csv',
        'silhouette.csv',
    )
]

outwear_model_list = [
    'attr_recong_O_length',  # NOTE(review): "recong" kept verbatim - presumed deployed model name
    'attr_retrieve_O_sleeve_length',
    'attr_retrieve_O_sleeve_shape',
    'attr_retrieve_O_sleeve_shoulder',
    'attr_retrieve_O_collar',
    'attr_retrieve_O_design',
    'attr_recong_O_optype',
    'attr_retrieve_O_silhouette',
]

# 'D_length.csv' and 'D_sleeve_shoulder.csv' were disabled (commented out)
# in the original configuration, together with their model names below.
dress_description_list = [
    _descriptor('dress', name) for name in (
        'sleeve_length.csv',
        'sleeve_shape.csv',
        'neckline.csv',
        'collar.csv',
        'design.csv',
        'silhouette.csv',
        'type.csv',
    )
]

dress_model_list = [
    'attr_retrieve_D_sleeve_length',
    'attr_retrieve_D_sleeve_shape',
    'attr_retrieve_D_neckline',
    'attr_retrieve_D_collar',
    'attr_retrieve_D_design',
    'attr_retrieve_D_silohouette',  # NOTE(review): "silohouette" kept verbatim - presumed deployed model name
    'attr_retrieve_D_type',
]

category_description = './description/category/category_dis.csv'
category_model = 'attr_retrieve_category'
|
||||||
170
app/service/attribute/service_att_recognition.py
Normal file
170
app/service/attribute/service_att_recognition.py
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: UTF-8 -*-
|
||||||
|
import logging
|
||||||
|
from pprint import pprint
|
||||||
|
import torch
|
||||||
|
import cv2
|
||||||
|
import mmcv
|
||||||
|
import numpy as np
|
||||||
|
import pandas as pd
|
||||||
|
from minio import Minio
|
||||||
|
import tritonclient.http as httpclient
|
||||||
|
from app.core.config import *
|
||||||
|
from app.schemas.attribute_retrieve import AttributeRecognitionModel
|
||||||
|
|
||||||
|
|
||||||
|
class AttributeRecognition:
    """Recognise garment attributes for a batch of sketch images.

    Each request item is downloaded from MinIO, preprocessed to the model
    input layout, and then classified by one Triton model per attribute of
    the sketch's garment category.

    Args:
        const: config module exposing paired ``*_description_list`` /
            ``*_model_list`` attributes per garment category.
        request_data: iterable of ``AttributeRecognitionModel`` items.
    """

    # Request category -> (description-list attr, model-list attr) on ``const``.
    # Several request categories share the same attribute group; this replaces
    # four copy-pasted per-category loops in ``get_result``.
    _CATEGORY_GROUPS = {
        'Tops': ('top_description_list', 'top_model_list'),
        'Blouse': ('top_description_list', 'top_model_list'),
        'Trousers': ('bottom_description_list', 'bottom_model_list'),
        'Skirt': ('bottom_description_list', 'bottom_model_list'),
        'Bottoms': ('bottom_description_list', 'bottom_model_list'),
        'Dress': ('dress_description_list', 'dress_model_list'),
        'Outwear': ('outwear_description_list', 'outwear_model_list'),
    }

    def __init__(self, const, request_data):
        self.minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE)
        logging.info("实例化完成")
        # Normalise requests into dicts, eagerly fetching + preprocessing images.
        self.request_data = [
            {
                'obj': self.preprocess(self.get_image(sketch.sketch_img_url)),
                'category': sketch.category,
                'colony': sketch.colony,
                'sketch_img_url': sketch.sketch_img_url,
            }
            for sketch in request_data
        ]
        self.const = const
        self.triton_client = httpclient.InferenceServerClient(url=f"{ATT_TRITON_URL}")

    def __del__(self):
        # Best effort: release the Triton HTTP connection on teardown.
        self.triton_client.close()

    def get_result(self):
        """Run every attribute model for each sketch.

        Returns:
            The request list with an ``attr_dict`` added per sketch (empty
            for unknown categories) and the preprocessed image removed.
        """
        for sketch in self.request_data:
            attr_dict = {}
            group = self._CATEGORY_GROUPS.get(sketch['category'])
            if group is not None:
                descriptions = getattr(self.const, group[0])
                models = getattr(self.const, group[1])
                for attr_description, attr_model_path in zip(descriptions, models):
                    present_dict = self.get_attribute(attr_model_path, attr_description, sketch['obj'])
                    attr_dict = self.merge(attr_dict, present_dict)
            sketch['attr_dict'] = attr_dict
            del sketch['obj']  # drop the large image tensor before returning
        return self.request_data

    def get_attribute(self, model_name, description, image):
        """Classify one attribute and return ``{task_name: [labels, ...]}``.

        Reads label names from the ``description`` CSV, runs the Triton model
        ``model_name`` on ``image`` and collects every label whose score
        equals the maximum score.
        """
        attr_type = pd.read_csv(description)
        inputs = [httpclient.InferInput("input__0", image.shape, datatype="FP32")]
        inputs[0].set_data_from_numpy(image, binary_data=True)
        results = self.triton_client.infer(model_name=model_name, inputs=inputs)
        scores = torch.from_numpy(results.as_numpy("output__0")).detach().numpy()
        labels = list(attr_type['labelName'])
        # The task name is the CSV file name without its ".csv" suffix.
        task = description.split('/')[-1][:-4]
        # NOTE(review): the max is taken over the first five scores only while
        # argwhere searches the full row - confirm this asymmetry is intended.
        maxsc = np.max(scores[0][:5])
        indexs = np.argwhere(scores == maxsc)[:, 1]
        return {task: [labels[i] for i in indexs]}

    @staticmethod
    def merge(dict1, dict2):
        """Merge two attribute dicts; keys of ``dict2`` win on collision."""
        return {**dict1, **dict2}

    def get_image(self, url):
        """Download ``url`` ("bucket/object") from MinIO and decode it to RGB."""
        response = self.minio_client.get_object(url.split("/", 1)[0], url.split("/", 1)[1])
        img = np.frombuffer(response.data, np.uint8)  # raw bytes as uint8
        img = cv2.imdecode(img, cv2.IMREAD_COLOR)  # decode to BGR
        return cv2.cvtColor(img, cv2.COLOR_BGR2RGB)

    @staticmethod
    def preprocess(img):
        """Resize to 224x224, normalise and return a float NCHW batch of one."""
        img = mmcv.imread(img)
        img, _, _ = mmcv.imresize(img, (224, 224), return_scale=True)
        img = mmcv.imnormalize(img,
                               mean=np.array([123.675, 116.28, 103.53]),
                               std=np.array([58.395, 57.12, 57.375]),
                               to_rgb=True)
        return np.expand_dims(img.transpose(2, 0, 1), axis=0)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Local smoke test: run attribute recognition against a handful of
    # previously uploaded sketches using the workstation debug config.
    _SKETCHES = (
        ("Dress", "aida-users/89/sketchboard/female/Dress/ae976103-d7ec-4eed-b5d1-3e5f04d8be26.jpg"),
        ("Dress", "aida-users/89/sketchboard/female/Dress/6d7d97a7-5a7d-48bd-9e14-b51119b48620.jpg"),
        ("Dress", "aida-users/89/sketchboard/female/Dress/f2437141-1104-40a5-bcb9-f436088698bb.jpg"),
        ("Dress", "aida-users/89/sketchboard/female/Dress/07af8613-eb2e-44fd-97cb-a97249a5754c.jpg"),
        ("Blouse", "aida-users/89/sketchboard/female/Dress/bac9fb15-6860-4112-ac97-f0dea079da75.jpg"),
        ("Dress", "aida-users/89/sketchboard/female/Dress/11d59844-effa-4590-82f9-9ea382c76126.jpg"),
        ("Dress", "aida-users/89/sketchboard/female/Dress/849bf94c-66b8-42f5-8c2e-c1c1f4c8d0e0.jpg"),
        ("Dress", "aida-users/89/sketchboard/female/Dress/e6724ab7-8d3f-4677-abe0-c3e42ab7af85.jpeg"),
    )
    from app.service.attribute.config import local_debug_const

    rq_data = [
        AttributeRecognitionModel(category=category, colony="Female", sketch_img_url=url)
        for category, url in _SKETCHES
    ]
    server = AttributeRecognition(local_debug_const, rq_data)
    pprint(server.get_result())
|
||||||
171
app/service/attribute/service_category_recognition.py
Normal file
171
app/service/attribute/service_category_recognition.py
Normal file
@@ -0,0 +1,171 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: UTF-8 -*-
|
||||||
|
"""
|
||||||
|
@Project :trinity_client
|
||||||
|
@File :server.py
|
||||||
|
@Author :周成融
|
||||||
|
@Date :2023/9/16 18:31:08
|
||||||
|
@detail :
|
||||||
|
"""
|
||||||
|
from skimage import transform
|
||||||
|
import cv2
|
||||||
|
import mmcv
|
||||||
|
import numpy as np
|
||||||
|
import pandas as pd
|
||||||
|
from minio import Minio
|
||||||
|
import tritonclient.http as httpclient
|
||||||
|
import torch
|
||||||
|
|
||||||
|
from app.core.config import *
|
||||||
|
from app.schemas.attribute_retrieve import CategoryRecognitionModel
|
||||||
|
|
||||||
|
|
||||||
|
class CategoryRecognition:
    """Predict a garment category for each sketch via a Triton classifier.

    Images are fetched from MinIO, preprocessed to the model input layout
    and classified with the ``attr_retrieve_category`` model.  For the
    "Male" colony the predicted category is coarsened to
    Bottoms / Tops / Outwear.

    Args:
        request_data: iterable of ``CategoryRecognitionModel`` items.
    """

    def __init__(self, request_data):
        # Category label table: maps model output index -> label name.
        self.attr_type = pd.read_csv(CATEGORY_PATH)
        self.minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE)
        self.triton_client = httpclient.InferenceServerClient(url=ATT_TRITON_URL)
        # Normalise requests into dicts, eagerly fetching + preprocessing images.
        self.request_data = [
            {
                'obj': self.preprocess(self.get_image(sketch.sketch_img_url)),
                'colony': sketch.colony,
                'sketch_img_url': sketch.sketch_img_url,
            }
            for sketch in request_data
        ]

    @staticmethod
    def preprocess(img):
        """Resize to 224x224, normalise and return a float NCHW batch of one."""
        img = mmcv.imread(img)
        img, _, _ = mmcv.imresize(img, (224, 224), return_scale=True)
        img = mmcv.imnormalize(img,
                               mean=np.array([123.675, 116.28, 103.53]),
                               std=np.array([58.395, 57.12, 57.375]),
                               to_rgb=True)
        return np.expand_dims(img.transpose(2, 0, 1), axis=0)

    def get_image(self, url):
        """Download ``url`` ("bucket/object") from MinIO and decode it to RGB."""
        response = self.minio_client.get_object(url.split("/", 1)[0], url.split("/", 1)[1])
        img = np.frombuffer(response.data, np.uint8)  # raw bytes as uint8
        img = cv2.imdecode(img, cv2.IMREAD_COLOR)  # decode to BGR
        return cv2.cvtColor(img, cv2.COLOR_BGR2RGB)

    def get_category(self, image):
        """Return the single best category label for a preprocessed image."""
        inputs = [httpclient.InferInput("input__0", image.shape, datatype="FP32")]
        inputs[0].set_data_from_numpy(image, binary_data=True)
        results = self.triton_client.infer(model_name="attr_retrieve_category", inputs=inputs)
        scores = torch.from_numpy(results.as_numpy('output__0')).detach().numpy()
        labels = list(self.attr_type['labelName'])
        # NOTE(review): the max is taken over the first five scores only while
        # argwhere searches the full row - confirm this asymmetry is intended.
        maxsc = np.max(scores[0][:5])
        indexs = np.argwhere(scores == maxsc)[:, 1]
        return labels[indexs[0]]

    def get_result(self):
        """Attach a ``category`` to each Female/Male sketch; drop the image.

        Sketches with any other colony pass through without a ``category``
        key, matching the original behaviour.
        """
        for sketch in self.request_data:
            if sketch['colony'] == "Female":
                sketch['category'] = self.get_category(sketch["obj"])
            elif sketch['colony'] == "Male":
                # Male sketches only distinguish coarse garment groups.
                category = self.get_category(sketch["obj"])
                if category in ('Trousers', 'Skirt'):
                    sketch['category'] = 'Bottoms'
                elif category in ('Blouse', 'Dress'):
                    sketch['category'] = 'Tops'
                else:
                    sketch['category'] = 'Outwear'
            # Free the preprocessed image before returning the payload.
            del sketch["obj"]
        return self.request_data
|
||||||
|
|
||||||
|
|
||||||
|
class Rescale(object):
    """Rescale an image (and its landmarks) to a given size.

    Args:
        output_size (tuple or int): Desired output size.  A tuple is used
            as-is; an int matches the smaller image edge and preserves the
            aspect ratio.
    """

    def __init__(self, output_size):
        assert isinstance(output_size, (int, tuple))
        self.output_size = output_size

    def __call__(self, image, landmarks):
        height, width = image.shape[:2]
        size = self.output_size
        if isinstance(size, int):
            # The shorter edge becomes ``size``; the longer one scales along.
            if height > width:
                target_h, target_w = size * height / width, size
            else:
                target_h, target_w = size, size * width / height
        else:
            target_h, target_w = size
        target_h, target_w = int(target_h), int(target_w)
        resized = transform.resize(image, (target_h, target_w), mode='constant')
        # Landmarks are (x, y) while the image shape is (h, w), so the
        # scale factors are applied in swapped order.
        scaled_landmarks = landmarks * [target_w / width, target_h / height]
        return resized, scaled_landmarks
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Local smoke test: classify a handful of previously uploaded sketches.
    _URLS = (
        "aida-users/89/sketchboard/female/Dress/ae976103-d7ec-4eed-b5d1-3e5f04d8be26.jpg",
        "aida-users/89/sketchboard/female/Dress/6d7d97a7-5a7d-48bd-9e14-b51119b48620.jpg",
        "aida-users/89/sketchboard/female/Dress/f2437141-1104-40a5-bcb9-f436088698bb.jpg",
        "aida-users/89/sketchboard/female/Dress/07af8613-eb2e-44fd-97cb-a97249a5754c.jpg",
        "aida-users/89/sketchboard/female/Dress/bac9fb15-6860-4112-ac97-f0dea079da75.jpg",
        "aida-users/89/sketchboard/female/Dress/11d59844-effa-4590-82f9-9ea382c76126.jpg",
        "aida-users/89/sketchboard/female/Dress/849bf94c-66b8-42f5-8c2e-c1c1f4c8d0e0.jpg",
        "aida-users/89/sketchboard/female/Dress/e6724ab7-8d3f-4677-abe0-c3e42ab7af85.jpeg",
    )
    rq_data = [CategoryRecognitionModel(sketch_img_url=url, colony="Female") for url in _URLS]
    server = CategoryRecognition(rq_data)

    print(server.get_result())
|
||||||
@@ -2,29 +2,24 @@
|
|||||||
# -*- coding: UTF-8 -*-
|
# -*- coding: UTF-8 -*-
|
||||||
"""
|
"""
|
||||||
@Project :trinity_client
|
@Project :trinity_client
|
||||||
@File :service.py
|
@File :service_att_recognition.py
|
||||||
@Author :周成融
|
@Author :周成融
|
||||||
@Date :2023/7/26 12:01:05
|
@Date :2023/7/26 12:01:05
|
||||||
@detail :
|
@detail :
|
||||||
"""
|
"""
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
import minio
|
|
||||||
import numpy as np
|
|
||||||
import random
|
|
||||||
import redis
|
|
||||||
import tritonclient
|
|
||||||
import tritonclient.grpc as grpc_client
|
|
||||||
from minio import Minio
|
|
||||||
import cv2
|
|
||||||
from PIL import Image
|
|
||||||
import time
|
import time
|
||||||
|
|
||||||
|
import redis
|
||||||
|
import tritonclient.grpc as grpcclient
|
||||||
|
import numpy as np
|
||||||
|
from minio import Minio
|
||||||
|
from tritonclient.utils import np_to_triton_dtype
|
||||||
|
|
||||||
from app.core.config import *
|
from app.core.config import *
|
||||||
from app.schemas.generate_image import GenerateImageModel
|
from app.schemas.generate_image import GenerateImageModel
|
||||||
from app.service.generate_image.utils.remove_background import remove_background
|
|
||||||
from app.service.generate_image.utils.upload_sd_image import upload_png_sd
|
from app.service.generate_image.utils.upload_sd_image import upload_png_sd
|
||||||
from app.service.utils.decorator import RunTime
|
|
||||||
from app.service.utils.generate_uuid import generate_uuid
|
from app.service.utils.generate_uuid import generate_uuid
|
||||||
|
|
||||||
logger = logging.getLogger()
|
logger = logging.getLogger()
|
||||||
@@ -32,170 +27,86 @@ logger = logging.getLogger()
|
|||||||
|
|
||||||
class GenerateImage:
|
class GenerateImage:
|
||||||
def __init__(self, request_data):
|
def __init__(self, request_data):
|
||||||
self.tasks_id = request_data.tasks_id
|
self.minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE)
|
||||||
self.image_url = request_data.image_url
|
self.grpc_client = grpcclient.InferenceServerClient(url=GI_MODEL_URL)
|
||||||
self.user_id = request_data.user_id
|
|
||||||
self.content = request_data.content
|
|
||||||
self.category = request_data.category
|
|
||||||
self.model_name = f"{self.category}{GI_MODEL_NAME}"
|
|
||||||
self.mode = request_data.mode
|
|
||||||
self.version = request_data.version
|
|
||||||
self.triton_client = grpc_client.InferenceServerClient(url=f"{GI_MODEL_URL}")
|
|
||||||
self.redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True)
|
self.redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True)
|
||||||
self.connection = pika.BlockingConnection(pika.ConnectionParameters(**RABBITMQ_PARAMS))
|
self.connection = pika.BlockingConnection(pika.ConnectionParameters(**RABBITMQ_PARAMS))
|
||||||
self.channel = self.connection.channel()
|
self.channel = self.connection.channel()
|
||||||
self.minio_client = Minio(
|
if request_data.mode == "txt2img":
|
||||||
f"{MINIO_IP}:{MINIO_PORT}",
|
self.image = np.random.randint(0, 256, (1024, 1024, 3), dtype=np.uint8)
|
||||||
access_key=MINIO_ACCESS,
|
else:
|
||||||
secret_key=MINIO_SECRET,
|
self.image = request_data.image
|
||||||
secure=MINIO_SECURE)
|
self.tasks_id = request_data.tasks_id
|
||||||
|
self.user_id = self.tasks_id[self.tasks_id.rfind('-') + 1:]
|
||||||
self.samples = 4 # no.of images to generate
|
self.prompt = request_data.prompt
|
||||||
self.steps = 24
|
self.mode = request_data.mode
|
||||||
self.guidance_scale = 7
|
|
||||||
self.seed = random.randint(0, 2000000000)
|
|
||||||
self.batch_size = 1
|
self.batch_size = 1
|
||||||
self.generate_data = json.dumps({'status': 'PENDING', 'message': "pending", 'data': ''})
|
self.category = request_data.category
|
||||||
self.redis_client.set(self.tasks_id, self.generate_data)
|
self.index = 0
|
||||||
|
|
||||||
def __del__(self):
|
def __del__(self):
|
||||||
self.redis_client.close()
|
self.redis_client.close()
|
||||||
self.triton_client.close()
|
self.grpc_client.close()
|
||||||
self.connection.close()
|
self.connection.close()
|
||||||
|
|
||||||
@staticmethod
|
def __call__(self, *args, **kwargs):
|
||||||
def image_grid(imgs, rows, cols):
|
self.generate_data = json.dumps({'status': 'PENDING', 'message': "pending", 'data': ''})
|
||||||
assert len(imgs) == rows * cols
|
self.redis_client.set(self.tasks_id, self.generate_data)
|
||||||
|
|
||||||
w, h = imgs[0].size
|
|
||||||
grid = Image.new('RGB', size=(cols * w, rows * h))
|
|
||||||
|
|
||||||
for i, img in enumerate(imgs):
|
|
||||||
grid.paste(img, box=(i % cols * w, i // cols * h))
|
|
||||||
return grid
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def preprocess_image(image, category):
|
|
||||||
height, width, _ = image.shape
|
|
||||||
|
|
||||||
if category == "print" or category == "moodboard":
|
|
||||||
square_size = min(height, width)
|
|
||||||
start_x = (width - square_size) // 2
|
|
||||||
start_y = (height - square_size) // 2
|
|
||||||
cropped = image[start_y: start_y + square_size, start_x: start_x + square_size]
|
|
||||||
resized_image = cv2.resize(cropped, (512, 512))
|
|
||||||
|
|
||||||
elif category == "sketch":
|
|
||||||
# below is the way that get "bigger" square image.
|
|
||||||
max_dimension = max(height, width)
|
|
||||||
square_image = np.ones((max_dimension, max_dimension, 3), dtype=np.uint8) * 255
|
|
||||||
start_h = (max_dimension - height) // 2
|
|
||||||
start_w = (max_dimension - width) // 2
|
|
||||||
square_image[start_h:start_h + height, start_w:start_w + width] = image
|
|
||||||
resized_image = cv2.resize(square_image, (512, 512))
|
|
||||||
|
|
||||||
else:
|
|
||||||
raise ValueError(f"wrong category {category}, only in moodboard, print and sketch!")
|
|
||||||
|
|
||||||
return resized_image
|
|
||||||
|
|
||||||
def get_image(self):
|
|
||||||
# Get data of an object.
|
|
||||||
# Read data from response.
|
|
||||||
try:
|
|
||||||
response = self.minio_client.get_object(self.image_url.split('/')[0], self.image_url[self.image_url.find('/') + 1:])
|
|
||||||
img = np.frombuffer(response.data, np.uint8) # 转成8位无符号整型
|
|
||||||
img = cv2.imdecode(img, cv2.IMREAD_COLOR) # 解码
|
|
||||||
img = self.preprocess_image(img, self.category)
|
|
||||||
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
|
|
||||||
except minio.error.S3Error:
|
|
||||||
img = np.random.randn(512, 512, 3)
|
|
||||||
return img
|
|
||||||
|
|
||||||
def callback(self, result, error):
|
def callback(self, result, error):
|
||||||
if error:
|
if error:
|
||||||
generate_data = json.dumps({'status': 'FAILURE', 'message': f"{error}", 'data': f"{error}"})
|
generate_data = json.dumps({'status': 'FAILURE', 'message': f"{error}", 'data': f"{error}"})
|
||||||
self.redis_client.set(self.tasks_id, generate_data)
|
self.redis_client.set(self.tasks_id, generate_data)
|
||||||
else:
|
else:
|
||||||
images = result.as_numpy("IMAGES")
|
image_result = result.as_numpy("generated_image")[0]
|
||||||
if images.ndim == 3:
|
image_url = upload_png_sd(image_result, user_id=self.user_id, category=f"{self.category}", object_name=f"{self.tasks_id}.png")
|
||||||
images = images[None, ...]
|
generate_data = json.dumps({'status': 'SUCCESS', 'message': 'success', 'data': f'{image_url}'})
|
||||||
images = (images * 255).round().astype("uint8")
|
|
||||||
pil_images = [Image.fromarray(image) for image in images]
|
|
||||||
|
|
||||||
# for i in range(len(pil_images)):
|
|
||||||
# pil = pil_images[i]
|
|
||||||
# pil.save(f'./temp_i2_{i}.png')
|
|
||||||
# self.image_grid(pil_images, rows, cols)
|
|
||||||
url_list = []
|
|
||||||
for i, image in enumerate(pil_images):
|
|
||||||
|
|
||||||
if self.category == "sketch":
|
|
||||||
image = remove_background(np.asarray(image))
|
|
||||||
image_url = upload_png_sd(image, user_id=self.user_id, category=f"{self.category}", object_name=f"{generate_uuid()}_{i}.png", )
|
|
||||||
url_list.append(image_url)
|
|
||||||
generate_data = json.dumps({'status': 'SUCCESS', 'message': 'success', 'data': f'{url_list}'})
|
|
||||||
self.channel.basic_publish(exchange='', routing_key=GI_RABBITMQ_QUEUES, body=generate_data)
|
|
||||||
logger.info(f" [x] Sent {generate_data}")
|
|
||||||
self.redis_client.set(self.tasks_id, generate_data)
|
self.redis_client.set(self.tasks_id, generate_data)
|
||||||
|
|
||||||
def read_tasks_status(self):
|
def read_tasks_status(self):
|
||||||
status_data = json.loads(self.redis_client.get(self.tasks_id))
|
status_data = json.loads(self.redis_client.get(self.tasks_id))
|
||||||
logging.info(f"{self.tasks_id} ===> {status_data}")
|
|
||||||
return status_data
|
return status_data
|
||||||
|
|
||||||
@RunTime
|
def infer(self, inputs):
|
||||||
def get_result(self):
|
return self.grpc_client.async_infer(
|
||||||
self.triton_client.get_model_metadata(model_name=self.model_name, model_version=self.version)
|
model_name=GI_MODEL_NAME,
|
||||||
self.triton_client.get_model_config(model_name=self.model_name, model_version=self.version)
|
inputs=inputs,
|
||||||
|
|
||||||
image = self.get_image()
|
|
||||||
|
|
||||||
# Input placeholder
|
|
||||||
prompt_in = tritonclient.grpc.InferInput(name="PROMPT", shape=(self.batch_size,), datatype="BYTES")
|
|
||||||
samples_in = tritonclient.grpc.InferInput("SAMPLES", (self.batch_size,), "INT32")
|
|
||||||
steps_in = tritonclient.grpc.InferInput("STEPS", (self.batch_size,), "INT32")
|
|
||||||
guidance_scale_in = tritonclient.grpc.InferInput("GUIDANCE_SCALE", (self.batch_size,), "FP32")
|
|
||||||
seed_in = tritonclient.grpc.InferInput("SEED", (self.batch_size,), "INT64")
|
|
||||||
input_images_in = tritonclient.grpc.InferInput("INPUT_IMAGES", image.shape, "FP16")
|
|
||||||
images = tritonclient.grpc.InferRequestedOutput(name="IMAGES",
|
|
||||||
# binary_data=False
|
|
||||||
)
|
|
||||||
mode_in = tritonclient.grpc.InferInput("MODE", (self.batch_size,), "INT32")
|
|
||||||
|
|
||||||
# Setting inputs
|
|
||||||
prompt_in.set_data_from_numpy(np.asarray([self.content] * self.batch_size, dtype=object))
|
|
||||||
samples_in.set_data_from_numpy(np.asarray([self.samples], dtype=np.int32))
|
|
||||||
steps_in.set_data_from_numpy(np.asarray([self.steps], dtype=np.int32))
|
|
||||||
guidance_scale_in.set_data_from_numpy(np.asarray([self.guidance_scale], dtype=np.float32))
|
|
||||||
seed_in.set_data_from_numpy(np.asarray([self.seed], dtype=np.int64))
|
|
||||||
input_images_in.set_data_from_numpy(image.astype(np.float16))
|
|
||||||
mode_in.set_data_from_numpy(np.asarray([self.mode], dtype=np.int32))
|
|
||||||
|
|
||||||
# inference
|
|
||||||
@RunTime
|
|
||||||
def infer():
|
|
||||||
return self.triton_client.async_infer(
|
|
||||||
model_name=self.model_name,
|
|
||||||
model_version=self.version,
|
|
||||||
inputs=[prompt_in, samples_in, steps_in, guidance_scale_in, seed_in, input_images_in, mode_in],
|
|
||||||
outputs=[images],
|
|
||||||
callback=self.callback
|
callback=self.callback
|
||||||
)
|
)
|
||||||
|
|
||||||
ctx = infer()
|
def get_result(self):
|
||||||
|
prompts = [self.prompt] * self.batch_size
|
||||||
|
modes = [self.mode] * self.batch_size
|
||||||
|
images = [self.image.astype(np.float16)] * self.batch_size
|
||||||
|
|
||||||
|
text_obj = np.array(prompts, dtype="object").reshape((-1, 1))
|
||||||
|
mode_obj = np.array(modes, dtype="object").reshape((-1, 1))
|
||||||
|
image_obj = np.array(images, dtype=np.float16).reshape((-1, 1024, 1024, 3))
|
||||||
|
|
||||||
|
input_text = grpcclient.InferInput("prompt", text_obj.shape, np_to_triton_dtype(text_obj.dtype))
|
||||||
|
input_image = grpcclient.InferInput("input_image", image_obj.shape, "FP16")
|
||||||
|
input_mode = grpcclient.InferInput("mode", mode_obj.shape, np_to_triton_dtype(text_obj.dtype))
|
||||||
|
|
||||||
|
input_text.set_data_from_numpy(text_obj)
|
||||||
|
input_image.set_data_from_numpy(image_obj)
|
||||||
|
input_mode.set_data_from_numpy(mode_obj)
|
||||||
|
|
||||||
|
inputs = [input_text, input_image, input_mode]
|
||||||
|
ctx = self.infer(inputs)
|
||||||
time_out = 60
|
time_out = 60
|
||||||
while time_out > 0:
|
while time_out > 0:
|
||||||
generate_data = self.read_tasks_status()
|
generate_data = self.read_tasks_status()
|
||||||
if generate_data['status'] in ["REVOKED", "FAILURE"]:
|
if generate_data['status'] in ["REVOKED", "FAILURE"]:
|
||||||
ctx.cancel()
|
ctx.cancel()
|
||||||
self.channel.basic_publish(exchange='', routing_key=GI_RABBITMQ_QUEUES, body=json.dumps(generate_data))
|
self.channel.basic_publish(exchange='', routing_key=GI_RABBITMQ_QUEUES, body=json.dumps(generate_data))
|
||||||
logger.info(f" [x] Sent {generate_data}")
|
logger.info(f" [x] Sent {json.dumps(generate_data, indent=4)}")
|
||||||
break
|
break
|
||||||
elif generate_data['status'] == "SUCCESS":
|
elif generate_data['status'] == "SUCCESS":
|
||||||
|
self.channel.basic_publish(exchange='', routing_key=GI_RABBITMQ_QUEUES, body=json.dumps(generate_data))
|
||||||
|
logger.info(f" [x] Sent {json.dumps(generate_data, indent=4)}")
|
||||||
break
|
break
|
||||||
time_out -= 1
|
time_out -= 1
|
||||||
time.sleep(1)
|
time.sleep(0.1)
|
||||||
return self.read_tasks_status()
|
return self.read_tasks_status()
|
||||||
|
|
||||||
|
|
||||||
@@ -208,24 +119,12 @@ def infer_cancel(tasks_id):
|
|||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
# request_data = {
|
|
||||||
# "user_id": 78,
|
|
||||||
# "image_url": "123_123.png",
|
|
||||||
# "category": "print",
|
|
||||||
# "mode": 1,
|
|
||||||
# "str": "a simple print",
|
|
||||||
# "version": "1"
|
|
||||||
# }
|
|
||||||
rd = GenerateImageModel(
|
rd = GenerateImageModel(
|
||||||
mode=1,
|
tasks_id="123-89",
|
||||||
content='a blouse',
|
prompt='skeleton sitting by the side of a river looking soulful, concert poster, 4k, artistic',
|
||||||
gender='',
|
image_url="",
|
||||||
user_id=89,
|
mode='txt2img',
|
||||||
image_url='test/微信图片_20231206133428.jpg',
|
category="test"
|
||||||
category='sketch',
|
|
||||||
version='1',
|
|
||||||
tasks_id='123456'
|
|
||||||
)
|
)
|
||||||
server = GenerateImage(rd)
|
server = GenerateImage(rd)
|
||||||
server.get_result()
|
print(server.get_result())
|
||||||
# print(infer_cancel(123456))
|
|
||||||
|
|||||||
237
app/service/generate_image/test.py
Normal file
237
app/service/generate_image/test.py
Normal file
@@ -0,0 +1,237 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: UTF-8 -*-
|
||||||
|
"""
|
||||||
|
@Project :trinity_client
|
||||||
|
@File :service_att_recognition.py
|
||||||
|
@Author :周成融
|
||||||
|
@Date :2023/7/26 12:01:05
|
||||||
|
@detail :
|
||||||
|
"""
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import minio
|
||||||
|
import numpy as np
|
||||||
|
import random
|
||||||
|
import redis
|
||||||
|
import tritonclient
|
||||||
|
import tritonclient.grpc as grpc_client
|
||||||
|
from minio import Minio
|
||||||
|
import cv2
|
||||||
|
from PIL import Image
|
||||||
|
import time
|
||||||
|
from app.core.config import *
|
||||||
|
from app.schemas.generate_image import GenerateImageModel
|
||||||
|
from app.service.generate_image.utils.remove_background import remove_background
|
||||||
|
from app.service.generate_image.utils.upload_sd_image import upload_png_sd
|
||||||
|
from app.service.utils.decorator import RunTime
|
||||||
|
from app.service.utils.generate_uuid import generate_uuid
|
||||||
|
|
||||||
|
logger = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class GenerateImage:
|
||||||
|
def __init__(self, request_data):
|
||||||
|
self.tasks_id = request_data.tasks_id
|
||||||
|
self.model = request_data.model
|
||||||
|
self.request_count = request_data.request_count
|
||||||
|
self.prompt = request_data.prompt
|
||||||
|
self.image = request_data.image
|
||||||
|
self.mode = request_data.mode
|
||||||
|
self.batch_size = request_data.batch_size
|
||||||
|
|
||||||
|
self.image_url = request_data.image_url
|
||||||
|
self.user_id = request_data.user_id
|
||||||
|
self.content = request_data.content
|
||||||
|
self.category = request_data.category
|
||||||
|
self.model_name = f"{self.category}{GI_MODEL_NAME}"
|
||||||
|
self.mode = request_data.mode
|
||||||
|
self.version = request_data.version
|
||||||
|
self.triton_client = grpc_client.InferenceServerClient(url="1")
|
||||||
|
self.redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True)
|
||||||
|
self.connection = pika.BlockingConnection(pika.ConnectionParameters(**RABBITMQ_PARAMS))
|
||||||
|
self.channel = self.connection.channel()
|
||||||
|
self.minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE)
|
||||||
|
self.samples = 4 # no.of images to generate
|
||||||
|
self.steps = 24
|
||||||
|
self.guidance_scale = 7
|
||||||
|
self.seed = random.randint(0, 2000000000)
|
||||||
|
self.batch_size = 1
|
||||||
|
self.generate_data = json.dumps({'status': 'PENDING', 'message': "pending", 'data': ''})
|
||||||
|
self.redis_client.set(self.tasks_id, self.generate_data)
|
||||||
|
|
||||||
|
def get_result(self):
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
def __del__(self):
|
||||||
|
self.redis_client.close()
|
||||||
|
self.triton_client.close()
|
||||||
|
self.connection.close()
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def image_grid(imgs, rows, cols):
|
||||||
|
assert len(imgs) == rows * cols
|
||||||
|
|
||||||
|
w, h = imgs[0].size
|
||||||
|
grid = Image.new('RGB', size=(cols * w, rows * h))
|
||||||
|
|
||||||
|
for i, img in enumerate(imgs):
|
||||||
|
grid.paste(img, box=(i % cols * w, i // cols * h))
|
||||||
|
return grid
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def preprocess_image(image, category):
|
||||||
|
height, width, _ = image.shape
|
||||||
|
|
||||||
|
if category == "print" or category == "moodboard":
|
||||||
|
square_size = min(height, width)
|
||||||
|
start_x = (width - square_size) // 2
|
||||||
|
start_y = (height - square_size) // 2
|
||||||
|
cropped = image[start_y: start_y + square_size, start_x: start_x + square_size]
|
||||||
|
resized_image = cv2.resize(cropped, (512, 512))
|
||||||
|
|
||||||
|
elif category == "sketch":
|
||||||
|
# below is the way that get "bigger" square image.
|
||||||
|
max_dimension = max(height, width)
|
||||||
|
square_image = np.ones((max_dimension, max_dimension, 3), dtype=np.uint8) * 255
|
||||||
|
start_h = (max_dimension - height) // 2
|
||||||
|
start_w = (max_dimension - width) // 2
|
||||||
|
square_image[start_h:start_h + height, start_w:start_w + width] = image
|
||||||
|
resized_image = cv2.resize(square_image, (512, 512))
|
||||||
|
|
||||||
|
else:
|
||||||
|
raise ValueError(f"wrong category {category}, only in moodboard, print and sketch!")
|
||||||
|
|
||||||
|
return resized_image
|
||||||
|
|
||||||
|
def get_image(self):
|
||||||
|
# Get data of an object.
|
||||||
|
# Read data from response.
|
||||||
|
try:
|
||||||
|
response = self.minio_client.get_object(self.image_url.split('/')[0], self.image_url[self.image_url.find('/') + 1:])
|
||||||
|
img = np.frombuffer(response.data, np.uint8) # 转成8位无符号整型
|
||||||
|
img = cv2.imdecode(img, cv2.IMREAD_COLOR) # 解码
|
||||||
|
img = self.preprocess_image(img, self.category)
|
||||||
|
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
|
||||||
|
except minio.error.S3Error:
|
||||||
|
img = np.random.randn(512, 512, 3)
|
||||||
|
return img
|
||||||
|
|
||||||
|
def callback(self, result, error):
|
||||||
|
if error:
|
||||||
|
generate_data = json.dumps({'status': 'FAILURE', 'message': f"{error}", 'data': f"{error}"})
|
||||||
|
self.redis_client.set(self.tasks_id, generate_data)
|
||||||
|
else:
|
||||||
|
images = result.as_numpy("IMAGES")
|
||||||
|
if images.ndim == 3:
|
||||||
|
images = images[None, ...]
|
||||||
|
images = (images * 255).round().astype("uint8")
|
||||||
|
pil_images = [Image.fromarray(image) for image in images]
|
||||||
|
|
||||||
|
# for i in range(len(pil_images)):
|
||||||
|
# pil = pil_images[i]
|
||||||
|
# pil.save(f'./temp_i2_{i}.png')
|
||||||
|
# self.image_grid(pil_images, rows, cols)
|
||||||
|
url_list = []
|
||||||
|
for i, image in enumerate(pil_images):
|
||||||
|
|
||||||
|
if self.category == "sketch":
|
||||||
|
image = remove_background(np.asarray(image))
|
||||||
|
image_url = upload_png_sd(image, user_id=self.user_id, category=f"{self.category}", object_name=f"{generate_uuid()}_{i}.png", )
|
||||||
|
url_list.append(image_url)
|
||||||
|
generate_data = json.dumps({'status': 'SUCCESS', 'message': 'success', 'data': f'{url_list}'})
|
||||||
|
self.channel.basic_publish(exchange='', routing_key=GI_RABBITMQ_QUEUES, body=generate_data)
|
||||||
|
logger.info(f" [x] Sent {generate_data}")
|
||||||
|
self.redis_client.set(self.tasks_id, generate_data)
|
||||||
|
|
||||||
|
def read_tasks_status(self):
|
||||||
|
status_data = json.loads(self.redis_client.get(self.tasks_id))
|
||||||
|
logging.info(f"{self.tasks_id} ===> {status_data}")
|
||||||
|
return status_data
|
||||||
|
|
||||||
|
# @RunTime
|
||||||
|
def get_result(self):
|
||||||
|
self.triton_client.get_model_metadata(model_name=self.model_name, model_version=self.version)
|
||||||
|
self.triton_client.get_model_config(model_name=self.model_name, model_version=self.version)
|
||||||
|
|
||||||
|
image = self.get_image()
|
||||||
|
|
||||||
|
# Input placeholder
|
||||||
|
prompt_in = tritonclient.grpc.InferInput(name="PROMPT", shape=(self.batch_size,), datatype="BYTES")
|
||||||
|
samples_in = tritonclient.grpc.InferInput("SAMPLES", (self.batch_size,), "INT32")
|
||||||
|
steps_in = tritonclient.grpc.InferInput("STEPS", (self.batch_size,), "INT32")
|
||||||
|
guidance_scale_in = tritonclient.grpc.InferInput("GUIDANCE_SCALE", (self.batch_size,), "FP32")
|
||||||
|
seed_in = tritonclient.grpc.InferInput("SEED", (self.batch_size,), "INT64")
|
||||||
|
input_images_in = tritonclient.grpc.InferInput("INPUT_IMAGES", image.shape, "FP16")
|
||||||
|
images = tritonclient.grpc.InferRequestedOutput(name="IMAGES",
|
||||||
|
# binary_data=False
|
||||||
|
)
|
||||||
|
mode_in = tritonclient.grpc.InferInput("MODE", (self.batch_size,), "INT32")
|
||||||
|
|
||||||
|
# Setting inputs
|
||||||
|
prompt_in.set_data_from_numpy(np.asarray([self.content] * self.batch_size, dtype=object))
|
||||||
|
samples_in.set_data_from_numpy(np.asarray([self.samples], dtype=np.int32))
|
||||||
|
steps_in.set_data_from_numpy(np.asarray([self.steps], dtype=np.int32))
|
||||||
|
guidance_scale_in.set_data_from_numpy(np.asarray([self.guidance_scale], dtype=np.float32))
|
||||||
|
seed_in.set_data_from_numpy(np.asarray([self.seed], dtype=np.int64))
|
||||||
|
input_images_in.set_data_from_numpy(image.astype(np.float16))
|
||||||
|
mode_in.set_data_from_numpy(np.asarray([self.mode], dtype=np.int32))
|
||||||
|
|
||||||
|
# inference
|
||||||
|
# @RunTime
|
||||||
|
def infer():
|
||||||
|
return self.triton_client.async_infer(
|
||||||
|
model_name=self.model_name,
|
||||||
|
model_version=self.version,
|
||||||
|
inputs=[prompt_in, samples_in, steps_in, guidance_scale_in, seed_in, input_images_in, mode_in],
|
||||||
|
outputs=[images],
|
||||||
|
callback=self.callback
|
||||||
|
)
|
||||||
|
|
||||||
|
ctx = infer()
|
||||||
|
time_out = 60
|
||||||
|
while time_out > 0:
|
||||||
|
generate_data = self.read_tasks_status()
|
||||||
|
if generate_data['status'] in ["REVOKED", "FAILURE"]:
|
||||||
|
ctx.cancel()
|
||||||
|
self.channel.basic_publish(exchange='', routing_key=GI_RABBITMQ_QUEUES, body=json.dumps(generate_data))
|
||||||
|
logger.info(f" [x] Sent {generate_data}")
|
||||||
|
break
|
||||||
|
elif generate_data['status'] == "SUCCESS":
|
||||||
|
break
|
||||||
|
time_out -= 1
|
||||||
|
time.sleep(1)
|
||||||
|
return self.read_tasks_status()
|
||||||
|
|
||||||
|
|
||||||
|
def infer_cancel(tasks_id):
|
||||||
|
redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True)
|
||||||
|
data = {'status': 'REVOKED', 'message': "revoked", 'data': 'revoked'}
|
||||||
|
generate_data = json.dumps({'status': 'REVOKED', 'message': "revoked", 'data': 'revoked'})
|
||||||
|
redis_client.set(tasks_id, generate_data)
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
# request_data = {
|
||||||
|
# "user_id": 78,
|
||||||
|
# "image_url": "123_123.png",
|
||||||
|
# "category": "print",
|
||||||
|
# "mode": 1,
|
||||||
|
# "str": "a simple print",
|
||||||
|
# "version": "1"
|
||||||
|
# }
|
||||||
|
rd = GenerateImageModel(
|
||||||
|
mode=1,
|
||||||
|
content='a blouse',
|
||||||
|
gender='',
|
||||||
|
user_id=89,
|
||||||
|
image_url='test/微信图片_20231206133428.jpg',
|
||||||
|
category='sketch',
|
||||||
|
version='1',
|
||||||
|
tasks_id='123456'
|
||||||
|
)
|
||||||
|
server = GenerateImage(rd)
|
||||||
|
server.get_result()
|
||||||
|
# print(infer_cancel(123456))
|
||||||
@@ -3,11 +3,8 @@ import mmcv
|
|||||||
import numpy as np
|
import numpy as np
|
||||||
import torch
|
import torch
|
||||||
from PIL import Image
|
from PIL import Image
|
||||||
|
|
||||||
import tritonclient.http as httpclient
|
import tritonclient.http as httpclient
|
||||||
|
|
||||||
import torch.nn.functional as F
|
import torch.nn.functional as F
|
||||||
|
|
||||||
from app.core.config import *
|
from app.core.config import *
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -9,25 +9,29 @@
|
|||||||
"""
|
"""
|
||||||
import io
|
import io
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
from PIL import Image
|
||||||
from minio import Minio
|
from minio import Minio
|
||||||
|
|
||||||
from app.core.config import *
|
from app.core.config import *
|
||||||
|
|
||||||
minio_client = Minio(
|
minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE)
|
||||||
f"{MINIO_IP}:{MINIO_PORT}",
|
|
||||||
access_key=MINIO_ACCESS,
|
|
||||||
secret_key=MINIO_SECRET,
|
|
||||||
secure=MINIO_SECURE)
|
|
||||||
|
|
||||||
|
|
||||||
def upload_png_sd(image, user_id, category, object_name):
|
def upload_png_sd(image, user_id, category, object_name):
|
||||||
try:
|
try:
|
||||||
image_data = io.BytesIO()
|
image_file = io.BytesIO()
|
||||||
image.save(image_data, format='PNG')
|
image = Image.fromarray(image)
|
||||||
image_data.seek(0)
|
image.save(image_file, format='JPEG')
|
||||||
image_bytes = image_data.read()
|
image_file.seek(0)
|
||||||
image_url = f"aida-users/{minio_client.put_object(f'{GI_MINIO_BUCKET}', f'{user_id}/{category}/{object_name}', io.BytesIO(image_bytes), len(image_bytes), content_type='image/png').object_name}"
|
minio_req = minio_client.put_object(
|
||||||
|
GI_MINIO_BUCKET,
|
||||||
|
f'{user_id}/{category}/{object_name}',
|
||||||
|
image_file,
|
||||||
|
len(image_file.getvalue()),
|
||||||
|
content_type='image/jpeg'
|
||||||
|
)
|
||||||
|
image_url = f"aida-users/{minio_req.object_name}"
|
||||||
return image_url
|
return image_url
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logging.warning(f"upload_png_mask runtime exception : {e}")
|
logging.warning(f"upload_png_mask runtime exception : {e}")
|
||||||
|
|||||||
@@ -24,11 +24,7 @@ class SuperResolution:
|
|||||||
self.user_id = self.tasks_id[self.tasks_id.rfind('-') + 1:]
|
self.user_id = self.tasks_id[self.tasks_id.rfind('-') + 1:]
|
||||||
self.sr_image_url = data.sr_image_url
|
self.sr_image_url = data.sr_image_url
|
||||||
self.sr_xn = data.sr_xn
|
self.sr_xn = data.sr_xn
|
||||||
self.minio_client = Minio(
|
self.minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE)
|
||||||
f"{MINIO_IP}:{MINIO_PORT}",
|
|
||||||
access_key=MINIO_ACCESS,
|
|
||||||
secret_key=MINIO_SECRET,
|
|
||||||
secure=MINIO_SECURE)
|
|
||||||
self.redis_client.set(self.tasks_id, json.dumps({'status': 'PENDING', 'message': "pending", 'data': ''}))
|
self.redis_client.set(self.tasks_id, json.dumps({'status': 'PENDING', 'message': "pending", 'data': ''}))
|
||||||
self.connection = pika.BlockingConnection(pika.ConnectionParameters(**RABBITMQ_PARAMS))
|
self.connection = pika.BlockingConnection(pika.ConnectionParameters(**RABBITMQ_PARAMS))
|
||||||
self.channel = self.connection.channel()
|
self.channel = self.connection.channel()
|
||||||
@@ -38,7 +34,7 @@ class SuperResolution:
|
|||||||
self.triton_client.close()
|
self.triton_client.close()
|
||||||
self.connection.close()
|
self.connection.close()
|
||||||
|
|
||||||
@RunTime
|
# @RunTime
|
||||||
def read_image(self):
|
def read_image(self):
|
||||||
try:
|
try:
|
||||||
image_data = self.minio_client.get_object(self.sr_image_url.split("/", 1)[0], self.sr_image_url.split("/", 1)[1])
|
image_data = self.minio_client.get_object(self.sr_image_url.split("/", 1)[0], self.sr_image_url.split("/", 1)[1])
|
||||||
@@ -56,7 +52,7 @@ class SuperResolution:
|
|||||||
logging.info(f"{self.tasks_id} ===> {status_data}")
|
logging.info(f"{self.tasks_id} ===> {status_data}")
|
||||||
return status_data
|
return status_data
|
||||||
|
|
||||||
@RunTime
|
# @RunTime
|
||||||
def infer(self, inputs):
|
def infer(self, inputs):
|
||||||
return self.triton_client.async_infer(
|
return self.triton_client.async_infer(
|
||||||
model_name=SR_MODEL_NAME,
|
model_name=SR_MODEL_NAME,
|
||||||
@@ -64,7 +60,7 @@ class SuperResolution:
|
|||||||
callback=self.callback
|
callback=self.callback
|
||||||
)
|
)
|
||||||
|
|
||||||
@RunTime
|
# @RunTime
|
||||||
def sr_result(self):
|
def sr_result(self):
|
||||||
sample = self.read_image()
|
sample = self.read_image()
|
||||||
if self.sr_xn == 2:
|
if self.sr_xn == 2:
|
||||||
|
|||||||
@@ -1,2 +0,0 @@
|
|||||||
a = "123-86"
|
|
||||||
print(a[a.rfind('-') + 1:])
|
|
||||||
@@ -17,7 +17,7 @@ LOGGER_CONFIG_DICT = {
|
|||||||
"class": "logging.handlers.RotatingFileHandler",
|
"class": "logging.handlers.RotatingFileHandler",
|
||||||
"level": "INFO",
|
"level": "INFO",
|
||||||
"formatter": "simple",
|
"formatter": "simple",
|
||||||
"filename": LOGS_PATH,
|
"filename": f"{LOGS_PATH}info.log",
|
||||||
"maxBytes": 10485760,
|
"maxBytes": 10485760,
|
||||||
"backupCount": 50,
|
"backupCount": 50,
|
||||||
"encoding": "utf8",
|
"encoding": "utf8",
|
||||||
@@ -26,7 +26,7 @@ LOGGER_CONFIG_DICT = {
|
|||||||
"class": "logging.handlers.RotatingFileHandler",
|
"class": "logging.handlers.RotatingFileHandler",
|
||||||
"level": "ERROR",
|
"level": "ERROR",
|
||||||
"formatter": "simple",
|
"formatter": "simple",
|
||||||
"filename": LOGS_PATH,
|
"filename": f"{LOGS_PATH}error.log",
|
||||||
"maxBytes": 10485760,
|
"maxBytes": 10485760,
|
||||||
"backupCount": 20,
|
"backupCount": 20,
|
||||||
"encoding": "utf8",
|
"encoding": "utf8",
|
||||||
@@ -35,7 +35,7 @@ LOGGER_CONFIG_DICT = {
|
|||||||
"class": "logging.handlers.RotatingFileHandler",
|
"class": "logging.handlers.RotatingFileHandler",
|
||||||
"level": "DEBUG",
|
"level": "DEBUG",
|
||||||
"formatter": "simple",
|
"formatter": "simple",
|
||||||
"filename": LOGS_PATH,
|
"filename": f"{LOGS_PATH}debug.log",
|
||||||
"maxBytes": 10485760,
|
"maxBytes": 10485760,
|
||||||
"backupCount": 50,
|
"backupCount": 50,
|
||||||
"encoding": "utf8",
|
"encoding": "utf8",
|
||||||
|
|||||||
Reference in New Issue
Block a user