feat: 代码梳理,移除所有敏感密钥,通过环境变量方式配置
All checks were successful
git commit AiDA python develop 分支构建部署 / scheduled_deploy (push) Has been skipped

This commit is contained in:
zcr
2025-12-30 16:49:08 +08:00
parent 1be716e414
commit 18024a2d70
167 changed files with 5283 additions and 10464 deletions

View File

@@ -13,17 +13,19 @@ import logging
import cv2
import numpy as np
import pika
import tritonclient.grpc as grpcclient
from PIL import Image
from celery import Celery
from tritonclient.utils import np_to_triton_dtype
from app.core.config import *
from app.core.config import settings, GPI_MODEL_URL, GPI_MODEL_NAME_SINGLE, GPI_MODEL_NAME_OVERALL, BATCH_GPI_RABBITMQ_QUEUES
from app.core.rabbit_mq_config import RABBITMQ_PARAMS
from app.schemas.generate_image import BatchGenerateProductImageModel, ProductItemModel
from app.service.generate_image.utils.upload_sd_image import upload_SDXL_image
from app.service.utils.oss_client import oss_get_image
from app.service.utils.new_oss_client import oss_get_image
celery_app = Celery('product_tasks', broker=f'amqp://rabbit:123456@18.167.251.121:5672//', backend='rpc://', BROKER_CONNECTION_RETRY_ON_STARTUP=True)
celery_app = Celery('product_tasks', broker=f'amqp://rabbit:123456@18.167.251.121:5672//', backend='rpc://')
celery_app.conf.task_default_queue = 'queue_product'
celery_app.conf.worker_log_format = '%(asctime)s %(filename)s [line:%(lineno)d] %(levelname)s %(message)s'
celery_app.conf.worker_hijack_root_logger = False
@@ -104,7 +106,7 @@ def batch_generate_product(batch_request_data):
result_data_list.append(data)
# 发送每条结果
if DEBUG:
if settings.DEBUG:
logger.info(f" [x]Queue : {BATCH_GPI_RABBITMQ_QUEUES} | tasks_id{tasks_id} | progress{i + 1}/{batch_size} | result_data{data}")
print(f" [x]Queue : {BATCH_GPI_RABBITMQ_QUEUES} | tasks_id{tasks_id} | progress{i + 1}/{batch_size} | result_data{data}")
else:
@@ -112,7 +114,7 @@ def batch_generate_product(batch_request_data):
logger.info(f" [x]Queue : {BATCH_GPI_RABBITMQ_QUEUES} | tasks_id{tasks_id} | progress{i + 1}/{batch_size} | result_data{data}")
# 任务完成,发送所有数据结果
if DEBUG:
if settings.DEBUG:
print(result_data_list)
logger.info(f" [x]Queue : {BATCH_GPI_RABBITMQ_QUEUES} | batch_tasks_id{batch_tasks_id} | progressOK | result_data_list{result_data_list}")
print(f" [x]Queue : {BATCH_GPI_RABBITMQ_QUEUES} | batch_tasks_id{batch_tasks_id} | progressOK | result_data_list{result_data_list}")

View File

@@ -12,18 +12,20 @@ import logging
import cv2
import numpy as np
import pika
import tritonclient.grpc as grpcclient
from PIL import Image
from celery import Celery
from tritonclient.utils import np_to_triton_dtype
from app.core.config import *
from app.core.config import settings, GRI_MODEL_URL, BATCH_GRI_RABBITMQ_QUEUES, GRI_MODEL_NAME_SINGLE, GRI_MODEL_NAME_OVERALL
from app.core.rabbit_mq_config import RABBITMQ_PARAMS
from app.schemas.generate_image import BatchGenerateRelightImageModel, RelightItemModel
from app.service.generate_image.utils.upload_sd_image import upload_SDXL_image
from app.service.utils.oss_client import oss_get_image
from app.service.utils.new_oss_client import oss_get_image
logger = logging.getLogger()
celery_app = Celery('relight_tasks', broker=f'amqp://rabbit:123456@18.167.251.121:5672//', backend='rpc://', BROKER_CONNECTION_RETRY_ON_STARTUP=True)
celery_app = Celery('relight_tasks', broker=f'amqp://rabbit:123456@18.167.251.121:5672//', backend='rpc://')
celery_app.conf.task_default_queue = 'queue_relight'
celery_app.conf.worker_log_format = '%(asctime)s %(filename)s [line:%(lineno)d] %(levelname)s %(message)s'
celery_app.conf.worker_hijack_root_logger = False
@@ -133,14 +135,14 @@ def batch_generate_relight(batch_request_data):
result_data_list.append(data)
# 发送每条结果
if DEBUG:
if settings.DEBUG:
logger.info(f" [x]Queue : {BATCH_GRI_RABBITMQ_QUEUES} | tasks_id{tasks_id} | progress{i + 1}/{batch_size} | result_data{data}")
print(f" [x]Queue : {BATCH_GRI_RABBITMQ_QUEUES} | tasks_id{tasks_id} | progress{i + 1}/{batch_size} | result_data{data}")
else:
publish_status(tasks_id, f"{i + 1}/{batch_size}", data)
logger.info(f" [x]Queue : {BATCH_GRI_RABBITMQ_QUEUES} | tasks_id{tasks_id} | progress{i + 1}/{batch_size} | result_data{data}")
# 任务完成,发送所有数据结果
if DEBUG:
if settings.DEBUG:
print(result_data_list)
logger.info(f" [x]Queue : {BATCH_GRI_RABBITMQ_QUEUES} | batch_tasks_id{batch_tasks_id} | progressOK | result_data_list{result_data_list}")
print(f" [x]Queue : {BATCH_GRI_RABBITMQ_QUEUES} | batch_tasks_id{batch_tasks_id} | progressOK | result_data_list{result_data_list}")

View File

@@ -14,22 +14,24 @@ from io import BytesIO
import imageio
import numpy as np
import pika
import tritonclient.grpc as grpcclient
from PIL import Image
from celery import Celery
from minio import Minio
from tritonclient.utils import np_to_triton_dtype
from app.core.config import *
from app.core.config import settings, BATCH_PS_RABBITMQ_QUEUES, PT_MODEL_URL
from app.core.rabbit_mq_config import RABBITMQ_PARAMS
from app.schemas.pose_transform import BatchPoseTransformModel
from app.service.generate_image.utils.pose_transform_upload import upload_gif, upload_video
from app.service.utils.new_oss_client import oss_upload_image
from app.service.utils.oss_client import oss_get_image
from app.service.utils.new_oss_client import oss_get_image
minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE)
minio_client = Minio(settings.MINIO_URL, access_key=settings.MINIO_ACCESS, secret_key=settings.MINIO_SECRET, secure=settings.MINIO_SECURE)
logger = logging.getLogger()
celery_app = Celery('post_transform_tasks', broker=f'amqp://rabbit:123456@18.167.251.121:5672//', backend='rpc://', BROKER_CONNECTION_RETRY_ON_STARTUP=True)
celery_app = Celery('post_transform_tasks', broker=f'amqp://rabbit:123456@18.167.251.121:5672//', backend='rpc://')
celery_app.conf.task_default_queue = 'queue_post_transform'
celery_app.conf.worker_log_format = '%(asctime)s %(filename)s [line:%(lineno)d] %(levelname)s %(message)s'
celery_app.conf.worker_hijack_root_logger = False
@@ -45,7 +47,7 @@ def upload_first_image(image, user_id, category, file_name):
image_data.seek(0)
image_bytes = image_data.read()
object_name = f'{user_id}/{category}/{file_name}'
req = oss_upload_image(oss_client=minio_client, bucket=GI_MINIO_BUCKET, object_name=object_name, image_bytes=image_bytes)
req = oss_upload_image(oss_client=minio_client, bucket="aida-users", object_name=object_name, image_bytes=image_bytes)
image_url = f"aida-users/{object_name}"
return image_url
except Exception as e:
@@ -141,7 +143,7 @@ def batch_generate_pose_transform(batch_request_data):
print(e)
data = {}
result_url_list.append(data)
if DEBUG is False:
if settings.DEBUG is False:
if i + 1 < batch_size:
publish_status(tasks_id, f"{i + 1}/{batch_size}", data)
logger.info(f" [x]Queue : {BATCH_PS_RABBITMQ_QUEUES} | tasks_id{tasks_id} | progress{i + 1}/{batch_size} | image_url{image_url}")

View File

@@ -1,16 +1,11 @@
from app.schemas.generate_image import BatchGenerateRelightImageModel, BatchGenerateProductImageModel
from app.schemas.generate_image import BatchGenerateProductImageModel
from app.service.generate_batch_image.service_batch_generate_product_image import batch_generate_product
from app.service.generate_batch_image.service_batch_generate_relight_image import batch_generate_relight
if __name__ == '__main__':
rd = BatchGenerateProductImageModel(
tasks_id="test1-89",
image_strength=0.7,
prompt=" The best quality, masterpiece, real image.Outwear,high quality clothing details,8K realistic,HDR",
image_url="aida-results/result_40b1a2fe-e220-11ef-9bfa-0242ac150003.png",
product_type="single",
batch_size=2
batch_tasks_id="",
batch_data_list="",
user_id=""
)
x = batch_generate_product.delay(rd.dict())
print(x)