From 216ca4587d7d65b164fb4fbc0338958f82c61a8f Mon Sep 17 00:00:00 2001
From: zhouchengrong
Date: Sun, 1 Dec 2024 15:35:09 +0800
Subject: [PATCH] =?UTF-8?q?feat=20=20flux=20=E9=83=A8=E7=BD=B2=20fix?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/service/generate_image/service_generate_image.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/app/service/generate_image/service_generate_image.py b/app/service/generate_image/service_generate_image.py
index 8cf7cf9..d34db5e 100644
--- a/app/service/generate_image/service_generate_image.py
+++ b/app/service/generate_image/service_generate_image.py
@@ -35,6 +35,7 @@ class GenerateImage:
         # self.connection = pika.BlockingConnection(pika.ConnectionParameters(**RABBITMQ_PARAMS))
         # self.channel = self.connection.channel()
         # self.minio_client = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE)
+        self.version = request_data.version
         if request_data.version == "fast":
             self.grpc_client = grpcclient.InferenceServerClient(url=FAST_GI_MODEL_URL)
         else:
@@ -146,7 +147,10 @@
         input_mode.set_data_from_numpy(mode_obj)
 
         inputs = [input_text, input_image, input_mode]
-        ctx = self.grpc_client.async_infer(model_name=GI_MODEL_NAME, inputs=inputs, callback=self.callback)
+        if self.version == "fast":
+            ctx = self.grpc_client.async_infer(model_name=FAST_GI_MODEL_NAME, inputs=inputs, callback=self.callback)
+        else:
+            ctx = self.grpc_client.async_infer(model_name=GI_MODEL_NAME, inputs=inputs, callback=self.callback)
         time_out = 600
         generate_data = None
         while time_out > 0: