diff --git a/.env_local b/.env_local index ec2ced2..bfdfcfc 100644 --- a/.env_local +++ b/.env_local @@ -1,2 +1,2 @@ GEMINI_API_KEY=<REDACTED-SECRET--KEY-WAS-COMMITTED-AND-MUST-BE-ROTATED> -GOOGLE_APPLICATION_CREDENTIALS="/workspace/lc_stylist_agent/request.json" \ No newline at end of file +GOOGLE_APPLICATION_CREDENTIALS="/workspace/Trinity/Litserve_LC_Prod/lc_stylist_agent/app/request.json" \ No newline at end of file diff --git a/app/core/config.py b/app/core/config.py index c5a3ea1..6dd45a3 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -31,7 +31,7 @@ class Settings(BaseSettings): STYLIST_GUIDE_DIR: str = Field(default="/workspace/lc_stylist_agent/app/core/data/stylist_guide", description="风格指南文本目录") # 向量数据库配置参数 - VECTOR_DB_DIR: str = Field(default="./app/core/data/db", description="向量数据库目录") + VECTOR_DB_DIR: str = Field(default="./app/db", description="向量数据库目录") COLLECTION_NAME: str = Field(default="lc_clothing_embedding", description="向量数据库集合名称") EMBEDDING_MODEL_NAME: str = Field(default="openai/clip-vit-base-patch32", description="CLIP嵌入模型名称") diff --git a/app/main.py b/app/main.py index 7f36184..5680ce0 100644 --- a/app/main.py +++ b/app/main.py @@ -23,4 +23,4 @@ if __name__ == "__main__": chat_boot_api = LCChatBot(enable_async=True, stream=True, api_path='/api/v1/chatbot') agent_api = LCAgent(enable_async=True, api_path='/api/v1/agent') server = ls.LitServer([chat_boot_api, agent_api]) - server.run(port=8001) + server.run(port=8000) diff --git a/request.json b/app/request.json similarity index 100% rename from request.json rename to app/request.json diff --git a/app/server/ChatbotAgent/agent_server.py b/app/server/ChatbotAgent/agent_server.py index e1a40ad..3e2c5fd 100644 --- a/app/server/ChatbotAgent/agent_server.py +++ b/app/server/ChatbotAgent/agent_server.py @@ -1,6 +1,7 @@ import asyncio import json import logging +import os import uuid from typing import List, Dict @@ -33,7 +34,7 @@ class LCAgent(ls.LitAPI): key_prefix=settings.REDIS_HISTORY_KEY_PREFIX ) 
self.vector_db = VectorDatabase( - vector_db_dir=settings.VECTOR_DB_DIR, + vector_db_dir=os.getenv('VECTOR_DB_DIR', '/app/app/db'), collection_name=settings.COLLECTION_NAME, embedding_model_name=settings.EMBEDDING_MODEL_NAME ) @@ -44,6 +45,16 @@ class LCAgent(ls.LitAPI): } async def decode_request(self, request: AgentRequestModel): + """ + [自动生成文档] + 该方法将 HTTP 请求体解码为模型所需的输入格式。 + + Args: + request: 包含 prompt, history, temperature 的 ChatBotRequest 对象。 + + Returns: + prompt, history, temperature + """ logger.info(f"request: {request.model_dump()}") return request @@ -81,10 +92,12 @@ class LCAgent(ls.LitAPI): history_messages = self.redis.get_history(user_id) input_message = "\n".join([f"{msg.role.value}: {msg.content}" for msg in history_messages]) # 临时调用 LLM 或使用本地逻辑生成总结 - summary = await self.llm.generate_response(history=[Message(role=Role.USER, content=input_message)], system_prompt=SUMMARY_PROMPT) + summary = await self.llm.generate_response(history=[Message(role=Role.USER, content=input_message)], + system_prompt=SUMMARY_PROMPT) return summary - async def recommend_outfit(self, request_summary: str, stylist_name: str, start_outfit=None, num_outfits: int = 1, user_id: str = "test"): + async def recommend_outfit(self, request_summary: str, stylist_name: str, start_outfit=None, num_outfits: int = 1, + user_id: str = "test"): """ 基于用户的对话历史和需求,推荐一套搭配。 diff --git a/docker-compose.yml b/docker-compose.yml index d225ad2..e2ba846 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,6 +8,6 @@ services: GOOGLE_APPLICATION_CREDENTIALS: /app/request.json volumes: - ./app:/app/app - - ./logs:/app/logs + - ./.env:/app/.env ports: - - "10200:8000" \ No newline at end of file + - "10070:8000" \ No newline at end of file