Files
2024-10-22 12:11:04 +08:00

74 lines
3.2 KiB
Python

import os
import platform

# On Linux, swap in the pysqlite3 wheel for the stdlib sqlite3 BEFORE
# chromadb is imported, so chromadb binds to the newer bundled SQLite
# (presumably to satisfy chromadb's minimum-sqlite3 requirement — the
# system library on many distros is too old).
if platform.system() == 'Linux':
    __import__('pysqlite3')
    import sys
    sys.modules['sqlite3'] = sys.modules.pop('pysqlite3')
import chromadb
from langchain_openai import OpenAIEmbeddings
from langchain_chroma import Chroma
import tritonclient.grpc as grpcclient
from minio import Minio
# --- Service credentials -----------------------------------------------------
# SECURITY NOTE(review): these credentials are committed in source. They are
# retained only as backward-compatible fallbacks; each can now be overridden
# via the environment variable of the same name. The committed values should
# be rotated and purged from version control.
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "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")
OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://pangkaichen-openai-prox-98.deno.dev/v1")

# LangChain / LangSmith tracing settings
LANGCHAIN_TRACING_V2 = "true"
LANGCHAIN_ENDPOINT = "https://api.smith.langchain.com"
LANGCHAIN_API_KEY = os.environ.get("LANGCHAIN_API_KEY", "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")
LANGCHAIN_PROJECT = "intelligent_fashion_agent"

# MinIO object storage
MINIO_URL = "18.167.251.121:8000"
MINIO_SECURE = False  # plain HTTP — the endpoint is addressed without TLS
MINIO_ACCESS = os.environ.get("MINIO_ACCESS", "e8zc55mzDOh4IzRrZ9Oa")
MINIO_SECRET = os.environ.get("MINIO_SECRET", "uHfqJ7UkwA1PTDGfnA44Hp9ux5YkZTkzZLjeOYhE")
MINIO_BUCKET = "test"
MINIO_CLIENT = Minio(MINIO_URL, access_key=MINIO_ACCESS, secret_key=MINIO_SECRET, secure=MINIO_SECURE)

# Export settings into the process environment for libraries (openai,
# langchain) that read their configuration from os.environ.
os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY
os.environ["OPENAI_API_BASE"] = OPENAI_API_BASE
os.environ["LANGCHAIN_TRACING_V2"] = LANGCHAIN_TRACING_V2
os.environ["LANGCHAIN_ENDPOINT"] = LANGCHAIN_ENDPOINT
os.environ["LANGCHAIN_API_KEY"] = LANGCHAIN_API_KEY
os.environ["LANGCHAIN_PROJECT"] = LANGCHAIN_PROJECT
# Default Triton endpoint URLs; the OP_SERVER selection further down may
# overwrite these for non-local deployments.
os.environ["HTTPCLIENT_URL"] = "localhost:8000"
os.environ["GRPCCLIENT_URL"] = "localhost:8001"
# Local on-disk Chroma vector store shared by all collections below.
chroma_client = chromadb.PersistentClient(path=r"./vector_database")

# Embedding function shared by every collection; reads the API key/base
# exported into os.environ above.
embedding_fn = OpenAIEmbeddings(
    openai_api_key=os.environ["OPENAI_API_KEY"],
    openai_api_base=os.environ["OPENAI_API_BASE"],
    model="text-embedding-3-small",
)


def _chroma_collection(name: str) -> Chroma:
    """Return a LangChain wrapper over the named Chroma collection."""
    return Chroma(client=chroma_client, collection_name=name,
                  embedding_function=embedding_fn)


# Vector collections: product descriptions, lookbook documents, accessories.
ITEM_COLLECTION = _chroma_collection("fashion-product-description")
DOCUMENT_COLLECTION = _chroma_collection("lookbook")
ACCESSORY_COLLECTION = _chroma_collection("accessory")
# Deployment target for the model-serving backends; selects which Triton
# HTTP/gRPC endpoints and OOTD service URL this process talks to.
OP_SERVER = os.environ.get('OP_SERVER', 'local')  # "local", "A6000" or "aws"

# Per-target endpoint table. An unrecognized OP_SERVER value applies no
# overrides, keeping the "localhost" Triton defaults exported earlier and
# leaving OOTD_URL unset — same behavior as the original if/elif chain.
_SERVER_ENDPOINTS = {
    "local": {
        "HTTPCLIENT_URL": "localhost:8000",
        "GRPCCLIENT_URL": "localhost:8001",
        "OOTD_URL": "http://localhost:5000/ootd_dc",
    },
    "A6000": {
        "HTTPCLIENT_URL": "host.docker.internal:20020",
        # NOTE(review): tritonclient.grpc normally expects a bare
        # "host:port" URL; an "https://" scheme through the ngrok tunnel is
        # unusual — confirm the gRPC client actually connects with this.
        "GRPCCLIENT_URL": "https://relaxing-unbiased-herring.ngrok-free.app",
        "OOTD_URL": "http://localhost:5000/ootd_dc",
    },
    "aws": {
        "HTTPCLIENT_URL": "host.docker.internal:8000",
        "GRPCCLIENT_URL": "https://relaxing-unbiased-herring.ngrok-free.app",
        "OOTD_URL": "https://muskox-many-bluegill.ngrok-free.app/ootd_dc",
    },
}
os.environ.update(_SERVER_ENDPOINTS.get(OP_SERVER, {}))

# Shared Triton gRPC client used for inference requests.
triton_client = grpcclient.InferenceServerClient(url=os.environ['GRPCCLIENT_URL'])