import logging
import os

from dashscope import Generation
# from langchain.chains import LLMChain
from langchain_community.chat_models import QianfanChatEndpoint, ChatTongyi
# from langchain.chat_models import ChatOpenAI
from langchain_core.prompts import (
    SystemMessagePromptTemplate,
    HumanMessagePromptTemplate,
    ChatPromptTemplate,
)
from langchain_core.runnables import RunnableSequence

from app.core.config import OPENAI_MODEL, OPENAI_API_KEY

# os.environ["http_proxy"] = "http://127.0.0.1:7890"
# os.environ["https_proxy"] = "http://127.0.0.1:7890"

# llm = ChatOpenAI(model_name=OPENAI_MODEL,
#                  openai_api_key=OPENAI_API_KEY,
#                  temperature=0)

logger = logging.getLogger(__name__)

# System prompt instructing the model to act as a pass-through-aware
# English translator (already-English / trivial inputs are echoed verbatim).
_TRANSLATE_SYSTEM_PROMPT = (
    """You are a translation expert, proficient in various languages. And can translate various languages into English. Please translate to grammatically correct English regardless of the input language. If the input is already in English, or consists of letters or numbers such as "cat", "abc", or "1", output the input text exactly as it is without any modifications or additions. If there are grammatical errors, correct them and then output the sentence."""
)


def translate_to_en(text):
    """Translate *text* into English using the qwen-max model.

    Args:
        text: Input text in any language.

    Returns:
        The model's translated (or echoed) text as a string.

    Raises:
        AttributeError: if the API response does not contain the expected
            ``output.choices[0].message`` structure (e.g. on an API error).
    """
    messages = [
        {
            "role": "system",  # system message: translation instructions
            "content": _TRANSLATE_SYSTEM_PROMPT,
        },
        {
            "role": "user",  # user message: the text to translate
            "content": text,
        },
    ]
    response = get_response(messages)
    assistant_message = response.output.choices[0].message
    # Lazy %-style args: the message object is only stringified if the
    # log record is actually emitted.
    logger.info("translate result: %s", assistant_message)
    return assistant_message.content


def get_response(messages):
    """Call the DashScope Generation API with the given chat messages.

    Args:
        messages: List of ``{"role": ..., "content": ...}`` chat messages.

    Returns:
        The raw DashScope ``GenerationResponse`` object.
    """
    # SECURITY: an API key was previously hard-coded here (and a second one
    # left in a comment). Both should be considered leaked and rotated.
    # Read the key from the environment instead of embedding it in source.
    api_key = os.environ.get("DASHSCOPE_API_KEY")
    response = Generation.call(
        model='qwen-max',
        api_key=api_key,
        messages=messages,
        # seed=random.randint(1, 10000),  # random seed; defaults to 1234 if unset
        result_format='message',  # return output in message form
        enable_search=True,  # was the string 'True'; the API expects a boolean
    )
    return response


def main():
    """Smoke-test entry point: translate a sample word and print it."""
    text = translate_to_en("fire")
    print(text)


if __name__ == "__main__":
    main()