import logging
import os

from dashscope import Generation
# from langchain.chains import LLMChain
from langchain_community.chat_models import QianfanChatEndpoint, ChatTongyi
# from langchain.chat_models import ChatOpenAI
from langchain_core.prompts import SystemMessagePromptTemplate, HumanMessagePromptTemplate, ChatPromptTemplate
from langchain_core.runnables import RunnableSequence

from app.core.config import OPENAI_MODEL, OPENAI_API_KEY

# os.environ["http_proxy"] = "http://127.0.0.1:7890"
# os.environ["https_proxy"] = "http://127.0.0.1:7890"

# llm = ChatOpenAI(model_name=OPENAI_MODEL,
#                  openai_api_key=OPENAI_API_KEY,
#                  temperature=0)
def translate_to_en(text):
    """Translate *text* into grammatically correct English via the Qwen LLM.

    The system prompt instructs the model to echo input back unchanged when
    it is already English (or plain letters/digits such as "cat" or "1"),
    and to fix grammatical errors otherwise.

    Args:
        text: Source text in any language.

    Returns:
        The English translation (or pass-through) as a string, taken from
        ``response.output.choices[0].message.content``.
    """
    template = (
        """You are a translation expert, proficient in various languages.
And can translate various languages into English.
Please translate to grammatically correct English regardless of the input language.
If the input is already in English, or consists of letters or numbers such as "cat", "abc", or "1",
output the input text exactly as it is without any modifications or additions.
If there are grammatical errors, correct them and then output the sentence."""
    )
    messages = [
        {
            "content": template,  # system message
            "role": "system"
        },
        {
            "content": text,  # user message
            "role": "user"
        }
    ]
    first_response = get_response(messages)
    assistant_output = first_response.output.choices[0].message
    print("translate result : {}".format(assistant_output))
    return assistant_output.content
def get_response(messages):
    """Call the DashScope Qwen chat-completion API with *messages*.

    Args:
        messages: List of ``{"role": ..., "content": ...}`` chat messages.

    Returns:
        The raw DashScope response object; the assistant reply lives at
        ``response.output.choices[0].message``.
    """
    # SECURITY: never hard-code API keys in source. Prefer the environment
    # variable; the literal fallback only preserves existing behavior and
    # should be removed (and the key rotated) — TODO.
    api_key = os.getenv("DASHSCOPE_API_KEY", "sk-a6bdf594e1f54a4aa3e9d4d48f8c661f")
    response = Generation.call(
        model='qwen-max',
        api_key=api_key,
        messages=messages,
        # seed=random.randint(1, 10000),  # random seed; defaults to 1234 when unset
        result_format='message',  # return output in chat-message form
        enable_search=True,  # bug fix: was the string 'True', not a boolean
    )
    return response
def main():
    """Demo entry point: translate a sample word and print the result."""
    translated = translate_to_en("fire")
    print(translated)


if __name__ == "__main__":
    # Run the translation demo only when executed as a script.
    main()