feat translator 切换ollama

fix
This commit is contained in:
zhouchengrong
2024-12-02 20:31:46 +08:00
parent dbea3bc975
commit bf8b3b417b
2 changed files with 75 additions and 23 deletions

View File

@@ -6,7 +6,7 @@ from fastapi import APIRouter, HTTPException
from app.schemas.prompt_generation import PromptGenerationImageModel from app.schemas.prompt_generation import PromptGenerationImageModel
from app.schemas.response_template import ResponseModel from app.schemas.response_template import ResponseModel
from app.service.prompt_generation.chatgpt_for_translation import translate_to_en from app.service.prompt_generation.chatgpt_for_translation import translate_to_en, get_translation_from_llama3
router = APIRouter() router = APIRouter()
logger = logging.getLogger() logger = logging.getLogger()
@@ -26,7 +26,7 @@ def prompt_generation(request_data: PromptGenerationImageModel):
""" """
try: try:
logger.info(f"prompt_generation request item is : @@@@@@:{request_data}") logger.info(f"prompt_generation request item is : @@@@@@:{request_data}")
data = translate_to_en("[" + request_data.text + "]") data = get_translation_from_llama3("[" + request_data.text + "]")
logger.info(f"prompt_generation response @@@@@@:{data}") logger.info(f"prompt_generation response @@@@@@:{data}")
except Exception as e: except Exception as e:
logger.warning(f"prompt_generation Run Exception @@@@@@:{e}") logger.warning(f"prompt_generation Run Exception @@@@@@:{e}")

View File

@@ -1,11 +1,16 @@
import json
import logging import logging
import time
import requests
from dashscope import Generation from dashscope import Generation
from requests import RequestException from requests import RequestException
from retry import retry from retry import retry
from app.core.config import QWEN_API_KEY from app.core.config import QWEN_API_KEY
logger = logging.getLogger(__name__)
# os.environ["http_proxy"] = "http://127.0.0.1:7890" # os.environ["http_proxy"] = "http://127.0.0.1:7890"
# os.environ["https_proxy"] = "http://127.0.0.1:7890" # os.environ["https_proxy"] = "http://127.0.0.1:7890"
@@ -15,26 +20,35 @@ from app.core.config import QWEN_API_KEY
# openai_api_key=OPENAI_API_KEY, # openai_api_key=OPENAI_API_KEY,
# temperature=0) # temperature=0)
# prefix_for_llama = (
# """
# Translate everything within the brackets [] into English.
# Never translate or modify any English input.
# The input must be fully translated into coherent English sentences.
# Please only output the translated result.\n
# """
# )
def translate_to_en(text): def translate_to_en(text):
template = ( # template = (
"""You are a translation expert, proficient in various languages. # """You are a translation expert, proficient in various languages.
And can translate various languages into English. # And can translate various languages into English.
Please translate to grammatically correct English regardless of the input language. # Please translate to grammatically correct English regardless of the input language.
If the input is already in English, or consists of letters or numbers such as "cat", "abc", or "1", # If the input is already in English, or consists of letters or numbers such as "cat", "abc", or "1",
output the input text exactly as it is without any modifications or additions. # output the input text exactly as it is without any modifications or additions.
If there are grammatical errors, correct them and then output the sentence.""" # If there are grammatical errors, correct them and then output the sentence."""
) # )
#
prefix = ( # prefix = (
""" # """
Translate everything within the brackets [] into English. # Translate everything within the brackets [] into English.
Never translate or modify any English input. # Never translate or modify any English input.
The input must be fully translated into coherent English sentences. # The input must be fully translated into coherent English sentences.
Never present the translation results in the format # Never present the translation results in the format
"The translation of \"Material suave\" into English would be \"Smooth material.\"". Instead, directly output "Smooth material". # "The translation of \"Material suave\" into English would be \"Smooth material.\"". Instead, directly output "Smooth material".
""" # """
) # )
messages = [ messages = [
# { # {
# Translate the entire text and ensure the output is a complete and coherent sentence in English. # Translate the entire text and ensure the output is a complete and coherent sentence in English.
@@ -43,7 +57,7 @@ def translate_to_en(text):
# }, # },
{ {
# "content": input('请输入:'), # 用户message # "content": input('请输入:'), # 用户message
"content": prefix + text, # 用户message "content": text, # 用户message
"role": "user" "role": "user"
} }
] ]
@@ -52,12 +66,18 @@ def translate_to_en(text):
print("input : {}, translate result : {}".format(text, assistant_output.content)) print("input : {}, translate result : {}".format(text, assistant_output.content))
return assistant_output.content return assistant_output.content
# llama3专用
# data = get_translation_from_llama3(text)
# translation = data
# # print("Response from llama3 : " + translation)
# return translation
@retry(exceptions=RequestException, tries=3, delay=1) @retry(exceptions=RequestException, tries=3, delay=1)
def get_response(messages): def get_response(messages):
response = Generation.call( response = Generation.call(
model='qwen-turbo', model='qwen-turbo',
api_key= QWEN_API_KEY, api_key=QWEN_API_KEY,
messages=messages, messages=messages,
# seed=random.randint(1, 10000), # 设置随机数种子seed如果没有设置则随机数种子默认为1234 # seed=random.randint(1, 10000), # 设置随机数种子seed如果没有设置则随机数种子默认为1234
result_format='message', # 将输出设置为message形式 result_format='message', # 将输出设置为message形式
@@ -65,9 +85,41 @@ def get_response(messages):
) )
return response return response
def get_translation_from_llama3(text):
    """Translate *text* to English via the local Ollama "translator" model.

    The text is wrapped in square brackets ("[text]") because the model's
    prompt template expects bracketed input (see the commented-out
    ``prefix_for_llama`` prompt above).

    Args:
        text: Source text to translate (any language).

    Returns:
        The translated string on HTTP 200, otherwise ``None`` so callers
        can detect failure and fall back.
    """
    start_time = time.time()
    url = "http://10.1.1.240:11434/api/generate"  # Ollama generate endpoint
    # url = "http://10.1.1.240:1143/api/generate"
    payload = {
        "model": "translator",
        "prompt": f"[{text}]",
        "stream": False,  # ask for one complete response, not a token stream
    }
    headers = {'Content-Type': 'application/json'}
    # timeout keeps the API endpoint from hanging forever if the model
    # server is down or overloaded
    response = requests.post(url, data=json.dumps(payload), headers=headers,
                             timeout=30)
    elapsed = time.time() - start_time
    if response.status_code == 200:
        # response.json() is equivalent to json.loads(response.content)
        # but handles the content decoding for us
        resp = response.json().get("response")
        logger.info(f"translation server runtime is {elapsed} , response is {resp}")
        print("input : {}, translate result : {}".format(text, resp))
        return resp
    # Non-200: this is an error condition, so log at warning level instead
    # of info, and return None explicitly rather than falling off the end.
    logger.warning(
        f"translation server runtime is {elapsed} , response is {response.content}"
    )
    print(f"Request failed with status code {response.status_code}")
    print(response.text)
    return None
def main():
    """Manual smoke test: translate one sample phrase via the Ollama model."""
    # Pass the raw text: get_translation_from_llama3 wraps its argument in
    # brackets itself, so passing "[火焰]" here would double-bracket the
    # prompt as "[[火焰]]".
    text = get_translation_from_llama3("火焰")
    print(text)