Translation: add llama3 usage

2024-12-02 18:20:45 +08:00
parent 0a17128140
commit bc9aa03445


@@ -1,5 +1,7 @@
import logging
import json
import requests
from dashscope import Generation
from requests import RequestException
from retry import retry
@@ -15,6 +17,15 @@ from app.core.config import QWEN_API_KEY
# openai_api_key=OPENAI_API_KEY,
# temperature=0)
prefix_for_llama = (
    """
    Translate everything within the brackets [] into English.
    Never translate or modify any English input.
    The input must be fully translated into coherent English sentences.
    Please only output the translated result.\n
    """
)

def translate_to_en(text):
    template = (
@@ -52,6 +63,12 @@ def translate_to_en(text):
print("input : {}, translate result : {}".format(text, assistant_output.content))
return assistant_output.content
# llama3专用
# data = get_translation_from_llama3(text)
# translation = data
# # print("Response from llama3 : " + translation)
# return translation
@retry(exceptions=RequestException, tries=3, delay=1)
def get_response(messages):
@@ -65,6 +82,36 @@ def get_response(messages):
    )
    return response
def get_translation_from_llama3(text):
    url = "http://localhost:11434/api/generate"
    # url = "http://10.1.1.240:1143/api/generate"
    prompt = f"System: {prefix_for_llama}\nUser:[{text}]"
    # Build the request payload
    payload = {
        "model": "llama3.2",
        "prompt": prompt,
        "stream": False
    }
    # Serialize the payload to JSON before posting
    headers = {'Content-Type': 'application/json'}
    response = requests.post(url, data=json.dumps(payload), headers=headers)
    # Handle the response
    if response.status_code == 200:
        # print("Response from server:")
        # print(response.json())
        resp = json.loads(response.content).get("response")
        print("input : {}, translate result : {}".format(text, resp))
        return resp
    else:
        print(f"Request failed with status code {response.status_code}")
        print(response.text)
        return None  # signal failure to the caller
def main():
    """Main function"""
    text = translate_to_en("fire")
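
For reference, below is a minimal standalone sketch of the Ollama call this commit introduces. It assumes a local Ollama server on the default port 11434 with the llama3.2 model pulled; the translate_via_ollama helper name and the timeout value are illustrative additions, not part of the commit.

import requests

OLLAMA_URL = "http://localhost:11434/api/generate"  # default local Ollama endpoint

def translate_via_ollama(text, model="llama3.2", timeout=30):
    """Ask a local llama3 model to translate `text` into English."""
    prompt = (
        "System: Translate everything within the brackets [] into English. "
        "Please only output the translated result.\n"
        f"User: [{text}]"
    )
    payload = {"model": model, "prompt": prompt, "stream": False}
    # requests' json= kwarg serializes the payload and sets the
    # Content-Type: application/json header automatically
    resp = requests.post(OLLAMA_URL, json=payload, timeout=timeout)
    resp.raise_for_status()
    # With "stream": False, Ollama returns a single JSON object whose
    # "response" field holds the complete generated text
    return resp.json().get("response")

if __name__ == "__main__":
    print(translate_via_ollama("着火了"))  # expect something like "It's on fire"

Note that raise_for_status() raises requests.HTTPError, a subclass of RequestException, so this variant would also compose with the @retry(exceptions=RequestException, tries=3, delay=1) decorator already used in the file.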