feat chat robot 接口迁移
This commit is contained in:
6
app/service/chat_robot/script/callbacks/__init__.py
Normal file
6
app/service/chat_robot/script/callbacks/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""Public exports of the chat-robot callback package."""

from .openai_token_record_callback import OpenAITokenRecordCallbackHandler

# Explicit public API of this package.
__all__ = ["OpenAITokenRecordCallbackHandler"]
|
||||
@@ -0,0 +1,46 @@
|
||||
"""Callback Handler that add on_chain_end function to record Token usage."""
|
||||
from typing import Any, Dict
|
||||
|
||||
from langchain.callbacks import OpenAICallbackHandler
|
||||
from langchain.schema import LLMResult
|
||||
from langchain.callbacks.openai_info import standardize_model_name, MODEL_COST_PER_1K_TOKENS, get_openai_token_cost_for_model
|
||||
|
||||
|
||||
class OpenAITokenRecordCallbackHandler(OpenAICallbackHandler):
    """Callback handler that tracks OpenAI token usage and cost, then copies
    the accumulated totals into the chain outputs when the chain finishes.

    It also inspects function-call responses to decide whether this run's
    usage should be recorded (``need_record``) and what kind of payload the
    final answer will be (``response_type``).

    Fix vs. original: the class docstring was a bare string statement placed
    *after* the attributes (so Python never registered it as the docstring);
    it now sits in the proper position.
    """

    # Whether this run's token usage should be persisted by the caller.
    # Flipped to False when the model invokes one of the internal tools.
    need_record: bool = True
    # Payload kind of the final answer: "string" by default, "image" when the
    # model is about to run a SQL query.
    # NOTE(review): "image" semantics inferred from the tool name — confirm
    # with the consumer of ``response_type``.
    response_type: str = "string"

    def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
        """Collect token usage and detect a pending tool/function call.

        Fix vs. original: ``response.generations[0][0]`` and
        ``additional_kwargs["function_call"]["name"]`` were accessed with
        unguarded indexing — an empty generations list or a malformed
        ``function_call`` dict raised IndexError/KeyError. The deep
        expression was also evaluated three times; it is now hoisted.
        """
        if response.llm_output is None:
            return None
        self.successful_requests += 1
        if "token_usage" not in response.llm_output:
            return None

        # If the model decided to call a tool, adjust the record/response flags.
        generations = response.generations
        if generations and generations[0]:
            function_call = generations[0][0].message.additional_kwargs.get(
                "function_call"
            )
            if function_call:
                tool_name = function_call.get("name")
                if tool_name in ("sql_db_query", "sql_db_schema", "tutorial_tool"):
                    self.need_record = False
                if tool_name == "sql_db_query":
                    self.response_type = "image"

        # Standard OpenAI token/cost accounting (mirrors OpenAICallbackHandler).
        token_usage = response.llm_output["token_usage"]
        completion_tokens = token_usage.get("completion_tokens", 0)
        prompt_tokens = token_usage.get("prompt_tokens", 0)
        model_name = standardize_model_name(response.llm_output.get("model_name", ""))
        if model_name in MODEL_COST_PER_1K_TOKENS:
            completion_cost = get_openai_token_cost_for_model(
                model_name, completion_tokens, is_completion=True
            )
            prompt_cost = get_openai_token_cost_for_model(model_name, prompt_tokens)
            self.total_cost += prompt_cost + completion_cost
        self.total_tokens += token_usage.get("total_tokens", 0)
        self.prompt_tokens += prompt_tokens
        self.completion_tokens += completion_tokens

    def on_chain_end(self, outputs: Dict, **kwargs: Any) -> None:
        """Copy the accumulated usage counters into the chain ``outputs``.

        (The original docstring claimed this writes to redis; the visible
        code only mutates ``outputs`` — presumably a downstream consumer
        persists it. TODO confirm.)
        """
        outputs["total_tokens"] = self.total_tokens
        outputs["total_cost"] = self.total_cost
        outputs["prompt_tokens"] = self.prompt_tokens
        outputs["completion_tokens"] = self.completion_tokens
        outputs["need_record"] = self.need_record
        outputs["response_type"] = self.response_type
|
||||
Reference in New Issue
Block a user