feat: track token usage when stream chat (#372)
* feat: track token usage when stream chat

* chore: update test gemini bot token
xingwanying authored Sep 11, 2024
1 parent 7e5f9a2 commit 4c808a8
Showing 3 changed files with 42 additions and 26 deletions.
2 changes: 1 addition & 1 deletion assistant/src/Assistant/index.md
@@ -22,7 +22,7 @@ import { Assistant } from '@petercatai/assistant';

export default () => (
  <Assistant
-    token="45cb2003-98f5-46b4-b299-b60c42f69fbf"
+    token="36c6b04b-b619-4449-91fa-faf23ab30b80"
    clearMessage={true}
  />
);
14 changes: 14 additions & 0 deletions server/agent/base.py
@@ -143,6 +143,20 @@ async def run_stream_chat(self, input_data: ChatData) -> AsyncIterator[str]:
                        ensure_ascii=False,
                    )
                    yield f"data: {json_output}\n\n"
+            elif kind == "on_chat_model_end":
+                content = event["data"]["output"]["generations"][0][0][
+                    "message"
+                ].usage_metadata
+                if content:
+                    json_output = json.dumps(
+                        {
+                            "id": event["run_id"],
+                            "type": "usage",
+                            **content,
+                        },
+                        ensure_ascii=False,
+                    )
+                    yield f"data: {json_output}\n\n"
            elif kind == "on_tool_start":
                children_value = event["data"].get("input", {})
                json_output = json.dumps(
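For context, a minimal sketch of how a client might pick the new "usage" event out of the SSE stream produced above. The payload keys (input_tokens, output_tokens, total_tokens) follow LangChain's usage_metadata; the hard-coded sample stream and the collect_usage helper are illustrative assumptions, not part of this commit.

# Illustrative only: parse SSE "data:" lines and aggregate any "usage" events.
import json

sample_stream = [
    'data: {"id": "run-1", "type": "message", "content": "Hello"}',
    'data: {"id": "run-1", "type": "usage", "input_tokens": 12, "output_tokens": 34, "total_tokens": 46}',
]

def collect_usage(lines):
    totals = {"input_tokens": 0, "output_tokens": 0, "total_tokens": 0}
    for line in lines:
        if not line.startswith("data: "):
            continue
        event = json.loads(line[len("data: "):])
        if event.get("type") == "usage":
            for key in totals:
                totals[key] += event.get(key, 0)
    return totals

print(collect_usage(sample_stream))
# {'input_tokens': 12, 'output_tokens': 34, 'total_tokens': 46}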
52 changes: 27 additions & 25 deletions server/agent/llm/clients/openai.py
@@ -10,30 +10,32 @@

OPEN_API_KEY = get_env_variable("OPENAI_API_KEY")


@register_llm_client("openai")
class OpenAIClient(BaseLLMClient):
-    _client: ChatOpenAI
-
-    def __init__(self,
-                 temperature: Optional[int] = 0.2,
-                 max_tokens: Optional[int] = 1500,
-                 streaming: Optional[bool] = False,
-                 api_key: Optional[str] = OPEN_API_KEY
-                 ):
-        self._client = ChatOpenAI(
-            model_name="gpt-4o",
-            temperature=temperature,
-            streaming=streaming,
-            max_tokens=max_tokens,
-            openai_api_key=api_key,
-        )
-
-    def get_client(self):
-        return self._client
-
-    def get_tools(self, tools: List[Any]):
-        return [convert_to_openai_tool(tool) for tool in tools]
-
-    def parse_content(self, content: List[MessageContent]):
-        print(f"parse_content: {content}")
-        return content
+    _client: ChatOpenAI
+
+    def __init__(
+        self,
+        temperature: Optional[int] = 0.2,
+        max_tokens: Optional[int] = 1500,
+        streaming: Optional[bool] = False,
+        api_key: Optional[str] = OPEN_API_KEY,
+    ):
+        self._client = ChatOpenAI(
+            model_name="gpt-4o",
+            temperature=temperature,
+            streaming=streaming,
+            max_tokens=max_tokens,
+            openai_api_key=api_key,
+            stream_usage=True,
+        )
+
+    def get_client(self):
+        return self._client
+
+    def get_tools(self, tools: List[Any]):
+        return [convert_to_openai_tool(tool) for tool in tools]
+
+    def parse_content(self, content: List[MessageContent]):
+        return content
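Beyond the reformatting and the dropped debug print, the functional change in this file is stream_usage=True. A rough sketch of why it matters (assuming langchain-openai is installed and OPENAI_API_KEY is set; the prompt and printed values are placeholders): with stream_usage enabled, the final streamed chunk carries usage_metadata, which is what the new on_chat_model_end handler in base.py reads.

# Illustrative only -- not part of this commit.
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(model_name="gpt-4o", streaming=True, stream_usage=True)

final = None
for chunk in llm.stream("Say hi in one word."):
    # Message chunks are additive; summing them rebuilds the full response,
    # including the usage_metadata contributed by the last chunk.
    final = chunk if final is None else final + chunk

print(final.usage_metadata)
# e.g. {'input_tokens': 13, 'output_tokens': 2, 'total_tokens': 15}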
