Skip to content

Commit

Permalink
Fix the kwargs key used to retrieve the prompt
Browse files Browse the repository at this point in the history
  • Loading branch information
the-praxs committed Aug 10, 2024
1 parent dd79719 commit 4690bcd
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions agentops/llm_tracker.py
Original file line number Diff line number Diff line change
Expand Up @@ -651,11 +651,11 @@ def handle_stream_chunk(chunk: Message):
try:
accumulated_delta = ""
self.llm_event.agent_id = check_call_stack_for_agent_id()
self.llm_event.prompt = kwargs["messages"]
self.llm_event.prompt = kwargs["messages"][0]["content"]

if isinstance(chunk, RawMessageStartEvent):
self.llm_event.model = chunk.message.model
self.llm_event.prompt_tokens = chunk.content
self.llm_event.prompt_tokens = chunk.usage.input_tokens
elif isinstance(chunk, RawContentBlockStartEvent):
accumulated_delta += chunk.content_block.text
elif isinstance(chunk, RawContentBlockDeltaEvent):
Expand All @@ -679,7 +679,7 @@ def handle_stream_chunk(chunk: Message):
logger.warning(
f"Unable to parse a chunk for LLM call. Skipping upload to AgentOps\n"
f"chunk:\n {chunk}\n"
f"kwargs:\n {kwargs_str}\n"
f"kwargs:\n {kwargs_str}\n",
)

# if the response is a generator, decorate the generator
Expand Down Expand Up @@ -716,7 +716,7 @@ async def async_generator():
try:
self.llm_event.returns = response.model_dump()
self.llm_event.agent_id = check_call_stack_for_agent_id()
self.llm_event.prompt = kwargs["messages"]
self.llm_event.prompt = kwargs["messages"][0]["content"]
self.llm_event.prompt_tokens = response.usage.input_tokens
self.llm_event.completion = response.content[0].text
self.llm_event.completion_tokens = response.usage.output_tokens
Expand Down

0 comments on commit 4690bcd

Please sign in to comment.