diff --git a/agentops/event.py b/agentops/event.py
index a1843c2ec..8e1ab1632 100644
--- a/agentops/event.py
+++ b/agentops/event.py
@@ -92,7 +92,6 @@ class LLMEvent(Event):
     completion: str | object = None
     completion_tokens: Optional[int] = None
     model: Optional[Models | str] = None
-    model: Optional[Models | str] = None


 @dataclass
diff --git a/agentops/llm_tracker.py b/agentops/llm_tracker.py
index 30542d308..e2ef73278 100644
--- a/agentops/llm_tracker.py
+++ b/agentops/llm_tracker.py
@@ -121,7 +121,6 @@ def handle_stream_chunk(chunk: ChatCompletionChunk):
             )

         try:
-            # NOTE: prompt/completion usage not returned in response when streaming
             # NOTE: prompt/completion usage not returned in response when streaming
             model = chunk.model
             choices = chunk.choices
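For context on the first hunk: dropping the duplicate `model` field is safe because Python never produces two fields from it. A repeated annotated assignment in a class body just rebinds the same key in `__annotations__`, so `@dataclass` generates a single field either way; the duplicate line was harmless but misleading. A minimal sketch demonstrating this (hypothetical class name, not part of the PR):

```python
from dataclasses import dataclass, fields
from typing import Optional


@dataclass
class LLMEventSketch:  # hypothetical stand-in for LLMEvent
    completion_tokens: Optional[int] = None
    model: Optional[str] = None
    model: Optional[str] = None  # duplicate: rebinds the annotation, last default wins


if __name__ == "__main__":
    # Only one 'model' field is generated by @dataclass.
    print([f.name for f in fields(LLMEventSketch)])
    # -> ['completion_tokens', 'model']
```

The second hunk is pure cleanup of a comment that appeared twice in a row; no behavior changes in either file.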