Skip to content

Commit

Permalink
[Feat] OpenAI Assistants (Beta) Integration (#609)
Browse files Browse the repository at this point in the history
* some working code

* refactored and cleaned code

* add exception handling

* add assistants example notebook with images and pdf

* linting

* modify notebook name and link

* remove agentops github link from notebook

* modify image links to github urls

* remove agentops content from conclusion section

* add assistants examples page

* add assistants example to openai integrations page

* modify variable name from `original` to `original_func` when patching function

* remove casting response to str for `returns` attribute

* add partial `LLMEvent` for calculating costs

* add logger to error event

* check if `usage` is not `None`

* add test for assistants api

* add more tests

* fix typo (no idea how it occurred in the first place)
  • Loading branch information
the-praxs authored Jan 4, 2025
1 parent 6bf8bd1 commit 9d6aac3
Show file tree
Hide file tree
Showing 15 changed files with 1,874 additions and 2 deletions.
109 changes: 108 additions & 1 deletion agentops/llms/providers/openai.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import inspect
import pprint
from typing import Optional

Expand All @@ -16,6 +15,8 @@
class OpenAiProvider(InstrumentedProvider):
original_create = None
original_create_async = None
original_assistant_methods = None
assistants_run_steps = {}

def __init__(self, client):
super().__init__(client)
Expand Down Expand Up @@ -138,6 +139,7 @@ async def async_generator():
def override(self):
self._override_openai_v1_completion()
self._override_openai_v1_async_completion()
self._override_openai_assistants_beta()

def _override_openai_v1_completion(self):
from openai.resources.chat import completions
Expand Down Expand Up @@ -228,9 +230,114 @@ async def patched_function(*args, **kwargs):
# Override the original method with the patched one
completions.AsyncCompletions.create = patched_function

def _override_openai_assistants_beta(self):
    """Patch OpenAI Assistants (Beta) API methods to record AgentOps events.

    Every patched method records an ``ActionEvent`` for the call. When the
    response (or a page of responses) carries token-usage data, an
    additional partial ``LLMEvent`` is recorded so costs can be computed.
    The unpatched callables are stored in ``self.original_assistant_methods``
    so ``undo_override`` can restore them.
    """
    from openai._legacy_response import LegacyAPIResponse
    from openai.resources import beta
    from openai.pagination import BasePage

    def handle_response(response, kwargs, init_timestamp, session: Optional[Session] = None):
        """Record events for *response* and return it unchanged.

        On any parsing failure an ``ErrorEvent`` is recorded and a warning
        is logged, but the original *response* object is still returned so
        callers are never handed anything other than the SDK's own value.
        """
        action_event = ActionEvent(init_timestamp=init_timestamp, params=kwargs)
        if session is not None:
            action_event.session_id = session.session_id

        try:
            # Paginated results are typed like ``SyncCursorPage[Run]``; use the
            # inner type name as the action type, otherwise the class name.
            action_event.action_type = (
                response.__class__.__name__.split("[")[1][:-1]
                if isinstance(response, BasePage)
                else response.__class__.__name__
            )
            action_event.returns = response.model_dump() if hasattr(response, "model_dump") else response
            action_event.end_timestamp = get_ISO_time()
            self._safe_record(session, action_event)

            # Create LLMEvent if usage data exists
            response_dict = response.model_dump() if hasattr(response, "model_dump") else {}

            # Remember the model used by each run so that run-step usage
            # (which carries no ``model`` field of its own) can be attributed
            # later. Guard against a present-but-None ``id``, which would
            # otherwise raise AttributeError on ``startswith``.
            response_id = response_dict.get("id")
            if isinstance(response_id, str) and response_id.startswith("run"):
                self.assistants_run_steps.setdefault(response_id, {"model": response_dict.get("model")})

            if response_dict.get("usage") is not None:
                llm_event = LLMEvent(init_timestamp=init_timestamp, params=kwargs)
                if session is not None:
                    llm_event.session_id = session.session_id

                llm_event.model = response_dict.get("model")
                llm_event.prompt_tokens = response_dict["usage"]["prompt_tokens"]
                llm_event.completion_tokens = response_dict["usage"]["completion_tokens"]
                llm_event.end_timestamp = get_ISO_time()
                self._safe_record(session, llm_event)

            elif "data" in response_dict:
                # Paginated list (e.g. run steps): record one LLMEvent per
                # item that reports usage.
                for item in response_dict["data"]:
                    if item.get("usage") is not None:
                        llm_event = LLMEvent(init_timestamp=init_timestamp, params=kwargs)
                        if session is not None:
                            llm_event.session_id = session.session_id

                        # KeyError here (untracked run_id) is handled by the
                        # enclosing except: an ErrorEvent is recorded instead
                        # of an LLMEvent with an unknown model.
                        llm_event.model = self.assistants_run_steps[item["run_id"]]["model"]
                        llm_event.prompt_tokens = item["usage"]["prompt_tokens"]
                        llm_event.completion_tokens = item["usage"]["completion_tokens"]
                        llm_event.end_timestamp = get_ISO_time()
                        self._safe_record(session, llm_event)

        except Exception as e:
            self._safe_record(session, ErrorEvent(trigger_event=action_event, exception=e))

            # Format into locals only — do NOT rebind ``response``. The
            # previous code returned the pprint string to the caller on this
            # path, replacing the actual API response object.
            kwargs_str = pprint.pformat(kwargs)
            response_str = pprint.pformat(response)
            logger.warning(
                f"Unable to parse response for Assistants API. Skipping upload to AgentOps\n"
                f"response:\n {response_str}\n"
                f"kwargs:\n {kwargs_str}\n"
            )

        return response

    def create_patched_function(original_func):
        """Wrap *original_func* so its calls are timed and recorded."""

        def patched_function(*args, **kwargs):
            init_timestamp = get_ISO_time()

            # ``session`` is an AgentOps-only kwarg; strip it before calling
            # the real SDK method.
            session = kwargs.pop("session", None)

            response = original_func(*args, **kwargs)
            # Raw (legacy) responses are passed through untouched.
            if isinstance(response, LegacyAPIResponse):
                return response

            return handle_response(response, kwargs, init_timestamp, session=session)

        return patched_function

    # Store and patch Assistant API methods
    assistant_api_methods = {
        beta.Assistants: ["create", "retrieve", "update", "delete", "list"],
        beta.Threads: ["create", "retrieve", "update", "delete"],
        beta.threads.Messages: ["create", "retrieve", "update", "list"],
        beta.threads.Runs: ["create", "retrieve", "update", "list", "submit_tool_outputs", "cancel"],
        beta.threads.runs.steps.Steps: ["retrieve", "list"],
    }

    # Keep the originals keyed by (class, method name) for undo_override.
    self.original_assistant_methods = {
        (cls, method): getattr(cls, method) for cls, methods in assistant_api_methods.items() for method in methods
    }

    for (cls, method), original_func in self.original_assistant_methods.items():
        setattr(cls, method, create_patched_function(original_func))

def undo_override(self):
if self.original_create is not None and self.original_create_async is not None:
from openai.resources.chat import completions

completions.AsyncCompletions.create = self.original_create_async
completions.Completions.create = self.original_create

if self.original_assistant_methods is not None:
for (cls, method), original in self.original_assistant_methods.items():
setattr(cls, method, original)
36 changes: 35 additions & 1 deletion agentops/llms/tracker.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,41 @@ class LlmTracker:
SUPPORTED_APIS = {
"litellm": {"1.3.1": ("openai_chat_completions.completion",)},
"openai": {
"1.0.0": ("chat.completions.create",),
"1.0.0": (
"chat.completions.create",
# Assistants
"beta.assistants.create",
"beta.assistants.retrieve",
"beta.assistants.update",
"beta.assistants.delete",
"beta.assistants.list",
"beta.assistants.files.create",
"beta.assistants.files.retrieve",
"beta.assistants.files.delete",
"beta.assistants.files.list",
# Threads
"beta.threads.create",
"beta.threads.retrieve",
"beta.threads.update",
"beta.threads.delete",
# Messages
"beta.threads.messages.create",
"beta.threads.messages.retrieve",
"beta.threads.messages.update",
"beta.threads.messages.list",
"beta.threads.messages.files.retrieve",
"beta.threads.messages.files.list",
# Runs
"beta.threads.runs.create",
"beta.threads.runs.retrieve",
"beta.threads.runs.update",
"beta.threads.runs.list",
"beta.threads.runs.cancel",
"beta.threads.runs.submit_tool_outputs",
# Run Steps
"beta.threads.runs.steps.Steps.retrieve",
"beta.threads.runs.steps.Steps.list",
),
"0.0.0": (
"ChatCompletion.create",
"ChatCompletion.acreate",
Expand Down
4 changes: 4 additions & 0 deletions docs/v1/examples/examples.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,10 @@ mode: "wide"
<Card title="Multi Session" icon="computer" href="/v1/examples/multi_session">
Manage multiple sessions at the same time
</Card>

<Card title="OpenAI Assistants" icon={<img src="https://github.com/agentops-ai/agentops/blob/main/docs/images/external/openai/openai-logomark.png?raw=true" alt="OpenAI Assistants" />} iconType="image" href="/v1/examples/openai_assistants">
Observe OpenAI Assistants
</Card>
</CardGroup>

### Integration Examples
Expand Down
Loading

0 comments on commit 9d6aac3

Please sign in to comment.