Skip to content

Commit

Permalink
Merge pull request #60 from AgentOps-AI/bboynton97/44-update-events-h…
Browse files Browse the repository at this point in the history
…andler-to-read-3rd-party-library-calls

Updates to langchain callback handler
  • Loading branch information
areibman authored Dec 21, 2023
2 parents 27e4881 + 457bb5b commit ee0a656
Show file tree
Hide file tree
Showing 7 changed files with 599 additions and 16 deletions.
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ cython_debug/
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
.idea/

.vscode/
.benchmarks/
Expand Down
23 changes: 13 additions & 10 deletions agentops/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ class Client:
endpoint (str, optional): The endpoint for the AgentOps service. Defaults to 'https://agentops-server-v2.fly.dev'.
max_wait_time (int, optional): The maximum time to wait in milliseconds before flushing the queue. Defaults to 1000.
max_queue_size (int, optional): The maximum size of the event queue. Defaults to 100.
override (bool): Whether to override any LLM calls to emit them as events.
Attributes:
session (Session, optional): A Session is a grouping of events (e.g. a run of your agent).
"""
Expand All @@ -44,7 +45,8 @@ def __init__(self, api_key: Optional[str] = None,
tags: Optional[List[str]] = None,
endpoint: Optional[str] = 'https://agentops-server-v2.fly.dev',
max_wait_time: Optional[int] = 1000,
max_queue_size: Optional[int] = 100):
max_queue_size: Optional[int] = 100,
override=True):

# Get API key from env
if api_key is None:
Expand All @@ -71,11 +73,12 @@ def __init__(self, api_key: Optional[str] = None,
# Override sys.excepthook
sys.excepthook = self.handle_exception

self.start_session(tags)
self._start_session(tags)

if 'openai' in sys.modules:
self.llm_tracker = LlmTracker(self)
self.llm_tracker.override_api('openai')
if override:
if 'openai' in sys.modules:
self.llm_tracker = LlmTracker(self)
self.llm_tracker.override_api('openai')

def handle_exception(self, exc_type, exc_value, exc_traceback):
"""
Expand Down Expand Up @@ -232,7 +235,7 @@ async def _record_event_async(self, func, event_name, tags, *args, **kwargs):

return returns

def start_session(self, tags: Optional[List[str]] = None):
def _start_session(self, tags: Optional[List[str]] = None):
"""
Start a new session for recording events.
Expand All @@ -245,10 +248,10 @@ def start_session(self, tags: Optional[List[str]] = None):
self.worker.start_session(self.session)

def end_session(self, end_state: str = Field("Indeterminate",
description="End state of the session",
pattern="^(Success|Fail|Indeterminate)$"),
rating: Optional[str] = None,
video: Optional[str] = None):
description="End state of the session",
pattern="^(Success|Fail|Indeterminate)$"),
rating: Optional[str] = None,
video: Optional[str] = None):
"""
End the current session with the AgentOps service.
Expand Down
28 changes: 25 additions & 3 deletions agentops/event.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from typing import Optional, List
from pydantic import Field


class Event:
"""
Represents a discrete event to be recorded.
Expand All @@ -32,13 +33,15 @@ class Event:
event_type (str): Type of the event.
params (str, optional): The parameters passed to the operation.
returns (str, optional): The output of the operation.
result (str): Result of the operation.
result (Result): Result of the operation as Enum Result.
action_type (str): Type of action of the event.
model (Models, optional): The model used during the event.
prompt (str, optional): The input prompt for an LLM call.
tags (List[str], optional): Tags associated with the event.
end_timestamp (float): The timestamp for when the event ended, represented as seconds since the epoch.
init_timestamp (float): The timestamp for when the event was initiated, represented as seconds since the epoch.
prompt_tokens (int, optional): The number of tokens in the prompt if the event is an LLM call
completion_tokens (int, optional): The number of tokens in the completion if the event is an LLM call
"""

def __init__(self, event_type: str,
Expand All @@ -54,7 +57,9 @@ def __init__(self, event_type: str,
prompt: Optional[str] = None,
tags: Optional[List[str]] = None,
init_timestamp: Optional[float] = None,
screenshot: Optional[str] = None
screenshot: Optional[str] = None,
prompt_tokens: Optional[int] = None,
completion_tokens: Optional[int] = None
):
self.event_type = event_type
self.params = params
Expand All @@ -66,4 +71,21 @@ def __init__(self, event_type: str,
self.prompt = prompt
self.end_timestamp = get_ISO_time()
self.init_timestamp = init_timestamp if init_timestamp else self.end_timestamp
self.screenshot = screenshot
self.screenshot = screenshot
self.prompt_tokens = prompt_tokens
self.completion_tokens = completion_tokens

def __str__(self):
return str({
"event_type": self.event_type,
"params": self.params,
"returns": self.returns,
"action_type": self.action_type,
"result": self.result,
"model": self.model,
"prompt": self.prompt,
"tags": self.tags,
"init_timestamp": self.init_timestamp,
"prompt_tokens": self.prompt_tokens,
"completion_tokens": self.completion_tokens,
})
1 change: 0 additions & 1 deletion agentops/http.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import json
from enum import Enum
from typing import Optional
import requests
Expand Down
Loading

0 comments on commit ee0a656

Please sign in to comment.