Skip to content

Commit

Permalink
fix issue with some models type (#190)
Browse files Browse the repository at this point in the history
* add stop_instrumenting

* version bump

* test deps

* tests

* handle model name key difference
  • Loading branch information
bboynton97 authored May 7, 2024
1 parent 6059741 commit 5979ec3
Show file tree
Hide file tree
Showing 5 changed files with 41 additions and 21 deletions.
1 change: 1 addition & 0 deletions agentops/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,7 @@ def __init__(self,
self._session: Optional[Session] = None
self._worker: Optional[Worker] = None
self._tags: Optional[List[str]] = tags
self._tags_for_future_session: Optional[List[str]] = None

self._env_data_opt_out = os.getenv('AGENTOPS_ENV_DATA_OPT_OUT') and os.getenv(
'AGENTOPS_ENV_DATA_OPT_OUT').lower() == 'true'
Expand Down
5 changes: 0 additions & 5 deletions agentops/event.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,11 +62,6 @@ class ActionEvent(Event):
logs: Optional[Union[str, Sequence[Any]]] = None
screenshot: Optional[str] = None

# May be needed if we keep Optional for agent_id
# def __post_init__(self):
# if self.agent_id is None:
# raise ValueError("agent_id is required for ActionEvent")


@dataclass
class LLMEvent(Event):
Expand Down
26 changes: 21 additions & 5 deletions agentops/langchain_callback_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,21 @@
from .helpers import debug_print_function_params


def get_model_from_kwargs(kwargs: dict) -> str:
    """Best-effort extraction of the model name from LangChain callback kwargs.

    Different LangChain integrations report the model under different keys of
    ``invocation_params``: most chat/LLM wrappers provide ``model``, while some
    only expose the provider ``_type``. A sentinel is returned when neither key
    (or ``invocation_params`` itself) is present, so callers always get a str.

    Args:
        kwargs: Callback keyword arguments, expected to contain an
            ``invocation_params`` mapping (tolerated if missing).

    Returns:
        The model name, the provider ``_type``, or ``'unknown_model'``.
    """
    # .get with a default avoids a KeyError when 'invocation_params' is absent.
    invocation_params = kwargs.get('invocation_params', {})
    if 'model' in invocation_params:
        return invocation_params['model']
    if '_type' in invocation_params:
        return invocation_params['_type']
    return 'unknown_model'


# def get_completion_from_response(response: LLMResult):
# if 'text' in response.generations[0][0]:
# return response.generations[0][0].text
# if ''
#

class Events:
llm: Dict[str, LLMEvent] = {}
tool: Dict[str, ToolEvent] = {}
Expand Down Expand Up @@ -63,7 +78,7 @@ def on_llm_start(
params={**serialized,
**({} if metadata is None else metadata),
**kwargs}, # TODO: params is inconsistent, in ToolEvent we put it in logs
model=kwargs['invocation_params']['model'],
model=get_model_from_kwargs(kwargs),
prompt=prompts[0]
# tags=tags # TODO
)
Expand Down Expand Up @@ -94,14 +109,15 @@ def on_llm_end(
) -> Any:
llm_event: LLMEvent = self.events.llm[str(run_id)]
llm_event.returns = {
"content": response.generations[0][0].message.content,
"content": response.generations[0][0].text,
"generations": response.generations
}
llm_event.end_timestamp = get_ISO_time()
llm_event.completion = response.generations[0][0].text
if response.llm_output is not None:
llm_event.completion = response.generations[0][0].message.content # TODO
llm_event.prompt_tokens = response.llm_output['token_usage']['prompt_tokens']
llm_event.completion_tokens = response.llm_output['token_usage']['completion_tokens']

self.ao_client.record(llm_event)

if len(response.generations) == 0:
Expand Down Expand Up @@ -418,12 +434,12 @@ async def on_llm_end(
) -> Any:
llm_event: LLMEvent = self.events.llm[str(run_id)]
llm_event.returns = {
"content": response.generations[0][0].message.content,
"content": response.generations[0][0].text,
"generations": response.generations
}
llm_event.end_timestamp = get_ISO_time()
llm_event.completion = response.generations[0][0].text
if response.llm_output is not None:
llm_event.completion = response.generations[0][0].message.content # TODO
llm_event.prompt_tokens = response.llm_output['token_usage']['prompt_tokens']
llm_event.completion_tokens = response.llm_output['token_usage']['completion_tokens']
self.ao_client.record(llm_event)
Expand Down
8 changes: 8 additions & 0 deletions agentops/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from .session import Session
from .helpers import safe_serialize, filter_unjsonable
from typing import Dict, Optional
import os


class Worker:
Expand All @@ -18,6 +19,7 @@ def __init__(self, config: Configuration) -> None:
self.thread.daemon = True
self.thread.start()
self._session: Optional[Session] = None
self._debug_mode = os.getenv('DEBUG_MODE') == 'Y'

def add_event(self, event: dict) -> None:
with self.lock:
Expand All @@ -42,6 +44,12 @@ def flush_queue(self) -> None:
self.config.api_key,
self.config.parent_key)

if self._debug_mode:
print("\n<AGENTOPS_DEBUG_OUTPUT>")
print(f"Worker request to {self.config.endpoint}/events")
print(serialized_payload)
print("</AGENTOPS_DEBUG_OUTPUT>\n")

def start_session(self, session: Session) -> bool:
self._session = session
with self.lock:
Expand Down
22 changes: 11 additions & 11 deletions tests/test_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,28 +2,28 @@
import requests_mock
import pytest
import agentops
from agentops import ActionEvent
from agentops import ActionEvent, ErrorEvent


@pytest.fixture
def mock_req():
with requests_mock.Mocker() as m:
url = 'https://api.agentops.ai'
m.post(url + '/events', text='ok')
m.post(url + '/sessions', json={'status': 'success', 'token_cost': 5})
yield m

class TestEvents:
def setup_method(self):
self.api_key = "random_api_key"
self.event_type = 'test_event_type'
self.config = agentops.Configuration(api_key=self.api_key, max_wait_time=50, max_queue_size=1)

def test_record_timestamp(self, mock_req):
def test_record_timestamp(self):
agentops.init(api_key=self.api_key)

event = ActionEvent()
time.sleep(0.15)
agentops.record(event)

assert event.init_timestamp != event.end_timestamp
assert event.init_timestamp != event.end_timestamp

def test_record_error_event(self):
agentops.init(api_key=self.api_key)

event = ErrorEvent()
time.sleep(0.15)
agentops.record(event)

0 comments on commit 5979ec3

Please sign in to comment.