Skip to content

Commit

Permalink
Merge branch 'main' into v2-api
Browse files Browse the repository at this point in the history
  • Loading branch information
siyangqiu authored May 17, 2024
2 parents 27c27f1 + 7fd7d10 commit 1b7ad85
Show file tree
Hide file tree
Showing 5 changed files with 110 additions and 44 deletions.
15 changes: 11 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,6 @@
<a href="https://discord.gg/mKW3ZhN9p2">
<img src="https://img.shields.io/badge/chat-on%20Discord-blueviolet" alt="Discord community channel"/>
</a>
<a href="mailto:[email protected]">
<img src="https://img.shields.io/website?color=%23f26522&down_message=Y%20Combinator&label=Not%20Backed%20By&logo=ycombinator&style=flat-square&up_message=Y%20Combinator&url=https%3A%2F%2Fwww.ycombinator.com"/>
</a>
<a href="https://github.com/agentops-ai/agentops/issues">
<img src="https://img.shields.io/github/commit-activity/m/agentops-ai/agentops" alt="git commit activity"/>
</a>
Expand Down Expand Up @@ -122,6 +119,9 @@ pip install git+https://github.com/AgentOps-AI/crewAI.git@main

AgentOps works seamlessly with applications built using Langchain. To use the handler, install Langchain as an optional dependency:

<details>
<summary>Installation</summary>

```shell
pip install agentops[langchain]
```
Expand Down Expand Up @@ -151,13 +151,18 @@ agent = initialize_agent(tools,

Check out the [Langchain Examples Notebook](./examples/langchain_examples.ipynb) for more details including Async handlers.

### Cohere
</details>

### Cohere ⌨️

First-class support for Cohere (>=5.4.0). This is a living integration — should you need any added functionality, please message us on Discord!

- [AgentOps integration example](https://docs.agentops.ai/v1/integrations/cohere)
- [Official Cohere documentation](https://docs.cohere.com/reference/about)

<details>
<summary>Installation</summary>

```bash
pip install cohere
```
Expand Down Expand Up @@ -198,6 +203,8 @@ for event in stream:

agentops.end_session('Success')
```
</details>


### LlamaIndex 🦙

Expand Down
56 changes: 48 additions & 8 deletions agentops/llm_tracker.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from importlib.metadata import version
from packaging.version import Version, parse
from .log_config import logger
from .event import LLMEvent, ErrorEvent
from .event import LLMEvent, ActionEvent, ToolEvent, ErrorEvent
from .helpers import get_ISO_time, check_call_stack_for_agent_id
import inspect
from typing import Optional
Expand Down Expand Up @@ -228,7 +228,7 @@ async def async_generator():
return response

def _handle_response_cohere(self, response, kwargs, init_timestamp):
# TODO: """Handle responses for Cohere versions >v5.4.0"""
"""Handle responses for Cohere versions >v5.4.0"""
from cohere.types.non_streamed_chat_response import NonStreamedChatResponse
from cohere.types.streamed_chat_response import (
StreamedChatResponse,
Expand All @@ -248,6 +248,8 @@ def _handle_response_cohere(self, response, kwargs, init_timestamp):
params=kwargs
)

self.action_events = {}

def handle_stream_chunk(chunk):

# We take the first chunk and accumulate the deltas from all subsequent chunks to build one full chat completion
Expand All @@ -261,21 +263,59 @@ def handle_stream_chunk(chunk):

try:
if isinstance(chunk, StreamedChatResponse_StreamEnd):
# Streaming is done. Record LLMEvent
# self.llm_event.returns.finish_reason = chunk.is_finished
# StreamedChatResponse_TextGeneration = LLMEvent
self.llm_event.completion = {
"role": "assistant", "content": self.llm_event.completion}
"role": "assistant", "content": chunk.response.text}
self.llm_event.end_timestamp = get_ISO_time()

self.client.record(self.llm_event)

# StreamedChatResponse_SearchResults = ActionEvent
search_results = chunk.response.search_results
for search_result in search_results:
query = search_result.search_query
if query.generation_id in self.action_events:
action_event = self.action_events[query.generation_id]
search_result_dict = search_result.dict()
del search_result_dict["search_query"]
action_event.returns = search_result_dict
action_event.end_timestamp = get_ISO_time()

# StreamedChatResponse_CitationGeneration = ActionEvent
documents = {doc['id']: doc for doc in chunk.response.documents}
citations = chunk.response.citations
for citation in citations:
citation_id = f"{citation.start}.{citation.end}"
if citation_id in self.action_events:
action_event = self.action_events[citation_id]
citation_dict = citation.dict()
# Replace document_ids with the actual documents
citation_dict['documents'] = [documents[doc_id]
for doc_id in citation_dict['document_ids'] if doc_id in documents]
del citation_dict['document_ids']

action_event.returns = citation_dict
action_event.end_timestamp = get_ISO_time()

for key, action_event in self.action_events.items():
self.client.record(action_event)

elif isinstance(chunk, StreamedChatResponse_TextGeneration):
self.llm_event.completion += chunk.text
elif isinstance(chunk, StreamedChatResponse_ToolCallsGeneration):
pass
elif isinstance(chunk, StreamedChatResponse_CitationGeneration):
pass
for citation in chunk.citations:
self.action_events[f"{citation.start}.{citation.end}"] = ActionEvent(
action_type="citation",
init_timestamp=get_ISO_time(),
params=citation.text)
elif isinstance(chunk, StreamedChatResponse_SearchQueriesGeneration):
for query in chunk.search_queries:
self.action_events[query.generation_id] = ActionEvent(
action_type="search_query",
init_timestamp=get_ISO_time(),
params=query.text)
elif isinstance(chunk, StreamedChatResponse_SearchResults):
pass

except Exception as e:
Expand Down Expand Up @@ -498,7 +538,7 @@ def override_api(self):
self._override_method(api, method_path, module)

if api == 'cohere':
# Patch cohere vx.x.x+ methods
# Patch cohere v5.4.0+ methods
module_version = version(api)
if module_version is None:
logger.warning(f'Cannot determine Cohere version. Only Cohere>=5.4.0 supported.')
Expand Down
7 changes: 5 additions & 2 deletions agentops/meta_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ def __new__(cls, name, bases, dct):

return super().__new__(cls, name, bases, dct)

def send_exception_to_server(cls, exception, api_key):
def send_exception_to_server(cls, exception, api_key, session):
"""Class method to send exception to server."""
if api_key:
exception_type = type(exception).__name__
Expand All @@ -33,6 +33,9 @@ def send_exception_to_server(cls, exception, api_key):
"host_env": get_host_env()
}

if session:
developer_error["session_id"] = session.session_id

HttpClient.post("https://api.agentops.ai/v2/developer_errors",
safe_serialize(developer_error).encode("utf-8"),
api_key=api_key)
Expand All @@ -48,7 +51,7 @@ def wrapper(self, *args, **kwargs):
logger.warning(f"Error: {e}")
config = getattr(self, 'config', None)
if config is not None:
type(self).send_exception_to_server(e, self.config._api_key)
type(self).send_exception_to_server(e, self.config._api_key, self._session)
raise e

return wrapper
30 changes: 0 additions & 30 deletions examples/cohere.py

This file was deleted.

46 changes: 46 additions & 0 deletions examples/cohere_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
"""Demo: tracking Cohere streamed chats with AgentOps.

Runs two web-search-connected `chat_stream` calls — the second seeds its
chat history with the first call's answer — then closes the AgentOps session.
Requires COHERE_API_KEY / AGENTOPS_API_KEY in the environment (loaded via .env).
"""
import cohere
import agentops
from dotenv import load_dotenv

load_dotenv()

agentops.init(tags=["cohere", "agentops-demo"])
co = cohere.Client()


def _print_stream(stream):
    """Consume a Cohere chat stream, echoing text chunks as they arrive.

    Returns the accumulated generated text. The final ``stream-end`` event
    (which carries the full response object) is printed for inspection.
    """
    text = ""
    for event in stream:
        if event.event_type == "text-generation":
            text += event.text
            print(event.text, end='')
        elif event.event_type == "stream-end":
            print("\n")
            print(event)
            print("\n")
    return text


response = _print_stream(co.chat_stream(
    message="Tell me everything you can about AgentOps",
    connectors=[{"id": "web-search"}]
))

response = _print_stream(co.chat_stream(
    chat_history=[
        {"role": "SYSTEM", "message": "You are Adam Silverman: die-hard advocate of AgentOps, leader in AI Agent observability"},
        {
            "role": "CHATBOT",
            # f-string: interpolate the first call's answer into the history
            # (a plain string here would send the literal text "{response}").
            "message": f"How's your day going? I'd like to tell you about AgentOps: {response}",
        },
    ],
    message="Based on your newfound knowledge of AgentOps, is Cohere a suitable partner for them and how could they integrate?",
    connectors=[{"id": "web-search"}]
))

agentops.end_session('Success')

0 comments on commit 1b7ad85

Please sign in to comment.