From 4f5a6da315f0e3d435757d0a98b8ae510629bf46 Mon Sep 17 00:00:00 2001 From: Braelyn Boynton Date: Thu, 14 Dec 2023 13:02:36 -0600 Subject: [PATCH 01/15] Langchain callback --- .idea/.gitignore | 8 +++ .idea/agentops.iml | 17 +++++++ .../inspectionProfiles/profiles_settings.xml | 6 +++ .idea/misc.xml | 7 +++ .idea/modules.xml | 8 +++ .idea/vcs.xml | 6 +++ agentops/langchain_callback_handler.py | 51 +++++++++++++++++++ requirements.txt | 3 +- 8 files changed, 105 insertions(+), 1 deletion(-) create mode 100644 .idea/.gitignore create mode 100644 .idea/agentops.iml create mode 100644 .idea/inspectionProfiles/profiles_settings.xml create mode 100644 .idea/misc.xml create mode 100644 .idea/modules.xml create mode 100644 .idea/vcs.xml create mode 100644 agentops/langchain_callback_handler.py diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 000000000..13566b81b --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,8 @@ +# Default ignored files +/shelf/ +/workspace.xml +# Editor-based HTTP Client requests +/httpRequests/ +# Datasource local storage ignored files +/dataSources/ +/dataSources.local.xml diff --git a/.idea/agentops.iml b/.idea/agentops.iml new file mode 100644 index 000000000..519512493 --- /dev/null +++ b/.idea/agentops.iml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 000000000..105ce2da2 --- /dev/null +++ b/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 000000000..35f08d372 --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,7 @@ + + + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 000000000..4cf190440 --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of 
file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 000000000..35eb1ddfb --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/agentops/langchain_callback_handler.py b/agentops/langchain_callback_handler.py new file mode 100644 index 000000000..cf132bfd5 --- /dev/null +++ b/agentops/langchain_callback_handler.py @@ -0,0 +1,51 @@ +from typing import Dict, Any, List, Optional +from uuid import UUID + +from langchain_core.outputs import LLMResult + +from agentops import Client as AOClient +from agentops import Event + +from langchain.callbacks.base import BaseCallbackHandler + +from agentops.helpers import get_ISO_time + + +class LangchainCallbackHandler(BaseCallbackHandler): + + def __init__(self): + self.ao_client = AOClient() + self.ao_client.start_session() + + # keypair + self.events = {} + + def on_llm_start( + self, + serialized: Dict[str, Any], + prompts: List[str], + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + tags: Optional[List[str]] = None, + metadata: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> Any: + self.events[run_id] = Event( + event_type="langchain_llm", + tags=tags, + prompt="\n--\n".join(prompts), + init_timestamp=get_ISO_time() + ) + + def on_llm_end( + self, + response: LLMResult, + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + **kwargs: Any, + ) -> Any: + self.events[run_id].end_timestamp = get_ISO_time() + self.ao_client.record(self.events[run_id]) + diff --git a/requirements.txt b/requirements.txt index 45f6e9f64..0d62c32f5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,5 @@ pytest==7.4.0 requests==2.31.0 requests-mock==1.11.0 -pydantic==2.4.2 \ No newline at end of file +pydantic==2.4.2 +langchain==0.0.350 \ No newline at end of file From 5755a5d24b2f68cb3ca4fff4ab0a2af0dbe6b99c Mon Sep 17 00:00:00 2001 From: Braelyn Boynton Date: Fri, 15 Dec 2023 11:23:08 -0600 Subject: [PATCH 02/15] langchain callback handler 
--- agentops/langchain_callback_handler.py | 6 +- examples/langchain_examples.ipynb | 644 +++++++++++++++++++++++++ 2 files changed, 647 insertions(+), 3 deletions(-) create mode 100644 examples/langchain_examples.ipynb diff --git a/agentops/langchain_callback_handler.py b/agentops/langchain_callback_handler.py index cf132bfd5..c6c6b51b2 100644 --- a/agentops/langchain_callback_handler.py +++ b/agentops/langchain_callback_handler.py @@ -13,9 +13,9 @@ class LangchainCallbackHandler(BaseCallbackHandler): - def __init__(self): - self.ao_client = AOClient() - self.ao_client.start_session() + def __init__(self, api_key: str, tags: [str] = None): + self.ao_client = AOClient(api_key=api_key) + self.ao_client.start_session(tags) # keypair self.events = {} diff --git a/examples/langchain_examples.ipynb b/examples/langchain_examples.ipynb new file mode 100644 index 000000000..c62c1b767 --- /dev/null +++ b/examples/langchain_examples.ipynb @@ -0,0 +1,644 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "initial_id", + "metadata": { + "collapsed": true, + "ExecuteTime": { + "end_time": "2023-12-15T17:20:48.236786Z", + "start_time": "2023-12-15T17:20:46.393933Z" + } + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Library/Python/3.9/site-packages/urllib3/__init__.py:34: NotOpenSSLWarning: urllib3 v2 only supports OpenSSL 1.1.1+, currently the 'ssl' module is compiled with 'LibreSSL 2.8.3'. 
See: https://github.com/urllib3/urllib3/issues/3020\n", + " warnings.warn(\n" + ] + } + ], + "source": [ + "from agentops.langchain_callback_handler import LangchainCallbackHandler\n", + "import os\n", + "# from langchain.llms import OpenAI\n", + "from langchain.chat_models import ChatOpenAI\n", + "from langchain.agents import initialize_agent, AgentType\n", + "from dotenv import load_dotenv" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "outputs": [], + "source": [ + "load_dotenv()\n", + "OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')\n", + "AGENTOPS_API_KEY = os.getenv('AGENTOPS_API_KEY')" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2023-12-15T17:20:48.241802Z", + "start_time": "2023-12-15T17:20:48.237524Z" + } + }, + "id": "1490411415d7317c" + }, + { + "cell_type": "code", + "execution_count": 3, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "created callback handler\n" + ] + } + ], + "source": [ + "# handler = LangchainCallbackHandler(tags=['Langchain Example'])\n", + "handler = LangchainCallbackHandler(api_key=AGENTOPS_API_KEY)\n", + "llm = ChatOpenAI(openai_api_key=OPENAI_API_KEY, callbacks=[handler])\n", + "# llm = ChatOpenAI(openai_api_key=OPENAI_API_KEY)" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2023-12-15T17:20:49.466217Z", + "start_time": "2023-12-15T17:20:48.240866Z" + } + }, + "id": "432921383f39c9d5" + }, + { + "cell_type": "code", + "execution_count": 4, + "outputs": [], + "source": [ + "agent = initialize_agent([],\n", + " llm,\n", + " agent=AgentType.CHAT_ZERO_SHOT_REACT_DESCRIPTION,\n", + " verbose=True,\n", + " handle_parsing_errors=True)" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2023-12-15T17:20:49.470481Z", + "start_time": "2023-12-15T17:20:49.468025Z" + } + }, + "id": "d538b20aa954ee80" + }, + { + "cell_type": "code", + "execution_count": 5, + "outputs": [ + { + "name": "stdout", + 
"output_type": "stream", + "text": [ + "\n", + "\n", + "\u001B[1m> Entering new AgentExecutor chain...\u001B[0m\n", + "on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I'm sorry, I don't have access to real-time data about movie showtimes. However, you can check online movie ticketing platforms or cinema websites to find out what movies are currently playing.\u001B[0m\n", + "Observation: Invalid or incomplete response\n", + "Thought:on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I need to find a tool that can provide real-time information about movie showtimes.\u001B[0m\n", + "Observation: Invalid or incomplete response\n", + "Thought:on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I need to find a tool that can provide real-time information about movie showtimes.\u001B[0m\n", + "Observation: Invalid or incomplete response\n", + "Thought:on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I need to find a tool that can provide real-time information about movie showtimes.\u001B[0m\n", + "Observation: Invalid or incomplete response\n", + "Thought:on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I need to find a tool that can provide real-time information about movie showtimes.\u001B[0m\n", + "Observation: Invalid or incomplete response\n", + "Thought:on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I need to find a tool that can provide real-time information about movie showtimes.\u001B[0m\n", + "Observation: Invalid or incomplete response\n", + "Thought:on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mI need to find a tool that can provide real-time information about movie showtimes.\n", + "Action:\n", + "```\n", + "{\n", + " \"action\": \"movie_showtimes\",\n", + " \"action_input\": \"\"\n", + "}\n", + "```\u001B[0m\n", + "Observation: movie_showtimes is not a valid tool, 
try one of [].\n", + "Thought:on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mI need to find a different tool that can provide real-time information about movie showtimes.\n", + "Action:\n", + "```\n", + "{\n", + " \"action\": \"movie_showtimes\",\n", + " \"action_input\": \"\"\n", + "}\n", + "```\n", + "\u001B[0m\n", + "Observation: movie_showtimes is not a valid tool, try one of [].\n", + "Thought:on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mI need to find a different tool that can provide real-time information about movie showtimes.\n", + "Action:\n", + "```\n", + "{\n", + " \"action\": \"movie_showtimes_api\",\n", + " \"action_input\": \"\"\n", + "}\n", + "```\n", + "\u001B[0m\n", + "Observation: movie_showtimes_api is not a valid tool, try one of [].\n", + "Thought:on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mI need to find a different tool that can provide real-time information about movie showtimes.\n", + "Action:\n", + "```\n", + "{\n", + " \"action\": \"movie_showtimes_api\",\n", + " \"action_input\": \"\"\n", + "}\n", + "```\n", + "\n", + "\u001B[0m\n", + "Observation: movie_showtimes_api is not a valid tool, try one of [].\n", + "Thought:on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mI need to find a different tool that can provide real-time information about movie showtimes.\n", + "Action:\n", + "```\n", + "{\n", + " \"action\": \"movie_showtimes_online\",\n", + " \"action_input\": \"\"\n", + "}\n", + "```\u001B[0m\n", + "Observation: movie_showtimes_online is not a valid tool, try one of [].\n", + "Thought:on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mI need to find a different tool that can provide real-time information about movie showtimes.\n", + "Action:\n", + "```\n", + "{\n", + " \"action\": \"movie_showtimes_online\",\n", + " \"action_input\": \"\"\n", + "}\n", + "```\u001B[0m\n", + "Observation: movie_showtimes_online is not a valid tool, try one of [].\n", + "Thought:on start\n", + "on end\n", + 
"\u001B[32;1m\u001B[1;3mI need to find a different tool that can provide real-time information about movie showtimes.\n", + "Action:\n", + "```\n", + "{\n", + " \"action\": \"movie_showtimes_website\",\n", + " \"action_input\": \"\"\n", + "}\n", + "```\u001B[0m\n", + "Observation: movie_showtimes_website is not a valid tool, try one of [].\n", + "Thought:on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I apologize, but I don't have access to real-time data about movie showtimes. However, you can check online movie ticketing platforms or cinema websites to find out what movies are currently playing.\u001B[0m\n", + "Observation: Invalid or incomplete response\n", + "Thought:on start\n", + "on end\n", + "\u001B[32;1m\u001B[1;3mI apologize, but I don't have access to real-time data about movie showtimes. However, you can check online movie ticketing platforms or cinema websites to find out what movies are currently playing.\n", + "Final Answer: You can check online movie ticketing platforms or cinema websites to find out what movies are currently playing.\u001B[0m\n", + "\n", + "\u001B[1m> Finished chain.\u001B[0m\n" + ] + }, + { + "data": { + "text/plain": "'You can check online movie ticketing platforms or cinema websites to find out what movies are currently playing.'" + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "agent.run(\"What movies are playing?\")" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2023-12-15T17:21:14.951663Z", + "start_time": "2023-12-15T17:20:49.473183Z" + } + }, + "id": "6dfb127553751384" + }, + { + "cell_type": "code", + "execution_count": 6, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "\u001B[1m> Entering new AgentExecutor chain...\u001B[0m\n", + "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I can use the `find_movie` tool to find the available movies.\u001B[0m\n", 
+ "Observation: Invalid or incomplete response\n", + "Thought:\u001B[32;1m\u001B[1;3mCould not parse LLM output: I need to use the `find_movie` tool to find the available movies.\u001B[0m\n", + "Observation: Invalid or incomplete response\n", + "Thought:\u001B[32;1m\u001B[1;3mI can use the `find_movie` tool to find the available movies.\n", + "Action:\n", + "```\n", + "{\n", + " \"action\": \"find_movie\",\n", + " \"action_input\": \"\"\n", + "}\n", + "```\u001B[0m\n", + "Observation: \u001B[36;1m\u001B[1;3mCitizen Kane\u001B[0m\n", + "Thought:\u001B[32;1m\u001B[1;3mThe movie \"Citizen Kane\" is currently playing.\n", + "Final Answer: Citizen Kane\u001B[0m\n", + "\n", + "\u001B[1m> Finished chain.\u001B[0m\n" + ] + }, + { + "data": { + "text/plain": "'Citizen Kane'" + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from langchain.agents import AgentType, initialize_agent, load_tools\n", + "from langchain.llms import OpenAI\n", + "from langchain.chat_models import ChatOpenAI\n", + "from langchain.agents import tool\n", + "\n", + "chat_model = ChatOpenAI(temperature=0,\n", + " model='gpt-3.5-turbo',\n", + " openai_api_key='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')\n", + "\n", + "\n", + "@tool\n", + "def find_movie(term) -> str:\n", + " \"\"\"Find available movies\"\"\"\n", + " return 'Citizen Kane'\n", + "\n", + "\n", + "tools = [find_movie]\n", + "\n", + "\n", + "agent = initialize_agent(tools,\n", + " chat_model,\n", + " agent=AgentType.CHAT_ZERO_SHOT_REACT_DESCRIPTION,\n", + " verbose=True,\n", + " handle_parsing_errors=True)\n", + "agent.run(\"What movies are playing?\")\n" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2023-12-15T17:21:21.188515Z", + "start_time": "2023-12-15T17:21:14.951779Z" + } + }, + "id": "8d539e0dcf2ec2f3" + }, + { + "cell_type": "code", + "execution_count": 7, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + 
"absl-py==1.4.0\r\n", + "accelerate==0.21.0\r\n", + "addict==2.4.0\r\n", + "agentops==0.0.7\r\n", + "aiofiles==23.1.0\r\n", + "aiohttp==3.8.4\r\n", + "aiosignal==1.3.1\r\n", + "aiostream==0.4.5\r\n", + "albumentations==1.3.1\r\n", + "aliyun-python-sdk-core==2.13.36\r\n", + "aliyun-python-sdk-kms==2.16.2\r\n", + "altair==5.0.1\r\n", + "antlr4-python3-runtime==4.9.3\r\n", + "anyio==3.6.2\r\n", + "APScheduler==3.8.0\r\n", + "arrow==1.2.3\r\n", + "asgiref==3.6.0\r\n", + "async-timeout==4.0.2\r\n", + "attrs==22.2.0\r\n", + "auth0-python==3.24.1\r\n", + "autotrain-advanced==0.6.15\r\n", + "backoff==2.2.1\r\n", + "beautifulsoup4==4.10.0\r\n", + "bitsandbytes==0.41.1\r\n", + "Brotli==1.0.9\r\n", + "build==1.0.3\r\n", + "CacheControl==0.13.1\r\n", + "cachetools==5.3.1\r\n", + "certifi==2022.12.7\r\n", + "cffi==1.15.1\r\n", + "cfgv==3.4.0\r\n", + "charset-normalizer==3.1.0\r\n", + "chroma-hnswlib==0.7.2\r\n", + "chromadb==0.4.6\r\n", + "cleo==2.1.0\r\n", + "click==8.1.3\r\n", + "cloudpickle==2.2.1\r\n", + "codecarbon==2.2.3\r\n", + "coloredlogs==15.0.1\r\n", + "contourpy==1.1.0\r\n", + "crashtest==0.4.1\r\n", + "crcmod==1.7\r\n", + "cryptography==41.0.3\r\n", + "cycler==0.11.0\r\n", + "datasets==2.8.0\r\n", + "diffusers @ git+https://github.com/huggingface/diffusers.git@f0725c5845b150b3042e9e98b1cda154d3bef5b5\r\n", + "dill==0.3.6\r\n", + "distlib==0.3.7\r\n", + "distro==1.8.0\r\n", + "dnspython==2.3.0\r\n", + "docker==6.0.1\r\n", + "dulwich==0.21.7\r\n", + "einops==0.6.1\r\n", + "evaluate==0.3.0\r\n", + "fastapi==0.99.1\r\n", + "fastjsonschema==2.19.0\r\n", + "ffmpy==0.3.1\r\n", + "filelock==3.12.2\r\n", + "Flask==2.2.3\r\n", + "flatbuffers==23.5.26\r\n", + "fonttools==4.42.0\r\n", + "frozenlist==1.3.3\r\n", + "fsspec==2023.6.0\r\n", + "ftfy==6.1.1\r\n", + "future==0.18.3\r\n", + "fuzzywuzzy==0.18.0\r\n", + "gast==0.5.4\r\n", + "gevent==23.7.0\r\n", + "geventhttpclient==2.0.2\r\n", + "google-auth==2.22.0\r\n", + "google-auth-oauthlib==1.0.0\r\n", + "gradio==3.39.0\r\n", + 
"gradio_client==0.3.0\r\n", + "greenlet==2.0.2\r\n", + "grpcio==1.56.2\r\n", + "grpclib==0.4.3\r\n", + "h11==0.14.0\r\n", + "h2==4.1.0\r\n", + "hpack==4.0.0\r\n", + "httpcore==0.17.3\r\n", + "httptools==0.6.1\r\n", + "httpx==0.24.1\r\n", + "huggingface-hub==0.16.4\r\n", + "humanfriendly==10.0\r\n", + "hyperframe==6.0.1\r\n", + "identify==2.5.32\r\n", + "idna==3.4\r\n", + "imageio==2.31.1\r\n", + "importlib-metadata==6.8.0\r\n", + "importlib-resources==6.1.1\r\n", + "installer==0.7.0\r\n", + "invisible-watermark==0.2.0\r\n", + "ipadic==1.0.0\r\n", + "itsdangerous==2.1.2\r\n", + "jaraco.classes==3.3.0\r\n", + "Jinja2==3.0.3\r\n", + "jiwer==3.0.2\r\n", + "jmespath==0.10.0\r\n", + "joblib==1.3.1\r\n", + "jsonplus==0.8.0\r\n", + "jsonschema==4.19.0\r\n", + "jsonschema-specifications==2023.7.1\r\n", + "keyring==24.3.0\r\n", + "kiwisolver==1.4.4\r\n", + "lazy_loader==0.3\r\n", + "lightning-utilities==0.9.0\r\n", + "linkify-it-py==2.0.2\r\n", + "loguru==0.7.0\r\n", + "Markdown==3.4.4\r\n", + "markdown-it-py==2.2.0\r\n", + "MarkupSafe==2.1.2\r\n", + "matplotlib==3.7.2\r\n", + "mdit-py-plugins==0.3.3\r\n", + "mdurl==0.1.2\r\n", + "modal-client==0.48.1686\r\n", + "modelscope==1.4.2\r\n", + "monotonic==1.6\r\n", + "more-itertools==10.1.0\r\n", + "mpmath==1.3.0\r\n", + "msgpack==1.0.7\r\n", + "multidict==6.0.4\r\n", + "multiprocess==0.70.14\r\n", + "mutagen==1.47.0\r\n", + "networkx==3.1\r\n", + "nodeenv==1.8.0\r\n", + "numpy==1.26.2\r\n", + "oauthlib==3.1.1\r\n", + "omegaconf==2.3.0\r\n", + "onnxruntime==1.16.3\r\n", + "open-clip-torch==2.20.0\r\n", + "openai==1.3.9\r\n", + "opencv-python==4.8.0.74\r\n", + "opencv-python-headless==4.8.0.74\r\n", + "orjson==3.9.3\r\n", + "oss2==2.18.1\r\n", + "overrides==7.4.0\r\n", + "packaging==23.1\r\n", + "pandas==2.0.3\r\n", + "peft @ git+https://github.com/huggingface/peft.git@ed396a69ed6469be87f90e739f98f19ec9973983\r\n", + "pexpect==4.9.0\r\n", + "Pillow==10.0.0\r\n", + "pkginfo==1.9.6\r\n", + "platformdirs==3.10.0\r\n", + 
"poetry==1.7.1\r\n", + "poetry-core==1.8.1\r\n", + "poetry-plugin-export==1.6.0\r\n", + "posthog==3.1.0\r\n", + "pre-commit==3.5.0\r\n", + "protobuf==3.20.3\r\n", + "psutil==5.9.5\r\n", + "ptyprocess==0.7.0\r\n", + "pulsar-client==3.3.0\r\n", + "py-cpuinfo==8.0.0\r\n", + "pyarrow==12.0.1\r\n", + "pyasn1==0.5.0\r\n", + "pyasn1-modules==0.3.0\r\n", + "pycparser==2.21\r\n", + "pycryptodome==3.19.0\r\n", + "pycryptodomex==3.19.0\r\n", + "pydantic==1.10.11\r\n", + "pydub==0.25.1\r\n", + "Pygments==2.14.0\r\n", + "PyJWT==2.8.0\r\n", + "pymongo==4.3.3\r\n", + "pynvml==11.5.0\r\n", + "pyparsing==3.0.9\r\n", + "PyPika==0.48.9\r\n", + "pyproject_hooks==1.0.0\r\n", + "PySocks==1.7.1\r\n", + "python-dateutil==2.8.2\r\n", + "python-dotenv==1.0.0\r\n", + "python-http-client==3.3.7\r\n", + "python-multipart==0.0.6\r\n", + "python-rapidjson==1.10\r\n", + "python-twitter==3.5\r\n", + "pytorch-lightning==2.0.9\r\n", + "pytz==2023.3\r\n", + "PyWavelets==1.4.1\r\n", + "PyYAML==6.0.1\r\n", + "qudida==0.0.4\r\n", + "rapidfuzz==3.5.2\r\n", + "referencing==0.30.2\r\n", + "regex==2023.6.3\r\n", + "requests==2.28.2\r\n", + "requests-oauthlib==1.3.0\r\n", + "requests-toolbelt==1.0.0\r\n", + "responses==0.18.0\r\n", + "rich==13.3.5\r\n", + "rpds-py==0.9.2\r\n", + "rsa==4.9\r\n", + "sacremoses==0.0.53\r\n", + "safetensors==0.3.1\r\n", + "scikit-image==0.21.0\r\n", + "scikit-learn==1.3.0\r\n", + "scipy==1.11.1\r\n", + "secure-smtplib==0.1.1\r\n", + "selenium==3.141.0\r\n", + "semantic-version==2.10.0\r\n", + "sendgrid==6.10.0\r\n", + "sentencepiece==0.1.99\r\n", + "shellingham==1.5.4\r\n", + "shortuuid==1.0.11\r\n", + "simplejson==3.19.1\r\n", + "six==1.16.0\r\n", + "sniffio==1.3.0\r\n", + "sortedcontainers==2.4.0\r\n", + "soupsieve==2.4.1\r\n", + "starkbank-ecdsa==2.2.0\r\n", + "starlette==0.27.0\r\n", + "stripe==5.4.0\r\n", + "sympy==1.12\r\n", + "synchronicity==0.3.1\r\n", + "tabulate==0.9.0\r\n", + "tblib==1.7.0\r\n", + "tensorboard==2.13.0\r\n", + "tensorboard-data-server==0.7.1\r\n", + 
"textual==0.32.0\r\n", + "threadpoolctl==3.2.0\r\n", + "tifffile==2023.7.18\r\n", + "tiktoken==0.3.3\r\n", + "timm==0.9.7\r\n", + "titan-iris==0.19.25\r\n", + "tokenizers==0.13.3\r\n", + "toml==0.10.2\r\n", + "tomli==2.0.1\r\n", + "tomlkit==0.12.3\r\n", + "toolz==0.12.0\r\n", + "torch==2.0.1\r\n", + "torchmetrics==1.1.2\r\n", + "torchvision==0.15.2\r\n", + "tqdm==4.66.1\r\n", + "transformers @ git+https://github.com/huggingface/transformers.git@080a97119c0dabfd0fb5c3e26a872ad2958e4f77\r\n", + "tritonclient==2.30.0\r\n", + "trl @ git+https://github.com/lvwerra/trl.git@77b0cc17071c7aea5b5a5794a6e3e3ccc9332acd\r\n", + "trogon==0.2.1\r\n", + "trove-classifiers==2023.11.29\r\n", + "tweepy==3.10.0\r\n", + "typeguard==3.0.2\r\n", + "typer==0.7.0\r\n", + "types-certifi==2021.10.8.3\r\n", + "types-toml==0.10.8.5\r\n", + "typing_extensions==4.5.0\r\n", + "tzdata==2023.3\r\n", + "tzlocal==2.1\r\n", + "uc-micro-py==1.0.2\r\n", + "urllib3==1.26.15\r\n", + "uvicorn==0.24.0.post1\r\n", + "uvloop==0.19.0\r\n", + "virtualenv==20.25.0\r\n", + "watchfiles==0.18.1\r\n", + "wcwidth==0.2.6\r\n", + "websocket-client==1.6.1\r\n", + "websockets==11.0.3\r\n", + "Werkzeug==2.3.6\r\n", + "wget==3.2\r\n", + "xattr==0.10.1\r\n", + "xxhash==3.3.0\r\n", + "yapf==0.40.1\r\n", + "yarl==1.8.2\r\n", + "yt-dlp==2023.10.13\r\n", + "zipp==3.15.0\r\n", + "zope.event==5.0\r\n", + "zope.interface==6.0\r\n" + ] + } + ], + "source": [ + "!pip3 freeze" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2023-12-15T17:21:22.241827Z", + "start_time": "2023-12-15T17:21:21.186704Z" + } + }, + "id": "3642854c2a0f921d" + }, + { + "cell_type": "code", + "execution_count": 7, + "outputs": [], + "source": [], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2023-12-15T17:21:22.245798Z", + "start_time": "2023-12-15T17:21:22.242907Z" + } + }, + "id": "9e763f402787aba1" + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + 
"name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From 2dbdbf84a2cc709135796f156a0c63f805ae2165 Mon Sep 17 00:00:00 2001 From: Braelyn Boynton Date: Fri, 15 Dec 2023 12:01:43 -0600 Subject: [PATCH 03/15] session id property and notebook narration --- agentops/langchain_callback_handler.py | 3 + examples/langchain_examples.ipynb | 624 +++++-------------------- 2 files changed, 120 insertions(+), 507 deletions(-) diff --git a/agentops/langchain_callback_handler.py b/agentops/langchain_callback_handler.py index c6c6b51b2..313202ef2 100644 --- a/agentops/langchain_callback_handler.py +++ b/agentops/langchain_callback_handler.py @@ -49,3 +49,6 @@ def on_llm_end( self.events[run_id].end_timestamp = get_ISO_time() self.ao_client.record(self.events[run_id]) + @property + def session_id(self): + return self.ao_client.session.session_id diff --git a/examples/langchain_examples.ipynb b/examples/langchain_examples.ipynb index c62c1b767..8ff65714a 100644 --- a/examples/langchain_examples.ipynb +++ b/examples/langchain_examples.ipynb @@ -1,5 +1,19 @@ { "cells": [ + { + "cell_type": "markdown", + "source": [ + "# AgentOps Langchain Agent Implementation\n", + "\n", + "Using AgentOps monitoring with Langchain is simple. 
We've created a LangchainCallbackHandler that will do all of the heavy lifting!\n", + "\n", + "First we'll import the typical Langchain packages:" + ], + "metadata": { + "collapsed": false + }, + "id": "e0deea1ab1db2a19" + }, { "cell_type": "code", "execution_count": 1, @@ -7,8 +21,8 @@ "metadata": { "collapsed": true, "ExecuteTime": { - "end_time": "2023-12-15T17:20:48.236786Z", - "start_time": "2023-12-15T17:20:46.393933Z" + "end_time": "2023-12-15T18:00:15.853166Z", + "start_time": "2023-12-15T18:00:14.651483Z" } }, "outputs": [ @@ -22,18 +36,52 @@ } ], "source": [ - "from agentops.langchain_callback_handler import LangchainCallbackHandler\n", "import os\n", - "# from langchain.llms import OpenAI\n", "from langchain.chat_models import ChatOpenAI\n", "from langchain.agents import initialize_agent, AgentType\n", "from dotenv import load_dotenv" ] }, + { + "cell_type": "markdown", + "source": [ + "The only difference with using AgentOps is that we'll also import this special Callback Handler" + ], + "metadata": { + "collapsed": false + }, + "id": "57ddb8eca4e8a3cb" + }, { "cell_type": "code", "execution_count": 2, "outputs": [], + "source": [ + "from agentops.langchain_callback_handler import LangchainCallbackHandler" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2023-12-15T18:00:15.861201Z", + "start_time": "2023-12-15T18:00:15.854596Z" + } + }, + "id": "585f00bb186711a7" + }, + { + "cell_type": "markdown", + "source": [ + "Next, we'll grab our two API keys. 
You can use dotenv like below or however else you like to load environment variables" + ], + "metadata": { + "collapsed": false + }, + "id": "523be945b85dc5d5" + }, + { + "cell_type": "code", + "execution_count": 3, + "outputs": [], "source": [ "load_dotenv()\n", "OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')\n", @@ -42,42 +90,63 @@ "metadata": { "collapsed": false, "ExecuteTime": { - "end_time": "2023-12-15T17:20:48.241802Z", - "start_time": "2023-12-15T17:20:48.237524Z" + "end_time": "2023-12-15T18:00:15.865514Z", + "start_time": "2023-12-15T18:00:15.861478Z" } }, "id": "1490411415d7317c" }, + { + "cell_type": "markdown", + "source": [ + "This is where AgentOps comes into play. Before creating our LLM instance via Langchain, first we'll create an instance of the AO LangchainCallbackHandler.\n", + "\n", + "Pass in your API key, and optionally any tags to describe this session for easier lookup in the AO dashboard. You can also retrieve the `session_id` of the newly created session." + ], + "metadata": { + "collapsed": false + }, + "id": "8371ec020e634dd0" + }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "created callback handler\n" + "Agent Ops session ID: 3c9deaa9-577c-49ac-9783-e7d0b0fecfbd\n" ] } ], "source": [ - "# handler = LangchainCallbackHandler(tags=['Langchain Example'])\n", - "handler = LangchainCallbackHandler(api_key=AGENTOPS_API_KEY)\n", - "llm = ChatOpenAI(openai_api_key=OPENAI_API_KEY, callbacks=[handler])\n", - "# llm = ChatOpenAI(openai_api_key=OPENAI_API_KEY)" + "handler = LangchainCallbackHandler(api_key=AGENTOPS_API_KEY, tags=['Langchain Example'])\n", + "print(\"Agent Ops session ID: \" + handler.session_id)\n", + "llm = ChatOpenAI(openai_api_key=OPENAI_API_KEY, callbacks=[handler])" ], "metadata": { "collapsed": false, "ExecuteTime": { - "end_time": "2023-12-15T17:20:49.466217Z", - "start_time": "2023-12-15T17:20:48.240866Z" + "end_time": 
"2023-12-15T18:00:16.808358Z", + "start_time": "2023-12-15T18:00:15.865350Z" } }, "id": "432921383f39c9d5" }, + { + "cell_type": "markdown", + "source": [ + "Finally, let's use our agent! All of the actions will be recorded in the AO Dashboard" + ], + "metadata": { + "collapsed": false + }, + "id": "58bbca0b49302b2b" + }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "outputs": [], "source": [ "agent = initialize_agent([],\n", @@ -89,15 +158,15 @@ "metadata": { "collapsed": false, "ExecuteTime": { - "end_time": "2023-12-15T17:20:49.470481Z", - "start_time": "2023-12-15T17:20:49.468025Z" + "end_time": "2023-12-15T18:00:16.812313Z", + "start_time": "2023-12-15T18:00:16.809585Z" } }, "id": "d538b20aa954ee80" }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "outputs": [ { "name": "stdout", @@ -106,177 +175,42 @@ "\n", "\n", "\u001B[1m> Entering new AgentExecutor chain...\u001B[0m\n", - "on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I'm sorry, I don't have access to real-time data about movie showtimes. 
However, you can check online movie ticketing platforms or cinema websites to find out what movies are currently playing.\u001B[0m\n", - "Observation: Invalid or incomplete response\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I need to find a tool that can provide real-time information about movie showtimes.\u001B[0m\n", - "Observation: Invalid or incomplete response\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I need to find a tool that can provide real-time information about movie showtimes.\u001B[0m\n", - "Observation: Invalid or incomplete response\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I need to find a tool that can provide real-time information about movie showtimes.\u001B[0m\n", - "Observation: Invalid or incomplete response\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I need to find a tool that can provide real-time information about movie showtimes.\u001B[0m\n", - "Observation: Invalid or incomplete response\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I need to find a tool that can provide real-time information about movie showtimes.\u001B[0m\n", - "Observation: Invalid or incomplete response\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mI need to find a tool that can provide real-time information about movie showtimes.\n", - "Action:\n", - "```\n", - "{\n", - " \"action\": \"movie_showtimes\",\n", - " \"action_input\": \"\"\n", - "}\n", - "```\u001B[0m\n", - "Observation: movie_showtimes is not a valid tool, try one of [].\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mI need to find a different tool that can provide real-time information about movie showtimes.\n", - "Action:\n", - "```\n", - "{\n", - " \"action\": \"movie_showtimes\",\n", - " 
\"action_input\": \"\"\n", - "}\n", - "```\n", - "\u001B[0m\n", - "Observation: movie_showtimes is not a valid tool, try one of [].\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mI need to find a different tool that can provide real-time information about movie showtimes.\n", - "Action:\n", - "```\n", - "{\n", - " \"action\": \"movie_showtimes_api\",\n", - " \"action_input\": \"\"\n", - "}\n", - "```\n", - "\u001B[0m\n", - "Observation: movie_showtimes_api is not a valid tool, try one of [].\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mI need to find a different tool that can provide real-time information about movie showtimes.\n", - "Action:\n", - "```\n", - "{\n", - " \"action\": \"movie_showtimes_api\",\n", - " \"action_input\": \"\"\n", - "}\n", - "```\n", + "\u001B[32;1m\u001B[1;3mSure! Here's a short poem about secret agents:\n", "\n", - "\u001B[0m\n", - "Observation: movie_showtimes_api is not a valid tool, try one of [].\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mI need to find a different tool that can provide real-time information about movie showtimes.\n", - "Action:\n", - "```\n", - "{\n", - " \"action\": \"movie_showtimes_online\",\n", - " \"action_input\": \"\"\n", - "}\n", - "```\u001B[0m\n", - "Observation: movie_showtimes_online is not a valid tool, try one of [].\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mI need to find a different tool that can provide real-time information about movie showtimes.\n", - "Action:\n", - "```\n", - "{\n", - " \"action\": \"movie_showtimes_online\",\n", - " \"action_input\": \"\"\n", - "}\n", - "```\u001B[0m\n", - "Observation: movie_showtimes_online is not a valid tool, try one of [].\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mI need to find a different tool that can provide real-time information about movie showtimes.\n", - "Action:\n", - "```\n", - "{\n", - " \"action\": 
\"movie_showtimes_website\",\n", - " \"action_input\": \"\"\n", - "}\n", - "```\u001B[0m\n", - "Observation: movie_showtimes_website is not a valid tool, try one of [].\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I apologize, but I don't have access to real-time data about movie showtimes. However, you can check online movie ticketing platforms or cinema websites to find out what movies are currently playing.\u001B[0m\n", - "Observation: Invalid or incomplete response\n", - "Thought:on start\n", - "on end\n", - "\u001B[32;1m\u001B[1;3mI apologize, but I don't have access to real-time data about movie showtimes. However, you can check online movie ticketing platforms or cinema websites to find out what movies are currently playing.\n", - "Final Answer: You can check online movie ticketing platforms or cinema websites to find out what movies are currently playing.\u001B[0m\n", + "In shadows they dwell, unseen by the eye,\n", + "Masters of disguise, their secrets held high.\n", + "They weave through the night, like whispers in air,\n", + "Agents of mystery, with skills beyond compare.\n", "\n", - "\u001B[1m> Finished chain.\u001B[0m\n" - ] - }, - { - "data": { - "text/plain": "'You can check online movie ticketing platforms or cinema websites to find out what movies are currently playing.'" - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "agent.run(\"What movies are playing?\")" - ], - "metadata": { - "collapsed": false, - "ExecuteTime": { - "end_time": "2023-12-15T17:21:14.951663Z", - "start_time": "2023-12-15T17:20:49.473183Z" - } - }, - "id": "6dfb127553751384" - }, - { - "cell_type": "code", - "execution_count": 6, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ + "With steady hands, they handle the unknown,\n", + "Unraveling secrets, their cover not blown.\n", + "They traverse the globe, in search of the truth,\n", + 
"Stealthily moving, like shadows of youth.\n", "\n", + "Their mission, you ask? To protect and defend,\n", + "The world's safety, until the very end.\n", + "With gadgets and wit, they outsmart their foes,\n", + "Silent warriors, where nobody knows.\n", "\n", - "\u001B[1m> Entering new AgentExecutor chain...\u001B[0m\n", - "\u001B[32;1m\u001B[1;3mCould not parse LLM output: I can use the `find_movie` tool to find the available movies.\u001B[0m\n", - "Observation: Invalid or incomplete response\n", - "Thought:\u001B[32;1m\u001B[1;3mCould not parse LLM output: I need to use the `find_movie` tool to find the available movies.\u001B[0m\n", - "Observation: Invalid or incomplete response\n", - "Thought:\u001B[32;1m\u001B[1;3mI can use the `find_movie` tool to find the available movies.\n", - "Action:\n", - "```\n", - "{\n", - " \"action\": \"find_movie\",\n", - " \"action_input\": \"\"\n", - "}\n", - "```\u001B[0m\n", - "Observation: \u001B[36;1m\u001B[1;3mCitizen Kane\u001B[0m\n", - "Thought:\u001B[32;1m\u001B[1;3mThe movie \"Citizen Kane\" is currently playing.\n", - "Final Answer: Citizen Kane\u001B[0m\n", + "In dark alleys they gather, plans tightly knit,\n", + "Their loyalty unwavering, they never quit.\n", + "For justice they fight, with honor and grace,\n", + "A secret agent's life, a dangerous embrace.\n", + "\n", + "So raise your glass to these guardians unseen,\n", + "The heroes of shadows, in suits so pristine.\n", + "They sacrifice all, for a world that's unknown,\n", + "Secret agents, silent heroes, never alone.\n", + "\n", + "Final Answer: Secret agents, guardians of the night,\n", + "A poem to honor their courage and might.\u001B[0m\n", "\n", "\u001B[1m> Finished chain.\u001B[0m\n" ] }, { "data": { - "text/plain": "'Citizen Kane'" + "text/plain": "'Secret agents, guardians of the night,\\nA poem to honor their courage and might.'" }, "execution_count": 6, "metadata": {}, @@ -284,340 +218,16 @@ } ], "source": [ - "from langchain.agents import AgentType, 
initialize_agent, load_tools\n", - "from langchain.llms import OpenAI\n", - "from langchain.chat_models import ChatOpenAI\n", - "from langchain.agents import tool\n", - "\n", - "chat_model = ChatOpenAI(temperature=0,\n", - " model='gpt-3.5-turbo',\n", - " openai_api_key='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')\n", - "\n", - "\n", - "@tool\n", - "def find_movie(term) -> str:\n", - " \"\"\"Find available movies\"\"\"\n", - " return 'Citizen Kane'\n", - "\n", - "\n", - "tools = [find_movie]\n", - "\n", - "\n", - "agent = initialize_agent(tools,\n", - " chat_model,\n", - " agent=AgentType.CHAT_ZERO_SHOT_REACT_DESCRIPTION,\n", - " verbose=True,\n", - " handle_parsing_errors=True)\n", - "agent.run(\"What movies are playing?\")\n" + "agent.run(\"Please write me a short poem about secret agents\")" ], "metadata": { "collapsed": false, "ExecuteTime": { - "end_time": "2023-12-15T17:21:21.188515Z", - "start_time": "2023-12-15T17:21:14.951779Z" + "end_time": "2023-12-15T18:00:22.740852Z", + "start_time": "2023-12-15T18:00:16.812664Z" } }, - "id": "8d539e0dcf2ec2f3" - }, - { - "cell_type": "code", - "execution_count": 7, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "absl-py==1.4.0\r\n", - "accelerate==0.21.0\r\n", - "addict==2.4.0\r\n", - "agentops==0.0.7\r\n", - "aiofiles==23.1.0\r\n", - "aiohttp==3.8.4\r\n", - "aiosignal==1.3.1\r\n", - "aiostream==0.4.5\r\n", - "albumentations==1.3.1\r\n", - "aliyun-python-sdk-core==2.13.36\r\n", - "aliyun-python-sdk-kms==2.16.2\r\n", - "altair==5.0.1\r\n", - "antlr4-python3-runtime==4.9.3\r\n", - "anyio==3.6.2\r\n", - "APScheduler==3.8.0\r\n", - "arrow==1.2.3\r\n", - "asgiref==3.6.0\r\n", - "async-timeout==4.0.2\r\n", - "attrs==22.2.0\r\n", - "auth0-python==3.24.1\r\n", - "autotrain-advanced==0.6.15\r\n", - "backoff==2.2.1\r\n", - "beautifulsoup4==4.10.0\r\n", - "bitsandbytes==0.41.1\r\n", - "Brotli==1.0.9\r\n", - "build==1.0.3\r\n", - "CacheControl==0.13.1\r\n", - "cachetools==5.3.1\r\n", - 
"certifi==2022.12.7\r\n", - "cffi==1.15.1\r\n", - "cfgv==3.4.0\r\n", - "charset-normalizer==3.1.0\r\n", - "chroma-hnswlib==0.7.2\r\n", - "chromadb==0.4.6\r\n", - "cleo==2.1.0\r\n", - "click==8.1.3\r\n", - "cloudpickle==2.2.1\r\n", - "codecarbon==2.2.3\r\n", - "coloredlogs==15.0.1\r\n", - "contourpy==1.1.0\r\n", - "crashtest==0.4.1\r\n", - "crcmod==1.7\r\n", - "cryptography==41.0.3\r\n", - "cycler==0.11.0\r\n", - "datasets==2.8.0\r\n", - "diffusers @ git+https://github.com/huggingface/diffusers.git@f0725c5845b150b3042e9e98b1cda154d3bef5b5\r\n", - "dill==0.3.6\r\n", - "distlib==0.3.7\r\n", - "distro==1.8.0\r\n", - "dnspython==2.3.0\r\n", - "docker==6.0.1\r\n", - "dulwich==0.21.7\r\n", - "einops==0.6.1\r\n", - "evaluate==0.3.0\r\n", - "fastapi==0.99.1\r\n", - "fastjsonschema==2.19.0\r\n", - "ffmpy==0.3.1\r\n", - "filelock==3.12.2\r\n", - "Flask==2.2.3\r\n", - "flatbuffers==23.5.26\r\n", - "fonttools==4.42.0\r\n", - "frozenlist==1.3.3\r\n", - "fsspec==2023.6.0\r\n", - "ftfy==6.1.1\r\n", - "future==0.18.3\r\n", - "fuzzywuzzy==0.18.0\r\n", - "gast==0.5.4\r\n", - "gevent==23.7.0\r\n", - "geventhttpclient==2.0.2\r\n", - "google-auth==2.22.0\r\n", - "google-auth-oauthlib==1.0.0\r\n", - "gradio==3.39.0\r\n", - "gradio_client==0.3.0\r\n", - "greenlet==2.0.2\r\n", - "grpcio==1.56.2\r\n", - "grpclib==0.4.3\r\n", - "h11==0.14.0\r\n", - "h2==4.1.0\r\n", - "hpack==4.0.0\r\n", - "httpcore==0.17.3\r\n", - "httptools==0.6.1\r\n", - "httpx==0.24.1\r\n", - "huggingface-hub==0.16.4\r\n", - "humanfriendly==10.0\r\n", - "hyperframe==6.0.1\r\n", - "identify==2.5.32\r\n", - "idna==3.4\r\n", - "imageio==2.31.1\r\n", - "importlib-metadata==6.8.0\r\n", - "importlib-resources==6.1.1\r\n", - "installer==0.7.0\r\n", - "invisible-watermark==0.2.0\r\n", - "ipadic==1.0.0\r\n", - "itsdangerous==2.1.2\r\n", - "jaraco.classes==3.3.0\r\n", - "Jinja2==3.0.3\r\n", - "jiwer==3.0.2\r\n", - "jmespath==0.10.0\r\n", - "joblib==1.3.1\r\n", - "jsonplus==0.8.0\r\n", - "jsonschema==4.19.0\r\n", - 
"jsonschema-specifications==2023.7.1\r\n", - "keyring==24.3.0\r\n", - "kiwisolver==1.4.4\r\n", - "lazy_loader==0.3\r\n", - "lightning-utilities==0.9.0\r\n", - "linkify-it-py==2.0.2\r\n", - "loguru==0.7.0\r\n", - "Markdown==3.4.4\r\n", - "markdown-it-py==2.2.0\r\n", - "MarkupSafe==2.1.2\r\n", - "matplotlib==3.7.2\r\n", - "mdit-py-plugins==0.3.3\r\n", - "mdurl==0.1.2\r\n", - "modal-client==0.48.1686\r\n", - "modelscope==1.4.2\r\n", - "monotonic==1.6\r\n", - "more-itertools==10.1.0\r\n", - "mpmath==1.3.0\r\n", - "msgpack==1.0.7\r\n", - "multidict==6.0.4\r\n", - "multiprocess==0.70.14\r\n", - "mutagen==1.47.0\r\n", - "networkx==3.1\r\n", - "nodeenv==1.8.0\r\n", - "numpy==1.26.2\r\n", - "oauthlib==3.1.1\r\n", - "omegaconf==2.3.0\r\n", - "onnxruntime==1.16.3\r\n", - "open-clip-torch==2.20.0\r\n", - "openai==1.3.9\r\n", - "opencv-python==4.8.0.74\r\n", - "opencv-python-headless==4.8.0.74\r\n", - "orjson==3.9.3\r\n", - "oss2==2.18.1\r\n", - "overrides==7.4.0\r\n", - "packaging==23.1\r\n", - "pandas==2.0.3\r\n", - "peft @ git+https://github.com/huggingface/peft.git@ed396a69ed6469be87f90e739f98f19ec9973983\r\n", - "pexpect==4.9.0\r\n", - "Pillow==10.0.0\r\n", - "pkginfo==1.9.6\r\n", - "platformdirs==3.10.0\r\n", - "poetry==1.7.1\r\n", - "poetry-core==1.8.1\r\n", - "poetry-plugin-export==1.6.0\r\n", - "posthog==3.1.0\r\n", - "pre-commit==3.5.0\r\n", - "protobuf==3.20.3\r\n", - "psutil==5.9.5\r\n", - "ptyprocess==0.7.0\r\n", - "pulsar-client==3.3.0\r\n", - "py-cpuinfo==8.0.0\r\n", - "pyarrow==12.0.1\r\n", - "pyasn1==0.5.0\r\n", - "pyasn1-modules==0.3.0\r\n", - "pycparser==2.21\r\n", - "pycryptodome==3.19.0\r\n", - "pycryptodomex==3.19.0\r\n", - "pydantic==1.10.11\r\n", - "pydub==0.25.1\r\n", - "Pygments==2.14.0\r\n", - "PyJWT==2.8.0\r\n", - "pymongo==4.3.3\r\n", - "pynvml==11.5.0\r\n", - "pyparsing==3.0.9\r\n", - "PyPika==0.48.9\r\n", - "pyproject_hooks==1.0.0\r\n", - "PySocks==1.7.1\r\n", - "python-dateutil==2.8.2\r\n", - "python-dotenv==1.0.0\r\n", - 
"python-http-client==3.3.7\r\n", - "python-multipart==0.0.6\r\n", - "python-rapidjson==1.10\r\n", - "python-twitter==3.5\r\n", - "pytorch-lightning==2.0.9\r\n", - "pytz==2023.3\r\n", - "PyWavelets==1.4.1\r\n", - "PyYAML==6.0.1\r\n", - "qudida==0.0.4\r\n", - "rapidfuzz==3.5.2\r\n", - "referencing==0.30.2\r\n", - "regex==2023.6.3\r\n", - "requests==2.28.2\r\n", - "requests-oauthlib==1.3.0\r\n", - "requests-toolbelt==1.0.0\r\n", - "responses==0.18.0\r\n", - "rich==13.3.5\r\n", - "rpds-py==0.9.2\r\n", - "rsa==4.9\r\n", - "sacremoses==0.0.53\r\n", - "safetensors==0.3.1\r\n", - "scikit-image==0.21.0\r\n", - "scikit-learn==1.3.0\r\n", - "scipy==1.11.1\r\n", - "secure-smtplib==0.1.1\r\n", - "selenium==3.141.0\r\n", - "semantic-version==2.10.0\r\n", - "sendgrid==6.10.0\r\n", - "sentencepiece==0.1.99\r\n", - "shellingham==1.5.4\r\n", - "shortuuid==1.0.11\r\n", - "simplejson==3.19.1\r\n", - "six==1.16.0\r\n", - "sniffio==1.3.0\r\n", - "sortedcontainers==2.4.0\r\n", - "soupsieve==2.4.1\r\n", - "starkbank-ecdsa==2.2.0\r\n", - "starlette==0.27.0\r\n", - "stripe==5.4.0\r\n", - "sympy==1.12\r\n", - "synchronicity==0.3.1\r\n", - "tabulate==0.9.0\r\n", - "tblib==1.7.0\r\n", - "tensorboard==2.13.0\r\n", - "tensorboard-data-server==0.7.1\r\n", - "textual==0.32.0\r\n", - "threadpoolctl==3.2.0\r\n", - "tifffile==2023.7.18\r\n", - "tiktoken==0.3.3\r\n", - "timm==0.9.7\r\n", - "titan-iris==0.19.25\r\n", - "tokenizers==0.13.3\r\n", - "toml==0.10.2\r\n", - "tomli==2.0.1\r\n", - "tomlkit==0.12.3\r\n", - "toolz==0.12.0\r\n", - "torch==2.0.1\r\n", - "torchmetrics==1.1.2\r\n", - "torchvision==0.15.2\r\n", - "tqdm==4.66.1\r\n", - "transformers @ git+https://github.com/huggingface/transformers.git@080a97119c0dabfd0fb5c3e26a872ad2958e4f77\r\n", - "tritonclient==2.30.0\r\n", - "trl @ git+https://github.com/lvwerra/trl.git@77b0cc17071c7aea5b5a5794a6e3e3ccc9332acd\r\n", - "trogon==0.2.1\r\n", - "trove-classifiers==2023.11.29\r\n", - "tweepy==3.10.0\r\n", - "typeguard==3.0.2\r\n", - 
"typer==0.7.0\r\n", - "types-certifi==2021.10.8.3\r\n", - "types-toml==0.10.8.5\r\n", - "typing_extensions==4.5.0\r\n", - "tzdata==2023.3\r\n", - "tzlocal==2.1\r\n", - "uc-micro-py==1.0.2\r\n", - "urllib3==1.26.15\r\n", - "uvicorn==0.24.0.post1\r\n", - "uvloop==0.19.0\r\n", - "virtualenv==20.25.0\r\n", - "watchfiles==0.18.1\r\n", - "wcwidth==0.2.6\r\n", - "websocket-client==1.6.1\r\n", - "websockets==11.0.3\r\n", - "Werkzeug==2.3.6\r\n", - "wget==3.2\r\n", - "xattr==0.10.1\r\n", - "xxhash==3.3.0\r\n", - "yapf==0.40.1\r\n", - "yarl==1.8.2\r\n", - "yt-dlp==2023.10.13\r\n", - "zipp==3.15.0\r\n", - "zope.event==5.0\r\n", - "zope.interface==6.0\r\n" - ] - } - ], - "source": [ - "!pip3 freeze" - ], - "metadata": { - "collapsed": false, - "ExecuteTime": { - "end_time": "2023-12-15T17:21:22.241827Z", - "start_time": "2023-12-15T17:21:21.186704Z" - } - }, - "id": "3642854c2a0f921d" - }, - { - "cell_type": "code", - "execution_count": 7, - "outputs": [], - "source": [], - "metadata": { - "collapsed": false, - "ExecuteTime": { - "end_time": "2023-12-15T17:21:22.245798Z", - "start_time": "2023-12-15T17:21:22.242907Z" - } - }, - "id": "9e763f402787aba1" + "id": "6dfb127553751384" } ], "metadata": { From dcefdc029b5a631340755a25c9be78cae0549221 Mon Sep 17 00:00:00 2001 From: Braelyn Boynton Date: Fri, 15 Dec 2023 12:03:50 -0600 Subject: [PATCH 04/15] ignore .idea --- .gitignore | 2 +- .idea/.gitignore | 8 -------- .idea/agentops.iml | 17 ----------------- .idea/inspectionProfiles/profiles_settings.xml | 6 ------ .idea/misc.xml | 7 ------- .idea/modules.xml | 8 -------- .idea/vcs.xml | 6 ------ 7 files changed, 1 insertion(+), 53 deletions(-) delete mode 100644 .idea/.gitignore delete mode 100644 .idea/agentops.iml delete mode 100644 .idea/inspectionProfiles/profiles_settings.xml delete mode 100644 .idea/misc.xml delete mode 100644 .idea/modules.xml delete mode 100644 .idea/vcs.xml diff --git a/.gitignore b/.gitignore index eeb33f955..2f39587df 100644 --- a/.gitignore +++ 
b/.gitignore @@ -157,7 +157,7 @@ cython_debug/ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ +.idea/ .vscode/ .benchmarks/ diff --git a/.idea/.gitignore b/.idea/.gitignore deleted file mode 100644 index 13566b81b..000000000 --- a/.idea/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -# Default ignored files -/shelf/ -/workspace.xml -# Editor-based HTTP Client requests -/httpRequests/ -# Datasource local storage ignored files -/dataSources/ -/dataSources.local.xml diff --git a/.idea/agentops.iml b/.idea/agentops.iml deleted file mode 100644 index 519512493..000000000 --- a/.idea/agentops.iml +++ /dev/null @@ -1,17 +0,0 @@ - - - - - - - - - - - - - - \ No newline at end of file diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml deleted file mode 100644 index 105ce2da2..000000000 --- a/.idea/inspectionProfiles/profiles_settings.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml deleted file mode 100644 index 35f08d372..000000000 --- a/.idea/misc.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml deleted file mode 100644 index 4cf190440..000000000 --- a/.idea/modules.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml deleted file mode 100644 index 35eb1ddfb..000000000 --- a/.idea/vcs.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file From 64ae928a44953da8a0f5a5717fd923ee29c46ded Mon Sep 17 00:00:00 2001 From: Braelyn Boynton Date: Fri, 15 Dec 2023 14:30:27 -0600 Subject: [PATCH 05/15] added support for all callbacks --- agentops/event.py | 17 ++- 
agentops/langchain_callback_handler.py | 151 ++++++++++++++++++++++++- examples/langchain_examples.ipynb | 85 +++----------- 3 files changed, 182 insertions(+), 71 deletions(-) diff --git a/agentops/event.py b/agentops/event.py index 1bc0b2abb..50edbfca4 100644 --- a/agentops/event.py +++ b/agentops/event.py @@ -4,6 +4,8 @@ Classes: Event: Represents discrete events to be recorded. """ +import json + from .helpers import get_ISO_time, Models from typing import Optional, List from pydantic import Field @@ -32,7 +34,7 @@ class Event: event_type (str): Type of the event. params (str, optional): The parameters passed to the operation. returns (str, optional): The output of the operation. - result (str): Result of the operation. + result (Result): Result of the operation as Enum Result. action_type (str): Type of action of the event. model (Models, optional): The model used during the event. prompt (str, optional): The input prompt for an LLM call. @@ -65,3 +67,16 @@ def __init__(self, event_type: str, self.prompt = prompt self.end_timestamp = get_ISO_time() self.init_timestamp = init_timestamp if init_timestamp else self.end_timestamp + + def __str__(self): + return str({ + "event_type": self.event_type, + "params": self.params, + "returns": self.returns, + "action_type": self.action_type, + "result": self.result, + "model": self.model, + "prompt": self.prompt, + "tags": self.tags, + "init_timestamp": self.init_timestamp + }) diff --git a/agentops/langchain_callback_handler.py b/agentops/langchain_callback_handler.py index 313202ef2..efd2cacf2 100644 --- a/agentops/langchain_callback_handler.py +++ b/agentops/langchain_callback_handler.py @@ -1,14 +1,17 @@ from typing import Dict, Any, List, Optional from uuid import UUID +from langchain_core.agents import AgentFinish from langchain_core.outputs import LLMResult +from langchain_core.documents import Document from agentops import Client as AOClient from agentops import Event -from langchain.callbacks.base import 
BaseCallbackHandler +from langchain.callbacks.base import BaseCallbackHandler, ChainManagerMixin from agentops.helpers import get_ISO_time +from typing import Any, Dict, List, Optional, Sequence class LangchainCallbackHandler(BaseCallbackHandler): @@ -20,6 +23,7 @@ def __init__(self, api_key: str, tags: [str] = None): # keypair self.events = {} + # LLM Callbacks def on_llm_start( self, serialized: Dict[str, Any], @@ -38,6 +42,19 @@ def on_llm_start( init_timestamp=get_ISO_time() ) + def on_llm_error( + self, + error: BaseException, + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + **kwargs: Any, + ) -> Any: + self.events[run_id].end_timestamp = get_ISO_time() + self.events[run_id].result = "Fail" + + self.ao_client.record(self.events[run_id]) + def on_llm_end( self, response: LLMResult, @@ -47,8 +64,140 @@ def on_llm_end( **kwargs: Any, ) -> Any: self.events[run_id].end_timestamp = get_ISO_time() + + if len(response.generations) > 0: + self.events[run_id].result = "Success" + else: + self.events[run_id].result = "Fail" + + self.ao_client.record(self.events[run_id]) + + # Chain callbacks + def on_chain_start( + self, + outputs: Dict[str, Any], + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + **kwargs: Any, + ) -> Any: + self.events[run_id] = Event( + event_type="langchain_llm_chain", + init_timestamp=get_ISO_time() + ) + + def on_chain_end( + self, + outputs: Dict[str, Any], + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + **kwargs: Any, + ) -> Any: + self.events[run_id].end_timestamp = get_ISO_time() + self.events[run_id].result = "Success" + + self.ao_client.record(self.events[run_id]) + + def on_chain_error( + self, + error: BaseException, + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + **kwargs: Any, + ) -> Any: + self.events[run_id].end_timestamp = get_ISO_time() + self.events[run_id].result = "Fail" + self.ao_client.record(self.events[run_id]) + # Tool callbacks + def on_tool_end( + self, + output: str, + 
*, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + **kwargs: Any, + ) -> Any: + self.ao_client.record(Event( + event_type="langchain_tool_usage", + result="Success", + init_timestamp=get_ISO_time() + )) + + def on_tool_error( + self, + error: BaseException, + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + **kwargs: Any, + ) -> Any: + self.ao_client.record(Event( + event_type="langchain_tool_usage", + result="Fail", + init_timestamp=get_ISO_time() + )) + + # Retriever callbacks + async def on_retriever_start( + self, + serialized: Dict[str, Any], + query: str, + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + tags: Optional[List[str]] = None, + metadata: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> None: + self.events[run_id] = Event( + event_type="langchain_llm_retriever", + init_timestamp=get_ISO_time() + ) + + async def on_retriever_end( + self, + documents: Sequence[Document], + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + tags: Optional[List[str]] = None, + **kwargs: Any, + ) -> None: + self.events[run_id].end_timestamp = get_ISO_time() + self.events[run_id].result = "Successful" + + self.ao_client.record(self.events[run_id]) + + async def on_retriever_error( + self, + error: BaseException, + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + tags: Optional[List[str]] = None, + **kwargs: Any, + ) -> None: + self.events[run_id].end_timestamp = get_ISO_time() + self.events[run_id].result = "Fail" + + self.ao_client.record(self.events[run_id]) + + # Agent callbacks + def on_agent_finish( + self, + finish: AgentFinish, + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + **kwargs: Any, + ) -> Any: + # TODO: Create a way for the end user to set this based on their conditions + self.ao_client.end_session("Success") + @property def session_id(self): return self.ao_client.session.session_id diff --git a/examples/langchain_examples.ipynb b/examples/langchain_examples.ipynb index 
8ff65714a..077ea3317 100644 --- a/examples/langchain_examples.ipynb +++ b/examples/langchain_examples.ipynb @@ -21,8 +21,8 @@ "metadata": { "collapsed": true, "ExecuteTime": { - "end_time": "2023-12-15T18:00:15.853166Z", - "start_time": "2023-12-15T18:00:14.651483Z" + "end_time": "2023-12-15T20:21:11.477270Z", + "start_time": "2023-12-15T20:21:10.289895Z" } }, "outputs": [ @@ -30,7 +30,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Library/Python/3.9/site-packages/urllib3/__init__.py:34: NotOpenSSLWarning: urllib3 v2 only supports OpenSSL 1.1.1+, currently the 'ssl' module is compiled with 'LibreSSL 2.8.3'. See: https://github.com/urllib3/urllib3/issues/3020\n", + "/Users/braelynboynton/Developer/agentops/venv/lib/python3.9/site-packages/urllib3/__init__.py:34: NotOpenSSLWarning: urllib3 v2 only supports OpenSSL 1.1.1+, currently the 'ssl' module is compiled with 'LibreSSL 2.8.3'. See: https://github.com/urllib3/urllib3/issues/3020\n", " warnings.warn(\n" ] } @@ -62,8 +62,8 @@ "metadata": { "collapsed": false, "ExecuteTime": { - "end_time": "2023-12-15T18:00:15.861201Z", - "start_time": "2023-12-15T18:00:15.854596Z" + "end_time": "2023-12-15T20:21:11.478111Z", + "start_time": "2023-12-15T20:21:11.471462Z" } }, "id": "585f00bb186711a7" @@ -90,8 +90,8 @@ "metadata": { "collapsed": false, "ExecuteTime": { - "end_time": "2023-12-15T18:00:15.865514Z", - "start_time": "2023-12-15T18:00:15.861478Z" + "end_time": "2023-12-15T20:21:11.494019Z", + "start_time": "2023-12-15T20:21:11.479154Z" } }, "id": "1490411415d7317c" @@ -116,7 +116,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Agent Ops session ID: 3c9deaa9-577c-49ac-9783-e7d0b0fecfbd\n" + "Agent Ops session ID: 0feeb023-d8eb-4eae-9c77-29c2a3c88b38\n" ] } ], @@ -128,8 +128,8 @@ "metadata": { "collapsed": false, "ExecuteTime": { - "end_time": "2023-12-15T18:00:16.808358Z", - "start_time": "2023-12-15T18:00:15.865350Z" + "end_time": "2023-12-15T20:21:12.346995Z", + "start_time": 
"2023-12-15T20:21:11.483591Z" } }, "id": "432921383f39c9d5" @@ -137,7 +137,7 @@ { "cell_type": "markdown", "source": [ - "Finally, let's use our agent! All of the actions will be recorded in the AO Dashboard" + "Finally, let's use our agent! All the actions will be recorded in the AO Dashboard" ], "metadata": { "collapsed": false @@ -158,74 +158,21 @@ "metadata": { "collapsed": false, "ExecuteTime": { - "end_time": "2023-12-15T18:00:16.812313Z", - "start_time": "2023-12-15T18:00:16.809585Z" + "end_time": "2023-12-15T20:21:12.352862Z", + "start_time": "2023-12-15T20:21:12.351126Z" } }, "id": "d538b20aa954ee80" }, { "cell_type": "code", - "execution_count": 6, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "\n", - "\u001B[1m> Entering new AgentExecutor chain...\u001B[0m\n", - "\u001B[32;1m\u001B[1;3mSure! Here's a short poem about secret agents:\n", - "\n", - "In shadows they dwell, unseen by the eye,\n", - "Masters of disguise, their secrets held high.\n", - "They weave through the night, like whispers in air,\n", - "Agents of mystery, with skills beyond compare.\n", - "\n", - "With steady hands, they handle the unknown,\n", - "Unraveling secrets, their cover not blown.\n", - "They traverse the globe, in search of the truth,\n", - "Stealthily moving, like shadows of youth.\n", - "\n", - "Their mission, you ask? 
To protect and defend,\n", - "The world's safety, until the very end.\n", - "With gadgets and wit, they outsmart their foes,\n", - "Silent warriors, where nobody knows.\n", - "\n", - "In dark alleys they gather, plans tightly knit,\n", - "Their loyalty unwavering, they never quit.\n", - "For justice they fight, with honor and grace,\n", - "A secret agent's life, a dangerous embrace.\n", - "\n", - "So raise your glass to these guardians unseen,\n", - "The heroes of shadows, in suits so pristine.\n", - "They sacrifice all, for a world that's unknown,\n", - "Secret agents, silent heroes, never alone.\n", - "\n", - "Final Answer: Secret agents, guardians of the night,\n", - "A poem to honor their courage and might.\u001B[0m\n", - "\n", - "\u001B[1m> Finished chain.\u001B[0m\n" - ] - }, - { - "data": { - "text/plain": "'Secret agents, guardians of the night,\\nA poem to honor their courage and might.'" - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "outputs": [], "source": [ "agent.run(\"Please write me a short poem about secret agents\")" ], "metadata": { - "collapsed": false, - "ExecuteTime": { - "end_time": "2023-12-15T18:00:22.740852Z", - "start_time": "2023-12-15T18:00:16.812664Z" - } + "collapsed": false }, "id": "6dfb127553751384" } From 7055571f263bd586f5e168984fd520efca845e9f Mon Sep 17 00:00:00 2001 From: reibs Date: Tue, 19 Dec 2023 11:58:34 -0800 Subject: [PATCH 06/15] added callback handler --- agentops/langchain_callback_handler.py | 64 ++++++++++++++++++++------ 1 file changed, 49 insertions(+), 15 deletions(-) diff --git a/agentops/langchain_callback_handler.py b/agentops/langchain_callback_handler.py index efd2cacf2..4106136da 100644 --- a/agentops/langchain_callback_handler.py +++ b/agentops/langchain_callback_handler.py @@ -16,12 +16,11 @@ class LangchainCallbackHandler(BaseCallbackHandler): - def __init__(self, api_key: str, tags: [str] = None): - self.ao_client = 
AOClient(api_key=api_key) - self.ao_client.start_session(tags) + def __init__(self, api_key: str, tags: List[str] = None): + self.ao_client = AOClient(api_key=api_key, tags=tags) # keypair - self.events = {} + self.events: Dict[Any, Event] = {} # LLM Callbacks def on_llm_start( @@ -35,9 +34,22 @@ def on_llm_start( metadata: Optional[Dict[str, Any]] = None, **kwargs: Any, ) -> Any: + # print(f"{serialized=}") + # serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'chat_models', 'openai', 'ChatOpenAI'], 'kwargs': {'openai_api_key': {'lc': 1, 'type': 'secret', 'id': ['OPENAI_API_KEY']}}} + + # print(f"{prompts=}") + # print(f"{run_id=}") + # print(f"{parent_run_id=}") + # print(f"{tags=}") + # print(f"{metadata=}") + # print(f"{kwargs=}") + # kwargs={'invocation_params': {'model': 'gpt-3.5-turbo', 'model_name': 'gpt-3.5-turbo', 'request_timeout': None, 'max_tokens': None, 'stream': False, 'n': 1, 'temperature': 0.7, '_type': 'openai-chat', 'stop': ['Observation:']}, 'options': {'stop': ['Observation:']}, 'name': None} + self.events[run_id] = Event( - event_type="langchain_llm", + event_type="llm", tags=tags, + model=kwargs['invocation_params']['model'], + params={**kwargs, **metadata}, prompt="\n--\n".join(prompts), init_timestamp=get_ISO_time() ) @@ -64,7 +76,7 @@ def on_llm_end( **kwargs: Any, ) -> Any: self.events[run_id].end_timestamp = get_ISO_time() - + self.events[run_id].returns = response.generations[0][0].message.content if len(response.generations) > 0: self.events[run_id].result = "Success" else: @@ -82,7 +94,7 @@ def on_chain_start( **kwargs: Any, ) -> Any: self.events[run_id] = Event( - event_type="langchain_llm_chain", + event_type="chain", init_timestamp=get_ISO_time() ) @@ -113,6 +125,24 @@ def on_chain_error( self.ao_client.record(self.events[run_id]) # Tool callbacks + + def on_tool_start( + self, + serialized: Dict[str, Any], + input_str: str, + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + tags: Optional[List[str]] = 
None, + metadata: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> Any: + """Run when tool starts running.""" + self.events[run_id] = Event( + event_type="tool", + init_timestamp=get_ISO_time() + ) + def on_tool_end( self, output: str, @@ -121,11 +151,10 @@ def on_tool_end( parent_run_id: Optional[UUID] = None, **kwargs: Any, ) -> Any: - self.ao_client.record(Event( - event_type="langchain_tool_usage", - result="Success", - init_timestamp=get_ISO_time() - )) + self.events[run_id].end_timestamp = get_ISO_time() + self.events[run_id].result = "Success" + + self.ao_client.record(self.events[run_id]) def on_tool_error( self, @@ -136,11 +165,16 @@ def on_tool_error( **kwargs: Any, ) -> Any: self.ao_client.record(Event( - event_type="langchain_tool_usage", + event_type="", result="Fail", init_timestamp=get_ISO_time() )) + self.events[run_id].end_timestamp = get_ISO_time() + self.events[run_id].result = "Fail" + + self.ao_client.record(self.events[run_id]) + # Retriever callbacks async def on_retriever_start( self, @@ -154,7 +188,7 @@ async def on_retriever_start( **kwargs: Any, ) -> None: self.events[run_id] = Event( - event_type="langchain_llm_retriever", + event_type="retriever", init_timestamp=get_ISO_time() ) @@ -168,7 +202,7 @@ async def on_retriever_end( **kwargs: Any, ) -> None: self.events[run_id].end_timestamp = get_ISO_time() - self.events[run_id].result = "Successful" + self.events[run_id].result = "Success" self.ao_client.record(self.events[run_id]) From eb7966de1b7abf6dcd0144818d8579cffed8ca86 Mon Sep 17 00:00:00 2001 From: reibs Date: Tue, 19 Dec 2023 12:11:44 -0800 Subject: [PATCH 07/15] removed superfluous imports, renamed _start_sess --- agentops/client.py | 12 ++++++------ agentops/event.py | 5 ++--- agentops/http.py | 1 - 3 files changed, 8 insertions(+), 10 deletions(-) diff --git a/agentops/client.py b/agentops/client.py index cf2067858..44cd98ffa 100644 --- a/agentops/client.py +++ b/agentops/client.py @@ -71,7 +71,7 @@ def __init__(self, 
api_key: Optional[str] = None, # Override sys.excepthook sys.excepthook = self.handle_exception - self.start_session(tags) + self._start_session(tags) if 'openai' in sys.modules: self.llm_tracker = LlmTracker(self) @@ -232,7 +232,7 @@ async def _record_event_async(self, func, event_name, tags, *args, **kwargs): return returns - def start_session(self, tags: Optional[List[str]] = None): + def _start_session(self, tags: Optional[List[str]] = None): """ Start a new session for recording events. @@ -245,10 +245,10 @@ def start_session(self, tags: Optional[List[str]] = None): self.worker.start_session(self.session) def end_session(self, end_state: str = Field("Indeterminate", - description="End state of the session", - pattern="^(Success|Fail|Indeterminate)$"), - rating: Optional[str] = None, - video: Optional[str] = None): + description="End state of the session", + pattern="^(Success|Fail|Indeterminate)$"), + rating: Optional[str] = None, + video: Optional[str] = None): """ End the current session with the AgentOps service. diff --git a/agentops/event.py b/agentops/event.py index 8a01598c5..b8d55a59e 100644 --- a/agentops/event.py +++ b/agentops/event.py @@ -4,12 +4,11 @@ Classes: Event: Represents discrete events to be recorded. """ -import json - from .helpers import get_ISO_time, Models from typing import Optional, List from pydantic import Field + class Event: """ Represents a discrete event to be recorded. 
@@ -81,4 +80,4 @@ def __str__(self): "prompt": self.prompt, "tags": self.tags, "init_timestamp": self.init_timestamp - }) \ No newline at end of file + }) diff --git a/agentops/http.py b/agentops/http.py index 685355b66..5460f0745 100644 --- a/agentops/http.py +++ b/agentops/http.py @@ -1,4 +1,3 @@ -import json from enum import Enum from typing import Optional import requests From 1432d7efae5ecaa7b66a813758b5f6e3aee83a99 Mon Sep 17 00:00:00 2001 From: reibs Date: Tue, 19 Dec 2023 16:18:26 -0800 Subject: [PATCH 08/15] updated new states --- agentops/langchain_callback_handler.py | 127 ++++++++++++++++--------- 1 file changed, 84 insertions(+), 43 deletions(-) diff --git a/agentops/langchain_callback_handler.py b/agentops/langchain_callback_handler.py index 4106136da..0b542e6fb 100644 --- a/agentops/langchain_callback_handler.py +++ b/agentops/langchain_callback_handler.py @@ -1,12 +1,13 @@ from typing import Dict, Any, List, Optional from uuid import UUID -from langchain_core.agents import AgentFinish +from langchain_core.agents import AgentFinish, AgentAction from langchain_core.outputs import LLMResult from langchain_core.documents import Document from agentops import Client as AOClient from agentops import Event +from tenacity import RetryCallState from langchain.callbacks.base import BaseCallbackHandler, ChainManagerMixin @@ -34,17 +35,6 @@ def on_llm_start( metadata: Optional[Dict[str, Any]] = None, **kwargs: Any, ) -> Any: - # print(f"{serialized=}") - # serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'chat_models', 'openai', 'ChatOpenAI'], 'kwargs': {'openai_api_key': {'lc': 1, 'type': 'secret', 'id': ['OPENAI_API_KEY']}}} - - # print(f"{prompts=}") - # print(f"{run_id=}") - # print(f"{parent_run_id=}") - # print(f"{tags=}") - # print(f"{metadata=}") - # print(f"{kwargs=}") - # kwargs={'invocation_params': {'model': 'gpt-3.5-turbo', 'model_name': 'gpt-3.5-turbo', 'request_timeout': None, 'max_tokens': None, 'stream': False, 'n': 1, 
'temperature': 0.7, '_type': 'openai-chat', 'stop': ['Observation:']}, 'options': {'stop': ['Observation:']}, 'name': None} - self.events[run_id] = Event( event_type="llm", tags=tags, @@ -86,38 +76,44 @@ def on_llm_end( # Chain callbacks def on_chain_start( - self, - outputs: Dict[str, Any], - *, - run_id: UUID, - parent_run_id: Optional[UUID] = None, - **kwargs: Any, + self, + serialized: Dict[str, Any], + inputs: Dict[str, Any], + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + tags: Optional[List[str]] = None, + metadata: Optional[Dict[str, Any]] = None, + **kwargs: Any, ) -> Any: self.events[run_id] = Event( event_type="chain", - init_timestamp=get_ISO_time() + init_timestamp=get_ISO_time(), + tags=tags, + params={**inputs, **kwargs, **metadata}, ) def on_chain_end( - self, - outputs: Dict[str, Any], - *, - run_id: UUID, - parent_run_id: Optional[UUID] = None, - **kwargs: Any, + self, + outputs: Dict[str, Any], + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + **kwargs: Any, ) -> Any: self.events[run_id].end_timestamp = get_ISO_time() self.events[run_id].result = "Success" + self.events[run_id].returns = outputs self.ao_client.record(self.events[run_id]) def on_chain_error( - self, - error: BaseException, - *, - run_id: UUID, - parent_run_id: Optional[UUID] = None, - **kwargs: Any, + self, + error: BaseException, + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + **kwargs: Any, ) -> Any: self.events[run_id].end_timestamp = get_ISO_time() self.events[run_id].result = "Fail" @@ -125,7 +121,6 @@ def on_chain_error( self.ao_client.record(self.events[run_id]) # Tool callbacks - def on_tool_start( self, serialized: Dict[str, Any], @@ -140,7 +135,9 @@ def on_tool_start( """Run when tool starts running.""" self.events[run_id] = Event( event_type="tool", - init_timestamp=get_ISO_time() + init_timestamp=get_ISO_time(), + tags=tags, + params={**serialized, **metadata}, ) def on_tool_end( @@ -151,8 +148,15 @@ def on_tool_end( 
parent_run_id: Optional[UUID] = None, **kwargs: Any, ) -> Any: + # Tools are capable of failing `on_tool_end` quietly. + # This is a workaround to make sure we can log it as an error. + if kwargs.get('name') == '_Exception': + self.events[run_id].result = "Fail" + else: + self.events[run_id].result = "Success" + self.events[run_id].end_timestamp = get_ISO_time() - self.events[run_id].result = "Success" + self.events[run_id].returns = output self.ao_client.record(self.events[run_id]) @@ -164,19 +168,14 @@ def on_tool_error( parent_run_id: Optional[UUID] = None, **kwargs: Any, ) -> Any: - self.ao_client.record(Event( - event_type="", - result="Fail", - init_timestamp=get_ISO_time() - )) - self.events[run_id].end_timestamp = get_ISO_time() self.events[run_id].result = "Fail" + self.events[run_id].returns = str(error) self.ao_client.record(self.events[run_id]) # Retriever callbacks - async def on_retriever_start( + def on_retriever_start( self, serialized: Dict[str, Any], query: str, @@ -192,7 +191,7 @@ async def on_retriever_start( init_timestamp=get_ISO_time() ) - async def on_retriever_end( + def on_retriever_end( self, documents: Sequence[Document], *, @@ -206,7 +205,7 @@ async def on_retriever_end( self.ao_client.record(self.events[run_id]) - async def on_retriever_error( + def on_retriever_error( self, error: BaseException, *, @@ -221,6 +220,21 @@ async def on_retriever_error( self.ao_client.record(self.events[run_id]) # Agent callbacks + def on_agent_action( + self, + action: AgentAction, + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + **kwargs: Any, + ) -> Any: + """Run on agent action.""" + self.events[run_id] = Event( + event_type="agent", + init_timestamp=get_ISO_time(), + params={**kwargs}, + ) + def on_agent_finish( self, finish: AgentFinish, @@ -229,9 +243,36 @@ def on_agent_finish( parent_run_id: Optional[UUID] = None, **kwargs: Any, ) -> Any: + """Run on agent finish.""" + self.events[run_id].end_timestamp = get_ISO_time() + 
self.events[run_id].result = "Success" + self.events[run_id].returns = finish + + self.ao_client.record(self.events[run_id]) + # TODO: Create a way for the end user to set this based on their conditions self.ao_client.end_session("Success") + # Misc. + def on_retry( + self, + retry_state: RetryCallState, + *, + run_id: UUID, + parent_run_id: Optional[UUID] = None, + **kwargs: Any, + ) -> Any: + """Run on a retry event.""" + event = Event( + event_type="retry", + init_timestamp=get_ISO_time(), + end_timestamp=get_ISO_time(), + params={**kwargs}, + result="Indeterminate", + returns=retry_state + ) + self.ao_client.record(event) + @property def session_id(self): return self.ao_client.session.session_id From c411d1016f89c43310861387b268ea93595cd660 Mon Sep 17 00:00:00 2001 From: reibs Date: Tue, 19 Dec 2023 16:48:39 -0800 Subject: [PATCH 09/15] removed import --- agentops/langchain_callback_handler.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/agentops/langchain_callback_handler.py b/agentops/langchain_callback_handler.py index 0b542e6fb..806fc43b7 100644 --- a/agentops/langchain_callback_handler.py +++ b/agentops/langchain_callback_handler.py @@ -9,13 +9,14 @@ from agentops import Event from tenacity import RetryCallState -from langchain.callbacks.base import BaseCallbackHandler, ChainManagerMixin +from langchain.callbacks.base import BaseCallbackHandler from agentops.helpers import get_ISO_time from typing import Any, Dict, List, Optional, Sequence class LangchainCallbackHandler(BaseCallbackHandler): + """Callback handler for Langchain agents.""" def __init__(self, api_key: str, tags: List[str] = None): self.ao_client = AOClient(api_key=api_key, tags=tags) From 852a5f20a72ba6ce2fd61bfd06363015f1a7dc81 Mon Sep 17 00:00:00 2001 From: reibs Date: Tue, 19 Dec 2023 20:17:52 -0800 Subject: [PATCH 10/15] Fix prompts not rendering --- agentops/langchain_callback_handler.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/agentops/langchain_callback_handler.py b/agentops/langchain_callback_handler.py index 806fc43b7..e599856c6 100644 --- a/agentops/langchain_callback_handler.py +++ b/agentops/langchain_callback_handler.py @@ -38,10 +38,11 @@ def on_llm_start( ) -> Any: self.events[run_id] = Event( event_type="llm", + action_type='llm', tags=tags, model=kwargs['invocation_params']['model'], params={**kwargs, **metadata}, - prompt="\n--\n".join(prompts), + prompt=prompts[0], init_timestamp=get_ISO_time() ) From 0f49a55b747d0acb2c039007fe62901c91d33436 Mon Sep 17 00:00:00 2001 From: reibs Date: Wed, 20 Dec 2023 16:23:55 -0800 Subject: [PATCH 11/15] added prompt tokens to events --- agentops/event.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/agentops/event.py b/agentops/event.py index b8d55a59e..75932c849 100644 --- a/agentops/event.py +++ b/agentops/event.py @@ -40,6 +40,8 @@ class Event: tags (List[str], optional): Tags associated with the event. end_timestamp (float): The timestamp for when the event ended, represented as seconds since the epoch. init_timestamp (float): The timestamp for when the event was initiated, represented as seconds since the epoch. 
+ prompt_tokens (int, optional): The number of tokens in the prompt if the event is an LLM call + completion_tokens (int, optional): The number of tokens in the completion if the event is an LLM call """ def __init__(self, event_type: str, @@ -55,7 +57,9 @@ def __init__(self, event_type: str, prompt: Optional[str] = None, tags: Optional[List[str]] = None, init_timestamp: Optional[float] = None, - screenshot: Optional[str] = None + screenshot: Optional[str] = None, + prompt_tokens: Optional[int] = 0, + completion_tokens: Optional[int] = 0 ): self.event_type = event_type self.params = params @@ -68,6 +72,8 @@ def __init__(self, event_type: str, self.end_timestamp = get_ISO_time() self.init_timestamp = init_timestamp if init_timestamp else self.end_timestamp self.screenshot = screenshot + self.prompt_tokens = prompt_tokens + self.completion_tokens = completion_tokens def __str__(self): return str({ @@ -79,5 +85,7 @@ def __str__(self): "model": self.model, "prompt": self.prompt, "tags": self.tags, - "init_timestamp": self.init_timestamp + "init_timestamp": self.init_timestamp, + "prompt_tokens": self.prompt_tokens, + "completion_tokens": self.completion_tokens, }) From ffeb34bdff11784e1c7b45b040d69eab9510c2f2 Mon Sep 17 00:00:00 2001 From: reibs Date: Wed, 20 Dec 2023 16:24:28 -0800 Subject: [PATCH 12/15] added tokens to callback handler --- agentops/langchain_callback_handler.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/agentops/langchain_callback_handler.py b/agentops/langchain_callback_handler.py index e599856c6..220e9841c 100644 --- a/agentops/langchain_callback_handler.py +++ b/agentops/langchain_callback_handler.py @@ -69,6 +69,9 @@ def on_llm_end( ) -> Any: self.events[run_id].end_timestamp = get_ISO_time() self.events[run_id].returns = response.generations[0][0].message.content + self.events[run_id].prompt_tokens = response.llm_output['token_usage']['prompt_tokens'] + self.events[run_id].completion_tokens = 
response.llm_output['token_usage']['completion_tokens'] + if len(response.generations) > 0: self.events[run_id].result = "Success" else: From 464af5b8a2d7c0cbbf51be8c1ddbbcfbac18db16 Mon Sep 17 00:00:00 2001 From: reibs Date: Wed, 20 Dec 2023 16:25:58 -0800 Subject: [PATCH 13/15] added override --- agentops/client.py | 11 +++++++---- agentops/langchain_callback_handler.py | 2 +- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/agentops/client.py b/agentops/client.py index 44cd98ffa..ee173e192 100644 --- a/agentops/client.py +++ b/agentops/client.py @@ -35,6 +35,7 @@ class Client: endpoint (str, optional): The endpoint for the AgentOps service. Defaults to 'https://agentops-server-v2.fly.dev'. max_wait_time (int, optional): The maximum time to wait in milliseconds before flushing the queue. Defaults to 1000. max_queue_size (int, optional): The maximum size of the event queue. Defaults to 100. + override (bool): Whether to override and LLM calls to emit as events. Attributes: session (Session, optional): A Session is a grouping of events (e.g. a run of your agent). 
""" @@ -44,7 +45,8 @@ def __init__(self, api_key: Optional[str] = None, tags: Optional[List[str]] = None, endpoint: Optional[str] = 'https://agentops-server-v2.fly.dev', max_wait_time: Optional[int] = 1000, - max_queue_size: Optional[int] = 100): + max_queue_size: Optional[int] = 100, + override=True): # Get API key from env if api_key is None: @@ -73,9 +75,10 @@ def __init__(self, api_key: Optional[str] = None, self._start_session(tags) - if 'openai' in sys.modules: - self.llm_tracker = LlmTracker(self) - self.llm_tracker.override_api('openai') + if override: + if 'openai' in sys.modules: + self.llm_tracker = LlmTracker(self) + self.llm_tracker.override_api('openai') def handle_exception(self, exc_type, exc_value, exc_traceback): """ diff --git a/agentops/langchain_callback_handler.py b/agentops/langchain_callback_handler.py index 220e9841c..08981edb6 100644 --- a/agentops/langchain_callback_handler.py +++ b/agentops/langchain_callback_handler.py @@ -19,7 +19,7 @@ class LangchainCallbackHandler(BaseCallbackHandler): """Callback handler for Langchain agents.""" def __init__(self, api_key: str, tags: List[str] = None): - self.ao_client = AOClient(api_key=api_key, tags=tags) + self.ao_client = AOClient(api_key=api_key, tags=tags, override=False) # keypair self.events: Dict[Any, Event] = {} From b15bd8c627bbea4e4ef640bc6c92677eab43b719 Mon Sep 17 00:00:00 2001 From: reibs Date: Wed, 20 Dec 2023 19:43:37 -0800 Subject: [PATCH 14/15] reverted events --- agentops/event.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/agentops/event.py b/agentops/event.py index 75932c849..d3d2f0e29 100644 --- a/agentops/event.py +++ b/agentops/event.py @@ -58,8 +58,8 @@ def __init__(self, event_type: str, tags: Optional[List[str]] = None, init_timestamp: Optional[float] = None, screenshot: Optional[str] = None, - prompt_tokens: Optional[int] = 0, - completion_tokens: Optional[int] = 0 + prompt_tokens: Optional[int] = None, + completion_tokens: Optional[int] = None 
): self.event_type = event_type self.params = params From 457bb5babdb58f75b6e80f05320fd3a8eb516506 Mon Sep 17 00:00:00 2001 From: reibs Date: Wed, 20 Dec 2023 19:44:02 -0800 Subject: [PATCH 15/15] updated notebook --- examples/langchain_examples.ipynb | 242 +++++++++++++++++++----------- 1 file changed, 158 insertions(+), 84 deletions(-) diff --git a/examples/langchain_examples.ipynb b/examples/langchain_examples.ipynb index 077ea3317..c573c5800 100644 --- a/examples/langchain_examples.ipynb +++ b/examples/langchain_examples.ipynb @@ -2,198 +2,272 @@ "cells": [ { "cell_type": "markdown", + "id": "e0deea1ab1db2a19", + "metadata": {}, "source": [ "# AgentOps Langchain Agent Implementation\n", "\n", "Using AgentOps monitoring with Langchain is simple. We've created a LangchainCallbackHandler that will do all of the heavy lifting!\n", "\n", "First we'll import the typical Langchain packages:" - ], - "metadata": { - "collapsed": false - }, - "id": "e0deea1ab1db2a19" + ] }, { "cell_type": "code", "execution_count": 1, "id": "initial_id", "metadata": { - "collapsed": true, "ExecuteTime": { "end_time": "2023-12-15T20:21:11.477270Z", "start_time": "2023-12-15T20:21:10.289895Z" } }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/braelynboynton/Developer/agentops/venv/lib/python3.9/site-packages/urllib3/__init__.py:34: NotOpenSSLWarning: urllib3 v2 only supports OpenSSL 1.1.1+, currently the 'ssl' module is compiled with 'LibreSSL 2.8.3'. 
See: https://github.com/urllib3/urllib3/issues/3020\n", - " warnings.warn(\n" - ] - } - ], + "outputs": [], "source": [ "import os\n", "from langchain.chat_models import ChatOpenAI\n", "from langchain.agents import initialize_agent, AgentType\n", - "from dotenv import load_dotenv" + "from dotenv import load_dotenv\n", + "from langchain.agents import tool" ] }, { "cell_type": "markdown", + "id": "57ddb8eca4e8a3cb", + "metadata": {}, "source": [ "The only difference with using AgentOps is that we'll also import this special Callback Handler" - ], - "metadata": { - "collapsed": false - }, - "id": "57ddb8eca4e8a3cb" + ] }, { "cell_type": "code", "execution_count": 2, - "outputs": [], - "source": [ - "from agentops.langchain_callback_handler import LangchainCallbackHandler" - ], + "id": "585f00bb186711a7", "metadata": { - "collapsed": false, "ExecuteTime": { "end_time": "2023-12-15T20:21:11.478111Z", "start_time": "2023-12-15T20:21:11.471462Z" } }, - "id": "585f00bb186711a7" + "outputs": [], + "source": [ + "from agentops.langchain_callback_handler import LangchainCallbackHandler" + ] }, { "cell_type": "markdown", + "id": "523be945b85dc5d5", + "metadata": {}, "source": [ "Next, we'll grab our two API keys. 
You can use dotenv like below or however else you like to load environment variables" - ], - "metadata": { - "collapsed": false - }, - "id": "523be945b85dc5d5" + ] }, { "cell_type": "code", "execution_count": 3, - "outputs": [], - "source": [ - "load_dotenv()\n", - "OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')\n", - "AGENTOPS_API_KEY = os.getenv('AGENTOPS_API_KEY')" - ], + "id": "1490411415d7317c", "metadata": { - "collapsed": false, "ExecuteTime": { "end_time": "2023-12-15T20:21:11.494019Z", "start_time": "2023-12-15T20:21:11.479154Z" } }, - "id": "1490411415d7317c" + "outputs": [ + { + "data": { + "text/plain": [ + "False" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "load_dotenv()" + ] }, { "cell_type": "markdown", + "id": "8371ec020e634dd0", + "metadata": {}, "source": [ - "This is where AgentOps comes into play. Before creating our LLM instance via Langchain, first we'll create an instance of the AO LangchainCallbackHandler.\n", + "This is where AgentOps comes into play. Before creating our LLM instance via Langchain, first we'll create an instance of the AO LangchainCallbackHandler. After the handler is initialized, a session will be recorded automatically.\n", "\n", "Pass in your API key, and optionally any tags to describe this session for easier lookup in the AO dashboard. You can also retrieve the `session_id` of the newly created session." 
- ], - "metadata": { - "collapsed": false - }, - "id": "8371ec020e634dd0" + ] }, { "cell_type": "code", "execution_count": 4, + "id": "432921383f39c9d5", + "metadata": { + "ExecuteTime": { + "end_time": "2023-12-15T20:21:12.346995Z", + "start_time": "2023-12-15T20:21:11.483591Z" + } + }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Agent Ops session ID: 0feeb023-d8eb-4eae-9c77-29c2a3c88b38\n" + "Agent Ops session ID: 80c6593c-ae90-4b48-98e1-d17fcd801c70\n" ] } ], "source": [ + "AGENTOPS_API_KEY = os.environ.get('AGENTOPS_API_KEY')\n", + "OPENAI_API_KEY = os.environ.get('OPENAI_API_KEY')\n", + "\n", "handler = LangchainCallbackHandler(api_key=AGENTOPS_API_KEY, tags=['Langchain Example'])\n", + "\n", "print(\"Agent Ops session ID: \" + handler.session_id)\n", - "llm = ChatOpenAI(openai_api_key=OPENAI_API_KEY, callbacks=[handler])" - ], - "metadata": { - "collapsed": false, - "ExecuteTime": { - "end_time": "2023-12-15T20:21:12.346995Z", - "start_time": "2023-12-15T20:21:11.483591Z" - } - }, - "id": "432921383f39c9d5" + "llm = ChatOpenAI(openai_api_key=OPENAI_API_KEY,\n", + " model='gpt-3.5-turbo')" + ] }, { "cell_type": "markdown", + "id": "93aa09ec", + "metadata": {}, "source": [ - "Finally, let's use our agent! All the actions will be recorded in the AO Dashboard" - ], - "metadata": { - "collapsed": false - }, - "id": "58bbca0b49302b2b" + "Agents generally use tools. Let's define a simple tool here. Tool usage is also recorded." 
+ ] }, { "cell_type": "code", "execution_count": 5, + "id": "6abf26f9", + "metadata": {}, "outputs": [], "source": [ - "agent = initialize_agent([],\n", - " llm,\n", - " agent=AgentType.CHAT_ZERO_SHOT_REACT_DESCRIPTION,\n", - " verbose=True,\n", - " handle_parsing_errors=True)" - ], + "@tool\n", + "def find_movie(term) -> str:\n", + " \"\"\"Find available movies\"\"\"\n", + " return 'Citizen Kane'\n", + "\n", + "\n", + "tools = [find_movie]" + ] + }, + { + "cell_type": "markdown", + "id": "58bbca0b49302b2b", + "metadata": {}, + "source": [ + "Finally, let's use our agent! Pass in the callback handler to the agent, and all the actions will be recorded in the AO Dashboard" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "d538b20aa954ee80", "metadata": { - "collapsed": false, "ExecuteTime": { "end_time": "2023-12-15T20:21:12.352862Z", "start_time": "2023-12-15T20:21:12.351126Z" } }, - "id": "d538b20aa954ee80" + "outputs": [], + "source": [ + "agent = initialize_agent(tools,\n", + " llm,\n", + " agent=AgentType.CHAT_ZERO_SHOT_REACT_DESCRIPTION,\n", + " verbose=True,\n", + " callbacks=[handler], # You must pass in a callback handler to record your agent\n", + " handle_parsing_errors=True)" + ] }, { "cell_type": "code", - "execution_count": null, - "outputs": [], - "source": [ - "agent.run(\"Please write me a short poem about secret agents\")" + "execution_count": 7, + "id": "6dfb127553751384", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "\u001b[1m> Entering new AgentExecutor chain...\u001b[0m\n", + "\u001b[32;1m\u001b[1;3mCould not parse LLM output: I can use the `find_movie` tool to find the available movies.\u001b[0m\n", + "Observation: Invalid or incomplete response\n", + "Thought:\u001b[32;1m\u001b[1;3mCould not parse LLM output: I will use the `find_movie` tool to find the available movies.\u001b[0m\n", + "Observation: Invalid or incomplete response\n", + 
"Thought:\u001b[32;1m\u001b[1;3mI will use the `find_movie` tool to find the available movies.\n", + "Action:\n", + "```\n", + "{\n", + " \"action\": \"find_movie\",\n", + " \"action_input\": \"\"\n", + "}\n", + "```\u001b[0m\n", + "Observation: \u001b[36;1m\u001b[1;3mCitizen Kane\u001b[0m\n", + "Thought:\u001b[32;1m\u001b[1;3mCould not parse LLM output: The movie \"Citizen Kane\" is currently playing.\u001b[0m\n", + "Observation: Invalid or incomplete response\n", + "Thought:\u001b[32;1m\u001b[1;3mI will use the `find_movie` tool to find the available movies.\n", + "Action:\n", + "```\n", + "{\n", + " \"action\": \"find_movie\",\n", + " \"action_input\": \"\"\n", + "}\n", + "```\u001b[0m\n", + "Observation: \u001b[36;1m\u001b[1;3mCitizen Kane\u001b[0m\n", + "Thought:\u001b[32;1m\u001b[1;3mI will use the `find_movie` tool to find the available movies.\n", + "Action:\n", + "```\n", + "{\n", + " \"action\": \"find_movie\",\n", + " \"action_input\": \"\"\n", + "}\n", + "```\n", + "\u001b[0m\n", + "Observation: \u001b[36;1m\u001b[1;3mCitizen Kane\u001b[0m\n", + "Thought:\u001b[32;1m\u001b[1;3mCould not parse LLM output: I now know the final answer.\u001b[0m\n", + "Observation: Invalid or incomplete response\n", + "Thought:\u001b[32;1m\u001b[1;3mI now know the final answer.\n", + "Final Answer: The movie \"Citizen Kane\" is currently playing.\u001b[0m\n", + "\n", + "\u001b[1m> Finished chain.\u001b[0m\n" + ] + }, + { + "data": { + "text/plain": [ + "'The movie \"Citizen Kane\" is currently playing.'" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } ], - "metadata": { - "collapsed": false - }, - "id": "6dfb127553751384" + "source": [ + "agent.run(\"What movies are playing?\", callbacks=[handler])" + ] } ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", - "version": 2 + 
"version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.6" + "pygments_lexer": "ipython3", + "version": "3.11.3" } }, "nbformat": 4,