Skip to content

Commit

Permalink
litellm tests
Browse files Browse the repository at this point in the history
  • Loading branch information
bboynton97 committed Aug 14, 2024
1 parent f036043 commit 8a772fb
Show file tree
Hide file tree
Showing 2 changed files with 34 additions and 0 deletions.
12 changes: 12 additions & 0 deletions agentops/llms/litellm.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import pprint
from typing import Optional

import litellm.utils

from ..log_config import logger
from ..event import LLMEvent, ErrorEvent
from ..session import Session
Expand Down Expand Up @@ -96,6 +98,16 @@ def generator():

return generator()

# litellm uses a CustomStreamWrapper (not an openai Stream/AsyncStream),
# so streamed litellm responses need their own isinstance branch here.
if isinstance(response, litellm.utils.CustomStreamWrapper):

    def generator():
        # Feed every chunk to the instrumentation hook, then yield it
        # through unchanged so the caller can consume the stream normally.
        for chunk in response:
            handle_stream_chunk(chunk)
            yield chunk

    return generator()

# For asynchronous AsyncStream
elif isinstance(response, AsyncStream):

Expand Down
22 changes: 22 additions & 0 deletions tests/core_manual_tests/providers/litellm_canary.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import asyncio

import agentops
from dotenv import load_dotenv
import litellm
Expand All @@ -9,6 +11,26 @@
model="gpt-3.5-turbo", messages=[{"content": "Hello, how are you?", "role": "user"}]
)

# Smoke-test the synchronous streaming path (stream=True); presumably this
# exercises the CustomStreamWrapper handling added in the provider — verify
# against agentops/llms/litellm.py.
stream_response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"content": "Hello, how are you?", "role": "user"}],
    stream=True,
)
print(stream_response)
# Consuming the iterator is what actually drives the stream; print each
# chunk as it arrives.
for chunk in stream_response:
    print(chunk)


async def main():
    """Smoke-test the asynchronous `litellm.acompletion` path.

    Sends a single chat message and prints the full (non-streamed)
    response object.
    """
    messages = [{"content": "Hello, how are you?", "role": "user"}]
    result = await litellm.acompletion(model="gpt-3.5-turbo", messages=messages)
    print(result)
print(async_response)


# Run the async smoke test to completion before closing the session.
asyncio.run(main())

# End the AgentOps session with a success state; NOTE(review): presumably
# this finalizes/flushes the recorded LLM events — confirm against the
# agentops session API.
agentops.end_session(end_state="Success")

###
Expand Down

0 comments on commit 8a772fb

Please sign in to comment.