
Commit: ollama tests
bboynton97 committed Aug 14, 2024
1 parent edc2bea commit f036043
Showing 2 changed files with 26 additions and 3 deletions.
1 change: 1 addition & 0 deletions agentops/llms/ollama.py
@@ -59,6 +59,7 @@ def generator():
 def override(self):
     self._override_chat_client()
     self._override_chat()
+    self._override_chat_async_client()

Check warning on line 62 in agentops/llms/ollama.py (Codecov / codecov/patch): Added lines #L60 - L62 were not covered by tests.

 def undo_override(self):
     if "ollama" in sys.modules:
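For context, AgentOps providers add instrumentation by monkey-patching the client's chat methods and forwarding each response to the session recorder. Below is a minimal, hypothetical sketch of what the newly wired _override_chat_async_client() could do for ollama.AsyncClient; the event handling is an assumption, and the actual implementation in agentops/llms/ollama.py may differ.

# Hypothetical sketch of an async-client override; the real implementation
# in agentops/llms/ollama.py may differ.
from ollama import AsyncClient

# Keep a reference to the unpatched coroutine so undo_override() can
# restore it and the patched version can still call through to it.
_original_chat = AsyncClient.chat


async def _patched_chat(self, *args, **kwargs):
    response = await _original_chat(self, *args, **kwargs)
    # A real provider would convert the response into an LLM event and
    # record it against the active session here.
    return response


AsyncClient.chat = _patched_chat

Undoing the override is then just assigning _original_chat back to AsyncClient.chat, which is why undo_override() first checks that "ollama" is still in sys.modules.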
28 changes: 25 additions & 3 deletions tests/core_manual_tests/providers/ollama_canary.py
@@ -1,6 +1,9 @@
+import asyncio
+
 import agentops
 from dotenv import load_dotenv
 import ollama
+from ollama import AsyncClient
 
 load_dotenv()
 agentops.init(default_tags=["ollama-provider-test"])
@@ -10,12 +13,31 @@
     messages=[
         {
             "role": "user",
-            "content": "Why is the sky blue?",
+            "content": "say hello sync",
         },
     ],
 )
-print(response)
+print(response["message"]["content"])
 
+stream_response = ollama.chat(
+    model="llama3.1",
+    messages=[
+        {
+            "role": "user",
+            "content": "say hello str",
+        },
+    ],
+    stream=True,
+)
+for chunk in stream_response:
+    print(chunk)
+
+
+async def main():
+    message = {"role": "user", "content": "say hello mr. async"}
+    async_response = await AsyncClient().chat(model="llama3.1", messages=[message])
+
+
+asyncio.run(main())
 
 agentops.end_session(end_state="Success")

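Note that the async branch of the canary discards its response, so the run prints nothing for it. A variant that prints the reply and also exercises async streaming might look like the sketch below; it assumes AsyncClient.chat accepts stream=True and then yields chunks, mirroring the sync client.

import asyncio

from ollama import AsyncClient


async def main():
    client = AsyncClient()
    message = {"role": "user", "content": "say hello mr. async"}

    # Non-streaming: print the full reply so the canary output shows it.
    response = await client.chat(model="llama3.1", messages=[message])
    print(response["message"]["content"])

    # Streaming: with stream=True the awaited call resolves to an async
    # generator of chunks.
    async for chunk in await client.chat(
        model="llama3.1", messages=[message], stream=True
    ):
        print(chunk["message"]["content"], end="", flush=True)


asyncio.run(main())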
