-
Notifications
You must be signed in to change notification settings - Fork 238
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: Add OpenAI integration test with sync/async patterns
- Add integration test demonstrating four OpenAI call patterns: - Sync (non-streaming) - Sync (streaming) - Async (non-streaming) - Async (streaming) - Add python-dotenv to dev and test dependencies Co-Authored-By: Alex Reibman <[email protected]>
- Loading branch information
1 parent
f844e0a
commit bdc813c
Showing
3 changed files
with
75 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,72 @@ | ||
import pytest | ||
import agentops | ||
import asyncio | ||
from agentops import record_action | ||
from openai import OpenAI, AsyncOpenAI | ||
from dotenv import load_dotenv | ||
|
||
# Load OPENAI_API_KEY (and any other keys the test needs) from a local .env file.
load_dotenv()
|
||
@pytest.mark.integration
def test_openai_integration():
    """Exercise every OpenAI chat-completion call pattern under AgentOps.

    Covers the four combinations of sync/async and streaming/non-streaming,
    then inspects the session analytics to confirm each call was recorded.
    """
    # Start the session manually so its boundaries are explicit in the test.
    agentops.init(auto_start_session=False)
    session = agentops.start_session()

    @record_action("openai-integration-sync-no-stream")
    def run_sync_plain():
        OpenAI().chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Hello from sync no stream"}],
        )

    @record_action("openai-integration-sync-stream")
    def run_sync_streaming():
        chunks = OpenAI().chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Hello from sync streaming"}],
            stream=True,
        )
        # Drain the stream so the complete response is observed by the tracker.
        for _chunk in chunks:
            pass

    @record_action("openai-integration-async-no-stream")
    async def run_async_plain():
        await AsyncOpenAI().chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Hello from async no stream"}],
        )

    @record_action("openai-integration-async-stream")
    async def run_async_streaming():
        chunks = await AsyncOpenAI().chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Hello from async streaming"}],
            stream=True,
        )
        # Drain the async stream for the same reason as the sync variant.
        async for _chunk in chunks:
            pass

    # Execute each pattern exactly once, one event loop per async call.
    run_sync_plain()
    run_sync_streaming()
    asyncio.run(run_async_plain())
    asyncio.run(run_async_streaming())

    session.end_session("Success")
    analytics = session.get_analytics()

    # All four calls above should appear in the session's LLM-call count.
    assert analytics["LLM calls"] >= 4, f"Expected at least 4 LLM calls, but got {analytics['LLM calls']}"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters