fix: update examples to use provider streaming methods
Co-Authored-By: Alex Reibman <[email protected]>
devin-ai-integration[bot] and areibman committed Dec 20, 2024
1 parent 5037392 commit 9dd3ae4
Showing 2 changed files with 20 additions and 22 deletions.
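
Across both files the visible hunks make the same changes: EndState is imported from agentops.session, the Anthropic client gets the unambiguous name anthropic_client before being wrapped in AnthropicProvider, message lists are passed inline to the provider's streaming call, and sessions are closed with the session-scoped session.end_session(end_state=EndState.SUCCESS / EndState.ERROR) instead of ao_client.end_session with string states. A minimal sketch of the resulting pattern follows; it assumes provider and session are the objects each example constructs elsewhere (that setup sits outside the changed hunks), so the sketch takes them as parameters rather than creating them.

# Minimal sketch of the pattern both examples adopt. `provider` is an AnthropicProvider
# bound to an AgentOps session; both are assumed to be created outside this snippet,
# as in the unchanged parts of the examples.
from agentops.session import EndState


def stream_reply(provider, session, prompt: str) -> str:
    reply = ""
    try:
        with provider.create_stream(
            max_tokens=1024,
            model="claude-3-sonnet-20240229",
            messages=[{"role": "user", "content": prompt}],
            stream=True,
        ) as stream:
            # Accumulate the streamed text, then close the session with the enum state.
            for text in stream.text_stream:
                reply += text
        session.end_session(end_state=EndState.SUCCESS)
    except Exception:
        session.end_session(end_state=EndState.ERROR)
        raise
    return reply
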
15 changes: 7 additions & 8 deletions examples/anthropic_examples/anthropic-example-async.py
@@ -17,6 +17,7 @@
 import anthropic
 from agentops import Client
 from agentops.llms.providers.anthropic import AnthropicProvider
+from agentops.session import EndState

 # Setup environment and API keys
 load_dotenv()
@@ -65,13 +66,11 @@ async def generate_message(provider, personality, health):
 Generate a short status report (2-3 sentences) that reflects both your personality and current health status.
 Keep the tone consistent with a military combat AI but influenced by your unique personality."""

-    messages = [{"role": "user", "content": prompt}]
-
     try:
         async with provider.create_stream_async(
             max_tokens=1024,
             model="claude-3-sonnet-20240229",
-            messages=messages,
+            messages=[{"role": "user", "content": prompt}],
             stream=True
         ) as stream:
             message = ""
@@ -96,8 +95,8 @@ async def main():

     try:
         # Initialize Anthropic client and provider
-        client = anthropic.Client(api_key=os.getenv("ANTHROPIC_API_KEY"))
-        provider = AnthropicProvider(client=client, session=session)
+        anthropic_client = anthropic.Client(api_key=os.getenv("ANTHROPIC_API_KEY"))
+        provider = AnthropicProvider(client=anthropic_client, session=session)

         # Define Titan personality and health status
         personality = "Ronin is a swift and aggressive melee specialist who thrives on close-quarters hit-and-run tactics. He talks like a Samurai might."
@@ -107,16 +106,16 @@
         print(f"Health Status: {health}")
         print("\nCombat log incoming from encrypted area")

-        # Generate message and UUIDs concurrently
+        # Generate Titan status message
         message = await generate_message(provider, personality, health)
         print(f"\nTitan Status Report: {message}")

         # End session with success status
-        ao_client.end_session(end_state="success")
+        session.end_session(end_state=EndState.SUCCESS)

     except Exception as e:
         print(f"Error in Titan Support Protocol: {e}")
-        ao_client.end_session(end_state="error")
+        session.end_session(end_state=EndState.ERROR)


 if __name__ == "__main__":
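
The async example drives the same provider through create_stream_async inside an async with block; the hidden lines of the hunk presumably accumulate the streamed text into message before generate_message returns it to main. A sketch of that shape, assuming create_stream_async exposes the same text_stream iterator as the sync path (only the async with line and message = "" are visible in the hunk), would be:

# Sketch only: assumes create_stream_async yields a stream whose text_stream can be
# iterated with `async for`, mirroring the sync example; the hunk does not show this part.
async def generate_message_sketch(provider, prompt: str) -> str:
    message = ""
    async with provider.create_stream_async(
        max_tokens=1024,
        model="claude-3-sonnet-20240229",
        messages=[{"role": "user", "content": prompt}],
        stream=True,
    ) as stream:
        async for text in stream.text_stream:
            message += text
    return message
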
27 changes: 13 additions & 14 deletions examples/anthropic_examples/anthropic-example-sync.py
@@ -12,6 +12,7 @@
 from dotenv import load_dotenv
 from agentops import Client
 from agentops.llms.providers.anthropic import AnthropicProvider
+from agentops.session import EndState

 # Load environment variables
 load_dotenv()
@@ -70,38 +71,36 @@ def generate_story():

     try:
         # Initialize Anthropic client and provider
-        client = anthropic.Client(api_key=os.getenv("ANTHROPIC_API_KEY"))
-        provider = AnthropicProvider(client=client, session=session)
+        anthropic_client = anthropic.Client(api_key=os.getenv("ANTHROPIC_API_KEY"))
+        provider = AnthropicProvider(client=anthropic_client, session=session)

         # Generate a random prompt
         prompt = f"A {random.choice(first)} {random.choice(second)} {random.choice(third)}."
         print(f"Generated prompt: {prompt}\n")
         print("Generating story...\n")

-        messages = [
-            {
-                "role": "user",
-                "content": "Create a story based on the following prompt. Make it dark and atmospheric, similar to NieR:Automata's style.",
-            },
-            {"role": "assistant", "content": prompt},
-        ]
-
-        # Stream the story generation
+        # Create message with provider's streaming
         with provider.create_stream(
            max_tokens=2048,
             model="claude-3-sonnet-20240229",
-            messages=messages,
+            messages=[
+                {
+                    "role": "user",
+                    "content": "Create a story based on the following prompt. Make it dark and atmospheric, similar to NieR:Automata's style.",
+                },
+                {"role": "assistant", "content": prompt},
+            ],
             stream=True
         ) as stream:
             for text in stream.text_stream:
                 print(text, end="", flush=True)
         print("\nStory generation complete!")

         # End session with success status
-        ao_client.end_session(end_state="success")
+        session.end_session(end_state=EndState.SUCCESS)
     except Exception as e:
         print(f"Error generating story: {e}")
-        ao_client.end_session(end_state="error")
+        session.end_session(end_state=EndState.ERROR)

 if __name__ == "__main__":
     generate_story()
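
One detail worth noting in the sync example: the inline messages list pairs a fixed user instruction with the randomly generated prompt supplied as an assistant turn, so the model continues the story from that seed. A small helper like the following (hypothetical; the example builds the list directly inside the create_stream call) shows the same two-turn shape:

# Hypothetical helper illustrating the two-turn prompt the sync example passes inline;
# the example itself constructs this list directly in the create_stream call.
def build_story_messages(prompt: str) -> list[dict]:
    return [
        {
            "role": "user",
            "content": "Create a story based on the following prompt. Make it dark and atmospheric, similar to NieR:Automata's style.",
        },
        # The generated prompt is passed as an assistant turn, which the model
        # treats as the start of its own reply and continues from.
        {"role": "assistant", "content": prompt},
    ]
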
