fix(anthropic): instrument anthropic's system message as gen_ai.prompt.0 (#2238)

Co-authored-by: Nir Gazit <[email protected]>
dinmukhamedm and nirga authored Nov 4, 2024
1 parent 15e54a0 commit f06cd01
Showing 2 changed files with 68 additions and 23 deletions.
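In short: when a request passes Anthropic's top-level "system" argument, the instrumentation now records it as prompt index 0 with role "system" and shifts the chat messages up by one index; previously the system prompt was not captured in this branch and the first user message sat at index 0. Below is a minimal, runnable sketch of the new indexing — prompt_attributes is a hypothetical helper written for illustration only; the real code sets the same keys on an OpenTelemetry span via set_span_attribute and serializes content with _dump_content.

# Hypothetical helper mirroring the indexing logic added in this commit:
# the system prompt (if any) becomes gen_ai.prompt.0, and the chat
# messages that follow are shifted up by one index.
def prompt_attributes(kwargs: dict) -> dict:
    attrs = {}
    has_system_message = bool(kwargs.get("system"))
    if has_system_message:
        attrs["gen_ai.prompt.0.role"] = "system"
        attrs["gen_ai.prompt.0.content"] = str(kwargs["system"])
    for i, message in enumerate(kwargs.get("messages", [])):
        prompt_index = i + (1 if has_system_message else 0)
        attrs[f"gen_ai.prompt.{prompt_index}.role"] = message.get("role")
        attrs[f"gen_ai.prompt.{prompt_index}.content"] = message.get("content")
    return attrs

print(prompt_attributes({
    "system": "You summarize articles.",
    "messages": [{"role": "user", "content": "Summarize: ..."}],
}))
# {'gen_ai.prompt.0.role': 'system',
#  'gen_ai.prompt.0.content': 'You summarize articles.',
#  'gen_ai.prompt.1.role': 'user',
#  'gen_ai.prompt.1.content': 'Summarize: ...'}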
@@ -172,16 +172,34 @@ async def _aset_input_attributes(span, kwargs):
             )
 
         elif kwargs.get("messages") is not None:
+            has_system_message = False
+            if kwargs.get("system"):
+                has_system_message = True
+                set_span_attribute(
+                    span,
+                    f"{SpanAttributes.LLM_PROMPTS}.0.content",
+                    await _dump_content(
+                        message_index=0, span=span, content=kwargs.get("system")
+                    ),
+                )
+                set_span_attribute(
+                    span,
+                    f"{SpanAttributes.LLM_PROMPTS}.0.role",
+                    "system",
+                )
             for i, message in enumerate(kwargs.get("messages")):
+                prompt_index = i + (1 if has_system_message else 0)
                 set_span_attribute(
                     span,
-                    f"{SpanAttributes.LLM_PROMPTS}.{i}.content",
+                    f"{SpanAttributes.LLM_PROMPTS}.{prompt_index}.content",
                     await _dump_content(
                         message_index=i, span=span, content=message.get("content")
                     ),
                 )
                 set_span_attribute(
-                    span, f"{SpanAttributes.LLM_PROMPTS}.{i}.role", message.get("role")
+                    span,
+                    f"{SpanAttributes.LLM_PROMPTS}.{prompt_index}.role",
+                    message.get("role"),
                 )
 
         if kwargs.get("tools") is not None:
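The second file brings the prompt-caching tests in line with the new indexing. Each test request passes a system content block, so the updated assertions expect an attribute layout along these lines (a hand-written illustration of what the tests check, not captured exporter output):

# Illustrative only: the layout the updated tests assert for a request
# with one system block and one (cached) user message.
expected_prompt_attributes = {
    "gen_ai.prompt.0.role": "system",
    "gen_ai.prompt.0.content": "You help generate concise summaries of news articles and blog posts that user sends you.",
    "gen_ai.prompt.1.role": "user",
    "gen_ai.prompt.1.content": "<the 1024+ token article text>",
}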
@@ -575,14 +575,16 @@ def test_anthropic_prompt_caching(exporter, reader):
     except Exception:
         pass
 
+    system_message = "You help generate concise summaries of news articles and blog posts that user sends you."
+
     for _ in range(2):
         client.beta.prompt_caching.messages.create(
             model="claude-3-5-sonnet-20240620",
             max_tokens=1024,
             system=[
                 {
                     "type": "text",
-                    "text": "You help generate concise summaries of news articles and blog posts that user sends you.",
+                    "text": system_message,
                 },
             ],
             messages=[
@@ -606,10 +608,15 @@ def test_anthropic_prompt_caching(exporter, reader):
     cache_creation_span = spans[0]
     cache_read_span = spans[1]
 
-    assert cache_creation_span.attributes["gen_ai.prompt.0.role"] == "user"
-    assert text == cache_creation_span.attributes["gen_ai.prompt.0.content"]
-    assert cache_read_span.attributes["gen_ai.prompt.0.role"] == "user"
-    assert text == cache_read_span.attributes["gen_ai.prompt.0.content"]
+    assert cache_creation_span.attributes["gen_ai.prompt.0.role"] == "system"
+    assert system_message == cache_creation_span.attributes["gen_ai.prompt.0.content"]
+    assert cache_read_span.attributes["gen_ai.prompt.0.role"] == "system"
+    assert system_message == cache_read_span.attributes["gen_ai.prompt.0.content"]
+
+    assert cache_creation_span.attributes["gen_ai.prompt.1.role"] == "user"
+    assert text == cache_creation_span.attributes["gen_ai.prompt.1.content"]
+    assert cache_read_span.attributes["gen_ai.prompt.1.role"] == "user"
+    assert text == cache_read_span.attributes["gen_ai.prompt.1.content"]
     assert (
         cache_creation_span.attributes["gen_ai.usage.cache_creation_input_tokens"]
         == cache_read_span.attributes["gen_ai.usage.cache_read_input_tokens"]
@@ -642,7 +649,6 @@ def test_anthropic_prompt_caching(exporter, reader):
 @pytest.mark.vcr
 @pytest.mark.asyncio
 async def test_anthropic_prompt_caching_async(exporter, reader):
-
     with open(Path(__file__).parent.joinpath("data/1024+tokens.txt"), "r") as f:
         # add the unique test name to the prompt to avoid caching leaking to other tests
         text = "test_anthropic_prompt_caching_async <- IGNORE THIS. ARTICLES START ON THE NEXT LINE\n" + f.read()
@@ -655,14 +661,16 @@ async def test_anthropic_prompt_caching_async(exporter, reader):
     except Exception:
         pass
 
+    system_message = "You help generate concise summaries of news articles and blog posts that user sends you."
+
     for _ in range(2):
         await client.beta.prompt_caching.messages.create(
             model="claude-3-5-sonnet-20240620",
             max_tokens=1024,
             system=[
                 {
                     "type": "text",
-                    "text": "You help generate concise summaries of news articles and blog posts that user sends you.",
+                    "text": system_message,
                 },
             ],
             messages=[
@@ -686,10 +694,15 @@ async def test_anthropic_prompt_caching_async(exporter, reader):
     cache_creation_span = spans[0]
     cache_read_span = spans[1]
 
-    assert cache_creation_span.attributes["gen_ai.prompt.0.role"] == "user"
-    assert text == cache_creation_span.attributes["gen_ai.prompt.0.content"]
-    assert cache_read_span.attributes["gen_ai.prompt.0.role"] == "user"
-    assert text == cache_read_span.attributes["gen_ai.prompt.0.content"]
+    assert cache_creation_span.attributes["gen_ai.prompt.0.role"] == "system"
+    assert system_message == cache_creation_span.attributes["gen_ai.prompt.0.content"]
+    assert cache_read_span.attributes["gen_ai.prompt.0.role"] == "system"
+    assert system_message == cache_read_span.attributes["gen_ai.prompt.0.content"]
+
+    assert cache_creation_span.attributes["gen_ai.prompt.1.role"] == "user"
+    assert text == cache_creation_span.attributes["gen_ai.prompt.1.content"]
+    assert cache_read_span.attributes["gen_ai.prompt.1.role"] == "user"
+    assert text == cache_read_span.attributes["gen_ai.prompt.1.content"]
     assert (
         cache_creation_span.attributes["gen_ai.usage.cache_creation_input_tokens"]
         == cache_read_span.attributes["gen_ai.usage.cache_read_input_tokens"]
@@ -732,6 +745,8 @@ def test_anthropic_prompt_caching_stream(exporter, reader):
     except Exception:
         pass
 
+    system_message = "You help generate concise summaries of news articles and blog posts that user sends you."
+
     for _ in range(2):
         response = client.beta.prompt_caching.messages.create(
             model="claude-3-5-sonnet-20240620",
@@ -740,7 +755,7 @@ def test_anthropic_prompt_caching_stream(exporter, reader):
             system=[
                 {
                     "type": "text",
-                    "text": "You help generate concise summaries of news articles and blog posts that user sends you.",
+                    "text": system_message,
                 },
             ],
             messages=[
@@ -768,10 +783,15 @@ def test_anthropic_prompt_caching_stream(exporter, reader):
     cache_creation_span = spans[0]
     cache_read_span = spans[1]
 
-    assert cache_creation_span.attributes["gen_ai.prompt.0.role"] == "user"
-    assert text == cache_creation_span.attributes["gen_ai.prompt.0.content"]
-    assert cache_read_span.attributes["gen_ai.prompt.0.role"] == "user"
-    assert text == cache_read_span.attributes["gen_ai.prompt.0.content"]
+    assert cache_creation_span.attributes["gen_ai.prompt.0.role"] == "system"
+    assert system_message == cache_creation_span.attributes["gen_ai.prompt.0.content"]
+    assert cache_read_span.attributes["gen_ai.prompt.0.role"] == "system"
+    assert system_message == cache_read_span.attributes["gen_ai.prompt.0.content"]
+
+    assert cache_creation_span.attributes["gen_ai.prompt.1.role"] == "user"
+    assert text == cache_creation_span.attributes["gen_ai.prompt.1.content"]
+    assert cache_read_span.attributes["gen_ai.prompt.1.role"] == "user"
+    assert text == cache_read_span.attributes["gen_ai.prompt.1.content"]
     assert (
         cache_creation_span.attributes["gen_ai.usage.cache_creation_input_tokens"]
         == cache_read_span.attributes["gen_ai.usage.cache_read_input_tokens"]
@@ -815,6 +835,8 @@ async def test_anthropic_prompt_caching_async_stream(exporter, reader):
     except Exception:
         pass
 
+    system_message = "You help generate concise summaries of news articles and blog posts that user sends you."
+
     for _ in range(2):
         response = await client.beta.prompt_caching.messages.create(
             model="claude-3-5-sonnet-20240620",
@@ -823,7 +845,7 @@ async def test_anthropic_prompt_caching_async_stream(exporter, reader):
             system=[
                 {
                     "type": "text",
-                    "text": "You help generate concise summaries of news articles and blog posts that user sends you.",
+                    "text": system_message,
                 },
             ],
             messages=[
@@ -851,10 +873,15 @@ async def test_anthropic_prompt_caching_async_stream(exporter, reader):
     cache_creation_span = spans[0]
     cache_read_span = spans[1]
 
-    assert cache_creation_span.attributes["gen_ai.prompt.0.role"] == "user"
-    assert text == cache_creation_span.attributes["gen_ai.prompt.0.content"]
-    assert cache_read_span.attributes["gen_ai.prompt.0.role"] == "user"
-    assert text == cache_read_span.attributes["gen_ai.prompt.0.content"]
+    assert cache_creation_span.attributes["gen_ai.prompt.0.role"] == "system"
+    assert system_message == cache_creation_span.attributes["gen_ai.prompt.0.content"]
+    assert cache_read_span.attributes["gen_ai.prompt.0.role"] == "system"
+    assert system_message == cache_read_span.attributes["gen_ai.prompt.0.content"]
+
+    assert cache_creation_span.attributes["gen_ai.prompt.1.role"] == "user"
+    assert text == cache_creation_span.attributes["gen_ai.prompt.1.content"]
+    assert cache_read_span.attributes["gen_ai.prompt.1.role"] == "user"
+    assert text == cache_read_span.attributes["gen_ai.prompt.1.content"]
     assert (
         cache_creation_span.attributes["gen_ai.usage.cache_creation_input_tokens"]
         == cache_read_span.attributes["gen_ai.usage.cache_read_input_tokens"]
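A note on the shared shape of these four tests (sync and async, streaming and not): each sends the identical request twice. The first call writes the long prompt prefix to Anthropic's cache, so its span carries gen_ai.usage.cache_creation_input_tokens; the second call is served from the cache, so its span carries the same count under gen_ai.usage.cache_read_input_tokens. Sketched below with a made-up token count:

# Illustrative only: the usage relationship each test asserts between the
# spans of the repeated request (1187 is a hypothetical count).
cache_creation_span_attrs = {"gen_ai.usage.cache_creation_input_tokens": 1187}
cache_read_span_attrs = {"gen_ai.usage.cache_read_input_tokens": 1187}

assert (
    cache_creation_span_attrs["gen_ai.usage.cache_creation_input_tokens"]
    == cache_read_span_attrs["gen_ai.usage.cache_read_input_tokens"]
)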
