Skip to content

Commit

Permalink
fix(ai-proxy): (Bedrock)(AG-166) properly map guardrails between request and response

Browse files Browse the repository at this point in the history
  • Loading branch information
tysoekong authored and fffonion committed Nov 25, 2024
1 parent d19edcf commit db9d6ae
Show file tree
Hide file tree
Showing 3 changed files with 47 additions and 1 deletion.
3 changes: 3 additions & 0 deletions changelog/unreleased/kong/ai-bedrock-fix-guardrails.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
message: "**ai-proxy**: Fixed a bug where Bedrock Guardrail config was ignored."
type: bugfix
scope: Plugin
9 changes: 9 additions & 0 deletions kong/llm/drivers/bedrock.lua
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ local _OPENAI_STOP_REASON_MAPPING = {
["max_tokens"] = "length",
["end_turn"] = "stop",
["tool_use"] = "tool_calls",
["guardrail_intervened"] = "guardrail_intervened",
}

_M.bedrock_unsupported_system_role_patterns = {
Expand All @@ -46,6 +47,10 @@ local function to_bedrock_generation_config(request_table)
}
end

-- Forward the client's guardrailConfig to the Bedrock request unchanged.
-- nil is a valid input (no guardrail configured) and is returned as-is;
-- kept as a named hook so any future guardrail mapping lives in one place.
local function to_bedrock_guardrail_config(config)
  return config -- nil-safe passthrough
end

-- this is a placeholder and is archaic now,
-- leave it in for backwards compatibility
local function to_additional_request_fields(request_table)
Expand Down Expand Up @@ -310,6 +315,7 @@ local function to_bedrock_chat_openai(request_table, model_info, route_type)
end

new_r.inferenceConfig = to_bedrock_generation_config(request_table)
new_r.guardrailConfig = to_bedrock_guardrail_config(request_table.guardrailConfig)

-- backwards compatibility
new_r.toolConfig = request_table.bedrock
Expand Down Expand Up @@ -375,6 +381,8 @@ local function from_bedrock_chat_openai(response, model_info, route_type)
}
end

client_response.trace = response.trace -- may be nil, **do not** map to cjson.null

return cjson.encode(client_response)
end

Expand Down Expand Up @@ -601,6 +609,7 @@ end
-- Test-only exports: when the _TEST flag is set, surface otherwise-private
-- locals on the module table. The leading underscore marks them as internal
-- API that production callers must not rely on.
if _G._TEST then
  _M._from_tool_call_response = from_tool_call_response
  _M._to_bedrock_chat_openai = to_bedrock_chat_openai
  _M._to_tools = to_tools
end

Expand Down
36 changes: 35 additions & 1 deletion spec/03-plugins/38-ai-proxy/01-unit_spec.lua
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,24 @@ local SAMPLE_LLM_V1_CHAT_WITH_SOME_OPTS = {
another_extra_param = 0.5,
}

-- Chat-request fixture carrying a Bedrock guardrailConfig block; used to
-- verify that guardrail settings survive the OpenAI -> Bedrock mapping.
local SAMPLE_LLM_V1_CHAT_WITH_GUARDRAILS = {
  messages = {
    {
      role = "system",
      content = "You are a mathematician.",
    },
    {
      role = "assistant",
      content = "What is 1 + 1?",
    },
  },
  guardrailConfig = {
    guardrailIdentifier = "yu5xwvfp4sud",
    guardrailVersion = "1",
    trace = "enabled",
  },
}

local SAMPLE_DOUBLE_FORMAT = {
messages = {
[1] = {
Expand Down Expand Up @@ -976,6 +994,22 @@ describe(PLUGIN_NAME .. ": (unit)", function()
arguments = "{\"areas\":[121,212,313]}"
})
end)
end)

-- Verifies that a guardrailConfig on the incoming request is copied verbatim
-- onto the Bedrock-formatted request by _to_bedrock_chat_openai.
-- Fixes vs. original: the description said "generation config" although the
-- assertion checks guardrailConfig; assert.same took (actual, expected) while
-- the luassert convention is (expected, actual), which inverts failure diffs;
-- and the local held the whole transformed request, not just the guardrails.
it("transforms guardrails into bedrock guardrail config", function()
  local model_info = {
    route_type = "llm/v1/chat",
    name = "some-model",
    provider = "bedrock",
  }

  local bedrock_request = bedrock_driver._to_bedrock_chat_openai(SAMPLE_LLM_V1_CHAT_WITH_GUARDRAILS, model_info, "llm/v1/chat")

  assert.not_nil(bedrock_request)

  -- expected first, actual second (luassert convention)
  assert.same({
    guardrailIdentifier = "yu5xwvfp4sud",
    guardrailVersion = "1",
    trace = "enabled",
  }, bedrock_request.guardrailConfig)
end)
end)
end)

1 comment on commit db9d6ae

@github-actions
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Bazel Build

Docker image available kong/kong:db9d6ae7a9987e3a29efd20ad66122d3b834813e
Artifacts available https://github.com/Kong/kong/actions/runs/12007174708

Please sign in to comment.