Skip to content

Commit

Permalink
feat(ai-proxy): add AWS Bedrock Converse-API Driver (#13354)
Browse files Browse the repository at this point in the history
Supersedes #13054, which was completely broken.

Adds AWS Bedrock "Converse API" support to Kong AI Gateway.

AG-14
  • Loading branch information
tysoekong authored Jul 30, 2024
1 parent 61e2c76 commit 264341d
Show file tree
Hide file tree
Showing 23 changed files with 1,159 additions and 140 deletions.
5 changes: 5 additions & 0 deletions changelog/unreleased/kong/ai-proxy-aws-bedrock.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
message: |
Kong AI Gateway (AI Proxy and associated plugin family) now supports
all AWS Bedrock "Converse API" models.
type: feature
scope: Plugin
3 changes: 2 additions & 1 deletion kong-3.8.0-0.rockspec
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,7 @@ build = {
["kong.tools.cjson"] = "kong/tools/cjson.lua",
["kong.tools.emmy_debugger"] = "kong/tools/emmy_debugger.lua",
["kong.tools.redis.schema"] = "kong/tools/redis/schema.lua",
["kong.tools.aws_stream"] = "kong/tools/aws_stream.lua",

["kong.runloop.handler"] = "kong/runloop/handler.lua",
["kong.runloop.events"] = "kong/runloop/events.lua",
Expand Down Expand Up @@ -612,8 +613,8 @@ build = {
["kong.llm.drivers.anthropic"] = "kong/llm/drivers/anthropic.lua",
["kong.llm.drivers.mistral"] = "kong/llm/drivers/mistral.lua",
["kong.llm.drivers.llama2"] = "kong/llm/drivers/llama2.lua",

["kong.llm.drivers.gemini"] = "kong/llm/drivers/gemini.lua",
["kong.llm.drivers.bedrock"] = "kong/llm/drivers/bedrock.lua",

["kong.plugins.ai-prompt-decorator.handler"] = "kong/plugins/ai-prompt-decorator/handler.lua",
["kong.plugins.ai-prompt-decorator.schema"] = "kong/plugins/ai-prompt-decorator/schema.lua",
Expand Down
56 changes: 39 additions & 17 deletions kong/clustering/compat/checkers.lua
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ local ipairs = ipairs
local type = type


local log_warn_message
local log_warn_message, _AI_PROVIDER_INCOMPATIBLE
do
local ngx_log = ngx.log
local ngx_WARN = ngx.WARN
Expand All @@ -19,8 +19,24 @@ do
KONG_VERSION, hint, dp_version, action)
ngx_log(ngx_WARN, _log_prefix, msg, log_suffix)
end
end

-- AI providers first introduced at a given control-plane version, keyed by
-- the numeric version encoding (e.g. 3008000000 == 3.8.0.0). Data planes
-- older than that version do not understand these providers.
local _AI_PROVIDERS_ADDED = {
  [3008000000] = {
    "gemini",
    "bedrock",
  },
}

-- Returns true when `provider` was first added in version `ver`, meaning a
-- data plane older than `ver` cannot run it and the plugin config must be
-- rewritten to a compatible fallback (openai/preserve) by the caller.
-- @param provider string: provider name taken from the plugin config
-- @param ver number: numeric version key into _AI_PROVIDERS_ADDED
-- @return boolean
_AI_PROVIDER_INCOMPATIBLE = function(provider, ver)
  -- `or {}` guards against version keys with no entry: without it,
  -- ipairs(nil) would raise a runtime error instead of returning false.
  for _, v in ipairs(_AI_PROVIDERS_ADDED[ver] or {}) do
    if v == provider then
      return true
    end
  end

  return false
end
end

local compatible_checkers = {
{ 3008000000, --[[ 3.8.0.0 ]]
Expand All @@ -40,37 +56,43 @@ local compatible_checkers = {

if plugin.name == 'ai-proxy' then
local config = plugin.config
if config.model.provider == "gemini" then
if _AI_PROVIDER_INCOMPATIBLE(config.model.provider, 3008000000) then
log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
' "openai preserve mode", because ' .. config.model.provider .. ' provider ' ..
' is not supported in this release',
dp_version, log_suffix)

config.model.provider = "openai"
config.route_type = "preserve"
log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
' "openai preserve mode", because gemini' ..
' provider is not supported in this release',
dp_version, log_suffix)

has_update = true
end
end

if plugin.name == 'ai-request-transformer' then
local config = plugin.config
if config.llm.model.provider == "gemini" then
config.llm.model.provider = "openai"
if _AI_PROVIDER_INCOMPATIBLE(config.llm.model.provider, 3008000000) then
log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
' "openai preserve mode", because gemini' ..
' provider is not supported in this release',
dp_version, log_suffix)
' "openai preserve mode", because ' .. config.llm.model.provider .. ' provider ' ..
' is not supported in this release',
dp_version, log_suffix)

config.llm.model.provider = "openai"

has_update = true
end
end

if plugin.name == 'ai-response-transformer' then
local config = plugin.config
if config.llm.model.provider == "gemini" then
config.llm.model.provider = "openai"
if _AI_PROVIDER_INCOMPATIBLE(config.llm.model.provider, 3008000000) then
log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
' "openai preserve mode", because gemini' ..
' provider is not supported in this release',
dp_version, log_suffix)
' "openai preserve mode", because ' .. config.llm.model.provider .. ' provider ' ..
' is not supported in this release',
dp_version, log_suffix)

config.llm.model.provider = "openai"

has_update = true
end
end
Expand Down
9 changes: 9 additions & 0 deletions kong/clustering/compat/removed_fields.lua
Original file line number Diff line number Diff line change
Expand Up @@ -172,6 +172,9 @@ return {
"model.options.gemini",
"auth.gcp_use_service_account",
"auth.gcp_service_account_json",
"model.options.bedrock",
"auth.aws_access_key_id",
"auth.aws_secret_access_key",
},
ai_prompt_decorator = {
"max_request_body_size",
Expand All @@ -188,12 +191,18 @@ return {
"llm.model.options.gemini",
"llm.auth.gcp_use_service_account",
"llm.auth.gcp_service_account_json",
"llm.model.options.bedrock",
"llm.auth.aws_access_key_id",
"llm.auth.aws_secret_access_key",
},
ai_response_transformer = {
"max_request_body_size",
"llm.model.options.gemini",
"llm.auth.gcp_use_service_account",
"llm.auth.gcp_service_account_json",
"llm.model.options.bedrock",
"llm.auth.aws_access_key_id",
"llm.auth.aws_secret_access_key",
},
prometheus = {
"ai_metrics",
Expand Down
2 changes: 1 addition & 1 deletion kong/llm/drivers/anthropic.lua
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,7 @@ local function handle_stream_event(event_t, model_info, route_type)
return delta_to_event(event_data, model_info)

elseif event_id == "message_stop" then
return "[DONE]", nil, nil
return ai_shared._CONST.SSE_TERMINATOR, nil, nil

elseif event_id == "ping" then
return nil, nil, nil
Expand Down
Loading

1 comment on commit 264341d

@github-actions
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Bazel Build

Docker image available kong/kong:264341db658ab9d0a17000ffb65bea7960348556
Artifacts available https://github.com/Kong/kong/actions/runs/10158300667

Please sign in to comment.