feat(ai-proxy): add 3.7 compatibility checkers
tysoekong committed Apr 24, 2024
1 parent 789bebf commit 6d8605d
Showing 3 changed files with 170 additions and 1 deletion.
55 changes: 55 additions & 0 deletions kong/clustering/compat/checkers.lua
@@ -23,6 +23,61 @@ end


local compatible_checkers = {
{ 3007000000, --[[ 3.7.0.0 ]]
function(config_table, dp_version, log_suffix)
local has_update

for _, plugin in ipairs(config_table.plugins or {}) do
if plugin.name == 'ai-proxy' then
local config = plugin.config
if config.model and config.model.options then
if config.model.options.response_streaming then
config.model.options.response_streaming = nil
log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
' response_streaming == nil, because it is not supported' ..
' in this release',
dp_version, log_suffix)
has_update = true
end

if config.model.options.upstream_path then
config.model.options.upstream_path = nil
log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
' upstream_path == nil, because it is not supported' ..
' in this release',
dp_version, log_suffix)
has_update = true
end
end

if config.route_type == "preserve" then
config.route_type = "llm/v1/chat"
log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
' route_type == "llm/v1/chat", because preserve' ..
' mode is not supported in this release',
dp_version, log_suffix)
has_update = true
end
end

if plugin.name == 'ai-request-transformer' or plugin.name == 'ai-response-transformer' then
local config = plugin.config
if config.llm.model
and config.llm.model.options
and config.llm.model.options.upstream_path then
config.llm.model.options.upstream_path = nil
log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
' upstream_path == nil, because it is not supported' ..
' in this release',
dp_version, log_suffix)
has_update = true
end
end
end

return has_update
end,
},
{ 3006000000, --[[ 3.6.0.0 ]]
function(config_table, dp_version, log_suffix)
local has_update
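
For context: each entry in compatible_checkers pairs a numeric version key with a function that rewrites the data-plane payload in place and returns whether anything changed. The sketch below applies the same 3.7 downgrade rules to a sample config table; it is a standalone illustration, not the real module (log_warn_message is replaced by print, and the sample table is invented for the example).

-- Standalone sketch of the 3.7 downgrade rules above (illustration only).
local config_table = {
  plugins = {
    { name = "ai-proxy",
      config = {
        route_type = "preserve",
        model = {
          options = {
            response_streaming = "allow",
            upstream_path = "/anywhere",
          },
        },
      },
    },
  },
}

for _, plugin in ipairs(config_table.plugins or {}) do
  if plugin.name == "ai-proxy" then
    local config = plugin.config
    if config.model and config.model.options then
      -- strip fields an older data plane does not understand
      config.model.options.response_streaming = nil
      config.model.options.upstream_path = nil
    end
    if config.route_type == "preserve" then
      -- fall back to the closest supported route type
      config.route_type = "llm/v1/chat"
    end
    print(plugin.name .. " downgraded: route_type = " .. config.route_type)
  end
end
-- prints: ai-proxy downgraded: route_type = llm/v1/chat
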
2 changes: 1 addition & 1 deletion kong/llm/init.lua
@@ -49,7 +49,7 @@ local model_options_schema = {
fields = {
{ response_streaming = {
type = "string",
description = "Whether to 'optionally allow', 'deny', or 'always' (force) the streaming of answers via WebSocket.",
description = "Whether to 'optionally allow', 'deny', or 'always' (force) the streaming of answers via server events.",
required = true,
default = "allow",
one_of = { "allow", "deny", "always" } }},
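
The one_of constraint above means any value other than the three listed strings is rejected at validation time, with "allow" applied as the default when the field is omitted. A minimal sketch of that semantics (this is not Kong's schema engine; validate is a hypothetical helper written only to illustrate the constraint):

-- Illustration of the one_of / default semantics for response_streaming.
local allowed = { "allow", "deny", "always" }

local function validate(value)
  value = value or "allow"  -- schema default
  for _, v in ipairs(allowed) do
    if value == v then
      return true, value
    end
  end
  return false, "expected one of: allow, deny, always"
end

assert(validate("always"))
assert(validate(nil))             -- defaults to "allow"
assert(not validate("websocket")) -- rejected
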
114 changes: 114 additions & 0 deletions spec/02-integration/09-hybrid_mode/09-config-compat_spec.lua
@@ -472,6 +472,120 @@ describe("CP/DP config compat transformations #" .. strategy, function()
end)
end)
end)

describe("ai plugins", function()
it("[ai-proxy] sets unsupported AI LLM properties to nil or defaults", function()
-- [[ 3.7.x ]] --
local ai_proxy = admin.plugins:insert {
name = "ai-proxy",
enabled = true,
config = {
route_type = "preserve", -- becomes 'llm/v1/chat'
auth = {
header_name = "header",
header_value = "value",
},
model = {
name = "any-model-name",
provider = "openai",
options = {
max_tokens = 512,
temperature = 0.5,
response_streaming = "allow", -- becomes nil
upstream_path = "/anywhere", -- becomes nil
},
},
},
}
-- ]]

local expected_ai_proxy_prior_37 = utils.cycle_aware_deep_copy(ai_proxy)
expected_ai_proxy_prior_37.config.model.options.response_streaming = nil
expected_ai_proxy_prior_37.config.model.options.upstream_path = nil
expected_ai_proxy_prior_37.config.route_type = "llm/v1/chat"

do_assert(utils.uuid(), "3.6.0", expected_ai_proxy_prior_37)

-- cleanup
admin.plugins:remove({ id = ai_proxy.id })
end)

it("[ai-request-transformer] sets unsupported AI LLM properties to nil or defaults", function()
-- [[ 3.7.x ]] --
local ai_request_transformer = admin.plugins:insert {
name = "ai-request-transformer",
enabled = true,
config = {
prompt = "Convert my message to XML.",
llm = {
route_type = "llm/v1/chat",
auth = {
header_name = "header",
header_value = "value",
},
model = {
name = "any-model-name",
provider = "azure",
options = {
azure_instance = "azure-1",
azure_deployment_id = "azdep-1",
azure_api_version = "2023-01-01",
max_tokens = 512,
temperature = 0.5,
upstream_path = "/anywhere", -- becomes nil
},
},
},
},
}
-- ]]

local expected_ai_request_transformer_prior_37 = utils.cycle_aware_deep_copy(ai_request_transformer)
expected_ai_request_transformer_prior_37.config.llm.model.options.upstream_path = nil

do_assert(utils.uuid(), "3.6.0", expected_ai_request_transformer_prior_37)

-- cleanup
admin.plugins:remove({ id = ai_request_transformer.id })
end)

it("[ai-response-transformer] sets unsupported AI LLM properties to nil or defaults", function()
-- [[ 3.7.x ]] --
local ai_response_transformer = admin.plugins:insert {
name = "ai-response-transformer",
enabled = true,
config = {
prompt = "Convert my message to XML.",
llm = {
route_type = "llm/v1/chat",
auth = {
header_name = "header",
header_value = "value",
},
model = {
name = "any-model-name",
provider = "cohere",
options = {
azure_api_version = "2023-01-01",
max_tokens = 512,
temperature = 0.5,
upstream_path = "/anywhere", -- becomes nil
},
},
},
},
}
-- ]]

local expected_ai_response_transformer_prior_37 = utils.cycle_aware_deep_copy(ai_response_transformer)
expected_ai_response_transformer_prior_37.config.llm.model.options.upstream_path = nil

do_assert(utils.uuid(), "3.6.0", expected_ai_response_transformer_prior_37)

-- cleanup
admin.plugins:remove({ id = ai_response_transformer.id })
end)
end)
end)
end)
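
The tests above target a data plane reporting "3.6.0". Judging from the inline comments in checkers.lua (3007000000 --[[ 3.7.0.0 ]], 3006000000 --[[ 3.6.0.0 ]]), the numeric keys appear to pack each version segment into three decimal digits, so a 3.6.0 data plane sits below the 3007000000 threshold and the new checker fires. A hedged sketch of that apparent encoding (version_to_num is a hypothetical helper, not Kong's actual implementation):

-- Hypothetical conversion matching the comments in checkers.lua:
-- each version segment occupies three decimal digits.
local function version_to_num(version)
  local major, minor, patch, build =
    version:match("^(%d+)%.(%d+)%.(%d+)%.?(%d*)$")
  return tonumber(major) * 1000000000
       + tonumber(minor) * 1000000
       + tonumber(patch) * 1000
       + (tonumber(build) or 0)
end

assert(version_to_num("3.7.0.0") == 3007000000)
assert(version_to_num("3.6.0")   <  3007000000) -- old DP: checker applies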
