Skip to content

Commit

Permalink
refactor(ai-prompt-template): migrate ai-prompt-template to new
Browse files Browse the repository at this point in the history
framework
  • Loading branch information
fffonion committed Nov 20, 2024
1 parent 97234e0 commit 4eaf256
Show file tree
Hide file tree
Showing 4 changed files with 135 additions and 113 deletions.
9 changes: 5 additions & 4 deletions kong-3.9.0-0.rockspec
Original file line number Diff line number Diff line change
Expand Up @@ -644,14 +644,15 @@ build = {
["kong.llm.plugin.shared-filters.parse-sse-chunk"] = "kong/llm/plugin/shared-filters/parse-sse-chunk.lua",
["kong.llm.plugin.shared-filters.serialize-analytics"] = "kong/llm/plugin/shared-filters/serialize-analytics.lua",

["kong.plugins.ai-prompt-decorator.handler"] = "kong/plugins/ai-prompt-decorator/handler.lua",
["kong.plugins.ai-prompt-decorator.filters.decorate-prompt"] = "kong/plugins/ai-prompt-decorator/filters/decorate-prompt.lua",
["kong.plugins.ai-prompt-decorator.schema"] = "kong/plugins/ai-prompt-decorator/schema.lua",

["kong.plugins.ai-prompt-template.handler"] = "kong/plugins/ai-prompt-template/handler.lua",
["kong.plugins.ai-prompt-template.filters.render-prompt-template"] = "kong/plugins/ai-prompt-template/filters/render-prompt-template.lua",
["kong.plugins.ai-prompt-template.schema"] = "kong/plugins/ai-prompt-template/schema.lua",
["kong.plugins.ai-prompt-template.templater"] = "kong/plugins/ai-prompt-template/templater.lua",

["kong.plugins.ai-prompt-decorator.handler"] = "kong/plugins/ai-prompt-decorator/handler.lua",
["kong.plugins.ai-prompt-decorator.filters.decorate-prompt"] = "kong/plugins/ai-prompt-decorator/filters/decorate-prompt.lua",
["kong.plugins.ai-prompt-decorator.schema"] = "kong/plugins/ai-prompt-decorator/schema.lua",

["kong.plugins.ai-prompt-guard.filters.guard-prompt"] = "kong/plugins/ai-prompt-guard/filters/guard-prompt.lua",
["kong.plugins.ai-prompt-guard.handler"] = "kong/plugins/ai-prompt-guard/handler.lua",
["kong.plugins.ai-prompt-guard.schema"] = "kong/plugins/ai-prompt-guard/schema.lua",
Expand Down
122 changes: 122 additions & 0 deletions kong/plugins/ai-prompt-template/filters/render-prompt-template.lua
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
-- This software is copyright Kong Inc. and its licensors.
-- Use of the software is subject to the agreement between your organization
-- and Kong Inc. If there is no such agreement, use is governed by and
-- subject to the terms of the Kong Master Software License Agreement found
-- at https://konghq.com/enterprisesoftwarelicense/.
-- [ END OF LICENSE 0867164ffc95e54f04670b5169c09574bdbd9bba ]

local ai_plugin_ctx = require("kong.llm.plugin.ctx")
local templater = require("kong.plugins.ai-prompt-template.templater")

local ipairs = ipairs
local type = type

-- Filter identity: NAME doubles as the namespace for per-request ctx
-- storage; STAGE tells the AI plugin framework when to invoke :run().
local _M = {
  NAME = "render-prompt-template",
  STAGE = "REQ_TRANSFORMATION",
}

-- Shape of the values this filter publishes into its ctx namespace
-- (readable by other filters in the same plugin chain).
local FILTER_OUTPUT_SCHEMA = {
  transformed = "boolean",
}

-- Only the setter is used here; the getter is discarded.
-- NOTE: "accesors" is the spelling of the framework API itself — do not "fix" it.
local _, set_ctx = ai_plugin_ctx.get_namespaced_accesors(_M.NAME, FILTER_OUTPUT_SCHEMA)


-- Serializer key under which the untouched client request body is logged
-- when conf.log_original_request is enabled.
local LOG_ENTRY_KEYS = {
  REQUEST_BODY = "ai.payload.original_request",
}


--- Reject the current request with HTTP 400.
-- Logs the reason at debug level, then halts request processing via the PDK.
-- @tparam string msg human-readable error description returned to the client
local function bad_request(msg)
  kong.log.debug(msg)
  local body = { error = { message = msg } }
  return kong.response.exit(400, body)
end



-- Checks if the passed in reference looks like a reference, and returns the template name.
-- Valid references start with '{template://' and end with '}'.
-- @tparam string reference reference to check
-- @treturn string the reference template name or nil if it's not a reference
--- Extract the template name from a template reference.
-- A valid reference is a string of the form '{template://<name>}'.
-- @tparam string reference value to inspect (non-strings are rejected)
-- @treturn string|nil the template name, or nil when not a reference
local function extract_template_name(reference)
  if type(reference) ~= "string" then
    return nil
  end

  -- '{' and '}' are not magic characters in Lua patterns, so no escaping
  -- is needed; the greedy capture matches everything between the markers.
  return reference:match("^{template://(.*)}$")
end



--- Find a template by name in the list of templates.
-- @tparam string reference_name the name of the template to find
-- @tparam table templates the list of templates to search
-- @treturn string the template if found, or nil + error message if not found
--- Look up a template entry by name.
-- @tparam string reference_name the template name to search for
-- @tparam table templates sequence of template entries, each with a `name` field
-- @treturn table|nil the matching entry, or nil when absent
-- @treturn string|nil error message when no entry matched
local function find_template(reference_name, templates)
  for idx = 1, #templates do
    local entry = templates[idx]
    if entry.name == reference_name then
      return entry, nil
    end
  end

  return nil, "could not find template name [" .. reference_name .. "]"
end



--- Filter entry point: resolve a '{template://NAME}' reference in the request
--- body and substitute the rendered template as the upstream request.
-- @tparam table conf plugin configuration (templates, allow_untemplated_requests,
--   log_original_request, max_request_body_size)
-- @treturn boolean true on success (including the allowed pass-through case);
--   error paths do not return — bad_request() terminates the request with a 400
function _M:run(conf)
  if conf.log_original_request then
    -- Capture the body before any transformation so serialized logs reflect
    -- what the client actually sent.
    kong.log.set_serialize_value(LOG_ENTRY_KEYS.REQUEST_BODY, kong.request.get_raw_body(conf.max_request_body_size))
  end

  -- if plugin ordering was altered, receive the "decorated" request
  local request_body_table = kong.request.get_body("application/json", nil, conf.max_request_body_size)
  if type(request_body_table) ~= "table" then
    return bad_request("this LLM route only supports application/json requests")
  end

  -- 'messages' indicates llm/v1/chat format, 'prompt' llm/v1/completions;
  -- a templated request carries the reference string in one of these fields.
  local messages = request_body_table.messages
  local prompt = request_body_table.prompt

  if messages and prompt then
    return bad_request("cannot run 'messages' and 'prompt' templates at the same time")
  end

  local reference = messages or prompt
  if not reference then
    return bad_request("only 'llm/v1/chat' and 'llm/v1/completions' formats are supported for templating")
  end

  -- Non-string values (e.g. a regular chat messages array) yield nil here,
  -- which routes to the pass-through / rejection branch below.
  local template_name = extract_template_name(reference)
  if not template_name then
    if conf.allow_untemplated_requests then
      return true -- not a reference, do nothing
    end

    return bad_request("this LLM route only supports templated requests")
  end

  local requested_template, err = find_template(template_name, conf.templates)
  if not requested_template then
    return bad_request(err)
  end

  -- try to render the replacement request
  -- NOTE(review): this 'err' shadows the one declared above; harmless, but
  -- luacheck would flag the shadowing.
  local rendered_template, err = templater.render(requested_template, request_body_table.properties or {})
  if err then
    return bad_request(err)
  end

  -- Replace the client body with the rendered template output for upstream.
  kong.service.request.set_raw_body(rendered_template)

  set_ctx("transformed", true)
  return true
end


return _M
113 changes: 6 additions & 107 deletions kong/plugins/ai-prompt-template/handler.lua
Original file line number Diff line number Diff line change
@@ -1,112 +1,11 @@

local templater = require("kong.plugins.ai-prompt-template.templater")
local llm_state = require("kong.llm.state")
local ipairs = ipairs
local type = type
local ai_plugin_base = require("kong.llm.plugin.base")

local NAME = "ai-prompt-template"
local PRIORITY = 773

local AIPlugin = ai_plugin_base.define(NAME, PRIORITY)

local AIPromptTemplateHandler = {
PRIORITY = 773,
VERSION = require("kong.meta").version,
}
AIPlugin:enable(AIPlugin.register_filter(require("kong.plugins." .. NAME .. ".filters.render-prompt-template")))



-- Serializer key for logging the original (pre-template) request body.
local LOG_ENTRY_KEYS = {
  REQUEST_BODY = "ai.payload.original_request",
}



--- Terminate the request with a 400 response carrying the given message.
-- The message is also emitted to the debug log for operators.
-- @tparam string msg human-readable error description
local function bad_request(msg)
  kong.log.debug(msg)
  local payload = { error = { message = msg } }
  return kong.response.exit(400, payload)
end



-- Checks if the passed in reference looks like a reference, and returns the template name.
-- Valid references start with '{template://' and end with '}'.
-- @tparam string reference reference to check
-- @treturn string the reference template name or nil if it's not a reference
--- Return the template name embedded in a '{template://<name>}' reference.
-- @tparam string reference value to inspect
-- @treturn string|nil the reference template name, or nil if it's not a reference
local function extract_template_name(reference)
  if type(reference) ~= "string" then
    return nil
  end

  local marker = "{template://"
  local has_prefix = reference:sub(1, #marker) == marker
  local has_suffix = reference:sub(-1) == "}"
  if not (has_prefix and has_suffix) then
    return nil
  end

  -- strip the marker and the trailing '}'
  return reference:sub(#marker + 1, -2)
end



--- Find a template by name in the list of templates.
-- @tparam string reference_name the name of the template to find
-- @tparam table templates the list of templates to search
-- @treturn string the template if found, or nil + error message if not found
--- Search the configured templates for one with the given name.
-- @tparam string reference_name name of the template to find
-- @tparam table templates sequence of template entries
-- @treturn table|nil the template if found, otherwise nil
-- @treturn string|nil error message when not found
local function find_template(reference_name, templates)
  local i = 1
  while templates[i] ~= nil do
    if templates[i].name == reference_name then
      return templates[i], nil
    end
    i = i + 1
  end

  return nil, "could not find template name [" .. reference_name .. "]"
end



--- Access-phase handler (legacy, pre-filter-framework implementation).
-- Validates the JSON request, resolves a '{template://NAME}' reference
-- against conf.templates, and substitutes the rendered result as the
-- upstream request body.
-- @tparam table conf plugin configuration
function AIPromptTemplateHandler:access(conf)
  -- NOTE(review): presumably enables buffered proxying so later AI phases can
  -- read the full body — confirm against the Kong PDK docs for this version.
  kong.service.request.enable_buffering()
  -- Record in shared LLM state that this request was templated.
  llm_state.set_prompt_templated()

  if conf.log_original_request then
    -- Capture the pristine body before transformation for serialized logs.
    kong.log.set_serialize_value(LOG_ENTRY_KEYS.REQUEST_BODY, kong.request.get_raw_body(conf.max_request_body_size))
  end

  local request = kong.request.get_body("application/json", nil, conf.max_request_body_size)
  if type(request) ~= "table" then
    return bad_request("this LLM route only supports application/json requests")
  end

  -- 'messages' (llm/v1/chat) and 'prompt' (llm/v1/completions) are
  -- mutually exclusive carriers of the template reference.
  local messages = request.messages
  local prompt = request.prompt

  if messages and prompt then
    return bad_request("cannot run 'messages' and 'prompt' templates at the same time")
  end

  local reference = messages or prompt
  if not reference then
    return bad_request("only 'llm/v1/chat' and 'llm/v1/completions' formats are supported for templating")
  end

  local template_name = extract_template_name(reference)
  if not template_name then
    if conf.allow_untemplated_requests then
      return -- not a reference, do nothing
    end

    return bad_request("this LLM route only supports templated requests")
  end

  local requested_template, err = find_template(template_name, conf.templates)
  if not requested_template then
    return bad_request(err)
  end

  -- try to render the replacement request
  local rendered_template, err = templater.render(requested_template, request.properties or {})
  if err then
    return bad_request(err)
  end

  -- Forward the rendered template output upstream instead of the reference.
  kong.service.request.set_raw_body(rendered_template)
end


return AIPromptTemplateHandler
return AIPlugin:as_kong_plugin()
4 changes: 2 additions & 2 deletions spec/03-plugins/43-ai-prompt-template/02-integration_spec.lua
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ local PLUGIN_NAME = "ai-prompt-template"



for _, strategy in helpers.all_strategies() do if strategy ~= "cassandra" then
for _, strategy in helpers.all_strategies() do
describe(PLUGIN_NAME .. ": (access) [#" .. strategy .. "]", function()
local client

Expand Down Expand Up @@ -426,4 +426,4 @@ for _, strategy in helpers.all_strategies() do if strategy ~= "cassandra" then

end)

end end
end

0 comments on commit 4eaf256

Please sign in to comment.