
refactor(ai-prompt-decorator): migrate ai-prompt-decorator to new framework
fffonion committed Nov 20, 2024
1 parent c96cb7e commit bf0d5b6
Showing 5 changed files with 101 additions and 81 deletions.
1 change: 1 addition & 0 deletions kong-3.9.0-0.rockspec
@@ -645,6 +645,7 @@ build = {
["kong.llm.plugin.shared-filters.serialize-analytics"] = "kong/llm/plugin/shared-filters/serialize-analytics.lua",

["kong.plugins.ai-prompt-decorator.handler"] = "kong/plugins/ai-prompt-decorator/handler.lua",
["kong.plugins.ai-prompt-decorator.filters.decorate-prompt"] = "kong/plugins/ai-prompt-decorator/filters/decorate-prompt.lua",
["kong.plugins.ai-prompt-decorator.schema"] = "kong/plugins/ai-prompt-decorator/schema.lua",

["kong.plugins.ai-prompt-template.handler"] = "kong/plugins/ai-prompt-template/handler.lua",
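The rockspec entry is what makes the new filter module loadable at runtime: the table key is the Lua module name and the value is its file path, and the key must match the require path the handler constructs. A quick sketch of the resulting lookup (the NAME and STAGE values are taken from the filter source below):

  -- the refactored handler requires the filter by this exact module name
  local filter = require("kong.plugins.ai-prompt-decorator.filters.decorate-prompt")
  assert(filter.NAME == "decorate-prompt")
  assert(filter.STAGE == "REQ_TRANSFORMATION")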
87 changes: 87 additions & 0 deletions kong/plugins/ai-prompt-decorator/filters/decorate-prompt.lua
@@ -0,0 +1,87 @@
-- This software is copyright Kong Inc. and its licensors.
-- Use of the software is subject to the agreement between your organization
-- and Kong Inc. If there is no such agreement, use is governed by and
-- subject to the terms of the Kong Master Software License Agreement found
-- at https://konghq.com/enterprisesoftwarelicense/.
-- [ END OF LICENSE 0867164ffc95e54f04670b5169c09574bdbd9bba ]

local new_tab = require("table.new")
local ai_plugin_ctx = require("kong.llm.plugin.ctx")

local _M = {
  NAME = "decorate-prompt",
  STAGE = "REQ_TRANSFORMATION",
}

local FILTER_OUTPUT_SCHEMA = {
  decorated = "boolean",
}

local _, set_ctx = ai_plugin_ctx.get_namespaced_accesors(_M.NAME, FILTER_OUTPUT_SCHEMA)

local EMPTY = {}


local function bad_request(msg)
  kong.log.debug(msg)
  return kong.response.exit(400, { error = { message = msg } })
end


-- Adds the prompts to the request prepend/append.
-- @tparam table request The deserialized JSON body of the request
-- @tparam table conf The plugin configuration
-- @treturn table The decorated request (same table, content updated)
local function execute(request, conf)
  local prepend = conf.prompts.prepend or EMPTY
  local append = conf.prompts.append or EMPTY

  local old_messages = request.messages
  local new_messages = new_tab(#append + #prepend + #old_messages, 0)
  request.messages = new_messages

  local n = 0

  for _, msg in ipairs(prepend) do
    n = n + 1
    new_messages[n] = { role = msg.role, content = msg.content }
  end

  for _, msg in ipairs(old_messages) do
    n = n + 1
    new_messages[n] = msg
  end

  for _, msg in ipairs(append) do
    n = n + 1
    new_messages[n] = { role = msg.role, content = msg.content }
  end

  return request
end

if _G._TEST then
  -- only export this function when testing (using a different name!)
  _M._execute = execute
end


function _M:run(conf)
  -- if plugin ordering was altered, receive the already-"decorated" request
  local request_body_table = ai_plugin_ctx.get_request_body_table_inuse()
  if not request_body_table then
    return bad_request("this LLM route only supports application/json requests")
  end

  if #(request_body_table.messages or EMPTY) < 1 then
    return bad_request("this LLM route only supports llm/chat type requests")
  end

  kong.service.request.set_body(execute(request_body_table, conf), "application/json")

  set_ctx("decorated", true)

  return true
end

return _M
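Since execute is a pure transformation of the request table, its effect is easy to show in isolation. A minimal sketch, assuming a conf shaped like the prompts.prepend/prompts.append configuration the code above reads:

  -- hypothetical configuration; the field names mirror conf.prompts.* in execute
  local conf = {
    prompts = {
      prepend = { { role = "system", content = "Answer in French." } },
      append  = { { role = "user", content = "Keep it brief." } },
    },
  }

  local request = { messages = { { role = "user", content = "What is Kong?" } } }
  -- execute(request, conf) rebuilds request.messages in place as:
  --   1. { role = "system", content = "Answer in French." }  -- prepended
  --   2. { role = "user",   content = "What is Kong?" }      -- original
  --   3. { role = "user",   content = "Keep it brief." }     -- appended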
82 changes: 7 additions & 75 deletions kong/plugins/ai-prompt-decorator/handler.lua
@@ -1,79 +1,11 @@
-local new_tab = require("table.new")
-local llm_state = require("kong.llm.state")
-local EMPTY = {}
-
-local plugin = {
-  PRIORITY = 772,
-  VERSION = require("kong.meta").version
-}
-
-
-local function bad_request(msg)
-  kong.log.debug(msg)
-  return kong.response.exit(400, { error = { message = msg } })
-end
-
-
--- Adds the prompts to the request prepend/append.
--- @tparam table request The deserialized JSON body of the request
--- @tparam table conf The plugin configuration
--- @treturn table The decorated request (same table, content updated)
-local function execute(request, conf)
-  local prepend = conf.prompts.prepend or EMPTY
-  local append = conf.prompts.append or EMPTY
-
-  local old_messages = request.messages
-  local new_messages = new_tab(#append + #prepend + #old_messages, 0)
-  request.messages = new_messages
-
-  local n = 0
-
-  for _, msg in ipairs(prepend) do
-    n = n + 1
-    new_messages[n] = { role = msg.role, content = msg.content }
-  end
-
-  for _, msg in ipairs(old_messages) do
-    n = n + 1
-    new_messages[n] = msg
-  end
-
-  for _, msg in ipairs(append) do
-    n = n + 1
-    new_messages[n] = { role = msg.role, content = msg.content }
-  end
-
-  return request
-end
-
-
-function plugin:access(conf)
-  kong.service.request.enable_buffering()
-  llm_state.set_prompt_decorated() -- future use
-
-  -- if plugin ordering was altered, receive the "decorated" request
-  local request = kong.request.get_body("application/json", nil, conf.max_request_body_size)
-  if type(request) ~= "table" then
-    return bad_request("this LLM route only supports application/json requests")
-  end
-
-  if #(request.messages or EMPTY) < 1 then
-    return bad_request("this LLM route only supports llm/chat type requests")
-  end
-
-  kong.service.request.set_body(execute(request, conf), "application/json")
-end
-
-
-if _G._TEST then
-  -- only if we're testing export this function (using a different name!)
-  plugin._execute = execute
-end
-
-
-return plugin
+local ai_plugin_base = require("kong.llm.plugin.base")
+
+local NAME = "ai-prompt-decorator"
+local PRIORITY = 772
+
+local AIPlugin = ai_plugin_base.define(NAME, PRIORITY)
+
+AIPlugin:enable(AIPlugin.register_filter(require("kong.llm.plugin.shared-filters.parse-request")))
+AIPlugin:enable(AIPlugin.register_filter(require("kong.plugins." .. NAME .. ".filters.decorate-prompt")))
+
+return AIPlugin:as_kong_plugin()
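After the migration the handler is purely declarative: it defines the plugin and wires up filters, while all per-request logic lives in the filter modules. Judging from decorate-prompt above, a filter is a table with NAME and STAGE fields plus a run(conf) method. A hypothetical filter following the same shape (the name is illustrative, not part of the framework):

  local _M = {
    NAME = "my-example-filter",    -- hypothetical name
    STAGE = "REQ_TRANSFORMATION",  -- the stage decorate-prompt also runs in
  }

  function _M:run(conf)
    -- transform or validate the request here; decorate-prompt returns true on success
    return true
  end

  return _M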
4 changes: 2 additions & 2 deletions spec/03-plugins/41-ai-prompt-decorator/01-unit_spec.lua
@@ -113,8 +113,8 @@ describe(PLUGIN_NAME .. ": (unit)", function()

   setup(function()
     _G._TEST = true
-    package.loaded["kong.plugins.ai-prompt-decorator.handler"] = nil
-    access_handler = require("kong.plugins.ai-prompt-decorator.handler")
+    package.loaded["kong.plugins.ai-prompt-decorator.filters.decorate-prompt"] = nil
+    access_handler = require("kong.plugins.ai-prompt-decorator.filters.decorate-prompt")
   end)

   teardown(function()
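Because the filter keeps the _G._TEST escape hatch, the unit tests only needed their require path updated. A minimal sketch of exercising the exported function directly, assuming a conf shaped like the plugin configuration:

  _G._TEST = true
  local filter = require("kong.plugins.ai-prompt-decorator.filters.decorate-prompt")

  local conf = { prompts = { prepend = { { role = "system", content = "You are a chef." } } } }
  local request = { messages = { { role = "user", content = "hello" } } }

  local decorated = filter._execute(request, conf)
  assert(decorated.messages[1].role == "system")  -- prepended prompt comes first
  assert(#decorated.messages == 2)                -- original message follows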
@@ -3,7 +3,7 @@ local helpers = require "spec.helpers"
 local PLUGIN_NAME = "ai-prompt-decorator"


-for _, strategy in helpers.all_strategies() do if strategy ~= "cassandra" then
+for _, strategy in helpers.all_strategies() do
   describe(PLUGIN_NAME .. ": (access) [#" .. strategy .. "]", function()
     local client

@@ -88,7 +88,7 @@ for _, strategy in helpers.all_strategies() do if strategy ~= "cassandra" then

     assert.response(r).has.status(400)
     local json = assert.response(r).has.jsonbody()
-    assert.same(json, { error = { message = "this LLM route only supports llm/chat type requests" }})
+    assert.same({ error = { message = "this LLM route only supports llm/chat type requests" }}, json)
   end)


@@ -107,9 +107,9 @@ for _, strategy in helpers.all_strategies() do if strategy ~= "cassandra" then

     assert.response(r).has.status(400)
     local json = assert.response(r).has.jsonbody()
-    assert.same(json, { error = { message = "this LLM route only supports llm/chat type requests" }})
+    assert.same({ error = { message = "this LLM route only supports llm/chat type requests" }}, json)
   end)

 end)

-end end
+end
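Two incidental cleanups ride along in this spec: the strategy loop drops its cassandra guard, presumably because helpers.all_strategies() no longer yields cassandra now that Kong has removed that datastore, and the assert.same calls swap their arguments, since luassert treats the first argument as the expected value and the second as the actual one:

  -- expected first, actual second, so failure messages read correctly
  assert.same({ error = { message = "..." } }, json)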
