From a231e49d23a5e3d4a094488270d3f54c0c7bd78c Mon Sep 17 00:00:00 2001
From: Wangchong Zhou
Date: Tue, 4 Jun 2024 18:42:11 +0800
Subject: [PATCH] feat(plugins): enable ai plugins to read request body from
 buffered file

---
 changelog/unreleased/kong/ai-plugin-read-file.yml |  3 +++
 kong/plugins/ai-prompt-decorator/handler.lua      |  2 +-
 kong/plugins/ai-prompt-decorator/schema.lua       |  4 +++-
 kong/plugins/ai-prompt-guard/handler.lua          |  2 +-
 kong/plugins/ai-prompt-guard/schema.lua           |  6 ++++++
 kong/plugins/ai-prompt-template/handler.lua       |  4 ++--
 kong/plugins/ai-prompt-template/schema.lua        |  6 ++++++
 kong/plugins/ai-proxy/handler.lua                 |  2 +-
 kong/plugins/ai-proxy/schema.lua                  |  7 +++++++
 kong/plugins/ai-request-transformer/handler.lua   |  8 ++++----
 kong/plugins/ai-request-transformer/schema.lua    | 10 +++++++++-
 kong/plugins/ai-response-transformer/handler.lua  |  4 +++-
 kong/plugins/ai-response-transformer/schema.lua   |  7 +++++++
 13 files changed, 53 insertions(+), 12 deletions(-)
 create mode 100644 changelog/unreleased/kong/ai-plugin-read-file.yml

diff --git a/changelog/unreleased/kong/ai-plugin-read-file.yml b/changelog/unreleased/kong/ai-plugin-read-file.yml
new file mode 100644
index 000000000000..d10f38c021d9
--- /dev/null
+++ b/changelog/unreleased/kong/ai-plugin-read-file.yml
@@ -0,0 +1,3 @@
+message: "allow AI plugin to read request from buffered file"
+type: feature
+scope: "Plugin"
diff --git a/kong/plugins/ai-prompt-decorator/handler.lua b/kong/plugins/ai-prompt-decorator/handler.lua
index 7103ce5903b4..23a18ea73997 100644
--- a/kong/plugins/ai-prompt-decorator/handler.lua
+++ b/kong/plugins/ai-prompt-decorator/handler.lua
@@ -55,7 +55,7 @@ function plugin:access(conf)
   kong.ctx.shared.ai_prompt_decorated = true -- future use
 
   -- if plugin ordering was altered, receive the "decorated" request
-  local request = kong.request.get_body("application/json")
+  local request = kong.request.get_body("application/json", nil, conf.max_request_body_size)
   if type(request) ~= "table" then
     return bad_request("this LLM route only supports application/json requests")
   end
diff --git a/kong/plugins/ai-prompt-decorator/schema.lua b/kong/plugins/ai-prompt-decorator/schema.lua
index ad0c5a85d72f..2d8abfab59fc 100644
--- a/kong/plugins/ai-prompt-decorator/schema.lua
+++ b/kong/plugins/ai-prompt-decorator/schema.lua
@@ -39,7 +39,9 @@ return {
     { config = {
         type = "record",
         fields = {
-          { prompts = prompts_record }
+          { prompts = prompts_record },
+          { max_request_body_size = { type = "integer", default = 8 * 1024, gt = 0,
+            description = "max allowed body size allowed to be introspected" } },
         }
       }
     }
diff --git a/kong/plugins/ai-prompt-guard/handler.lua b/kong/plugins/ai-prompt-guard/handler.lua
index 321fefad2024..304b9f55e45e 100644
--- a/kong/plugins/ai-prompt-guard/handler.lua
+++ b/kong/plugins/ai-prompt-guard/handler.lua
@@ -121,7 +121,7 @@ function plugin:access(conf)
   kong.ctx.shared.ai_prompt_guarded = true -- future use
 
   -- if plugin ordering was altered, receive the "decorated" request
-  local request = kong.request.get_body("application/json")
+  local request = kong.request.get_body("application/json", nil, conf.max_request_body_size)
   if type(request) ~= "table" then
     return bad_request("this LLM route only supports application/json requests")
   end
diff --git a/kong/plugins/ai-prompt-guard/schema.lua b/kong/plugins/ai-prompt-guard/schema.lua
index 9c0172752bdb..0864696cd290 100644
--- a/kong/plugins/ai-prompt-guard/schema.lua
+++ b/kong/plugins/ai-prompt-guard/schema.lua
@@ -32,6 +32,12 @@ return {
           type = "boolean",
           required = true,
           default = false } },
+      { max_request_body_size = {
+          type = "integer",
+          default = 8 * 1024,
+          gt = 0,
+          description = "max allowed body size allowed to be introspected",}
+      },
       }
     }
   }
diff --git a/kong/plugins/ai-prompt-template/handler.lua b/kong/plugins/ai-prompt-template/handler.lua
index 63224223a434..2be9137c9fe2 100644
--- a/kong/plugins/ai-prompt-template/handler.lua
+++ b/kong/plugins/ai-prompt-template/handler.lua
@@ -64,10 +64,10 @@ function AIPromptTemplateHandler:access(conf)
   kong.ctx.shared.ai_prompt_templated = true
 
   if conf.log_original_request then
-    kong.log.set_serialize_value(LOG_ENTRY_KEYS.REQUEST_BODY, kong.request.get_raw_body())
+    kong.log.set_serialize_value(LOG_ENTRY_KEYS.REQUEST_BODY, kong.request.get_raw_body(conf.max_request_body_size))
   end
 
-  local request = kong.request.get_body("application/json")
+  local request = kong.request.get_body("application/json", nil, conf.max_request_body_size)
   if type(request) ~= "table" then
     return bad_request("this LLM route only supports application/json requests")
   end
diff --git a/kong/plugins/ai-prompt-template/schema.lua b/kong/plugins/ai-prompt-template/schema.lua
index cce3f8be495d..60b68d80defe 100644
--- a/kong/plugins/ai-prompt-template/schema.lua
+++ b/kong/plugins/ai-prompt-template/schema.lua
@@ -45,6 +45,12 @@ return {
         required = true,
         default = false,
       }},
+      { max_request_body_size = {
+        type = "integer",
+        default = 8 * 1024,
+        gt = 0,
+        description = "max allowed body size allowed to be introspected",
+      }},
      }
    }}
  },
diff --git a/kong/plugins/ai-proxy/handler.lua b/kong/plugins/ai-proxy/handler.lua
index e403ebb73c18..a4925f181b6d 100644
--- a/kong/plugins/ai-proxy/handler.lua
+++ b/kong/plugins/ai-proxy/handler.lua
@@ -323,7 +323,7 @@ function _M:access(conf)
   -- first, calculate the coordinates of the request
   local content_type = kong.request.get_header("Content-Type") or "application/json"
 
-  request_table = kong.request.get_body(content_type)
+  request_table = kong.request.get_body(content_type, nil, conf.max_request_body_size)
 
   if not request_table then
     if not string.find(content_type, "multipart/form-data", nil, true) then
diff --git a/kong/plugins/ai-proxy/schema.lua b/kong/plugins/ai-proxy/schema.lua
index 061925863086..7d9359322fd9 100644
--- a/kong/plugins/ai-proxy/schema.lua
+++ b/kong/plugins/ai-proxy/schema.lua
@@ -13,6 +13,13 @@ local ai_proxy_only_config = {
         default = "allow",
         one_of = { "allow", "deny", "always" }},
   },
+  {
+    max_request_body_size = {
+      type = "integer",
+      default = 8 * 1024,
+      gt = 0,
+      description = "max allowed body size allowed to be introspected",}
+  },
 }
 
 for i, v in pairs(ai_proxy_only_config) do
diff --git a/kong/plugins/ai-request-transformer/handler.lua b/kong/plugins/ai-request-transformer/handler.lua
index 9517be366325..f1037f2b4dc5 100644
--- a/kong/plugins/ai-request-transformer/handler.lua
+++ b/kong/plugins/ai-request-transformer/handler.lua
@@ -31,7 +31,7 @@ local function create_http_opts(conf)
     http_opts.proxy_opts = http_opts.proxy_opts or {}
     http_opts.proxy_opts.https_proxy = fmt("http://%s:%d", conf.https_proxy_host, conf.https_proxy_port)
   end
-  
+
   http_opts.http_timeout = conf.http_timeout
   http_opts.https_verify = conf.https_verify
 
@@ -47,7 +47,7 @@ function _M:access(conf)
   conf.llm.__plugin_id = conf.__plugin_id
   conf.llm.__key__ = conf.__key__
   local ai_driver, err = llm:new(conf.llm, http_opts)
-  
+
   if not ai_driver then
     return internal_server_error(err)
   end
@@ -55,7 +55,7 @@ function _M:access(conf)
   -- if asked, introspect the request before proxying
   kong.log.debug("introspecting request with LLM")
   local new_request_body, err = llm:ai_introspect_body(
-    kong.request.get_raw_body(),
+    kong.request.get_raw_body(conf.max_request_body_size),
     conf.prompt,
     http_opts,
     conf.transformation_extract_pattern
   )
 
   if err then
     return bad_request(err)
   end
-  
+
   -- set the body for later plugins
   kong.service.request.set_raw_body(new_request_body)
diff --git a/kong/plugins/ai-request-transformer/schema.lua b/kong/plugins/ai-request-transformer/schema.lua
index c7ce498ba68e..9ebd3b4b8d65 100644
--- a/kong/plugins/ai-request-transformer/schema.lua
+++ b/kong/plugins/ai-request-transformer/schema.lua
@@ -37,6 +37,14 @@ return {
           default = true,
         }},
 
+      {
+        max_request_body_size = {
+          type = "integer",
+          default = 8 * 1024,
+          gt = 0,
+          description = "max allowed body size allowed to be introspected",}
+      },
+
       -- from forward-proxy
       { http_proxy_host = typedefs.host },
       { http_proxy_port = typedefs.port },
@@ -46,7 +54,7 @@ return {
         { llm = llm.config_schema },
       },
     }},
-  
+
   },
   entity_checks = {
     {
diff --git a/kong/plugins/ai-response-transformer/handler.lua b/kong/plugins/ai-response-transformer/handler.lua
index 7014d8938526..af270f36875b 100644
--- a/kong/plugins/ai-response-transformer/handler.lua
+++ b/kong/plugins/ai-response-transformer/handler.lua
@@ -107,7 +107,9 @@ function _M:access(conf)
   kong.log.debug("intercepting plugin flow with one-shot request")
   local httpc = http.new()
 
-  local res, err = subrequest(httpc, kong.request.get_raw_body(), http_opts)
+  local res, err = subrequest(httpc,
+    kong.request.get_raw_body(conf.max_request_body_size),
+    http_opts)
   if err then
     return internal_server_error(err)
   end
diff --git a/kong/plugins/ai-response-transformer/schema.lua b/kong/plugins/ai-response-transformer/schema.lua
index c4eb6fe25ac1..5799d01ef366 100644
--- a/kong/plugins/ai-response-transformer/schema.lua
+++ b/kong/plugins/ai-response-transformer/schema.lua
@@ -46,6 +46,13 @@ return {
           default = true,
         }},
 
+      { max_request_body_size = {
+        type = "integer",
+        default = 8 * 1024,
+        gt = 0,
+        description = "max allowed body size allowed to be introspected",}
+      },
+
       -- from forward-proxy
       { http_proxy_host = typedefs.host },
       { http_proxy_port = typedefs.port },