From ae686a42488415c593e950cf0bf48422e0619578 Mon Sep 17 00:00:00 2001
From: Jack Tysoe
Date: Fri, 5 Jul 2024 01:09:32 +0100
Subject: [PATCH] fix(ai-proxy): lint

Register the Gemini driver in the rockspec, remove unused locals and
the unimplemented gemini-to-gemini transformer stubs from the Gemini
driver, drop the unused ERROR_MSG table, return kong.response.exit()
directly on cloud-provider auth failure, and delete a leftover debug
dump() helper from the unit spec.

---
 kong-3.8.0-0.rockspec                        |  2 ++
 kong/llm/drivers/gemini.lua                  | 16 ++--------------
 kong/plugins/ai-proxy/handler.lua            |  4 +---
 spec/03-plugins/38-ai-proxy/01-unit_spec.lua | 14 --------------
 4 files changed, 5 insertions(+), 31 deletions(-)

diff --git a/kong-3.8.0-0.rockspec b/kong-3.8.0-0.rockspec
index bcf8b38cb059..ce680566797a 100644
--- a/kong-3.8.0-0.rockspec
+++ b/kong-3.8.0-0.rockspec
@@ -608,6 +608,8 @@ build = {
     ["kong.llm.drivers.mistral"] = "kong/llm/drivers/mistral.lua",
     ["kong.llm.drivers.llama2"] = "kong/llm/drivers/llama2.lua",
 
+    ["kong.llm.drivers.gemini"] = "kong/llm/drivers/gemini.lua",
+
     ["kong.plugins.ai-prompt-decorator.handler"] = "kong/plugins/ai-prompt-decorator/handler.lua",
     ["kong.plugins.ai-prompt-decorator.schema"] = "kong/plugins/ai-prompt-decorator/schema.lua",
 
diff --git a/kong/llm/drivers/gemini.lua b/kong/llm/drivers/gemini.lua
index b62f7f7d98dc..59296ee9160b 100644
--- a/kong/llm/drivers/gemini.lua
+++ b/kong/llm/drivers/gemini.lua
@@ -9,7 +9,6 @@ local string_gsub = string.gsub
 local buffer = require("string.buffer")
 local table_insert = table.insert
 local string_lower = string.lower
-local string_sub = string.sub
 
 --
 -- globals
@@ -49,10 +48,7 @@ local function is_response_finished(content)
         and content.candidates[1].finishReason
 end
 
-local function handle_stream_event(event_t, model_info, route_type)
-  local metadata
-
-
+local function handle_stream_event(event_t, model_info, route_type)
   -- discard empty frames, it should either be a random new line, or comment
   if (not event_t.data) or (#event_t.data < 1) then
     return
@@ -65,7 +61,7 @@ local function handle_stream_event(event_t, model_info, route_type)
   end
 
   local new_event
-  local metadata
+  local metadata = nil
 
   if is_response_content(event) then
     new_event = {
@@ -219,14 +215,6 @@ local function from_gemini_chat_openai(response, model_info, route_type)
   return cjson.encode(messages)
 end
 
-local function to_gemini_chat_gemini(request_table, model_info, route_type)
-  return nil, nil, "gemini to gemini not yet implemented"
-end
-
-local function from_gemini_chat_gemini(request_table, model_info, route_type)
-  return nil, nil, "gemini to gemini not yet implemented"
-end
-
 local transformers_to = {
   ["llm/v1/chat"] = to_gemini_chat_openai,
 }
diff --git a/kong/plugins/ai-proxy/handler.lua b/kong/plugins/ai-proxy/handler.lua
index fbe32f84fcb7..8e661e89317e 100644
--- a/kong/plugins/ai-proxy/handler.lua
+++ b/kong/plugins/ai-proxy/handler.lua
@@ -25,7 +25,6 @@ local _M = {
 
 
 -- static messages
-local ERROR_MSG = { error = { message = "" } }
 local ERROR__NOT_SET = 'data: {"error": true, "message": "empty or unsupported transformer response"}'
 
 
@@ -497,8 +496,7 @@ function _M:access(conf)
   if identity_interface and identity_interface.error then
     kong.ctx.shared.skip_response_transformer = true
     kong.log.err("error authenticating with cloud-provider, ", identity_interface.error)
-
-    return internal_server_error("LLM request failed before proxying")
+    return kong.response.exit(500, "LLM request failed before proxying")
   end
 
   -- now re-configure the request for this operation type
diff --git a/spec/03-plugins/38-ai-proxy/01-unit_spec.lua b/spec/03-plugins/38-ai-proxy/01-unit_spec.lua
index 22fb1e668e34..aeb42600d639 100644
--- a/spec/03-plugins/38-ai-proxy/01-unit_spec.lua
+++ b/spec/03-plugins/38-ai-proxy/01-unit_spec.lua
@@ -660,20 +660,6 @@ describe(PLUGIN_NAME .. ": (unit)", function()
       }, formatted)
     end)
 
-
-    local function dump(o)
-      if type(o) == 'table' then
-        local s = '{ '
-        for k,v in pairs(o) do
-          if type(k) ~= 'number' then k = '"'..k..'"' end
-          s = s .. '['..k..'] = ' .. dump(v) .. ','
-        end
-        return s .. '} '
-      else
-        return tostring(o)
-      end
-    end
-
     describe("streaming transformer tests", function()
 
       it("transforms truncated-json type (beginning of stream)", function()