Skip to content

Commit

Permalink
fix(ai-proxy): analytics missing when model is cjson.null
Browse files Browse the repository at this point in the history
  • Loading branch information
tysoekong committed Jun 10, 2024
1 parent 4e761a9 commit a4110b9
Show file tree
Hide file tree
Showing 3 changed files with 39 additions and 2 deletions.
5 changes: 5 additions & 0 deletions changelog/unreleased/kong/ai-proxy-azure-streaming copy.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
message: |
**AI-proxy-plugin**: Fix a bug where setting the OpenAI SDK model parameter to "null" prevented analytics
from being written to the configured logging plugin(s).
scope: Plugin
type: bugfix
5 changes: 3 additions & 2 deletions kong/plugins/ai-proxy/handler.lua
Original file line number Diff line number Diff line change
Expand Up @@ -334,7 +334,9 @@ function _M:access(conf)

-- copy from the user request if present
if (not multipart) and (not conf_m.model.name) and (request_table.model) then
conf_m.model.name = request_table.model
if request_table.model ~= cjson.null then
conf_m.model.name = request_table.model
end
elseif multipart then
conf_m.model.name = "NOT_SPECIFIED"
end
Expand All @@ -351,7 +353,6 @@ function _M:access(conf)
return bad_request("model parameter not found in request, nor in gateway configuration")
end

-- stash for analytics later
kong_ctx_plugin.llm_model_requested = conf_m.model.name

-- check the incoming format is the same as the configured LLM format
Expand Down
31 changes: 31 additions & 0 deletions spec/03-plugins/38-ai-proxy/02-openai_integration_spec.lua
Original file line number Diff line number Diff line change
Expand Up @@ -841,6 +841,37 @@ for _, strategy in helpers.all_strategies() do if strategy ~= "cassandra" then
}, json.choices[1].message)
end)

it("good request, parses model of cjson.null", function()
  -- Load the standard chat fixture and null-out its model field,
  -- reproducing an OpenAI SDK client that sends `"model": null`
  -- (decoded by lua-cjson as the cjson.null sentinel).
  local fixture = pl_file.read("spec/fixtures/ai-proxy/openai/llm-v1-chat/requests/good.json")
  local request = cjson.decode(fixture)
  request.model = cjson.null

  local r = client:get("/openai/llm/v1/chat/good", {
    headers = {
      ["content-type"] = "application/json",
      ["accept"] = "application/json",
    },
    body = cjson.encode(request),
  })

  -- The request must still succeed: the plugin falls back to the
  -- configured model instead of treating cjson.null as a real name.
  local response_body = assert.res_status(200, r)
  local json = cjson.decode(response_body)

  -- Check this is in the 'kong' response format.
  assert.equals(json.id, "chatcmpl-8T6YwgvjQVVnGbJ2w8hpOA17SeNy2")
  assert.equals(json.model, "gpt-3.5-turbo-0613")
  assert.equals(json.object, "chat.completion")

  assert.is_table(json.choices)
  assert.is_table(json.choices[1].message)
  assert.same({
    content = "The sum of 1 + 1 is 2.",
    role = "assistant",
  }, json.choices[1].message)
end)

it("tries to override configured model", function()
local r = client:get("/openai/llm/v1/chat/good", {
headers = {
Expand Down

0 comments on commit a4110b9

Please sign in to comment.