fix(ai-proxy): fix tests
tysoekong committed May 2, 2024
1 parent e770b4b · commit d0e0fa1
Showing 4 changed files with 8 additions and 11 deletions.
12 changes: 6 additions & 6 deletions kong/plugins/ai-proxy/handler.lua
@@ -79,28 +79,28 @@ local function handle_streaming_frame(conf)
  if (not finished) and (is_gzip) then
    chunk = kong_utils.inflate_gzip(chunk)
  end

  local events = ai_shared.frame_to_events(chunk)

  for _, event in ipairs(events) do
    local formatted, _, metadata = ai_driver.from_format(event, conf.model, "stream/" .. conf.route_type)

    local event_t = nil
    local token_t = nil
    local err

    if formatted then -- only stream relevant frames back to the user
      if conf.logging and conf.logging.log_payloads and (formatted ~= "[DONE]") then
        -- append the "choice" to the buffer, for logging later. this actually works!
        if not event_t then
          event_t, err = cjson.decode(formatted)
        end

        if not err then
          if not token_t then
            token_t = get_token_text(event_t)
          end

          kong.ctx.plugin.ai_stream_log_buffer:put(token_t)
        end
      end
@@ -112,7 +112,7 @@ local function handle_streaming_frame(conf)
        if not event_t then
          event_t, err = cjson.decode(formatted)
        end

        if not err then
          if not token_t then
            token_t = get_token_text(event_t)
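Note on the pattern in the hunks above: `event_t` and `token_t` start out as `nil` and are only filled in on first use, so each streamed frame is JSON-decoded at most once even when more than one branch (payload logging, token accounting) needs the decoded event. Below is a minimal sketch of that decode-once idea, runnable outside Kong; it assumes lua-cjson is available, and `handle_frame`, `extract_token_text`, and the `opts` flags are hypothetical stand-ins, not the plugin's actual API.

-- Sketch only: decode-once handling of one streamed frame, outside Kong.
local cjson = require("cjson.safe")

-- Hypothetical stand-in for the plugin's get_token_text(); assumes an
-- OpenAI-style chunk shaped like { choices = { { delta = { content = "..." } } } }.
local function extract_token_text(event_t)
  local choice = event_t.choices and event_t.choices[1]
  return (choice and choice.delta and choice.delta.content) or ""
end

local function handle_frame(formatted, opts, log_buffer)
  local event_t = nil
  local token_t = nil
  local err

  if formatted and formatted ~= "[DONE]" then
    if opts.log_payloads then
      -- first consumer: decode the frame and buffer the token text for logging
      if not event_t then
        event_t, err = cjson.decode(formatted)
      end
      if not err then
        token_t = token_t or extract_token_text(event_t)
        table.insert(log_buffer, token_t)
      end
    end

    if opts.count_tokens then
      -- second consumer reuses event_t/token_t instead of decoding again;
      -- returns the text length as a crude stand-in for token accounting
      if not event_t then
        event_t, err = cjson.decode(formatted)
      end
      if not err then
        token_t = token_t or extract_token_text(event_t)
        return #token_t
      end
    end
  end
end

local buf = {}
local n = handle_frame('{"choices":[{"delta":{"content":"Hi"}}]}',
                       { log_payloads = true, count_tokens = true }, buf)
print(table.concat(buf), n)  --> Hi    2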
5 changes: 2 additions & 3 deletions spec/03-plugins/38-ai-proxy/02-openai_integration_spec.lua
@@ -42,7 +42,6 @@ local _EXPECTED_CHAT_STATS = {
      request_model = 'gpt-3.5-turbo',
      response_model = 'gpt-3.5-turbo-0613',
    },
-   payload = {},
    usage = {
      completion_token = 12,
      prompt_token = 25,
@@ -775,8 +774,8 @@ for _, strategy in helpers.all_strategies() do if strategy ~= "cassandra" then
        assert.is_number(log_message.response.size)

        -- test request bodies
-       assert.matches('"content": "What is 1 + 1?"', log_message.ai.payload.request, nil, true)
-       assert.matches('"role": "user"', log_message.ai.payload.request, nil, true)
+       assert.matches('"content": "What is 1 + 1?"', log_message.ai['ai-proxy'].payload.request, nil, true)
+       assert.matches('"role": "user"', log_message.ai['ai-proxy'].payload.request, nil, true)

        -- test response bodies
        assert.matches('"content": "The sum of 1 + 1 is 2.",', log_message.ai["ai-proxy"].payload.response, nil, true)
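Aside on the assertions just above: the trailing `nil, true` arguments request a plain (literal) substring match rather than a Lua-pattern match. That matters here because `+` and `?` in `"What is 1 + 1?"` are pattern magic characters, so a pattern-based search would not find the literal text. A small, self-contained illustration of the difference, using only stock Lua:

-- Plain vs. Lua-pattern matching, the distinction the `nil, true` arguments control.
local body = '{"content": "What is 1 + 1?"}'
local needle = '"content": "What is 1 + 1?"'

-- As a Lua pattern, "+" and "?" act as quantifiers, so the literal text is not found.
print(string.find(body, needle))           --> nil
-- With plain = true the needle is treated as a literal substring and is found.
print(string.find(body, needle, 1, true))  --> 2  28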
1 change: 0 additions & 1 deletion (file name not shown)
@@ -122,7 +122,6 @@ local _EXPECTED_CHAT_STATS = {
      request_model = 'gpt-4',
      response_model = 'gpt-3.5-turbo-0613',
    },
-   payload = {},
    usage = {
      completion_token = 12,
      prompt_token = 25,
1 change: 0 additions & 1 deletion (file name not shown)
@@ -179,7 +179,6 @@ local _EXPECTED_CHAT_STATS = {
      request_model = 'gpt-4',
      response_model = 'gpt-3.5-turbo-0613',
    },
-   payload = {},
    usage = {
      completion_token = 12,
      prompt_token = 25,
