fix(ai-proxy): llama2 analytics
tysoekong committed Jan 11, 2024
1 parent f247160 commit edce9e3
Showing 1 changed file with 8 additions and 2 deletions.
10 changes: 8 additions & 2 deletions kong/llm/drivers/llama2.lua
@@ -99,7 +99,7 @@ local transformers_from = {
   ["llm/v1/chat"] = function(response_string, route_type)
     local response_table, err = cjson.decode(response_string)
     if err then
-      return nil, "failed to decode cohere response"
+      return nil, "failed to decode llama2 response"
     end
 
     if (not response_table) or (not response_table.data) or (#response_table.data > 1) then
@@ -145,13 +145,16 @@ local transformers_from = {
 
     end
 
+    -- stash analytics for later
+    if response_table.usage then response_object.usage = response_table.usage end
+
     return cjson.encode(response_object)
   end,
 
   ["llm/v1/completions"] = function(response_string, route_type)
     local response_table, err = cjson.decode(response_string)
     if err then
-      return nil, "failed to decode cohere response"
+      return nil, "failed to decode llama2 response"
     end
 
     if (not response_table) or (not response_table.data) or (#response_table.data > 1) then
@@ -197,6 +200,9 @@ local transformers_from = {
 
     end
 
+    -- stash analytics for later
+    if response_table.usage then response_object.usage = response_table.usage end
+
     return cjson.encode(response_object)
   end,
 }
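For context, below is a minimal standalone sketch of the decode-and-stash pattern this commit adds. Only cjson.decode, the response_table.usage check, and response_object.usage come from the diff above; the sample response body, the generated_text field, the choices output shape, and the usage field names (prompt_tokens, completion_tokens, total_tokens) are illustrative assumptions, not part of Kong's actual llama2 driver.

-- Minimal sketch (assumptions noted above); requires lua-cjson.
local cjson = require("cjson.safe")

-- Hypothetical upstream llama2 response carrying a usage block.
local response_string = [[{
  "data": [ { "generated_text": "Hello there." } ],
  "usage": { "prompt_tokens": 12, "completion_tokens": 4, "total_tokens": 16 }
}]]

local function transform(response_string)
  local response_table, err = cjson.decode(response_string)
  if err then
    return nil, "failed to decode llama2 response"
  end

  -- Build the outgoing response (shape assumed for illustration).
  local response_object = {
    choices = { { message = { content = response_table.data[1].generated_text } } },
  }

  -- stash analytics for later (the pattern added by this commit)
  if response_table.usage then response_object.usage = response_table.usage end

  return cjson.encode(response_object)
end

print(transform(response_string))  -- the usage block travels along with the transformed body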
