chore(llm): fix code
oowl committed Aug 8, 2024
1 parent 5910d02 commit f4b1324
Showing 1 changed file with 0 additions and 89 deletions.
89 changes: 0 additions & 89 deletions spec/03-plugins/38-ai-proxy/00-config_spec.lua
@@ -11,12 +11,6 @@ local validate do
   end
 end
 
-local WWW_MODELS = {
-  "openai",
-  "azure",
-  "anthropic",
-  "cohere",
-}
 
 local SELF_HOSTED_MODELS = {
   "mistral",
@@ -143,89 +137,6 @@ describe(PLUGIN_NAME .. ": (schema)", function()
     assert.not_nil(err["config"]["@entity"][1], "anthropic does not support statistics when route_type is llm/v1/completions")
   end)
 
it("requires [azure_instance] field when azure provider is used", function()
local config = {
route_type = "llm/v1/chat",
auth = {
header_name = "Authorization",
header_value = "Bearer token",
},
model = {
name = "azure-chat",
provider = "azure",
options = {
max_tokens = 256,
temperature = 1.0,
},
},
}

local ok, err = validate(config)

assert.not_nil(err["config"]["@entity"])
assert.not_nil(err["config"]["@entity"][1])
assert.equal(err["config"]["@entity"][1], "must set 'model.options.azure_instance' for azure provider")
assert.is_falsy(ok)
end)

-  for i, v in ipairs(WWW_MODELS) do
-    it("requires API auth for www-hosted " .. v .. " model", function()
-      local config = {
-        route_type = "llm/v1/chat",
-        model = {
-          name = "command",
-          provider = v,
-          options = {
-            max_tokens = 256,
-            temperature = 1.0,
-            upstream_url = "http://nowhere",
-          },
-        },
-      }
-
-      if v == "llama2" then
-        config.model.options.llama2_format = "raw"
-      end
-
-      if v == "azure" then
-        config.model.options.azure_instance = "kong"
-      end
-
-      if v == "anthropic" then
-        config.model.options.anthropic_version = "2021-09-01"
-      end
-
-      local ok, err = validate(config)
-
-      assert.not_nil(err["config"]["@entity"])
-      assert.not_nil(err["config"]["@entity"][1])
-      assert.equal(err["config"]["@entity"][1], "must set one of 'auth.header_name', 'auth.param_name', "
-                                             .. "and its respective options, when provider is not self-hosted")
-      assert.is_falsy(ok)
-    end)
-  end
-
it("requires [config.auth] block to be set", function()
local config = {
route_type = "llm/v1/chat",
model = {
name = "openai",
provider = "openai",
options = {
max_tokens = 256,
temperature = 1.0,
upstream_url = "http://nowhere",
},
},
}

local ok, err = validate(config)

assert.equal(err["config"]["@entity"][1], "must set one of 'auth.header_name', 'auth.param_name', "
.. "and its respective options, when provider is not self-hosted")
assert.is_falsy(ok)
end)

it("requires both [config.auth.header_name] and [config.auth.header_value] to be set", function()
local config = {
route_type = "llm/v1/chat",
