From 413a492e830a614fdbcfae567d96693e1442db61 Mon Sep 17 00:00:00 2001
From: Jun Ouyang
Date: Thu, 8 Aug 2024 16:20:50 +0800
Subject: [PATCH 1/2] chore(llm): remove llm module auth required

---
 kong/llm/schemas/init.lua | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/kong/llm/schemas/init.lua b/kong/llm/schemas/init.lua
index c975c49c26f0..2efcb6b4108a 100644
--- a/kong/llm/schemas/init.lua
+++ b/kong/llm/schemas/init.lua
@@ -237,12 +237,6 @@ return {
     { logging = logging_schema },
   },
   entity_checks = {
-    -- these three checks run in a chain, to ensure that all auth params for each respective "set" are specified
-    { conditional_at_least_one_of = { if_field = "model.provider",
-                                      if_match = { one_of = { "openai", "azure", "anthropic", "cohere" } },
-                                      then_at_least_one_of = { "auth.header_name", "auth.param_name" },
-                                      then_err = "must set one of %s, and its respective options, when provider is not self-hosted" }},
-
     { mutually_required = { "auth.header_name", "auth.header_value" }, },
     { mutually_required = { "auth.param_name", "auth.param_value", "auth.param_location" }, },
 

From 70db77785939e84bba53507d081304cd5d637b71 Mon Sep 17 00:00:00 2001
From: Jun Ouyang
Date: Thu, 8 Aug 2024 16:43:33 +0800
Subject: [PATCH 2/2] chore(llm): fix code

---
 .../03-plugins/38-ai-proxy/00-config_spec.lua | 64 -------------------
 1 file changed, 64 deletions(-)

diff --git a/spec/03-plugins/38-ai-proxy/00-config_spec.lua b/spec/03-plugins/38-ai-proxy/00-config_spec.lua
index 0a15f131b46b..516f5a2080e7 100644
--- a/spec/03-plugins/38-ai-proxy/00-config_spec.lua
+++ b/spec/03-plugins/38-ai-proxy/00-config_spec.lua
@@ -11,12 +11,6 @@ local validate do
   end
 end
 
-local WWW_MODELS = {
-  "openai",
-  "azure",
-  "anthropic",
-  "cohere",
-}
 
 local SELF_HOSTED_MODELS = {
   "mistral",
@@ -168,64 +162,6 @@ describe(PLUGIN_NAME .. ": (schema)", function()
     assert.is_falsy(ok)
   end)
 
-  for i, v in ipairs(WWW_MODELS) do
-    it("requires API auth for www-hosted " .. v .. " model", function()
-      local config = {
-        route_type = "llm/v1/chat",
-        model = {
-          name = "command",
-          provider = v,
-          options = {
-            max_tokens = 256,
-            temperature = 1.0,
-            upstream_url = "http://nowhere",
-          },
-        },
-      }
-
-      if v == "llama2" then
-        config.model.options.llama2_format = "raw"
-      end
-
-      if v == "azure" then
-        config.model.options.azure_instance = "kong"
-      end
-
-      if v == "anthropic" then
-        config.model.options.anthropic_version = "2021-09-01"
-      end
-
-      local ok, err = validate(config)
-
-      assert.not_nil(err["config"]["@entity"])
-      assert.not_nil(err["config"]["@entity"][1])
-      assert.equal(err["config"]["@entity"][1], "must set one of 'auth.header_name', 'auth.param_name', "
-                                             .. "and its respective options, when provider is not self-hosted")
-      assert.is_falsy(ok)
-    end)
-  end
-
-  it("requires [config.auth] block to be set", function()
-    local config = {
-      route_type = "llm/v1/chat",
-      model = {
-        name = "openai",
-        provider = "openai",
-        options = {
-          max_tokens = 256,
-          temperature = 1.0,
-          upstream_url = "http://nowhere",
-        },
-      },
-    }
-
-    local ok, err = validate(config)
-
-    assert.equal(err["config"]["@entity"][1], "must set one of 'auth.header_name', 'auth.param_name', "
-                                           .. "and its respective options, when provider is not self-hosted")
-    assert.is_falsy(ok)
-  end)
-
   it("requires both [config.auth.header_name] and [config.auth.header_value] to be set", function()
     local config = {
       route_type = "llm/v1/chat",