
Commit

Merge branch 'master' into expose_cert_expiry_to_api
Signed-off-by: tzssangglass <[email protected]>
tzssangglass committed Nov 7, 2023
2 parents b236139 + 5d2c511 commit 3c73bf3
Showing 21 changed files with 161 additions and 313 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/changelog-requirement.yml
@@ -21,7 +21,7 @@ jobs:

- name: Find changelog files
id: changelog-list
-uses: tj-actions/changed-files@af292f1e845a0377b596972698a8598734eb2796 # v37
+uses: tj-actions/changed-files@18c8a4ecebe93d32ed8a88e1d0c098f5f68c221b # v37
with:
files_yaml: |
changelogs:
3 changes: 3 additions & 0 deletions changelog/unreleased/kong/cookie-name-validator.yml
@@ -0,0 +1,3 @@
+message: Now cookie names are validated against RFC 6265, which allows more characters than the previous validation.
+type: bugfix
+scope: Core
3 changes: 0 additions & 3 deletions changelog/unreleased/reconfiguration-completion-detection.yml

This file was deleted.

7 changes: 5 additions & 2 deletions kong.conf.default
@@ -1176,9 +1176,12 @@
# roughly 2 seconds.

#nginx_http_lua_regex_cache_max_entries = 8192 # Specifies the maximum number of entries allowed
-# in the worker process level compiled regex cache.
+# in the worker process level PCRE JIT compiled regex cache.
# It is recommended to set it to at least (number of regex paths * 2)
-# to avoid high CPU usages.
+# to avoid high CPU usages if you manually specified `router_flavor` to
+# `traditional`. `expressions` and `traditional_compat` router does
+# not make use of the PCRE library and their behavior
+# is unaffected by this setting.

#nginx_http_keepalive_requests = 1000 # Sets the maximum number of client requests that can be served through one
# keep-alive connection. After the maximum number of requests are made,
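
As a worked sizing example for the setting above (the path count below is hypothetical, not taken from this commit): with `router_flavor = traditional` and roughly 5,000 regex paths, the recommendation of at least (number of regex paths * 2) gives 10,000 entries, so the default of 8192 would be raised, for instance:

# hypothetical deployment: router_flavor = traditional, ~5000 regex paths
# at least 5000 * 2 = 10000 entries; rounded up for headroom
nginx_http_lua_regex_cache_max_entries = 16384
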
11 changes: 2 additions & 9 deletions kong/clustering/config_helper.lua
@@ -202,12 +202,7 @@ local function fill_empty_hashes(hashes)
end
end

-function _M.update(declarative_config, msg)
-
-local config_table = msg.config_table
-local config_hash = msg.config_hash
-local hashes = msg.hashes
-
+function _M.update(declarative_config, config_table, config_hash, hashes)
assert(type(config_table) == "table")

if not config_hash then
@@ -241,13 +236,11 @@ function _M.update(declarative_config, msg)
-- executed by worker 0

local res
-res, err = declarative.load_into_cache_with_events(entities, meta, new_hash, hashes, msg.current_transaction_id)
+res, err = declarative.load_into_cache_with_events(entities, meta, new_hash, hashes)
if not res then
return nil, err
end

-ngx_log(ngx.NOTICE, "loaded configuration with transaction ID " .. msg.current_transaction_id)
-
return true
end

5 changes: 0 additions & 5 deletions kong/clustering/control_plane.lua
@@ -11,7 +11,6 @@ local compat = require("kong.clustering.compat")
local constants = require("kong.constants")
local events = require("kong.clustering.events")
local calculate_config_hash = require("kong.clustering.config_helper").calculate_config_hash
-local global = require("kong.global")
local extract_dp_cert = require("kong.clustering.tls").extract_dp_cert


@@ -117,10 +116,8 @@ function _M:export_deflated_reconfigure_payload()

local config_hash, hashes = calculate_config_hash(config_table)

-local current_transaction_id = global.get_current_transaction_id()
local payload = {
type = "reconfigure",
-current_transaction_id = current_transaction_id,
timestamp = ngx_now(),
config_table = config_table,
config_hash = config_hash,
@@ -147,8 +144,6 @@
self.current_config_hash = config_hash
self.deflated_reconfigure_payload = payload

-ngx_log(ngx_NOTICE, "exported configuration with transaction id " .. current_transaction_id)
-
return payload, nil, config_hash
end

5 changes: 4 additions & 1 deletion kong/clustering/data_plane.lua
@@ -213,7 +213,10 @@ function _M:communicate(premature)
msg.timestamp and " with timestamp: " .. msg.timestamp or "",
log_suffix)

-local pok, res, err = pcall(config_helper.update, self.declarative_config, msg)
+local config_table = assert(msg.config_table)
+
+local pok, res, err = pcall(config_helper.update, self.declarative_config,
+config_table, msg.config_hash, msg.hashes)
if pok then
ping_immediately = true
end
7 changes: 1 addition & 6 deletions kong/db/declarative/import.lua
@@ -507,7 +507,7 @@ do
local DECLARATIVE_LOCK_KEY = "declarative:lock"

-- make sure no matter which path it exits, we released the lock.
-load_into_cache_with_events = function(entities, meta, hash, hashes, transaction_id)
+load_into_cache_with_events = function(entities, meta, hash, hashes)
local kong_shm = ngx.shared.kong

local ok, err = kong_shm:add(DECLARATIVE_LOCK_KEY, 0, DECLARATIVE_LOCK_TTL)
@@ -522,11 +522,6 @@
end

ok, err = load_into_cache_with_events_no_lock(entities, meta, hash, hashes)

-if ok and transaction_id then
-ok, err = kong_shm:set("declarative:current-transaction-id", transaction_id)
-end
-
kong_shm:delete(DECLARATIVE_LOCK_KEY)

return ok, err
2 changes: 1 addition & 1 deletion kong/db/schema/entities/upstreams.lua
@@ -189,7 +189,7 @@ local r = {
{ hash_fallback = hash_on },
{ hash_on_header = typedefs.header_name, },
{ hash_fallback_header = typedefs.header_name, },
-{ hash_on_cookie = { description = "The cookie name to take the value from as hash input.", type = "string", custom_validator = utils.validate_cookie_name }, },
+{ hash_on_cookie = typedefs.cookie_name{ description = "The cookie name to take the value from as hash input."}, },
{ hash_on_cookie_path = typedefs.path{ default = "/", }, },
{ hash_on_query_arg = simple_param },
{ hash_fallback_query_arg = simple_param },
1 change: 1 addition & 0 deletions kong/db/schema/metaschema.lua
@@ -179,6 +179,7 @@ local field_schema = {
{ required = { type = "boolean" }, },
{ reference = { type = "string" }, },
{ description = { type = "string", len_min = 10, len_max = 500}, },
+{ examples = { type = "array", elements = { type = "any" } } },
{ auto = { type = "boolean" }, },
{ unique = { type = "boolean" }, },
{ unique_across_ws = { type = "boolean" }, },
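
For illustration of the new `examples` attribute permitted by this metaschema entry, a hypothetical field definition (the field name and values are made up, not part of the commit) could attach sample values of any type to a field:

-- hypothetical schema field using the newly allowed `examples` attribute,
-- an array whose elements may be of any type
{ retry_count = {
    type = "integer",
    default = 5,
    examples = { 0, 5, 10 },
  },
},
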
8 changes: 8 additions & 0 deletions kong/db/schema/typedefs.lua
@@ -331,6 +331,14 @@ typedefs.url = Schema.define {
description = "A string representing a URL, such as https://example.com/path/to/resource?q=search."
}


+typedefs.cookie_name = Schema.define {
+type = "string",
+custom_validator = utils.validate_cookie_name,
+description = "A string representing an HTTP token defined by RFC 2616."
+}
+
+-- should we also allow all http token for this?
typedefs.header_name = Schema.define {
type = "string",
custom_validator = utils.validate_header_name,
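
For context on the validator referenced above: RFC 6265 defines cookie-name as an RFC 2616 token, i.e. one or more printable US-ASCII characters excluding control characters and the separators listed in RFC 2616 section 2.2. The sketch below illustrates that rule only; the function name is made up, and this is not Kong's actual utils.validate_cookie_name implementation.

-- sketch: an RFC 6265 cookie-name is an RFC 2616 token, i.e. ASCII letters,
-- digits and ! # $ % & ' * + - . ^ _ ` | ~ with no separators or controls
local function is_rfc6265_cookie_name(name)
  if type(name) ~= "string" or #name == 0 then
    return false
  end
  -- one or more token characters and nothing else
  return name:find("^[%w!#%$%%&'%*%+%-%.%^_`|~]+$") ~= nil
end

assert(is_rfc6265_cookie_name("session-id_2"))
assert(not is_rfc6265_cookie_name("bad name"))  -- space is a separator
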
8 changes: 2 additions & 6 deletions kong/db/strategies/postgres/connector.lua
@@ -519,11 +519,10 @@ function _mt:query(sql, operation)
end

local phase = get_phase()
-local in_admin_api = phase == "content" and ngx.ctx.KONG_PHASE == ADMIN_API_PHASE

if not operation or
-not self.config_ro or
-in_admin_api
+not self.config_ro or
+(phase == "content" and ngx.ctx.KONG_PHASE == ADMIN_API_PHASE)
then
-- admin API requests skips the replica optimization
-- to ensure all its results are always strongly consistent
@@ -553,9 +552,6 @@

res, err, partial, num_queries = conn:query(sql)

-if in_admin_api and operation == "write" and res and res[1] and res[1]._pg_transaction_id then
-kong.response.set_header('X-Kong-Transaction-ID', res[1]._pg_transaction_id)
-end
-- if err is string then either it is a SQL error
-- or it is a socket error, here we abort connections
-- that encounter errors instead of reusing them, for
2 changes: 0 additions & 2 deletions kong/db/strategies/postgres/init.lua
@@ -987,8 +987,6 @@ function _M.new(connector, schema, errors)
insert(upsert_expressions, ttl_escaped .. " = " .. "EXCLUDED." .. ttl_escaped)
end

-insert(select_expressions, "pg_current_xact_id() as _pg_transaction_id")
-
local primary_key_escaped = {}
for i, key in ipairs(primary_key) do
local primary_key_field = primary_key_fields[key]
13 changes: 1 addition & 12 deletions kong/global.lua
@@ -68,8 +68,7 @@ end


local _GLOBAL = {
-phases = phase_checker.phases,
-CURRENT_TRANSACTION_ID = 0,
+phases = phase_checker.phases,
}


@@ -295,14 +294,4 @@ function _GLOBAL.init_timing()
end


-function _GLOBAL.get_current_transaction_id()
-local rows, err = kong.db.connector:query("select pg_current_xact_id() as _pg_transaction_id")
-if not rows then
-return nil, "could not query postgres for current transaction id: " .. err
-else
-return tonumber(rows[1]._pg_transaction_id)
-end
-end
-
-
return _GLOBAL
52 changes: 27 additions & 25 deletions kong/pdk/log.lua
@@ -10,6 +10,7 @@
-- @module kong.log


+local buffer = require "string.buffer"
local errlog = require "ngx.errlog"
local ngx_re = require "ngx.re"
local inspect = require "inspect"
@@ -137,34 +138,34 @@ end


local serializers = {
-[1] = function(buf, to_string, ...)
-buf[1] = to_string((select(1, ...)))
+[1] = function(buf, sep, to_string, ...)
+buf:put(to_string((select(1, ...))))
end,

-[2] = function(buf, to_string, ...)
-buf[1] = to_string((select(1, ...)))
-buf[2] = to_string((select(2, ...)))
+[2] = function(buf, sep, to_string, ...)
+buf:put(to_string((select(1, ...)))):put(sep)
+:put(to_string((select(2, ...))))
end,

-[3] = function(buf, to_string, ...)
-buf[1] = to_string((select(1, ...)))
-buf[2] = to_string((select(2, ...)))
-buf[3] = to_string((select(3, ...)))
+[3] = function(buf, sep, to_string, ...)
+buf:put(to_string((select(1, ...)))):put(sep)
+:put(to_string((select(2, ...)))):put(sep)
+:put(to_string((select(3, ...))))
end,

-[4] = function(buf, to_string, ...)
-buf[1] = to_string((select(1, ...)))
-buf[2] = to_string((select(2, ...)))
-buf[3] = to_string((select(3, ...)))
-buf[4] = to_string((select(4, ...)))
+[4] = function(buf, sep, to_string, ...)
+buf:put(to_string((select(1, ...)))):put(sep)
+:put(to_string((select(2, ...)))):put(sep)
+:put(to_string((select(3, ...)))):put(sep)
+:put(to_string((select(4, ...))))
end,

-[5] = function(buf, to_string, ...)
-buf[1] = to_string((select(1, ...)))
-buf[2] = to_string((select(2, ...)))
-buf[3] = to_string((select(3, ...)))
-buf[4] = to_string((select(4, ...)))
-buf[5] = to_string((select(5, ...)))
+[5] = function(buf, sep, to_string, ...)
+buf:put(to_string((select(1, ...)))):put(sep)
+:put(to_string((select(2, ...)))):put(sep)
+:put(to_string((select(3, ...)))):put(sep)
+:put(to_string((select(4, ...)))):put(sep)
+:put(to_string((select(5, ...))))
end,
}

@@ -282,7 +283,7 @@ local function gen_log_func(lvl_const, imm_buf, to_string, stack_level, sep)
to_string = to_string or tostring
stack_level = stack_level or 2

-local variadic_buf = {}
+local variadic_buf = buffer.new()

return function(...)
local sys_log_level = nil
@@ -320,15 +321,16 @@ local function gen_log_func(lvl_const, imm_buf, to_string, stack_level, sep)
end

if serializers[n] then
-serializers[n](variadic_buf, to_string, ...)
+serializers[n](variadic_buf, sep or "" , to_string, ...)

else
-for i = 1, n do
-variadic_buf[i] = to_string((select(i, ...)))
+for i = 1, n - 1 do
+variadic_buf:put(to_string((select(i, ...)))):put(sep or "")
end
+variadic_buf:put(to_string((select(n, ...))))
end

-local msg = concat(variadic_buf, sep, 1, n)
+local msg = variadic_buf:get()

for i = 1, imm_buf.n_messages do
imm_buf[imm_buf.message_idxs[i]] = msg
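
The serializer and gen_log_func changes above replace a plain Lua table plus table.concat with LuaJIT's string.buffer extension. A standalone sketch of the two methods relied on (independent of Kong; requires LuaJIT 2.1 / OpenResty, where string.buffer is available): put() appends its argument and returns the buffer so calls can be chained, and get() returns the accumulated data while consuming it, which is why the reused variadic_buf needs no explicit reset between calls.

local buffer = require "string.buffer"

local buf = buffer.new()

-- put() appends and returns the buffer, allowing the chained style used
-- in the serializers above
buf:put("config"):put(" "):put("reloaded")

-- get() returns the buffered data and consumes it, leaving the buffer empty
print(buf:get())   -- "config reloaded"
print(#buf)        -- 0: ready for reuse without an explicit reset
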
(Diffs for the remaining changed files are not rendered here.)
