From 9793768cf9c10f14d6e0a1bf58db29bc0fad3be1 Mon Sep 17 00:00:00 2001 From: samugi Date: Mon, 26 Jun 2023 18:12:24 +0200 Subject: [PATCH] feat(tracing): propagation module / typedef The new propagation module replaces the propagation.lua file, providing a more flexible and extensible way to handle tracing header propagation from plugins (currently OpenTelemetry and Zipkin). It allows configuring the priority of tracing context extraction and which header formats the tracing context is extracted from and injected into. It also allows clearing headers from the request after extraction, to gain full control over what is propagated to the upstream. New configuration options: propagation.extract: Header formats used to extract tracing context from incoming requests. propagation.inject: Header formats used to inject tracing context. propagation.clear: Header names to clear after context extraction. Each header format is now defined in its own extractor and injector files, so that the logic for extracting and injecting the tracing context is isolated. This is meant to improve maintainability and testability, and to facilitate extending support to new header formats. apply PR suggestions (squash me) Co-authored-by: Qi --- .../kong/propagation-module-rework.yml | 5 + kong-3.7.0-0.rockspec | 21 +- kong/db/schema/typedefs.lua | 3 + kong/plugins/opentelemetry/handler.lua | 33 +- kong/plugins/opentelemetry/schema.lua | 11 +- kong/plugins/zipkin/handler.lua | 55 +- kong/plugins/zipkin/schema.lua | 13 +- kong/tracing/propagation.lua | 706 ------- kong/tracing/propagation/extractors/_base.lua | 196 ++ kong/tracing/propagation/extractors/aws.lua | 92 + kong/tracing/propagation/extractors/b3.lua | 219 +++ .../propagation/extractors/datadog.lua | 61 + kong/tracing/propagation/extractors/gcp.lua | 46 + .../tracing/propagation/extractors/jaeger.lua | 76 + kong/tracing/propagation/extractors/ot.lua | 68 + kong/tracing/propagation/extractors/w3c.lua | 75 + kong/tracing/propagation/init.lua | 239 +++ kong/tracing/propagation/injectors/_base.lua | 212 ++ kong/tracing/propagation/injectors/aws.lua | 36 + .../propagation/injectors/b3-single.lua | 44 + kong/tracing/propagation/injectors/b3.lua | 44 + .../tracing/propagation/injectors/datadog.lua | 40 + kong/tracing/propagation/injectors/gcp.lua | 29 + kong/tracing/propagation/injectors/jaeger.lua | 42 + kong/tracing/propagation/injectors/ot.lua | 43 + kong/tracing/propagation/injectors/w3c.lua | 33 + kong/tracing/propagation/schema.lua | 61 + kong/tracing/propagation/utils.lua | 98 + .../26-tracing/02-propagation_spec.lua | 1371 ------------- .../02-propagation_strategies_spec.lua | 1750 +++++++++++++++++ .../26-tracing/03-propagation_module_spec.lua | 463 +++++ ...est-id_spec.lua => 04-request-id_spec.lua} | 0 .../14-tracing/02-propagation_spec.lua | 252 +-- ...acy_propagation_parameter_warning_spec.lua | 124 ++ .../34-zipkin/zipkin_no_endpoint_spec.lua | 10 +- spec/03-plugins/34-zipkin/zipkin_spec.lua | 357 +++- .../37-opentelemetry/03-propagation_spec.lua | 441 ++++- .../kong/plugins/trace-propagator/handler.lua | 56 - .../kong/plugins/trace-propagator/schema.lua | 11 - 39 files changed, 5021 insertions(+), 2415 deletions(-) create mode 100644 changelog/unreleased/kong/propagation-module-rework.yml delete mode 100644 kong/tracing/propagation.lua create mode 100644 kong/tracing/propagation/extractors/_base.lua create mode 100644 kong/tracing/propagation/extractors/aws.lua create mode 100644 kong/tracing/propagation/extractors/b3.lua create mode 100644
kong/tracing/propagation/extractors/datadog.lua create mode 100644 kong/tracing/propagation/extractors/gcp.lua create mode 100644 kong/tracing/propagation/extractors/jaeger.lua create mode 100644 kong/tracing/propagation/extractors/ot.lua create mode 100644 kong/tracing/propagation/extractors/w3c.lua create mode 100644 kong/tracing/propagation/init.lua create mode 100644 kong/tracing/propagation/injectors/_base.lua create mode 100644 kong/tracing/propagation/injectors/aws.lua create mode 100644 kong/tracing/propagation/injectors/b3-single.lua create mode 100644 kong/tracing/propagation/injectors/b3.lua create mode 100644 kong/tracing/propagation/injectors/datadog.lua create mode 100644 kong/tracing/propagation/injectors/gcp.lua create mode 100644 kong/tracing/propagation/injectors/jaeger.lua create mode 100644 kong/tracing/propagation/injectors/ot.lua create mode 100644 kong/tracing/propagation/injectors/w3c.lua create mode 100644 kong/tracing/propagation/schema.lua create mode 100644 kong/tracing/propagation/utils.lua delete mode 100644 spec/01-unit/26-tracing/02-propagation_spec.lua create mode 100644 spec/01-unit/26-tracing/02-propagation_strategies_spec.lua create mode 100644 spec/01-unit/26-tracing/03-propagation_module_spec.lua rename spec/01-unit/26-tracing/{03-request-id_spec.lua => 04-request-id_spec.lua} (100%) create mode 100644 spec/03-plugins/02-legacy_propagation_parameter_warning_spec.lua delete mode 100644 spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua delete mode 100644 spec/fixtures/custom_plugins/kong/plugins/trace-propagator/schema.lua diff --git a/changelog/unreleased/kong/propagation-module-rework.yml b/changelog/unreleased/kong/propagation-module-rework.yml new file mode 100644 index 000000000000..69f3dcb76752 --- /dev/null +++ b/changelog/unreleased/kong/propagation-module-rework.yml @@ -0,0 +1,5 @@ +message: | + **OpenTelemetry, Zipkin**: the propagation module has been reworked, new + options allow better control over the configuration of tracing headers propagation. 
+type: feature +scope: Plugin diff --git a/kong-3.7.0-0.rockspec b/kong-3.7.0-0.rockspec index 1e45d8209f59..b9639d239e24 100644 --- a/kong-3.7.0-0.rockspec +++ b/kong-3.7.0-0.rockspec @@ -598,7 +598,26 @@ build = { ["kong.vaults.env.schema"] = "kong/vaults/env/schema.lua", ["kong.tracing.instrumentation"] = "kong/tracing/instrumentation.lua", - ["kong.tracing.propagation"] = "kong/tracing/propagation.lua", + ["kong.tracing.propagation"] = "kong/tracing/propagation/init.lua", + ["kong.tracing.propagation.schema"] = "kong/tracing/propagation/schema.lua", + ["kong.tracing.propagation.utils"] = "kong/tracing/propagation/utils.lua", + ["kong.tracing.propagation.extractors._base"] = "kong/tracing/propagation/extractors/_base.lua", + ["kong.tracing.propagation.extractors.w3c"] = "kong/tracing/propagation/extractors/w3c.lua", + ["kong.tracing.propagation.extractors.b3"] = "kong/tracing/propagation/extractors/b3.lua", + ["kong.tracing.propagation.extractors.jaeger"] = "kong/tracing/propagation/extractors/jaeger.lua", + ["kong.tracing.propagation.extractors.ot"] = "kong/tracing/propagation/extractors/ot.lua", + ["kong.tracing.propagation.extractors.gcp"] = "kong/tracing/propagation/extractors/gcp.lua", + ["kong.tracing.propagation.extractors.aws"] = "kong/tracing/propagation/extractors/aws.lua", + ["kong.tracing.propagation.extractors.datadog"] = "kong/tracing/propagation/extractors/datadog.lua", + ["kong.tracing.propagation.injectors._base"] = "kong/tracing/propagation/injectors/_base.lua", + ["kong.tracing.propagation.injectors.w3c"] = "kong/tracing/propagation/injectors/w3c.lua", + ["kong.tracing.propagation.injectors.b3"] = "kong/tracing/propagation/injectors/b3.lua", + ["kong.tracing.propagation.injectors.b3-single"] = "kong/tracing/propagation/injectors/b3-single.lua", + ["kong.tracing.propagation.injectors.jaeger"] = "kong/tracing/propagation/injectors/jaeger.lua", + ["kong.tracing.propagation.injectors.ot"] = "kong/tracing/propagation/injectors/ot.lua", + ["kong.tracing.propagation.injectors.gcp"] = "kong/tracing/propagation/injectors/gcp.lua", + ["kong.tracing.propagation.injectors.aws"] = "kong/tracing/propagation/injectors/aws.lua", + ["kong.tracing.propagation.injectors.datadog"] = "kong/tracing/propagation/injectors/datadog.lua", ["kong.tracing.request_id"] = "kong/tracing/request_id.lua", ["kong.tracing.tracing_context"] = "kong/tracing/tracing_context.lua", diff --git a/kong/db/schema/typedefs.lua b/kong/db/schema/typedefs.lua index cd875302280d..16643abf3cc2 100644 --- a/kong/db/schema/typedefs.lua +++ b/kong/db/schema/typedefs.lua @@ -2,6 +2,7 @@ -- @module kong.db.schema.typedefs local utils = require "kong.tools.utils" local queue_schema = require "kong.tools.queue_schema" +local propagation_schema = require "kong.tracing.propagation.schema" local openssl_pkey = require "resty.openssl.pkey" local openssl_x509 = require "resty.openssl.x509" local Schema = require "kong.db.schema" @@ -882,6 +883,8 @@ typedefs.jwk = Schema.define { typedefs.queue = queue_schema +typedefs.propagation = propagation_schema + local function validate_lua_expression(expression) local sandbox = require "kong.tools.sandbox" return sandbox.validate_safe(expression) diff --git a/kong/plugins/opentelemetry/handler.lua b/kong/plugins/opentelemetry/handler.lua index a265e57c21f0..444e3435a26d 100644 --- a/kong/plugins/opentelemetry/handler.lua +++ b/kong/plugins/opentelemetry/handler.lua @@ -14,10 +14,6 @@ local ngx_ERR = ngx.ERR local ngx_DEBUG = ngx.DEBUG local ngx_now = ngx.now local ngx_update_time = 
ngx.update_time -local ngx_req = ngx.req -local ngx_get_headers = ngx_req.get_headers -local propagation_parse = propagation.parse -local propagation_set = propagation.set local null = ngx.null local encode_traces = otlp.encode_traces local encode_span = otlp.transform_span @@ -91,8 +87,8 @@ local function http_export(conf, spans) return ok, err end -function OpenTelemetryHandler:access(conf) - local headers = ngx_get_headers() + +local function get_inject_ctx(extracted_ctx, conf) local root_span = ngx.ctx.KONG_SPANS and ngx.ctx.KONG_SPANS[1] -- get the global tracer when available, or instantiate a new one @@ -109,17 +105,21 @@ function OpenTelemetryHandler:access(conf) end local injected_parent_span = tracing_context.get_unlinked_span("balancer") or root_span - local header_type, trace_id, span_id, parent_id, parent_sampled, _ = propagation_parse(headers, conf.header_type) + local trace_id = extracted_ctx.trace_id + local span_id = extracted_ctx.span_id + local parent_id = extracted_ctx.parent_id + local parent_sampled = extracted_ctx.should_sample -- Overwrite trace ids -- with the value extracted from incoming tracing headers if trace_id then -- to propagate the correct trace ID we have to set it here - -- before passing this span to propagation.set() + -- before passing this span to propagation injected_parent_span.trace_id = trace_id -- update the Tracing Context with the trace ID extracted from headers tracing_context.set_raw_trace_id(trace_id) end + -- overwrite root span's parent_id if span_id then root_span.parent_id = span_id @@ -147,7 +147,22 @@ function OpenTelemetryHandler:access(conf) -- Set the sampled flag for the outgoing header's span injected_parent_span.should_sample = sampled - propagation_set(conf.header_type, header_type, injected_parent_span, "w3c") + extracted_ctx.trace_id = injected_parent_span.trace_id + extracted_ctx.span_id = injected_parent_span.span_id + extracted_ctx.should_sample = injected_parent_span.should_sample + extracted_ctx.parent_id = injected_parent_span.parent_id + + -- return the injected ctx (data to be injected with outgoing tracing headers) + return extracted_ctx +end + + +function OpenTelemetryHandler:access(conf) + propagation.propagate( + propagation.get_plugin_params(conf), + get_inject_ctx, + conf + ) end diff --git a/kong/plugins/opentelemetry/schema.lua b/kong/plugins/opentelemetry/schema.lua index 59181655c1a9..bdbd27056f2a 100644 --- a/kong/plugins/opentelemetry/schema.lua +++ b/kong/plugins/opentelemetry/schema.lua @@ -71,9 +71,13 @@ return { default = nil }}, { header_type = { description = "All HTTP requests going through the plugin are tagged with a tracing HTTP request. This property codifies what kind of tracing header the plugin expects on incoming requests.", type = "string", + deprecation = { + message = "opentelemetry: config.header_type is deprecated, please use config.propagation options instead", + removal_in_version = "4.0", + old_default = "preserve" }, required = false, default = "preserve", - one_of = { "preserve", "ignore", "b3", "b3-single", "w3c", "jaeger", "ot", "aws", "gcp" } } }, + one_of = { "preserve", "ignore", "b3", "b3-single", "w3c", "jaeger", "ot", "aws", "gcp", "datadog" } } }, { sampling_rate = { description = "Tracing sampling rate for configuring the probability-based sampler. 
When set, this value supersedes the global `tracing_sampling_rate` setting from kong.conf.", type = "number", @@ -81,6 +85,11 @@ return { required = false, default = nil, } }, + { propagation = typedefs.propagation { + default = { + default_format = "w3c", + }, + } }, }, }, }, }, diff --git a/kong/plugins/zipkin/handler.lua b/kong/plugins/zipkin/handler.lua index e36c658c2807..a3b450dc7822 100644 --- a/kong/plugins/zipkin/handler.lua +++ b/kong/plugins/zipkin/handler.lua @@ -88,10 +88,10 @@ end local initialize_request -local function get_context(conf, ctx) +local function get_context(conf, ctx, extracted_ctx) local zipkin = ctx.zipkin if not zipkin then - initialize_request(conf, ctx) + initialize_request(conf, ctx, extracted_ctx) zipkin = ctx.zipkin end return zipkin @@ -99,13 +99,33 @@ end if subsystem == "http" then - initialize_request = function(conf, ctx) + initialize_request = function(conf, ctx, extracted_ctx) local req = kong.request local req_headers = req.get_headers() - local header_type, trace_id, span_id, parent_id, should_sample, baggage = - propagation.parse(req_headers, conf.header_type) + extracted_ctx = extracted_ctx + or propagation.extract(propagation.get_plugin_params(conf)) + or {} + local trace_id = extracted_ctx.trace_id + local should_sample = extracted_ctx.should_sample + local baggage = extracted_ctx.baggage + + -- Some formats (e.g. W3C) only provide one span_id, which is the id of the + -- span that the header represents, it is meant to be used as the parent of + -- the server span (span generated by the receiver) and is in fact + -- sometimes called parent_id. + -- Other formats (e.g. B3) support two span IDs, usually span_id and + -- parent_id. In that case the span (and its ID) is shared between client + -- and server and the parent_id identifies its parent. 
+ local parent_id, span_id + if extracted_ctx.reuse_span_id then + span_id = extracted_ctx.span_id + parent_id = extracted_ctx.parent_id + + else + parent_id = extracted_ctx.span_id + end local method = req.get_method() @@ -168,23 +188,38 @@ if subsystem == "http" then ctx.zipkin = { request_span = request_span, - header_type = header_type, proxy_span = nil, header_filter_finished = false, } end - function ZipkinLogHandler:access(conf) -- luacheck: ignore 212 - local zipkin = get_context(conf, kong.ctx.plugin) + local function get_inject_ctx(extract_ctx, conf) + local zipkin = get_context(conf, kong.ctx.plugin, extract_ctx) local ngx_ctx = ngx.ctx local access_start = ngx_ctx.KONG_ACCESS_START and ngx_ctx.KONG_ACCESS_START * 1000 or ngx_now_mu() - get_or_add_proxy_span(zipkin, access_start) - propagation.set(conf.header_type, zipkin.header_type, zipkin.proxy_span, conf.default_header_type) + local proxy_span = get_or_add_proxy_span(zipkin, access_start) + + local inject_ctx = extract_ctx + inject_ctx.trace_id = proxy_span.trace_id or inject_ctx.trace_id or nil + inject_ctx.span_id = proxy_span.span_id or inject_ctx.span_id or nil + inject_ctx.parent_id = proxy_span.parent_id or inject_ctx.parent_id or nil + inject_ctx.should_sample = proxy_span.should_sample or inject_ctx.should_sample or nil + inject_ctx.baggage = proxy_span.baggage or inject_ctx.baggage or nil + return inject_ctx + end + + + function ZipkinLogHandler:access(conf) -- luacheck: ignore 212 + propagation.propagate( + propagation.get_plugin_params(conf), + get_inject_ctx, + conf + ) end diff --git a/kong/plugins/zipkin/schema.lua b/kong/plugins/zipkin/schema.lua index d14ca224d57a..da4b0aa8d3e2 100644 --- a/kong/plugins/zipkin/schema.lua +++ b/kong/plugins/zipkin/schema.lua @@ -56,9 +56,13 @@ return { { include_credential = { description = "Specify whether the credential of the currently authenticated consumer should be included in metadata sent to the Zipkin server.", type = "boolean", required = true, default = true } }, { traceid_byte_count = { description = "The length in bytes of each request's Trace ID.", type = "integer", required = true, default = 16, one_of = { 8, 16 } } }, { header_type = { description = "All HTTP requests going through the plugin are tagged with a tracing HTTP request. This property codifies what kind of tracing header the plugin expects on incoming requests", type = "string", required = true, default = "preserve", - one_of = { "preserve", "ignore", "b3", "b3-single", "w3c", "jaeger", "ot", "aws", "gcp" } } }, + one_of = { "preserve", "ignore", "b3", "b3-single", "w3c", "jaeger", "ot", "aws", "datadog", "gcp" }, + deprecation = { message = "zipkin: config.header_type is deprecated, please use config.propagation options instead", removal_in_version = "4.0", old_default = "preserve" } + } }, { default_header_type = { description = "Allows specifying the type of header to be added to requests with no pre-existing tracing headers and when `config.header_type` is set to `\"preserve\"`. 
When `header_type` is set to any other value, `default_header_type` is ignored.", type = "string", required = true, default = "b3", - one_of = { "b3", "b3-single", "w3c", "jaeger", "ot", "aws", "gcp" } } }, + one_of = { "b3", "b3-single", "w3c", "jaeger", "ot", "aws", "datadog", "gcp" }, + deprecation = { message = "zipkin: config.default_header_type is deprecated, please use config.propagation.default_format instead", removal_in_version = "4.0", old_default = "b3" } + } }, { tags_header = { description = "The Zipkin plugin will add extra headers to the tags associated with any HTTP requests that come with a header named as configured by this property.", type = "string", required = true, default = "Zipkin-Tags" } }, { static_tags = { description = "The tags specified on this property will be added to the generated request traces.", type = "array", elements = static_tag, custom_validator = validate_static_tags } }, @@ -70,6 +74,11 @@ return { { phase_duration_flavor = { description = "Specify whether to include the duration of each phase as an annotation or a tag.", type = "string", required = true, default = "annotations", one_of = { "annotations", "tags" } } }, { queue = typedefs.queue }, + { propagation = typedefs.propagation { + default = { + default_format = "b3", + }, + } }, }, }, }, }, diff --git a/kong/tracing/propagation.lua b/kong/tracing/propagation.lua deleted file mode 100644 index 606fcfa5b871..000000000000 --- a/kong/tracing/propagation.lua +++ /dev/null @@ -1,706 +0,0 @@ -local to_hex = require "resty.string".to_hex -local openssl_bignum = require "resty.openssl.bn" -local table_merge = require "kong.tools.utils".table_merge -local split = require "kong.tools.utils".split -local strip = require "kong.tools.utils".strip -local tracing_context = require "kong.tracing.tracing_context" -local unescape_uri = ngx.unescape_uri -local char = string.char -local match = string.match -local sub = string.sub -local gsub = string.gsub -local fmt = string.format -local concat = table.concat -local ipairs = ipairs -local to_ot_trace_id - - -local baggage_mt = { - __newindex = function() - error("attempt to set immutable baggage", 2) - end, -} - -local B3_SINGLE_PATTERN = - "^(%x+)%-(%x%x%x%x%x%x%x%x%x%x%x%x%x%x%x%x)%-?([01d]?)%-?(%x*)$" -local W3C_TRACECONTEXT_PATTERN = "^(%x+)%-(%x+)%-(%x+)%-(%x+)$" -local JAEGER_TRACECONTEXT_PATTERN = "^(%x+):(%x+):(%x+):(%x+)$" -local JAEGER_BAGGAGE_PATTERN = "^uberctx%-(.*)$" -local OT_BAGGAGE_PATTERN = "^ot%-baggage%-(.*)$" -local W3C_TRACEID_LEN = 16 - -local AWS_KV_PAIR_DELIM = ";" -local AWS_KV_DELIM = "=" -local AWS_TRACE_ID_KEY = "Root" -local AWS_TRACE_ID_LEN = 35 -local AWS_TRACE_ID_PATTERN = "^(%x+)%-(%x+)%-(%x+)$" -local AWS_TRACE_ID_VERSION = "1" -local AWS_TRACE_ID_TIMESTAMP_LEN = 8 -local AWS_TRACE_ID_UNIQUE_ID_LEN = 24 -local AWS_PARENT_ID_KEY = "Parent" -local AWS_PARENT_ID_LEN = 16 -local AWS_SAMPLED_FLAG_KEY = "Sampled" - -local GCP_TRACECONTEXT_REGEX = "^(?[0-9a-f]{32})/(?[0-9]{1,20})(;o=(?[0-9]))?$" -local GCP_TRACE_ID_LEN = 32 - -local function hex_to_char(c) - return char(tonumber(c, 16)) -end - - -local function from_hex(str) - if str ~= nil then -- allow nil to pass through - str = gsub(str, "%x%x", hex_to_char) - end - return str -end - --- adds `count` number of zeros to the left of the str -local function left_pad_zero(str, count) - return ('0'):rep(count-#str) .. str -end - - -local function to_w3c_trace_id(trace_id) - if #trace_id < W3C_TRACEID_LEN then - return ('\0'):rep(W3C_TRACEID_LEN - #trace_id) .. 
trace_id - elseif #trace_id > W3C_TRACEID_LEN then - return trace_id:sub(-W3C_TRACEID_LEN) - end - - return trace_id -end - -local function to_gcp_trace_id(trace_id) - if #trace_id < GCP_TRACE_ID_LEN then - return ('0'):rep(GCP_TRACE_ID_LEN - #trace_id) .. trace_id - elseif #trace_id > GCP_TRACE_ID_LEN then - return trace_id:sub(-GCP_TRACE_ID_LEN) - end - - return trace_id -end - - -local function parse_baggage_headers(headers, header_pattern) - -- account for both ot and uber baggage headers - local baggage - for k, v in pairs(headers) do - local baggage_key = match(k, header_pattern) - if baggage_key then - if baggage then - baggage[baggage_key] = unescape_uri(v) - else - baggage = { [baggage_key] = unescape_uri(v) } - end - end - end - - if baggage then - return setmetatable(baggage, baggage_mt) - end -end - - -local function parse_zipkin_b3_headers(headers, b3_single_header) - local warn = kong.log.warn - - -- X-B3-Sampled: if an upstream decided to sample this request, we do too. - local should_sample = headers["x-b3-sampled"] - if should_sample == "1" or should_sample == "true" then - should_sample = true - elseif should_sample == "0" or should_sample == "false" then - should_sample = false - elseif should_sample ~= nil then - warn("x-b3-sampled header invalid; ignoring.") - should_sample = nil - end - - -- X-B3-Flags: if it equals '1' then it overrides sampling policy - -- We still want to warn on invalid sample header, so do this after the above - local debug_header = headers["x-b3-flags"] - if debug_header == "1" then - should_sample = true - elseif debug_header ~= nil then - warn("x-b3-flags header invalid; ignoring.") - end - - local trace_id, span_id, sampled, parent_id - local had_invalid_id = false - - -- B3 single header - -- * For speed, the "-" separators between sampled and parent_id are optional on this implementation - -- This is not guaranteed to happen in future versions and won't be considered a breaking change - -- * The "sampled" section activates sampling with both "1" and "d". 
This is to match the - -- behavior of the X-B3-Flags header - if b3_single_header and type(b3_single_header) == "string" then - if b3_single_header == "1" or b3_single_header == "d" then - should_sample = true - - elseif b3_single_header == "0" then - should_sample = should_sample or false - - else - trace_id, span_id, sampled, parent_id = - match(b3_single_header, B3_SINGLE_PATTERN) - - local trace_id_len = trace_id and #trace_id or 0 - if trace_id - and (trace_id_len == 16 or trace_id_len == 32) - and (parent_id == "" or #parent_id == 16) - then - - if should_sample or sampled == "1" or sampled == "d" then - should_sample = true - elseif sampled == "0" then - should_sample = false - end - - if parent_id == "" then - parent_id = nil - end - - else - warn("b3 single header invalid; ignoring.") - had_invalid_id = true - end - end - end - - local trace_id_header = headers["x-b3-traceid"] - if trace_id_header and ((#trace_id_header ~= 16 and #trace_id_header ~= 32) - or trace_id_header:match("%X")) then - warn("x-b3-traceid header invalid; ignoring.") - had_invalid_id = true - else - trace_id = trace_id or trace_id_header -- b3 single header overrides x-b3-traceid - end - - local span_id_header = headers["x-b3-spanid"] - if span_id_header and (#span_id_header ~= 16 or span_id_header:match("%X")) then - warn("x-b3-spanid header invalid; ignoring.") - had_invalid_id = true - else - span_id = span_id or span_id_header -- b3 single header overrides x-b3-spanid - end - - local parent_id_header = headers["x-b3-parentspanid"] - if parent_id_header and (#parent_id_header ~= 16 or parent_id_header:match("%X")) then - warn("x-b3-parentspanid header invalid; ignoring.") - had_invalid_id = true - else - parent_id = parent_id or parent_id_header -- b3 single header overrides x-b3-parentid - end - - if trace_id == nil or had_invalid_id then - return nil, nil, nil, should_sample - end - - trace_id = from_hex(trace_id) - span_id = from_hex(span_id) - parent_id = from_hex(parent_id) - - return trace_id, span_id, parent_id, should_sample -end - - -local function parse_w3c_trace_context_headers(w3c_header) - -- allow testing to spy on this. - local warn = kong.log.warn - - local should_sample = false - - if type(w3c_header) ~= "string" then - return nil, nil, should_sample - end - - local version, trace_id, parent_id, trace_flags = match(w3c_header, W3C_TRACECONTEXT_PATTERN) - - -- values are not parseable hexadecimal and therefore invalid. - if version == nil or trace_id == nil or parent_id == nil or trace_flags == nil then - warn("invalid W3C traceparent header; ignoring.") - return nil, nil, nil - end - - -- Only support version 00 of the W3C Trace Context spec. - if version ~= "00" then - warn("invalid W3C Trace Context version; ignoring.") - return nil, nil, nil - end - - -- valid trace_id is required. - if #trace_id ~= 32 or tonumber(trace_id, 16) == 0 then - warn("invalid W3C trace context trace ID; ignoring.") - return nil, nil, nil - end - - -- valid parent_id is required. 
- if #parent_id ~= 16 or tonumber(parent_id, 16) == 0 then - warn("invalid W3C trace context parent ID; ignoring.") - return nil, nil, nil - end - - -- valid flags are required - if #trace_flags ~= 2 then - warn("invalid W3C trace context flags; ignoring.") - return nil, nil, nil - end - - -- W3C sampled flag: https://www.w3.org/TR/trace-context/#sampled-flag - should_sample = tonumber(trace_flags, 16) % 2 == 1 - - trace_id = from_hex(trace_id) - parent_id = from_hex(parent_id) - - return trace_id, parent_id, should_sample -end - -local function parse_ot_headers(headers) - local warn = kong.log.warn - - local should_sample = headers["ot-tracer-sampled"] - if should_sample == "1" or should_sample == "true" then - should_sample = true - elseif should_sample == "0" or should_sample == "false" then - should_sample = false - elseif should_sample ~= nil then - warn("ot-tracer-sampled header invalid; ignoring.") - should_sample = nil - end - - local trace_id, span_id - local had_invalid_id = false - - local trace_id_header = headers["ot-tracer-traceid"] - if trace_id_header and ((#trace_id_header ~= 16 and #trace_id_header ~= 32) or trace_id_header:match("%X")) then - warn("ot-tracer-traceid header invalid; ignoring.") - had_invalid_id = true - else - trace_id = trace_id_header - end - - local span_id_header = headers["ot-tracer-spanid"] - if span_id_header and (#span_id_header ~= 16 or span_id_header:match("%X")) then - warn("ot-tracer-spanid header invalid; ignoring.") - had_invalid_id = true - else - span_id = span_id_header - end - - if trace_id == nil or had_invalid_id then - return nil, nil, should_sample - end - - trace_id = from_hex(trace_id) - span_id = from_hex(span_id) - - return trace_id, span_id, should_sample -end - - -local function parse_jaeger_trace_context_headers(jaeger_header) - -- allow testing to spy on this. - local warn = kong.log.warn - - if type(jaeger_header) ~= "string" then - return nil, nil, nil, nil - end - - local trace_id, span_id, parent_id, trace_flags = match(jaeger_header, JAEGER_TRACECONTEXT_PATTERN) - - -- values are not parsable hexidecimal and therefore invalid. - if trace_id == nil or span_id == nil or parent_id == nil or trace_flags == nil then - warn("invalid jaeger uber-trace-id header; ignoring.") - return nil, nil, nil, nil - end - - -- valid trace_id is required. - if #trace_id > 32 or tonumber(trace_id, 16) == 0 then - warn("invalid jaeger trace ID; ignoring.") - return nil, nil, nil, nil - end - - -- if trace_id is not of length 32 chars then 0-pad to left - if #trace_id < 32 then - trace_id = left_pad_zero(trace_id, 32) - end - - -- validating parent_id. If it is invalid just logging, as it can be ignored - -- https://www.jaegertracing.io/docs/1.29/client-libraries/#tracespan-identity - if #parent_id ~= 16 and tonumber(parent_id, 16) ~= 0 then - warn("invalid jaeger parent ID; ignoring.") - end - - -- valid span_id is required. 
- if #span_id > 16 or tonumber(span_id, 16) == 0 then - warn("invalid jaeger span ID; ignoring.") - return nil, nil, nil, nil - end - - -- if span id length is less than 16 then 0-pad left - if #span_id < 16 then - span_id = left_pad_zero(span_id, 16) - end - - -- valid flags are required - if #trace_flags ~= 1 and #trace_flags ~= 2 then - warn("invalid jaeger flags; ignoring.") - return nil, nil, nil, nil - end - - -- Jaeger sampled flag: https://www.jaegertracing.io/docs/1.17/client-libraries/#tracespan-identity - local should_sample = tonumber(trace_flags, 16) % 2 == 1 - - trace_id = from_hex(trace_id) - span_id = from_hex(span_id) - parent_id = from_hex(parent_id) - - return trace_id, span_id, parent_id, should_sample -end - -local function parse_aws_headers(aws_header) - -- allow testing to spy on this. - local warn = kong.log.warn - - if type(aws_header) ~= "string" then - return nil, nil, nil - end - - local trace_id = nil - local span_id = nil - local should_sample = nil - - -- The AWS trace header consists of multiple `key=value` separated by a delimiter `;` - -- We can retrieve the trace id with the `Root` key, the span id with the `Parent` - -- key and the sampling parameter with the `Sampled` flag. Extra information should be ignored. - -- - -- The trace id has a custom format: `version-timestamp-uniqueid` and an opentelemetry compatible - -- id can be deduced by concatenating the timestamp and uniqueid. - -- - -- https://docs.aws.amazon.com/xray/latest/devguide/xray-concepts.html#xray-concepts-tracingheader - for _, key_pair in ipairs(split(aws_header, AWS_KV_PAIR_DELIM)) do - local key_pair_list = split(key_pair, AWS_KV_DELIM) - local key = strip(key_pair_list[1]) - local value = strip(key_pair_list[2]) - - if key == AWS_TRACE_ID_KEY then - local version, timestamp_subset, unique_id_subset = match(value, AWS_TRACE_ID_PATTERN) - - if #value ~= AWS_TRACE_ID_LEN or version ~= AWS_TRACE_ID_VERSION - or #timestamp_subset ~= AWS_TRACE_ID_TIMESTAMP_LEN - or #unique_id_subset ~= AWS_TRACE_ID_UNIQUE_ID_LEN then - warn("invalid aws header trace id; ignoring.") - return nil, nil, nil - end - - trace_id = from_hex(timestamp_subset .. unique_id_subset) - elseif key == AWS_PARENT_ID_KEY then - if #value ~= AWS_PARENT_ID_LEN then - warn("invalid aws header parent id; ignoring.") - return nil, nil, nil - end - span_id = from_hex(value) - elseif key == AWS_SAMPLED_FLAG_KEY then - if value ~= "0" and value ~= "1" then - warn("invalid aws header sampled flag; ignoring.") - return nil, nil, nil - end - should_sample = value == "1" - end - end - return trace_id, span_id, should_sample -end - -local function parse_gcp_headers(gcp_header) - local warn = kong.log.warn - - if type(gcp_header) ~= "string" then - return nil, nil, nil - end - - local match, err = ngx.re.match(gcp_header, GCP_TRACECONTEXT_REGEX, 'jo') - if not match then - local warning = "invalid GCP header" - if err then - warning = warning .. ": " .. err - end - - warn(warning .. 
"; ignoring.") - - return nil, nil, nil - end - - local trace_id = from_hex(match["trace_id"]) - local span_id = openssl_bignum.from_dec(match["span_id"]):to_binary() - local should_sample = match["trace_flags"] == "1" - - return trace_id, span_id, should_sample -end - --- This plugin understands several tracing header types: --- * Zipkin B3 headers (X-B3-TraceId, X-B3-SpanId, X-B3-ParentId, X-B3-Sampled, X-B3-Flags) --- * Zipkin B3 "single header" (a single header called "B3", composed of several fields) --- * spec: https://github.com/openzipkin/b3-propagation/blob/master/RATIONALE.md#b3-single-header-format --- * W3C "traceparent" header - also a composed field --- * spec: https://www.w3.org/TR/trace-context/ --- * Jaeger's uber-trace-id & baggage headers --- * spec: https://www.jaegertracing.io/docs/1.21/client-libraries/#tracespan-identity --- * OpenTelemetry ot-tracer-* tracing headers. --- * OpenTelemetry spec: https://github.com/open-telemetry/opentelemetry-specification --- * Base implementation followed: https://github.com/open-telemetry/opentelemetry-java/blob/96e8523544f04c305da5382854eee06218599075/extensions/trace_propagators/src/main/java/io/opentelemetry/extensions/trace/propagation/OtTracerPropagator.java --- --- The plugin expects request to be using *one* of these types. If several of them are --- encountered on one request, only one kind will be transmitted further. The order is --- --- B3-single > B3 > W3C > Jaeger > OT --- --- Exceptions: --- --- * When both B3 and B3-single fields are present, the B3 fields will be "ammalgamated" --- into the resulting B3-single field. If they present contradictory information (i.e. --- different TraceIds) then B3-single will "win". --- --- * The erroneous formatting on *any* header (even those overridden by B3 single) results --- in rejection (ignoring) of all headers. This rejection is logged. -local function find_header_type(headers) - local b3_single_header = headers["b3"] - if not b3_single_header then - local tracestate_header = headers["tracestate"] - - -- handling tracestate header if it is multi valued - if type(tracestate_header) == "table" then - -- https://www.w3.org/TR/trace-context/#tracestate-header - -- Handling multi value header : https://httpwg.org/specs/rfc7230.html#field.order - tracestate_header = concat(tracestate_header, ',') - kong.log.debug("header `tracestate` is a table :" .. 
tracestate_header) - end - - if tracestate_header then - b3_single_header = match(tracestate_header, "^b3=(.+)$") - end - end - - if b3_single_header then - return "b3-single", b3_single_header - end - - if headers["x-b3-sampled"] - or headers["x-b3-flags"] - or headers["x-b3-traceid"] - or headers["x-b3-spanid"] - or headers["x-b3-parentspanid"] - then - return "b3" - end - - local w3c_header = headers["traceparent"] - if w3c_header then - return "w3c", w3c_header - end - - local jaeger_header = headers["uber-trace-id"] - if jaeger_header then - return "jaeger", jaeger_header - end - - local ot_header = headers["ot-tracer-traceid"] - if ot_header then - return "ot", ot_header - end - - local aws_header = headers["x-amzn-trace-id"] - if aws_header then - return "aws", aws_header - end - - local gcp_header = headers["x-cloud-trace-context"] - if gcp_header then - return "gcp", gcp_header - end -end - - -local function parse(headers, conf_header_type) - if conf_header_type == "ignore" then - return nil - end - - -- Check for B3 headers first - local header_type, composed_header = find_header_type(headers) - local trace_id, span_id, parent_id, should_sample - - if header_type == "b3" or header_type == "b3-single" then - trace_id, span_id, parent_id, should_sample = parse_zipkin_b3_headers(headers, composed_header) - elseif header_type == "w3c" then - trace_id, parent_id, should_sample = parse_w3c_trace_context_headers(composed_header) - elseif header_type == "jaeger" then - trace_id, span_id, parent_id, should_sample = parse_jaeger_trace_context_headers(composed_header) - elseif header_type == "ot" then - trace_id, parent_id, should_sample = parse_ot_headers(headers) - elseif header_type == "aws" then - trace_id, span_id, should_sample = parse_aws_headers(composed_header) - elseif header_type == "gcp" then - trace_id, span_id, should_sample = parse_gcp_headers(composed_header) - end - - if not trace_id then - return header_type, trace_id, span_id, parent_id, should_sample - end - - -- Parse baggage headers - local baggage - local ot_baggage = parse_baggage_headers(headers, OT_BAGGAGE_PATTERN) - local jaeger_baggage = parse_baggage_headers(headers, JAEGER_BAGGAGE_PATTERN) - if ot_baggage and jaeger_baggage then - baggage = table_merge(ot_baggage, jaeger_baggage) - else - baggage = ot_baggage or jaeger_baggage or nil - end - - - return header_type, trace_id, span_id, parent_id, should_sample, baggage -end - - --- set outgoing propagation headers --- --- @tparam string conf_header_type type of tracing header to use --- @tparam string found_header_type type of tracing header found in request --- @tparam table proxy_span span to be propagated --- @tparam string conf_default_header_type used when conf_header_type=ignore -local function set(conf_header_type, found_header_type, proxy_span, conf_default_header_type) - -- proxy_span can be noop, in which case it should not be propagated. - if proxy_span.is_recording == false then - kong.log.debug("skipping propagation of noop span") - return - end - - local set_header = kong.service.request.set_header - - -- If conf_header_type is set to `preserve`, found_header_type is used over default_header_type; - -- if conf_header_type is set to `ignore`, found_header_type is not set, thus default_header_type is used. - if conf_header_type ~= "preserve" and - conf_header_type ~= "ignore" and - found_header_type ~= nil and - conf_header_type ~= found_header_type - then - kong.log.warn("Mismatched header types. conf: " .. conf_header_type .. ". found: " .. 
found_header_type) - end - - found_header_type = found_header_type or conf_default_header_type or "b3" - - -- contains all the different formats of the current trace ID, with zero or - -- more of the following entries: - -- { - -- ["b3"] = "", -- the trace_id when the request has a b3 or X-B3-TraceId (zipkin) header - -- ["w3c"] = "", -- the trace_id when the request has a W3C header - -- ["jaeger"] = "", -- the trace_id when the request has a jaeger tracing header - -- ["ot"] = "", -- the trace_id when the request has an OpenTelemetry tracing header - -- ["aws"] = "", -- the trace_id when the request has an aws tracing header - -- ["gcp"] = "", -- the trace_id when the request has a gcp tracing header - -- } - local trace_id_formats = {} - - if conf_header_type == "b3" or found_header_type == "b3" - then - local trace_id = to_hex(proxy_span.trace_id) - trace_id_formats.b3 = trace_id - - set_header("x-b3-traceid", trace_id) - set_header("x-b3-spanid", to_hex(proxy_span.span_id)) - if proxy_span.parent_id then - set_header("x-b3-parentspanid", to_hex(proxy_span.parent_id)) - end - local Flags = kong.request.get_header("x-b3-flags") -- Get from request headers - if Flags then - set_header("x-b3-flags", Flags) - else - set_header("x-b3-sampled", proxy_span.should_sample and "1" or "0") - end - end - - if conf_header_type == "b3-single" or found_header_type == "b3-single" then - local trace_id = to_hex(proxy_span.trace_id) - trace_id_formats.b3 = trace_id - - set_header("b3", trace_id .. - "-" .. to_hex(proxy_span.span_id) .. - "-" .. (proxy_span.should_sample and "1" or "0") .. - (proxy_span.parent_id and "-" .. to_hex(proxy_span.parent_id) or "")) - end - - if conf_header_type == "w3c" or found_header_type == "w3c" then - -- OTEL uses w3c trace context format so to_ot_trace_id works here - local trace_id = to_hex(to_w3c_trace_id(proxy_span.trace_id)) - trace_id_formats.w3c = trace_id - - set_header("traceparent", fmt("00-%s-%s-%s", - trace_id, - to_hex(proxy_span.span_id), - proxy_span.should_sample and "01" or "00")) - end - - if conf_header_type == "jaeger" or found_header_type == "jaeger" then - local trace_id = to_hex(proxy_span.trace_id) - trace_id_formats.jaeger = trace_id - - set_header("uber-trace-id", fmt("%s:%s:%s:%s", - trace_id, - to_hex(proxy_span.span_id), - proxy_span.parent_id and to_hex(proxy_span.parent_id) or "0", - proxy_span.should_sample and "01" or "00")) - end - - if conf_header_type == "ot" or found_header_type == "ot" then - to_ot_trace_id = to_ot_trace_id or require "kong.plugins.opentelemetry.otlp".to_ot_trace_id - local trace_id = to_hex(to_ot_trace_id(proxy_span.trace_id)) - trace_id_formats.ot = trace_id - - set_header("ot-tracer-traceid", trace_id) - set_header("ot-tracer-spanid", to_hex(proxy_span.span_id)) - set_header("ot-tracer-sampled", proxy_span.should_sample and "1" or "0") - - for key, value in proxy_span:each_baggage_item() do - set_header("ot-baggage-"..key, ngx.escape_uri(value)) - end - end - - for key, value in proxy_span:each_baggage_item() do - -- XXX: https://github.com/opentracing/specification/issues/117 - set_header("uberctx-"..key, ngx.escape_uri(value)) - end - - if conf_header_type == "aws" or found_header_type == "aws" then - local trace_id = to_hex(proxy_span.trace_id) - trace_id_formats.aws = trace_id - - set_header("x-amzn-trace-id", "Root=" .. AWS_TRACE_ID_VERSION .. "-" .. - sub(trace_id, 1, AWS_TRACE_ID_TIMESTAMP_LEN) .. "-" .. - sub(trace_id, AWS_TRACE_ID_TIMESTAMP_LEN + 1, #trace_id) .. - ";Parent=" .. 
to_hex(proxy_span.span_id) .. ";Sampled=" .. - (proxy_span.should_sample and "1" or "0") - ) - end - - if conf_header_type == "gcp" or found_header_type == "gcp" then - local trace_id = to_gcp_trace_id(to_hex(proxy_span.trace_id)) - trace_id_formats.gcp = trace_id - - set_header("x-cloud-trace-context", trace_id .. - "/" .. openssl_bignum.from_binary(proxy_span.span_id):to_dec() .. - ";o=" .. (proxy_span.should_sample and "1" or "0") - ) - end - - trace_id_formats = tracing_context.add_trace_id_formats(trace_id_formats) - -- add trace IDs to log serializer output - kong.log.set_serialize_value("trace_id", trace_id_formats) -end - - -return { - parse = parse, - set = set, - from_hex = from_hex, -} diff --git a/kong/tracing/propagation/extractors/_base.lua b/kong/tracing/propagation/extractors/_base.lua new file mode 100644 index 000000000000..6aa6ff496bf5 --- /dev/null +++ b/kong/tracing/propagation/extractors/_base.lua @@ -0,0 +1,196 @@ +local propagation_utils = require "kong.tracing.propagation.utils" + +local ipairs = ipairs +local type = type + +local to_kong_trace_id = propagation_utils.to_kong_trace_id +local to_kong_span_id = propagation_utils.to_kong_span_id + + +local _EXTRACTOR = { + name = "base_extractor", + headers_validate = { + any = {}, + all = {}, + }, +} +_EXTRACTOR.__index = _EXTRACTOR + + +--- Instantiate a new extractor. +-- +-- Constructor to create a new extractor instance. It accepts a single table +-- that may contain a `name` (might be used in the future for logging purposes) +-- and a `headers_validate` table that specifies the extractor's header requirements. +-- +-- @function _EXTRACTOR:new +-- @param table e Extractor instance to use for creating the new object. +-- The table can have the following fields: +-- * `name` (string, optional): the name of the extractor, currently not used, +-- might be used in the future for logging from this class. +-- * `headers_validate` (table, optional): a table with the following fields: +-- * `any` (table, optional): a list of headers that are required to be +-- present in the request. If any of the headers is present, the extractor +-- will be considered valid. +-- * `all` (table, optional): a list of headers that are required to be +-- present in the request. All headers must be present for the extractor +-- to be considered valid. +-- +-- @usage +-- local my_extractor = _EXTRACTOR:new({ name = "my_extractor", +-- headers_validate = { +-- all = { "Some-Required-Header" }, +-- any = { "Semi", "Optional", "Headers" } +-- } +-- }) +function _EXTRACTOR:new(e) + e = e or {} + local inst = setmetatable(e, _EXTRACTOR) + + local err = "invalid extractor instance: " + assert(type(inst.headers_validate) == "table", + err .. "invalid headers_validate variable") + + return inst +end + + +function _EXTRACTOR:verify_any(headers) + local any = self.headers_validate.any + if not any or #any == 0 then + return true + end + + if not headers or type(headers) ~= "table" then + return false + end + + for _, header in ipairs(any) do + if headers[header] ~= nil then + return true + end + end + + return false +end + + +function _EXTRACTOR:verify_all(headers) + local all = self.headers_validate.all + if not all or #all == 0 then + return true + end + + if not headers or type(headers) ~= "table" then + return false + end + + for _, header in ipairs(all) do + if headers[header] == nil then + return false + end + end + + return true +end + + +-- extractors fail silently if tracing headers are just missing from the +-- request, which is a valid scenario.
+function _EXTRACTOR:verify_headers(headers) + return self:verify_any(headers) and + self:verify_all(headers) +end + + +--- Extract tracing context from request headers. +-- +-- Function to call the extractor instance's get_context function +-- and format the tracing context to match Kong's internal interface. +-- +-- @function_EXTRACTOR:extract(headers) +-- @param table headers The request headers +-- @return table|nil Extracted tracing context as described in get_context +-- returning nil (and silently failing) is valid to indicate the extractor +-- failed to fetch any tracing context from the request headers, which is +-- a valid scenario. +function _EXTRACTOR:extract(headers) + local headers_verified = self:verify_headers(headers) + if not headers_verified then + return + end + + local ext_tracing_ctx, ext_err = self:get_context(headers) + if ext_err then + -- extractors should never return errors, they should fail silently + -- even when ext_tracing_ctx is nil or empty. + -- Only the base extractor returns a "not implemented method" error message + kong.log.err("failed to extract tracing context: ", ext_err) + end + + if not ext_tracing_ctx then + return + end + + -- update extracted context adding the extracted trace id's original size + -- this is used by injectors to determine the most appropriate size for the + -- trace ID in case multiple sizes are allowed (e.g. B3, ot) + if ext_tracing_ctx.trace_id then + ext_tracing_ctx.trace_id_original_size = #ext_tracing_ctx.trace_id + end + + -- convert IDs to internal format + ext_tracing_ctx.trace_id = to_kong_trace_id(ext_tracing_ctx.trace_id) + ext_tracing_ctx.span_id = to_kong_span_id(ext_tracing_ctx.span_id) + ext_tracing_ctx.parent_id = to_kong_span_id(ext_tracing_ctx.parent_id) + + return ext_tracing_ctx +end + + +--- Obtain tracing context from request headers. +-- +-- Function to be implemented by Extractor subclasses, it extracts the tracing +-- context from request headers. +-- +-- @function _EXTRACTOR:get_context(headers) +-- @param table headers The request headers +-- @return table|nil Extracted tracing context. +-- This is a table with the following structure: +-- { +-- trace_id = {encoded_string | nil}, +-- span_id = {encoded_string | nil}, +-- parent_id = {encoded_string | nil}, +-- reuse_span_id = {boolean | nil}, +-- should_sample = {boolean | nil}, +-- baggage = {table | nil}, +-- flags = {string | nil}, +-- single_header = {boolean | nil}, +-- } +-- +-- 1. trace_id: The trace ID extracted from the incoming tracing headers. +-- 2. span_id: The span_id field can have different meanings depending on the +-- format: +-- * Formats that support reusing span ID on both sides of the request +-- and provide two span IDs (span, parent): span ID is the ID of the +-- sender-receiver span. +-- * Formats that provide only one span ID (sometimes called parent_id): +-- span ID is the ID of the sender's span. +-- 3. parent_id: Only used to identify the parent of the span for formats +-- that support reusing span IDs on both sides of the request. +-- Plugins can ignore this field if they do not support this feature +-- (like OTel does) and use span_id as the parent of the span instead. +-- 4. reuse_span_id: Whether the format the ctx was extracted from supports +-- reusing span_ids on both sides of the request. +-- 5. should_sample: Whether the trace should be sampled or not. +-- 6. baggage: A table with the baggage items extracted from the incoming +-- tracing headers. +-- 7. 
flags: Flags extracted from the incoming tracing headers (B3) +-- 8. single_header: For extractors that support multiple formats, whether the +-- context was extracted from the single or the multi-header format. +function _EXTRACTOR:get_context(headers) + return nil, "get_context() not implemented in base class" +end + + +return _EXTRACTOR diff --git a/kong/tracing/propagation/extractors/aws.lua b/kong/tracing/propagation/extractors/aws.lua new file mode 100644 index 000000000000..cfcb5ca406d5 --- /dev/null +++ b/kong/tracing/propagation/extractors/aws.lua @@ -0,0 +1,92 @@ +local _EXTRACTOR = require "kong.tracing.propagation.extractors._base" +local propagation_utils = require "kong.tracing.propagation.utils" + +local split = require "kong.tools.utils".split +local strip = require "kong.tools.utils".strip + +local from_hex = propagation_utils.from_hex +local match = string.match +local ipairs = ipairs +local type = type + +local AWS_KV_PAIR_DELIM = ";" +local AWS_KV_DELIM = "=" +local AWS_TRACE_ID_KEY = "Root" +local AWS_TRACE_ID_LEN = 35 +local AWS_TRACE_ID_PATTERN = "^(%x+)%-(%x+)%-(%x+)$" +local AWS_TRACE_ID_VERSION = "1" +local AWS_TRACE_ID_TIMESTAMP_LEN = 8 +local AWS_TRACE_ID_UNIQUE_ID_LEN = 24 +local AWS_PARENT_ID_KEY = "Parent" +local AWS_PARENT_ID_LEN = 16 +local AWS_SAMPLED_FLAG_KEY = "Sampled" + +local AWS_EXTRACTOR = _EXTRACTOR:new({ + headers_validate = { + any = { "x-amzn-trace-id" } + } +}) + + +function AWS_EXTRACTOR:get_context(headers) + local aws_header = headers["x-amzn-trace-id"] + + if type(aws_header) ~= "string" then + return + end + + local trace_id, parent_id, should_sample + + -- The AWS trace header consists of multiple `key=value` separated by a delimiter `;` + -- We can retrieve the trace id with the `Root` key, the span id with the `Parent` + -- key and the sampling parameter with the `Sampled` flag. Extra information should be ignored. + -- + -- The trace id has a custom format: `version-timestamp-uniqueid` and an opentelemetry compatible + -- id can be deduced by concatenating the timestamp and uniqueid. + -- + -- https://docs.aws.amazon.com/xray/latest/devguide/xray-concepts.html#xray-concepts-tracingheader + for _, key_pair in ipairs(split(aws_header, AWS_KV_PAIR_DELIM)) do + local key_pair_list = split(key_pair, AWS_KV_DELIM) + local key = strip(key_pair_list[1]) + local value = strip(key_pair_list[2]) + + if key == AWS_TRACE_ID_KEY then + local version, timestamp_subset, unique_id_subset = match(value, AWS_TRACE_ID_PATTERN) + + if #value ~= AWS_TRACE_ID_LEN or version ~= AWS_TRACE_ID_VERSION + or #timestamp_subset ~= AWS_TRACE_ID_TIMESTAMP_LEN + or #unique_id_subset ~= AWS_TRACE_ID_UNIQUE_ID_LEN then + kong.log.warn("invalid aws header trace id; ignoring.") + return + end + + trace_id = from_hex(timestamp_subset .. 
unique_id_subset) + + elseif key == AWS_PARENT_ID_KEY then + if #value ~= AWS_PARENT_ID_LEN then + kong.log.warn("invalid aws header parent id; ignoring.") + return + end + parent_id = from_hex(value) + + elseif key == AWS_SAMPLED_FLAG_KEY then + if value ~= "0" and value ~= "1" then + kong.log.warn("invalid aws header sampled flag; ignoring.") + return + end + + should_sample = value == "1" + end + end + + return { + trace_id = trace_id, + -- in aws "parent" is the parent span of the receiver + -- Internally we call that "span_id" + span_id = parent_id, + parent_id = nil, + should_sample = should_sample, + } +end + +return AWS_EXTRACTOR diff --git a/kong/tracing/propagation/extractors/b3.lua b/kong/tracing/propagation/extractors/b3.lua new file mode 100644 index 000000000000..efeb0154a5be --- /dev/null +++ b/kong/tracing/propagation/extractors/b3.lua @@ -0,0 +1,219 @@ +local _EXTRACTOR = require "kong.tracing.propagation.extractors._base" +local propagation_utils = require "kong.tracing.propagation.utils" + +local from_hex = propagation_utils.from_hex +local match = string.match +local type = type + +local B3_SINGLE_PATTERN = +"^(%x+)%-(%x%x%x%x%x%x%x%x%x%x%x%x%x%x%x%x)%-?([01d]?)%-?(%x*)$" + +local B3_EXTRACTOR = _EXTRACTOR:new({ + headers_validate = { + any = { + "b3", + "tracestate", + "x-b3-traceid", + "x-b3-spanid", + "x-b3-parentspanid", + "x-b3-sampled", + "x-b3-flags", + } + } +}) + + +local function read_single_header(headers) + local b3_single_header = headers["b3"] + if not b3_single_header then + local tracestate_header = headers["tracestate"] + + -- handling tracestate header if it is multi valued + if type(tracestate_header) == "table" then + -- https://www.w3.org/TR/trace-context/#tracestate-header + -- Handling multi value header : https://httpwg.org/specs/rfc7230.html#field.order + for _, v in ipairs(tracestate_header) do + if type(v) == "string" then + b3_single_header = match(v, "^b3=(.+)$") + if b3_single_header then + break + end + end + end + + elseif tracestate_header then + b3_single_header = match(tracestate_header, "^b3=(.+)$") + end + end + + if not b3_single_header or type(b3_single_header) ~= "string" then + return + end + + -- B3 single header + -- * For speed, the "-" separators between sampled and parent_id are optional on this implementation + -- This is not guaranteed to happen in future versions and won't be considered a breaking change + -- * The "sampled" section activates sampling with both "1" and "d". 
This is to match the + -- behavior of the X-B3-Flags header + local trace_id, span_id, should_sample, parent_id, flags + local invalid_id = false + + if b3_single_header == "1" or b3_single_header == "d" then + should_sample = true + if b3_single_header == "d" then + flags = "1" + end + elseif b3_single_header == "0" then + should_sample = false + else + local sampled + trace_id, span_id, sampled, parent_id = + match(b3_single_header, B3_SINGLE_PATTERN) + + local trace_id_len = trace_id and #trace_id or 0 + if trace_id + and (trace_id_len == 16 or trace_id_len == 32) + and (parent_id == "" or #parent_id == 16) + then + if sampled == "1" or sampled == "d" then + should_sample = true + if sampled == "d" then + flags = "1" + end + elseif sampled == "0" then + should_sample = false + end + + if parent_id == "" then + parent_id = nil + end + else + kong.log.warn("b3 single header invalid; ignoring.") + invalid_id = true + end + end + + return { + trace_id = trace_id, + span_id = span_id, + parent_id = parent_id, + should_sample = should_sample, + invalid_id = invalid_id, + flags = flags, + } +end + + +local function read_multi_header(headers) + -- X-B3-Sampled: if an upstream decided to sample this request, we do too. + local should_sample = headers["x-b3-sampled"] + if should_sample == "1" or should_sample == "true" then + should_sample = true + elseif should_sample == "0" or should_sample == "false" then + should_sample = false + elseif should_sample ~= nil then + kong.log.warn("x-b3-sampled header invalid; ignoring.") + should_sample = nil + end + + -- X-B3-Flags: if it equals '1' then it overrides sampling policy + -- We still want to kong.log.warn on invalid sample header, so do this after the above + local debug_header = headers["x-b3-flags"] + if debug_header == "1" then + should_sample = true + elseif debug_header ~= nil then + kong.log.warn("x-b3-flags header invalid; ignoring.") + end + + local trace_id, span_id, parent_id + local invalid_id = false + local trace_id_header = headers["x-b3-traceid"] + + if trace_id_header and ((#trace_id_header ~= 16 and #trace_id_header ~= 32) + or trace_id_header:match("%X")) then + kong.log.warn("x-b3-traceid header invalid; ignoring.") + invalid_id = true + else + trace_id = trace_id_header + end + + local span_id_header = headers["x-b3-spanid"] + if span_id_header and (#span_id_header ~= 16 or span_id_header:match("%X")) then + kong.log.warn("x-b3-spanid header invalid; ignoring.") + invalid_id = true + else + span_id = span_id_header + end + + local parent_id_header = headers["x-b3-parentspanid"] + if parent_id_header and (#parent_id_header ~= 16 or parent_id_header:match("%X")) then + kong.log.warn("x-b3-parentspanid header invalid; ignoring.") + invalid_id = true + else + parent_id = parent_id_header + end + + return { + trace_id = trace_id, + span_id = span_id, + parent_id = parent_id, + should_sample = should_sample, + invalid_id = invalid_id, + flags = debug_header, + } +end + + +function B3_EXTRACTOR:get_context(headers) + + local trace_id, span_id, parent_id, should_sample, flags, single_header + + local single_header_ctx = read_single_header(headers) + if single_header_ctx then + single_header = true + should_sample = single_header_ctx.should_sample + flags = single_header_ctx.flags + if not single_header_ctx.invalid_id then + trace_id = single_header_ctx.trace_id + span_id = single_header_ctx.span_id + parent_id = single_header_ctx.parent_id + end + end + + local multi_header_ctx = read_multi_header(headers) + if multi_header_ctx then + 
if should_sample == nil then + should_sample = multi_header_ctx.should_sample + end + flags = flags or multi_header_ctx.flags + + if not multi_header_ctx.invalid_id then + trace_id = trace_id or multi_header_ctx.trace_id + span_id = span_id or multi_header_ctx.span_id + parent_id = parent_id or multi_header_ctx.parent_id + end + end + + if trace_id == nil then + trace_id = nil + span_id = nil + parent_id = nil + end + + trace_id = trace_id and from_hex(trace_id) or nil + span_id = span_id and from_hex(span_id) or nil + parent_id = parent_id and from_hex(parent_id) or nil + + return { + trace_id = trace_id, + span_id = span_id, + parent_id = parent_id, + reuse_span_id = true, + should_sample = should_sample, + baggage = nil, + flags = flags, + single_header = single_header, + } +end + +return B3_EXTRACTOR diff --git a/kong/tracing/propagation/extractors/datadog.lua b/kong/tracing/propagation/extractors/datadog.lua new file mode 100644 index 000000000000..fec30e61e8df --- /dev/null +++ b/kong/tracing/propagation/extractors/datadog.lua @@ -0,0 +1,61 @@ +local _EXTRACTOR = require "kong.tracing.propagation.extractors._base" +local bn = require "resty.openssl.bn" + +local from_dec = bn.from_dec + +local DATADOG_EXTRACTOR = _EXTRACTOR:new({ + headers_validate = { + any = { + "x-datadog-trace-id", + "x-datadog-parent-id", + "x-datadog-sampling-priority", + } + } +}) + + +function DATADOG_EXTRACTOR:get_context(headers) + local should_sample = headers["x-datadog-sampling-priority"] + if should_sample == "1" or should_sample == "2" then + should_sample = true + elseif should_sample == "0" or should_sample == "-1" then + should_sample = false + elseif should_sample ~= nil then + kong.log.warn("x-datadog-sampling-priority header invalid; ignoring.") + return + end + + local trace_id = headers["x-datadog-trace-id"] + if trace_id then + trace_id = trace_id:match("^%s*(%d+)%s*$") + if not trace_id then + kong.log.warn("x-datadog-trace-id header invalid; ignoring.") + end + end + + local parent_id = headers["x-datadog-parent-id"] + if parent_id then + parent_id = parent_id:match("^%s*(%d+)%s*$") + if not parent_id then + kong.log.warn("x-datadog-parent-id header invalid; ignoring.") + end + end + + if not trace_id then + parent_id = nil + end + + trace_id = trace_id and from_dec(trace_id):to_binary() or nil + parent_id = parent_id and from_dec(parent_id):to_binary() or nil + + return { + trace_id = trace_id, + -- in datadog "parent" is the parent span of the receiver + -- Internally we call that "span_id" + span_id = parent_id, + parent_id = nil, + should_sample = should_sample, + } +end + +return DATADOG_EXTRACTOR diff --git a/kong/tracing/propagation/extractors/gcp.lua b/kong/tracing/propagation/extractors/gcp.lua new file mode 100644 index 000000000000..98c381b8c824 --- /dev/null +++ b/kong/tracing/propagation/extractors/gcp.lua @@ -0,0 +1,46 @@ +local _EXTRACTOR = require "kong.tracing.propagation.extractors._base" +local propagation_utils = require "kong.tracing.propagation.utils" +local bn = require "resty.openssl.bn" + +local type = type +local ngx_re_match = ngx.re.match + +local from_hex = propagation_utils.from_hex +local from_dec = bn.from_dec + +local GCP_TRACECONTEXT_REGEX = "^(?<trace_id>[0-9a-f]{32})/(?<span_id>[0-9]{1,20})(;o=(?<trace_flags>[0-9]))?$" + +local GCP_EXTRACTOR = _EXTRACTOR:new({ + headers_validate = { + any = { "x-cloud-trace-context" } + } +}) + + +function GCP_EXTRACTOR:get_context(headers) + local gcp_header = headers["x-cloud-trace-context"] + + if type(gcp_header) ~= "string" then + return + end + + 
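+  -- the x-cloud-trace-context header is expected in the form + -- TRACE_ID/SPAN_ID;o=TRACE_TRUE, e.g. (illustrative values only): + -- 105445aa7843bc8bf206b12000100000/1234567890;o=1 + -- where TRACE_ID is a 32-character hex string, SPAN_ID is a decimal span ID + -- and o=1 marks the request as sampled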
local match, err = ngx_re_match(gcp_header, GCP_TRACECONTEXT_REGEX, 'jo') + if not match then + local warning = "invalid GCP header" + if err then + warning = warning .. ": " .. err + end + + kong.log.warn(warning .. "; ignoring.") + return + end + + return { + trace_id = from_hex(match["trace_id"]), + span_id = from_dec(match["span_id"]):to_binary(), + parent_id = nil, + should_sample = match["trace_flags"] == "1", + } +end + +return GCP_EXTRACTOR diff --git a/kong/tracing/propagation/extractors/jaeger.lua b/kong/tracing/propagation/extractors/jaeger.lua new file mode 100644 index 000000000000..8de8df024437 --- /dev/null +++ b/kong/tracing/propagation/extractors/jaeger.lua @@ -0,0 +1,76 @@ +local _EXTRACTOR = require "kong.tracing.propagation.extractors._base" +local propagation_utils = require "kong.tracing.propagation.utils" + +local from_hex = propagation_utils.from_hex +local parse_baggage_headers = propagation_utils.parse_baggage_headers +local match = string.match +local type = type +local tonumber = tonumber + +local JAEGER_TRACECONTEXT_PATTERN = "^(%x+):(%x+):(%x+):(%x+)$" +local JAEGER_BAGGAGE_PATTERN = "^uberctx%-(.*)$" + +local JAEGER_EXTRACTOR = _EXTRACTOR:new({ + headers_validate = { + any = { "uber-trace-id" } + } +}) + + +function JAEGER_EXTRACTOR:get_context(headers) + local jaeger_header = headers["uber-trace-id"] + + if type(jaeger_header) ~= "string" or jaeger_header == "" then + return + end + + local trace_id, span_id, parent_id, trace_flags = match(jaeger_header, JAEGER_TRACECONTEXT_PATTERN) + + -- values are not parseable hexadecimal and therefore invalid. + if trace_id == nil or span_id == nil or parent_id == nil or trace_flags == nil then + kong.log.warn("invalid jaeger uber-trace-id header; ignoring.") + return + end + + -- valid trace_id is required. + if #trace_id > 32 or tonumber(trace_id, 16) == 0 then + kong.log.warn("invalid jaeger trace ID; ignoring.") + return + end + + -- validate parent_id. If it is invalid, only log a warning, as it can be ignored + -- https://www.jaegertracing.io/docs/1.29/client-libraries/#tracespan-identity + if #parent_id ~= 16 and tonumber(parent_id, 16) ~= 0 then + kong.log.warn("invalid jaeger parent ID; ignoring.") + end + + -- valid span_id is required. 
+ if #span_id > 16 or tonumber(span_id, 16) == 0 then + kong.log.warn("invalid jaeger span ID; ignoring.") + return + end + + -- valid flags are required + if #trace_flags ~= 1 and #trace_flags ~= 2 then + kong.log.warn("invalid jaeger flags; ignoring.") + return + end + + -- Jaeger sampled flag: https://www.jaegertracing.io/docs/1.17/client-libraries/#tracespan-identity + local should_sample = tonumber(trace_flags, 16) % 2 == 1 + + trace_id = from_hex(trace_id) + span_id = from_hex(span_id) + parent_id = from_hex(parent_id) + + return { + trace_id = trace_id, + span_id = span_id, + parent_id = parent_id, + reuse_span_id = true, + should_sample = should_sample, + baggage = parse_baggage_headers(headers, JAEGER_BAGGAGE_PATTERN), + } +end + +return JAEGER_EXTRACTOR diff --git a/kong/tracing/propagation/extractors/ot.lua b/kong/tracing/propagation/extractors/ot.lua new file mode 100644 index 000000000000..e5249693d9c5 --- /dev/null +++ b/kong/tracing/propagation/extractors/ot.lua @@ -0,0 +1,68 @@ +local _EXTRACTOR = require "kong.tracing.propagation.extractors._base" +local propagation_utils = require "kong.tracing.propagation.utils" + +local from_hex = propagation_utils.from_hex +local parse_baggage_headers = propagation_utils.parse_baggage_headers + +local OT_BAGGAGE_PATTERN = "^ot%-baggage%-(.*)$" + +local OT_EXTRACTOR = _EXTRACTOR:new({ + headers_validate = { + any = { + "ot-tracer-sampled", + "ot-tracer-traceid", + "ot-tracer-spanid", + }, + } +}) + + +function OT_EXTRACTOR:get_context(headers) + local should_sample = headers["ot-tracer-sampled"] + if should_sample == "1" or should_sample == "true" then + should_sample = true + elseif should_sample == "0" or should_sample == "false" then + should_sample = false + elseif should_sample ~= nil then + kong.log.warn("ot-tracer-sampled header invalid; ignoring.") + should_sample = nil + end + + local trace_id, span_id + local invalid_id = false + + local trace_id_header = headers["ot-tracer-traceid"] + if trace_id_header and ((#trace_id_header ~= 16 and #trace_id_header ~= 32) or trace_id_header:match("%X")) then + kong.log.warn("ot-tracer-traceid header invalid; ignoring.") + invalid_id = true + else + trace_id = trace_id_header + end + + local span_id_header = headers["ot-tracer-spanid"] + if span_id_header and (#span_id_header ~= 16 or span_id_header:match("%X")) then + kong.log.warn("ot-tracer-spanid header invalid; ignoring.") + invalid_id = true + else + span_id = span_id_header + end + + if trace_id == nil or invalid_id then + trace_id = nil + span_id = nil + end + + trace_id = trace_id and from_hex(trace_id) or nil + span_id = span_id and from_hex(span_id) or nil + + + return { + trace_id = trace_id, + span_id = span_id, + parent_id = nil, + should_sample = should_sample, + baggage = parse_baggage_headers(headers, OT_BAGGAGE_PATTERN), + } +end + +return OT_EXTRACTOR diff --git a/kong/tracing/propagation/extractors/w3c.lua b/kong/tracing/propagation/extractors/w3c.lua new file mode 100644 index 000000000000..490d1dfd00c8 --- /dev/null +++ b/kong/tracing/propagation/extractors/w3c.lua @@ -0,0 +1,75 @@ +local _EXTRACTOR = require "kong.tracing.propagation.extractors._base" +local propagation_utils = require "kong.tracing.propagation.utils" + +local type = type +local tonumber = tonumber + +local from_hex = propagation_utils.from_hex + +local W3C_TRACECONTEXT_PATTERN = "^(%x+)%-(%x+)%-(%x+)%-(%x+)$" + +local W3C_EXTRACTOR = _EXTRACTOR:new({ + headers_validate = { + any = { "traceparent" } + } +}) + + +function 
W3C_EXTRACTOR:get_context(headers) + local traceparent = headers["traceparent"] + if type(traceparent) ~= "string" or traceparent == "" then + return + end + + local version, trace_id, parent_id, flags = traceparent:match(W3C_TRACECONTEXT_PATTERN) + + -- values are not parseable hexadecimal and therefore invalid. + if version == nil or trace_id == nil or parent_id == nil or flags == nil then + kong.log.warn("invalid W3C traceparent header; ignoring.") + return + end + + -- Only support version 00 of the W3C Trace Context spec. + if version ~= "00" then + kong.log.warn("invalid W3C Trace Context version; ignoring.") + return + end + + -- valid trace_id is required. + if #trace_id ~= 32 or tonumber(trace_id, 16) == 0 then + kong.log.warn("invalid W3C trace context trace ID; ignoring.") + return + end + + -- valid parent_id is required. + if #parent_id ~= 16 or tonumber(parent_id, 16) == 0 then + kong.log.warn("invalid W3C trace context parent ID; ignoring.") + return + end + + -- valid flags are required + if #flags ~= 2 then + kong.log.warn("invalid W3C trace context flags; ignoring.") + return + end + + -- W3C sampled flag: https://www.w3.org/TR/trace-context/#sampled-flag + local should_sample = tonumber(flags, 16) % 2 == 1 + + trace_id = from_hex(trace_id) + parent_id = from_hex(parent_id) + + return { + trace_id = trace_id, + -- in w3c "parent" is "ID of this request as known by the caller" + -- i.e. the parent span of the receiver. (https://www.w3.org/TR/trace-context/#parent-id) + -- Internally we call that "span_id" + span_id = parent_id, + parent_id = nil, + should_sample = should_sample, + baggage = nil, + flags = nil, + } +end + +return W3C_EXTRACTOR diff --git a/kong/tracing/propagation/init.lua b/kong/tracing/propagation/init.lua new file mode 100644 index 000000000000..7ee0ba3b02ee --- /dev/null +++ b/kong/tracing/propagation/init.lua @@ -0,0 +1,239 @@ +local tracing_context = require "kong.tracing.tracing_context" +local table_new = require "table.new" + +local formats = require "kong.tracing.propagation.utils".FORMATS + +local clear_header = kong.service.request.clear_header +local ngx_req_get_headers = ngx.req.get_headers +local table_insert = table.insert +local null = ngx.null +local type = type +local pairs = pairs +local ipairs = ipairs +local setmetatable = setmetatable + +local EXTRACTORS_PATH = "kong.tracing.propagation.extractors." +local INJECTORS_PATH = "kong.tracing.propagation.injectors." + + +-- This function retrieves the propagation parameters from a plugin +-- configuration, converting legacy parameters to their new locations. +local function get_plugin_params(config) + local propagation_config = config.propagation or table_new(0, 3) + + -- detect if any of the new fields was set (except for + -- default_format, which is required) and if so just return + -- the propagation configuration as is. + -- This also ensures that warnings are only logged once (per worker). + for k, v in pairs(propagation_config) do + if k ~= "default_format" and (v or null) ~= null then + return propagation_config + end + end + + if (config.default_header_type or null) ~= null then + propagation_config.default_format = config.default_header_type + + kong.log.warn( + "the default_header_type parameter is deprecated, please update your " + .. "configuration to use the propagation.default_format, " + .. 
"propagation.extract and propagation.inject options instead") + end + + if (config.header_type or null) ~= null then + if config.header_type == "preserve" then + -- configure extractors to match what used to be the harcoded + -- order of extraction in the old propagation module + propagation_config.extract = { + formats.B3, + formats.W3C, + formats.JAEGER, + formats.OT, + formats.DATADOG, + formats.AWS, + formats.GCP, + + } + propagation_config.inject = { "preserve" } + + elseif config.header_type == "ignore" then + propagation_config.inject = { propagation_config.default_format } + + else + propagation_config.extract = { + formats.B3, + formats.W3C, + formats.JAEGER, + formats.OT, + formats.DATADOG, + formats.AWS, + formats.GCP, + } + propagation_config.inject = { + -- the old logic used to propagate the "found" incoming format + "preserve", + config.header_type + } + end + + kong.log.warn( + "the header_type parameter is deprecated, please update your " + .. "configuration to use propagation.extract and propagation.inject instead") + end + + return propagation_config +end + + +-- Extract tracing data from incoming tracing headers +-- @param table conf propagation configuration +-- @return table|nil Extracted tracing context +local function extract_tracing_context(conf) + local extracted_ctx = {} + local headers = ngx_req_get_headers() + + local extractors = conf.extract + if not extractors then + -- configuring no extractors is valid to disable + -- context extraction from incoming tracing headers + return extracted_ctx + end + + for _, extractor_m in ipairs(extractors) do + local extractor = require(EXTRACTORS_PATH .. extractor_m) + + extracted_ctx = extractor:extract(headers) + + -- extract tracing context only from the first successful extractor + if type(extracted_ctx) == "table" and next(extracted_ctx) ~= nil then + kong.ctx.plugin.extracted_from = extractor_m + break + end + end + + return extracted_ctx +end + + +-- Clear tracing headers from the request +local function clear_tracing_headers(propagation_conf) + local headers = propagation_conf.clear + if not headers or next(headers) == nil then + return + end + + for _, header in ipairs(headers) do + clear_header(header) + end +end + + +-- Inject tracing context into outgoing requests +-- @param table conf propagation configuration +-- @param table inject_ctx The tracing context to inject +local function inject_tracing_context(propagation_conf, inject_ctx) + local injectors = propagation_conf.inject + if not injectors then + -- configuring no injectors is valid to disable + -- context injection in outgoing requests + return + end + + local err = {} + local trace_id_formats + for _, injector_m in ipairs(injectors) do + if injector_m == "preserve" then + -- preserve the incoming tracing header type + injector_m = kong.ctx.plugin.extracted_from or propagation_conf.default_format or formats.W3C + + -- "preserve" mappings: + -- b3 has one extractor and 2 injectors to handle single and multi-header + if injector_m == formats.B3 and inject_ctx.single_header then + injector_m = formats.B3_SINGLE + end + end + + local injector = require(INJECTORS_PATH .. 
injector_m) + + -- pass inject_ctx_instance to avoid modifying the original + local inject_ctx_instance = setmetatable({}, { __index = inject_ctx }) + -- inject tracing context information in outgoing headers + -- and obtain the formatted trace_id + local formatted_trace_id, injection_err = injector:inject(inject_ctx_instance) + if formatted_trace_id then + trace_id_formats = tracing_context.add_trace_id_formats(formatted_trace_id) + else + table_insert(err, injection_err) + end + end + + if #err > 0 then + return nil, table.concat(err, ", ") + end + return trace_id_formats +end + + +--- Propagate tracing headers. +-- +-- This function takes care of extracting, clearing and injecting tracing +-- headers according to the provided configuration. It also allows for +-- plugin-specific logic to be executed via a callback between the extraction +-- and injection steps. +-- +-- @function propagate +-- @param table propagation_conf The plugin's propagation configuration +-- this should use `get_plugin_params` to obtain the propagation configuration +-- from the plugin's configuration. +-- @param function get_inject_ctx_cb The callback function to apply +-- plugin-specific transformations to the extracted tracing context. It is +-- expected to return a table with the data to be injected in the outgoing +-- tracing headers. get_inject_ctx_cb receives the extracted tracing context +-- as its only argument, which is a table with a structure as defined in the +-- extractor base class. +-- @param variable_args Additional arguments to be passed to the callback +-- +-- @usage +-- propagation.propagate( +-- propagation.get_plugin_params(conf), +-- function(extract_ctx) +-- -- plugin-specific logic to obtain the data to be injected +-- return get_inject_ctx(conf, extract_ctx, other_args) +-- end +-- ) +local function propagate(propagation_conf, get_inject_ctx_cb, ...) + -- Tracing context Extraction: + local extract_ctx, extract_err = extract_tracing_context(propagation_conf) + if extract_err then + kong.log.err("failed to extract tracing context: ", extract_err) + end + extract_ctx = extract_ctx or {} + + -- Obtain the inject ctx (outgoing tracing headers data). The logic + -- for this is plugin-specific, defined in the get_inject_ctx_cb callback. + local inject_ctx = extract_ctx + if get_inject_ctx_cb then + inject_ctx = get_inject_ctx_cb(extract_ctx, ...) 
+ end + + -- Clear headers: + clear_tracing_headers(propagation_conf) + + -- Tracing context Injection: + local trace_id_formats, injection_err = + inject_tracing_context(propagation_conf, inject_ctx) + if trace_id_formats then + kong.log.set_serialize_value("trace_id", trace_id_formats) + elseif injection_err then + kong.log.err(injection_err) + end +end + + +return { + extract = extract_tracing_context, + inject = inject_tracing_context, + clear = clear_tracing_headers, + propagate = propagate, + get_plugin_params = get_plugin_params, +} diff --git a/kong/tracing/propagation/injectors/_base.lua b/kong/tracing/propagation/injectors/_base.lua new file mode 100644 index 000000000000..f7ce3aa76c6c --- /dev/null +++ b/kong/tracing/propagation/injectors/_base.lua @@ -0,0 +1,212 @@ +local utils = require "kong.tools.utils" +local propagation_utils = require "kong.tracing.propagation.utils" + +local to_id_size = propagation_utils.to_id_size +local set_header = kong.service.request.set_header +local contains = utils.table_contains +local type = type +local ipairs = ipairs + +local _INJECTOR = { + name = "base_injector", + context_validate = { + any = {}, + all = {}, + }, + -- array of allowed trace_id sizes for an injector + -- the first element is the default size + trace_id_allowed_sizes = { 16 }, + span_id_size_bytes = 8, +} +_INJECTOR.__index = _INJECTOR + + +--- Instantiate a new injector. +-- +-- Constructor to create a new injector instance. It accepts a name (used for +-- logging purposes), a `context_validate` table that specifies the injector's +-- context requirements and the trace_id_allowed_sizes and span_id_size_bytes +-- params to define the allowed/expected injector's ID sizes. +-- +-- @function _INJECTOR:new +-- @param table e Injector instance to use for creating the new object +-- the table can have the following fields: +-- * `name` (string, optional): the name of the injector, used for logging +-- from this class. +-- * `context_validate` (table, optional): a table with the following fields: +-- * `any` (table, optional): a list of context fields that are required to +-- be passed to the injector. If any of the fields is present, the +-- injector will be considered valid. +-- * `all` (table, optional): a list of context fields that are required to +-- be passed to the injector. All fields must be present for the +-- injector to be considered valid. +-- * `trace_id_allowed_sizes` (table, optional): list of sizes that the +-- injector is allowed to use for the trace ID. The first element is the +-- default size, the other sizes might be used depending on the incoming +-- trace ID size. +-- * `span_id_size_bytes` (number, optional): the size in bytes of the span +-- ID that the injector is expected to use. +-- +-- @usage +-- local my_injector = _INJECTOR:new({ +-- name = "my_injector", +-- context_validate = { +-- all = { "trace_id", "span_id" }, +-- any = { "parent_id", "should_sample" } +-- }, +-- trace_id_allowed_sizes = { 8, 16 }, +-- span_id_size_bytes = 8, +-- }) +function _INJECTOR:new(e) + e = e or {} + local inst = setmetatable(e, _INJECTOR) + + local err = "invalid injector instance: " + assert(type(inst.context_validate) == "table", + err .. "invalid context_validate variable") + + assert(type(inst.trace_id_allowed_sizes) == "table" and + #inst.trace_id_allowed_sizes > 0, + err .. "invalid trace_id_allowed_sizes variable") + + assert(type(inst.span_id_size_bytes) == "number" and + inst.span_id_size_bytes > 0, + err .. 
"invalid span_id_size_bytes variable") + + return inst +end + + +function _INJECTOR:verify_any(context) + local any = self.context_validate.any + if not any or #any == 0 then + return true + end + + if not context or type(context) ~= "table" then + return false, "no context to inject" + end + + for _, field in ipairs(any) do + if context[field] ~= nil then + return true + end + end + + return false, "no required field found in context: " .. + table.concat(any, ", ") +end + + +function _INJECTOR:verify_all(context) + local all = self.context_validate.all + if not all or #all == 0 then + return true + end + + if not context or type(context) ~= "table" then + return false, "no context to inject" + end + + for _, field in ipairs(all) do + if context[field] == nil then + return false, "field " .. field .. " not found in context" + end + end + + return true +end + + +-- injection failures are reported, injectors are not expected to fail because +-- kong should ensure the tracing context is valid +function _INJECTOR:verify_context(context) + local ok_any, err_any = self:verify_any(context) + local ok_all, err_all = self:verify_all(context) + + if ok_any and ok_all then + return true + end + + local err = err_any or "" + if err_all then + err = err .. (err_any and ", " or "") .. err_all + end + + return false, err +end + + +function _INJECTOR:inject(inj_tracing_ctx) + local context_verified, err = self:verify_context(inj_tracing_ctx) + if not context_verified then + return nil, self.name .. " injector context is invalid: " .. err + end + + -- Convert IDs to be compatible to the injector's format. + -- Use trace_id_allowed_sizes to try to keep the original (incoming) size + -- where possible. + -- Extractors automatically set `trace_id_original_size` during extraction. + local orig_size = inj_tracing_ctx.trace_id_original_size + local allowed = self.trace_id_allowed_sizes + local new_trace_id_size = contains(allowed, orig_size) and orig_size + or allowed[1] + + inj_tracing_ctx.trace_id = to_id_size(inj_tracing_ctx.trace_id, new_trace_id_size) + inj_tracing_ctx.span_id = to_id_size(inj_tracing_ctx.span_id, self.span_id_size_bytes) + inj_tracing_ctx.parent_id = to_id_size(inj_tracing_ctx.parent_id, self.span_id_size_bytes) + + local headers, h_err = self:create_headers(inj_tracing_ctx) + if not headers then + return nil, h_err + end + + for h_name, h_value in pairs(headers) do + set_header(h_name, h_value) + end + + local formatted_trace_id, t_err = self:get_formatted_trace_id(inj_tracing_ctx.trace_id) + if not formatted_trace_id then + return nil, t_err + end + return formatted_trace_id +end + + +--- Create headers to be injected. +-- +-- Function to be implemented by Injector subclasses, uses the extracted +-- tracing context to create and return headers for injection. +-- +-- @function _INJECTOR:create_headers(tracing_ctx) +-- @param table tracing_ctx The extracted tracing context. +-- The structure of this table is described in the Extractor base class. +-- @return table/array-of-tables that define the headers to be injected +-- example: +-- return { +-- traceparent = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01", +-- } +function _INJECTOR:create_headers(tracing_ctx) + return nil, "headers() not implemented in base class" +end + + +--- Get the formatted trace ID for the current Injector. +-- +-- Function to be implemented by Injector subclasses, it returns a +-- representation of the trace ID, formatted according to the current +-- injector's standard. 
+-- +-- @function _INJECTOR:get_formatted_trace_id(trace_id) +-- @param string trace_id The encoded trace ID. +-- @return table that defines a name and value for the formatted trace ID. +-- This is automatically included in Kong's serialized logs and will be +-- available to logging plugins. +-- Example: +-- return { w3c = "0af7651916cd43dd8448eb211c80319c" } +function _INJECTOR:get_formatted_trace_id(trace_id) + return nil, "trace_id() not implemented in base class" +end + + +return _INJECTOR diff --git a/kong/tracing/propagation/injectors/aws.lua b/kong/tracing/propagation/injectors/aws.lua new file mode 100644 index 000000000000..92bb9978fc6b --- /dev/null +++ b/kong/tracing/propagation/injectors/aws.lua @@ -0,0 +1,36 @@ +local _INJECTOR = require "kong.tracing.propagation.injectors._base" +local to_hex = require "resty.string".to_hex + +local sub = string.sub + +local AWS_TRACE_ID_VERSION = "1" +local AWS_TRACE_ID_TIMESTAMP_LEN = 8 + +local AWS_INJECTOR = _INJECTOR:new({ + name = "aws", + context_validate = { + all = { "trace_id", "span_id" }, + }, + trace_id_allowed_sizes = { 16 }, + span_id_size_bytes = 8, +}) + + +function AWS_INJECTOR:create_headers(out_tracing_ctx) + local trace_id = to_hex(out_tracing_ctx.trace_id) + return { + ["x-amzn-trace-id"] = "Root=" .. AWS_TRACE_ID_VERSION .. "-" .. + sub(trace_id, 1, AWS_TRACE_ID_TIMESTAMP_LEN) .. "-" .. + sub(trace_id, AWS_TRACE_ID_TIMESTAMP_LEN + 1, #trace_id) .. + ";Parent=" .. to_hex(out_tracing_ctx.span_id) .. ";Sampled=" .. + (out_tracing_ctx.should_sample and "1" or "0") + } +end + + +function AWS_INJECTOR:get_formatted_trace_id(trace_id) + return { aws = to_hex(trace_id) } +end + + +return AWS_INJECTOR diff --git a/kong/tracing/propagation/injectors/b3-single.lua b/kong/tracing/propagation/injectors/b3-single.lua new file mode 100644 index 000000000000..7731f34e34fe --- /dev/null +++ b/kong/tracing/propagation/injectors/b3-single.lua @@ -0,0 +1,44 @@ +local _INJECTOR = require "kong.tracing.propagation.injectors._base" +local to_hex = require "resty.string".to_hex + +local B3_SINGLE_INJECTOR = _INJECTOR:new({ + name = "b3-single", + context_validate = {}, -- all fields are optional + trace_id_allowed_sizes = { 16, 8 }, + span_id_size_bytes = 8, +}) + + +function B3_SINGLE_INJECTOR:create_headers(out_tracing_ctx) + local sampled + if out_tracing_ctx.flags == "1" then + sampled = "d" + elseif out_tracing_ctx.should_sample then + sampled = "1" + elseif out_tracing_ctx.should_sample == false then + sampled = "0" + end + + -- propagate sampling decision only + -- see: https://github.com/openzipkin/b3-propagation/blob/master/RATIONALE.md#b3-single-header-format + if not out_tracing_ctx.trace_id or not out_tracing_ctx.span_id then + sampled = sampled or "0" + + return { b3 = sampled } + end + + return { + b3 = to_hex(out_tracing_ctx.trace_id) .. + "-" .. to_hex(out_tracing_ctx.span_id) .. + (sampled and "-" .. sampled or "") .. + (out_tracing_ctx.parent_id and "-" .. 
to_hex(out_tracing_ctx.parent_id) or "") + } +end + + +function B3_SINGLE_INJECTOR:get_formatted_trace_id(trace_id) + return { b3 = trace_id and to_hex(trace_id) or "" } +end + + +return B3_SINGLE_INJECTOR diff --git a/kong/tracing/propagation/injectors/b3.lua b/kong/tracing/propagation/injectors/b3.lua new file mode 100644 index 000000000000..d5816e87fb05 --- /dev/null +++ b/kong/tracing/propagation/injectors/b3.lua @@ -0,0 +1,44 @@ +local _INJECTOR = require "kong.tracing.propagation.injectors._base" +local to_hex = require "resty.string".to_hex + +local B3_INJECTOR = _INJECTOR:new({ + name = "b3", + context_validate = {}, -- all fields are optional + trace_id_allowed_sizes = { 16, 8 }, + span_id_size_bytes = 8, +}) + + +function B3_INJECTOR:create_headers(out_tracing_ctx) + local headers + if out_tracing_ctx.trace_id and out_tracing_ctx.span_id then + headers = { + ["x-b3-traceid"] = to_hex(out_tracing_ctx.trace_id), + ["x-b3-spanid"] = to_hex(out_tracing_ctx.span_id), + } + + if out_tracing_ctx.parent_id then + headers["x-b3-parentspanid"] = to_hex(out_tracing_ctx.parent_id) + end + + else + headers = {} + end + + if out_tracing_ctx.flags then + headers["x-b3-flags"] = out_tracing_ctx.flags + + else + headers["x-b3-sampled"] = out_tracing_ctx.should_sample and "1" or "0" + end + + return headers +end + + +function B3_INJECTOR:get_formatted_trace_id(trace_id) + return { b3 = trace_id and to_hex(trace_id) or "" } +end + + +return B3_INJECTOR diff --git a/kong/tracing/propagation/injectors/datadog.lua b/kong/tracing/propagation/injectors/datadog.lua new file mode 100644 index 000000000000..a7270c9b995b --- /dev/null +++ b/kong/tracing/propagation/injectors/datadog.lua @@ -0,0 +1,40 @@ +local _INJECTOR = require "kong.tracing.propagation.injectors._base" +local bn = require "resty.openssl.bn" + +local from_binary = bn.from_binary + +local DATADOG_INJECTOR = _INJECTOR:new({ + name = "datadog", + context_validate = {}, -- all fields are optional + -- TODO: support 128-bit trace IDs + -- see: https://docs.datadoghq.com/tracing/guide/span_and_trace_id_format/#128-bit-trace-ids + -- and: https://github.com/DataDog/dd-trace-py/pull/7181/files + -- requires setting the `_dd.p.tid` span attribute + trace_id_allowed_sizes = { 8 }, + span_id_size_bytes = 8, +}) + + +function DATADOG_INJECTOR:create_headers(out_tracing_ctx) + local headers = { + ["x-datadog-trace-id"] = out_tracing_ctx.trace_id and + from_binary(out_tracing_ctx.trace_id):to_dec() or nil, + ["x-datadog-parent-id"] = out_tracing_ctx.span_id and + from_binary(out_tracing_ctx.span_id):to_dec() + or nil, + } + + if out_tracing_ctx.should_sample ~= nil then + headers["x-datadog-sampling-priority"] = out_tracing_ctx.should_sample and "1" or "0" + end + + return headers +end + + +function DATADOG_INJECTOR:get_formatted_trace_id(trace_id) + return { datadog = trace_id and from_binary(trace_id):to_dec() or nil } +end + + +return DATADOG_INJECTOR diff --git a/kong/tracing/propagation/injectors/gcp.lua b/kong/tracing/propagation/injectors/gcp.lua new file mode 100644 index 000000000000..1ff747218d6d --- /dev/null +++ b/kong/tracing/propagation/injectors/gcp.lua @@ -0,0 +1,29 @@ +local _INJECTOR = require "kong.tracing.propagation.injectors._base" +local bn = require "resty.openssl.bn" +local to_hex = require "resty.string".to_hex + +local GCP_INJECTOR = _INJECTOR:new({ + name = "gcp", + context_validate = { + all = { "trace_id", "span_id" }, + }, + trace_id_allowed_sizes = { 16 }, + span_id_size_bytes = 8, +}) + + +function 
GCP_INJECTOR:create_headers(out_tracing_ctx) + return { + ["x-cloud-trace-context"] = to_hex(out_tracing_ctx.trace_id) .. "/" .. + bn.from_binary(out_tracing_ctx.span_id):to_dec() .. + ";o=" .. (out_tracing_ctx.should_sample and "1" or "0") + } +end + + +function GCP_INJECTOR:get_formatted_trace_id(trace_id) + return { gcp = to_hex(trace_id) } +end + + +return GCP_INJECTOR diff --git a/kong/tracing/propagation/injectors/jaeger.lua b/kong/tracing/propagation/injectors/jaeger.lua new file mode 100644 index 000000000000..2bf103b930b0 --- /dev/null +++ b/kong/tracing/propagation/injectors/jaeger.lua @@ -0,0 +1,42 @@ +local _INJECTOR = require "kong.tracing.propagation.injectors._base" +local to_hex = require "resty.string".to_hex + +local pairs = pairs +local ngx_escape_uri = ngx.escape_uri + +local JAEGER_INJECTOR = _INJECTOR:new({ + name = "jaeger", + context_validate = { + all = { "trace_id", "span_id" }, + }, + trace_id_allowed_sizes = { 16, 8 }, + span_id_size_bytes = 8, +}) + + +function JAEGER_INJECTOR:create_headers(out_tracing_ctx) + local headers = { + ["uber-trace-id"] = string.format("%s:%s:%s:%s", + to_hex(out_tracing_ctx.trace_id), + to_hex(out_tracing_ctx.span_id), + out_tracing_ctx.parent_id and to_hex(out_tracing_ctx.parent_id) or "0", + out_tracing_ctx.should_sample and "01" or "00") + } + + local baggage = out_tracing_ctx.baggage + if baggage then + for k, v in pairs(baggage) do + headers["uberctx-" .. k] = ngx_escape_uri(v) + end + end + + return headers +end + + +function JAEGER_INJECTOR:get_formatted_trace_id(trace_id) + return { jaeger = to_hex(trace_id) } +end + + +return JAEGER_INJECTOR diff --git a/kong/tracing/propagation/injectors/ot.lua b/kong/tracing/propagation/injectors/ot.lua new file mode 100644 index 000000000000..f0bdc529e8c0 --- /dev/null +++ b/kong/tracing/propagation/injectors/ot.lua @@ -0,0 +1,43 @@ +local _INJECTOR = require "kong.tracing.propagation.injectors._base" +local to_hex = require "resty.string".to_hex + +local pairs = pairs +local ngx_escape_uri = ngx.escape_uri + +local OT_INJECTOR = _INJECTOR:new({ + name = "ot", + context_validate = { + all = { "trace_id", "span_id" }, + }, + trace_id_allowed_sizes = { 8, 16 }, + span_id_size_bytes = 8, +}) + + +function OT_INJECTOR:create_headers(out_tracing_ctx) + local headers = { + ["ot-tracer-traceid"] = to_hex(out_tracing_ctx.trace_id), + ["ot-tracer-spanid"] = to_hex(out_tracing_ctx.span_id), + } + + if out_tracing_ctx.should_sample ~= nil then + headers["ot-tracer-sampled"] = out_tracing_ctx.should_sample and "1" or "0" + end + + local baggage = out_tracing_ctx.baggage + if baggage then + for k, v in pairs(baggage) do + headers["ot-baggage-" .. 
k] = ngx_escape_uri(v) + end + end + + return headers +end + + +function OT_INJECTOR:get_formatted_trace_id(trace_id) + return { ot = to_hex(trace_id) } +end + + +return OT_INJECTOR diff --git a/kong/tracing/propagation/injectors/w3c.lua b/kong/tracing/propagation/injectors/w3c.lua new file mode 100644 index 000000000000..139428143bb6 --- /dev/null +++ b/kong/tracing/propagation/injectors/w3c.lua @@ -0,0 +1,33 @@ +local _INJECTOR = require "kong.tracing.propagation.injectors._base" +local to_hex = require "resty.string".to_hex + +local string_format = string.format + +local W3C_INJECTOR = _INJECTOR:new({ + name = "w3c", + context_validate = { + all = { "trace_id", "span_id" }, + }, + trace_id_allowed_sizes = { 16 }, + span_id_size_bytes = 8, +}) + + +function W3C_INJECTOR:create_headers(out_tracing_ctx) + local trace_id = to_hex(out_tracing_ctx.trace_id) + local span_id = to_hex(out_tracing_ctx.span_id) + local sampled = out_tracing_ctx.should_sample and "01" or "00" + + return { + traceparent = string_format("00-%s-%s-%s", trace_id, span_id, sampled) + } +end + + +function W3C_INJECTOR:get_formatted_trace_id(trace_id) + trace_id = to_hex(trace_id) + return { w3c = trace_id } +end + + +return W3C_INJECTOR diff --git a/kong/tracing/propagation/schema.lua b/kong/tracing/propagation/schema.lua new file mode 100644 index 000000000000..0ca294e9b61f --- /dev/null +++ b/kong/tracing/propagation/schema.lua @@ -0,0 +1,61 @@ +local Schema = require "kong.db.schema" +local utils = require "kong.tools.utils" +local formats = require "kong.tracing.propagation.utils".FORMATS + + +local extractors = {} +for _, ext in pairs(formats) do + -- b3 and b3-single formats use the same extractor: b3 + if ext ~= "b3-single" then + table.insert(extractors, ext) + end +end +local injectors = {} +for _, inj in pairs(formats) do + table.insert(injectors, inj) +end + + +return Schema.define { + type = "record", + fields = { + { + extract = { + description = "Header formats used to extract tracing context from incoming requests. If multiple values are specified, the first one found will be used for extraction. If left empty, Kong will not extract any tracing context information from incoming requests, and will generate a trace with no parent and a new trace ID.", + type = "array", + elements = { + type = "string", + one_of = extractors + }, + } + }, + { + clear = { + description = "Header names to clear after context extraction. This allows extracting the context from a certain header and then removing it from the request, which is useful when extraction and injection are performed on different header formats and the original header should not be sent to the upstream. If left empty, no headers are cleared.", + type = "array", + elements = { + type = "string", + custom_validator = utils.validate_header_name, + } + } + }, + { + inject = { + description = "Header formats used to inject tracing context. The value `preserve` will use the same header format as the incoming request. If multiple values are specified, all of them will be used during injection. If left empty, Kong will not inject any tracing context information in outgoing requests.", + type = "array", + elements = { + type = "string", + one_of = { "preserve", table.unpack(injectors) } -- luacheck: ignore table + }, + } + }, + { + default_format = { + description = "The default header format to use when extractors did not match any format in the incoming headers and `inject` is configured with the value: `preserve`. 
This can happen when no tracing header was found in the request, or the incoming tracing header formats were not included in `extract`.", + type = "string", + one_of = injectors, + required = true, + }, + } + } +} diff --git a/kong/tracing/propagation/utils.lua b/kong/tracing/propagation/utils.lua new file mode 100644 index 000000000000..9efd9e11acaa --- /dev/null +++ b/kong/tracing/propagation/utils.lua @@ -0,0 +1,98 @@ +local char = string.char +local gsub = string.gsub + +local match = string.match +local unescape_uri = ngx.unescape_uri +local pairs = pairs + +local NULL = "\0" +local TRACE_ID_SIZE_BYTES = 16 +local SPAN_ID_SIZE_BYTES = 8 + +local FORMATS = { + W3C = "w3c", + B3 = "b3", + B3_SINGLE = "b3-single", + JAEGER = "jaeger", + OT = "ot", + DATADOG = "datadog", + AWS = "aws", + GCP = "gcp", +} + +local function hex_to_char(c) + return char(tonumber(c, 16)) +end + +local function from_hex(str) + if type(str) ~= "string" then + return nil, "not a string" + end + + if #str % 2 ~= 0 then + str = "0" .. str + end + + if str ~= nil then + str = gsub(str, "%x%x", hex_to_char) + end + return str +end + +local baggage_mt = { + __newindex = function() + error("attempt to set immutable baggage", 2) + end, +} + +local function parse_baggage_headers(headers, header_pattern) + local baggage + for k, v in pairs(headers) do + local baggage_key = match(k, header_pattern) + if baggage_key then + if baggage then + baggage[baggage_key] = unescape_uri(v) + else + baggage = { [baggage_key] = unescape_uri(v) } + end + end + end + + if baggage then + return setmetatable(baggage, baggage_mt) + end +end + +local function to_id_size(id, length) + if not id then + return nil + end + + local len = #id + if len > length then + return id:sub(-length) + + elseif len < length then + return NULL:rep(length - len) .. id + end + + return id +end + +local function to_kong_trace_id(id) + return to_id_size(id, TRACE_ID_SIZE_BYTES) +end + +local function to_kong_span_id(id) + return to_id_size(id, SPAN_ID_SIZE_BYTES) +end + +return { + FORMATS = FORMATS, + + from_hex = from_hex, + to_id_size = to_id_size, + to_kong_trace_id = to_kong_trace_id, + to_kong_span_id = to_kong_span_id, + parse_baggage_headers = parse_baggage_headers, +} diff --git a/spec/01-unit/26-tracing/02-propagation_spec.lua b/spec/01-unit/26-tracing/02-propagation_spec.lua deleted file mode 100644 index ad1b39721704..000000000000 --- a/spec/01-unit/26-tracing/02-propagation_spec.lua +++ /dev/null @@ -1,1371 +0,0 @@ -local propagation = require "kong.tracing.propagation" - -local to_hex = require "resty.string".to_hex - -local table_merge = require "kong.tools.utils".table_merge - -local fmt = string.format - -local openssl_bignumber = require "resty.openssl.bn" - -local function to_hex_ids(arr) - return { arr[1], - arr[2] and to_hex(arr[2]) or nil, - arr[3] and to_hex(arr[3]) or nil, - arr[4] and to_hex(arr[4]) or nil, - arr[5] } -end - -local function left_pad_zero(str, count) - return ('0'):rep(count-#str) .. str -end - -local function to_id_len(id, len) - if #id < len then - return string.rep('0', len - #id) .. 
id - elseif #id > len then - return string.sub(id, -len) - end - - return id -end - -local parse = propagation.parse -local set = propagation.set -local from_hex = propagation.from_hex - -local trace_id = "0000000000000001" -local big_trace_id = "fffffffffffffff1" -local big_parent_id = "fffffffffffffff2" -local trace_id_32 = "00000000000000000000000000000001" -local big_trace_id_32 = "fffffffffffffffffffffffffffffff1" -local parent_id = "0000000000000002" -local span_id = "0000000000000003" -local big_span_id = "fffffffffffffff3" -local non_hex_id = "vvvvvvvvvvvvvvvv" -local too_short_id = "123" -local too_long_id = "1234567890123456789012345678901234567890" -- 40 digits - -describe("propagation.parse", function() - - _G.kong = { - log = {}, - } - - describe("b3 single header parsing", function() - local warn, debug - setup(function() - warn = spy.on(kong.log, "warn") - debug = spy.on(kong.log, "debug") - end) - before_each(function() - warn:clear() - debug:clear() - end) - teardown(function() - warn:revert() - debug:clear() - end) - - it("does not parse headers with ignore type", function() - local b3 = fmt("%s-%s-%s-%s", trace_id, span_id, "1", parent_id) - local t = { parse({ tracestate = "b3=" .. b3 }, "ignore") } - assert.spy(warn).not_called() - assert.same({}, t) - end) - - it("1-char", function() - local t = { parse({ b3 = "1" }) } - assert.same({ "b3-single", nil, nil, nil, true }, t) - assert.spy(warn).not_called() - - t = { parse({ b3 = "d" }) } - assert.same({ "b3-single", nil, nil, nil, true }, t) - assert.spy(warn).not_called() - - t = { parse({ b3 = "0" }) } - assert.same({ "b3-single", nil, nil, nil, false }, t) - assert.spy(warn).not_called() - end) - - it("4 fields", function() - local b3 = fmt("%s-%s-%s-%s", trace_id, span_id, "1", parent_id) - local t = { parse({ b3 = b3 }) } - assert.same({ "b3-single", trace_id, span_id, parent_id, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("4 fields inside traceparent", function() - local b3 = fmt("%s-%s-%s-%s", trace_id, span_id, "1", parent_id) - local t = { parse({ tracestate = "b3=" .. 
b3 }) } - assert.same({ "b3-single", trace_id, span_id, parent_id, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("32-digit trace_id", function() - local b3 = fmt("%s-%s-%s-%s", trace_id_32, span_id, "1", parent_id) - local t = { parse({ b3 = b3 }) } - assert.same({ "b3-single", trace_id_32, span_id, parent_id, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("trace_id and span_id, no sample or parent_id", function() - local b3 = fmt("%s-%s", trace_id, span_id) - local t = { parse({ b3 = b3 }) } - assert.same({ "b3-single", trace_id, span_id }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("32-digit trace_id and span_id, no sample or parent_id", function() - local b3 = fmt("%s-%s", trace_id_32, span_id) - local t = { parse({ b3 = b3 }) } - assert.same({ "b3-single", trace_id_32, span_id }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("trace_id, span_id and sample, no parent_id", function() - local b3 = fmt("%s-%s-%s", trace_id, span_id, "1") - local t = { parse({ b3 = b3 }) } - assert.same({ "b3-single", trace_id, span_id, nil, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("32-digit trace_id, span_id and sample, no parent_id", function() - local b3 = fmt("%s-%s-%s", trace_id_32, span_id, "1") - local t = { parse({ b3 = b3 }) } - assert.same({ "b3-single", trace_id_32, span_id, nil, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("big 32-digit trace_id, span_id and sample, no parent_id", function() - local b3 = fmt("%s-%s-%s", big_trace_id_32, span_id, "1") - local t = { parse({ b3 = b3 }) } - assert.same({ "b3-single", big_trace_id_32, span_id, nil, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("sample debug = always sample", function() - local b3 = fmt("%s-%s-%s-%s", trace_id, span_id, "d", parent_id) - local t = { parse({ b3 = b3 }) } - assert.same({ "b3-single", trace_id, span_id, parent_id, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("sample 0 = don't sample", function() - local b3 = fmt("%s-%s-%s-%s", trace_id, span_id, "0", parent_id) - local t = { parse({ b3 = b3 }) } - assert.same({ "b3-single", trace_id, span_id, parent_id, false }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("sample 0 overridden by x-b3-sampled", function() - local b3 = fmt("%s-%s-%s-%s", trace_id, span_id, "0", parent_id) - local t = { parse({ b3 = b3, ["x-b3-sampled"] = "1" }) } - assert.same({ "b3-single", trace_id, span_id, parent_id, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("multi value tracestate header", function() - local tracestate_header = { "test", trace_id, span_id } - local t = { parse({ tracestate = tracestate_header }) } - assert.same({ }, to_hex_ids(t)) - assert.spy(debug).called(1) - end) - - describe("errors", function() - it("requires trace id", function() - local t = { parse({ b3 = "" }) } - assert.same({"b3-single"}, t) - assert.spy(warn).called_with("b3 single header invalid; ignoring.") - end) - - it("rejects existing but invalid trace_id", function() - local t = { parse({ b3 = non_hex_id .. "-" .. span_id }) } - assert.same({"b3-single"}, t) - assert.spy(warn).called_with("b3 single header invalid; ignoring.") - - t = { parse({ b3 = too_short_id .. "-" .. span_id }) } - assert.same({"b3-single"}, t) - assert.spy(warn).called_with("b3 single header invalid; ignoring.") - - -- too long - t = { parse({ b3 = too_long_id .. "-" .. 
span_id }) } - assert.same({"b3-single"}, t) - assert.spy(warn).called_with("b3 single header invalid; ignoring.") - end) - - it("requires span_id", function() - local t = { parse({ b3 = trace_id .. "-" }) } - assert.same({"b3-single"}, t) - assert.spy(warn).called_with("b3 single header invalid; ignoring.") - end) - - it("rejects existing but invalid span_id", function() - local t = { parse({ b3 = trace_id .. non_hex_id }) } - assert.same({"b3-single"}, t) - assert.spy(warn).called_with("b3 single header invalid; ignoring.") - - t = { parse({ b3 = trace_id .. too_short_id }) } - assert.same({"b3-single"}, t) - assert.spy(warn).called_with("b3 single header invalid; ignoring.") - - t = { parse({ b3 = trace_id .. too_long_id }) } - assert.same({"b3-single"}, t) - assert.spy(warn).called_with("b3 single header invalid; ignoring.") - end) - - it("rejects invalid sampled section", function() - local b3 = fmt("%s-%s-%s-%s", trace_id, span_id, "x", parent_id) - local t = { parse({ b3 = b3 }) } - assert.same({"b3-single"}, t) - assert.spy(warn).called_with("b3 single header invalid; ignoring.") - end) - - it("rejects invalid parent_id section", function() - local b3 = fmt("%s-%s-%s-%s", trace_id, span_id, "d", non_hex_id) - local t = { parse({ b3 = b3 }) } - assert.same({"b3-single"}, t) - assert.spy(warn).called_with("b3 single header invalid; ignoring.") - - b3 = fmt("%s-%s-%s-%s", trace_id, span_id, "d", too_short_id) - t = { parse({ b3 = b3 }) } - assert.same({"b3-single"}, t) - assert.spy(warn).called_with("b3 single header invalid; ignoring.") - - b3 = fmt("%s-%s-%s-%s", trace_id, span_id, "d", too_long_id) - t = { parse({ b3 = b3 }) } - assert.same({"b3-single"}, t) - assert.spy(warn).called_with("b3 single header invalid; ignoring.") - end) - end) - end) - - describe("W3C header parsing", function() - local warn - setup(function() - warn = spy.on(kong.log, "warn") - end) - before_each(function() - warn:clear() - end) - teardown(function() - warn:revert() - end) - - it("valid traceparent with sampling", function() - local traceparent = fmt("00-%s-%s-01", trace_id_32, parent_id) - local t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c", trace_id_32, nil, parent_id, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("valid traceparent without sampling", function() - local traceparent = fmt("00-%s-%s-00", trace_id_32, parent_id) - local t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c", trace_id_32, nil, parent_id, false }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("sampling with mask", function() - local traceparent = fmt("00-%s-%s-09", trace_id_32, parent_id) - local t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c", trace_id_32, nil, parent_id, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("no sampling with mask", function() - local traceparent = fmt("00-%s-%s-08", trace_id_32, parent_id) - local t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c", trace_id_32, nil, parent_id, false }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - describe("errors", function() - it("rejects traceparent versions other than 00", function() - local traceparent = fmt("01-%s-%s-00", trace_id_32, parent_id) - local t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c" }, t) - assert.spy(warn).was_called_with("invalid W3C Trace Context version; ignoring.") - end) - - it("rejects invalid header", function() - local traceparent = 
"vv-00000000000000000000000000000001-0000000000000001-00" - local t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c" }, t) - assert.spy(warn).was_called_with("invalid W3C traceparent header; ignoring.") - - traceparent = "00-vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv-0000000000000001-00" - t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c" }, t) - assert.spy(warn).was_called_with("invalid W3C traceparent header; ignoring.") - - traceparent = "00-00000000000000000000000000000001-vvvvvvvvvvvvvvvv-00" - t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c" }, t) - assert.spy(warn).was_called_with("invalid W3C traceparent header; ignoring.") - - traceparent = "00-00000000000000000000000000000001-0000000000000001-vv" - t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c" }, t) - assert.spy(warn).was_called_with("invalid W3C traceparent header; ignoring.") - end) - - it("rejects invalid trace IDs", function() - local traceparent = fmt("00-%s-%s-00", too_short_id, parent_id) - local t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c" }, t) - assert.spy(warn).was_called_with("invalid W3C trace context trace ID; ignoring.") - - traceparent = fmt("00-%s-%s-00", too_long_id, parent_id) - t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c" }, t) - assert.spy(warn).was_called_with("invalid W3C trace context trace ID; ignoring.") - - -- cannot be all zeros - traceparent = fmt("00-00000000000000000000000000000000-%s-00", too_long_id, parent_id) - t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c" }, t) - assert.spy(warn).was_called_with("invalid W3C trace context trace ID; ignoring.") - end) - - it("rejects invalid parent IDs", function() - local traceparent = fmt("00-%s-%s-00", trace_id_32, too_short_id) - local t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c" }, t) - assert.spy(warn).was_called_with("invalid W3C trace context parent ID; ignoring.") - - traceparent = fmt("00-%s-%s-00", trace_id_32, too_long_id) - t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c" }, t) - assert.spy(warn).was_called_with("invalid W3C trace context parent ID; ignoring.") - - -- cannot be all zeros - traceparent = fmt("00-%s-0000000000000000-01", trace_id_32) - t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c" }, t) - assert.spy(warn).was_called_with("invalid W3C trace context parent ID; ignoring.") - end) - - it("rejects invalid trace flags", function() - local traceparent = fmt("00-%s-%s-000", trace_id_32, parent_id) - local t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c" }, t) - assert.spy(warn).was_called_with("invalid W3C trace context flags; ignoring.") - - traceparent = fmt("00-%s-%s-0", trace_id_32, parent_id) - t = { parse({ traceparent = traceparent }) } - assert.same({ "w3c" }, t) - assert.spy(warn).was_called_with("invalid W3C trace context flags; ignoring.") - end) - end) - end) - - - describe("Jaeger header parsing", function() - local warn - setup(function() - warn = spy.on(kong.log, "warn") - end) - before_each(function() - warn:clear() - end) - teardown(function() - warn:revert() - end) - - it("valid uber-trace-id with sampling", function() - local ubertraceid = fmt("%s:%s:%s:%s", trace_id, span_id, parent_id, "1") - local t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger", left_pad_zero(trace_id, 32), span_id, parent_id, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("valid uber-trace-id without 
sampling", function() - local ubertraceid = fmt("%s:%s:%s:%s", trace_id, span_id, parent_id, "0") - local t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger", left_pad_zero(trace_id, 32), span_id, parent_id, false }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("valid uber-trace-id 128bit with sampling", function() - local ubertraceid = fmt("%s:%s:%s:%s", trace_id_32, span_id, parent_id, "1") - local t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger", trace_id_32, span_id, parent_id, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("valid uber-trace-id 128bit without sampling", function() - local ubertraceid = fmt("%s:%s:%s:%s", trace_id_32, span_id, parent_id, "0") - local t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger", trace_id_32, span_id, parent_id, false }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("valid uber-trace-id with parent_id 0", function() - local ubertraceid = fmt("%s:%s:%s:%s", trace_id, span_id, "0", "1") - local t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger", left_pad_zero(trace_id, 32), span_id, to_hex("0"), true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - describe("errors", function() - it("rejects invalid header", function() - local ubertraceid = fmt("vv:%s:%s:%s", span_id, parent_id, "0") - local t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger" }, t) - assert.spy(warn).was_called_with("invalid jaeger uber-trace-id header; ignoring.") - - ubertraceid = fmt("%s:vv:%s:%s", trace_id, parent_id, "0") - t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger" }, t) - assert.spy(warn).was_called_with("invalid jaeger uber-trace-id header; ignoring.") - - ubertraceid = fmt("%s:%s:vv:%s", trace_id, span_id, "0") - t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger" }, t) - assert.spy(warn).was_called_with("invalid jaeger uber-trace-id header; ignoring.") - - ubertraceid = fmt("%s:%s:%s:vv", trace_id, span_id, parent_id) - t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger" }, t) - assert.spy(warn).was_called_with("invalid jaeger uber-trace-id header; ignoring.") - end) - - it("rejects invalid trace IDs", function() - local ubertraceid = fmt("%s:%s:%s:%s", too_long_id, span_id, parent_id, "1") - local t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger" }, t) - assert.spy(warn).was_called_with("invalid jaeger trace ID; ignoring.") - - -- cannot be all zeros - ubertraceid = fmt("%s:%s:%s:%s", "00000000000000000000000000000000", span_id, parent_id, "1") - t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger" }, t) - assert.spy(warn).was_called_with("invalid jaeger trace ID; ignoring.") - end) - - it("rejects invalid parent IDs", function() - -- Ignores invalid parent id and logs - local ubertraceid = fmt("%s:%s:%s:%s", trace_id, span_id, too_short_id, "1") - local t = { parse({ ["uber-trace-id"] = ubertraceid }) } - -- Note: to_hex(from_hex()) for too_short_id as the binary conversion from hex is resulting in a different number - assert.same({ "jaeger", left_pad_zero(trace_id, 32), span_id, to_hex(from_hex(too_short_id)), true }, to_hex_ids(t)) - assert.spy(warn).was_called_with("invalid jaeger parent ID; ignoring.") - - -- Ignores invalid parent id and logs - ubertraceid = fmt("%s:%s:%s:%s", trace_id, span_id, too_long_id, "1") - t = { parse({ ["uber-trace-id"] = 
ubertraceid }) } - assert.same({ "jaeger", left_pad_zero(trace_id, 32), span_id, too_long_id, true }, to_hex_ids(t)) - assert.spy(warn).was_called_with("invalid jaeger parent ID; ignoring.") - end) - - it("rejects invalid span IDs", function() - local ubertraceid = fmt("%s:%s:%s:%s", trace_id, too_long_id, parent_id, "1") - local t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger" }, t) - assert.spy(warn).was_called_with("invalid jaeger span ID; ignoring.") - - -- cannot be all zeros - ubertraceid = fmt("%s:%s:%s:%s", trace_id, "00000000000000000000000000000000", parent_id, "1") - t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger" }, t) - assert.spy(warn).was_called_with("invalid jaeger span ID; ignoring.") - end) - - it("rejects invalid trace flags", function() - local ubertraceid = fmt("%s:%s:%s:%s", trace_id, span_id, parent_id, "123") - local t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger" }, t) - assert.spy(warn).was_called_with("invalid jaeger flags; ignoring.") - end) - - it("0-pad shorter span IDs", function() - local ubertraceid = fmt("%s:%s:%s:%s", trace_id, too_short_id, parent_id, "1") - local t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger", left_pad_zero(trace_id, 32), left_pad_zero(too_short_id, 16), parent_id, true }, to_hex_ids(t)) - end) - - it("0-pad shorter trace IDs", function() - local ubertraceid = fmt("%s:%s:%s:%s", too_short_id, span_id, parent_id, "1") - local t = { parse({ ["uber-trace-id"] = ubertraceid }) } - assert.same({ "jaeger", left_pad_zero(too_short_id, 32), span_id, parent_id, true }, to_hex_ids(t)) - end) - end) - end) - - - describe("OT header parsing", function() - local warn - setup(function() - warn = spy.on(kong.log, "warn") - end) - before_each(function() - warn:clear() - end) - teardown(function() - warn:revert() - end) - - it("valid trace_id, valid span_id, sampled", function() - local t = { parse({ - ["ot-tracer-traceid"] = trace_id, - ["ot-tracer-spanid"] = span_id, - ["ot-tracer-sampled"] = "1", - })} - assert.same({ "ot", trace_id, nil, span_id, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("valid big trace_id, valid big span_id, sampled", function() - local t = { parse({ - ["ot-tracer-traceid"] = big_trace_id, - ["ot-tracer-spanid"] = big_span_id, - ["ot-tracer-sampled"] = "1", - })} - assert.same({ "ot", big_trace_id, nil, big_span_id, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("valid trace_id, valid span_id, not sampled", function() - local t = { parse({ - ["ot-tracer-traceid"] = trace_id, - ["ot-tracer-spanid"] = span_id, - ["ot-tracer-sampled"] = "0", - })} - assert.same({ "ot", trace_id, nil, span_id, false }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("valid trace_id, valid span_id, sampled", function() - local t = { parse({ - ["ot-tracer-traceid"] = trace_id, - ["ot-tracer-spanid"] = span_id, - ["ot-tracer-sampled"] = "1", - })} - assert.same({ "ot", trace_id, nil, span_id, true }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("valid trace_id, valid span_id, no sampled flag", function() - local t = { parse({ - ["ot-tracer-traceid"] = trace_id, - ["ot-tracer-spanid"] = span_id, - })} - assert.same({ "ot", trace_id, nil, span_id }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("32 trace_id, valid span_id, no sampled flag", function() - local t = { parse({ - ["ot-tracer-traceid"] = trace_id_32, - ["ot-tracer-spanid"] = span_id, - })} - 
assert.same({ "ot", trace_id_32, nil, span_id }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("big 32 trace_id, valid big_span_id, no sampled flag", function() - local t = { parse({ - ["ot-tracer-traceid"] = big_trace_id_32, - ["ot-tracer-spanid"] = big_span_id, - })} - assert.same({ "ot", big_trace_id_32, nil, big_span_id }, to_hex_ids(t)) - assert.spy(warn).not_called() - end) - - it("valid trace_id, valid span_id, sampled, valid baggage added", function() - local mock_key = "mock_key" - local mock_value = "mock_value" - local t = { parse({ - ["ot-tracer-traceid"] = trace_id, - ["ot-tracer-spanid"] = span_id, - ["ot-tracer-sampled"] = "1", - ["ot-baggage-"..mock_key] = mock_value - })} - local mock_baggage_index = t[6] - assert.same({ "ot", trace_id, nil, span_id, true }, to_hex_ids(t)) - assert.same(mock_baggage_index.mock_key, mock_value) - assert.spy(warn).not_called() - end) - - it("valid trace_id, valid span_id, sampled, invalid baggage added", function() - local t = { parse({ - ["ot-tracer-traceid"] = trace_id, - ["ot-tracer-spanid"] = span_id, - ["ot-tracer-sampled"] = "1", - ["ottttttttbaggage-foo"] = "invalid header" - })} - local mock_baggage_index = t[6] - assert.same({ "ot", trace_id, nil, span_id, true }, to_hex_ids(t)) - assert.same(mock_baggage_index, nil) - assert.spy(warn).not_called() - end) - end) - - describe("aws single header parsing", function() - local warn, debug - setup(function() - warn = spy.on(kong.log, "warn") - debug = spy.on(kong.log, "debug") - end) - before_each(function() - warn:clear() - debug:clear() - end) - teardown(function() - warn:revert() - debug:clear() - end) - - it("valid aws with sampling", function() - local aws = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", string.sub(trace_id_32, 1, 8), string.sub(trace_id_32, 9, #trace_id_32), span_id, "1") - local t = { parse({["x-amzn-trace-id"] = aws}) } - assert.spy(warn).not_called() - assert.same({ "aws", trace_id_32, span_id, nil, true }, to_hex_ids(t)) - end) - it("valid aws with spaces", function() - local aws = fmt(" Root = 1-%s-%s ; Parent= %s; Sampled =%s", string.sub(trace_id_32, 1, 8), string.sub(trace_id_32, 9, #trace_id_32), span_id, "1") - local t = { parse({["x-amzn-trace-id"] = aws}) } - assert.spy(warn).not_called() - assert.same({ "aws", trace_id_32, span_id, nil, true }, to_hex_ids(t)) - end) - it("valid aws with parent first", function() - local aws = fmt("Parent=%s;Root=1-%s-%s;Sampled=%s", span_id, string.sub(trace_id_32, 1, 8), string.sub(trace_id_32, 9, #trace_id_32), "1") - local t = { parse({["x-amzn-trace-id"] = aws}) } - assert.spy(warn).not_called() - assert.same({ "aws", trace_id_32, span_id, nil, true }, to_hex_ids(t)) - end) - it("valid aws with extra fields", function() - local aws = fmt("Foo=bar;Root=1-%s-%s;Parent=%s;Sampled=%s", string.sub(trace_id_32, 1, 8), string.sub(trace_id_32, 9, #trace_id_32), span_id, "1") - local t = { parse({["x-amzn-trace-id"] = aws}) } - assert.spy(warn).not_called() - assert.same({ "aws", trace_id_32, span_id, nil, true }, to_hex_ids(t)) - end) - it("valid aws without sampling", function() - local aws = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", string.sub(trace_id_32, 1, 8), string.sub(trace_id_32, 9, #trace_id_32), span_id, "0") - local t = { parse({["x-amzn-trace-id"] = aws}) } - assert.spy(warn).not_called() - assert.same({ "aws", trace_id_32, span_id, nil, false }, to_hex_ids(t)) - end) - it("valid aws with sampling big", function() - local aws = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", string.sub(big_trace_id_32, 1, 8), 
string.sub(big_trace_id_32, 9, #big_trace_id_32), big_span_id, "0") - local t = { parse({["x-amzn-trace-id"] = aws}) } - assert.spy(warn).not_called() - assert.same({ "aws", big_trace_id_32, big_span_id, nil, false }, to_hex_ids(t)) - end) - describe("errors", function() - it("rejects invalid trace IDs", function() - local aws = fmt("Root=0-%s-%s;Parent=%s;Sampled=%s", string.sub(trace_id_32, 1, 8), string.sub(trace_id_32, 9, #trace_id_32), big_span_id, "0") - local t = { parse({["x-amzn-trace-id"] = aws}) } - assert.same({ "aws" }, t) - assert.spy(warn).was_called_with("invalid aws header trace id; ignoring.") - - aws = fmt("Root=1-vv-%s;Parent=%s;Sampled=%s", string.sub(trace_id_32, 9, #trace_id_32), span_id, "0") - t = { parse({["x-amzn-trace-id"] = aws}) } - assert.same({ "aws" }, t) - assert.spy(warn).was_called_with("invalid aws header trace id; ignoring.") - - aws = fmt("Root=1-%s-vv;Parent=%s;Sampled=%s", string.sub(trace_id_32, 1, 8), span_id, "0") - t = { parse({["x-amzn-trace-id"] = aws}) } - assert.same({ "aws" }, t) - assert.spy(warn).was_called_with("invalid aws header trace id; ignoring.") - - aws = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", string.sub(too_long_id, 1, 8), string.sub(too_long_id, 9, #too_long_id), big_span_id, "0") - t = { parse({["x-amzn-trace-id"] = aws}) } - assert.same({ "aws" }, t) - assert.spy(warn).was_called_with("invalid aws header trace id; ignoring.") - - aws = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", string.sub(too_short_id, 1, 1), string.sub(too_short_id, 2, #too_short_id), big_span_id, "0") - t = { parse({["x-amzn-trace-id"] = aws}) } - assert.same({ "aws" }, t) - assert.spy(warn).was_called_with("invalid aws header trace id; ignoring.") - - aws = fmt("Root=;Parent=%s;Sampled=%s", big_span_id, "0") - t = { parse({["x-amzn-trace-id"] = aws}) } - assert.same({ "aws" }, t) - assert.spy(warn).was_called_with("invalid aws header trace id; ignoring.") - end) - - it("rejects invalid parent IDs", function() - local aws = fmt("Root=1-%s-%s;Parent=vv;Sampled=%s", string.sub(trace_id_32, 1, 8), string.sub(trace_id_32, 9, #trace_id_32), "0") - local t = { parse({["x-amzn-trace-id"] = aws}) } - assert.same({ "aws" }, t) - assert.spy(warn).was_called_with("invalid aws header parent id; ignoring.") - - aws = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", string.sub(trace_id_32, 1, 8), string.sub(trace_id_32, 9, #trace_id_32), too_long_id, "0") - t = { parse({["x-amzn-trace-id"] = aws}) } - assert.same({ "aws" }, t) - assert.spy(warn).was_called_with("invalid aws header parent id; ignoring.") - - aws = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", string.sub(trace_id_32, 1, 8), string.sub(trace_id_32, 2, #trace_id_32), too_short_id, "0") - t = { parse({["x-amzn-trace-id"] = aws}) } - assert.same({ "aws" }, t) - assert.spy(warn).was_called_with("invalid aws header parent id; ignoring.") - - aws = fmt("Root=1-%s-%s;Parent=;Sampled=%s", string.sub(trace_id_32, 1, 8), string.sub(trace_id_32, 2, #trace_id_32), "0") - t = { parse({["x-amzn-trace-id"] = aws}) } - assert.same({ "aws" }, t) - assert.spy(warn).was_called_with("invalid aws header parent id; ignoring.") - end) - - it("rejects invalid sample flag", function() - local aws = fmt("Root=1-%s-%s;Parent=%s;Sampled=2", string.sub(trace_id_32, 1, 8), string.sub(trace_id_32, 9, #trace_id_32), span_id) - local t = { parse({["x-amzn-trace-id"] = aws}) } - assert.same({ "aws" }, t) - assert.spy(warn).was_called_with("invalid aws header sampled flag; ignoring.") - - aws = fmt("Root=1-%s-%s;Parent=%s;Sampled=", 
string.sub(trace_id_32, 1, 8), string.sub(trace_id_32, 9, #trace_id_32), span_id) - t = { parse({["x-amzn-trace-id"] = aws}) } - assert.same({ "aws" }, t) - assert.spy(warn).was_called_with("invalid aws header sampled flag; ignoring.") - end) - end) - end) - - describe("GCP header parsing", function() - local warn - setup(function() - warn = spy.on(kong.log, "warn") - end) - before_each(function() - warn:clear() - end) - teardown(function() - warn:revert() - end) - - it("valid header with sampling", function() - local cloud_trace_context = fmt("%s/%s;o=1", trace_id_32, span_id) - local t = { parse({ ["x-cloud-trace-context"] = cloud_trace_context }) } - assert.same( - { "gcp", trace_id_32, tostring(tonumber(span_id)), nil, true }, - { t[1], to_hex(t[2]), openssl_bignumber.from_binary(t[3]):to_dec(), t[4], t[5] } - ) - assert.spy(warn).not_called() - end) - - it("valid header without sampling", function() - local cloud_trace_context = fmt("%s/%s;o=0", trace_id_32, span_id) - local t = { parse({ ["x-cloud-trace-context"] = cloud_trace_context }) } - assert.same( - { "gcp", trace_id_32, tostring(tonumber(span_id)), nil, false }, - { t[1], to_hex(t[2]), openssl_bignumber.from_binary(t[3]):to_dec(), t[4], t[5] } - ) - assert.spy(warn).not_called() - end) - - it("valid header without trace flag", function() - local cloud_trace_context = fmt("%s/%s", trace_id_32, span_id) - local t = { parse({ ["x-cloud-trace-context"] = cloud_trace_context }) } - assert.same( - { "gcp", trace_id_32, tostring(tonumber(span_id)), nil, false }, - { t[1], to_hex(t[2]), openssl_bignumber.from_binary(t[3]):to_dec(), t[4], t[5] } - ) - assert.spy(warn).not_called() - end) - - describe("errors", function() - it("rejects invalid trace IDs", function() - local cloud_trace_context = fmt("%s/%s;o=0", too_short_id, span_id) - local t = { parse({ ["x-cloud-trace-context"] = cloud_trace_context }) } - assert.same({ "gcp" }, t) - assert.spy(warn).was_called_with("invalid GCP header; ignoring.") - - cloud_trace_context = fmt("%s/%s;o=0", too_long_id, span_id) - t = { parse({ ["x-cloud-trace-context"] = cloud_trace_context }) } - assert.same({ "gcp" }, t) - assert.spy(warn).was_called_with("invalid GCP header; ignoring.") - - -- non hex characters in trace id - cloud_trace_context = fmt("abcdefghijklmnopqrstuvwxyz123456/%s;o=0", span_id) - t = { parse({ ["x-cloud-trace-context"] = cloud_trace_context }) } - assert.same({ "gcp" }, t) - assert.spy(warn).was_called_with("invalid GCP header; ignoring.") - end) - - it("rejects invalid span IDs", function() - -- missing - local cloud_trace_context = fmt("%s/;o=0", trace_id_32) - local t = { parse({ ["x-cloud-trace-context"] = cloud_trace_context }) } - assert.same({ "gcp" }, t) - assert.spy(warn).was_called_with("invalid GCP header; ignoring.") - - -- decimal value too large - cloud_trace_context = fmt("%s/%s;o=0", trace_id_32, too_long_id) - t = { parse({ ["x-cloud-trace-context"] = cloud_trace_context }) } - assert.same({ "gcp" }, t) - assert.spy(warn).was_called_with("invalid GCP header; ignoring.") - - -- non digit characters in span id - cloud_trace_context = fmt("%s/abcdefg;o=0", trace_id_32) - t = { parse({ ["x-cloud-trace-context"] = cloud_trace_context }) } - assert.same({ "gcp" }, t) - assert.spy(warn).was_called_with("invalid GCP header; ignoring.") - end) - - it("rejects invalid sampling value", function() - local cloud_trace_context = fmt("%s/%s;o=01", trace_id_32, span_id) - local t = { parse({ ["x-cloud-trace-context"] = cloud_trace_context }) } - assert.same({ "gcp" }, 
t) - assert.spy(warn).was_called_with("invalid GCP header; ignoring.") - - cloud_trace_context = fmt("%s/%s;o=", trace_id_32, span_id) - t = { parse({ ["x-cloud-trace-context"] = cloud_trace_context }) } - assert.same({ "gcp" }, t) - assert.spy(warn).was_called_with("invalid GCP header; ignoring.") - - cloud_trace_context = fmt("%s/%s;o=v", trace_id_32, span_id) - t = { parse({ ["x-cloud-trace-context"] = cloud_trace_context }) } - assert.same({ "gcp" }, t) - assert.spy(warn).was_called_with("invalid GCP header; ignoring.") - end) - - it("reports all invalid header values", function() - local cloud_trace_context = "vvvv/vvvv;o=v" - local t = { parse({ ["x-cloud-trace-context"] = cloud_trace_context }) } - assert.same({ "gcp" }, t) - assert.spy(warn).was_called_with("invalid GCP header; ignoring.") - end) - end) - end) -end) - - -describe("propagation.set", function() - local nop = function() end - - local headers - local warnings - - _G.kong = { - service = { - request = { - set_header = function(name, value) - headers[name] = value - end, - }, - }, - request = { - get_header = nop, - }, - log = { - warn = function(msg) - warnings[#warnings + 1] = msg - end, - set_serialize_value = function() end, - } - } - - for k, ids in ipairs({ {trace_id, span_id, parent_id}, - {big_trace_id, big_span_id, big_parent_id}, - {trace_id_32, span_id, parent_id}, - {big_trace_id_32, big_span_id, big_parent_id}, }) do - local trace_id = ids[1] - local span_id = ids[2] - local parent_id = ids[3] - - local w3c_trace_id = to_id_len(trace_id, 32) - local ot_trace_id = to_id_len(trace_id, 32) - local gcp_trace_id = to_id_len(trace_id, 32) - - local proxy_span = { - trace_id = from_hex(trace_id), - span_id = from_hex(span_id), - parent_id = from_hex(parent_id), - should_sample = true, - each_baggage_item = function() return nop end, - } - - local b3_headers = { - ["x-b3-traceid"] = trace_id, - ["x-b3-spanid"] = span_id, - ["x-b3-parentspanid"] = parent_id, - ["x-b3-sampled"] = "1" - } - - local b3_single_headers = { - b3 = fmt("%s-%s-1-%s", trace_id, span_id, parent_id) - } - - local w3c_headers = { - traceparent = fmt("00-%s-%s-01", w3c_trace_id, span_id) - } - - local jaeger_headers = { - ["uber-trace-id"] = fmt("%s:%s:%s:%s", trace_id, span_id, parent_id, "01") - } - - local ot_headers = { - ["ot-tracer-traceid"] = ot_trace_id, - ["ot-tracer-spanid"] = span_id, - ["ot-tracer-sampled"] = "1" - } - - local aws_headers = { - ["x-amzn-trace-id"] = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", - string.sub(trace_id, 1, 8), - string.sub(trace_id, 9, #trace_id), - span_id, - "1" - ) - } - - -- hex values are not valid span id inputs, translate to decimal - local gcp_headers = {["x-cloud-trace-context"] = gcp_trace_id .. "/" .. openssl_bignumber.from_hex(span_id):to_dec() .. ";o=1"} - - before_each(function() - headers = {} - warnings = {} - end) - - describe("conf.header_type = 'preserve', ids group #" .. 
k, function() - it("sets headers according to their found state when conf.header_type = preserve", function() - set("preserve", "b3", proxy_span) - assert.same(b3_headers, headers) - - headers = {} - - set("preserve", "b3-single", proxy_span) - assert.same(b3_single_headers, headers) - - headers = {} - - set("preserve", "w3c", proxy_span) - assert.same(w3c_headers, headers) - - headers = {} - - set("preserve", "jaeger", proxy_span) - assert.same(jaeger_headers, headers) - - headers = {} - - set("preserve", "aws", proxy_span) - assert.same(aws_headers, headers) - - headers = {} - - set("preserve", "gcp", proxy_span) - assert.same(gcp_headers, headers) - - assert.same({}, warnings) - end) - - it("sets headers according to default_header_type when no headers are provided", function() - set("preserve", nil, proxy_span) - assert.same(b3_headers, headers) - - headers = {} - - set("preserve", nil, proxy_span, "b3") - assert.same(b3_headers, headers) - - headers = {} - - set("preserve", nil, proxy_span, "b3-single") - assert.same(b3_single_headers, headers) - - headers = {} - - set("preserve", "w3c", proxy_span, "w3c") - assert.same(w3c_headers, headers) - - headers = {} - - set("preserve", nil, proxy_span, "jaeger") - assert.same(jaeger_headers, headers) - - headers = {} - - set("preserve", "ot", proxy_span, "ot") - assert.same(ot_headers, headers) - - headers = {} - - set("preserve", "aws", proxy_span, "aws") - assert.same(aws_headers, headers) - - headers = {} - set("preserve", "gcp", proxy_span, "gcp") - assert.same(gcp_headers, headers) - end) - end) - - describe("conf.header_type = 'b3', ids group #" .. k, function() - it("sets headers to b3 when conf.header_type = b3", function() - set("b3", "b3", proxy_span) - assert.same(b3_headers, headers) - - headers = {} - - set("b3", nil, proxy_span) - assert.same(b3_headers, headers) - - assert.same({}, warnings) - end) - - it("sets both the b3 and b3-single headers when a b3-single header is encountered.", function() - set("b3", "b3-single", proxy_span) - assert.same(table_merge(b3_headers, b3_single_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the b3 and w3c headers when a w3c header is encountered.", function() - set("b3", "w3c", proxy_span) - assert.same(table_merge(b3_headers, w3c_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the b3 and w3c headers when a jaeger header is encountered.", function() - set("b3", "jaeger", proxy_span) - assert.same(table_merge(b3_headers, jaeger_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the b3 and gcp headers when a gcp header is encountered.", function() - set("b3", "gcp", proxy_span) - assert.same(table_merge(b3_headers, gcp_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - end) - - describe("conf.header_type = 'b3-single', ids group #", function() - it("sets headers to b3-single when conf.header_type = b3-single", function() - set("b3-single", "b3-single", proxy_span) - assert.same(b3_single_headers, headers) - assert.same({}, warnings) - end) - - it("sets both the b3 and b3-single headers when a b3 header is encountered.", function() - set("b3-single", "b3", 
proxy_span) - assert.same(table_merge(b3_headers, b3_single_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the b3 and w3c headers when a jaeger header is encountered.", function() - set("b3-single", "w3c", proxy_span) - assert.same(table_merge(b3_single_headers, w3c_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the b3 and w3c headers when a w3c header is encountered.", function() - set("b3-single", "jaeger", proxy_span) - assert.same(table_merge(b3_single_headers, jaeger_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the b3 and gcp headers when a gcp header is encountered.", function() - set("b3-single", "gcp", proxy_span) - assert.same(table_merge(b3_single_headers, gcp_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - end) - - describe("conf.header_type = 'w3c', ids group #", function() - it("sets headers to w3c when conf.header_type = w3c", function() - set("w3c", "w3c", proxy_span) - assert.same(w3c_headers, headers) - assert.same({}, warnings) - end) - - it("sets both the b3 and w3c headers when a b3 header is encountered.", function() - set("w3c", "b3", proxy_span) - assert.same(table_merge(b3_headers, w3c_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the b3-single and w3c headers when a b3-single header is encountered.", function() - set("w3c", "b3-single", proxy_span) - assert.same(table_merge(b3_single_headers, w3c_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the jaeger and w3c headers when a jaeger header is encountered.", function() - set("w3c", "jaeger", proxy_span) - assert.same(table_merge(jaeger_headers, w3c_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the gcp and w3c headers when a gcp header is encountered.", function() - set("w3c", "gcp", proxy_span) - assert.same(table_merge(gcp_headers, w3c_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - end) - - describe("conf.header_type = 'jaeger', ids group #", function() - it("sets headers to jaeger when conf.header_type = jaeger", function() - set("jaeger", "jaeger", proxy_span) - assert.same(jaeger_headers, headers) - assert.same({}, warnings) - end) - - it("sets both the b3 and jaeger headers when a b3 header is encountered.", function() - set("jaeger", "b3", proxy_span) - assert.same(table_merge(b3_headers, jaeger_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the b3-single and jaeger headers when a b3-single header is encountered.", function() - set("jaeger", "b3-single", proxy_span) - assert.same(table_merge(b3_single_headers, jaeger_headers), headers) - - -- but it generates a warning - assert.equals(1, 
#warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the jaeger and w3c headers when a w3c header is encountered.", function() - set("jaeger", "w3c", proxy_span) - assert.same(table_merge(jaeger_headers, w3c_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the jaeger and ot headers when a ot header is encountered.", function() - set("jaeger", "ot", proxy_span) - assert.same(table_merge(jaeger_headers, ot_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the jaeger and aws headers when an aws header is encountered.", function() - set("jaeger", "aws", proxy_span) - assert.same(table_merge(jaeger_headers, aws_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the jaeger and gcp headers when a gcp header is encountered.", function() - set("jaeger", "gcp", proxy_span) - assert.same(table_merge(jaeger_headers, gcp_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - end) - - describe("conf.header_type = 'ot', ids group #", function() - it("sets headers to ot when conf.header_type = ot", function() - set("ot", "ot", proxy_span) - assert.same(ot_headers, headers) - assert.same({}, warnings) - end) - - it("sets both the b3 and ot headers when a b3 header is encountered.", function() - set("ot", "b3", proxy_span) - assert.same(table_merge(b3_headers, ot_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the b3-single and ot headers when a b3-single header is encountered.", function() - set("ot", "b3-single", proxy_span) - assert.same(table_merge(b3_single_headers, ot_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the w3c and ot headers when a w3c header is encountered.", function() - set("ot", "w3c", proxy_span) - assert.same(table_merge(w3c_headers, ot_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the ot and jaeger headers when a jaeger header is encountered.", function() - set("ot", "jaeger", proxy_span) - assert.same(table_merge(ot_headers, jaeger_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the ot and aws headers when a aws header is encountered.", function() - set("ot", "aws", proxy_span) - assert.same(table_merge(ot_headers, aws_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the ot and gcp headers when a gcp header is encountered.", function() - set("ot", "gcp", proxy_span) - assert.same(table_merge(ot_headers, gcp_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - end) - - describe("conf.header_type = 'aws', ids group #", function() - it("sets 
headers to ot when conf.header_type = aws", function() - set("aws", "aws", proxy_span) - assert.same(aws_headers, headers) - assert.same({}, warnings) - end) - - it("sets both the b3 and aws headers when a b3 header is encountered.", function() - set("aws", "b3", proxy_span) - assert.same(table_merge(b3_headers, aws_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the b3-single and aws headers when a b3-single header is encountered.", function() - set("aws", "b3-single", proxy_span) - assert.same(table_merge(b3_single_headers, aws_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the w3c and aws headers when a w3c header is encountered.", function() - set("aws", "w3c", proxy_span) - assert.same(table_merge(w3c_headers, aws_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the aws and jaeger headers when a jaeger header is encountered.", function() - set("aws", "jaeger", proxy_span) - assert.same(table_merge(aws_headers, jaeger_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the aws and gcp headers when a gcp header is encountered.", function() - set("aws", "gcp", proxy_span) - assert.same(table_merge(aws_headers, gcp_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - end) - - describe("conf.header_type = 'gcp', ids group #", function() - it("sets headers to gcp when conf.header_type = gcp", function() - set("gcp", "gcp", proxy_span) - assert.same(gcp_headers, headers) - assert.same({}, warnings) - end) - - it("sets both the b3 and gcp headers when a b3 header is encountered.", function() - set("gcp", "b3", proxy_span) - assert.same(table_merge(b3_headers, gcp_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the b3-single and gcp headers when a b3-single header is encountered.", function() - set("gcp", "b3-single", proxy_span) - assert.same(table_merge(b3_single_headers, gcp_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the gcp and ot headers when a ot header is encountered.", function() - set("gcp", "ot", proxy_span) - assert.same(table_merge(gcp_headers, ot_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the w3c and gcp headers when a w3c header is encountered.", function() - set("gcp", "w3c", proxy_span) - assert.same(table_merge(w3c_headers, gcp_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", warnings[1]) - end) - - it("sets both the gcp and jaeger headers when a jaeger header is encountered.", function() - set("gcp", "jaeger", proxy_span) - assert.same(table_merge(gcp_headers, jaeger_headers), headers) - - -- but it generates a warning - assert.equals(1, #warnings) - assert.matches("Mismatched header types", 
warnings[1])
-      end)
-
-      it("sets both the gcp and aws headers when an aws header is encountered.", function()
-        set("gcp", "aws", proxy_span)
-        assert.same(table_merge(gcp_headers, aws_headers), headers)
-
-        -- but it generates a warning
-        assert.equals(1, #warnings)
-        assert.matches("Mismatched header types", warnings[1])
-      end)
-    end)
-  end
-end)
diff --git a/spec/01-unit/26-tracing/02-propagation_strategies_spec.lua b/spec/01-unit/26-tracing/02-propagation_strategies_spec.lua
new file mode 100644
index 000000000000..34f990c3a888
--- /dev/null
+++ b/spec/01-unit/26-tracing/02-propagation_strategies_spec.lua
@@ -0,0 +1,1750 @@
+local propagation_utils = require "kong.tracing.propagation.utils"
+local utils = require "kong.tools.utils"
+local bn = require "resty.openssl.bn"
+
+local from_hex = propagation_utils.from_hex
+local to_hex = require "resty.string".to_hex
+
+local shallow_copy = utils.shallow_copy
+local fmt = string.format
+local sub = string.sub
+
+local EXTRACTORS_PATH = "kong.tracing.propagation.extractors."
+local INJECTORS_PATH = "kong.tracing.propagation.injectors."
+
+local trace_id_16 = "0af7651916cd43dd8448eb211c80319c"
+local trace_id_8 = "8448eb211c80319c"
+local trace_id_8_dec = "9532127138774266268" -- 8448eb211c80319c to decimal
+local span_id_8_1 = "b7ad6b7169203331"
+local span_id_8_1_dec = "13235353014750950193" -- b7ad6b7169203331 to decimal
+local span_id_8_2 = "b7ad6b7169203332"
+
+local big_trace_id = "fffffffffffffff1"
+local big_trace_id_16 = "fffffffffffffffffffffffffffffff1"
+local big_span_id = "fffffffffffffff3"
+local big_dec_trace_id = bn.from_hex(big_trace_id):to_dec()
+local big_dec_span_id = bn.from_hex(big_span_id):to_dec()
+local big_dec_trace_id_16 = bn.from_hex(big_trace_id_16):to_dec()
+
+-- invalid IDs:
+local too_long_id = "1234567890123456789012345678901234567890"
+
+
+local function from_hex_ids(t)
+  local t1 = shallow_copy(t)
+  t1.trace_id = t.trace_id and from_hex(t.trace_id) or nil
+  t1.span_id = t.span_id and from_hex(t.span_id) or nil
+  t1.parent_id = t.parent_id and from_hex(t.parent_id) or nil
+  return t1
+end
+
+local function to_hex_ids(t)
+  local t1 = shallow_copy(t)
+  t1.trace_id = t.trace_id and to_hex(t.trace_id) or nil
+  t1.span_id = t.span_id and to_hex(t.span_id) or nil
+  t1.parent_id = t.parent_id and to_hex(t.parent_id) or nil
+  return t1
+end
+
+local padding_prefix = string.rep("0", 16)
+
+-- Input data (array) for the tests that exercise extraction and injection
+-- (headers-to-context and context-to-headers):
+-- {
+--   extractor = "extractor-name",
+--   injector = "injector-name",
+--   headers_data = { {
+--     description = "passing Tracing-Header-Name header",
+--     extract = true,  -- set to false to skip extraction on this header data
+--     inject = true,   -- set to false to skip injection on this header data
+--     trace_id = "123abcde",
+--     headers = {
+--       ["Tracing-Header-Name"] = "123abcde:12345:1",
+--     },
+--     ctx = {
+--       trace_id = "123abcde",
+--       span_id = "12345",
+--       should_sample = true,
+--     }
+--   }
+-- }
+--
+-- Headers_data item to test extraction error case:
+-- {
+--   description = "invalid ids",
+--   extract = true,
+--   headers = {
+--     ["traceparent"] = "00-1-2-00",
+--   },
+--   err = "invalid trace ID; ignoring."
+-- } +-- +-- Headers_data item to test injection error cases: +-- { +-- description = "missing trace id", +-- inject = true, +-- ctx = { +-- span_id = "abcdef", +-- }, +-- err = "injector context is invalid" +-- } + +local test_data = { { + extractor = "w3c", + injector = "w3c", + headers_data = { { + description = "base case", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["traceparent"] = fmt("00-%s-%s-01", trace_id_16, span_id_8_1), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = true, + trace_id_original_size = 16, + } + }, { + description = "extraction with sampling mask (on)", + extract = true, + inject = false, + trace_id = trace_id_16, + headers = { + ["traceparent"] = fmt("00-%s-%s-09", trace_id_16, span_id_8_1), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = true, + trace_id_original_size = 16, + } + }, { + description = "extraction with sampling mask (off)", + extract = true, + inject = false, + trace_id = trace_id_16, + headers = { + ["traceparent"] = fmt("00-%s-%s-08", trace_id_16, span_id_8_1), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = false, + trace_id_original_size = 16, + } + }, { + description = "sampled = false", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["traceparent"] = fmt("00-%s-%s-00", trace_id_16, span_id_8_1), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = false, + trace_id_original_size = 16, + } + }, { + description = "default injection size is 16B", + inject = true, + trace_id = trace_id_16, + headers = { + ["traceparent"] = fmt("00-%s-%s-01", trace_id_16, span_id_8_1), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = true, + } + }, { -- extraction error cases + description = "invalid header 1", + extract = true, + headers = { + ["traceparent"] = fmt("vv-%s-%s-00", trace_id_16, span_id_8_1), + }, + err = "invalid W3C traceparent header; ignoring." + }, { + description = "invalid header 2", + extract = true, + headers = { + ["traceparent"] = fmt("00-vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv-%s-00", span_id_8_1), + }, + err = "invalid W3C traceparent header; ignoring." + }, { + description = "invalid header 3", + extract = true, + headers = { + ["traceparent"] = fmt("00-%s-vvvvvvvvvvvvvvvv-00", trace_id_16), + }, + err = "invalid W3C traceparent header; ignoring." + }, { + description = "invalid header 4", + extract = true, + headers = { + ["traceparent"] = fmt("00-%s-%s-vv", trace_id_16, span_id_8_1), + }, + err = "invalid W3C traceparent header; ignoring." + }, { + description = "invalid trace id (too short)", + extract = true, + headers = { + ["traceparent"] = fmt("00-%s-%s-00", "123", span_id_8_1), + }, + err = "invalid W3C trace context trace ID; ignoring." + }, { + description = "invalid trace id (all zero)", + extract = true, + headers = { + ["traceparent"] = fmt("00-%s-%s-00", "00000000000000000000000000000000", span_id_8_1), + }, + err = "invalid W3C trace context trace ID; ignoring." + }, { + description = "invalid trace id (too long)", + extract = true, + headers = { + ["traceparent"] = fmt("00-%s-%s-00", too_long_id, span_id_8_1), + }, + err = "invalid W3C trace context trace ID; ignoring." + }, { + description = "invalid parent id (too short)", + extract = true, + headers = { + ["traceparent"] = fmt("00-%s-%s-00", trace_id_16, "123"), + }, + err = "invalid W3C trace context parent ID; ignoring." 
+ }, { + description = "invalid parent id (too long)", + extract = true, + headers = { + ["traceparent"] = fmt("00-%s-%s-00", trace_id_16, too_long_id), + }, + err = "invalid W3C trace context parent ID; ignoring." + }, { + description = "invalid parent id (all zero)", + extract = true, + headers = { + ["traceparent"] = fmt("00-%s-%s-00", trace_id_16, "0000000000000000"), + }, + err = "invalid W3C trace context parent ID; ignoring." + }, { + description = "invalid version", + extract = true, + headers = { + ["traceparent"] = fmt("01-%s-%s-00", trace_id_16, span_id_8_1), + }, + err = "invalid W3C Trace Context version; ignoring." + }, { + description = "invalid flags 1", + extract = true, + headers = { + ["traceparent"] = fmt("00-%s-%s-000", trace_id_16, span_id_8_1), + }, + err = "invalid W3C trace context flags; ignoring." + }, { + description = "invalid flags 2", + extract = true, + headers = { + ["traceparent"] = fmt("00-%s-%s-0", trace_id_16, span_id_8_1), + }, + err = "invalid W3C trace context flags; ignoring." + }, { -- injection error cases + description = "missing trace id", + inject = true, + ctx = { + span_id = span_id_8_1, + should_sample = false, + }, + err = "w3c injector context is invalid: field trace_id not found in context" + }, { + description = "missing span id", + inject = true, + ctx = { + trace_id = trace_id_16, + should_sample = false, + }, + err = "w3c injector context is invalid: field span_id not found in context" + } } +}, { + extractor = "b3", + injector = "b3", + headers_data = { { + description = "base case", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["x-b3-traceid"] = trace_id_16, + ["x-b3-spanid"] = span_id_8_1, + ["x-b3-parentspanid"] = span_id_8_2, + ["x-b3-sampled"] = "1", + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + should_sample = true, + flags = nil, + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "sampling decision only", + extract = true, + inject = true, + trace_id = "", + headers = { + ["x-b3-sampled"] = "0", + }, + ctx = { + should_sample = false, + reuse_span_id = true, + }, + }, { + description = "sampled set via flags", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["x-b3-traceid"] = trace_id_16, + ["x-b3-spanid"] = span_id_8_1, + ["x-b3-parentspanid"] = span_id_8_2, + ["x-b3-flags"] = "1", + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + should_sample = true, + flags = "1", + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "sampled = false", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["x-b3-traceid"] = trace_id_16, + ["x-b3-spanid"] = span_id_8_1, + ["x-b3-parentspanid"] = span_id_8_2, + ["x-b3-sampled"] = "0", + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + should_sample = false, + flags = nil, + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "8-byte trace ID", + extract = true, + inject = true, + trace_id = trace_id_8, + headers = { + ["x-b3-traceid"] = trace_id_8, + ["x-b3-spanid"] = span_id_8_1, + ["x-b3-parentspanid"] = span_id_8_2, + ["x-b3-sampled"] = "1", + }, + ctx = { + trace_id = padding_prefix .. 
trace_id_8, + span_id = span_id_8_1, + parent_id = span_id_8_2, + should_sample = true, + flags = nil, + trace_id_original_size = 8, + reuse_span_id = true, + } + }, { + description = "default injection size is 16B", + inject = true, + trace_id = trace_id_16, + headers = { + ["x-b3-traceid"] = trace_id_16, + ["x-b3-spanid"] = span_id_8_1, + ["x-b3-parentspanid"] = span_id_8_2, + ["x-b3-sampled"] = "1", + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + should_sample = true, + flags = nil, + } + }, { -- extraction error cases + description = "invalid trace id", + extract = true, + trace_id = "x", + headers = { + ["x-b3-traceid"] = "x", + ["x-b3-spanid"] = span_id_8_1, + ["x-b3-parentspanid"] = span_id_8_2, + ["x-b3-sampled"] = "0", + }, + err = "x-b3-traceid header invalid; ignoring." + } } +}, { + extractor = "b3", + injector = "b3-single", + headers_data = { { + description = "1-char header, sampled = true", + extract = true, + inject = true, + trace_id = "", + headers = { + ["b3"] = "1", + }, + ctx = { + single_header = true, + should_sample = true, + reuse_span_id = true, + } + }, { + description = "1-char header, sampled = false", + extract = true, + inject = true, + trace_id = "", + headers = { + ["b3"] = "0", + }, + ctx = { + single_header = true, + should_sample = false, + reuse_span_id = true, + } + }, { + description = "1-char header, debug", + extract = true, + inject = true, + trace_id = "", + headers = { + ["b3"] = "d", + }, + ctx = { + single_header = true, + should_sample = true, + flags = "1", + reuse_span_id = true, + } + }, { + description = "all fields", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["b3"] = fmt("%s-%s-%s-%s", trace_id_16, span_id_8_1, "1", span_id_8_2), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + single_header = true, + should_sample = true, + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "all fields, sampled = false", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["b3"] = fmt("%s-%s-%s-%s", trace_id_16, span_id_8_1, "0", span_id_8_2), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + single_header = true, + should_sample = false, + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "all fields, debug", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["b3"] = fmt("%s-%s-%s-%s", trace_id_16, span_id_8_1, "d", span_id_8_2), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + single_header = true, + should_sample = true, + flags = "1", + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "extraction from tracestate", + extract = true, + inject = false, + trace_id = trace_id_16, + headers = { + ["tracestate"] = "b3=" .. fmt("%s-%s-%s-%s", trace_id_16, span_id_8_1, "d", span_id_8_2), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + single_header = true, + should_sample = true, + flags = "1", + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "extraction from tracestate multi-value", + extract = true, + inject = false, + trace_id = trace_id_16, + headers = { + ["tracestate"] = { + "test", + "b3=" .. 
fmt("%s-%s-%s-%s", trace_id_16, span_id_8_1, "1", span_id_8_2), + "test2", + } + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + single_header = true, + should_sample = true, + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "trace id and span id only: no sampled and no parent", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["b3"] = fmt("%s-%s", trace_id_16, span_id_8_1), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + single_header = true, + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "no parent", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["b3"] = fmt("%s-%s-%s", trace_id_16, span_id_8_1, "1"), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + single_header = true, + should_sample = true, + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "8-byte trace ID", + extract = true, + inject = true, + trace_id = trace_id_8, + headers = { + ["b3"] = fmt("%s-%s-%s", trace_id_8, span_id_8_1, "1"), + }, + ctx = { + trace_id = padding_prefix .. trace_id_8, + span_id = span_id_8_1, + single_header = true, + should_sample = true, + trace_id_original_size = 8, + reuse_span_id = true, + } + }, { + description = "default injection size is 16B", + inject = true, + trace_id = trace_id_16, + headers = { + ["b3"] = fmt("%s-%s-%s-%s", trace_id_16, span_id_8_1, "d", span_id_8_2), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + single_header = true, + should_sample = true, + flags = "1", + } + }, { + description = "big 16B trace ID", + inject = true, + trace_id = big_trace_id_16, + headers = { + ["b3"] = fmt("%s-%s-%s-%s", big_trace_id_16, span_id_8_1, "d", span_id_8_2), + }, + ctx = { + trace_id = big_trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + single_header = true, + should_sample = true, + flags = "1", + } + }, { -- extraction error cases + description = "invalid trace ID (non hex)", + extract = true, + trace_id = "abc", + headers = { + ["b3"] = fmt("xxx-%s-%s-%s", span_id_8_1, "1", span_id_8_2), + }, + err = "b3 single header invalid; ignoring." + }, { + description = "invalid trace ID (too long)", + extract = true, + headers = { + ["b3"] = fmt("%s-%s-%s-%s", too_long_id, span_id_8_1, "1", span_id_8_2), + }, + err = "b3 single header invalid; ignoring." + }, { + description = "invalid trace ID (too short)", + extract = true, + headers = { + ["b3"] = fmt("%s-%s-%s-%s", "123", span_id_8_1, "1", span_id_8_2), + }, + err = "b3 single header invalid; ignoring." + }, { + description = "empty header", + extract = true, + headers = { + ["b3"] = "", + }, + err = "b3 single header invalid; ignoring." + }, { + description = "no span id", + extract = true, + headers = { + ["b3"] = trace_id_16 .. "-", + }, + err = "b3 single header invalid; ignoring." + }, { + description = "non hex span id", + extract = true, + headers = { + ["b3"] = trace_id_16 .. "-xxx", + }, + err = "b3 single header invalid; ignoring." + }, { + description = "invalid span id (too long)", + extract = true, + headers = { + ["b3"] = fmt("%s-%s-%s-%s", trace_id_16, too_long_id, "1", span_id_8_2), + }, + err = "b3 single header invalid; ignoring." 
+ }, { + description = "invalid span id (too short)", + extract = true, + headers = { + ["b3"] = fmt("%s-%s-%s-%s", trace_id_16, "123", "1", span_id_8_2), + }, + err = "b3 single header invalid; ignoring." + }, { + description = "invalid sampled", + extract = true, + headers = { + ["b3"] = fmt("%s-%s-%s", trace_id_16, span_id_8_1, "x"), + }, + err = "b3 single header invalid; ignoring." + }, { + description = "invalid parent", + extract = true, + headers = { + ["b3"] = fmt("%s-%s-%s-%s", trace_id_16, span_id_8_1, "d", "xxx"), + }, + err = "b3 single header invalid; ignoring." + }, { + description = "invalid parent (too long)", + extract = true, + headers = { + ["b3"] = fmt("%s-%s-%s-%s", trace_id_16, span_id_8_1, "d", too_long_id), + }, + err = "b3 single header invalid; ignoring." + }, { + description = "invalid parent (too short)", + extract = true, + headers = { + ["b3"] = fmt("%s-%s-%s-%s", trace_id_16, span_id_8_1, "d", "123"), + }, + err = "b3 single header invalid; ignoring." + } } +}, { + extractor = "jaeger", + injector = "jaeger", + headers_data = { { + description = "base case", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:%s", trace_id_16, span_id_8_1, span_id_8_2, "01"), + ["uberctx-foo"] = "bar", + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + should_sample = true, + baggage = { foo = "bar" }, + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "sampled = false", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:%s", trace_id_16, span_id_8_1, span_id_8_2, "00"), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + should_sample = false, + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "parent = 0", + extract = true, + inject = false, + trace_id = trace_id_16, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:%s", trace_id_16, span_id_8_1, "0", "01"), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = "0000000000000000", + should_sample = true, + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "0-pad shorter span ID", + extract = true, + inject = false, + trace_id = trace_id_16, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:%s", trace_id_16, "123", span_id_8_2, "01"), + }, + ctx = { + trace_id = trace_id_16, + span_id = "0000000000000123", + parent_id = span_id_8_2, + should_sample = true, + trace_id_original_size = 16, + reuse_span_id = true, + } + }, { + description = "0-pad shorter trace ID", + extract = true, + inject = false, + trace_id = trace_id_16, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:%s", "1234", span_id_8_1, span_id_8_2, "01"), + }, + ctx = { + trace_id = "00000000000000000000000000001234", + span_id = span_id_8_1, + parent_id = span_id_8_2, + should_sample = true, + trace_id_original_size = 2, + reuse_span_id = true, + } + }, { + description = "8B trace ID", + extract = true, + inject = true, + trace_id = trace_id_8, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:%s", trace_id_8, span_id_8_1, span_id_8_2, "01"), + }, + ctx = { + trace_id = padding_prefix .. 
trace_id_8, + span_id = span_id_8_1, + parent_id = span_id_8_2, + should_sample = true, + trace_id_original_size = 8, + reuse_span_id = true, + } + }, { + description = "default injection size is 16B", + inject = true, + trace_id = trace_id_16, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:%s", trace_id_16, span_id_8_1, span_id_8_2, "01"), + ["uberctx-foo"] = "bar", + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + parent_id = span_id_8_2, + should_sample = true, + baggage = { foo = "bar" }, + } + }, { -- extraction error cases + description = "invalid header 1", + extract = true, + headers = { + ["uber-trace-id"] = fmt("vv:%s:%s:%s", span_id_8_1, span_id_8_2, "00"), + }, + err = "invalid jaeger uber-trace-id header; ignoring." + }, { + description = "invalid header 2", + extract = true, + headers = { + ["uber-trace-id"] = fmt("%s:vv:%s:%s", trace_id_8, span_id_8_2, "00"), + }, + err = "invalid jaeger uber-trace-id header; ignoring." + }, { + description = "invalid header 3", + extract = true, + headers = { + ["uber-trace-id"] = fmt("%s:%s:vv:%s", trace_id_8, span_id_8_1, "00"), + }, + err = "invalid jaeger uber-trace-id header; ignoring." + }, { + description = "invalid header 4", + extract = true, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:vv", trace_id_8, span_id_8_1, span_id_8_2), + }, + err = "invalid jaeger uber-trace-id header; ignoring." + }, { + description = "invalid trace id (too long)", + extract = true, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:00", too_long_id, span_id_8_1, span_id_8_2), + }, + err = "invalid jaeger trace ID; ignoring." + }, { + description = "invalid trace id (all zero)", + extract = true, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:00", "00000000000000000000000000000000", span_id_8_1, span_id_8_2), + }, + err = "invalid jaeger trace ID; ignoring." + }, { + description = "invalid parent id (too short)", + extract = true, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:00", trace_id_16, span_id_8_1, "ff"), + }, + err = "invalid jaeger parent ID; ignoring." + }, { + description = "invalid parent id (too long)", + extract = true, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:00", trace_id_16, span_id_8_1, too_long_id), + }, + err = "invalid jaeger parent ID; ignoring." + }, { + description = "invalid span id (too long)", + extract = true, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:00", trace_id_16, too_long_id, span_id_8_1), + }, + err = "invalid jaeger span ID; ignoring." + }, { + description = "invalid span id (all zero)", + extract = true, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:00", trace_id_16, "00000000000000000000000000000000", span_id_8_1), + }, + err = "invalid jaeger span ID; ignoring." + }, { + description = "invalid flags", + extract = true, + headers = { + ["uber-trace-id"] = fmt("%s:%s:%s:123", trace_id_16, span_id_8_1, span_id_8_2), + }, + err = "invalid jaeger flags; ignoring." 
+ }, { -- injection error cases + description = "missing trace id", + inject = true, + ctx = { + span_id = span_id_8_1, + should_sample = false, + }, + err = "jaeger injector context is invalid: field trace_id not found in context" + }, { + description = "missing span id", + inject = true, + ctx = { + trace_id = trace_id_16, + should_sample = false, + }, + err = "jaeger injector context is invalid: field span_id not found in context" + } } +}, { + extractor = "ot", + injector = "ot", + headers_data = { { + description = "base case", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["ot-tracer-traceid"] = trace_id_16, + ["ot-tracer-spanid"] = span_id_8_1, + ["ot-tracer-sampled"] = "1", + ["ot-baggage-foo"] = "bar", + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = true, + baggage = { foo = "bar" }, + trace_id_original_size = 16, + } + }, { + description = "sampled = false", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["ot-tracer-traceid"] = trace_id_16, + ["ot-tracer-spanid"] = span_id_8_1, + ["ot-tracer-sampled"] = "0", + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = false, + trace_id_original_size = 16, + } + }, { + description = "missing sampled flag", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["ot-tracer-traceid"] = trace_id_16, + ["ot-tracer-spanid"] = span_id_8_1, + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + trace_id_original_size = 16, + } + }, { + description = "large trace and span ids", + extract = true, + inject = true, + trace_id = big_trace_id_16, + headers = { + ["ot-tracer-traceid"] = big_trace_id_16, + ["ot-tracer-spanid"] = big_span_id, + ["ot-baggage-foo"] = "bar", + }, + ctx = { + trace_id = big_trace_id_16, + span_id = big_span_id, + baggage = { foo = "bar" }, + trace_id_original_size = 16, + } + }, { + description = "8B trace id", + extract = true, + inject = true, + trace_id = trace_id_8, + headers = { + ["ot-tracer-traceid"] = trace_id_8, + ["ot-tracer-spanid"] = span_id_8_1, + ["ot-tracer-sampled"] = "0", + }, + ctx = { + trace_id = padding_prefix .. trace_id_8, + span_id = span_id_8_1, + should_sample = false, + trace_id_original_size = 8, + } + }, { + description = "default injection size is 8B", + inject = true, + trace_id = trace_id_8, + headers = { + ["ot-tracer-traceid"] = trace_id_8, + ["ot-tracer-spanid"] = span_id_8_1, + ["ot-tracer-sampled"] = "1", + ["ot-baggage-foo"] = "bar", + }, + ctx = { + trace_id = padding_prefix .. trace_id_8, + span_id = span_id_8_1, + should_sample = true, + baggage = { foo = "bar" }, + } + }, { + description = "invalid baggage", + extract = true, + trace_id = trace_id_16, + headers = { + ["ot-tracer-traceid"] = trace_id_16, + ["ot-tracer-spanid"] = span_id_8_1, + ["ot-tracer-sampled"] = "1", + ["otttttbaggage-foo"] = "bar", + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = true, + baggage = nil, + trace_id_original_size = 16, + } + }, { -- extraction error cases + description = "invalid header", + extract = true, + headers = { + ["ot-tracer-traceid"] = "xx", + }, + err = "ot-tracer-traceid header invalid; ignoring." 
+ }, { -- injection error cases + description = "missing trace id", + inject = true, + ctx = { + span_id = span_id_8_1, + should_sample = false, + }, + err = "ot injector context is invalid: field trace_id not found in context" + }, { + description = "missing span id", + inject = true, + ctx = { + trace_id = trace_id_16, + should_sample = false, + }, + err = "ot injector context is invalid: field span_id not found in context" + } } +}, { + extractor = "datadog", + injector = "datadog", + headers_data = { { + description = "base case", + extract = true, + inject = true, + trace_id = trace_id_8_dec, + headers = { + ["x-datadog-trace-id"] = trace_id_8_dec, + ["x-datadog-parent-id"] = span_id_8_1_dec, + ["x-datadog-sampling-priority"] = "1", + }, + ctx = { + trace_id = padding_prefix .. trace_id_8, + span_id = span_id_8_1, + should_sample = true, + trace_id_original_size = 8, + } + }, { + description = "sampled = false", + extract = true, + inject = true, + trace_id = trace_id_8_dec, + headers = { + ["x-datadog-trace-id"] = trace_id_8_dec, + ["x-datadog-parent-id"] = span_id_8_1_dec, + ["x-datadog-sampling-priority"] = "0", + }, + ctx = { + trace_id = padding_prefix .. trace_id_8, + span_id = span_id_8_1, + should_sample = false, + trace_id_original_size = 8, + } + }, { + description = "missing trace id ignores parent id", + extract = true, + headers = { + ["x-datadog-parent-id"] = span_id_8_1_dec, + ["x-datadog-sampling-priority"] = "1", + }, + ctx = { + should_sample = true, + } + }, { + description = "missing parent id", + extract = true, + inject = true, + trace_id = trace_id_8_dec, + headers = { + ["x-datadog-trace-id"] = trace_id_8_dec, + ["x-datadog-sampling-priority"] = "1", + }, + ctx = { + trace_id = padding_prefix .. trace_id_8, + should_sample = true, + trace_id_original_size = 8, + } + }, { + description = "missing sampled", + extract = true, + inject = true, + trace_id = trace_id_8_dec, + headers = { + ["x-datadog-trace-id"] = trace_id_8_dec, + ["x-datadog-parent-id"] = span_id_8_1_dec, + }, + ctx = { + trace_id = padding_prefix .. trace_id_8, + span_id = span_id_8_1, + trace_id_original_size = 8, + } + }, { + description = "big dec trace id", + extract = true, + inject = true, + trace_id = big_dec_trace_id, + headers = { + ["x-datadog-trace-id"] = big_dec_trace_id, + ["x-datadog-parent-id"] = span_id_8_1_dec, + }, + ctx = { + trace_id = padding_prefix .. big_trace_id, + span_id = span_id_8_1, + trace_id_original_size = 8, + } + }, { + description = "big dec span id", + extract = true, + inject = true, + trace_id = trace_id_8_dec, + headers = { + ["x-datadog-trace-id"] = trace_id_8_dec, + ["x-datadog-parent-id"] = big_dec_span_id, + }, + ctx = { + trace_id = padding_prefix .. 
trace_id_8, + span_id = big_span_id, + trace_id_original_size = 8, + } + }, { + description = "(can extract invalid) big dec trace id 16", + extract = true, + trace_id = big_dec_trace_id, + headers = { + ["x-datadog-trace-id"] = big_dec_trace_id_16, + ["x-datadog-parent-id"] = span_id_8_1_dec, + }, + ctx = { + trace_id = big_trace_id_16, + span_id = span_id_8_1, + trace_id_original_size = 16, + } + }, { + description = "default injection size is 8B", + inject = true, + trace_id = trace_id_8_dec, + headers = { + ["x-datadog-trace-id"] = trace_id_8_dec, + ["x-datadog-parent-id"] = span_id_8_1_dec, + ["x-datadog-sampling-priority"] = "1", + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = true, + } + }, { -- extraction error cases + description = "invalid trace id", + extract = true, + trace_id = trace_id_16, + headers = { + ["x-datadog-trace-id"] = trace_id_16, + ["x-datadog-parent-id"] = span_id_8_1_dec, + ["x-datadog-sampling-priority"] = "1", + }, + err = "x-datadog-trace-id header invalid; ignoring." + }, { + description = "invalid parent id", + extract = true, + trace_id = trace_id_16, + headers = { + ["x-datadog-trace-id"] = trace_id_8_dec, + ["x-datadog-parent-id"] = span_id_8_1, + ["x-datadog-sampling-priority"] = "1", + }, + err = "x-datadog-parent-id header invalid; ignoring." + }, { + description = "empty string trace id", + extract = true, + trace_id = "", + headers = { + ["x-datadog-trace-id"] = "", + ["x-datadog-parent-id"] = span_id_8_1_dec, + ["x-datadog-sampling-priority"] = "1", + }, + err = "x-datadog-trace-id header invalid; ignoring." + }, { + description = "invalid parent id", + extract = true, + trace_id = trace_id_16, + headers = { + ["x-datadog-trace-id"] = trace_id_8_dec, + ["x-datadog-parent-id"] = span_id_8_1, + ["x-datadog-sampling-priority"] = "1", + }, + err = "x-datadog-parent-id header invalid; ignoring." + }, { + description = "empty string parent id", + extract = true, + trace_id = "", + headers = { + ["x-datadog-trace-id"] = trace_id_8_dec, + ["x-datadog-parent-id"] = "", + ["x-datadog-sampling-priority"] = "1", + }, + err = "x-datadog-parent-id header invalid; ignoring." 
+ } } +}, { + extractor = "aws", + injector = "aws", + headers_data = { { + description = "base case", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", + sub(trace_id_16, 1, 8), + sub(trace_id_16, 9, #trace_id_16), + span_id_8_1, "1"), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = true, + trace_id_original_size = 16, + } + }, { + description = "with spaces", + extract = true, + inject = false, + trace_id = trace_id_16, + headers = { + ["x-amzn-trace-id"] = fmt(" Root = 1-%s-%s ; Parent= %s; Sampled =%s", + sub(trace_id_16, 1, 8), + sub(trace_id_16, 9, #trace_id_16), + span_id_8_1, "1"), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = true, + trace_id_original_size = 16, + } + }, { + description = "parent first", + extract = true, + inject = false, + trace_id = trace_id_16, + headers = { + ["x-amzn-trace-id"] = fmt("Parent=%s;Root=1-%s-%s;Sampled=%s", + span_id_8_1, + sub(trace_id_16, 1, 8), + sub(trace_id_16, 9, #trace_id_16), + "1"), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = true, + trace_id_original_size = 16, + } + }, { + description = "extra fields", + extract = true, + inject = false, + trace_id = trace_id_16, + headers = { + ["x-amzn-trace-id"] = fmt("Foo=bar;Root=1-%s-%s;Parent=%s;Sampled=%s", + sub(trace_id_16, 1, 8), + sub(trace_id_16, 9, #trace_id_16), + span_id_8_1, + "1"), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = true, + trace_id_original_size = 16, + } + }, { + description = "large id", + extract = true, + inject = true, + trace_id = big_trace_id_16, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", + sub(big_trace_id_16, 1, 8), + sub(big_trace_id_16, 9, #big_trace_id_16), + span_id_8_1, + "1"), + }, + ctx = { + trace_id = big_trace_id_16, + span_id = span_id_8_1, + should_sample = true, + trace_id_original_size = 16, + } + }, { + description = "sampled = false", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", + sub(trace_id_16, 1, 8), + sub(trace_id_16, 9, #trace_id_16), + span_id_8_1, "0"), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = false, + trace_id_original_size = 16, + } + }, { + description = "default injection size is 16B", + inject = true, + trace_id = trace_id_16, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", + sub(trace_id_16, 1, 8), + sub(trace_id_16, 9, #trace_id_16), + span_id_8_1, "1"), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = true, + } + }, { -- extraction error cases + description = "invalid trace id 1", + extract = true, + headers = { + ["x-amzn-trace-id"] = fmt("Root=0-%s-%s;Parent=%s;Sampled=%s", + sub(trace_id_8, 1, 8), + sub(trace_id_8, 9, #trace_id_8), + span_id_8_1, "0"), + }, + err = "invalid aws header trace id; ignoring." + }, { + description = "invalid trace id 2", + extract = true, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-vv-%s;Parent=%s;Sampled=%s", + sub(trace_id_8, 9, #trace_id_8), + span_id_8_1, "0"), + }, + err = "invalid aws header trace id; ignoring." 
+ }, { + description = "invalid trace id 3", + extract = true, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-%s-vv;Parent=%s;Sampled=%s", + sub(trace_id_8, 1, 8), + span_id_8_1, "0"), + }, + err = "invalid aws header trace id; ignoring." + }, { + description = "invalid trace id (too short)", + extract = true, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", + sub(trace_id_8, 1, 8), + sub(trace_id_8, 9, #trace_id_8), + span_id_8_1, "0"), + }, + err = "invalid aws header trace id; ignoring." + }, { + description = "invalid trace id (too long)", + extract = true, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", + sub(too_long_id, 1, 8), + sub(too_long_id, 9, #too_long_id), + span_id_8_1, "0"), + }, + err = "invalid aws header trace id; ignoring." + }, { + description = "missing trace id", + extract = true, + trace_id = trace_id_16, + headers = { + ["x-amzn-trace-id"] = fmt("Root=;Parent=%s;Sampled=%s", + span_id_8_1, "0"), + }, + err = "invalid aws header trace id; ignoring." + }, { + description = "invalid parent id 1", + extract = true, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-%s-%s;Parent=vv;Sampled=%s", + sub(trace_id_16, 1, 8), + sub(trace_id_16, 9, #trace_id_16), + "0"), + }, + err = "invalid aws header parent id; ignoring." + }, { + description = "invalid parent id (too long)", + extract = true, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", + sub(trace_id_16, 1, 8), + sub(trace_id_16, 9, #trace_id_16), + too_long_id, "0"), + }, + err = "invalid aws header parent id; ignoring." + }, { + description = "invalid parent id (too short)", + extract = true, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-%s-%s;Parent=%s;Sampled=%s", + sub(trace_id_16, 1, 8), + sub(trace_id_16, 9, #trace_id_16), + "123", "0"), + }, + err = "invalid aws header parent id; ignoring." + }, { + description = "missing parent id", + extract = true, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-%s-%s;Parent=;Sampled=%s", + sub(trace_id_16, 1, 8), + sub(trace_id_16, 9, #trace_id_16), + "0"), + }, + err = "invalid aws header parent id; ignoring." + }, { + description = "invalid sampled flag", + extract = true, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-%s-%s;Parent=%s;Sampled=2", + sub(trace_id_16, 1, 8), + sub(trace_id_16, 9, #trace_id_16), + span_id_8_1, "0"), + }, + err = "invalid aws header sampled flag; ignoring." + }, { + description = "missing sampled flag", + extract = true, + headers = { + ["x-amzn-trace-id"] = fmt("Root=1-%s-%s;Parent=%s;Sampled=", + sub(trace_id_16, 1, 8), + sub(trace_id_16, 9, #trace_id_16), + span_id_8_1), + }, + err = "invalid aws header sampled flag; ignoring." 
+ }, { -- injection error cases + description = "missing trace id", + inject = true, + ctx = { + span_id = span_id_8_1, + should_sample = false, + }, + err = "aws injector context is invalid: field trace_id not found in context" + }, { + description = "missing span id", + inject = true, + ctx = { + trace_id = trace_id_16, + should_sample = false, + }, + err = "aws injector context is invalid: field span_id not found in context" + } } +}, { + extractor = "gcp", + injector = "gcp", + headers_data = { { + description = "base case", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["x-cloud-trace-context"] = fmt("%s/%s;o=1", trace_id_16, span_id_8_1_dec), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = true, + trace_id_original_size = 16, + } + }, { + description = "sampled = false", + extract = true, + inject = true, + trace_id = trace_id_16, + headers = { + ["x-cloud-trace-context"] = fmt("%s/%s;o=0", trace_id_16, span_id_8_1_dec), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = false, + trace_id_original_size = 16, + } + }, { + description = "no flag", + extract = true, + inject = false, + trace_id = trace_id_16, + headers = { + ["x-cloud-trace-context"] = fmt("%s/%s", trace_id_16, span_id_8_1_dec), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = false, + trace_id_original_size = 16, + } + }, { + description = "default injection size is 16B", + inject = true, + trace_id = trace_id_16, + headers = { + ["x-cloud-trace-context"] = fmt("%s/%s;o=1", trace_id_16, span_id_8_1_dec), + }, + ctx = { + trace_id = trace_id_16, + span_id = span_id_8_1, + should_sample = true, + } + }, { -- extraction error cases + description = "invalid trace id (too short)", + extract = true, + trace_id = "123", + headers = { + ["x-cloud-trace-context"] = fmt("%s/%s;o=0", "123", span_id_8_1_dec), + }, + err = "invalid GCP header; ignoring." + }, { + description = "invalid trace id (too long)", + extract = true, + trace_id = too_long_id, + headers = { + ["x-cloud-trace-context"] = fmt("%s/%s;o=0", too_long_id, span_id_8_1_dec), + }, + err = "invalid GCP header; ignoring." + }, { + description = "invalid trace id (no hex)", + extract = true, + trace_id = trace_id_8, + headers = { + ["x-cloud-trace-context"] = fmt("vvv/%s;o=0", span_id_8_1_dec), + }, + err = "invalid GCP header; ignoring." + }, { + description = "missing span id", + extract = true, + trace_id = trace_id_8, + headers = { + ["x-cloud-trace-context"] = fmt("%s/;o=0", trace_id_16), + }, + err = "invalid GCP header; ignoring." + }, { + description = "invalid span id (non digit)", + extract = true, + headers = { + ["x-cloud-trace-context"] = fmt("%s/%s;o=0", trace_id_16, span_id_8_1), + }, + err = "invalid GCP header; ignoring." + }, { + description = "invalid span id (too large)", + extract = true, + headers = { + ["x-cloud-trace-context"] = fmt("%s/%s;o=0", trace_id_16, span_id_8_1_dec .. "0"), + }, + err = "invalid GCP header; ignoring." + }, { + description = "invalid sampling value (01)", + extract = true, + trace_id = trace_id_8, + headers = { + ["x-cloud-trace-context"] = fmt("%s/%s;o=01", trace_id_16, span_id_8_1_dec), + }, + err = "invalid GCP header; ignoring." + }, { + description = "invalid sampling value (missing)", + extract = true, + trace_id = trace_id_8, + headers = { + ["x-cloud-trace-context"] = fmt("%s/%s;o=", trace_id_16, span_id_8_1_dec), + }, + err = "invalid GCP header; ignoring." 
+ }, { + description = "invalid sampling value (non digit)", + extract = true, + trace_id = trace_id_8, + headers = { + ["x-cloud-trace-context"] = fmt("%s/%s;o=v", trace_id_16, span_id_8_1_dec), + }, + err = "invalid GCP header; ignoring." + }, { -- injection error cases + description = "missing trace id", + inject = true, + ctx = { + span_id = span_id_8_1, + should_sample = false, + }, + err = "gcp injector context is invalid: field trace_id not found in context" + }, { + description = "missing span id", + inject = true, + ctx = { + trace_id = trace_id_16, + should_sample = false, + }, + err = "gcp injector context is invalid: field span_id not found in context" + } } +} } + + +describe("Tracing Headers Propagation Strategies", function() + local req_headers + local old_kong = _G.kong + + _G.kong = { + log = {}, + service = { + request = { + set_header = function(name, value) + req_headers[name] = value + end, + clear_header = function(name) + req_headers[name] = nil + end, + } + } + } + + local warn + + lazy_setup(function() + warn = spy.on(kong.log, "warn") + end) + + lazy_teardown(function() + _G.kong = old_kong + end) + + for _, data in ipairs(test_data) do + local extractor = data.extractor + local injector = data.injector + local headers_data = data.headers_data + + describe("#" .. extractor .. " extractor and " .. injector .. " injector", function() + local ex = require(EXTRACTORS_PATH .. extractor) + + before_each(function() + warn:clear() + req_headers = {} + end) + + it("handles no incoming headers correctly", function() + local ctx, err = ex:extract({}) + + assert.is_nil(err) + assert.is_nil(ctx) + assert.spy(warn).was_not_called() + end) + + for _, h_info in ipairs(headers_data) do + describe("incoming #" .. extractor .. " headers", function() + lazy_teardown(function() + req_headers = nil + end) + + before_each(function() + req_headers = {} + for h_name, h_value in pairs(h_info.headers) do + req_headers[h_name] = h_value + end + warn:clear() + end) + + if h_info.ctx and h_info.headers and h_info.extract then + it("with " .. h_info.description .. " extracts tracing context", function() + local ctx, err = ex:extract(req_headers) + + assert.is_not_nil(ctx) + assert.is_nil(err) + assert.same(h_info.ctx, to_hex_ids(ctx)) + assert.spy(warn).was_not_called() + end) + + elseif h_info.err and h_info.extract then -- extraction error cases + it("with " .. h_info.description .. " fails", function() + ex:extract(req_headers) + assert.spy(warn).was_called_with(h_info.err) + end) + end + end) + end + end) + + describe("#" .. injector .. " injector", function() + local inj = require(INJECTORS_PATH .. injector) + + for _, h_info in ipairs(headers_data) do + lazy_teardown(function() + req_headers = nil + end) + + before_each(function() + req_headers = {} + warn:clear() + end) + + if h_info.ctx and h_info.headers and h_info.inject then + it("with " .. h_info.description .. " injects tracing context", function() + local formatted_trace_id, err = inj:inject(from_hex_ids(h_info.ctx)) + + assert.is_nil(err) + + -- check formatted trace id (the key has the same name as + -- the extractor) + local format = extractor + assert.same(formatted_trace_id, { + [format] = h_info.trace_id, + }) + + assert.spy(warn).was_not_called() + + -- headers are injected in request correctly + assert.same(h_info.headers, req_headers) + end) + + elseif h_info.err and h_info.inject then -- injection error cases + it("with " .. h_info.description .. 
" fails", function() + local formatted_trace_id, err = inj:inject(from_hex_ids(h_info.ctx)) + assert.is_nil(formatted_trace_id) + assert.equals(h_info.err, err) + end) + end + end + end) + end +end) diff --git a/spec/01-unit/26-tracing/03-propagation_module_spec.lua b/spec/01-unit/26-tracing/03-propagation_module_spec.lua new file mode 100644 index 000000000000..8a918110fd8e --- /dev/null +++ b/spec/01-unit/26-tracing/03-propagation_module_spec.lua @@ -0,0 +1,463 @@ +local propagation_utils = require "kong.tracing.propagation.utils" +local tablex = require "pl.tablex" +local shallow_copy = require "kong.tools.utils".shallow_copy +local to_hex = require "resty.string".to_hex + +local from_hex = propagation_utils.from_hex +local fmt = string.format + + +-- W3C Ids +local trace_id_16_w3c = "0af7651916cd43dd8448eb211c80319c" +local trace_id_8_w3c_dec = "9532127138774266268" -- 8448eb211c80319c to decimal +local span_id_8_w3c = "b7ad6b7169203331" +local span_id_8_w3c_dec = "13235353014750950193" -- b7ad6b7169203331 to decimal + +-- B3 Ids +local trace_id_16_b3 = "dc9d1b0ccedf0ecaf4f26ffab84d4f5e" +local trace_id_8_b3 = "f4f26ffab84d4f5e" +local span_id_8_b3 = "b7ad6b7169203332" +local span_id_8_b3p = "f4f26ffab84d4f5f" + +-- Jaeger Ids +local trace_id_16_jae = "f744b23fe9aa64f08255043ba51848db" +local span_id_8_jae = "f4f26ffab84d4f60" +local span_id_8_jaep = "f4f26ffab84d4f61" + +local padding_prefix = string.rep("0", 16) + +-- apply some transformation to a hex id (affects last byte) +local function transform_hex_id(id) + local max = string.byte("f") + local min = string.byte("0") + + local bytes = { id:byte(1, -1) } + local last_byte = bytes[#bytes] + + last_byte = last_byte + 1 < max and last_byte + 1 or min + bytes[#bytes] = last_byte + return string.char(unpack(bytes)) +end + +-- apply some transformation (same as transform_hex_id above) to a binary id +local function transform_bin_id(bid) + return from_hex(transform_hex_id(to_hex(bid))) +end + +local request_headers_1 = { + traceparent = fmt("00-%s-%s-01", trace_id_16_w3c, span_id_8_w3c), + ["x-b3-traceid"] = trace_id_16_b3, + ["x-b3-spanid"] = span_id_8_b3, + ["x-b3-sampled"] = "1", + ["x-b3-parentspanid"] = span_id_8_b3p, + ["uber-trace-id"] = fmt("%s:%s:%s:%s", trace_id_16_jae, span_id_8_jae, span_id_8_jaep, "01"), +} + +local request_headers_2 = { + ["x-b3-traceid"] = trace_id_8_b3, + ["x-b3-spanid"] = span_id_8_b3, + ["x-b3-sampled"] = "1", + ["x-b3-parentspanid"] = span_id_8_b3p, +} + + +local test_data = { { + description = "extract empty, inject empty (propagation disabled)", + req_headers = request_headers_1, + conf = { + propagation = { + extract = {}, + inject = {}, + } + }, + expected = request_headers_1 +}, { + description = "extract = ignore, inject single heder", + req_headers = request_headers_1, + conf = { + propagation = { + extract = {}, + inject = { "w3c" }, + } + }, + cb = function() + -- no extraction, set some values using the callback + -- (same as what tracing plugins would do) + return { + trace_id = from_hex("0af7651916cd43dd8448eb211c80319d"), + span_id = from_hex("8448eb211c80319e"), + should_sample = true, + } + end, + expected = tablex.merge(request_headers_1, { + traceparent = fmt("00-%s-%s-01", "0af7651916cd43dd8448eb211c80319d", "8448eb211c80319e"), + }, true) +}, { + description = "extract = ignore, inject multiple heders", + req_headers = request_headers_1, + conf = { + propagation = { + extract = {}, + inject = { "w3c", "b3-single" }, + } + }, + cb = function() + -- no extraction, set some values 
using the callback + -- (same as what tracing plugins would do) + return { + trace_id = from_hex("0af7651916cd43dd8448eb211c80319d"), + span_id = from_hex("8448eb211c80319e"), + should_sample = true, + } + end, + expected = tablex.merge(request_headers_1, { + traceparent = fmt("00-%s-%s-01", "0af7651916cd43dd8448eb211c80319d", "8448eb211c80319e"), + b3 = fmt("%s-%s-1", "0af7651916cd43dd8448eb211c80319d", "8448eb211c80319e"), + }, true) +}, { + description = "extract = ignore, inject = preserve, no default setting", + req_headers = request_headers_1, + conf = { + propagation = { + extract = {}, + inject = { "preserve" }, + } + }, + cb = function() + -- no extraction, set some values using the callback + -- (same as what tracing plugins would do) + return { + trace_id = from_hex("0af7651916cd43dd8448eb211c80319d"), + span_id = from_hex("8448eb211c80319e"), + should_sample = true, + } + end, + expected = tablex.merge(request_headers_1, { + traceparent = fmt("00-%s-%s-01", "0af7651916cd43dd8448eb211c80319d", "8448eb211c80319e"), + }, true) +}, { + description = "extract = ignore, inject = preserve, uses default format", + req_headers = request_headers_1, + conf = { + propagation = { + extract = {}, + inject = { "preserve" }, + default_format = "datadog", + } + }, + cb = function() + -- no extraction, set some values using the callback + -- (same as what tracing plugins would do) + return { + trace_id = from_hex("0af7651916cd43dd8448eb211c80319d"), + span_id = from_hex("8448eb211c80319e"), + should_sample = true, + } + end, + expected = tablex.merge(request_headers_1, { + ["x-datadog-trace-id"] = "9532127138774266269", -- 8448eb211c80319d to dec + ["x-datadog-parent-id"] = "9532127138774266270", -- 8448eb211c80319e to dec + ["x-datadog-sampling-priority"] = "1" + }, true) +}, { + description = "extract configured with header not found in request, inject = preserve, uses default format", + req_headers = request_headers_1, + conf = { + propagation = { + extract = { "datadog" }, + inject = { "preserve" }, + default_format = "ot" + } + }, + -- apply some updates to the extracted ctx + cb = function(ctx) + assert.same(ctx, {}) + + ctx.trace_id = from_hex("0af7651916cd43dd8448eb211c80319d") + ctx.span_id = from_hex("8448eb211c80319e") + ctx.should_sample = true + + return ctx + end, + expected = tablex.merge(request_headers_1, { + ["ot-tracer-sampled"] = '1', + ["ot-tracer-spanid"] = '8448eb211c80319e', + ["ot-tracer-traceid"] = '8448eb211c80319d', + }, true) +}, { + description = "extract configured with header found in request, inject = preserve + other formats", + req_headers = request_headers_1, + conf = { + propagation = { + extract = { "b3", "w3c", "jaeger" }, + inject = { "w3c", "preserve", "b3-single" }, + } + }, + -- apply some updates to the extracted ctx + cb = function(ctx) + ctx.trace_id = transform_bin_id(ctx.trace_id) + ctx.span_id = transform_bin_id(ctx.span_id) + ctx.parent_id = transform_bin_id(ctx.parent_id) + return ctx + end, + expected = tablex.merge(request_headers_1, { + traceparent = fmt("00-%s-%s-01", transform_hex_id(trace_id_16_b3), transform_hex_id(span_id_8_b3)), + ["x-b3-traceid"] = transform_hex_id(trace_id_16_b3), + ["x-b3-spanid"] = transform_hex_id(span_id_8_b3), + ["x-b3-sampled"] = "1", + ["x-b3-parentspanid"] = transform_hex_id(span_id_8_b3p), + b3 = fmt("%s-%s-1-%s", transform_hex_id(trace_id_16_b3), transform_hex_id(span_id_8_b3), + transform_hex_id(span_id_8_b3p)), + }, true) +}, { + description = "extract configured with header formats, injection disabled", + 
req_headers = request_headers_1, + conf = { + propagation = { + extract = { "gcp", "aws", "ot", "datadog", "b3", "w3c", "jaeger" }, + inject = {}, + } + }, + cb = function() + return { + trace_id = from_hex("abcdef"), + span_id = from_hex("123fff"), + should_sample = true, + } + end, + expected = request_headers_1 +}, { + description = "extract configured with header formats, b3 first", + req_headers = request_headers_1, + conf = { + propagation = { + extract = { "b3", "w3c", "jaeger" }, + inject = { "w3c" }, + } + }, + -- apply some updates to the extracted ctx + cb = function(ctx) + ctx.trace_id = transform_bin_id(ctx.trace_id) + ctx.span_id = transform_bin_id(ctx.span_id) + return ctx + end, + expected = tablex.merge(request_headers_1, { + traceparent = fmt("00-%s-%s-01", transform_hex_id(trace_id_16_b3), transform_hex_id(span_id_8_b3)), + }, true) +}, { + description = "extract configured with header formats, w3c first", + req_headers = request_headers_1, + conf = { + propagation = { + extract = { "w3c", "b3", "jaeger" }, + inject = { "w3c" }, + } + }, + -- apply some updates to the extracted ctx + cb = function(ctx) + ctx.trace_id = transform_bin_id(ctx.trace_id) + ctx.span_id = transform_bin_id(ctx.span_id) + return ctx + end, + expected = tablex.merge(request_headers_1, { + traceparent = fmt("00-%s-%s-01", transform_hex_id(trace_id_16_w3c), transform_hex_id(span_id_8_w3c)), + }, true) +}, { + description = "extract configured with header formats, missing first header", + req_headers = request_headers_1, + conf = { + propagation = { + extract = { "datadog", "jaeger", "b3" }, + inject = { "w3c" }, + } + }, + -- apply some updates to the extracted ctx + cb = function(ctx) + ctx.trace_id = transform_bin_id(ctx.trace_id) + ctx.span_id = transform_bin_id(ctx.span_id) + return ctx + end, + expected = tablex.merge(request_headers_1, { + traceparent = fmt("00-%s-%s-01", transform_hex_id(trace_id_16_jae), transform_hex_id(span_id_8_jae)), + }, true) +}, { + description = "extract configured with header formats, multiple injection", + req_headers = request_headers_1, + conf = { + propagation = { + extract = { "w3c", "b3", "jaeger" }, + inject = { "datadog", "w3c" }, + } + }, + -- apply some updates to the extracted ctx + cb = function(ctx) + ctx.trace_id = transform_bin_id(ctx.trace_id) + ctx.span_id = transform_bin_id(ctx.span_id) + return ctx + end, + expected = tablex.merge(request_headers_1, { + traceparent = fmt("00-%s-%s-01", transform_hex_id(trace_id_16_w3c), transform_hex_id(span_id_8_w3c)), + ["x-datadog-trace-id"] = transform_hex_id(trace_id_8_w3c_dec), + ["x-datadog-parent-id"] = transform_hex_id(span_id_8_w3c_dec), + ["x-datadog-sampling-priority"] = "1" + }, true) +}, { + description = "extract = b3, 64b id, inject = b3 and w3c", + req_headers = request_headers_2, + conf = { + propagation = { + extract = { "b3", }, + inject = { "w3c", "b3" }, + } + }, + -- apply some updates to the extracted ctx + cb = function(ctx) + ctx.trace_id = transform_bin_id(ctx.trace_id) + ctx.span_id = transform_bin_id(ctx.span_id) + ctx.parent_id = transform_bin_id(ctx.parent_id) + return ctx + end, + expected = tablex.merge(request_headers_2, { + traceparent = fmt("00-%s-%s-01", transform_hex_id(padding_prefix .. 
trace_id_8_b3), transform_hex_id(span_id_8_b3)), + ["x-b3-traceid"] = transform_hex_id(trace_id_8_b3), -- 64b (same as incoming) + ["x-b3-spanid"] = transform_hex_id(span_id_8_b3), + ["x-b3-sampled"] = "1", + ["x-b3-parentspanid"] = transform_hex_id(span_id_8_b3p), + }, true) +}, { + description = "extract = b3, 128b id, inject = b3 and w3c", + req_headers = request_headers_1, + conf = { + propagation = { + extract = { "b3", }, + inject = { "w3c", "b3" }, + } + }, + -- apply some updates to the extracted ctx + cb = function(ctx) + ctx.trace_id = transform_bin_id(ctx.trace_id) + ctx.span_id = transform_bin_id(ctx.span_id) + ctx.parent_id = transform_bin_id(ctx.parent_id) + return ctx + end, + expected = tablex.merge(request_headers_1, { + traceparent = fmt("00-%s-%s-01", transform_hex_id(trace_id_16_b3), transform_hex_id(span_id_8_b3)), + ["x-b3-traceid"] = transform_hex_id(trace_id_16_b3), -- 128b (same as incoming) + ["x-b3-spanid"] = transform_hex_id(span_id_8_b3), + ["x-b3-sampled"] = "1", + ["x-b3-parentspanid"] = transform_hex_id(span_id_8_b3p), + }, true) +}, { + description = "extract configured with header formats, inject = preserve (matches jaeger)", + req_headers = request_headers_1, + conf = { + propagation = { + extract = { "datadog", "jaeger", "b3" }, + inject = { "preserve" }, + } + }, + -- apply some updates to the extracted ctx + cb = function(ctx) + ctx.trace_id = transform_bin_id(ctx.trace_id) + ctx.span_id = transform_bin_id(ctx.span_id) + ctx.parent_id = transform_bin_id(ctx.parent_id) + return ctx + end, + expected = tablex.merge(request_headers_1, { + ["uber-trace-id"] = fmt("%s:%s:%s:%s", transform_hex_id(trace_id_16_jae), transform_hex_id(span_id_8_jae), + transform_hex_id(span_id_8_jaep), "01"), + }, true) +}, { + description = "clear = b3 and w3c", + req_headers = request_headers_1, + conf = { + propagation = { + extract = { "datadog", "jaeger", "b3", "w3c" }, + inject = { "preserve" }, + clear = { + "x-b3-traceid", + "x-b3-spanid", + "x-b3-sampled", + "x-b3-parentspanid", + "traceparent" + } + } + }, + -- apply some updates to the extracted ctx + cb = function(ctx) + ctx.trace_id = transform_bin_id(ctx.trace_id) + ctx.span_id = transform_bin_id(ctx.span_id) + ctx.parent_id = transform_bin_id(ctx.parent_id) + return ctx + end, + expected = { + ["uber-trace-id"] = fmt("%s:%s:%s:%s", transform_hex_id(trace_id_16_jae), transform_hex_id(span_id_8_jae), + transform_hex_id(span_id_8_jaep), "01"), + } +} } + + + +describe("Tracing Headers Propagation Module", function() + local warn, err, set_serialize_value, req_headers + local old_get_headers = _G.ngx.req.get_headers + local old_kong = _G.kong + + _G.ngx.req.get_headers = function() + return req_headers + end + + _G.kong = { + ctx = { + plugin = {}, + }, + log = {}, + service = { + request = { + set_header = function(name, value) + req_headers[name] = value + end, + clear_header = function(name) + req_headers[name] = nil + end, + } + } + } + local propagation = require "kong.tracing.propagation" + + lazy_setup(function() + err = spy.on(kong.log, "err") + warn = spy.on(kong.log, "warn") + set_serialize_value = spy.on(kong.log, "set_serialize_value") + end) + + lazy_teardown(function() + _G.kong = old_kong + _G.ngx.req.get_headers = old_get_headers + end) + + describe("propagate() function with", function() + before_each(function() + warn:clear() + err:clear() + set_serialize_value:clear() + end) + + for _, t in ipairs(test_data) do + it(t.description .. 
" updates headers correctly", function() + local conf = t.conf + local expected = t.expected + req_headers = shallow_copy(t.req_headers) + + propagation.propagate( + propagation.get_plugin_params(conf), + t.cb or function(c) return c end + ) + + assert.spy(err).was_not_called() + assert.spy(warn).was_not_called() + assert.same(expected, req_headers) + end) + end + end) +end) diff --git a/spec/01-unit/26-tracing/03-request-id_spec.lua b/spec/01-unit/26-tracing/04-request-id_spec.lua similarity index 100% rename from spec/01-unit/26-tracing/03-request-id_spec.lua rename to spec/01-unit/26-tracing/04-request-id_spec.lua diff --git a/spec/02-integration/14-tracing/02-propagation_spec.lua b/spec/02-integration/14-tracing/02-propagation_spec.lua index 9ddfadb55c66..7387e3e8a3b7 100644 --- a/spec/02-integration/14-tracing/02-propagation_spec.lua +++ b/spec/02-integration/14-tracing/02-propagation_spec.lua @@ -2,104 +2,135 @@ local helpers = require "spec.helpers" local cjson = require "cjson" local utils = require "kong.tools.utils" local to_hex = require("resty.string").to_hex +local from_hex = require 'kong.tracing.propagation.utils'.from_hex local rand_bytes = utils.get_rand_bytes -local TCP_PORT = 35001 - local function gen_id(len) return to_hex(rand_bytes(len)) end + +-- modifies the last byte of an ID +local function transform_bin_id(id, last_byte) + if not id then + return + end + local bytes = {string.byte(id, 1, #id)} + bytes[#bytes] = string.byte(last_byte) + return string.char(unpack(bytes)) +end + +local function generate_function_plugin_config(propagation_config, trace_id, span_id) + local extract = propagation_config.extract or "nil" + local inject = propagation_config.inject or "nil" + local clear = propagation_config.clear or "nil" + local default_format = propagation_config.default_format or "nil" + + return { + access = { + string.format([[ + local propagation = require 'kong.tracing.propagation' + local from_hex = require 'kong.tracing.propagation.utils'.from_hex + + local function transform_bin_id(id, last_byte) + if not id then + return + end + local bytes = {string.byte(id, 1, #id)} + bytes[#bytes] = string.byte(last_byte) + return string.char(unpack(bytes)) + end + + propagation.propagate( + propagation.get_plugin_params( + { + propagation = { + extract = %s, + inject = %s, + clear = %s, + default_format = %s, + } + } + ), + function(ctx) + -- create or modify the context so we can validate it later + + if not ctx.trace_id then + ctx.trace_id = from_hex("%s") + else + ctx.trace_id = transform_bin_id(ctx.trace_id, from_hex("0")) + end + + if not ctx.span_id then + ctx.span_id = from_hex("%s") + ngx.log(ngx.ERR, "generated span_id: " .. ctx.span_id) + else + ctx.span_id = transform_bin_id(ctx.span_id, from_hex("0")) + ngx.log(ngx.ERR, "transformed span_id: " .. ctx.span_id) + end + + if ctx.parent_id then + ctx.span_id = transform_bin_id(ctx.parent_id, from_hex("0")) + ngx.log(ngx.ERR, "transformed span_id: " .. ctx.span_id) + end + + ctx.should_sample=true + + return ctx + end + ) + ]], extract, inject, clear, default_format, trace_id, span_id), + }, + } +end + for _, strategy in helpers.each_strategy() do local proxy_client describe("tracing propagation spec #" .. 
strategy, function() - lazy_setup(function() - local bp, _ = assert(helpers.get_db_utils(strategy, { - "routes", - "plugins", - }, { "tcp-trace-exporter", "trace-propagator" })) - - bp.routes:insert({ - hosts = { "propagate.test" }, - }) - - bp.plugins:insert({ - name = "tcp-trace-exporter", - config = { - host = "127.0.0.1", - port = TCP_PORT, - custom_spans = false, - } - }) - - bp.plugins:insert({ - name = "trace-propagator" - }) - end) - describe("spans hierarchy", function () + describe("parsing incoming headers with multiple plugins", function () + local trace_id, span_id + lazy_setup(function() - assert(helpers.start_kong { - database = strategy, - nginx_conf = "spec/fixtures/custom_nginx.template", - plugins = "tcp-trace-exporter,trace-propagator", - tracing_instrumentations = "balancer", - tracing_sampling_rate = 1, + trace_id = gen_id(16) + span_id = gen_id(8) + local bp, _ = assert(helpers.get_db_utils(strategy, { + "routes", + "plugins", + })) + + local multi_plugin_route = bp.routes:insert({ + hosts = { "propagate.test" }, }) - proxy_client = helpers.proxy_client() - end) - lazy_teardown(function() - if proxy_client then - proxy_client:close() - end - helpers.stop_kong() - end) - - it("propagates the balancer span", function () - local thread = helpers.tcp_server(TCP_PORT) - local r = assert(proxy_client:send { - method = "GET", - path = "/request", - headers = { - ["Host"] = "propagate.test", - } + bp.plugins:insert({ + name = "pre-function", + route = multi_plugin_route, + config = generate_function_plugin_config({ + extract = "{}", -- ignores incoming + inject = '{ "preserve" }', -- falls back to default + default_format = '"b3-single"', -- defaults to b3 + }, trace_id, span_id), }) - assert.res_status(200, r) - local body = r:read_body() - body = assert(body and cjson.decode(body)) - - local ok, res = thread:join() - assert.True(ok) - assert.is_string(res) - - -- expected spans are returned - local spans = cjson.decode(res) - assert.is_same(2, #spans, res) - local balancer_span = spans[2] - assert.is_same("kong.balancer", balancer_span.name) - - local traceparent = assert(body.headers.traceparent) - local trace_id = balancer_span.trace_id - local span_id = balancer_span.span_id - -- traceparent contains correct trace id and the balancer span's id - assert.equals("00-" .. trace_id .. "-" .. span_id .. 
"-01", traceparent) - end) - end) - describe("parsing incoming headers", function () - local trace_id = gen_id(16) - local span_id = gen_id(8) + bp.plugins:insert({ + name = "post-function", + route = multi_plugin_route, + config = generate_function_plugin_config({ + extract = '{ "w3c", "b3" }', -- reads b3 + inject = '{ "w3c" }', -- and injects w3c + default_format = "datadog", -- default not used here + clear = '{ "ot-tracer-spanid" }', -- clears this header + }), + }) - lazy_setup(function() - assert(helpers.start_kong { + helpers.start_kong({ database = strategy, + plugins = "bundled", nginx_conf = "spec/fixtures/custom_nginx.template", - plugins = "tcp-trace-exporter,trace-propagator", - tracing_instrumentations = "request,router,balancer,plugin_access,plugin_header_filter", - tracing_sampling_rate = 1, + untrusted_lua = "on", }) proxy_client = helpers.proxy_client() end) @@ -111,56 +142,27 @@ for _, strategy in helpers.each_strategy() do helpers.stop_kong() end) - it("enables sampling when incoming header has sampled enabled", function () - local thread = helpers.tcp_server(TCP_PORT) - local r = assert(proxy_client:send { - method = "GET", - path = "/request", + it("propagates and clears as expected", function() + local r = proxy_client:get("/", { headers = { - ["Host"] = "propagate.test", - traceparent = string.format("00-%s-%s-01", trace_id, span_id), - } + ["ot-tracer-traceid"] = gen_id(16), + ["ot-tracer-spanid"] = gen_id(8), + ["ot-tracer-sampled"] = "0", + host = "propagate.test", + }, }) - assert.res_status(200, r) - local body = r:read_body() - body = assert(body and cjson.decode(body)) - - local ok, res = thread:join() - assert.True(ok) - assert.is_string(res) - - -- all spans are returned - local spans = cjson.decode(res) - assert.is_same(6, #spans, res) - - local traceparent = assert(body.headers.traceparent) - assert.matches("00%-" .. trace_id .. "%-%x+%-01", traceparent) - end) - - it("disables sampling when incoming header has sampled disabled", function () - local thread = helpers.tcp_server(TCP_PORT) - local r = assert(proxy_client:send { - method = "GET", - path = "/request", - headers = { - ["Host"] = "propagate.test", - traceparent = string.format("00-%s-%s-00", trace_id, span_id), - } - }) - assert.res_status(200, r) - local body = r:read_body() - body = assert(body and cjson.decode(body)) - - local ok, res = thread:join() - assert.True(ok) - assert.is_string(res) - - -- no spans are returned - local spans = cjson.decode(res) - assert.is_same(0, #spans, res) - local traceparent = assert(body.headers.traceparent) - assert.matches("00%-" .. trace_id .. "%-%x+%-00", traceparent) + local body = assert.response(r).has.status(200) + local json = cjson.decode(body) + + assert.equals(trace_id .. "-" .. span_id .. "-1", json.headers.b3) + local expected_trace_id = to_hex(transform_bin_id(from_hex(trace_id), from_hex("0"))) + local expected_span_id = to_hex(transform_bin_id(from_hex(span_id), from_hex("0"))) + assert.equals("00-" .. expected_trace_id .. "-" .. expected_span_id .. 
"-01", json.headers.traceparent) + -- initial header remained unchanged + assert.equals("0", json.headers["ot-tracer-sampled"]) + -- header configured to be cleared was cleared + assert.is_nil(json.headers["ot-tracer-spanid"]) end) end) end) diff --git a/spec/03-plugins/02-legacy_propagation_parameter_warning_spec.lua b/spec/03-plugins/02-legacy_propagation_parameter_warning_spec.lua new file mode 100644 index 000000000000..88e8a487ec57 --- /dev/null +++ b/spec/03-plugins/02-legacy_propagation_parameter_warning_spec.lua @@ -0,0 +1,124 @@ +local cjson = require "cjson" +local helpers = require "spec.helpers" + + +for _, strategy in helpers.each_strategy() do + describe("legacy propagation parameters [#" .. strategy .. "]", function() + local db + local admin_client + + lazy_setup(function() + -- Create a service to make sure that our database is initialized properly. + local bp + bp, db = helpers.get_db_utils(strategy, { + "services", + }) + + db:truncate() + + bp.services:insert{ + protocol = "http", + host = helpers.mock_upstream_host, + port = helpers.mock_upstream_port, + } + assert(helpers.start_kong({ + database = strategy, + nginx_conf = "spec/fixtures/custom_nginx.template", + })) + admin_client = helpers.admin_client() + end) + + lazy_teardown(function() + if admin_client then + admin_client:close() + end + helpers.stop_kong() + end) + + before_each(function() + helpers.clean_logfile() + end) + + local plugin_id + + after_each(function() + if plugin_id then + local res = admin_client:delete("/plugins/" .. plugin_id) + assert.res_status(204, res) + end + end) + + local plugins = { + ["zipkin"] = { + http_endpoint = "http://example.com/", + }, + ["opentelemetry"] = { + endpoint = "http://example.com/", + }, + } + + for plugin, base_config in pairs(plugins) do + + local function create_plugin(parameter, value) + local config = table.clone(base_config) + if parameter then + config[parameter] = value + end + + local res = admin_client:post( + "/plugins", + { + headers = { + ["Content-Type"] = "application/json" + }, + body = cjson.encode({ + name = plugin, + config = config + }) + } + ) + local body = cjson.decode(assert.res_status(201, res)) + plugin_id = body.id + end + + local log_wait_time = 0.01 + describe("[#" .. plugin .. "]", function() + it("no unexpected propagation parameter deprecation warnings by default", function() + create_plugin() + assert.logfile().has.no.line("is deprecated, please use config.queue", true, log_wait_time) + end) + + local parameters = { header_type = { + default_value = "preserve", + test_values = { "jaeger", "w3c", "ignore" } + } } + + if plugin == "zipkin" then + parameters.default_header_type = { + default_value = "b3", + test_values = { "ot", "aws", "datadog" } + } + end + + for parameter, values in pairs(parameters) do + local default_value = values.default_value + local test_values = values.test_values + local expected_warning = "config." .. parameter .. " is deprecated, please use config.propagation" + + it ("does not warn when " .. parameter .. " is set to the old default " .. tostring(default_value), function() + create_plugin(parameter, default_value) + assert.logfile().has.no.line(expected_warning, true, log_wait_time) + end) + + for _, test_value in ipairs(test_values) do + it ("does warn when " .. parameter .. " is set to a value different from the old default " + .. tostring(default_value) .. " (" .. tostring(test_value) .. 
")", function() + create_plugin(parameter, test_value) + assert.logfile().has.line(expected_warning, true, log_wait_time) + end) + end + end + end) + end + end) +end diff --git a/spec/03-plugins/34-zipkin/zipkin_no_endpoint_spec.lua b/spec/03-plugins/34-zipkin/zipkin_no_endpoint_spec.lua index e054111d6023..d4a310200dd7 100644 --- a/spec/03-plugins/34-zipkin/zipkin_no_endpoint_spec.lua +++ b/spec/03-plugins/34-zipkin/zipkin_no_endpoint_spec.lua @@ -5,7 +5,6 @@ local to_hex = require "resty.string".to_hex local fmt = string.format local W3C_TRACE_ID_HEX_LEN = 32 -local OT_TRACE_ID_HEX_LEN = 32 local function gen_trace_id(traceid_byte_count) @@ -154,12 +153,14 @@ describe("http integration tests with zipkin server (no http_endpoint) [#" }) local body = assert.response(r).has.status(200) local json = cjson.decode(body) + local expected_len = traceid_byte_count * 2 + -- Trace ID is left padded with 0 for assert - assert.matches( ('0'):rep(32-#trace_id) .. trace_id .. ":%x+:" .. span_id .. ":01", json.headers["uber-trace-id"]) + assert.matches( ('0'):rep(expected_len-#trace_id) .. trace_id .. ":%x+:" .. span_id .. ":01", json.headers["uber-trace-id"]) end) it("propagates ot headers", function() - local trace_id = gen_trace_id(8) + local trace_id = gen_trace_id(traceid_byte_count) local span_id = gen_span_id() local r = proxy_client:get("/", { headers = { @@ -172,7 +173,8 @@ describe("http integration tests with zipkin server (no http_endpoint) [#" local body = assert.response(r).has.status(200) local json = cjson.decode(body) - assert.equals(to_id_len(trace_id, OT_TRACE_ID_HEX_LEN), json.headers["ot-tracer-traceid"]) + local expected_len = traceid_byte_count * 2 + assert.equals(to_id_len(trace_id, expected_len), json.headers["ot-tracer-traceid"]) end) end) end diff --git a/spec/03-plugins/34-zipkin/zipkin_spec.lua b/spec/03-plugins/34-zipkin/zipkin_spec.lua index 12543bb70922..9d4889c87fb9 100644 --- a/spec/03-plugins/34-zipkin/zipkin_spec.lua +++ b/spec/03-plugins/34-zipkin/zipkin_spec.lua @@ -5,10 +5,16 @@ local to_hex = require "resty.string".to_hex local fmt = string.format -local OT_TRACE_ID_HEX_LEN = 32 local ZIPKIN_HOST = helpers.zipkin_host local ZIPKIN_PORT = helpers.zipkin_port +local http_route_host = "http-route" +local http_route_ignore_host = "http-route-ignore" +local http_route_w3c_host = "http-route-w3c" +local http_route_dd_host = "http-route-dd" +local http_route_clear_host = "http-clear-route" +local http_route_no_preserve_host = "http-no-preserve-route" + -- Transform zipkin annotations into a hash of timestamps. 
It assumes no repeated values -- input: { { value = x, timestamp = y }, { value = x2, timestamp = y2 } } -- output: { x = y, x2 = y2 } @@ -212,11 +218,14 @@ for _, strategy in helpers.each_strategy() do } }) - -- enable zipkin on the service, with sample_ratio = 1 + -- enable zipkin on the route, with sample_ratio = 1 -- this should generate traces, even if there is another plugin with sample_ratio = 0 bp.plugins:insert({ name = "zipkin", - service = { id = service.id }, + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_host }, + }).id}, config = { sample_ratio = 1, http_endpoint = fmt("http://%s:%d/api/v2/spans", ZIPKIN_HOST, ZIPKIN_PORT), @@ -601,9 +610,190 @@ for _, strategy in helpers.each_strategy() do end) end +local function setup_zipkin_old_propagation(bp, service, traceid_byte_count) + -- enable zipkin plugin globally pointing to mock server + bp.plugins:insert({ + name = "zipkin", + -- enable on TCP as well (by default it is only enabled on http, https, grpc, grpcs) + protocols = { "http", "https", "tcp", "tls", "grpc", "grpcs" }, + config = { + sample_ratio = 1, + http_endpoint = fmt("http://%s:%d/api/v2/spans", ZIPKIN_HOST, ZIPKIN_PORT), + traceid_byte_count = traceid_byte_count, + static_tags = { + { name = "static", value = "ok" }, + }, + default_header_type = "b3-single", + } + }) + + -- header_type = "ignore", def w3c + bp.plugins:insert({ + name = "zipkin", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_ignore_host }, + }).id}, + config = { + sample_ratio = 1, + http_endpoint = fmt("http://%s:%d/api/v2/spans", ZIPKIN_HOST, ZIPKIN_PORT), + header_type = "ignore", + default_header_type = "w3c", + } + }) + + -- header_type = "w3c" + bp.plugins:insert({ + name = "zipkin", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_w3c_host }, + }).id}, + config = { + sample_ratio = 1, + http_endpoint = fmt("http://%s:%d/api/v2/spans", ZIPKIN_HOST, ZIPKIN_PORT), + header_type = "w3c", + default_header_type = "b3-single", + } + }) + + -- header_type = "datadog" + bp.plugins:insert({ + name = "zipkin", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_dd_host }, + }).id}, + config = { + sample_ratio = 1, + http_endpoint = fmt("http://%s:%d/api/v2/spans", ZIPKIN_HOST, ZIPKIN_PORT), + header_type = "datadog", + default_header_type = "datadog", + } + }) +end + +local function setup_zipkin_new_propagation(bp, service, traceid_byte_count) + -- enable zipkin plugin globally pointing to mock server + bp.plugins:insert({ + name = "zipkin", + -- enable on TCP as well (by default it is only enabled on http, https, grpc, grpcs) + protocols = { "http", "https", "tcp", "tls", "grpc", "grpcs" }, + config = { + sample_ratio = 1, + http_endpoint = fmt("http://%s:%d/api/v2/spans", ZIPKIN_HOST, ZIPKIN_PORT), + traceid_byte_count = traceid_byte_count, + static_tags = { + { name = "static", value = "ok" }, + }, + propagation = { + extract = { "b3", "w3c", "jaeger", "ot", "datadog", "aws", "gcp" }, + inject = { "preserve" }, + default_format = "b3-single", + }, + } + }) + + -- header_type = "ignore", def w3c + bp.plugins:insert({ + name = "zipkin", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_ignore_host }, + }).id}, + config = { + sample_ratio = 1, + http_endpoint = fmt("http://%s:%d/api/v2/spans", ZIPKIN_HOST, ZIPKIN_PORT), + propagation = { + extract = { }, + inject = { "preserve" }, + default_format = "w3c", + }, + } + }) + + -- header_type = "w3c" + 
bp.plugins:insert({ + name = "zipkin", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_w3c_host }, + }).id}, + config = { + sample_ratio = 1, + http_endpoint = fmt("http://%s:%d/api/v2/spans", ZIPKIN_HOST, ZIPKIN_PORT), + propagation = { + extract = { "b3", "w3c", "jaeger", "ot", "datadog", "aws", "gcp" }, + inject = { "preserve", "w3c" }, + default_format = "b3-single", + }, + } + }) + + -- header_type = "datadog" + bp.plugins:insert({ + name = "zipkin", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_dd_host }, + }).id}, + config = { + sample_ratio = 1, + http_endpoint = fmt("http://%s:%d/api/v2/spans", ZIPKIN_HOST, ZIPKIN_PORT), + propagation = { + extract = { "b3", "w3c", "jaeger", "ot", "aws", "datadog", "gcp" }, + inject = { "preserve", "datadog" }, + default_format = "datadog", + }, + } + }) + + -- available with new configuration only: + -- no preserve + bp.plugins:insert({ + name = "zipkin", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_no_preserve_host }, + }).id}, + config = { + sample_ratio = 1, + http_endpoint = fmt("http://%s:%d/api/v2/spans", ZIPKIN_HOST, ZIPKIN_PORT), + propagation = { + extract = { "b3" }, + inject = { "w3c" }, + default_format = "w3c", + } + } + }) + + --clear + bp.plugins:insert({ + name = "zipkin", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_clear_host }, + }).id}, + config = { + sample_ratio = 1, + http_endpoint = fmt("http://%s:%d/api/v2/spans", ZIPKIN_HOST, ZIPKIN_PORT), + propagation = { + extract = { "w3c", "ot" }, + inject = { "preserve" }, + clear = { + "ot-tracer-traceid", + "ot-tracer-spanid", + "ot-tracer-sampled", + }, + default_format = "b3", + } + } + }) +end for _, strategy in helpers.each_strategy() do for _, traceid_byte_count in ipairs({ 8, 16 }) do +for _, propagation_config in ipairs({"old", "new"}) do describe("http integration tests with zipkin server [#" .. strategy .. "] traceid_byte_count: " .. traceid_byte_count, function() @@ -617,31 +807,21 @@ describe("http integration tests with zipkin server [#" lazy_setup(function() local bp = helpers.get_db_utils(strategy, { "services", "routes", "plugins" }) - -- enable zipkin plugin globally pointing to mock server - bp.plugins:insert({ - name = "zipkin", - -- enable on TCP as well (by default it is only enabled on http, https, grpc, grpcs) - protocols = { "http", "https", "tcp", "tls", "grpc", "grpcs" }, - config = { - sample_ratio = 1, - http_endpoint = fmt("http://%s:%d/api/v2/spans", ZIPKIN_HOST, ZIPKIN_PORT), - traceid_byte_count = traceid_byte_count, - static_tags = { - { name = "static", value = "ok" }, - }, - default_header_type = "b3-single", - } - }) - service = bp.services:insert { name = string.lower("http-" .. utils.random_string()), } + if propagation_config == "old" then + setup_zipkin_old_propagation(bp, service, traceid_byte_count) + else + setup_zipkin_new_propagation(bp, service, traceid_byte_count) + end + -- kong (http) mock upstream route = bp.routes:insert({ name = string.lower("route-" .. 
utils.random_string()),
         service = service,
-        hosts = { "http-route" },
+        hosts = { http_route_host },
         preserve_host = true,
       })
@@ -695,7 +875,7 @@ describe("http integration tests with zipkin server [#"
     local r = proxy_client:get("/", {
       headers = {
         ["x-b3-sampled"] = "1",
-        host = "http-route",
+        host = http_route_host,
         ["zipkin-tags"] = "foo=bar; baz=qux"
       },
     })
@@ -718,7 +898,7 @@ describe("http integration tests with zipkin server [#"
         ["http.path"] = "/",
         ["http.status_code"] = "200", -- found (matches server status)
         ["http.protocol"] = "HTTP/1.1",
-        ["http.host"] = "http-route",
+        ["http.host"] = http_route_host,
         lc = "kong",
         static = "ok",
         foo = "bar",
@@ -1039,7 +1219,7 @@ describe("http integration tests with zipkin server [#"
       local r = proxy_client:get("/", {
         headers = {
           b3 = fmt("%s-%s-%s-%s", trace_id, span_id, "1", parent_id),
-          host = "http-route",
+          host = http_route_host,
         },
       })
       local body = assert.response(r).has.status(200)
@@ -1071,7 +1251,7 @@ describe("http integration tests with zipkin server [#"
       local r = proxy_client:get("/", {
         headers = {
           b3 = fmt("%s-%s-1", trace_id, span_id),
-          host = "http-route",
+          host = http_route_host,
         },
       })
       local body = assert.response(r).has.status(200)
@@ -1093,6 +1273,7 @@ describe("http integration tests with zipkin server [#"
       assert.equals(trace_id, balancer_span.traceId)
       assert.not_equals(span_id, balancer_span.id)
       assert.equals(span_id, balancer_span.parentId)
+
     end)
 
     it("works with only trace_id and span_id", function()
@@ -1103,7 +1284,7 @@ describe("http integration tests with zipkin server [#"
         headers = {
           b3 = fmt("%s-%s", trace_id, span_id),
           ["x-b3-sampled"] = "1",
-          host = "http-route",
+          host = http_route_host,
         },
       })
       local body = assert.response(r).has.status(200)
@@ -1161,7 +1342,7 @@ describe("http integration tests with zipkin server [#"
       local r = proxy_client:get("/", {
         headers = {
           traceparent = fmt("00-%s-%s-01", trace_id, parent_id),
-          host = "http-route"
+          host = http_route_host
         },
       })
       local body = assert.response(r).has.status(200)
@@ -1212,12 +1393,13 @@ describe("http integration tests with zipkin server [#"
       local r = proxy_client:get("/", {
         headers = {
           ["uber-trace-id"] = fmt("%s:%s:%s:%s", trace_id, span_id, parent_id, "1"),
-          host = "http-route"
+          host = http_route_host
         },
       })
       local body = assert.response(r).has.status(200)
       local json = cjson.decode(body)
-      assert.matches(('0'):rep(32-#trace_id) .. trace_id .. ":%x+:" .. span_id .. ":01", json.headers["uber-trace-id"])
+      local expected_len = traceid_byte_count * 2
+      assert.matches(('0'):rep(expected_len-#trace_id) .. trace_id .. ":%x+:" .. span_id .. ":01", json.headers["uber-trace-id"])
 
       local spans = wait_for_spans(zipkin_client, 3, nil, trace_id)
       local balancer_span = assert(get_span("get (balancer try 1)", spans), "balancer span missing")
@@ -1266,7 +1448,7 @@ describe("http integration tests with zipkin server [#"
   describe("ot header propagation", function()
     it("works on regular calls", function()
-      local trace_id = gen_trace_id(8)
+      local trace_id = gen_trace_id(traceid_byte_count)
       local span_id = gen_span_id()
 
       local r = proxy_client:get("/", {
@@ -1274,13 +1456,14 @@ describe("http integration tests with zipkin server [#"
           ["ot-tracer-traceid"] = trace_id,
           ["ot-tracer-spanid"] = span_id,
           ["ot-tracer-sampled"] = "1",
-          host = "http-route",
+          host = http_route_host,
         },
       })
       local body = assert.response(r).has.status(200)
       local json = cjson.decode(body)
 
-      assert.equals(to_id_len(trace_id, OT_TRACE_ID_HEX_LEN), json.headers["ot-tracer-traceid"])
+      local expected_len = traceid_byte_count * 2
+      assert.equals(to_id_len(trace_id, expected_len), json.headers["ot-tracer-traceid"])
 
       local spans = wait_for_spans(zipkin_client, 3, nil, trace_id)
       local balancer_span = assert(get_span("get (balancer try 1)", spans), "balancer span missing")
@@ -1321,7 +1504,7 @@ describe("http integration tests with zipkin server [#"
       local r = proxy_client:get("/", {
         headers = {
           -- no tracing header
-          host = "http-route"
+          host = http_route_host
         },
       })
       local body = assert.response(r).has.status(200)
@@ -1329,6 +1512,115 @@ describe("http integration tests with zipkin server [#"
       assert.not_nil(json.headers.b3)
     end)
   end)
+
+  describe("propagation configuration", function()
+    it("ignores incoming headers and uses default type", function()
+      local trace_id = gen_trace_id(16)
+      local r = proxy_client:get("/", {
+        headers = {
+          ["x-b3-sampled"] = "1",
+          ["x-b3-traceid"] = trace_id,
+          host = http_route_ignore_host,
+        },
+      })
+      local body = assert.response(r).has.status(200)
+      local json = cjson.decode(body)
+      -- uses default type
+      assert.is_not_nil(json.headers.traceparent)
+      -- incoming trace id is ignored
+      assert.not_matches("00%-" .. trace_id .. "%-%x+-01", json.headers.traceparent)
+    end)
+
+    it("propagates w3c tracing headers + incoming format (preserve + w3c)", function()
+      local trace_id = gen_trace_id(16)
+      local span_id = gen_span_id()
+      local parent_id = gen_span_id()
+
+      local r = proxy_client:get("/", {
+        headers = {
+          b3 = fmt("%s-%s-1-%s", trace_id, span_id, parent_id),
+          host = http_route_w3c_host
+        },
+      })
+      local body = assert.response(r).has.status(200)
+      local json = cjson.decode(body)
+
+      assert.matches("00%-" .. trace_id .. "%-%x+-01", json.headers.traceparent)
+      -- incoming b3 is modified
+      assert.not_equals(fmt("%s-%s-1-%s", trace_id, span_id, parent_id), json.headers.b3)
+      assert.matches(trace_id .. "%-%x+%-1%-%x+", json.headers.b3)
+    end)
+
+    describe("propagates datadog tracing headers", function()
+      it("with datadog headers in client request", function()
+        local trace_id = "1234567890"
+        local r = proxy_client:get("/", {
+          headers = {
+            ["x-datadog-trace-id"] = trace_id,
+            host = http_route_host,
+          },
+        })
+        local body = assert.response(r).has.status(200)
+        local json = cjson.decode(body)
+
+        assert.equals(trace_id, json.headers["x-datadog-trace-id"])
+        assert.is_not_nil(tonumber(json.headers["x-datadog-parent-id"]))
+      end)
+
+      it("without datadog headers in client request", function()
+        local r = proxy_client:get("/", {
+          headers = { host = http_route_dd_host },
+        })
+        local body = assert.response(r).has.status(200)
+        local json = cjson.decode(body)
+
+        assert.is_not_nil(tonumber(json.headers["x-datadog-trace-id"]))
+        assert.is_not_nil(tonumber(json.headers["x-datadog-parent-id"]))
+      end)
+    end)
+
+    if propagation_config == "new" then
+      it("clears non-propagated headers when configured to do so", function()
+        local trace_id = gen_trace_id(16)
+        local parent_id = gen_span_id()
+
+        local r = proxy_client:get("/", {
+          headers = {
+            traceparent = fmt("00-%s-%s-01", trace_id, parent_id),
+            ["ot-tracer-traceid"] = trace_id,
+            ["ot-tracer-spanid"] = parent_id,
+            ["ot-tracer-sampled"] = "1",
+            host = http_route_clear_host
+          },
+        })
+        local body = assert.response(r).has.status(200)
+        local json = cjson.decode(body)
+        assert.matches("00%-" .. trace_id .. "%-%x+-01", json.headers.traceparent)
+        assert.is_nil(json.headers["ot-tracer-traceid"])
+        assert.is_nil(json.headers["ot-tracer-spanid"])
+        assert.is_nil(json.headers["ot-tracer-sampled"])
+      end)
+
+      it("does not preserve incoming header type if preserve is not specified", function()
+        local trace_id = gen_trace_id(16)
+        local span_id = gen_span_id()
+        local parent_id = gen_span_id()
+
+        local r = proxy_client:get("/", {
+          headers = {
+            b3 = fmt("%s-%s-1-%s", trace_id, span_id, parent_id),
+            host = http_route_no_preserve_host
+          },
+        })
+        local body = assert.response(r).has.status(200)
+        local json = cjson.decode(body)
+        -- b3 was not injected, only preserved as incoming
+        assert.equals(fmt("%s-%s-1-%s", trace_id, span_id, parent_id), json.headers.b3)
+        -- w3c was injected
+        assert.matches("00%-" .. trace_id .. "%-%x+-01", json.headers.traceparent)
"%-%x+-01", json.headers.traceparent) + end) + end + end) end) end end @@ -1554,3 +1846,4 @@ for _, strategy in helpers.each_strategy() do end) end) end +end diff --git a/spec/03-plugins/37-opentelemetry/03-propagation_spec.lua b/spec/03-plugins/37-opentelemetry/03-propagation_spec.lua index e1d029df92d1..4cfab4a72736 100644 --- a/spec/03-plugins/37-opentelemetry/03-propagation_spec.lua +++ b/spec/03-plugins/37-opentelemetry/03-propagation_spec.lua @@ -8,6 +8,14 @@ local fmt = string.format local TCP_PORT = 35001 +local http_route_host = "http-route" +local http_route_ignore_host = "http-route-ignore" +local http_route_w3c_host = "http-route-w3c" +local http_route_dd_host = "http-route-dd" +local http_route_b3_single_host = "http-route-b3-single" +local http_route_clear_host = "http-clear-route" +local http_route_no_preserve_host = "http-no-preserve-route" + local function gen_trace_id() return to_hex(utils.get_rand_bytes(16)) end @@ -56,10 +64,202 @@ local function assert_correct_trace_hierarchy(spans, incoming_span_id) end end +local function setup_otel_old_propagation(bp, service) + bp.plugins:insert({ + name = "opentelemetry", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_host }, + }).id}, + config = { + -- fake endpoint, request to backend will sliently fail + endpoint = "http://localhost:8080/v1/traces", + } + }) + + bp.plugins:insert({ + name = "opentelemetry", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_ignore_host }, + }).id}, + config = { + endpoint = "http://localhost:8080/v1/traces", + header_type = "ignore", + } + }) + + bp.plugins:insert({ + name = "opentelemetry", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_w3c_host }, + }).id}, + config = { + endpoint = "http://localhost:8080/v1/traces", + header_type = "w3c", + } + }) + + bp.plugins:insert({ + name = "opentelemetry", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_dd_host }, + }).id}, + config = { + endpoint = "http://localhost:8080/v1/traces", + header_type = "datadog", + } + }) + + bp.plugins:insert({ + name = "opentelemetry", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_b3_single_host }, + }).id}, + config = { + endpoint = "http://localhost:8080/v1/traces", + header_type = "b3-single", + } + }) +end + +-- same configurations as "setup_otel_old_propagation", using the new +-- propagation configuration fields +local function setup_otel_new_propagation(bp, service) + bp.plugins:insert({ + name = "opentelemetry", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_host }, + }).id}, + config = { + endpoint = "http://localhost:8080/v1/traces", + propagation = { + extract = { "b3", "w3c", "jaeger", "ot", "datadog", "aws", "gcp" }, + inject = { "preserve" }, + default_format = "w3c", + } + } + }) + + bp.plugins:insert({ + name = "opentelemetry", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_ignore_host }, + }).id}, + config = { + endpoint = "http://localhost:8080/v1/traces", + propagation = { + extract = { }, + inject = { "preserve" }, + default_format = "w3c", + } + } + }) + + bp.plugins:insert({ + name = "opentelemetry", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_w3c_host }, + }).id}, + config = { + endpoint = "http://localhost:8080/v1/traces", + propagation = { + extract = { "b3", "w3c", "jaeger", "ot", "datadog", "aws", "gcp" }, + inject = { "preserve", "w3c" }, + 
default_format = "w3c", + } + } + }) + + bp.plugins:insert({ + name = "opentelemetry", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_dd_host }, + }).id}, + config = { + endpoint = "http://localhost:8080/v1/traces", + propagation = { + extract = { "b3", "w3c", "jaeger", "ot", "datadog", "aws", "gcp" }, + inject = { "preserve", "datadog" }, + default_format = "datadog", + } + } + }) + + bp.plugins:insert({ + name = "opentelemetry", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_b3_single_host }, + }).id}, + config = { + endpoint = "http://localhost:8080/v1/traces", + propagation = { + extract = { "b3", "w3c", "jaeger", "ot", "datadog", "aws", "gcp" }, + inject = { "preserve", "b3-single" }, + default_format = "w3c", + } + } + }) + + -- available with new configuration only: + -- no preserve + bp.plugins:insert({ + name = "opentelemetry", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_no_preserve_host }, + }).id}, + config = { + endpoint = "http://localhost:8080/v1/traces", + -- old configuration ignored when new propagation configuration is provided + header_type = "preserve", + propagation = { + extract = { "b3" }, + inject = { "w3c" }, + default_format = "w3c", + } + } + }) + + -- clear + bp.plugins:insert({ + name = "opentelemetry", + route = {id = bp.routes:insert({ + service = service, + hosts = { http_route_clear_host }, + }).id}, + config = { + endpoint = "http://localhost:8080/v1/traces", + propagation = { + extract = { "w3c", "ot" }, + inject = { "preserve" }, + clear = { + "ot-tracer-traceid", + "ot-tracer-spanid", + "ot-tracer-sampled", + }, + default_format = "b3", + } + } + }) +end + for _, strategy in helpers.each_strategy() do for _, instrumentations in ipairs({"all", "off"}) do for _, sampling_rate in ipairs({1, 0}) do -describe("propagation tests #" .. strategy .. " instrumentations: " .. instrumentations .. " sampling_rate: " .. sampling_rate, function() +for _, propagation_config in ipairs({"old", "new"}) do +describe("propagation tests #" .. strategy .. + " instrumentations: #" .. instrumentations .. + " sampling_rate: " .. sampling_rate .. + " propagation config: #" .. propagation_config, function() local service local proxy_client @@ -74,70 +274,19 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. 
instrumen end lazy_setup(function() - local bp = helpers.get_db_utils(strategy, { "services", "routes", "plugins" }, { "trace-propagator" }) + local bp = helpers.get_db_utils(strategy, { "services", "routes", "plugins" }) service = bp.services:insert() - local multi_plugin_route = bp.routes:insert({ - hosts = { "multi-plugin" }, - service = service, - }) - - bp.plugins:insert({ - name = "opentelemetry", - route = {id = bp.routes:insert({ - service = service, - hosts = { "http-route" }, - }).id}, - config = { - -- fake endpoint, request to backend will sliently fail - endpoint = "http://localhost:8080/v1/traces", - } - }) - - bp.plugins:insert({ - name = "opentelemetry", - route = {id = bp.routes:insert({ - service = service, - hosts = { "http-route-ignore" }, - }).id}, - config = { - -- fake endpoint, request to backend will sliently fail - endpoint = "http://localhost:8080/v1/traces", - header_type = "ignore", - } - }) - - bp.plugins:insert({ - name = "opentelemetry", - route = {id = bp.routes:insert({ - service = service, - hosts = { "http-route-w3c" }, - }).id}, - config = { - -- fake endpoint, request to backend will sliently fail - endpoint = "http://localhost:8080/v1/traces", - header_type = "w3c", - } - }) - - bp.plugins:insert({ - name = "trace-propagator", - route = multi_plugin_route, - }) - - bp.plugins:insert({ - name = "opentelemetry", - route = multi_plugin_route, - config = { - endpoint = "http://localhost:8080/v1/traces", - header_type = "ignore", - } - }) + if propagation_config == "old" then + setup_otel_old_propagation(bp, service) + else + setup_otel_new_propagation(bp, service) + end helpers.start_kong({ database = strategy, - plugins = "bundled, trace-propagator", + plugins = "bundled", nginx_conf = "spec/fixtures/custom_nginx.template", tracing_instrumentations = instrumentations, tracing_sampling_rate = sampling_rate, @@ -153,7 +302,7 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. instrumen it("default propagation headers (w3c)", function() local r = proxy_client:get("/", { headers = { - host = "http-route", + host = http_route_host, }, }) local body = assert.response(r).has.status(200) @@ -167,7 +316,7 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. instrumen headers = { ["x-b3-sampled"] = "1", ["x-b3-traceid"] = trace_id, - host = "http-route", + host = http_route_host, }, }) local body = assert.response(r).has.status(200) @@ -184,7 +333,7 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. instrumen local r = proxy_client:get("/", { headers = { b3 = fmt("%s-%s-%s-%s", trace_id, span_id, "1", parent_id), - host = "http-route", + host = http_route_host, }, }) local body = assert.response(r).has.status(200) @@ -199,7 +348,7 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. instrumen local r = proxy_client:get("/", { headers = { b3 = fmt("%s-%s-1", trace_id, span_id), - host = "http-route", + host = http_route_host, }, }) local body = assert.response(r).has.status(200) @@ -214,7 +363,7 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. instrumen local r = proxy_client:get("/", { headers = { b3 = fmt("%s-%s-0", trace_id, span_id), - host = "http-route", + host = http_route_host, }, }) local body = assert.response(r).has.status(200) @@ -232,7 +381,7 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. 
instrumen local r = proxy_client:get("/", { headers = { traceparent = fmt("00-%s-%s-01", trace_id, parent_id), - host = "http-route" + host = http_route_host }, }) local body = assert.response(r).has.status(200) @@ -247,7 +396,7 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. instrumen local r = proxy_client:get("/", { headers = { traceparent = fmt("00-%s-%s-01", trace_id, parent_id), - host = "http-route-ignore" + host = http_route_ignore_host }, }) local body = assert.response(r).has.status(200) @@ -263,7 +412,7 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. instrumen headers = { ["x-b3-sampled"] = "1", ["x-b3-traceid"] = trace_id, - host = "http-route-ignore", + host = http_route_ignore_host, }, }) local body = assert.response(r).has.status(200) @@ -280,12 +429,52 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. instrumen local r = proxy_client:get("/", { headers = { traceparent = fmt("00-%s-%s-01", trace_id, parent_id), - host = "http-route-w3c" + host = http_route_w3c_host + }, + }) + local body = assert.response(r).has.status(200) + local json = cjson.decode(body) + assert.matches("00%-" .. trace_id .. "%-%x+-" .. sampled_flag_w3c, json.headers.traceparent) + end) + + it("propagates w3c tracing headers + incoming format (preserve + w3c)", function() + local trace_id = gen_trace_id() + local span_id = gen_span_id() + local parent_id = gen_span_id() + + local r = proxy_client:get("/", { + headers = { + b3 = fmt("%s-%s-%s-%s", trace_id, span_id, sampled_flag_b3, parent_id), + host = http_route_w3c_host }, }) local body = assert.response(r).has.status(200) local json = cjson.decode(body) assert.matches("00%-" .. trace_id .. "%-%x+-" .. sampled_flag_w3c, json.headers.traceparent) + assert.not_equals(fmt("%s-%s-%s-%s", trace_id, span_id, sampled_flag_b3, parent_id), json.headers.b3) + assert.matches(trace_id .. "%-%x+%-" .. sampled_flag_b3 .. "%-%x+", json.headers.b3) + -- if no instrumentation is enabled no new spans are created so the + -- incoming span is the parent of the outgoing span + if instrumentations == "off" then + assert.matches(trace_id .. "%-%x+%-" .. sampled_flag_b3 .. "%-" .. span_id, json.headers.b3) + end + end) + + it("propagates b3-single tracing headers when header_type set to b3-single", function() + local trace_id = gen_trace_id() + local span_id = gen_span_id() + local parent_id = gen_span_id() + + local r = proxy_client:get("/", { + headers = { + b3 = fmt("%s-%s-%s-%s", trace_id, span_id, sampled_flag_b3, parent_id), + host = http_route_b3_single_host + }, + }) + local body = assert.response(r).has.status(200) + local json = cjson.decode(body) + assert.not_equals(fmt("%s-%s-%s-%s", trace_id, span_id, sampled_flag_b3, parent_id), json.headers.b3) + assert.matches(trace_id .. "%-%x+%-" .. sampled_flag_b3 .. "%-%x+", json.headers.b3) end) it("propagates jaeger tracing headers", function() @@ -296,7 +485,7 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. instrumen local r = proxy_client:get("/", { headers = { ["uber-trace-id"] = fmt("%s:%s:%s:%s", trace_id, span_id, parent_id, "1"), - host = "http-route" + host = http_route_host }, }) local body = assert.response(r).has.status(200) @@ -313,7 +502,7 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. 
instrumen ["ot-tracer-traceid"] = trace_id, ["ot-tracer-spanid"] = span_id, ["ot-tracer-sampled"] = "1", - host = "http-route", + host = http_route_host, }, }) local body = assert.response(r).has.status(200) @@ -322,10 +511,40 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. instrumen assert.equals(trace_id, json.headers["ot-tracer-traceid"]) end) + + describe("propagates datadog tracing headers", function() + it("with datadog headers in client request", function() + local trace_id = "1234567890" + local r = proxy_client:get("/", { + headers = { + ["x-datadog-trace-id"] = trace_id, + host = http_route_host, + }, + }) + local body = assert.response(r).has.status(200) + local json = cjson.decode(body) + + assert.equals(trace_id, json.headers["x-datadog-trace-id"]) + assert.is_not_nil(tonumber(json.headers["x-datadog-parent-id"])) + end) + + it("without datadog headers in client request", function() + local r = proxy_client:get("/", { + headers = { host = http_route_dd_host }, + }) + local body = assert.response(r).has.status(200) + local json = cjson.decode(body) + + assert.is_not_nil(tonumber(json.headers["x-datadog-trace-id"])) + assert.is_not_nil(tonumber(json.headers["x-datadog-parent-id"])) + end) + end) + + it("propagate spwaned span with ot headers", function() local r = proxy_client:get("/", { headers = { - host = "http-route", + host = http_route_host, }, }) local body = assert.response(r).has.status(200) @@ -340,26 +559,54 @@ describe("propagation tests #" .. strategy .. " instrumentations: " .. instrumen assert.same(sampled_flag_w3c, m[3]) end) - it("with multiple plugins, propagates the correct header", function() - local trace_id = gen_trace_id() + if propagation_config == "new" then + it("clears non-propagated headers when configured to do so", function() + local trace_id = gen_trace_id() + local parent_id = gen_span_id() - local r = proxy_client:get("/", { - headers = { - ["x-b3-sampled"] = "1", - ["x-b3-traceid"] = trace_id, - host = "multi-plugin", - }, - }) - local body = assert.response(r).has.status(200) - local json = cjson.decode(body) - assert.matches("00%-%x+-" .. json.headers["x-b3-spanid"] .. "%-" .. sampled_flag_w3c, json.headers.traceparent) - end) + local r = proxy_client:get("/", { + headers = { + traceparent = fmt("00-%s-%s-01", trace_id, parent_id), + ["ot-tracer-traceid"] = trace_id, + ["ot-tracer-spanid"] = parent_id, + ["ot-tracer-sampled"] = "1", + host = http_route_clear_host + }, + }) + local body = assert.response(r).has.status(200) + local json = cjson.decode(body) + assert.matches("00%-" .. trace_id .. "%-%x+-" .. sampled_flag_w3c, json.headers.traceparent) + assert.is_nil(json.headers["ot-tracer-traceid"]) + assert.is_nil(json.headers["ot-tracer-spanid"]) + assert.is_nil(json.headers["ot-tracer-sampled"]) + end) + + it("does not preserve incoming header type if preserve is not specified", function() + local trace_id = gen_trace_id() + local span_id = gen_span_id() + local parent_id = gen_span_id() + + local r = proxy_client:get("/", { + headers = { + b3 = fmt("%s-%s-%s-%s", trace_id, span_id, sampled_flag_b3, parent_id), + host = http_route_no_preserve_host + }, + }) + local body = assert.response(r).has.status(200) + local json = cjson.decode(body) + -- b3 was not injected, only preserved as incoming + assert.equals(fmt("%s-%s-%s-%s", trace_id, span_id, sampled_flag_b3, parent_id), json.headers.b3) + -- w3c was injected + assert.matches("00%-" .. trace_id .. "%-%x+-" .. 
+    end)
+  end
 end)
 end
 end
+end
 
 for _, instrumentation in ipairs({ "request", "request,balancer", "all" }) do
-describe("propagation tests with enabled " .. instrumentation .. " instrumentation (issue #11294) #" .. strategy, function()
+describe("propagation tests with enabled " .. instrumentation .. " instrumentation #" .. strategy, function()
   local service, route
   local proxy_client
@@ -392,7 +639,7 @@ describe("propagation tests with enabled " .. instrumentation .. " instrumentati
     helpers.start_kong({
       database = strategy,
-      plugins = "bundled, trace-propagator, tcp-trace-exporter",
+      plugins = "bundled,tcp-trace-exporter",
       nginx_conf = "spec/fixtures/custom_nginx.template",
       tracing_instrumentations = instrumentation,
       tracing_sampling_rate = 1,
@@ -405,7 +652,7 @@ describe("propagation tests with enabled " .. instrumentation .. " instrumentati
     helpers.stop_kong()
   end)
 
-  it("sets the outgoint parent span's ID correctly", function()
+  it("sets the outgoing parent span's ID correctly (issue #11294)", function()
    local trace_id = gen_trace_id()
    local incoming_span_id = gen_span_id()
    local thread = helpers.tcp_server(TCP_PORT)
@@ -438,6 +685,26 @@ describe("propagation tests with enabled " .. instrumentation .. " instrumentati
 
     assert_correct_trace_hierarchy(spans, incoming_span_id)
   end)
+
+  it("disables sampling when incoming header has sampled disabled", function()
+    local trace_id = gen_trace_id()
+    local incoming_span_id = gen_span_id()
+    local thread = helpers.tcp_server(TCP_PORT)
+
+    local r = proxy_client:get("/", {
+      headers = {
+        traceparent = fmt("00-%s-%s-00", trace_id, incoming_span_id),
+        host = "http-route"
+      },
+    })
+    assert.response(r).has.status(200)
+
+    local _, res = thread:join()
+    assert.is_string(res)
+    local spans = cjson.decode(res)
+    assert.equals(0, #spans, res)
+  end)
+
 end)
 end
 end
diff --git a/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua b/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua
deleted file mode 100644
index 5b61cbcd3f4b..000000000000
--- a/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua
+++ /dev/null
@@ -1,56 +0,0 @@
-local propagation = require "kong.tracing.propagation"
-local tracing_context = require "kong.tracing.tracing_context"
-
-local ngx = ngx
-local kong = kong
-local propagation_parse = propagation.parse
-local propagation_set = propagation.set
-
-local _M = {
-  PRIORITY = 1001,
-  VERSION = "1.0",
-}
-
-
-function _M:access(conf)
-  local headers = ngx.req.get_headers()
-  local tracer = kong.tracing.name == "noop" and kong.tracing.new("otel")
-                 or kong.tracing
-  local root_span = ngx.ctx.KONG_SPANS and ngx.ctx.KONG_SPANS[1]
-  if not root_span then
-    root_span = tracer.start_span("root")
-    root_span:set_attribute("kong.propagation_only", true)
-    kong.ctx.plugin.should_sample = false
-  end
-
-  local injected_parent_span = tracing_context.get_unlinked_span("balancer") or root_span
-
-  local header_type, trace_id, span_id, parent_id, parent_sampled = propagation_parse(headers)
-
-  -- overwrite trace ids
-  -- with the value extracted from incoming tracing headers
-  if trace_id then
-    injected_parent_span.trace_id = trace_id
-    tracing_context.set_raw_trace_id(trace_id)
-  end
-  if span_id then
-    root_span.parent_id = span_id
-  elseif parent_id then
-    root_span.parent_id = parent_id
-  end
-
-  -- Set the sampled flag for the outgoing header's span
-  local sampled
-  if kong.ctx.plugin.should_sample == false then
-    sampled = false
-  else
-    sampled = tracer:get_sampling_decision(parent_sampled, conf.sampling_rate)
-    tracer:set_should_sample(sampled)
-  end
-  injected_parent_span.should_sample = sampled
-
-  local type = header_type and "preserve" or "w3c"
-  propagation_set(type, header_type, injected_parent_span, "w3c")
-end
-
-return _M
diff --git a/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/schema.lua b/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/schema.lua
deleted file mode 100644
index 60890560413e..000000000000
--- a/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/schema.lua
+++ /dev/null
@@ -1,11 +0,0 @@
-return {
-  name = "trace-propagator",
-  fields = {
-    {
-      config = {
-        type = "record",
-        fields = { }
-      }
-    }
-  }
-}