From 1ed88ad91a2abf7f336806d2bef1b6252ddc95ff Mon Sep 17 00:00:00 2001 From: Luc Vieillescazes Date: Thu, 21 Nov 2024 11:25:10 +0100 Subject: [PATCH 01/10] Enable more PHP docker-ssi tests (#3509) --- tests/docker_ssi/test_docker_ssi.py | 1 - utils/docker_ssi/docker_ssi_definitions.py | 23 +++++++++++++++++++++- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/tests/docker_ssi/test_docker_ssi.py b/tests/docker_ssi/test_docker_ssi.py index ff93013ce3..36b44021b9 100644 --- a/tests/docker_ssi/test_docker_ssi.py +++ b/tests/docker_ssi/test_docker_ssi.py @@ -92,7 +92,6 @@ def test_telemetry(self): @features.ssi_guardrails @irrelevant(context.library == "java" and context.installed_language_runtime >= "1.8.0_0") - @bug(condition=context.library == "php" and context.installed_language_runtime < "7.0", reason="INPLAT-180") @irrelevant(context.library == "php" and context.installed_language_runtime >= "7.0") @bug(condition=context.library == "python" and context.installed_language_runtime < "3.7.0", reason="INPLAT-181") @irrelevant(context.library == "python" and context.installed_language_runtime >= "3.7.0") diff --git a/utils/docker_ssi/docker_ssi_definitions.py b/utils/docker_ssi/docker_ssi_definitions.py index 1eb5b864ab..98586a7a1d 100644 --- a/utils/docker_ssi/docker_ssi_definitions.py +++ b/utils/docker_ssi/docker_ssi_definitions.py @@ -66,14 +66,28 @@ class PHPRuntimeInstallableVersions: """ PHP runtime versions that can be installed automatically""" PHP56 = RuntimeInstallableVersion("PHP56", "5.6") # Not supported (EOL runtime) + PHP70 = RuntimeInstallableVersion("PHP70", "7.0") + PHP71 = RuntimeInstallableVersion("PHP71", "7.1") + PHP72 = RuntimeInstallableVersion("PHP72", "7.2") + PHP73 = RuntimeInstallableVersion("PHP73", "7.3") PHP74 = RuntimeInstallableVersion("PHP74", "7.4") + PHP80 = RuntimeInstallableVersion("PHP80", "8.0") + PHP81 = RuntimeInstallableVersion("PHP81", "8.1") + PHP82 = RuntimeInstallableVersion("PHP82", "8.2") PHP83 = RuntimeInstallableVersion("PHP83", "8.3") @staticmethod def get_all_versions(): return [ PHPRuntimeInstallableVersions.PHP56, + PHPRuntimeInstallableVersions.PHP70, + PHPRuntimeInstallableVersions.PHP71, + PHPRuntimeInstallableVersions.PHP72, + PHPRuntimeInstallableVersions.PHP73, PHPRuntimeInstallableVersions.PHP74, + PHPRuntimeInstallableVersions.PHP80, + PHPRuntimeInstallableVersions.PHP81, + PHPRuntimeInstallableVersions.PHP82, PHPRuntimeInstallableVersions.PHP83, ] @@ -174,7 +188,14 @@ def get_version_id(version): PHP_APP = WeblogDescriptor( "php-app", "php", - [SupportedImages().UBUNTU_22_AMD64.with_allowed_runtime_versions(PHPRuntimeInstallableVersions.get_all_versions())], + [ + SupportedImages().UBUNTU_22_AMD64.with_allowed_runtime_versions( + PHPRuntimeInstallableVersions.get_all_versions() + ), + SupportedImages().UBUNTU_22_ARM64.with_allowed_runtime_versions( + PHPRuntimeInstallableVersions.get_all_versions() + ), + ], ) PY_APP = WeblogDescriptor( From cc012e3fb2eb214bd59ca5aed6864e5df63fb5f4 Mon Sep 17 00:00:00 2001 From: Charles de Beauchesne Date: Thu, 21 Nov 2024 11:46:52 +0100 Subject: [PATCH 02/10] [java] Skip flaky test for APMAPI-908 (#3520) --- tests/parametric/test_config_consistency.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/parametric/test_config_consistency.py b/tests/parametric/test_config_consistency.py index 912e37f4d6..1cb05f3574 100644 --- a/tests/parametric/test_config_consistency.py +++ b/tests/parametric/test_config_consistency.py @@ -2,10 +2,10 @@ Test configuration 
consistency for features across supported APM SDKs. """ +from urllib.parse import urlparse import pytest -from utils import scenarios, features, context, bug, missing_feature, irrelevant +from utils import scenarios, features, context, bug, missing_feature, irrelevant, flaky from utils.parametric.spec.trace import find_span_in_traces -from urllib.parse import urlparse parametrize = pytest.mark.parametrize @@ -183,6 +183,7 @@ def test_default_trace_rate_limit(self, library_env, test_agent, test_library): reason="PHP backfill model does not support strict two-trace limit, see test below for its behavior", ) @parametrize("library_env", [{"DD_TRACE_RATE_LIMIT": "1", "DD_TRACE_SAMPLE_RATE": "1"}]) + @flaky(library="java", reason="APMAPI-908") def test_setting_trace_rate_limit_strict(self, library_env, test_agent, test_library): with test_library: with test_library.start_span(name="s1") as s1: From f66729e0e834dcaf1f27d6205482f7ddae82fd5c Mon Sep 17 00:00:00 2001 From: Rachel Yang Date: Thu, 21 Nov 2024 10:15:41 -0500 Subject: [PATCH 03/10] baggage tests max items update (#3474) --- tests/parametric/test_headers_baggage.py | 27 +++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/tests/parametric/test_headers_baggage.py b/tests/parametric/test_headers_baggage.py index ea3df19074..3ca04759e6 100644 --- a/tests/parametric/test_headers_baggage.py +++ b/tests/parametric/test_headers_baggage.py @@ -217,17 +217,34 @@ def test_baggageheader_maxitems_inject_D016(self, test_library): """Ensure that baggage headers are not injected when the number of baggage items exceeds the maximum number of items.""" max_items = 64 with test_library.start_span(name="test_baggageheader_maxitems_inject_D016") as span: - for i in range(max_items + 1): + for i in range(max_items + 2): span.set_baggage(f"key{i}", f"value{i}") headers = test_library.inject_headers(span.span_id) - assert not any("baggage" in item for item in headers) + for header in headers: + if "baggage" in header: + baggage_header = header + items = baggage_header[1].split(",") + assert len(items) == max_items def test_baggageheader_maxbytes_inject_D017(self, test_library): """Ensure that baggage headers are not injected when the total byte size of the baggage exceeds the maximum size.""" max_bytes = 8192 with test_library.start_span(name="test_baggageheader_maxbytes_inject_D017",) as span: - span.set_baggage("foo", "a" * (max_bytes)) + baggage_items = { + "key1": "a" * ((max_bytes // 3)), + "key2": "b" * ((max_bytes // 3)), + "key3": "c" * ((max_bytes // 3)), + "key4": "d", + } + for key, value in baggage_items.items(): + span.set_baggage(key, value) - headers = test_library.inject_headers(span.span_id) - assert not any("baggage" in item for item in headers) + headers = test_library.inject_headers(span.span_id) + for header in headers: + if "baggage" in header: + baggage_header = header + items = baggage_header[1].split(",") + header_size = len(baggage_header[1].encode("utf-8")) + assert len(items) == 2 + assert header_size <= max_bytes From 9f4ee4bc11eaf5b69c18b47e1926c97c720204c6 Mon Sep 17 00:00:00 2001 From: Munir Abdinur Date: Thu, 21 Nov 2024 10:16:13 -0500 Subject: [PATCH 04/10] parametric: remove stale comment (#3513) --- tests/parametric/test_parametric_endpoints.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/parametric/test_parametric_endpoints.py b/tests/parametric/test_parametric_endpoints.py index 1013d157ce..6820d68920 100644 --- a/tests/parametric/test_parametric_endpoints.py +++ 
b/tests/parametric/test_parametric_endpoints.py @@ -764,8 +764,6 @@ def test_flush(self, test_agent, test_library): Supported Return Values: - success: boolean """ - # Here we are avoiding using the __exit__() operation on the contextmanager - # and instead manually finishing and flushing the span. with test_library.otel_start_span("test_otel_flush") as s1: pass From 0662943a2fe473c958df707edda92fc698563efe Mon Sep 17 00:00:00 2001 From: William Conti <58711692+wconti27@users.noreply.github.com> Date: Thu, 21 Nov 2024 10:47:09 -0500 Subject: [PATCH 05/10] create api gateway test (#3349) Co-authored-by: Charles de Beauchesne --- docs/weblog/README.md | 17 ++++ manifests/cpp.yml | 2 + manifests/dotnet.yml | 2 + manifests/golang.yml | 2 + manifests/java.yml | 2 + manifests/nodejs.yml | 4 + manifests/php.yml | 2 + manifests/python.yml | 2 + manifests/ruby.yml | 2 + tests/integrations/test_inferred_proxy.py | 91 +++++++++++++++++++ utils/_features.py | 11 +++ utils/build/docker/nodejs/express4/app.js | 7 ++ .../express4/integrations/api_gateway.js | 14 +++ 13 files changed, 158 insertions(+) create mode 100644 tests/integrations/test_inferred_proxy.py create mode 100644 utils/build/docker/nodejs/express4/integrations/api_gateway.js diff --git a/docs/weblog/README.md b/docs/weblog/README.md index 8e4dbff6f8..c5db06c7b6 100644 --- a/docs/weblog/README.md +++ b/docs/weblog/README.md @@ -512,6 +512,23 @@ By default, the generated event has the following specification: Values can be changed with the query params called `event_name`. +### GET '/inferred-proxy/span-creation' + +This endpoint is supposed to be hit with the necessary headers that are used to create inferred proxy +spans for routers such as AWS API Gateway. Not including the headers means a span will not be created by the tracer +if the feature exists. + +The endpoint supports the following query parameters: + - `status_code`: str containing status code to used in API response + +The headers necessary to create a span with example values: + `x-dd-proxy-request-time-ms`: start time in milliseconds + `x-dd-proxy-path`: "/api/data", + `x-dd-proxy-httpmethod`: "GET", + `x-dd-proxy-domain-name`: "system-tests-api-gateway.com", + `x-dd-proxy-stage`: "staging", + `x-dd-proxy`: "aws-apigateway", + ### GET /users This endpoint calls the appsec blocking SDK functions used for blocking users. 
If the expected parameter matches one of diff --git a/manifests/cpp.yml b/manifests/cpp.yml index 31e5862218..10d7f95601 100644 --- a/manifests/cpp.yml +++ b/manifests/cpp.yml @@ -137,6 +137,8 @@ tests/: Test_DsmSQS: missing_feature Test_Dsm_Manual_Checkpoint_Inter_Process: missing_feature Test_Dsm_Manual_Checkpoint_Intra_Process: missing_feature + test_inferred_proxy.py: + Test_AWS_API_Gateway_Inferred_Span_Creation: missing_feature test_otel_drop_in.py: Test_Otel_Drop_In: missing_feature parametric/: diff --git a/manifests/dotnet.yml b/manifests/dotnet.yml index c9c07d622b..d480d34fca 100644 --- a/manifests/dotnet.yml +++ b/manifests/dotnet.yml @@ -360,6 +360,8 @@ tests/: Test_DsmSQS: v2.48.0 Test_Dsm_Manual_Checkpoint_Inter_Process: missing_feature Test_Dsm_Manual_Checkpoint_Intra_Process: missing_feature + test_inferred_proxy.py: + Test_AWS_API_Gateway_Inferred_Span_Creation: missing_feature test_otel_drop_in.py: Test_Otel_Drop_In: missing_feature k8s_lib_injection/: diff --git a/manifests/golang.yml b/manifests/golang.yml index 7c3701b1c9..8defcd4907 100644 --- a/manifests/golang.yml +++ b/manifests/golang.yml @@ -482,6 +482,8 @@ tests/: Test_Dsm_Manual_Checkpoint_Intra_Process: "*": irrelevant net-http: missing_feature (Endpoint not implemented) + test_inferred_proxy.py: + Test_AWS_API_Gateway_Inferred_Span_Creation: missing_feature test_otel_drop_in.py: Test_Otel_Drop_In: missing_feature parametric/: diff --git a/manifests/java.yml b/manifests/java.yml index cc225fc0f7..9ce829ef2a 100644 --- a/manifests/java.yml +++ b/manifests/java.yml @@ -1426,6 +1426,8 @@ tests/: Test_Dsm_Manual_Checkpoint_Intra_Process: "*": irrelevant spring-boot: bug (AIDM-325) + test_inferred_proxy.py: + Test_AWS_API_Gateway_Inferred_Span_Creation: missing_feature test_mongo.py: Test_Mongo: bug (APMAPI-729) test_otel_drop_in.py: diff --git a/manifests/nodejs.yml b/manifests/nodejs.yml index 62192d351c..3304307730 100644 --- a/manifests/nodejs.yml +++ b/manifests/nodejs.yml @@ -628,6 +628,10 @@ tests/: Test_Dsm_Manual_Checkpoint_Intra_Process: '*': irrelevant express4: *ref_5_20_0 + test_inferred_proxy.py: + Test_AWS_API_Gateway_Inferred_Span_Creation: + '*': irrelevant + express4: *ref_5_26_0 test_otel_drop_in.py: Test_Otel_Drop_In: missing_feature k8s_lib_injection/: diff --git a/manifests/php.yml b/manifests/php.yml index 0a47e38cff..84a488e210 100644 --- a/manifests/php.yml +++ b/manifests/php.yml @@ -296,6 +296,8 @@ tests/: Test_DsmSQS: missing_feature Test_Dsm_Manual_Checkpoint_Inter_Process: missing_feature Test_Dsm_Manual_Checkpoint_Intra_Process: missing_feature + test_inferred_proxy.py: + Test_AWS_API_Gateway_Inferred_Span_Creation: missing_feature test_otel_drop_in.py: Test_Otel_Drop_In: missing_feature parametric/: diff --git a/manifests/python.yml b/manifests/python.yml index 473e64f1fd..217de2fb13 100644 --- a/manifests/python.yml +++ b/manifests/python.yml @@ -723,6 +723,8 @@ tests/: Test_Dsm_Manual_Checkpoint_Intra_Process: '*': irrelevant flask-poc: v2.8.0 + test_inferred_proxy.py: + Test_AWS_API_Gateway_Inferred_Span_Creation: missing_feature test_otel_drop_in.py: Test_Otel_Drop_In: missing_feature k8s_lib_injection/: diff --git a/manifests/ruby.yml b/manifests/ruby.yml index 26b7e80899..b34f9a5609 100644 --- a/manifests/ruby.yml +++ b/manifests/ruby.yml @@ -364,6 +364,8 @@ tests/: Test_Dsm_Manual_Checkpoint_Intra_Process: '*': irrelevant rails70: missing_feature (Endpoint not implemented) + test_inferred_proxy.py: + Test_AWS_API_Gateway_Inferred_Span_Creation: missing_feature 
test_otel_drop_in.py: Test_Otel_Drop_In: missing_feature k8s_lib_injection/: diff --git a/tests/integrations/test_inferred_proxy.py b/tests/integrations/test_inferred_proxy.py new file mode 100644 index 0000000000..fe92c3ebed --- /dev/null +++ b/tests/integrations/test_inferred_proxy.py @@ -0,0 +1,91 @@ +import json +import time + +from utils import weblog, scenarios, features, interfaces +from utils.tools import logger + + +@features.aws_api_gateway_inferred_span_creation +@scenarios.integrations +class Test_AWS_API_Gateway_Inferred_Span_Creation: + """ Verify DSM context is extracted using "dd-pathway-ctx-base64" """ + + start_time = round(time.time() * 1e3) + start_time_ns = start_time * 1e6 + + def setup_api_gateway_inferred_span_creation(self): + headers = { + "x-dd-proxy-request-time-ms": str(self.start_time), # in ms + "x-dd-proxy-path": "/api/data", + "x-dd-proxy-httpmethod": "GET", + "x-dd-proxy-domain-name": "system-tests-api-gateway.com", + "x-dd-proxy-stage": "staging", + "x-dd-proxy": "aws-apigateway", + } + + self.r = weblog.get(f"/inferred-proxy/span-creation?status_code=200", headers=headers, timeout=60,) + + def test_api_gateway_inferred_span_creation(self): + assert self.r.text == "ok" + + span = get_span(interfaces.library) + + assert span is not None, "API Gateway inferred span should have been created but was not found!" + + assert_api_gateway_span(self, span) + + +def get_span(interface): + logger.debug(f"Trying to find API Gateway span for interface: {interface}") + + for data, trace in interface.get_traces(): + for span in trace: + if not span.get("meta"): + continue + + if span["name"] != "aws.apigateway": + continue + + logger.debug(f"Span found in {data['log_filename']}:\n{json.dumps(span, indent=2)}") + return span + + logger.debug("No span found") + return None + + +def assert_api_gateway_span(testCase, span): + assert span["name"] == "aws.apigateway", "Inferred AWS API Gateway span name should be 'aws.apigateway'" + + # Assertions to check if the span data contains the required keys and values. 
+ assert "meta" in span, "Inferred AWS API Gateway span should contain 'meta'" + assert ( + "component" in span["meta"] + ), "Inferred AWS API Gateway span meta should contain 'component' equal to 'aws-apigateway'" + assert span["meta"]["component"] == "aws-apigateway", "Expected component to be 'aws-apigateway'" + assert "service" in span["meta"], "Inferred AWS API Gateway span meta should contain 'service'" + + assert ( + span["meta"]["service"] == "system-tests-api-gateway.com" + ), "Inferred AWS API Gateway span expected service should equal 'system-tests-api-gateway.com'" + assert "span.kind" in span["meta"], "Inferred AWS API Gateway span meta should contain 'span.kind'" + assert ( + span["meta"]["span.kind"] == "internal" + ), "Inferred AWS API Gateway span meta span.kind should equal 'internal'" + assert "http.method" in span["meta"], "Inferred AWS API Gateway span meta should contain 'http.method'" + assert span["meta"]["http.method"] == "GET", "Inferred AWS API Gateway span meta expected HTTP method to be 'GET'" + assert "http.url" in span["meta"], "Inferred AWS API Gateway span eta should contain 'http.url'" + assert ( + span["meta"]["http.url"] == "system-tests-api-gateway.com/api/data" + ), "Inferred AWS API Gateway span meta expected HTTP URL to be 'system-tests-api-gateway.com/api/data'" + assert "http.route" in span["meta"], "Inferred AWS API Gateway span meta should contain 'http.route'" + assert ( + span["meta"]["http.route"] == "/api/data" + ), "Inferred AWS API Gateway span meta expected HTTP route to be '/api/data'" + assert "stage" in span["meta"], "Inferred AWS API Gateway span meta should contain 'stage'" + assert span["meta"]["stage"] == "staging", "Inferred AWS API Gateway span meta expected stage to be 'staging'" + assert "start" in span, f"Inferred AWS API Gateway span should have 'startTime'" + + if not interfaces.library.replay: + assert ( + span["start"] == testCase.start_time_ns + ), f"Inferred AWS API Gateway span startTime should equal expected '{str(testCase.start_time_ns)}''" diff --git a/utils/_features.py b/utils/_features.py index 463f078c11..5a8ac794d2 100644 --- a/utils/_features.py +++ b/utils/_features.py @@ -2447,6 +2447,17 @@ def serverless_span_pointers(test_object): pytest.mark.features(feature_id=328)(test_object) return test_object + @staticmethod + def aws_api_gateway_inferred_span_creation(test_object): + """ + AWS Api Gateway: Tests that API Gateway inferred span creation works as expected + + https://feature-parity.us1.prod.dog/#/?feature=341 + """ + pytest.mark.features(feature_id=341)(test_object) + + return test_object + @staticmethod def parametric_endpoint_parity(test_object): """ diff --git a/utils/build/docker/nodejs/express4/app.js b/utils/build/docker/nodejs/express4/app.js index 8b883fb5b9..85143ecb4f 100644 --- a/utils/build/docker/nodejs/express4/app.js +++ b/utils/build/docker/nodejs/express4/app.js @@ -19,6 +19,7 @@ const { spawnSync } = require('child_process') const pgsql = require('./integrations/db/postgres') const mysql = require('./integrations/db/mysql') const mssql = require('./integrations/db/mssql') +const apiGateway = require('./integrations/api_gateway') const multer = require('multer') const uploadToMemory = multer({ storage: multer.memoryStorage(), limits: { fileSize: 200000 } }) @@ -191,6 +192,12 @@ try { console.error('DSM routes initialization has failed', e) } +try { + apiGateway.initRoutes(app, tracer) +} catch (e) { + console.error('Api Gateway routes initialization has failed', e) +} + 
app.get('/kafka/produce', (req, res) => { const topic = req.query.topic diff --git a/utils/build/docker/nodejs/express4/integrations/api_gateway.js b/utils/build/docker/nodejs/express4/integrations/api_gateway.js new file mode 100644 index 0000000000..788572ae30 --- /dev/null +++ b/utils/build/docker/nodejs/express4/integrations/api_gateway.js @@ -0,0 +1,14 @@ +function initRoutes (app, tracer) { + console.log('Api Gateway routes initialized.') + + app.get('/inferred-proxy/span-creation', (req, res) => { + const statusCode = parseInt(req.query.status_code, 10) + + console.log('Received an API Gateway request') + console.log('Request headers:', req.headers) + + res.status(statusCode).send('ok') + }) +} + +module.exports = { initRoutes } From f946c90e41de00b532dfe6fbca442805e4853395 Mon Sep 17 00:00:00 2001 From: Rachel Yang Date: Thu, 21 Nov 2024 11:18:37 -0500 Subject: [PATCH 06/10] adding mypy checks to parametric (#3488) --- docs/edit/format.md | 2 +- format.sh | 15 +++++++++++++-- pyproject.toml | 7 +++++++ requirements.txt | 1 + tests/parametric/conftest.py | 13 +++++-------- tests/parametric/test_headers_baggage.py | 2 +- tests/parametric/test_library_tracestats.py | 7 +++---- tests/parametric/test_telemetry.py | 6 ++++-- tests/parametric/test_tracer.py | 10 ++++++---- tests/parametric/test_tracer_flare.py | 2 +- utils/parametric/spec/remoteconfig.py | 4 ++-- utils/parametric/spec/trace.py | 2 +- 12 files changed, 45 insertions(+), 26 deletions(-) diff --git a/docs/edit/format.md b/docs/edit/format.md index 9b03ce8261..32a2995b92 100644 --- a/docs/edit/format.md +++ b/docs/edit/format.md @@ -1,4 +1,4 @@ -System tests code is in python, and is linted/formated using [black](https://black.readthedocs.io/en/stable/) and [pylint](https://pylint.readthedocs.io/en/latest/). +System tests code is in python, and is linted/formated using [mypy](https://mypy.readthedocs.io/en/stable/), [black](https://black.readthedocs.io/en/stable/) and [pylint](https://pylint.readthedocs.io/en/latest/). Ensure you meet the other pre-reqs in [README.md](../../README.md#requirements) Then, run the linter with: diff --git a/format.sh b/format.sh index 649cc5d4a1..b0d756727a 100755 --- a/format.sh +++ b/format.sh @@ -38,11 +38,22 @@ source venv/bin/activate echo "Checking Python files..." if [ "$COMMAND" == "fix" ]; then - black . + black --quiet . else black --check --diff . fi -pylint utils # pylint does not have a fix mode + +echo "Running mypy type checks..." +if ! mypy --config pyproject.toml; then + echo "Mypy type checks failed. Please fix the errors above. 💥 💔 💥" + exit 1 +fi + +echo "Running pylint checks..." +if ! pylint utils; then + echo "Pylint checks failed. Please fix the errors above. 💥 💔 💥" + exit 1 +fi echo "Checking trailing whitespaces..." 
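
With `format.sh` now failing on mypy errors, the accompanying changes in this patch are mostly annotation tightening (for example `docker() -> Optional[str]` and `_set_log_level(...) -> str`). A minimal illustration of the pattern being enforced is sketched below; the function and parameter names are invented for the example and do not come from the repository.

```python
# Illustration of the implicit-Optional errors the new mypy gate rejects:
# a parameter that defaults to None must be typed Optional, and the declared
# return type must match what the function actually returns.
from typing import Optional


def lookup_flare(case_id: Optional[str] = None) -> Optional[str]:
    # Annotating this as `case_id: str = None` or `-> int` would fail mypy.
    if case_id is None:
        return None
    return case_id.strip()
```
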
INCLUDE_PATTERN='.*\.(md|yml|yaml|sh|cs|Dockerfile|java|sql|ts|js|php)$' diff --git a/pyproject.toml b/pyproject.toml index dbbb5f8cd4..9b1f5f28de 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -102,6 +102,13 @@ allow_no_jira_ticket_for_bugs = [ "tests/parametric/test_config_consistency.py::Test_Config_TraceLogDirectory", ] +[tool.mypy] +files = ["utils/parametric", "tests/parametric"] +ignore_missing_imports = true +disable_error_code = ["no-redef"] +exclude = 'utils/parametric/_library_client\.py|^(?!utils/parametric|tests/parametric).*$' +follow_imports = "skip" + [tool.pylint] init-hook='import sys; sys.path.append(".")' max-line-length = 120 diff --git a/requirements.txt b/requirements.txt index 39f982111c..c494e411b7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,6 +11,7 @@ pylint==3.0.4 python-dateutil==2.8.2 msgpack==1.0.4 watchdog==3.0.0 +mypy==1.0.0 aiohttp==3.9.0 yarl==1.9.4 diff --git a/tests/parametric/conftest.py b/tests/parametric/conftest.py index ff2aef83d4..0e8fbda0c0 100644 --- a/tests/parametric/conftest.py +++ b/tests/parametric/conftest.py @@ -11,7 +11,7 @@ from typing import Dict, Generator, List, TextIO, TypedDict, Optional, Any import urllib.parse -import requests +import requests # type: ignore import pytest from utils.parametric.spec import remoteconfig @@ -46,10 +46,7 @@ class AgentRequest(TypedDict): class AgentRequestV06Stats(AgentRequest): - method: str - url: str - headers: Dict[str, str] - body: V06StatsPayload + body: V06StatsPayload # type: ignore def pytest_configure(config): @@ -107,7 +104,7 @@ def __init__(self, base_url: str, pytest_request: None): self._base_url = base_url self._session = requests.Session() self._pytest_request = pytest_request - self.log_path = f"{context.scenario.host_log_folder}/outputs/{pytest_request.cls.__name__}/{pytest_request.node.name}/agent_api.log" + self.log_path = f"{context.scenario.host_log_folder}/outputs/{pytest_request.cls.__name__}/{pytest_request.node.name}/agent_api.log" # type: ignore os.makedirs(os.path.dirname(self.log_path), exist_ok=True) def _url(self, path: str) -> str: @@ -460,7 +457,7 @@ def wait_for_rc_capabilities(self, capabilities: List[int] = [], wait_loops: int time.sleep(0.01) raise AssertionError("No RemoteConfig capabilities found, got capabilites %r" % capabilities_seen) - def wait_for_tracer_flare(self, case_id: str = None, clear: bool = False, wait_loops: int = 100): + def wait_for_tracer_flare(self, case_id: Optional[str] = None, clear: bool = False, wait_loops: int = 100): """Wait for the tracer-flare to be received by the test agent.""" for i in range(wait_loops): try: @@ -479,7 +476,7 @@ def wait_for_tracer_flare(self, case_id: str = None, clear: bool = False, wait_l @pytest.fixture(scope="session") -def docker() -> str: +def docker() -> Optional[str]: """Fixture to ensure docker is ready to use on the system.""" # Redirect output to /dev/null since we just care if we get a successful response code. 
r = subprocess.run( diff --git a/tests/parametric/test_headers_baggage.py b/tests/parametric/test_headers_baggage.py index 3ca04759e6..483f70e374 100644 --- a/tests/parametric/test_headers_baggage.py +++ b/tests/parametric/test_headers_baggage.py @@ -1,6 +1,6 @@ from operator import le from py import test -from requests import head +from requests import head # type: ignore from utils.parametric.spec.trace import SAMPLING_PRIORITY_KEY, ORIGIN from utils.parametric.spec.trace import span_has_no_parent from utils.parametric.headers import make_single_request_and_get_inject_headers diff --git a/tests/parametric/test_library_tracestats.py b/tests/parametric/test_library_tracestats.py index 313793f628..06a53cae76 100644 --- a/tests/parametric/test_library_tracestats.py +++ b/tests/parametric/test_library_tracestats.py @@ -19,10 +19,9 @@ def _human_stats(stats: V06StatsAggr) -> str: """Return human-readable stats for debugging stat aggregations.""" - copy = stats.copy() - del copy["ErrorSummary"] - del copy["OkSummary"] - return str(copy) + # Create a copy excluding 'ErrorSummary' and 'OkSummary' since TypedDicts don't allow delete + filtered_copy = {k: v for k, v in stats.items() if k not in {"ErrorSummary", "OkSummary"}} + return str(filtered_copy) def enable_tracestats(sample_rate: Optional[float] = None) -> Any: diff --git a/tests/parametric/test_telemetry.py b/tests/parametric/test_telemetry.py index 531b6534c0..991c4a9145 100644 --- a/tests/parametric/test_telemetry.py +++ b/tests/parametric/test_telemetry.py @@ -129,7 +129,7 @@ class Test_Consistent_Configs: "DD_TRACE_RATE_LIMIT": 10, "DD_TRACE_HEADER_TAGS": "User-Agent:my-user-agent,Content-Type.", "DD_TRACE_ENABLED": "true", - "DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP": "\d{3}-\d{2}-\d{4}", + "DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP": r"\d{3}-\d{2}-\d{4}", "DD_TRACE_LOG_DIRECTORY": "/some/temporary/directory", "DD_TRACE_CLIENT_IP_HEADER": "random-header-name", "DD_TRACE_HTTP_CLIENT_ERROR_STATUSES": "200-250", @@ -157,7 +157,9 @@ def test_library_settings(self, library_env, test_agent, test_library): configuration_by_name.get("DD_TRACE_HEADER_TAGS").get("value") == "User-Agent:my-user-agent,Content-Type." 
) assert configuration_by_name.get("DD_TRACE_ENABLED").get("value") == True - assert configuration_by_name.get("DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP").get("value") == "\d{3}-\d{2}-\d{4}" + assert ( + configuration_by_name.get("DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP").get("value") == r"\d{3}-\d{2}-\d{4}" + ) assert configuration_by_name.get("DD_TRACE_CLIENT_IP_HEADER").get("value") == "random-header-name" @pytest.mark.parametrize( diff --git a/tests/parametric/test_tracer.py b/tests/parametric/test_tracer.py index 805cf92750..8405b50b5c 100644 --- a/tests/parametric/test_tracer.py +++ b/tests/parametric/test_tracer.py @@ -26,7 +26,7 @@ def test_tracer_span_top_level_attributes(self, test_agent: _TestAgentAPI, test_ "operation", service="my-webserver", resource="/endpoint", typestr="web" ) as parent: parent.set_metric("number", 10) - with test_library.start_span("operation.child", parent_id=parent.span_id) as child: + with test_library.start_span("operation.child", parent_id=parent.span_id) as child: # type: ignore child.set_meta("key", "val") traces = test_agent.wait_for_num_traces(1, sort_by_start=False) @@ -60,7 +60,7 @@ def test_tracer_repository_url_environment_variable( """ with test_library: with test_library.start_span("operation") as parent: - with test_library.start_span("operation.child", parent_id=parent.span_id): + with test_library.start_span("operation.child", parent_id=parent.span_id): # type: ignore pass traces = test_agent.wait_for_num_traces(1, sort_by_start=False) @@ -86,7 +86,7 @@ def test_tracer_commit_sha_environment_variable( """ with test_library: with test_library.start_span("operation") as parent: - with test_library.start_span("operation.child", parent_id=parent.span_id): + with test_library.start_span("operation.child", parent_id=parent.span_id): # type: ignore pass traces = test_agent.wait_for_num_traces(1, sort_by_start=False) @@ -146,7 +146,7 @@ def test_tracer_repository_url_strip_credentials( """ with test_library: with test_library.start_span("operation") as parent: - with test_library.start_span("operation.child", parent_id=parent.span_id): + with test_library.start_span("operation.child", parent_id=parent.span_id): # type: ignore pass traces = test_agent.wait_for_num_traces(1, sort_by_start=False) @@ -175,6 +175,7 @@ def test_tracer_service_name_environment_variable( traces = test_agent.wait_for_num_traces(1, sort_by_start=False) trace = find_trace(traces, root.trace_id) span = find_root_span(trace) + assert span is not None, "Root span not found" assert span["name"] == "operation" assert span["service"] == library_env["DD_SERVICE"] @@ -195,5 +196,6 @@ def test_tracer_env_environment_variable( trace = find_trace(traces, root.trace_id) span = find_root_span(trace) + assert span is not None, "Root span not found" assert span["name"] == "operation" assert span["meta"]["env"] == library_env["DD_ENV"] diff --git a/tests/parametric/test_tracer_flare.py b/tests/parametric/test_tracer_flare.py index 23f9a794d7..d697bb18be 100644 --- a/tests/parametric/test_tracer_flare.py +++ b/tests/parametric/test_tracer_flare.py @@ -69,7 +69,7 @@ def _java_tracer_flare_filenames() -> Set: } -def _set_log_level(test_agent, log_level: str) -> int: +def _set_log_level(test_agent, log_level: str) -> str: """Helper to create the appropriate "flare-log-level" config in RC for a given log-level.""" cfg_id = uuid4().hex test_agent.set_remote_config( diff --git a/utils/parametric/spec/remoteconfig.py b/utils/parametric/spec/remoteconfig.py index b0e7dc7930..59b8a60bd6 100644 --- 
a/utils/parametric/spec/remoteconfig.py +++ b/utils/parametric/spec/remoteconfig.py @@ -1,4 +1,4 @@ -from typing import Literal +from typing import Any, Literal from typing import Tuple from utils.dd_constants import Capabilities @@ -13,5 +13,5 @@ APPLY_STATUS = Literal[0, 1, 2, 3] -def human_readable_capabilities(caps: int) -> Tuple[str]: +def human_readable_capabilities(caps: int) -> Tuple[Any, ...]: return tuple(c.name for c in Capabilities if caps >> c & 1) diff --git a/utils/parametric/spec/trace.py b/utils/parametric/spec/trace.py index fbcf9dfa7f..4d5bb498ea 100644 --- a/utils/parametric/spec/trace.py +++ b/utils/parametric/spec/trace.py @@ -257,4 +257,4 @@ def id_to_int(value: Union[str, int]) -> int: # as stringified integers (ids will be stringified to workaround percision issues in some languages) return int(value) except ValueError: - return int(value, 16) + return int(value, 16) # type: ignore From 8031afccb0249f0b4fd76c34727f7279ec174dc0 Mon Sep 17 00:00:00 2001 From: Charles de Beauchesne Date: Thu, 21 Nov 2024 17:27:34 +0100 Subject: [PATCH 07/10] Support for gzip in multipart/form-data; #3508 (#3522) --- utils/proxy/_deserializer.py | 65 +++++++++++++++++++++--------------- 1 file changed, 38 insertions(+), 27 deletions(-) diff --git a/utils/proxy/_deserializer.py b/utils/proxy/_deserializer.py index e3936aaff2..c36baceb2e 100644 --- a/utils/proxy/_deserializer.py +++ b/utils/proxy/_deserializer.py @@ -4,6 +4,7 @@ import base64 import gzip +import io import json import logging from hashlib import md5 @@ -189,35 +190,14 @@ def json_load(): if headers.get("Content-Type", "").lower().startswith("application/json"): item["content"] = json.loads(item["content"]) - elif headers.get("Content-Type", "") == "application/octet-stream": - content_disposition = headers.get("Content-Disposition", "") - - if not content_disposition.startswith("form-data"): - item["system-tests-error"] = "Unknown content-disposition, please contact #apm-shared-testing" - item["content"] = None - - else: - meta_data = {} + elif headers.get("Content-Type", "") == "application/gzip": + with gzip.GzipFile(fileobj=io.BytesIO(part.content)) as gz_file: + content = gz_file.read() - for part in content_disposition.split(";"): - if "=" in part: - key, value = part.split("=", 1) - meta_data[key.strip()] = value.strip() + _deserialize_file_in_multipart_form_data(item, headers, export_content_files_to, content) - if "filename" not in meta_data: - item[ - "system-tests-error" - ] = "Filename not found in content-disposition, please contact #apm-shared-testing" - else: - filename = meta_data["filename"].strip('"') - file_path = f"{export_content_files_to}/{md5(item['content']).hexdigest()}_{filename}" - - with open(file_path, "wb") as f: - f.write(item["content"]) - - item["system-tests-information"] = "File exported to a separated file" - item["system-tests-file-path"] = file_path - del item["content"] + elif headers.get("Content-Type", "") == "application/octet-stream": + _deserialize_file_in_multipart_form_data(item, headers, export_content_files_to, part.content) decoded.append(item) @@ -229,6 +209,37 @@ def json_load(): return content +def _deserialize_file_in_multipart_form_data( + item: dict, headers: dict, export_content_files_to: str, content: bytes +) -> None: + content_disposition = headers.get("Content-Disposition", "") + + if not content_disposition.startswith("form-data"): + item["system-tests-error"] = "Unknown content-disposition, please contact #apm-shared-testing" + item["content"] = None + + else: + 
meta_data = {} + + for part in content_disposition.split(";"): + if "=" in part: + key, value = part.split("=", 1) + meta_data[key.strip()] = value.strip() + + if "filename" not in meta_data: + item["system-tests-error"] = "Filename not found in content-disposition, please contact #apm-shared-testing" + else: + filename = meta_data["filename"].strip('"') + file_path = f"{export_content_files_to}/{md5(content).hexdigest()}_{filename}" + + with open(file_path, "wb") as f: + f.write(content) + + item["system-tests-information"] = "File exported to a separated file" + item["system-tests-file-path"] = file_path + del item["content"] + + def _deserialized_nested_json_from_trace_payloads(content, interface): """ trace payload from agent and library contains strings that are json """ From c38d1435d80ca17d411c6ea83fcf4926f16faf5b Mon Sep 17 00:00:00 2001 From: William Conti <58711692+wconti27@users.noreply.github.com> Date: Thu, 21 Nov 2024 12:39:25 -0500 Subject: [PATCH 08/10] fix api gateway tests (#3528) Co-authored-by: Charles de Beauchesne --- utils/_context/_scenarios/integrations.py | 1 + 1 file changed, 1 insertion(+) diff --git a/utils/_context/_scenarios/integrations.py b/utils/_context/_scenarios/integrations.py index 9d022631e3..dd46d035ad 100644 --- a/utils/_context/_scenarios/integrations.py +++ b/utils/_context/_scenarios/integrations.py @@ -35,6 +35,7 @@ def __init__(self) -> None: "DD_TRACE_SPAN_ATTRIBUTE_SCHEMA": "v1", "AWS_ACCESS_KEY_ID": "my-access-key", "AWS_SECRET_ACCESS_KEY": "my-access-key", + "DD_TRACE_INFERRED_PROXY_SERVICES_ENABLED": "true", }, include_postgres_db=True, include_cassandra_db=True, From 3ce4399b8ccca30a7050b64c6077c6870ea38031 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Thu, 21 Nov 2024 17:13:08 -0500 Subject: [PATCH 09/10] [nodejs] enable crashtracking tests (#3517) --- manifests/nodejs.yml | 4 +++- utils/_context/_scenarios/parametric.py | 4 ++-- utils/build/docker/nodejs/parametric/server.js | 5 +++++ 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/manifests/nodejs.yml b/manifests/nodejs.yml index 3304307730..0d0ed74b6d 100644 --- a/manifests/nodejs.yml +++ b/manifests/nodejs.yml @@ -39,6 +39,7 @@ refs: - &ref_5_24_0 '>=5.24.0 || ^4.48.0' - &ref_5_25_0 '>=5.25.0 || ^4.49.0' - &ref_5_26_0 '>=5.26.0 || ^4.50.0' + - &ref_5_27_0 '>=5.27.0 || ^4.51.0' tests/: apm_tracing_e2e/: @@ -644,7 +645,8 @@ tests/: Test_Config_TraceEnabled: *ref_4_3_0 Test_Config_TraceLogDirectory: missing_feature Test_Config_UnifiedServiceTagging: *ref_5_25_0 - test_crashtracking.py: missing_feature + test_crashtracking.py: + Test_Crashtracking: *ref_5_27_0 test_dynamic_configuration.py: TestDynamicConfigSamplingRules: *ref_5_16_0 TestDynamicConfigTracingEnabled: *ref_5_4_0 diff --git a/utils/_context/_scenarios/parametric.py b/utils/_context/_scenarios/parametric.py index 19e182ef14..679083d0db 100644 --- a/utils/_context/_scenarios/parametric.py +++ b/utils/_context/_scenarios/parametric.py @@ -388,8 +388,8 @@ def node_library_factory() -> APMLibraryTestServer: container_name="node-test-client", container_tag="node-test-client", container_img=f""" -FROM node:18.10-alpine -RUN apk add --no-cache bash curl git jq +FROM node:18.10-slim +RUN apt-get update && apt-get -y install bash curl git jq WORKDIR /usr/app COPY {nodejs_reldir}/../app.sh /usr/app/ RUN printf 'node server.js' >> app.sh diff --git a/utils/build/docker/nodejs/parametric/server.js b/utils/build/docker/nodejs/parametric/server.js index 3e4f7bb08a..f0c67a3100 100644 --- 
a/utils/build/docker/nodejs/parametric/server.js +++ b/utils/build/docker/nodejs/parametric/server.js @@ -71,6 +71,11 @@ app.post('/trace/span/extract_headers', (req, res) => { res.json({ span_id: extractedSpanID }); }); +app.get('/trace/crash', (req, res) => { + process.kill(process.pid, 'SIGSEGV'); + res.json({}); +}); + app.post('/trace/span/start', (req, res) => { const request = req.body; let parent = spans[request.parent_id] || ddContext[request.parent_id]; From 86f272e8ccde244d37b544ca246f78eb229e6c4c Mon Sep 17 00:00:00 2001 From: Oleg Pudeyev <156273877+p-datadog@users.noreply.github.com> Date: Thu, 21 Nov 2024 18:04:02 -0500 Subject: [PATCH 10/10] DEBUG-2334 Ruby DI system tests (#3516) Co-authored-by: Oleg Pudeyev --- tests/debugger/probes/pii_line.json | 18 +++ tests/debugger/test_debugger_pii.py | 52 ++++++--- .../debugger/test_debugger_probe_snapshot.py | 5 +- tests/debugger/test_debugger_probe_status.py | 5 +- tests/debugger/utils.py | 12 +- utils/_remote_config.py | 7 ++ .../app/controllers/debugger_controller.rb | 54 +++++++++ .../ruby/rails70/app/models/base_pii.rb | 7 ++ .../ruby/rails70/app/models/custom_pii.rb | 5 + .../docker/ruby/rails70/app/models/pii.rb | 109 ++++++++++++++++++ .../docker/ruby/rails70/config/routes.rb | 5 + utils/build/docker/ruby/rails70/db/schema.rb | 28 +++++ 12 files changed, 285 insertions(+), 22 deletions(-) create mode 100644 tests/debugger/probes/pii_line.json create mode 100644 utils/build/docker/ruby/rails70/app/controllers/debugger_controller.rb create mode 100644 utils/build/docker/ruby/rails70/app/models/base_pii.rb create mode 100644 utils/build/docker/ruby/rails70/app/models/custom_pii.rb create mode 100644 utils/build/docker/ruby/rails70/app/models/pii.rb create mode 100644 utils/build/docker/ruby/rails70/db/schema.rb diff --git a/tests/debugger/probes/pii_line.json b/tests/debugger/probes/pii_line.json new file mode 100644 index 0000000000..54e8f8002e --- /dev/null +++ b/tests/debugger/probes/pii_line.json @@ -0,0 +1,18 @@ +[ + { + "language": "", + "pii": "", + "id": "log170aa-acda-4453-9111-1478a600line", + "where": { + "typeName": null, + "sourceFile": "ACTUAL_SOURCE_FILE", + "lines": [ + "33" + ] + }, + "captureSnapshot": true, + "capture": { + "maxFieldCount": 200 + } + } +] \ No newline at end of file diff --git a/tests/debugger/test_debugger_pii.py b/tests/debugger/test_debugger_pii.py index 458357618c..599c3b4af1 100644 --- a/tests/debugger/test_debugger_pii.py +++ b/tests/debugger/test_debugger_pii.py @@ -121,8 +121,8 @@ def filter(keys_to_filter): @features.debugger_pii_redaction @scenarios.debugger_pii_redaction class Test_Debugger_PII_Redaction(base._Base_Debugger_Test): - def _setup(self): - probes = base.read_probes("pii") + def _setup(self, probes_file): + probes = base.read_probes(probes_file) self.expected_probe_ids = base.extract_probe_ids(probes) self.rc_state = rc.send_debugger_command(probes, version=1) @@ -130,26 +130,29 @@ def _setup(self): self.weblog_responses = [weblog.get("/debugger/pii")] - def _test(self, redacted_keys, redacted_types): + def _test(self, redacted_keys, redacted_types, line_probe=False): self.assert_all_states_not_error() self.assert_all_probes_are_installed() self.assert_all_weblog_responses_ok() - self._validate_pii_keyword_redaction(redacted_keys) - self._validate_pii_type_redaction(redacted_types) + self._validate_pii_keyword_redaction(redacted_keys, line_probe=line_probe) + self._validate_pii_type_redaction(redacted_types, line_probe=line_probe) def setup_pii_redaction_full(self): - 
self._setup() + self._setup("pii") @missing_feature(context.library < "java@1.34", reason="keywords are not fully redacted") @missing_feature(context.library < "dotnet@2.51", reason="keywords are not fully redacted") @bug(context.library == "python@2.16.0", reason="DEBUG-3127") @bug(context.library == "python@2.16.1", reason="DEBUG-3127") + # Ruby requires @irrelevant rather than @missing_feature to skip setup + # for this test (which will interfere with the line probe test). + @irrelevant(context.library == "ruby", reason="Local variable capture not implemented for method probes") def test_pii_redaction_full(self): self._test(REDACTED_KEYS, REDACTED_TYPES) def setup_pii_redaction_java_1_33(self): - self._setup() + self._setup("pii") @irrelevant(context.library != "java@1.33", reason="not relevant for other version") def test_pii_redaction_java_1_33(self): @@ -170,7 +173,7 @@ def test_pii_redaction_java_1_33(self): ) def setup_pii_redaction_dotnet_2_50(self): - self._setup() + self._setup("pii") @irrelevant(context.library != "dotnet@2.50", reason="not relevant for other version") @bug( @@ -179,7 +182,14 @@ def setup_pii_redaction_dotnet_2_50(self): def test_pii_redaction_dotnet_2_50(self): self._test(filter(["applicationkey", "connectionstring"]), REDACTED_TYPES) - def _validate_pii_keyword_redaction(self, should_redact_field_names): + def setup_pii_redaction_line(self): + self._setup("pii_line") + + @irrelevant(context.library != "ruby", reason="Ruby needs to use line probes to capture variables") + def test_pii_redaction_line(self): + self._test(REDACTED_KEYS, REDACTED_TYPES, True) + + def _validate_pii_keyword_redaction(self, should_redact_field_names, line_probe=False): agent_logs_endpoint_requests = list(interfaces.agent.get_data(path_filters="/api/v2/logs")) not_redacted = [] not_found = list(set(should_redact_field_names)) @@ -193,12 +203,21 @@ def _validate_pii_keyword_redaction(self, should_redact_field_names): if snapshot: for field_name in should_redact_field_names: - fields = snapshot["captures"]["return"]["locals"]["pii"]["fields"] - - if field_name in fields: + if line_probe: + fields = snapshot["captures"]["lines"]["33"]["locals"]["pii"]["fields"] + else: + fields = snapshot["captures"]["return"]["locals"]["pii"]["fields"] + + # Ruby prefixes instance variable names with @ + if context.library == "ruby": + check_field_name = "@" + field_name + else: + check_field_name = field_name + + if check_field_name in fields: not_found.remove(field_name) - if "value" in fields[field_name]: + if "value" in fields[check_field_name]: not_redacted.append(field_name) error_message = "" if not_redacted: @@ -212,7 +231,7 @@ def _validate_pii_keyword_redaction(self, should_redact_field_names): if error_message != "": raise ValueError(error_message) - def _validate_pii_type_redaction(self, should_redact_types): + def _validate_pii_type_redaction(self, should_redact_types, line_probe=False): agent_logs_endpoint_requests = list(interfaces.agent.get_data(path_filters="/api/v2/logs")) not_redacted = [] @@ -225,7 +244,10 @@ def _validate_pii_type_redaction(self, should_redact_types): if snapshot: for type_name in should_redact_types: - type_info = snapshot["captures"]["return"]["locals"][type_name] + if line_probe: + type_info = snapshot["captures"]["lines"]["33"]["locals"][type_name] + else: + type_info = snapshot["captures"]["return"]["locals"][type_name] if "fields" in type_info: not_redacted.append(type_name) diff --git a/tests/debugger/test_debugger_probe_snapshot.py 
b/tests/debugger/test_debugger_probe_snapshot.py index b279f2e129..f22d5f65c7 100644 --- a/tests/debugger/test_debugger_probe_snapshot.py +++ b/tests/debugger/test_debugger_probe_snapshot.py @@ -4,7 +4,7 @@ import tests.debugger.utils as base -from utils import scenarios, interfaces, weblog, features, remote_config as rc, bug +from utils import scenarios, interfaces, weblog, features, remote_config as rc, bug, missing_feature, context @features.debugger @@ -41,6 +41,7 @@ def setup_span_method_probe_snaphots(self): ] @bug(library="python", reason="DEBUG-2708, DEBUG-2709") + @missing_feature(context.library == "ruby", reason="Not yet implemented") def test_span_method_probe_snaphots(self): self.assert_all_states_not_error() self.assert_all_probes_are_installed() @@ -61,6 +62,7 @@ def setup_span_decoration_method_probe_snaphots(self): ] @bug(library="python", reason="DEBUG-2708, DEBUG-2709") + @missing_feature(context.library == "ruby", reason="Not yet implemented") def test_span_decoration_method_probe_snaphots(self): self.assert_all_states_not_error() self.assert_all_probes_are_installed() @@ -105,6 +107,7 @@ def setup_span_decoration_line_probe_snaphots(self): weblog.get("/debugger/span-decoration/asd/1"), ] + @missing_feature(context.library == "ruby", reason="Not yet implemented") def test_span_decoration_line_probe_snaphots(self): self.assert_all_states_not_error() self.assert_all_probes_are_installed() diff --git a/tests/debugger/test_debugger_probe_status.py b/tests/debugger/test_debugger_probe_status.py index dd55ef9ef8..b4300094ca 100644 --- a/tests/debugger/test_debugger_probe_status.py +++ b/tests/debugger/test_debugger_probe_status.py @@ -4,7 +4,7 @@ import tests.debugger.utils as base -from utils import scenarios, features, remote_config as rc, bug, context +from utils import weblog, scenarios, features, remote_config as rc, bug, context, missing_feature @features.debugger @@ -40,6 +40,7 @@ def setup_probe_status_metric(self): @bug(context.library == "python@2.16.0", reason="DEBUG-3127") @bug(context.library == "python@2.16.1", reason="DEBUG-3127") + @missing_feature(context.library == "ruby", reason="Not yet implemented") def test_probe_status_metric(self): self._assert() @@ -49,6 +50,7 @@ def setup_probe_status_span(self): self._setup(probes) + @missing_feature(context.library == "ruby", reason="Not yet implemented") def test_probe_status_span(self): self._assert() @@ -60,6 +62,7 @@ def setup_probe_status_spandecoration(self): @bug(context.library == "python@2.16.0", reason="DEBUG-3127") @bug(context.library == "python@2.16.1", reason="DEBUG-3127") + @missing_feature(context.library == "ruby", reason="Not yet implemented") def test_probe_status_spandecoration(self): self._assert() diff --git a/tests/debugger/utils.py b/tests/debugger/utils.py index 6f43f1d551..74305a14bc 100644 --- a/tests/debugger/utils.py +++ b/tests/debugger/utils.py @@ -15,7 +15,7 @@ from utils.dd_constants import RemoteConfigApplyState as ApplyState _CONFIG_PATH = "/v0.7/config" -_DEBUGER_PATH = "/api/v2/debugger" +_DEBUGGER_PATH = "/api/v2/debugger" _LOGS_PATH = "/api/v2/logs" _TRACES_PATH = "/api/v0.2/traces" @@ -51,16 +51,18 @@ def read_diagnostic_data(): tracer_version = version.parse(re.sub(r"[^0-9.].*$", "", tracer["tracer_version"])) if tracer["language"] == "java": if tracer_version > version.parse("1.27.0"): - path = _DEBUGER_PATH + path = _DEBUGGER_PATH else: path = _LOGS_PATH elif tracer["language"] == "dotnet": if tracer_version > version.parse("2.49.0"): - path = _DEBUGER_PATH + path = 
_DEBUGGER_PATH else: path = _LOGS_PATH elif tracer["language"] == "python": - path = _DEBUGER_PATH + path = _DEBUGGER_PATH + elif tracer["language"] == "ruby": + path = _DEBUGGER_PATH else: path = _LOGS_PATH @@ -133,7 +135,7 @@ def _all_probes_installed(self, probes_map): return False if not self.all_probes_installed: - if data["path"] == _DEBUGER_PATH or data["path"] == _LOGS_PATH: + if data["path"] == _DEBUGGER_PATH or data["path"] == _LOGS_PATH: self.all_probes_installed = _all_probes_installed(self, get_probes_map([data])) return self.all_probes_installed diff --git a/utils/_remote_config.py b/utils/_remote_config.py index ef2ee40d77..3486d04bb2 100644 --- a/utils/_remote_config.py +++ b/utils/_remote_config.py @@ -226,6 +226,11 @@ def _get_probe_type(probe_id): probe["where"]["methodName"] = re.sub( r"([a-z])([A-Z])", r"\1_\2", probe["where"]["methodName"] ).lower() + elif library_name == "ruby": + probe["where"]["typeName"] = "DebuggerController" + probe["where"]["methodName"] = re.sub( + r"([a-z])([A-Z])", r"\1_\2", probe["where"]["methodName"] + ).lower() elif probe["where"]["sourceFile"] == "ACTUAL_SOURCE_FILE": if library_name == "dotnet": probe["where"]["sourceFile"] = "DebuggerController.cs" @@ -233,6 +238,8 @@ def _get_probe_type(probe_id): probe["where"]["sourceFile"] = "DebuggerController.java" elif library_name == "python": probe["where"]["sourceFile"] = "debugger_controller.py" + elif library_name == "ruby": + probe["where"]["sourceFile"] = "debugger_controller.rb" logger.debug(f"RC probe is:\n{json.dumps(probe, indent=2)}") probe_type = _get_probe_type(probe["id"]) diff --git a/utils/build/docker/ruby/rails70/app/controllers/debugger_controller.rb b/utils/build/docker/ruby/rails70/app/controllers/debugger_controller.rb new file mode 100644 index 0000000000..9e5120d79c --- /dev/null +++ b/utils/build/docker/ruby/rails70/app/controllers/debugger_controller.rb @@ -0,0 +1,54 @@ +# Padding +# Padding +# Padding +# Padding + +class DebuggerController < ActionController::Base + def init + # This method does nothing. + # When the endpoint corresponding to it is invoked however, + # the middleware installed by dd-trace-rb initializes remote configuration. + render inline: 'debugger init' + end + + # Padding + # Padding + # Padding + # Padding + + def log_probe + render inline: 'Log probe' # This needs to be line 20 + end + + # Padding + # Padding + # Padding + # Padding + + def pii + pii = Pii.new + customPii = CustomPii.new + value = pii.test_value + custom_value = customPii.test_value + render inline: "PII #{value}. 
CustomPII #{custom_value}" # must be line 33 + end + + # Padding + # Padding + # Padding + # Padding + # Padding + # Padding + # Padding + # Padding + # Padding + # Padding + # Padding + # Padding + # Padding + + def mix_probe + value = params[:string_arg].length * Integer(params[:int_arg]) + render inline: "Mixed result #{value}" # must be line 52 + end +end diff --git a/utils/build/docker/ruby/rails70/app/models/base_pii.rb b/utils/build/docker/ruby/rails70/app/models/base_pii.rb new file mode 100644 index 0000000000..eedf66c48b --- /dev/null +++ b/utils/build/docker/ruby/rails70/app/models/base_pii.rb @@ -0,0 +1,7 @@ +class BasePii + def initialize + @test_value = 'should be redacted' + end + + attr_reader :test_value +end diff --git a/utils/build/docker/ruby/rails70/app/models/custom_pii.rb b/utils/build/docker/ruby/rails70/app/models/custom_pii.rb new file mode 100644 index 0000000000..234112fdc8 --- /dev/null +++ b/utils/build/docker/ruby/rails70/app/models/custom_pii.rb @@ -0,0 +1,5 @@ +class CustomPii < BasePii + def initialize + @custom_key = 'should be redacted' + end +end diff --git a/utils/build/docker/ruby/rails70/app/models/pii.rb b/utils/build/docker/ruby/rails70/app/models/pii.rb new file mode 100644 index 0000000000..fc303a2c0b --- /dev/null +++ b/utils/build/docker/ruby/rails70/app/models/pii.rb @@ -0,0 +1,109 @@ + +# Copied from test_debugger_pii.py + +REDACTED_KEYS = [ + "_2fa", + "accesstoken", + "access_token", + "Access_Token", + "accessToken", + "AccessToken", + "ACCESSTOKEN", + "aiohttpsession", + "apikey", + "apisecret", + "apisignature", + "applicationkey", + "auth", + "authorization", + "authtoken", + "ccnumber", + "certificatepin", + "cipher", + "clientid", + "clientsecret", + "connectionstring", + "connectsid", + "cookie", + "credentials", + "creditcard", + "csrf", + "csrftoken", + "cvv", + "databaseurl", + "dburl", + "encryptionkey", + "encryptionkeyid", + "env", + "geolocation", + "gpgkey", + "ipaddress", + "jti", + "jwt", + "licensekey", + "masterkey", + "mysqlpwd", + "nonce", + "oauth", + "oauthtoken", + "otp", + "passhash", + "passwd", + "password", + "passwordb", + "pemfile", + "pgpkey", + "phpsessid", + "pin", + "pincode", + "pkcs8", + "privatekey", + "publickey", + "pwd", + "recaptchakey", + "refreshtoken", + "routingnumber", + "salt", + "secret", + "secretkey", + "secrettoken", + "securityanswer", + "securitycode", + "securityquestion", + "serviceaccountcredentials", + "session", + "sessionid", + "sessionkey", + "setcookie", + "signature", + "signaturekey", + "sshkey", + "ssn", + "symfony", + "token", + "transactionid", + "twiliotoken", + "usersession", + "voterid", + "xapikey", + "xauthtoken", + "xcsrftoken", + "xforwardedfor", + "xrealip", + "xsrf", + "xsrftoken", + "customidentifier1", + "customidentifier2", +] + +REDACTED_TYPES = ["customPii"] + +VALUE = "SHOULD_BE_REDACTED" + +class Pii < BasePii + def initialize + REDACTED_KEYS.each do |key| + instance_variable_set("@#{key}", VALUE) + end + end +end diff --git a/utils/build/docker/ruby/rails70/config/routes.rb b/utils/build/docker/ruby/rails70/config/routes.rb index 22c3f8c126..5624e4b3e1 100644 --- a/utils/build/docker/ruby/rails70/config/routes.rb +++ b/utils/build/docker/ruby/rails70/config/routes.rb @@ -47,4 +47,9 @@ get '/requestdownstream' => 'system_test#request_downstream' get '/returnheaders' => 'system_test#return_headers' + + get '/debugger/init' => 'debugger#init' + get '/debugger/pii' => 'debugger#pii' + get '/debugger/log' => 'debugger#log_probe' + get 
'/debugger/mix/:string_arg/:int_arg' => 'debugger#mix_probe' end diff --git a/utils/build/docker/ruby/rails70/db/schema.rb b/utils/build/docker/ruby/rails70/db/schema.rb new file mode 100644 index 0000000000..47ee9dc592 --- /dev/null +++ b/utils/build/docker/ruby/rails70/db/schema.rb @@ -0,0 +1,28 @@ +# This file is auto-generated from the current state of the database. Instead +# of editing this file, please use the migrations feature of Active Record to +# incrementally modify your database, and then regenerate this schema definition. +# +# This file is the source Rails uses to define your schema when running `bin/rails +# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to +# be faster and is potentially less error prone than running all of your +# migrations from scratch. Old migrations may fail to apply correctly if those +# migrations use external dependencies or application code. +# +# It's strongly recommended that you check this file into your version control system. + +ActiveRecord::Schema.define(version: 2023_06_21_141816) do + + create_table "users", id: :string, force: :cascade do |t| + t.string "username", default: "", null: false + t.string "email", default: "", null: false + t.string "encrypted_password", default: "", null: false + t.string "reset_password_token" + t.datetime "reset_password_sent_at", precision: 6 + t.datetime "remember_created_at", precision: 6 + t.datetime "created_at", precision: 6, null: false + t.datetime "updated_at", precision: 6, null: false + t.index ["email"], name: "index_users_on_email", unique: true + t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true + end + +end
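
The line-probe variant of the PII test added in this patch reads captured locals from `captures.lines."33"` (instead of `captures.return`), expects Ruby instance variables to carry a leading `@`, and treats a field without a `value` entry as redacted. A hand-written sketch of that snapshot shape and the corresponding checks is shown below; the concrete field contents are illustrative, not a real agent payload.

```python
# Illustrative snapshot fragment for a line probe on line 33 of debugger_controller.rb.
# Redacted keyword fields keep their name (with Ruby's "@" prefix) but carry no "value";
# a type redacted as a whole (customPii) is reported without a "fields" entry.
snapshot = {
    "captures": {
        "lines": {
            "33": {
                "locals": {
                    "pii": {"fields": {"@password": {"type": "String"}}},
                    "customPii": {"type": "CustomPii"},
                }
            }
        }
    }
}

locals_ = snapshot["captures"]["lines"]["33"]["locals"]
fields = locals_["pii"]["fields"]
assert "@password" in fields and "value" not in fields["@password"]  # keyword redaction
assert "fields" not in locals_["customPii"]  # type redaction
```
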