From 366f8d36be57f2d82dde2347903ce1e019fc830c Mon Sep 17 00:00:00 2001
From: Mikayla Toffler <46911781+mtoffl01@users.noreply.github.com>
Date: Tue, 17 Sep 2024 11:21:10 -0400
Subject: [PATCH 1/3] Fix typo in parametric.md

---
 docs/scenarios/parametric.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/scenarios/parametric.md b/docs/scenarios/parametric.md
index 8d72595ba2..5c2859eb98 100644
--- a/docs/scenarios/parametric.md
+++ b/docs/scenarios/parametric.md
@@ -122,7 +122,7 @@ Clone the repo:
 git clone git@github.com:DataDog/dd-trace-java.git
 cd dd-trace-java
 ```
-By default you will be on the `master` branch, but if you'd like to run system-tests on the changes you made to your local branch, `gitc checkout` to that branch.
+By default you will be on the `master` branch, but if you'd like to run system-tests on the changes you made to your local branch, `git checkout` to that branch before proceeding.
 
 2. Build Java Tracer artifacts
 ```

From 53fe59b401a1234993e76492a96f0ac14aedacc5 Mon Sep 17 00:00:00 2001
From: Laplie Anderson
Date: Wed, 18 Sep 2024 11:06:25 -0400
Subject: [PATCH 2/3] Configure Kind in Gitlab to allow kubernetes injection tests to run (#3057)

* implement logic to allow KinD to work in gitlab
* try getting the container id directly
* wrap with bash?
* escape escape
* Use json in python to get all the info
* python doesn't handle bash piping too well
* correct decoding of the json
* more fixups
* $HOME is not resolved
* better the '/root'
* extract to function
* forgot a couple words
* print out config
* add debug info
* add more debugging
* fix typo
* log running pods
* test test-agent 1.16.0
* restore tag
* pull if not present
* restore pull policy
* debug locally
* deploy app
* fix
* fix local
* debug local
* no stop cluster
* use internal dns to access to dev test agent
* debug traces for gitlab patch
* test
* fix agent port
* test manual inject
* fix ports
* fix
* enable all tests
* destroy cluster after
* keep network
* debug network connection
* disable kind network policies
* restore
* diable tests
* no pull images
* load local image into cluster
* no helm
* revert helm charts
* no destroy cluster
* connect kind containers to bridget network
* revert change
* restore by default
* test only helm
* disable kube proxty
* disable kube proxty
* test
* kubeproxy
* pod subnet
* connect kind cluster
* pull offline
* helm offline
* cluster agent offline
* preload webapp
* pull policy never
* enable all tests
* run one by one
* activate more tets
* run one tests
* test admission controller only
* test uds
* uds pull policy never
* enable two tests
* cluster agent traces
* change interfaces sync
* fix command sync
* fix command sync
* enable all tests
* datadog kubernetes
* fix merge
* enable all
* offline mode
* helm chart offline mode file pattern
* datadog helm offline
* Remove offline-mode, rework setup
* remove some unintended changes
* some debug info.
Fix sed * use formatting instead of a loop to get network info * strip() to remove whitespace * remove debug logs * merge and other fixes * formatting * text and variable name changes --------- Co-authored-by: roberto montero --- .../test_k8s_manual_inject.py | 80 ++++++++++--------- utils/k8s_lib_injection/k8s_command_utils.py | 12 ++- utils/k8s_lib_injection/k8s_kind_cluster.py | 76 +++++++++++++++--- 3 files changed, 117 insertions(+), 51 deletions(-) diff --git a/tests/k8s_lib_injection/test_k8s_manual_inject.py b/tests/k8s_lib_injection/test_k8s_manual_inject.py index b3e4b958cb..8804134f74 100644 --- a/tests/k8s_lib_injection/test_k8s_manual_inject.py +++ b/tests/k8s_lib_injection/test_k8s_manual_inject.py @@ -13,50 +13,52 @@ class _TestAdmisionController: def test_inject_admission_controller(self, test_k8s_instance): logger.info( - f"Launching test _test_inject_admission_controller: Weblog: [{test_k8s_instance.k8s_kind_cluster.weblog_port}] Agent: [{test_k8s_instance.k8s_kind_cluster.agent_port}]" + f"Launching test _test_inject_admission_controller: Weblog: [{test_k8s_instance.k8s_kind_cluster.get_weblog_port()}] Agent: [{test_k8s_instance.k8s_kind_cluster.get_agent_port()}]" ) test_k8s_instance.deploy_test_agent() test_k8s_instance.deploy_datadog_cluster_agent() test_k8s_instance.deploy_weblog_as_pod() - traces_json = self._get_dev_agent_traces(test_k8s_instance.k8s_kind_cluster.agent_port) + traces_json = self._get_dev_agent_traces(test_k8s_instance.k8s_kind_cluster) assert len(traces_json) > 0, "No traces found" logger.info(f"Test _test_inject_admission_controller finished") def test_inject_uds_admission_controller(self, test_k8s_instance): logger.info( - f"Launching test test_inject_uds_admission_controller: Weblog: [{test_k8s_instance.k8s_kind_cluster.weblog_port}] Agent: [{test_k8s_instance.k8s_kind_cluster.agent_port}]" + f"Launching test test_inject_uds_admission_controller: Weblog: [{test_k8s_instance.k8s_kind_cluster.get_weblog_port()}] Agent: [{test_k8s_instance.k8s_kind_cluster.get_agent_port()}]" ) test_k8s_instance.deploy_test_agent() test_k8s_instance.deploy_datadog_cluster_agent(use_uds=True) test_k8s_instance.deploy_weblog_as_pod() - traces_json = self._get_dev_agent_traces(test_k8s_instance.k8s_kind_cluster.agent_port) + traces_json = self._get_dev_agent_traces(test_k8s_instance.k8s_kind_cluster) assert len(traces_json) > 0, "No traces found" logger.info(f"Test test_inject_uds_admission_controller finished") def test_inject_without_admission_controller(self, test_k8s_instance): logger.info( - f"Launching test _test_inject_without_admission_controller: Weblog: [{test_k8s_instance.k8s_kind_cluster.weblog_port}] Agent: [{test_k8s_instance.k8s_kind_cluster.agent_port}]" + f"Launching test _test_inject_without_admission_controller: Weblog: [{test_k8s_instance.k8s_kind_cluster.get_weblog_port()}] Agent: [{test_k8s_instance.k8s_kind_cluster.get_agent_port()}]" ) test_k8s_instance.deploy_test_agent() test_k8s_instance.deploy_weblog_as_pod(with_admission_controller=False) - traces_json = self._get_dev_agent_traces(test_k8s_instance.k8s_kind_cluster.agent_port) + traces_json = self._get_dev_agent_traces(test_k8s_instance.k8s_kind_cluster) assert len(traces_json) > 0, "No traces found" logger.info(f"Test _test_inject_without_admission_controller finished") def test_inject_uds_without_admission_controller(self, test_k8s_instance): logger.info( - f"Launching test test_inject_uds_without_admission_controller: Weblog: [{test_k8s_instance.k8s_kind_cluster.weblog_port}] Agent: 
[{test_k8s_instance.k8s_kind_cluster.agent_port}]" + f"Launching test test_inject_uds_without_admission_controller: Weblog: [{test_k8s_instance.k8s_kind_cluster.get_weblog_port()}] Agent: [{test_k8s_instance.k8s_kind_cluster.get_agent_port()}]" ) test_k8s_instance.deploy_test_agent() test_k8s_instance.deploy_weblog_as_pod(with_admission_controller=False, use_uds=True) - traces_json = self._get_dev_agent_traces(test_k8s_instance.k8s_kind_cluster.agent_port) + traces_json = self._get_dev_agent_traces(test_k8s_instance.k8s_kind_cluster) assert len(traces_json) > 0, "No traces found" logger.info(f"Test test_inject_uds_without_admission_controller finished") - def _get_dev_agent_traces(self, agent_port, retry=10): + def _get_dev_agent_traces(self, k8s_kind_cluster, retry=10): for _ in range(retry): logger.info(f"[Check traces] Checking traces:") - response = requests.get(f"http://localhost:{agent_port}/test/traces") + response = requests.get( + f"http://{k8s_kind_cluster.cluster_host_name}:{k8s_kind_cluster.get_agent_port()}/test/traces" + ) traces_json = response.json() if len(traces_json) > 0: logger.debug(f"Test traces response: {traces_json}") @@ -73,7 +75,7 @@ class _TestAdmisionControllerAsm: def test_inject_asm_admission_controller(self, test_k8s_instance): logger.info( - f"Launching test test_inject_asm_admission_controller: Weblog: [{test_k8s_instance.k8s_kind_cluster.weblog_port}] Agent: [{test_k8s_instance.k8s_kind_cluster.agent_port}]" + f"Launching test test_inject_asm_admission_controller: Weblog: [{test_k8s_instance.k8s_kind_cluster.get_weblog_port()}] Agent: [{test_k8s_instance.k8s_kind_cluster.get_agent_port()}]" ) asm_features = { @@ -84,15 +86,16 @@ def test_inject_asm_admission_controller(self, test_k8s_instance): test_k8s_instance.deploy_datadog_cluster_agent(features=asm_features) test_k8s_instance.deploy_agent() - weblog_port = test_k8s_instance.k8s_kind_cluster.weblog_port - logger.info(f"Waiting for weblog available [localhost:{weblog_port}]") - wait_for_port(weblog_port, "localhost", 80.0) - logger.info(f"[localhost:{weblog_port}]: Weblog app is ready!") - warmup_weblog(f"http://localhost:{weblog_port}/") - logger.info(f"Making a request to weblog [localhost:{weblog_port}]") - request_uuid = make_get_request(f"http://localhost:{weblog_port}/") + weblog_port = test_k8s_instance.k8s_kind_cluster.get_weblog_port() + weblog_host = test_k8s_instance.k8s_kind_cluster.cluster_host_name + logger.info(f"Waiting for weblog available [{weblog_host}:{weblog_port}]") + wait_for_port(weblog_port, weblog_host, 80.0) + logger.info(f"[{weblog_host}:{weblog_port}]: Weblog app is ready!") + warmup_weblog(f"http://{weblog_host}:{weblog_port}/") + logger.info(f"Making a request to weblog [{weblog_host}:{weblog_port}]") + request_uuid = make_get_request(f"http://{weblog_host}:{weblog_port}/") - logger.info(f"Http request done with uuid: [{request_uuid}] for [localhost:{weblog_port}]") + logger.info(f"Http request done with uuid: [{request_uuid}] for [{weblog_host}:{weblog_port}]") wait_backend_trace_id(request_uuid, 120.0, profile=False, validator=backend_trace_validator) @@ -101,13 +104,15 @@ def test_inject_asm_admission_controller(self, test_k8s_instance): class TestAdmisionControllerProfiling: """Test profiling activation with the admission controller.""" - def _check_profiling_request_sent(self, agent_port, timeout=90): + def _check_profiling_request_sent(self, k8s_kind_cluster, timeout=90): """ Use test agent profiling endpoint to check if the profiling data has been sent by the 
injected library.
        Checks the request made to the profiling endpoint (/profiling/v1/input).
        The profiling post data can take between 12 and 90 seconds (12 if the library supports both env vars, 90 if it supports neither).
        """
         mustend = time.time() + timeout
         while time.time() < mustend:
-            response = requests.get(f"http://localhost:{agent_port}/test/session/requests")
+            response = requests.get(
+                f"http://{k8s_kind_cluster.cluster_host_name}:{k8s_kind_cluster.get_agent_port()}/test/session/requests"
+            )
             for request in response.json():
                 if request["url"].endswith("/profiling/v1/input"):
                     return True
@@ -117,7 +122,7 @@ def _check_profiling_request_sent(self, agent_port, timeout=90):
     def test_profiling_disabled_by_default(self, test_k8s_instance):
         logger.info(f"Launching test test_profiling_disabled_by_default")
         logger.info(
-            f": Weblog: [{test_k8s_instance.k8s_kind_cluster.weblog_port}] Agent: [{test_k8s_instance.k8s_kind_cluster.agent_port}]"
+            f": Weblog: [{test_k8s_instance.k8s_kind_cluster.get_weblog_port()}] Agent: [{test_k8s_instance.k8s_kind_cluster.get_agent_port()}]"
         )
         test_k8s_instance.deploy_test_agent()
         test_k8s_instance.deploy_datadog_cluster_agent()
@@ -125,28 +130,28 @@ def test_profiling_disabled_by_default(self, test_k8s_instance):
         test_k8s_instance.deploy_weblog_as_pod(
             env={"DD_PROFILING_UPLOAD_PERIOD": "10", "DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD": "1500"}
         )
-        profiling_request_found = self._check_profiling_request_sent(test_k8s_instance.k8s_kind_cluster.agent_port)
+        profiling_request_found = self._check_profiling_request_sent(test_k8s_instance.k8s_kind_cluster)
         assert not profiling_request_found, "Profiling should be disabled by default, but a profiling request was found"

     @bug(context.library > "python@2.12.2", reason="APMON-1496")
     def test_profiling_admission_controller(self, test_k8s_instance):
         logger.info(f"Launching test test_profiling_admission_controller")
         logger.info(
-            f": Weblog: [{test_k8s_instance.k8s_kind_cluster.weblog_port}] Agent: [{test_k8s_instance.k8s_kind_cluster.agent_port}]"
+            f": Weblog: [{test_k8s_instance.k8s_kind_cluster.get_weblog_port()}] Agent: [{test_k8s_instance.k8s_kind_cluster.get_agent_port()}]"
         )
         test_k8s_instance.deploy_test_agent()
         test_k8s_instance.deploy_datadog_cluster_agent(features={"datadog.profiling.enabled": "auto"})
         test_k8s_instance.deploy_weblog_as_pod(
             env={"DD_PROFILING_UPLOAD_PERIOD": "10", "DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD": "1500"}
         )
-        profiling_request_found = self._check_profiling_request_sent(test_k8s_instance.k8s_kind_cluster.agent_port)
+        profiling_request_found = self._check_profiling_request_sent(test_k8s_instance.k8s_kind_cluster)
         assert profiling_request_found, "No profiling request found"

     @bug(context.library > "python@2.12.2", reason="APMON-1496")
     def test_profiling_override_cluster_env(self, test_k8s_instance):
         logger.info(f"Launching test test_profiling_override_cluster_env")
         logger.info(
-            f": Weblog: [{test_k8s_instance.k8s_kind_cluster.weblog_port}] Agent: [{test_k8s_instance.k8s_kind_cluster.agent_port}]"
+            f": Weblog: [{test_k8s_instance.k8s_kind_cluster.get_weblog_port()}] Agent: [{test_k8s_instance.k8s_kind_cluster.get_agent_port()}]"
         )
         cluster_agent_config = {
             "clusterAgent.env[0].name": "DD_ADMISSION_CONTROLLER_AUTO_INSTRUMENTATION_PROFILING_ENABLED",
@@ -157,12 +162,12 @@ def test_profiling_override_cluster_env(self, test_k8s_instance):
         test_k8s_instance.deploy_weblog_as_pod(
             env={"DD_PROFILING_UPLOAD_PERIOD": "10", "DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD": "1500"}
         )
-
profiling_request_found = self._check_profiling_request_sent(test_k8s_instance.k8s_kind_cluster.agent_port) + profiling_request_found = self._check_profiling_request_sent(test_k8s_instance.k8s_kind_cluster) assert profiling_request_found, "No profiling request found" def _test_inject_profiling_admission_controller_real(self, test_k8s_instance): logger.info( - f"Launching test test_inject_profiling_admission_controller: Weblog: [{test_k8s_instance.k8s_kind_cluster.weblog_port}] Agent: [{test_k8s_instance.k8s_kind_cluster.agent_port}]" + f"Launching test test_inject_profiling_admission_controller: Weblog: [{test_k8s_instance.k8s_kind_cluster.get_weblog_port()}] Agent: [{test_k8s_instance.k8s_kind_cluster.get_agent_port()}]" ) test_k8s_instance.deploy_datadog_cluster_agent(features={"datadog.profiling.enabled": "auto"}) @@ -170,15 +175,16 @@ def _test_inject_profiling_admission_controller_real(self, test_k8s_instance): test_k8s_instance.deploy_weblog_as_pod( env={"DD_PROFILING_UPLOAD_PERIOD": "10", "DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD": "1500"} ) - weblog_port = test_k8s_instance.k8s_kind_cluster.weblog_port - logger.info(f"Waiting for weblog available [localhost:{weblog_port}]") - wait_for_port(weblog_port, "localhost", 80.0) - logger.info(f"[localhost:{weblog_port}]: Weblog app is ready!") - warmup_weblog(f"http://localhost:{weblog_port}/") - logger.info(f"Making a request to weblog [localhost:{weblog_port}]") - request_uuid = make_get_request(f"http://localhost:{weblog_port}/") - - logger.info(f"Http request done with uuid: [{request_uuid}] for [localhost:{weblog_port}]") + weblog_port = test_k8s_instance.k8s_kind_cluster.get_weblog_port() + weblog_host = test_k8s_instance.k8s_kind_cluster.cluster_host_name + logger.info(f"Waiting for weblog available [{weblog_host}:{weblog_port}]") + wait_for_port(weblog_port, weblog_host, 80.0) + logger.info(f"[{weblog_host}:{weblog_port}]: Weblog app is ready!") + warmup_weblog(f"http://{weblog_host}:{weblog_port}/") + logger.info(f"Making a request to weblog [{weblog_host}:{weblog_port}]") + request_uuid = make_get_request(f"http://{weblog_host}:{weblog_port}/") + + logger.info(f"Http request done with uuid: [{request_uuid}] for [{weblog_host}:{weblog_port}]") wait_backend_trace_id(request_uuid, 120.0, profile=True) diff --git a/utils/k8s_lib_injection/k8s_command_utils.py b/utils/k8s_lib_injection/k8s_command_utils.py index 52622f869b..1d9523f4cd 100644 --- a/utils/k8s_lib_injection/k8s_command_utils.py +++ b/utils/k8s_lib_injection/k8s_command_utils.py @@ -1,11 +1,11 @@ -import subprocess, datetime, os, time, signal +import subprocess, datetime, os, time, signal, shlex from utils.tools import logger from utils import context from utils.k8s_lib_injection.k8s_sync_kubectl import KubectlLock from retry import retry -def execute_command(command, timeout=None, logfile=None): +def execute_command(command, timeout=None, logfile=None, subprocess_env=None): """call shell-command and either return its output or kill it if it doesn't normally exit within timeout seconds and return None""" applied_timeout = 90 @@ -16,10 +16,16 @@ def execute_command(command, timeout=None, logfile=None): command_out_redirect = subprocess.PIPE if logfile: command_out_redirect = open(logfile, "w") + + if not subprocess_env: + subprocess_env = os.environ.copy() + output = "" try: start = datetime.datetime.now() - process = subprocess.Popen(command.split(), stdout=command_out_redirect, stderr=command_out_redirect) + process = subprocess.Popen( + shlex.split(command), 
stdout=command_out_redirect, stderr=command_out_redirect, env=subprocess_env + ) while process.poll() is None: time.sleep(0.1) diff --git a/utils/k8s_lib_injection/k8s_kind_cluster.py b/utils/k8s_lib_injection/k8s_kind_cluster.py index 3e18ec969f..9688f4b112 100644 --- a/utils/k8s_lib_injection/k8s_kind_cluster.py +++ b/utils/k8s_lib_injection/k8s_kind_cluster.py @@ -5,7 +5,7 @@ import tempfile from uuid import uuid4 -from utils.k8s_lib_injection.k8s_command_utils import execute_command +from utils.k8s_lib_injection.k8s_command_utils import execute_command, execute_command_sync from utils.tools import logger from utils import context @@ -21,7 +21,7 @@ def ensure_cluster(): def _ensure_cluster(): k8s_kind_cluster = K8sKindCluster() - k8s_kind_cluster.confiure_ports() + k8s_kind_cluster.configure_networking(docker_in_docker="GITLAB_CI" in os.environ) kind_data = "" with open("utils/k8s_lib_injection/resources/kind-config-template.yaml", "r") as file: @@ -35,11 +35,18 @@ def _ensure_cluster(): with open(cluster_config, "w") as fp: fp.write(kind_data) fp.seek(0) - execute_command( - f"kind create cluster --image=kindest/node:v1.25.3@sha256:f52781bc0d7a19fb6c405c2af83abfeb311f130707a0e219175677e366cc45d1 --name {k8s_kind_cluster.cluster_name} --config {cluster_config} --wait 1m" - ) - # time.sleep(20) + kind_command = f"kind create cluster --image=kindest/node:v1.25.3@sha256:f52781bc0d7a19fb6c405c2af83abfeb311f130707a0e219175677e366cc45d1 --name {k8s_kind_cluster.cluster_name} --config {cluster_config} --wait 1m" + + if "GITLAB_CI" in os.environ: + # Kind needs to run in bridge network to communicate with the internet: https://github.com/DataDog/buildenv/blob/master/cookbooks/dd_firewall/templates/rules.erb#L96 + new_env = os.environ.copy() + new_env["KIND_EXPERIMENTAL_DOCKER_NETWORK"] = "bridge" + execute_command(kind_command, subprocess_env=new_env) + + setup_kind_in_gitlab(k8s_kind_cluster) + else: + execute_command(kind_command) return k8s_kind_cluster @@ -49,6 +56,37 @@ def destroy_cluster(k8s_kind_cluster): execute_command(f"docker rm -f {k8s_kind_cluster.cluster_name}-control-plane") +def setup_kind_in_gitlab(k8s_kind_cluster): + # The build runs in a docker container: + # - Docker commands are forwarded to the host. 
+ # - The kind container is a sibling to the build container + # Three things need to happen + # 1) The kind container needs to be in the bridge network to communicate with the internet: done in _ensure_cluster() + # 2) Kube config needs to be altered to use the correct IP of the control plane server + # 3) The internal ports needs to be used rather than external ports: handled in get_agent_port() and get_weblog_port() + correct_control_plane_ip = execute_command( + f"docker container inspect {k8s_kind_cluster.cluster_name}-control-plane --format '{{{{.NetworkSettings.Networks.bridge.IPAddress}}}}'" + ).strip() + if not correct_control_plane_ip: + raise Exception("Unable to find correct control plane IP") + logger.debug(f"[setup_kind_in_gitlab] correct_control_plane_ip: {correct_control_plane_ip}") + + control_plane_address_in_config = execute_command( + f'docker container inspect {k8s_kind_cluster.cluster_name}-control-plane --format \'{{{{index .NetworkSettings.Ports "6443/tcp" 0 "HostIp"}}}}:{{{{index .NetworkSettings.Ports "6443/tcp" 0 "HostPort"}}}}\'' + ).strip() + if not control_plane_address_in_config: + raise Exception("Unable to find control plane address from config") + logger.debug(f"[setup_kind_in_gitlab] control_plane_address_in_config: {control_plane_address_in_config}") + + # Replace server config with dns name + internal port + execute_command_sync( + f"sed -i -e 's/{control_plane_address_in_config}/{correct_control_plane_ip}:6443/g' {os.environ['HOME']}/.kube/config", + k8s_kind_cluster, + ) + + k8s_kind_cluster.cluster_host_name = correct_control_plane_ip + + def get_free_port(): last_allowed_port = 65535 port = random.randint(1100, 65100) @@ -67,10 +105,26 @@ class K8sKindCluster: def __init__(self): self.cluster_name = f"lib-injection-testing-{str(uuid4())[:8]}" self.context_name = f"kind-{self.cluster_name}" - self.agent_port = 18126 - self.weblog_port = 18080 - - def confiure_ports(self): - # Get random free ports + self.cluster_host_name = "localhost" + self.agent_port = None + self.weblog_port = None + self.internal_agent_port = None + self.internal_weblog_port = None + self.docker_in_docker = False + + def configure_networking(self, docker_in_docker=False): + self.docker_in_docker = docker_in_docker self.agent_port = get_free_port() self.weblog_port = get_free_port() + self.internal_agent_port = 8126 + self.internal_weblog_port = 18080 + + def get_agent_port(self): + if self.docker_in_docker: + return self.internal_agent_port + return self.agent_port + + def get_weblog_port(self): + if self.docker_in_docker: + return self.internal_weblog_port + return self.weblog_port From 5e36d1b44a213182bd85e24c7c11e15432f25bbd Mon Sep 17 00:00:00 2001 From: Charles de Beauchesne Date: Wed, 18 Sep 2024 17:47:07 +0200 Subject: [PATCH 3/3] APMRP-360 set cold case JIRA for old bug declarations (#3065) * APMRP-360 set cold case JIRA for old bug declarations * Add Test_RemoteConfigurationUpdateSequenceFeatures --- pyproject.toml | 17 ----------------- tests/appsec/iast/sink/test_insecure_cookie.py | 2 +- .../appsec/iast/sink/test_no_httponly_cookie.py | 2 +- .../appsec/iast/sink/test_no_samesite_cookie.py | 2 +- tests/appsec/iast/sink/test_ssrf.py | 2 +- tests/appsec/iast/source/test_body.py | 2 +- tests/appsec/iast/source/test_cookie_name.py | 2 +- tests/appsec/test_traces.py | 2 +- tests/appsec/waf/test_addresses.py | 4 ++-- tests/appsec/waf/test_blocking.py | 8 ++++---- tests/appsec/waf/test_rules.py | 8 ++++---- tests/appsec/waf/test_telemetry.py | 4 ++-- 
.../remote_config/test_remote_configuration.py | 8 ++++---- tests/test_sampling_rates.py | 4 ++-- tests/test_semantic_conventions.py | 2 +- 15 files changed, 26 insertions(+), 43 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index af201c66f8..efff1b06f4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,14 +52,8 @@ allow_no_feature_nodes = [ allow_no_jira_ticket_for_bugs = [ "tests/apm_tracing_e2e/test_otel.py::Test_Otel_Span.test_datadog_otel_span", - "tests/appsec/iast/sink/test_insecure_cookie.py::TestInsecureCookie.test_secure", - "tests/appsec/iast/sink/test_no_httponly_cookie.py::TestNoHttponlyCookie.test_secure", - "tests/appsec/iast/sink/test_no_samesite_cookie.py::TestNoSamesiteCookie.test_secure", "tests/appsec/iast/sink/test_sql_injection.py::TestSqlInjection.test_insecure", - "tests/appsec/iast/sink/test_ssrf.py::TestSSRF.test_insecure", "tests/appsec/iast/source/test_body.py::TestRequestBody.test_source_reported", - "tests/appsec/iast/source/test_body.py::TestRequestBody.test_telemetry_metric_instrumented_source", - "tests/appsec/iast/source/test_cookie_name.py::TestCookieName.test_telemetry_metric_instrumented_source", "tests/appsec/iast/source/test_parameter_name.py::TestParameterName.test_source_get_reported", "tests/appsec/iast/source/test_parameter_name.py::TestParameterName.test_source_post_reported", "tests/appsec/iast/source/test_parameter_name.py::TestParameterName.test_source_reported", @@ -89,7 +83,6 @@ allow_no_jira_ticket_for_bugs = [ "tests/appsec/test_shell_execution.py::Test_ShellExecution.test_truncate_1st_argument", "tests/appsec/test_shell_execution.py::Test_ShellExecution.test_truncate_blank_2nd_argument", "tests/appsec/test_traces.py::Test_AppSecEventSpanTags.test_header_collection", - "tests/appsec/test_traces.py::Test_AppSecEventSpanTags.test_root_span_coherence", "tests/appsec/test_traces.py::Test_RetainTraces", "tests/appsec/test_user_blocking_full_denylist.py::Test_UserBlocking_FullDenylist.test_blocking_test", "tests/appsec/waf/test_addresses.py::Test_BodyJson", @@ -97,22 +90,14 @@ allow_no_jira_ticket_for_bugs = [ "tests/appsec/waf/test_addresses.py::Test_BodyXml", "tests/appsec/waf/test_addresses.py::Test_BodyXml.test_xml_attr_value", "tests/appsec/waf/test_addresses.py::Test_BodyXml.test_xml_content", - "tests/appsec/waf/test_addresses.py::Test_Cookies.test_cookies_with_special_chars2", - "tests/appsec/waf/test_addresses.py::Test_Cookies.test_cookies_with_special_chars2_custom_rules", "tests/appsec/waf/test_blocking.py::Test_Blocking.test_accept_all", "tests/appsec/waf/test_blocking.py::Test_Blocking.test_accept_full_json", "tests/appsec/waf/test_blocking.py::Test_Blocking.test_accept_partial_json", - "tests/appsec/waf/test_blocking.py::Test_Blocking.test_no_accept", "tests/appsec/waf/test_exclusions.py::Test_Exclusions.test_input_exclusion_negative_test", "tests/appsec/waf/test_exclusions.py::Test_Exclusions.test_rule_exclusion_positive_test", "tests/appsec/waf/test_miscs.py::Test_404", - "tests/appsec/waf/test_rules.py::Test_DiscoveryScan.test_security_scan", - "tests/appsec/waf/test_rules.py::Test_HttpProtocol.test_http_protocol", - "tests/appsec/waf/test_rules.py::Test_LFI.test_lfi_in_path", "tests/appsec/waf/test_rules.py::Test_SQLI.test_sqli2", "tests/appsec/waf/test_rules.py::Test_SQLI.test_sqli3", - "tests/appsec/waf/test_telemetry.py::Test_TelemetryMetrics.test_headers_are_correct", - "tests/appsec/waf/test_telemetry.py::Test_TelemetryMetrics.test_metric_waf_requests", 
"tests/auto_inject/test_auto_inject_install.py::TestContainerAutoInjectInstallScript.test_install", "tests/auto_inject/test_auto_inject_install.py::TestInstallerAutoInjectManual.test_install_uninstall", "tests/auto_inject/test_auto_inject_install.py::TestSimpleInstallerAutoInjectManual.test_install", @@ -165,9 +150,7 @@ allow_no_jira_ticket_for_bugs = [ "tests/parametric/test_trace_sampling.py::Test_Trace_Sampling_Tags_Feb2024_Revision.test_globs_different_casing", "tests/parametric/test_trace_sampling.py::Test_Trace_Sampling_Tags_Feb2024_Revision.test_metric_existence", "tests/parametric/test_trace_sampling.py::Test_Trace_Sampling_Tags_Feb2024_Revision.test_metric_matching", - "tests/remote_config/test_remote_configuration.py::Test_RemoteConfigurationUpdateSequenceASMDD.test_tracer_update_sequence", "tests/remote_config/test_remote_configuration.py::Test_RemoteConfigurationUpdateSequenceFeatures.test_tracer_update_sequence", - "tests/remote_config/test_remote_configuration.py::Test_RemoteConfigurationUpdateSequenceLiveDebugging.test_tracer_update_sequence", "tests/stats/test_miscs.py::Test_Miscs.test_request_headers", "tests/test_data_integrity.py::Test_TraceHeaders.test_trace_header_container_tags", "tests/test_data_integrity.py::Test_TraceHeaders.test_traces_header_present", diff --git a/tests/appsec/iast/sink/test_insecure_cookie.py b/tests/appsec/iast/sink/test_insecure_cookie.py index 474710e06c..0804bb82a0 100644 --- a/tests/appsec/iast/sink/test_insecure_cookie.py +++ b/tests/appsec/iast/sink/test_insecure_cookie.py @@ -17,7 +17,7 @@ class TestInsecureCookie(BaseSinkTest): data = {} location_map = {"nodejs": {"express4": "iast/index.js", "express4-typescript": "iast.ts"}} - @bug(context.library < "java@1.18.3", reason="Incorrect handling of HttpOnly flag") + @bug(context.library < "java@1.18.3", reason="APMRP-360") def test_secure(self): super().test_secure() diff --git a/tests/appsec/iast/sink/test_no_httponly_cookie.py b/tests/appsec/iast/sink/test_no_httponly_cookie.py index 7f1808a32e..894affae61 100644 --- a/tests/appsec/iast/sink/test_no_httponly_cookie.py +++ b/tests/appsec/iast/sink/test_no_httponly_cookie.py @@ -17,7 +17,7 @@ class TestNoHttponlyCookie(BaseSinkTest): data = {} location_map = {"nodejs": {"express4": "iast/index.js", "express4-typescript": "iast.ts"}} - @bug(context.library < "java@1.18.3", reason="Incorrect handling of HttpOnly flag") + @bug(context.library < "java@1.18.3", reason="APMRP-360") def test_secure(self): super().test_secure() diff --git a/tests/appsec/iast/sink/test_no_samesite_cookie.py b/tests/appsec/iast/sink/test_no_samesite_cookie.py index 60e4e08e31..7f3ddbc235 100644 --- a/tests/appsec/iast/sink/test_no_samesite_cookie.py +++ b/tests/appsec/iast/sink/test_no_samesite_cookie.py @@ -17,7 +17,7 @@ class TestNoSamesiteCookie(BaseSinkTest): data = {} location_map = {"nodejs": {"express4": "iast/index.js", "express4-typescript": "iast.ts"}} - @bug(context.library < "java@1.18.3", reason="Incorrect handling of HttpOnly flag") + @bug(context.library < "java@1.18.3", reason="APMRP-360") def test_secure(self): super().test_secure() diff --git a/tests/appsec/iast/sink/test_ssrf.py b/tests/appsec/iast/sink/test_ssrf.py index 54f0ebd296..71465c3001 100644 --- a/tests/appsec/iast/sink/test_ssrf.py +++ b/tests/appsec/iast/sink/test_ssrf.py @@ -21,7 +21,7 @@ class TestSSRF(BaseSinkTest): "python": {"flask-poc": "app.py", "django-poc": "app/urls.py"}, } - @bug(context.library < "java@1.14.0", reason="https://github.com/DataDog/dd-trace-java/pull/5172") + 
@bug(context.library < "java@1.14.0", reason="APMRP-360") def test_insecure(self): super().test_insecure() diff --git a/tests/appsec/iast/source/test_body.py b/tests/appsec/iast/source/test_body.py index 1cd87ec3af..5e7ef90076 100644 --- a/tests/appsec/iast/source/test_body.py +++ b/tests/appsec/iast/source/test_body.py @@ -25,7 +25,7 @@ def test_source_reported(self): context.library < "java@1.22.0" and "spring-boot" not in context.weblog_variant, reason="Metrics not implemented", ) - @bug(context.library >= "java@1.13.0" and context.library < "java@1.17.0", reason="Not reported") + @bug(context.library >= "java@1.13.0" and context.library < "java@1.17.0", reason="APMRP-360") @missing_feature(library="dotnet", reason="Not implemented yet") def test_telemetry_metric_instrumented_source(self): super().test_telemetry_metric_instrumented_source() diff --git a/tests/appsec/iast/source/test_cookie_name.py b/tests/appsec/iast/source/test_cookie_name.py index 2046b6ad6e..cc3016b654 100644 --- a/tests/appsec/iast/source/test_cookie_name.py +++ b/tests/appsec/iast/source/test_cookie_name.py @@ -22,7 +22,7 @@ class TestCookieName(BaseSourceTest): context.library < "java@1.22.0" and "spring-boot" not in context.weblog_variant, reason="Metrics not implemented", ) - @bug(context.library >= "java@1.16.0" and context.library < "java@1.22.0", reason="Not working as expected") + @bug(context.library >= "java@1.16.0" and context.library < "java@1.22.0", reason="APMRP-360") @missing_feature(weblog_variant="akka-http", reason="Not working as expected") def test_telemetry_metric_instrumented_source(self): super().test_telemetry_metric_instrumented_source() diff --git a/tests/appsec/test_traces.py b/tests/appsec/test_traces.py index 1381246c38..8b5b4ff72b 100644 --- a/tests/appsec/test_traces.py +++ b/tests/appsec/test_traces.py @@ -98,7 +98,7 @@ def test_header_collection(self): missing_response_headers = set(required_response_headers) - set(span.get("meta", {}).keys()) assert not missing_response_headers, f"Missing response headers: {missing_response_headers}" - @bug(context.library < "java@0.93.0") + @bug(context.library < "java@0.93.0", reason="APMRP-360") def test_root_span_coherence(self): """Appsec tags are not on span where type is not web, http or rpc""" valid_appsec_span_types = ["web", "http", "rpc"] diff --git a/tests/appsec/waf/test_addresses.py b/tests/appsec/waf/test_addresses.py index 35d309c860..0f01f5e392 100644 --- a/tests/appsec/waf/test_addresses.py +++ b/tests/appsec/waf/test_addresses.py @@ -181,7 +181,7 @@ def setup_cookies_with_special_chars2(self): @irrelevant(library="golang", reason="not handled by the Go standard cookie parser") @irrelevant(library="dotnet", reason="Quotation marks cause kestrel to erase the whole value") - @bug(context.library < "java@0.96.0") + @bug(context.library < "java@0.96.0", reason="APMRP-360") @irrelevant(context.appsec_rules_version >= "1.2.7", reason="cookies were disabled for the time being") def test_cookies_with_special_chars2(self): """Other cookies patterns""" @@ -225,7 +225,7 @@ def setup_cookies_with_special_chars2_custom_rules(self): @irrelevant(library="golang", reason="Not handled by the Go standard cookie parser") @irrelevant(library="dotnet", reason="Quotation marks cause kestrel to erase the whole value") - @bug(context.library < "java@0.96.0") + @bug(context.library < "java@0.96.0", reason="APMRP-360") @scenarios.appsec_custom_rules def test_cookies_with_special_chars2_custom_rules(self): """Other cookies patterns""" diff --git 
a/tests/appsec/waf/test_blocking.py b/tests/appsec/waf/test_blocking.py index 28fdf35fcd..db59534d45 100644 --- a/tests/appsec/waf/test_blocking.py +++ b/tests/appsec/waf/test_blocking.py @@ -52,10 +52,10 @@ class Test_Blocking: def setup_no_accept(self): self.r_na = weblog.get("/waf/", headers={"User-Agent": "Arachni/v1"}) - @bug(context.library < "java@0.115.0" and context.weblog_variant == "spring-boot-undertow", reason="npe") - @bug(context.library < "java@0.115.0" and context.weblog_variant == "spring-boot-wildfly", reason="npe") - @bug(context.library < "python@1.16.1", reason="Bug, minify and remove new line characters") - @bug(context.library < "ruby@1.12.1", reason="wrong default content-type") + @bug(context.library < "java@0.115.0" and context.weblog_variant == "spring-boot-undertow", reason="APMRP-360") + @bug(context.library < "java@0.115.0" and context.weblog_variant == "spring-boot-wildfly", reason="APMRP-360") + @bug(context.library < "python@1.16.1", reason="APMRP-360") + @bug(context.library < "ruby@1.12.1", reason="APMRP-360") def test_no_accept(self): """Blocking without an accept header""" assert self.r_na.status_code == 403 diff --git a/tests/appsec/waf/test_rules.py b/tests/appsec/waf/test_rules.py index bd09813a5b..db8499b16b 100644 --- a/tests/appsec/waf/test_rules.py +++ b/tests/appsec/waf/test_rules.py @@ -30,8 +30,8 @@ class Test_HttpProtocol: def setup_http_protocol(self): self.r_1 = weblog.get("/waf/", params={"key": ".cookie;domain="}) - @bug(context.library < "dotnet@2.1.0") - @bug(context.library < "java@0.98.1") + @bug(context.library < "dotnet@2.1.0", reason="APMRP-360") + @bug(context.library < "java@0.98.1", reason="APMRP-360") def test_http_protocol(self): """ AppSec catches attacks by violation of HTTP protocol in encoded cookie value""" interfaces.library.assert_waf_attack(self.r_1, waf_rules.http_protocol_violation.crs_943_100) @@ -74,7 +74,7 @@ def test_lfi_percent_2f(self): def setup_lfi_in_path(self): self.r_5 = weblog.get("/waf/..") - @bug(context.library < "java@0.92.0") + @bug(context.library < "java@0.92.0", reason="APMRP-360") @irrelevant(library="python", weblog_variant="django-poc") @irrelevant(library="dotnet", reason="lfi patterns are always filtered by the host web-server") @irrelevant( @@ -322,7 +322,7 @@ def setup_security_scan(self): self.r10 = weblog.get("/administrator/components/component.php") self.r11 = weblog.get("/login.pwd") - @bug(context.library < "java@0.98.0" and context.weblog_variant == "spring-boot-undertow") + @bug(context.library < "java@0.98.0" and context.weblog_variant == "spring-boot-undertow", reason="APMRP-360") @bug(library="java", weblog_variant="spring-boot-openliberty", reason="APPSEC-6583") def test_security_scan(self): """AppSec WAF catches Discovery scan""" diff --git a/tests/appsec/waf/test_telemetry.py b/tests/appsec/waf/test_telemetry.py index f540a649e3..1fe62be390 100644 --- a/tests/appsec/waf/test_telemetry.py +++ b/tests/appsec/waf/test_telemetry.py @@ -32,7 +32,7 @@ class Test_TelemetryMetrics: setup_headers_are_correct = _setup - @bug(context.library < "java@1.13.0", reason="Missing two headers") + @bug(context.library < "java@1.13.0", reason="APMRP-360") def test_headers_are_correct(self): """Tests that all telemetry requests have correct headers.""" for data in interfaces.library.get_telemetry_data(flatten_message_batches=False): @@ -77,7 +77,7 @@ def test_metric_waf_init(self): setup_metric_waf_requests = _setup - @bug(context.library < "java@1.13.0", reason="Missing tags") + 
@bug(context.library < "java@1.13.0", reason="APMRP-360") def test_metric_waf_requests(self): """Test waf.requests metric.""" expected_metric_name = "waf.requests" diff --git a/tests/remote_config/test_remote_configuration.py b/tests/remote_config/test_remote_configuration.py index 2b7f32c6de..261d945a4f 100644 --- a/tests/remote_config/test_remote_configuration.py +++ b/tests/remote_config/test_remote_configuration.py @@ -193,7 +193,7 @@ def setup_tracer_update_sequence(self): reason="ASM_FEATURES was not subscribed when a custom rules file was present", ) @bug(library="golang", reason="missing update file datadog/2/ASM_FEATURES/ASM_FEATURES-third/config") - @bug(context.library < "java@1.13.0", reason="id reported for config state is not the expected one") + @bug(context.library < "java@1.13.0", reason="APMRP-360") def test_tracer_update_sequence(self): """test update sequence, based on a scenario mocked in the proxy""" @@ -285,7 +285,7 @@ def setup_tracer_update_sequence(self): remote_config.send_sequential_commands(payloads) - @bug(context.library < "java@1.13.0", reason="id reported for config state is not the expected one") + @bug(context.library < "java@1.13.0", reason="APMRP-360") def test_tracer_update_sequence(self): """test update sequence, based on a scenario mocked in the proxy""" @@ -334,13 +334,13 @@ def setup_tracer_update_sequence(self): remote_config.send_sequential_commands(payloads) - @bug(context.library >= "java@1.1.0" and context.library < "java@1.4.0", reason="?") + @bug(context.library >= "java@1.1.0" and context.library < "java@1.4.0", reason="APMRP-360") @irrelevant( context.library >= "java@1.4.0" and context.appsec_rules_file is not None, reason="ASM_DD not subscribed with custom rules. This is the compliant behavior", ) @bug(context.weblog_variant == "spring-boot-openliberty", reason="APPSEC-6721") - @bug(context.library <= "java@1.12.1", reason="config state id value was wrong") + @bug(context.library <= "java@1.12.1", reason="APMRP-360") def test_tracer_update_sequence(self): """test update sequence, based on a scenario mocked in the proxy""" diff --git a/tests/test_sampling_rates.py b/tests/test_sampling_rates.py index 177c702769..abc814f677 100644 --- a/tests/test_sampling_rates.py +++ b/tests/test_sampling_rates.py @@ -134,8 +134,8 @@ def setup_sampling_decision(self): @irrelevant(context.library in ("nodejs", "php", "dotnet"), reason="AIT-374") @missing_feature(library="cpp", reason="https://github.com/DataDog/dd-opentracing-cpp/issues/173") - @bug(context.library < "java@0.92.0") - @flaky(context.library < "python@0.57.0") + @bug(context.library < "java@0.92.0", reason="APMRP-360") + @flaky(context.library < "python@0.57.0", reason="APMRP-360") @flaky(context.library >= "java@0.98.0", reason="APMJAVA-743") @flaky( context.library == "ruby" and context.weblog_variant in ("sinatra14", "sinatra20", "sinatra21", "uds-sinatra"), diff --git a/tests/test_semantic_conventions.py b/tests/test_semantic_conventions.py index 86739c04c9..9316233bf1 100644 --- a/tests/test_semantic_conventions.py +++ b/tests/test_semantic_conventions.py @@ -232,7 +232,7 @@ def validator(span): @bug(library="php", reason="language tag not implemented") # TODO: Versions previous to 1.1.0 might be ok, but were not tested so far. 
- @bug(context.library < "java@1.1.0", reason="language tag implemented but not for all spans") + @bug(context.library < "java@1.1.0", reason="APMRP-360") @bug(library="dotnet", reason="AIT-8735") @missing_feature(context.library < "dotnet@2.6.0") def test_meta_language_tag(self):