From 87ba81e47bb1da1c9d617e435e89f0c72049ea2e Mon Sep 17 00:00:00 2001 From: Charles de Beauchesne Date: Fri, 6 Dec 2024 11:37:30 +0100 Subject: [PATCH 1/7] Fix ruff check for utils/scripts folder (#3637) --- pyproject.toml | 23 +------- utils/scripts/compute-workflow-parameters.py | 6 +-- utils/scripts/compute_impacted_scenario.py | 2 +- utils/scripts/decode-rc.py | 3 +- utils/scripts/extract_appsec_waf_rules.py | 12 ++--- utils/scripts/get-workflow-summary.py | 3 +- utils/scripts/grep-nightly-logs.py | 5 +- utils/scripts/junit_report.py | 2 +- utils/scripts/markdown_logs.py | 13 ++--- utils/scripts/push-metrics.py | 7 +-- utils/scripts/update_features.py | 57 -------------------- 11 files changed, 26 insertions(+), 107 deletions(-) delete mode 100644 utils/scripts/update_features.py diff --git a/pyproject.toml b/pyproject.toml index f30698eb4f..362baf7261 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -208,6 +208,7 @@ ignore = [ ] "utils/grpc/weblog_pb2_grpc.py" = ["ALL"] # keep this one, it's a generated file "utils/grpc/weblog_pb2.py" = ["ALL"] # keep this one, it's a generated file +"utils/scripts/*" = ["T201"] "utils/interfaces/*" = [ "A001", "E713", @@ -283,24 +284,6 @@ ignore = [ "T201", ] "utils/_context/_scenarios/__init__.py" = ["T201"] -"utils/scripts/*" = [ - "T201", # allow prints here - "PLW2901", # to be corrected - "SIM115", # to be corrected - "PLR2004", # to be corrected - "S314", - "RUF013", - "B007", - "Q003", - "F541", - "S113", - "A002", - "PLR1711", - "SIM910", - "C405", - "RUF010", - "PLR1704", -] "utils/proxy/*" = [ "S104", # to be transfered in the line "DTZ006", # to be corrected @@ -313,10 +296,6 @@ ignore = [ "T201", # to be corrected "C416", # to be corrected ] -"utils/scripts/push-metrics.py" = [ - "UP017", # to be corrected -] -"utils/scripts/update_features.py" = ["ALL"] # file must be deleted "utils/telemetry_utils.py" = ["N806", "RUF012"] # to be corrected "utils/tools.py" = ["T201", "PLW2901"] # to be transfered in the line "conftest.py" = [ diff --git a/utils/scripts/compute-workflow-parameters.py b/utils/scripts/compute-workflow-parameters.py index dabe1442ef..4b9f7d1e6d 100644 --- a/utils/scripts/compute-workflow-parameters.py +++ b/utils/scripts/compute-workflow-parameters.py @@ -113,8 +113,8 @@ def get_opentelemetry_weblogs(library): def main(language: str, scenarios: str, groups: str): scenario_map = get_github_workflow_map(scenarios.split(","), groups.split(",")) - for github_workflow, scenarios in scenario_map.items(): - print(f"{github_workflow}_scenarios={json.dumps(scenarios)}") + for github_workflow, scenario_list in scenario_map.items(): + print(f"{github_workflow}_scenarios={json.dumps(scenario_list)}") endtoend_weblogs = get_endtoend_weblogs(language) print(f"endtoend_weblogs={json.dumps(endtoend_weblogs)}") @@ -126,7 +126,7 @@ def main(language: str, scenarios: str, groups: str): print(f"opentelemetry_weblogs={json.dumps(opentelemetry_weblogs)}") _experimental_parametric_job_count = int(os.environ.get("_EXPERIMENTAL_PARAMETRIC_JOB_COUNT", "1")) - print(f"_experimental_parametric_job_matrix={str(list(range(1, _experimental_parametric_job_count + 1)))}") + print(f"_experimental_parametric_job_matrix={list(range(1, _experimental_parametric_job_count + 1))!s}") if __name__ == "__main__": diff --git a/utils/scripts/compute_impacted_scenario.py b/utils/scripts/compute_impacted_scenario.py index 7a719e2ad8..3c6818b7c4 100644 --- a/utils/scripts/compute_impacted_scenario.py +++ b/utils/scripts/compute_impacted_scenario.py @@ -8,7 +8,7 @@ 
class Result: def __init__(self) -> None: - self.scenarios = set(["DEFAULT"]) # always run the default scenario + self.scenarios = {"DEFAULT"} # always run the default scenario self.scenarios_groups = set() def add_scenario(self, scenario: str): diff --git a/utils/scripts/decode-rc.py b/utils/scripts/decode-rc.py index 7385f59fb1..febb69906a 100644 --- a/utils/scripts/decode-rc.py +++ b/utils/scripts/decode-rc.py @@ -20,7 +20,7 @@ def from_payload(payload): for config_name in payload.get("client_configs", []): target = targets["signed"]["targets"][config_name] - raw_config = configs.get(config_name, None) + raw_config = configs.get(config_name) config = result.add_client_config(config_name, raw_config, target["custom"]["v"]) @@ -68,7 +68,6 @@ def main(filename): # print(json.dumps(item, indent=2)) # print("-" * 120) # print(json.dumps(command.to_payload(deserialized=True), indent=2)) - return if __name__ == "__main__": diff --git a/utils/scripts/extract_appsec_waf_rules.py b/utils/scripts/extract_appsec_waf_rules.py index 3c2d5c63af..a8dcc5cbbd 100644 --- a/utils/scripts/extract_appsec_waf_rules.py +++ b/utils/scripts/extract_appsec_waf_rules.py @@ -6,13 +6,13 @@ import requests -def to_camel_case(input): - return "".join(ele.title() for ele in input.split("_")) +def to_camel_case(str_input): + return "".join(ele.title() for ele in str_input.split("_")) URL = "https://raw.githubusercontent.com/DataDog/appsec-event-rules/main/build/recommended.json" -data = requests.get(URL).json() +data = requests.get(URL, timeout=10).json() version = data["version"] @@ -28,7 +28,7 @@ def to_camel_case(input): except KeyError: print(event) -HEADER = f"""# Unless explicitly stated otherwise all files in this repository are licensed under the the Apache License Version 2.0. +HEADER = """# Unless explicitly stated otherwise all files in this repository are licensed under the the Apache License Version 2.0. # This product includes software developed at Datadog (https://www.datadoghq.com/). # Copyright 2021 Datadog, Inc. 
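
A note on the hunk above: the parameter rename avoids shadowing the `input` builtin (ruff rule A002), and the behavior is unchanged. As a quick illustration of what the helper produces for a WAF rule name, here is a minimal sketch inferred from the `str.title()` logic in the diff (the sample rule name is hypothetical):

    def to_camel_case(str_input):
        return "".join(ele.title() for ele in str_input.split("_"))

    assert to_camel_case("sql_injection") == "SqlInjection"
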
@@ -42,14 +42,14 @@ def to_camel_case(input): for key, rules in result.items(): f.write(f"\n\nclass {key}:\n") for name, rule in rules.items(): - f.write(f" {name} = \"{rule['id']}\" # {rule['name']}\n") + f.write(f" {name} = \"{rule['id']}\" # {rule['name']}\n") # noqa: Q003 (black does not like this) with open("utils/interfaces/_library/appsec_data.py", "w") as f: f.write(HEADER) f.write("\n\nrule_id_to_type = {\n") for key, rules in result.items(): - for name, rule in rules.items(): + for rule in rules.values(): rule_id = rule["id"] f.write(f' "{rule_id}": "{key}",\n') f.write("}\n") diff --git a/utils/scripts/get-workflow-summary.py b/utils/scripts/get-workflow-summary.py index 1e40b9653f..742ce3218d 100644 --- a/utils/scripts/get-workflow-summary.py +++ b/utils/scripts/get-workflow-summary.py @@ -56,8 +56,7 @@ def main(repo_slug: str, run_id: int) -> None: for step_name, items in failing_steps.items(): print(f"❌ **Failures for `{step_name}`**\n") - items = sorted(items, key=lambda x: x[0]["name"]) - for job, step in items: + for job, step in sorted(items, key=lambda x: x[0]["name"]): url = job["html_url"] print(f"* [{job['name']}]({url}#step:{step['number']})") diff --git a/utils/scripts/grep-nightly-logs.py b/utils/scripts/grep-nightly-logs.py index 7c75987595..e518de2729 100644 --- a/utils/scripts/grep-nightly-logs.py +++ b/utils/scripts/grep-nightly-logs.py @@ -74,7 +74,7 @@ def main( for job in jobs["jobs"]: job_name = job["name"] - if "artifact" in job_name or language not in job_name: + if "artifact" in job_name or (language and language not in job_name): continue if job["conclusion"] != "failure": @@ -101,7 +101,8 @@ def main( if __name__ == "__main__": parser = argparse.ArgumentParser(prog="grep-nightly-logs", description="Grep into nightly logs to find a pattern",) parser.add_argument( - "language", + "--language", + "-l", type=str, help="One of the supported Datadog languages", choices=["cpp", "dotnet", "python", "ruby", "golang", "java", "nodejs", "php"], diff --git a/utils/scripts/junit_report.py b/utils/scripts/junit_report.py index e50bd23bfb..1c423ae918 100644 --- a/utils/scripts/junit_report.py +++ b/utils/scripts/junit_report.py @@ -12,7 +12,7 @@ def junit_modifyreport(json_report, junit_report_path, junit_properties): """Add extra information to auto generated JUnit xml file""" # Open XML Junit report - junit_report = ET.parse(junit_report_path) + junit_report = ET.parse(junit_report_path) # noqa: S314 # get root element junit_report_root = junit_report.getroot() for test in json_report["tests"]: diff --git a/utils/scripts/markdown_logs.py b/utils/scripts/markdown_logs.py index 73004ed15b..5ace7e4b2e 100644 --- a/utils/scripts/markdown_logs.py +++ b/utils/scripts/markdown_logs.py @@ -14,7 +14,8 @@ def main(): for x in os.listdir("."): if x.startswith("logs") and os.path.isfile(f"{x}/report.json"): result[x] = collections.defaultdict(int) - data = json.load(open(f"{x}/report.json", "r")) + with open(f"{x}/report.json", "r") as f: + data = json.load(f) for test in data["tests"]: outcome = "skipped" if test["metadata"]["details"] is not None else test["outcome"] result[x][outcome] += 1 @@ -22,14 +23,14 @@ def main(): table_row("Scenario", *[f"{outcome} {icon}" for outcome, icon in all_outcomes.items()]) table_row(*(["-"] * (len(all_outcomes) + 1))) - for scenario, outcomes in result.items(): - if scenario == "logs": - scenario = "Main scenario" + for folder_name, outcomes in result.items(): + if folder_name == "logs": + scenario_name = "Main scenario" else: # "ab_cd_ef" 
=> "Ab Cd Ef" - scenario = " ".join([s.capitalize() for s in scenario[5:].split("_")]) + scenario_name = " ".join([s.capitalize() for s in folder_name[5:].split("_")]) - table_row(scenario, *[str(outcomes.get(outcome, "")) for outcome in all_outcomes]) + table_row(scenario_name, *[str(outcomes.get(outcome, "")) for outcome in all_outcomes]) if __name__ == "__main__": diff --git a/utils/scripts/push-metrics.py b/utils/scripts/push-metrics.py index 6fd5482468..2a6d2ca62b 100644 --- a/utils/scripts/push-metrics.py +++ b/utils/scripts/push-metrics.py @@ -1,6 +1,6 @@ # Successfully installed datadog_api_client-2.24.1 -from datetime import datetime, timezone +from datetime import datetime, timezone, UTC import requests @@ -28,10 +28,7 @@ def main(): data = requests.get("https://dd-feature-parity.azurewebsites.net/statistics", timeout=10) values = flatten(data.json()) - series = [ - Series(metric=name, points=[Point([(datetime.now(timezone.utc)).timestamp(), value]),],) - for name, value in values - ] + series = [Series(metric=name, points=[Point([(datetime.now(UTC)).timestamp(), value]),],) for name, value in values] configuration = Configuration(host="datad0g.com") with ApiClient(configuration) as api_client: diff --git a/utils/scripts/update_features.py b/utils/scripts/update_features.py deleted file mode 100644 index 266ce5746a..0000000000 --- a/utils/scripts/update_features.py +++ /dev/null @@ -1,57 +0,0 @@ -import requests - -from utils import features - - -def get_known_features(): - """ return an object feature_id -> attribute name in features decorator """ - result = {} - - for attr in dir(features): - if attr.startswith("__"): - continue - - def obj(): - pass - - try: - obj = getattr(features, attr)(obj) - except AttributeError: - pass - - if hasattr(obj, "pytestmark"): - result[obj.pytestmark[0].kwargs["feature_id"]] = attr - - return result - - -def _main(): - known_features = get_known_features() - data = requests.get("https://dd-feature-parity.azurewebsites.net/Import/Features", timeout=10).json() - data = {feature["id"]: feature for feature in data} - - for feature_id, python_name in known_features.items(): - if feature_id not in data: - print(f"Feature {python_name}/{feature_id} is not present anymore in the feature parity database") - - for feature in data.values(): - feature_id = feature["id"] - if feature_id not in known_features: - docstring = f""" - {feature['name']} - - https://feature-parity.us1.prod.dog/#/?feature={feature_id}""" - print( - f""" - @staticmethod - def {feature['codeSafeName'].lower()}(test_object): - ""\"{docstring} - ""\" - pytest.mark.features(feature_id={feature_id})(test_object) - return test_object -""" - ) - - -if __name__ == "__main__": - _main() From 5897d7934491765d7310eb215e3e4c76ef52869c Mon Sep 17 00:00:00 2001 From: Kevin Gosse Date: Fri, 6 Dec 2024 11:46:23 +0100 Subject: [PATCH 2/7] Enable docker-ssi tests for .net (#3621) Co-authored-by: Andrew Lock --- .../dd-lib-dotnet-init-test-app/Program.cs | 20 +++++-- tests/docker_ssi/test_docker_ssi_crash.py | 2 +- .../build/ssi/base/dotnet_install_runtimes.sh | 10 ++++ utils/build/ssi/base/tested_components.sh | 2 + utils/build/ssi/dotnet/dotnet-app.Dockerfile | 15 +++++ utils/docker_ssi/docker_ssi_definitions.py | 60 +++++++++++++++++++ utils/docker_ssi/docker_ssi_matrix_utils.py | 3 + utils/interfaces/_test_agent.py | 22 +++++-- 8 files changed, 121 insertions(+), 13 deletions(-) create mode 100755 utils/build/ssi/base/dotnet_install_runtimes.sh create mode 100644 
utils/build/ssi/dotnet/dotnet-app.Dockerfile diff --git a/lib-injection/build/docker/dotnet/dd-lib-dotnet-init-test-app/Program.cs b/lib-injection/build/docker/dotnet/dd-lib-dotnet-init-test-app/Program.cs index 039e2d52f0..23c84f4f50 100644 --- a/lib-injection/build/docker/dotnet/dd-lib-dotnet-init-test-app/Program.cs +++ b/lib-injection/build/docker/dotnet/dd-lib-dotnet-init-test-app/Program.cs @@ -1,23 +1,30 @@ using System.Diagnostics; if (Environment.GetEnvironmentVariable("FORKED") != null) +{ + Thread.Sleep(5_000); // Add a small delay otherwise the telemetry forwarder leaves a zombie process behind + CrashMe(null); + + // Should never get there + Thread.Sleep(Timeout.Infinite); +} + +var builder = WebApplication.CreateBuilder(args); +var app = builder.Build(); + +static string CrashMe(HttpRequest? request) { var thread = new Thread(() => { - Thread.Sleep(5_000); // Add a small delay otherwise the telemetry forwarder leaves a zombie process behind throw new BadImageFormatException("Expected"); }); thread.Start(); thread.Join(); - // Should never get there - Thread.Sleep(Timeout.Infinite); + return "Failed to crash"; } -var builder = WebApplication.CreateBuilder(args); -var app = builder.Build(); - static string ForkAndCrash(HttpRequest request) { // Simulate fork @@ -172,6 +179,7 @@ static string GetZombies(HttpRequest request) app.MapGet("/", () => "Hello World!"); +app.MapGet("/crashme", CrashMe); app.MapGet("/fork_and_crash", ForkAndCrash); app.MapGet("/child_pids", GetChildPids); app.MapGet("/zombies", GetZombies); diff --git a/tests/docker_ssi/test_docker_ssi_crash.py b/tests/docker_ssi/test_docker_ssi_crash.py index 7d1231a6cd..8a73ea6cb6 100644 --- a/tests/docker_ssi/test_docker_ssi_crash.py +++ b/tests/docker_ssi/test_docker_ssi_crash.py @@ -31,7 +31,7 @@ def setup_crash(self): self.r = TestDockerSSICrash._r @features.ssi_crashtracking - @bug(condition=context.library not in ("python", "nodejs"), reason="INPLAT-11") + @bug(condition=context.library not in ("python", "nodejs", "dotnet"), reason="INPLAT-11") @irrelevant(context.library == "python" and context.installed_language_runtime < "3.7.0") @irrelevant(context.library == "nodejs" and context.installed_language_runtime < "17.0") def test_crash(self): diff --git a/utils/build/ssi/base/dotnet_install_runtimes.sh b/utils/build/ssi/base/dotnet_install_runtimes.sh new file mode 100755 index 0000000000..b54396b3ab --- /dev/null +++ b/utils/build/ssi/base/dotnet_install_runtimes.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +export DOTNET_VERSION=$1 +set -e + +curl -sSL https://dot.net/v1/dotnet-install.sh --output dotnet-install.sh \ + && chmod +x ./dotnet-install.sh \ + && ./dotnet-install.sh --version "$DOTNET_VERSION" --install-dir /usr/share/dotnet \ + && rm ./dotnet-install.sh \ + && ln -s /usr/share/dotnet/dotnet /usr/bin/dotnet diff --git a/utils/build/ssi/base/tested_components.sh b/utils/build/ssi/base/tested_components.sh index 4a21b80426..ee3b2b73d7 100755 --- a/utils/build/ssi/base/tested_components.sh +++ b/utils/build/ssi/base/tested_components.sh @@ -16,6 +16,8 @@ elif [ "$DD_LANG" == "js" ]; then . 
"$NVM_DIR/nvm.sh" runtime_version=$(node --version | tr -d 'v') +elif [ "$DD_LANG" == "dotnet" ]; then + runtime_version=$(dotnet --version) fi if [ -f /etc/debian_version ] || [ "$DISTRIBUTION" = "Debian" ] || [ "$DISTRIBUTION" = "Ubuntu" ]; then diff --git a/utils/build/ssi/dotnet/dotnet-app.Dockerfile b/utils/build/ssi/dotnet/dotnet-app.Dockerfile new file mode 100644 index 0000000000..f7e2ec0b61 --- /dev/null +++ b/utils/build/ssi/dotnet/dotnet-app.Dockerfile @@ -0,0 +1,15 @@ +ARG BASE_IMAGE + +FROM ${BASE_IMAGE} +WORKDIR /app + +ENV DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1 + +COPY lib-injection/build/docker/dotnet/dd-lib-dotnet-init-test-app/ . + +RUN dotnet restore +RUN dotnet build -c Release + +ENV ASPNETCORE_URLS=http://+:18080 +EXPOSE 18080 +CMD [ "dotnet", "run", "--no-build", "--no-restore", "-c", "Release" ] \ No newline at end of file diff --git a/utils/docker_ssi/docker_ssi_definitions.py b/utils/docker_ssi/docker_ssi_definitions.py index 31a2ae0f37..d5f5453165 100644 --- a/utils/docker_ssi/docker_ssi_definitions.py +++ b/utils/docker_ssi/docker_ssi_definitions.py @@ -181,6 +181,25 @@ def get_version_id(version): raise ValueError(f"Node.js version {version} not supported") +class DotnetRuntimeInstallableVersions: + """ Python runtime versions that can be installed automatically""" + + DOTNET70 = RuntimeInstallableVersion("DOTNET70", "7.0.410") + + @staticmethod + def get_all_versions(): + return [ + DotnetRuntimeInstallableVersions.DOTNET70, + ] + + @staticmethod + def get_version_id(version): + for version_check in DotnetRuntimeInstallableVersions.get_all_versions(): + if version_check.version == version: + return version_check.version_id + raise ValueError(f".NET version {version} not supported") + + # HERE ADD YOUR WEBLOG DEFINITION: SUPPORTED IMAGES AND INSTALABLE RUNTIME VERSIONS # Maybe a weblog app contains preinstalled language runtime, in this case we define the weblog without runtime version JETTY_APP = WeblogDescriptor( @@ -272,6 +291,46 @@ def get_version_id(version): ], ) +DOTNET_APP = WeblogDescriptor( + "dotnet-app", + "dotnet", + [ + SupportedImages().UBUNTU_22_AMD64.with_allowed_runtime_versions( + DotnetRuntimeInstallableVersions.get_all_versions() + ), + SupportedImages().UBUNTU_22_ARM64.with_allowed_runtime_versions( + DotnetRuntimeInstallableVersions.get_all_versions() + ), + SupportedImages().UBUNTU_16_AMD64.with_allowed_runtime_versions( + DotnetRuntimeInstallableVersions.get_all_versions() + ), + SupportedImages().ORACLELINUX_8_AMD64.with_allowed_runtime_versions( + DotnetRuntimeInstallableVersions.get_all_versions() + ), + SupportedImages().ORACLELINUX_8_ARM64.with_allowed_runtime_versions( + DotnetRuntimeInstallableVersions.get_all_versions() + ), + SupportedImages().ORACLELINUX_9_AMD64.with_allowed_runtime_versions( + DotnetRuntimeInstallableVersions.get_all_versions() + ), + SupportedImages().ORACLELINUX_9_ARM64.with_allowed_runtime_versions( + DotnetRuntimeInstallableVersions.get_all_versions() + ), + SupportedImages().ALMALINUX_8_AMD64.with_allowed_runtime_versions( + DotnetRuntimeInstallableVersions.get_all_versions() + ), + SupportedImages().ALMALINUX_8_ARM64.with_allowed_runtime_versions( + DotnetRuntimeInstallableVersions.get_all_versions() + ), + SupportedImages().ALMALINUX_9_AMD64.with_allowed_runtime_versions( + DotnetRuntimeInstallableVersions.get_all_versions() + ), + SupportedImages().ALMALINUX_9_ARM64.with_allowed_runtime_versions( + DotnetRuntimeInstallableVersions.get_all_versions() + ), + ], +) + # HERE ADD YOUR WEBLOG DEFINITION TO 
THE LIST ALL_WEBLOGS = [ JETTY_APP, @@ -282,4 +341,5 @@ def get_version_id(version): PHP_APP, PY_APP, JS_APP, + DOTNET_APP, ] diff --git a/utils/docker_ssi/docker_ssi_matrix_utils.py b/utils/docker_ssi/docker_ssi_matrix_utils.py index 09eada8802..f5f0bdca33 100644 --- a/utils/docker_ssi/docker_ssi_matrix_utils.py +++ b/utils/docker_ssi/docker_ssi_matrix_utils.py @@ -3,6 +3,7 @@ JSRuntimeInstallableVersions, PHPRuntimeInstallableVersions, PythonRuntimeInstallableVersions, + DotnetRuntimeInstallableVersions, ) @@ -16,5 +17,7 @@ def resolve_runtime_version(library, runtime): return PythonRuntimeInstallableVersions.get_version_id(runtime) elif library == "nodejs": return JSRuntimeInstallableVersions.get_version_id(runtime) + elif library == "dotnet": + return DotnetRuntimeInstallableVersions.get_version_id(runtime) raise ValueError(f"Library {library} not supported") diff --git a/utils/interfaces/_test_agent.py b/utils/interfaces/_test_agent.py index 0ed1f1014c..d1edef52ae 100644 --- a/utils/interfaces/_test_agent.py +++ b/utils/interfaces/_test_agent.py @@ -91,9 +91,19 @@ def get_telemetry_logs(self): def get_crash_reports(self): logger.debug("Try to find telemetry data related to crash reports") - return [ - p - for t in self.get_telemetry_logs() - for p in t["payload"] - if "signame" in p.get("tags", "") or "signum" in p.get("tags", "") - ] + crash_reports = [] + + for t in self.get_telemetry_logs(): + payload = t["payload"] + + # If payload is a list, iterate through its items + if isinstance(payload, list): + crash_reports.extend( + p for p in payload if "signame" in p.get("tags", "") or "signum" in p.get("tags", "") + ) + # If payload is a single object, check it directly + elif isinstance(payload, dict): + if "signame" in payload.get("tags", "") or "signum" in payload.get("tags", ""): + crash_reports.append(payload) + + return crash_reports From bab436243befbaaef2707a8471b2760cd192bf35 Mon Sep 17 00:00:00 2001 From: Charles de Beauchesne Date: Fri, 6 Dec 2024 14:33:47 +0100 Subject: [PATCH 3/7] Fix ruff check for some utils files (#3636) --- pyproject.toml | 39 +------------- tests/test_telemetry.py | 4 +- utils/_context/_scenarios/__init__.py | 2 +- utils/_context/containers.py | 63 ++++++++++++----------- utils/proxy/_decoders/protobuf_schemas.py | 4 +- utils/proxy/_deserializer.py | 23 ++++----- utils/proxy/core.py | 6 +-- utils/tools.py | 6 +-- 8 files changed, 57 insertions(+), 90 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 362baf7261..63559ddea5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -163,6 +163,8 @@ ignore = [ "D211", # no-blank-line-before-class "D213", # multi-line-summary-second-line "D400", # First line should end with a period + "D406", # we are not using numpy convention for docstrings + "D407", # we are not using reStructuredText for docstrings "D415", # First line should end with a period "EM101", # Exception must not use a string literal => painful "EM102", # Exception must not use an f-string => painful @@ -187,25 +189,6 @@ ignore = [ # TODO : remove those ignores [tool.ruff.lint.per-file-ignores] -"utils/_context/containers.py" = [ - "D406", - "D407", - "D417", - "F841", - "FBT001", - "FIX003", - "PLW2901", - "PTH101", - "PTH109", - "RET505", - "RUF012", - "S104", - "S106", - "S110", - "S113", - "S603", - "S607", -] "utils/grpc/weblog_pb2_grpc.py" = ["ALL"] # keep this one, it's a generated file "utils/grpc/weblog_pb2.py" = ["ALL"] # keep this one, it's a generated file "utils/scripts/*" = ["T201"] @@ -283,25 +266,7 @@ ignore = [ 
"SIM108", "T201", ] -"utils/_context/_scenarios/__init__.py" = ["T201"] -"utils/proxy/*" = [ - "S104", # to be transfered in the line - "DTZ006", # to be corrected - "PLR5501", # to be corrected - "G202", # to be corrected - "PLC0206", # to be corrected - "S307", # to be corrected - "RET505", # to be corrected - "UP034", # to be corrected - "T201", # to be corrected - "C416", # to be corrected -] "utils/telemetry_utils.py" = ["N806", "RUF012"] # to be corrected -"utils/tools.py" = ["T201", "PLW2901"] # to be transfered in the line -"conftest.py" = [ - "SLF001", # to be transfered in the line - "N806", # to be corrected -] "utils/waf_rules.py" = ["N801"] "utils/virtual_machine/*" = [ "A002", diff --git a/tests/test_telemetry.py b/tests/test_telemetry.py index 9ded4aa46b..466defa5da 100644 --- a/tests/test_telemetry.py +++ b/tests/test_telemetry.py @@ -133,7 +133,7 @@ def test_seq_id(self): """Test that messages are sent sequentially""" MAX_OUT_OF_ORDER_LAG = 0.3 # s - FMT = "%Y-%m-%dT%H:%M:%S.%f" + FMT = "%Y-%m-%dT%H:%M:%S.%f%z" telemetry_data = list(interfaces.library.get_telemetry_data(flatten_message_batches=False)) if len(telemetry_data) == 0: @@ -296,7 +296,7 @@ def _get_heartbeat_delays_by_runtime() -> dict: The value is a list of delay observed on this runtime id """ - fmt = "%Y-%m-%dT%H:%M:%S.%f" + fmt = "%Y-%m-%dT%H:%M:%S.%f%z" telemetry_data = list(interfaces.library.get_telemetry_data()) heartbeats_by_runtime = defaultdict(list) diff --git a/utils/_context/_scenarios/__init__.py b/utils/_context/_scenarios/__init__.py index a3487001ed..2bac4b790b 100644 --- a/utils/_context/_scenarios/__init__.py +++ b/utils/_context/_scenarios/__init__.py @@ -791,7 +791,7 @@ def _main(): for scenario in get_all_scenarios() } - print(json.dumps(data, indent=2)) + print(json.dumps(data, indent=2)) # noqa: T201 if __name__ == "__main__": diff --git a/utils/_context/containers.py b/utils/_context/containers.py index 870c2ae948..62de71df57 100644 --- a/utils/_context/containers.py +++ b/utils/_context/containers.py @@ -40,7 +40,7 @@ def _get_client(): ).stdout.strip() return docker.DockerClient(base_url=endpoint) except: - pass + logger.exception("Fail to get docker client with context") if "Error while fetching server API version: ('Connection aborted.'" in str(e): pytest.exit("Connection refused to docker daemon, is it running?", 1) @@ -71,7 +71,7 @@ def create_inject_volume(): class TestedContainer: - _container: Container + _container: Container = None # https://docker-py.readthedocs.io/en/stable/containers.html def __init__( @@ -87,7 +87,7 @@ def __init__( **kwargs, ) -> None: self.name = name - self.host_project_dir = os.environ.get("SYSTEM_TESTS_HOST_PROJECT_DIR", os.getcwd()) + self.host_project_dir = os.environ.get("SYSTEM_TESTS_HOST_PROJECT_DIR", str(Path.cwd())) self.host_log_folder = host_log_folder self.allow_old_container = allow_old_container @@ -249,8 +249,8 @@ def wait_for_health(self) -> bool: if exit_code != 0: logger.stdout(f"Healthcheck failed for {self.name}:\n{output}") return False - else: - logger.info(f"Healthcheck successful for {self.name}") + + logger.info(f"Healthcheck successful for {self.name}") return True @@ -318,10 +318,12 @@ def _fix_host_pwd_in_volumes(self): host_pwd = self.host_project_dir result = {} - for k, v in self.kwargs["volumes"].items(): - if k.startswith("./"): - k = f"{host_pwd}{k[1:]}" - result[k] = v + for host_path, container_path in self.kwargs["volumes"].items(): + if host_path.startswith("./"): + corrected_host_path = 
f"{host_pwd}{host_path[1:]}" + result[corrected_host_path] = container_path + else: + result[host_path] = container_path self.kwargs["volumes"] = result @@ -367,8 +369,9 @@ def collect_logs(self): ("stdout", self._container.logs(stdout=True, stderr=False)), ("stderr", self._container.logs(stdout=False, stderr=True)), ) - for output_name, output in data: + for output_name, raw_output in data: filename = f"{self.log_folder_path}/{output_name}.log" + output = raw_output for key in keys: output = output.replace(key, b"") with open(filename, "wb") as f: @@ -392,12 +395,11 @@ def remove(self): # collect logs before removing self.collect_logs() self._container.remove(force=True) - except Exception as e: + except: # Sometimes, the container does not exists. # We can safely ignore this, because if it's another issue # it will be killed at startup - - pass + logger.info(f"Fail to remove container {self.name}") if self.stdout_interface is not None: self.stdout_interface.load_data() @@ -668,16 +670,6 @@ def __init__( volumes = {} if volumes is None else volumes volumes[f"./{host_log_folder}/docker/weblog/logs/"] = {"bind": "/var/log/system-tests", "mode": "rw"} - try: - with open("./binaries/nodejs-load-from-local", encoding="utf-8") as f: - path = f.read().strip(" \r\n") - volumes[os.path.abspath(path)] = { - "bind": "/volumes/dd-trace-js", - "mode": "ro", - } - except Exception: - pass - base_environment = { # Datadog setup "DD_SERVICE": "weblog", @@ -804,6 +796,17 @@ def configure(self, replay): self.appsec_rules_file = (self.image.env | self.environment).get("DD_APPSEC_RULES", None) + if library == "nodejs": + try: + with open("./binaries/nodejs-load-from-local", encoding="utf-8") as f: + path = f.read().strip(" \r\n") + self.kwargs["volumes"][os.path.abspath(path)] = { + "bind": "/volumes/dd-trace-js", + "mode": "ro", + } + except Exception: + logger.info("No local dd-trace-js found") + def post_start(self): from utils import weblog @@ -845,7 +848,7 @@ def telemetry_heartbeat_interval(self): def request(self, method, url, **kwargs): """ perform an HTTP request on the weblog, must NOT be used for tests """ - return requests.request(method, f"http://localhost:{self.port}{url}", **kwargs) + return requests.request(method, f"http://localhost:{self.port}{url}", **kwargs) # noqa: S113 class PostgresContainer(SqlDbTestedContainer): @@ -866,7 +869,7 @@ def __init__(self, host_log_folder) -> None: stdout_interface=interfaces.postgres, dd_integration_service="postgresql", db_user="system_tests_user", - db_password="system_tests", + db_password="system_tests", # noqa: S106 db_host="postgres", db_instance="system_tests_dbname", ) @@ -967,7 +970,7 @@ def __init__(self, host_log_folder) -> None: healthcheck={"test": "/healthcheck.sh", "retries": 60}, dd_integration_service="mysql", db_user="mysqldb", - db_password="mysqldb", + db_password="mysqldb", # noqa: S106 db_host="mysqldb", db_instance="mysql_dbname", ) @@ -978,7 +981,7 @@ def __init__(self, host_log_folder) -> None: self.data_mssql = f"./{host_log_folder}/data-mssql" healthcheck = { - # XXX: Using 127.0.0.1 here instead of localhost to avoid using IPv6 in some systems. + # Using 127.0.0.1 here instead of localhost to avoid using IPv6 in some systems. 
# -C : trust self signed certificates "test": '/opt/mssql-tools18/bin/sqlcmd -S 127.0.0.1 -U sa -P "yourStrong(!)Password" -Q "SELECT 1" -b -C', "retries": 20, @@ -997,7 +1000,7 @@ def __init__(self, host_log_folder) -> None: healthcheck=healthcheck, dd_integration_service="mssql", db_user="SA", - db_password="yourStrong(!)Password", + db_password="yourStrong(!)Password", # noqa: S106 db_host="mssql", db_instance="master", ) @@ -1024,7 +1027,7 @@ def __init__(self, host_log_folder) -> None: environment={}, volumes={self._otel_config_host_path: {"bind": "/etc/otelcol-config.yml", "mode": "ro",}}, host_log_folder=host_log_folder, - ports={"13133/tcp": ("0.0.0.0", 13133)}, + ports={"13133/tcp": ("0.0.0.0", 13133)}, # noqa: S104 ) # Override wait_for_health because we cannot do docker exec for container opentelemetry-collector-contrib @@ -1051,7 +1054,7 @@ def start(self) -> Container: prev_mode = os.stat(self._otel_config_host_path).st_mode new_mode = prev_mode | stat.S_IROTH if prev_mode != new_mode: - os.chmod(self._otel_config_host_path, new_mode) + Path(self._otel_config_host_path).chmod(new_mode) return super().start() diff --git a/utils/proxy/_decoders/protobuf_schemas.py b/utils/proxy/_decoders/protobuf_schemas.py index 80d3aaa823..f0b4a9ce83 100644 --- a/utils/proxy/_decoders/protobuf_schemas.py +++ b/utils/proxy/_decoders/protobuf_schemas.py @@ -10,9 +10,9 @@ with open(Path(__file__).parent / "agent.descriptor", "rb") as f: _fds = FileDescriptorSet.FromString(f.read()) -_messages = GetMessages([file for file in _fds.file]) +_messages = GetMessages(list(_fds.file)) -print(f"Message types present in protobuf descriptors: {_messages.keys()}") +print(f"Message types present in protobuf descriptors: {_messages.keys()}") # noqa: T201 TracePayload = _messages["datadog.trace.AgentPayload"] MetricPayload = _messages["datadog.agentpayload.MetricPayload"] diff --git a/utils/proxy/_deserializer.py b/utils/proxy/_deserializer.py index 570b1095d4..660278f668 100644 --- a/utils/proxy/_deserializer.py +++ b/utils/proxy/_deserializer.py @@ -2,6 +2,7 @@ # This product includes software developed at Datadog (https://www.datadoghq.com/). # Copyright 2021 Datadog, Inc. 
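
Context for the import introduced below: `ast.literal_eval` only accepts Python literals, which makes it a safe replacement for `eval` (ruff rule S307) when a captured protobuf body arrives as the textual repr of a bytes object. A minimal sketch of that decode step, using a hypothetical payload repr:

    from ast import literal_eval

    raw = "b'\\n\\x05hello'"  # hypothetical: a bytes repr captured as a str
    content = literal_eval(raw)  # rebuilds the bytes object without executing code
    assert content == b"\n\x05hello"
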
+from ast import literal_eval import base64 import gzip import io @@ -109,23 +110,21 @@ def json_load(): if not content or len(content) == 0: return None - if content_type and any((mime_type in content_type for mime_type in ("application/json", "text/json"))): + if content_type and any(mime_type in content_type for mime_type in ("application/json", "text/json")): return json_load() if path == "/v0.7/config": # Kyle, please add content-type header :) if key == "response" and message["status_code"] == 404: return content.decode(encoding="utf-8") - else: - return json_load() + + return json_load() if interface == "library" and path == "/info": if key == "response": return json_load() - else: - if not content: - return None - else: - return content + + # replace zero length strings/bytes by None + return content if content else None if content_type in ("application/msgpack", "application/msgpack, application/msgpack") or (path == "/v0.6/stats"): result = msgpack.unpackb(content, unicode_errors="replace", strict_map_key=False) @@ -152,7 +151,7 @@ def json_load(): if content_type == "application/x-protobuf": # Raw data can be either a str like "b'\n\x\...'" or bytes - content = eval(content) if isinstance(content, str) else content + content = literal_eval(content) if isinstance(content, str) else content assert isinstance(content, bytes) dd_protocol = get_header_value("dd-protocol", message["headers"]) if dd_protocol == "otlp" and "traces" in path: @@ -185,9 +184,9 @@ def json_load(): content_type_part = "" - for name in headers: + for name, value in headers.items(): if name.lower() == "content-type": - content_type_part = headers[name].lower() + content_type_part = value.lower() break if content_type_part.startswith("application/json"): @@ -307,7 +306,7 @@ def deserialize(data, key, content, interface, export_content_files_to: str): data["path"], data[key], content, interface, key, export_content_files_to ) except: - logger.exception(f"Error while deserializing {data['log_filename']}", exc_info=True) + logger.exception(f"Error while deserializing {data['log_filename']}") data[key]["raw_content"] = str(content) data[key]["traceback"] = str(traceback.format_exc()) diff --git a/utils/proxy/core.py b/utils/proxy/core.py index cf472108df..71f8d61c27 100644 --- a/utils/proxy/core.py +++ b/utils/proxy/core.py @@ -3,7 +3,7 @@ import json import logging import os -from datetime import datetime +from datetime import datetime, UTC from mitmproxy import master, options, http from mitmproxy.addons import errorcheck, default_addons @@ -220,7 +220,7 @@ def response(self, flow): "host": flow.request.host, "port": flow.request.port, "request": { - "timestamp_start": datetime.fromtimestamp(flow.request.timestamp_start).isoformat(), + "timestamp_start": datetime.fromtimestamp(flow.request.timestamp_start, tz=UTC).isoformat(), "headers": list(flow.request.headers.items()), "length": len(flow.request.content) if flow.request.content else 0, }, @@ -347,7 +347,7 @@ def start_proxy() -> None: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) - opts = options.Options(mode=modes, listen_host="0.0.0.0", confdir="utils/proxy/.mitmproxy") + opts = options.Options(mode=modes, listen_host="0.0.0.0", confdir="utils/proxy/.mitmproxy") # noqa: S104 proxy = master.Master(opts, event_loop=loop) proxy.addons.add(*default_addons()) proxy.addons.add(errorcheck.ErrorCheck()) diff --git a/utils/tools.py b/utils/tools.py index cc0876bd0b..0306a421cd 100644 --- a/utils/tools.py +++ b/utils/tools.py @@ -34,8 +34,8 @@ def 
update_environ_with_local_env(): try: with open(".env", "r", encoding="utf-8") as f: logger.debug("Found a .env file") - for line in f: - line = line.strip(" \t\n") + for raw_line in f: + line = raw_line.strip(" \t\n") line = re.sub(r"(.*)#.$", r"\1", line) line = re.sub(r"^(export +)(.*)$", r"\2", line) if "=" in line: @@ -64,7 +64,7 @@ def stdout(self, message, *args, **kws): else: # at this point, the logger may not yet be configured with the pytest terminal # so directly print in stdout - print(message) + print(message) # noqa: T201 logging.Logger.stdout = stdout From 3913d678898b8fd8a63189e75104bb6a1f561ed0 Mon Sep 17 00:00:00 2001 From: Charles de Beauchesne Date: Fri, 6 Dec 2024 16:50:00 +0100 Subject: [PATCH 4/7] [nodejs] Fix otel scenario (#3641) --- utils/build/docker/nodejs_otel/express4-otel.Dockerfile | 1 + utils/scripts/compute_impacted_scenario.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/utils/build/docker/nodejs_otel/express4-otel.Dockerfile b/utils/build/docker/nodejs_otel/express4-otel.Dockerfile index 9262035584..210b1ef090 100644 --- a/utils/build/docker/nodejs_otel/express4-otel.Dockerfile +++ b/utils/build/docker/nodejs_otel/express4-otel.Dockerfile @@ -31,6 +31,7 @@ ENV OTEL_BSP_SCHEDULE_DELAY=200 RUN npm install --save @opentelemetry/api RUN npm install --save @opentelemetry/auto-instrumentations-node RUN npm install @opentelemetry/instrumentation-mysql2 +RUN npm install @opentelemetry/otlp-exporter-base RUN npm install --save opentelemetry-instrumentation-mssql # docker startup diff --git a/utils/scripts/compute_impacted_scenario.py b/utils/scripts/compute_impacted_scenario.py index 3c6818b7c4..79c80d584e 100644 --- a/utils/scripts/compute_impacted_scenario.py +++ b/utils/scripts/compute_impacted_scenario.py @@ -162,6 +162,8 @@ def main(): r"utils/docker_ssi/.*": ScenarioGroup.DOCKER_SSI.value, ### Profiling case r"utils/_context/_scenarios/profiling\.py": ScenarioGroup.PROFILING.value, + ### otel weblog + r"utils/build/docker/nodejs_otel/.*": ScenarioGroup.OPEN_TELEMETRY.value, ### else, run all r"utils/.*": ScenarioGroup.ALL.value, ## few files with no effect From b278b765b59e3c35414db523fe8dc299fa9a337c Mon Sep 17 00:00:00 2001 From: Charles de Beauchesne Date: Fri, 6 Dec 2024 17:23:54 +0100 Subject: [PATCH 5/7] Fix ruff checks in interfaces folder (#3638) --- pyproject.toml | 33 +++---------------------- utils/interfaces/_core.py | 2 +- utils/interfaces/_library/appsec.py | 10 +++----- utils/interfaces/_logs.py | 17 +++++++------ utils/interfaces/_schemas_validators.py | 9 ++++--- utils/interfaces/_test_agent.py | 25 +++++++++---------- utils/interfaces/schemas/serve_doc.py | 12 ++++----- 7 files changed, 39 insertions(+), 69 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 63559ddea5..70ef333a55 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -189,33 +189,9 @@ ignore = [ # TODO : remove those ignores [tool.ruff.lint.per-file-ignores] -"utils/grpc/weblog_pb2_grpc.py" = ["ALL"] # keep this one, it's a generated file -"utils/grpc/weblog_pb2.py" = ["ALL"] # keep this one, it's a generated file -"utils/scripts/*" = ["T201"] -"utils/interfaces/*" = [ - "A001", - "E713", - "FBT001", - "FBT003", - "PLR5501", - "PLW2901", - "PTH109", - "PTH112", - "RET502", - "RET504", - "RSE102", - "RUF010", - "RUF012", - "RUF013", - "S201", - "SIM102", - "SIM103", - "SIM118", - "T201", - "TRY203", - "UP011", - "UP034", -] +"utils/grpc/weblog_pb2_grpc.py" = ["ALL"] # keep this one, it's a generated file +"utils/grpc/weblog_pb2.py" = ["ALL"] # keep this 
one, it's a generated file +"utils/scripts/*" = ["T201"] # allow print statements in scripts folder "utils/{k8s_lib_injection/*,_context/_scenarios/k8s_lib_injection.py}" = [ "D207", "SIM115", @@ -257,7 +233,6 @@ ignore = [ "N803", "RET505", ] -"utils/otel_validators/validator_trace.py" = ["FBT001"] "utils/{_context/_scenarios/docker_ssi.py,docker_ssi/docker_ssi_matrix_builder.py,docker_ssi/docker_ssi_matrix_utils.py}" = [ "PLR2004", "SIM210", @@ -266,8 +241,6 @@ ignore = [ "SIM108", "T201", ] -"utils/telemetry_utils.py" = ["N806", "RUF012"] # to be corrected -"utils/waf_rules.py" = ["N801"] "utils/virtual_machine/*" = [ "A002", "ANN002", diff --git a/utils/interfaces/_core.py b/utils/interfaces/_core.py index 5293cead0c..f3c01a27f9 100644 --- a/utils/interfaces/_core.py +++ b/utils/interfaces/_core.py @@ -131,7 +131,7 @@ def get_data(self, path_filters=None): path_filters = [re.compile(path) for path in path_filters] for data in self._data_list: - if path_filters is not None and all((path.fullmatch(data["path"]) is None for path in path_filters)): + if path_filters is not None and all(path.fullmatch(data["path"]) is None for path in path_filters): continue yield data diff --git a/utils/interfaces/_library/appsec.py b/utils/interfaces/_library/appsec.py index 66e46d2c57..0e4df0f0ce 100644 --- a/utils/interfaces/_library/appsec.py +++ b/utils/interfaces/_library/appsec.py @@ -112,11 +112,9 @@ def validate(self, span, appsec_data): elif self.address and self.key_path and (self.address, self.key_path) not in full_addresses: logger.info(f"saw {full_addresses}, expecting {(self.address, self.key_path)}") - elif self.span_validator and not self.span_validator(span, appsec_data): - return False # validator should output the reason for the failure - else: - return True + # validator should output the reason for the failure + return not (self.span_validator and not self.span_validator(span, appsec_data)) def validate_legacy(self, event): event_version = event.get("event_version", "0.1.0") @@ -158,13 +156,13 @@ def __init__(self, header_name): self.header_name = header_name.lower() def validate_legacy(self, event): - headers = [n.lower() for n in event["context"]["http"]["request"]["headers"].keys()] + headers = [n.lower() for n in event["context"]["http"]["request"]["headers"]] assert self.header_name in headers, f"header {self.header_name} not reported" return True def validate(self, span, appsec_data): - headers = [n.lower() for n in span["meta"].keys() if n.startswith("http.request.headers.")] + headers = [n.lower() for n in span["meta"] if n.startswith("http.request.headers.")] assert f"http.request.headers.{self.header_name}" in headers, f"header {self.header_name} not reported" return True diff --git a/utils/interfaces/_logs.py b/utils/interfaces/_logs.py index cc9f8468cf..5f05d1f674 100644 --- a/utils/interfaces/_logs.py +++ b/utils/interfaces/_logs.py @@ -26,7 +26,7 @@ def __init__(self, name, new_log_line_pattern=None): self._data_list = [] def _get_files(self): - raise NotImplementedError() + raise NotImplementedError def _clean_line(self, line): return line @@ -51,7 +51,8 @@ def _read(self): try: with open(filename, "r", encoding="utf-8") as f: buffer = [] - for line in f: + for raw_line in f: + line = raw_line if line.endswith("\n"): line = line[:-1] # remove tailing \n line = self._clean_line(line) @@ -266,16 +267,16 @@ def check(self, data): if "message" in data and self.pattern.search(data["message"]): for key, extra_pattern in self.extra_conditions.items(): if key not in data: - 
logger.info(f"For {self}, {repr(self.pattern.pattern)} was found, but [{key}] field is missing") + logger.info(f"For {self}, {self.pattern.pattern!r} was found, but [{key}] field is missing") logger.info(f"-> Log line is {data['message']}") - return + return None if not extra_pattern.search(data[key]): logger.info( - f"For {self}, {repr(self.pattern.pattern)} was found, but condition on [{key}] failed: " + f"For {self}, {self.pattern.pattern!r} was found, but condition on [{key}] failed: " f"'{extra_pattern.pattern}' != '{data[key]}'" ) - return + return None logger.debug(f"For {self}, found {data['message']}") return True @@ -307,11 +308,11 @@ def test_main(self): context.scenario = scenarios.default i = _PostgresStdout() - i.configure(scenarios.default.host_log_folder, True) + i.configure(scenarios.default.host_log_folder, replay=True) i.load_data() for item in i.get_data(): - print(item) + print(item) # noqa: T201 if __name__ == "__main__": diff --git a/utils/interfaces/_schemas_validators.py b/utils/interfaces/_schemas_validators.py index 9f226ca25a..66d59cc3db 100644 --- a/utils/interfaces/_schemas_validators.py +++ b/utils/interfaces/_schemas_validators.py @@ -9,6 +9,7 @@ from dataclasses import dataclass import os +from pathlib import Path import json import re import functools @@ -37,7 +38,7 @@ def _get_schemas_filenames(): yield os.path.join(root, f) -@functools.lru_cache() +@functools.lru_cache def _get_schemas_store(): """returns a dict with all defined schemas""" @@ -57,7 +58,7 @@ def _get_schemas_store(): return store -@functools.lru_cache() +@functools.lru_cache def _get_schema_validator(schema_id): store = _get_schemas_store() @@ -116,7 +117,7 @@ def get_errors(self, data) -> list[SchemaError]: def _main(): for interface in ("agent", "library"): validator = SchemaValidator(interface) - folders = [folder for folder in os.listdir(".") if os.path.isdir(folder) and folder.startswith("logs")] + folders = [folder for folder in os.listdir(".") if Path(folder).is_dir() and folder.startswith("logs")] for folder in folders: path = f"{folder}/interfaces/{interface}" @@ -129,7 +130,7 @@ def _main(): if "request" in data and data["request"]["length"] != 0: for error in validator.get_errors(data): - print(error.message) + print(error.message) # noqa: T201 if __name__ == "__main__": diff --git a/utils/interfaces/_test_agent.py b/utils/interfaces/_test_agent.py index d1edef52ae..8e0fd5af8a 100644 --- a/utils/interfaces/_test_agent.py +++ b/utils/interfaces/_test_agent.py @@ -17,20 +17,17 @@ def collect_data(self, interface_folder, agent_host="localhost", agent_port=8126 logger.debug("Collecting data from test agent") client = agent_client.TestAgentClient(base_url=f"http://{agent_host}:{agent_port}") - try: - self._data_traces_list = client.traces(clear=False) - if self._data_traces_list: - pathlib.Path(f"{interface_folder}/00_traces.json").write_text( - json.dumps(self._data_traces_list, indent=2), encoding="utf-8" - ) - - self._data_telemetry_list = client.telemetry(clear=False) - if self._data_telemetry_list: - pathlib.Path(f"{interface_folder}/00_telemetry.json").write_text( - json.dumps(self._data_telemetry_list, indent=2), encoding="utf-8" - ) - except ValueError as e: - raise e + self._data_traces_list = client.traces(clear=False) + if self._data_traces_list: + pathlib.Path(f"{interface_folder}/00_traces.json").write_text( + json.dumps(self._data_traces_list, indent=2), encoding="utf-8" + ) + + self._data_telemetry_list = client.telemetry(clear=False) + if self._data_telemetry_list: + 
pathlib.Path(f"{interface_folder}/00_telemetry.json").write_text( + json.dumps(self._data_telemetry_list, indent=2), encoding="utf-8" + ) def get_traces(self, request=None): rid = get_rid_from_request(request) diff --git a/utils/interfaces/schemas/serve_doc.py b/utils/interfaces/schemas/serve_doc.py index 0268b21086..cf924d43e1 100644 --- a/utils/interfaces/schemas/serve_doc.py +++ b/utils/interfaces/schemas/serve_doc.py @@ -3,7 +3,7 @@ # Copyright 2021 Datadog, Inc. import os -import json +from pathlib import Path from flask import Flask, send_from_directory, request, render_template from utils.interfaces._schemas_validators import _get_schemas_store, _get_schemas_filenames @@ -13,7 +13,7 @@ static_folder = os.path.join(json_schema_for_humans.__path__[0], "templates/js") -template_folder = os.path.join(os.getcwd(), "utils/interfaces/schemas") +template_folder = os.path.join(str(Path.cwd()), "utils/interfaces/schemas") app = Flask(__name__, static_url_path="/static", static_folder=static_folder, template_folder=template_folder) @@ -26,12 +26,12 @@ def default(): data = {"schemas": []} - for id, schema in store.items(): + for schema_id, schema in store.items(): # skip some schemas - if not id.endswith("request.json") and not "title" in schema: + if not schema_id.endswith("request.json") and "title" not in schema: continue - doc_path = id.replace(".json", ".html") + doc_path = schema_id.replace(".json", ".html") # doc_path = doc_path[len("utils/interfaces/schemas"):] # filename = filename[len("utils/interfaces/schemas"):] @@ -52,4 +52,4 @@ def documentation(path): if __name__ == "__main__": - app.run(port=8080, debug=True) + app.run(port=8080, debug=True) # noqa: S201 From 7d615f35bcdf77143ce543d12c62a81b2428690f Mon Sep 17 00:00:00 2001 From: Charles de Beauchesne Date: Fri, 6 Dec 2024 17:27:02 +0100 Subject: [PATCH 6/7] Update changelog --- CHANGELOG.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9731a24d1d..07616dd049 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,19 @@ All notable changes to this project will be documented in this file. 
+ +### 2024-11 (207 PR merged) + +* 2024-11-22 [Docker SSI: report data to FPD](https://github.com/DataDog/system-tests/pull/3525) by @robertomonteromiguel +* 2024-11-21 [adding mypy checks](https://github.com/DataDog/system-tests/pull/3488) by @rachelyangdog +* 2024-11-18 [[ruby] Add Rails 7.2 and Rails 8.0 weblogs](https://github.com/DataDog/system-tests/pull/3471) by @vpellan +* 2024-11-13 [Use a unique way to define scenario groups #3400](https://github.com/DataDog/system-tests/pull/3451) by @cbeauchesne +* 2024-11-18 [Test for zombie processes in crashtracking](https://github.com/DataDog/system-tests/pull/3364) by @kevingosse +* 2024-11-04 [Fix parametric instability at container start](https://github.com/DataDog/system-tests/pull/3359) by @cbeauchesne +* 2024-11-06 [parametric: Adds a feature to track the parity for parametric endpoints](https://github.com/DataDog/system-tests/pull/3345) by @mabdinur +* 2024-11-04 [[golang] Migrate Parametric app from grpc to http](https://github.com/DataDog/system-tests/pull/3332) by @mtoffl01 + + ### 2024-10 (176 PR merged) * Lot of work done on SSI/onboarding by @robertomonteromiguel and @emmettbutler From 5802d25e27517c8789249673e173d5a54bbe4433 Mon Sep 17 00:00:00 2001 From: Charles de Beauchesne Date: Mon, 9 Dec 2024 09:56:32 +0100 Subject: [PATCH 7/7] Enables some ruff rules (#3642) --- conftest.py | 4 ++-- docs/architecture/test_template.py | 2 +- pyproject.toml | 7 +------ utils/__init__.py | 15 +++++++++++++++ utils/_context/_scenarios/__init__.py | 1 - utils/_context/_scenarios/auto_injection.py | 2 +- utils/_context/_scenarios/integrations.py | 8 ++++---- utils/_context/containers.py | 4 ++-- utils/_context/core.py | 2 +- utils/_decorators.py | 7 +------ utils/cgroup_info.py | 1 - utils/dd_constants.py | 4 ++-- utils/docker_ssi/docker_ssi_model.py | 2 +- utils/interfaces/schemas/serve_doc.py | 6 +++--- .../k8s_lib_injection/k8s_datadog_kubernetes.py | 5 ----- utils/k8s_lib_injection/k8s_sync_kubectl.py | 10 +++++----- utils/k8s_lib_injection/k8s_weblog.py | 3 +-- utils/k8s_lib_injection/k8s_wrapper.py | 4 +--- utils/onboarding/debug_vm.py | 4 ++-- utils/onboarding/injection_log_parser.py | 2 +- utils/onboarding/weblog_interface.py | 4 ++-- utils/properties_serialization.py | 4 ++-- utils/scripts/compute_impacted_scenario.py | 4 ++-- utils/scripts/get-nightly-logs.py | 2 +- utils/scripts/get-workflow-summary.py | 2 +- utils/scripts/grep-nightly-logs.py | 2 +- utils/scripts/merge_gitlab_aws_pipelines.py | 1 - utils/scripts/push-metrics.py | 2 +- utils/virtual_machine/utils.py | 8 ++++---- utils/virtual_machine/vagrant_provider.py | 6 +++--- .../virtual_machine/virtual_machine_provider.py | 16 ++++++++-------- 31 files changed, 69 insertions(+), 75 deletions(-) diff --git a/conftest.py b/conftest.py index 1143b2d7da..205bf867fe 100644 --- a/conftest.py +++ b/conftest.py @@ -214,13 +214,13 @@ def pytest_pycollect_makemodule(module_path, parent): nodeid = str(module_path.relative_to(module_path.cwd())) if nodeid in manifests and library in manifests[nodeid]: - declaration = manifests[nodeid][library] + declaration: str = manifests[nodeid][library] logger.info(f"Manifest declaration found for {nodeid}: {declaration}") mod: pytest.Module = pytest.Module.from_parent(parent, path=module_path) - if declaration.startswith("irrelevant") or declaration.startswith("flaky"): + if declaration.startswith(("irrelevant", "flaky")): mod.add_marker(pytest.mark.skip(reason=declaration)) logger.debug(f"Module {nodeid} is skipped by manifest file because 
{declaration}") else: diff --git a/docs/architecture/test_template.py b/docs/architecture/test_template.py index 801114e7e4..51ca011195 100644 --- a/docs/architecture/test_template.py +++ b/docs/architecture/test_template.py @@ -2,7 +2,7 @@ # This product includes software developed at Datadog (https://www.datadoghq.com/). # Copyright 2021 Datadog, Inc. -from utils import weblog, interfaces, context, irrelevant +from utils import weblog, interfaces # *ATTENTION*: Copy this file to the tests folder, modify, and rename with a prefix of `test_` to enable your new tests diff --git a/pyproject.toml b/pyproject.toml index 70ef333a55..76c3dfd59b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -98,15 +98,12 @@ ignore = [ "E501", # line too long "E722", # TBD "E741", - "F401", # easy "FBT001", "FBT002", # Boolean default positional argument, TBD - "FURB129", "INP001", # implicit package "N801", # easy "N806", - "PERF401", - "PIE810", + "PERF401", # TBD, the "good" code can be harder to read "PLR0911", # too many return, may be replaced by a higher default value "PLR0912", # Too many branches "PLR0913", # too many arguments, may be replaced by a higher default value @@ -145,8 +142,6 @@ ignore = [ "UP015", "UP024", "UP038", # we really want this? TBD - "W291", # trailing whitespace, easy one - "W293", ### Ignores that will be kept for the entire project diff --git a/utils/__init__.py b/utils/__init__.py index 6a09e25684..7b8ca702eb 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -10,3 +10,18 @@ from utils import interfaces, _remote_config as remote_config from utils.interfaces._core import ValidationError from utils._features import features + +__all__ = [ + "ValidationError", + "bug", + "context", + "features", + "flaky", + "interfaces", + "irrelevant", + "missing_feature", + "remote_config", + "rfc", + "scenarios", + "weblog", +] diff --git a/utils/_context/_scenarios/__init__.py b/utils/_context/_scenarios/__init__.py index 2bac4b790b..995408cabf 100644 --- a/utils/_context/_scenarios/__init__.py +++ b/utils/_context/_scenarios/__init__.py @@ -1,4 +1,3 @@ -import os import json import pytest diff --git a/utils/_context/_scenarios/auto_injection.py b/utils/_context/_scenarios/auto_injection.py index 501f221ae1..46a76b9600 100644 --- a/utils/_context/_scenarios/auto_injection.py +++ b/utils/_context/_scenarios/auto_injection.py @@ -443,7 +443,7 @@ def __init__( class InstallerAutoInjectionScenarioProfiling(_VirtualMachineScenario): - """ As Profiling is not included in GA (2024/11) we reduce the number of VMS to speed up the execution + """ As Profiling is not included in GA (2024/11) we reduce the number of VMS to speed up the execution Until we fix the performance problems on the AWS architecture and speed up the tests""" def __init__( diff --git a/utils/_context/_scenarios/integrations.py b/utils/_context/_scenarios/integrations.py index dd46d035ad..430d236bca 100644 --- a/utils/_context/_scenarios/integrations.py +++ b/utils/_context/_scenarios/integrations.py @@ -59,7 +59,7 @@ class AWSIntegrationsScenario(EndToEndScenario): 🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫🔴🚫 ⚠️⚠️⚠️⚠️⚠️⚠️⚠️ AWS Authentication Error ⚠️⚠️⚠️⚠️⚠️⚠️⚠️ - It seems that your AWS authentication is not set up correctly. + It seems that your AWS authentication is not set up correctly. 
Please take the following actions: 🔑 With `aws-vault` setup: @@ -67,11 +67,11 @@ class AWSIntegrationsScenario(EndToEndScenario): To enter an authenticated shell session that sets temp AWS credentials in your shell environment: 👉 `aws-vault login sso-sandbox-account-admin --` 👉 `[your system-test command]` - or - + or + To run ONLY the system tests command with auth: (temp AWS credentials are not set in shell environment) 👉 `aws-vault login sso-sandbox-account-admin -- [your system-test command]` - + 🔧 Or to first set up `aws-vault` / `aws-cli`, please visit: 🔗 [AWS CLI Config Setup & Update Guide] diff --git a/utils/_context/containers.py b/utils/_context/containers.py index 62de71df57..35b0d7e83f 100644 --- a/utils/_context/containers.py +++ b/utils/_context/containers.py @@ -747,7 +747,7 @@ def _get_image_list_from_dockerfile(dockerfile) -> list[str]: pattern = re.compile(r"FROM\s+(?P[^ ]+)") with open(dockerfile, "r", encoding="utf-8") as f: - for line in f.readlines(): + for line in f: if match := pattern.match(line): result.append(match.group("image_name")) @@ -761,7 +761,7 @@ def get_image_list(self, library: str, weblog: str) -> list[str]: pattern = re.compile(r"^FROM\s+(?P[^\s]+)") arg_pattern = re.compile(r"^ARG\s+(?P[^\s]+)\s*=\s*(?P[^\s]+)") with open(f"utils/build/docker/{library}/{weblog}.Dockerfile", "r", encoding="utf-8") as f: - for line in f.readlines(): + for line in f: if match := arg_pattern.match(line): args[match.group("arg_name")] = match.group("arg_value") diff --git a/utils/_context/core.py b/utils/_context/core.py index e3e7b911a9..15e386945a 100644 --- a/utils/_context/core.py +++ b/utils/_context/core.py @@ -8,7 +8,7 @@ class _Context: - """ + """ Context is an helper class that exposes scenario properties Those properties may be used in decorators, and thus, should always exists, even if the current scenario does not define them. diff --git a/utils/_decorators.py b/utils/_decorators.py index 5b4f9ee454..b4616fa7ee 100644 --- a/utils/_decorators.py +++ b/utils/_decorators.py @@ -229,12 +229,7 @@ def compute_declaration(only_for_library, component_name, declaration, tested_ve assert declaration != "?" # ensure there is no more ? in version declaration - if ( - declaration.startswith("missing_feature") - or declaration.startswith("flaky") - or declaration.startswith("bug") - or declaration.startswith("irrelevant") - ): + if declaration.startswith(("missing_feature", "bug", "flaky", "irrelevant")): return declaration # declaration must be now a version number diff --git a/utils/cgroup_info.py b/utils/cgroup_info.py index 11d270a128..7c897e0745 100644 --- a/utils/cgroup_info.py +++ b/utils/cgroup_info.py @@ -3,7 +3,6 @@ # Copyright 2021 Datadog, Inc. 
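
For background on the data this module handles: each line of `/proc/self/cgroup` has the form `<id>:<controller>:<path>`, and inside a Docker container the path ends with the 64-hex-character container id. A rough sketch of that extraction, assuming the same regex-based parsing this file performs (the sample line is hypothetical):

    import re

    line = "4:cpu:/docker/" + "a" * 64  # hypothetical /proc/self/cgroup entry
    match = re.fullmatch(r"(\d+):([^:]*):/docker/([0-9a-f]{64})", line)
    assert match is not None and match.group(3) == "a" * 64
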
 import re
-from typing import Optional
 
 import attr
 
diff --git a/utils/dd_constants.py b/utils/dd_constants.py
index 9ea7deb6d7..1cec6cf0f3 100644
--- a/utils/dd_constants.py
+++ b/utils/dd_constants.py
@@ -1,6 +1,6 @@
 from enum import IntEnum
 
-from opentelemetry.trace import SpanKind  # pylint: disable=W0611
-from opentelemetry.trace import StatusCode  # pylint: disable=W0611
+from opentelemetry.trace import SpanKind  # noqa: F401
+from opentelemetry.trace import StatusCode  # noqa: F401
 
 
 # Key used in the metrics map to indicate tracer sampling priority
diff --git a/utils/docker_ssi/docker_ssi_model.py b/utils/docker_ssi/docker_ssi_model.py
index 979d7bc65b..bdb36ea412 100644
--- a/utils/docker_ssi/docker_ssi_model.py
+++ b/utils/docker_ssi/docker_ssi_model.py
@@ -32,7 +32,7 @@ def name(self):
 
 
 class WeblogDescriptor:
-    """ Encapsulates information of the weblog: name, library and 
+    """ Encapsulates information of the weblog: name, library and
     supported images with the supported installable runtime versions """
 
     # see utils._features to check ids
diff --git a/utils/interfaces/schemas/serve_doc.py b/utils/interfaces/schemas/serve_doc.py
index cf924d43e1..d54c0f105a 100644
--- a/utils/interfaces/schemas/serve_doc.py
+++ b/utils/interfaces/schemas/serve_doc.py
@@ -5,9 +5,9 @@
 import os
 from pathlib import Path
 
-from flask import Flask, send_from_directory, request, render_template
-from utils.interfaces._schemas_validators import _get_schemas_store, _get_schemas_filenames
-from json_schema_for_humans.generate import generate_from_schema, generate_from_filename
+from flask import Flask, render_template
+from utils.interfaces._schemas_validators import _get_schemas_store
+from json_schema_for_humans.generate import generate_from_schema
 from json_schema_for_humans.generation_configuration import GenerationConfiguration
 import json_schema_for_humans
diff --git a/utils/k8s_lib_injection/k8s_datadog_kubernetes.py b/utils/k8s_lib_injection/k8s_datadog_kubernetes.py
index 21ba762fc1..7ca4762f35 100644
--- a/utils/k8s_lib_injection/k8s_datadog_kubernetes.py
+++ b/utils/k8s_lib_injection/k8s_datadog_kubernetes.py
@@ -1,7 +1,4 @@
 import time
-import os
-import json
-import base64
 
 from kubernetes import client, watch
 
@@ -9,8 +6,6 @@
     helm_add_repo,
     helm_install_chart,
     execute_command_sync,
-    path_clusterrole,
-    kubectl_apply,
 )
 from utils.k8s_lib_injection.k8s_logger import k8s_logger
diff --git a/utils/k8s_lib_injection/k8s_sync_kubectl.py b/utils/k8s_lib_injection/k8s_sync_kubectl.py
index 89795c6d0f..84eb0622b4 100644
--- a/utils/k8s_lib_injection/k8s_sync_kubectl.py
+++ b/utils/k8s_lib_injection/k8s_sync_kubectl.py
@@ -9,7 +9,7 @@ class KubectlLockException(Exception):
 
 
 class KubectlLock(object):
-    """ A file locking mechanism that has context-manager support so 
+    """ A file locking mechanism that has context-manager support so
     you can use it in a with statement. This should be relatively cross
     compatible as it doesn't rely on msvcrt or fcntl for the locking.
     """
@@ -29,7 +29,7 @@ def __init__(self, file_name=f"{context.scenario.host_log_folder}/kubectl.sync",
     def acquire(self):
         """ Acquire the lock, if possible. If the lock is in use, it check again
         every `wait` seconds. It does this until it either gets the lock or
-        exceeds `timeout` number of seconds, in which case it throws 
+        exceeds `timeout` number of seconds, in which case it throws
         an exception.
""" start_time = time.time() @@ -50,8 +50,8 @@ def acquire(self): # self.is_locked = True def release(self): - """ Get rid of the lock by deleting the lockfile. - When working in a `with` statement, this gets automatically + """ Get rid of the lock by deleting the lockfile. + When working in a `with` statement, this gets automatically called at the end. """ if self.is_locked: @@ -60,7 +60,7 @@ def release(self): self.is_locked = False def __enter__(self): - """ Activated when used in the with statement. + """ Activated when used in the with statement. Should automatically acquire a lock to be used in the with block. """ if not self.is_locked: diff --git a/utils/k8s_lib_injection/k8s_weblog.py b/utils/k8s_lib_injection/k8s_weblog.py index 23145a3b60..3025f4023c 100644 --- a/utils/k8s_lib_injection/k8s_weblog.py +++ b/utils/k8s_lib_injection/k8s_weblog.py @@ -1,5 +1,4 @@ -import time, datetime -from kubernetes import client, config, watch +from kubernetes import client, watch from utils.k8s_lib_injection.k8s_logger import k8s_logger diff --git a/utils/k8s_lib_injection/k8s_wrapper.py b/utils/k8s_lib_injection/k8s_wrapper.py index 7d3ceb9cd6..5080673c70 100644 --- a/utils/k8s_lib_injection/k8s_wrapper.py +++ b/utils/k8s_lib_injection/k8s_wrapper.py @@ -1,6 +1,4 @@ -import time -from kubernetes import client, config, watch -from utils.tools import logger +from kubernetes import client, config from retry import retry diff --git a/utils/onboarding/debug_vm.py b/utils/onboarding/debug_vm.py index ad82004400..2215d46045 100644 --- a/utils/onboarding/debug_vm.py +++ b/utils/onboarding/debug_vm.py @@ -5,8 +5,8 @@ def extract_logs_to_file(logs_data, log_folder): - """ extract logs to different files. - The logs_data is a string results of executing the command: + """ extract logs to different files. + The logs_data is a string results of executing the command: find /var/log -type f -name "*.log"| xargs tail -n +1 """ output_file = None diff --git a/utils/onboarding/injection_log_parser.py b/utils/onboarding/injection_log_parser.py index cc193754af..4b6da08e86 100644 --- a/utils/onboarding/injection_log_parser.py +++ b/utils/onboarding/injection_log_parser.py @@ -9,7 +9,7 @@ def exclude_telemetry_logs_filter(line): def command_injection_skipped(command_line, log_local_path): - """ From parsed log, search on the list of logged commands + """ From parsed log, search on the list of logged commands if one command has been skipped from the instrumentation""" command, command_args = _parse_command(command_line) logger.debug(f"- Checking command: {command_args}") diff --git a/utils/onboarding/weblog_interface.py b/utils/onboarding/weblog_interface.py index 007ddb2d59..d5ee3038df 100644 --- a/utils/onboarding/weblog_interface.py +++ b/utils/onboarding/weblog_interface.py @@ -28,13 +28,13 @@ def warmup_weblog(app_url): def make_internal_get_request(stdin_file, vm_port): - """ This method is exclusively for testing through KrunVm microVM. + """ This method is exclusively for testing through KrunVm microVM. It is used to make a request to the weblog application inside the VM, using stdin file""" generated_uuid = str(randint(1, 100000000000000000)) timeout = 80 script_to_run = f"""#!/bin/bash -echo "Requesting weblog..." +echo "Requesting weblog..." 
URL="http://localhost:{vm_port}/" TIMEOUT={timeout} TRACE_ID={generated_uuid} diff --git a/utils/properties_serialization.py b/utils/properties_serialization.py index 24b3a42358..ef3511157c 100644 --- a/utils/properties_serialization.py +++ b/utils/properties_serialization.py @@ -44,9 +44,9 @@ def from_dict(d): class SetupProperties: - """ + """ This class will store all properties initialized by setup function, and dump =them into a file - In replay mode, it will restore then to the good instance + In replay mode, it will restore then to the good instance """ def __init__(self): diff --git a/utils/scripts/compute_impacted_scenario.py b/utils/scripts/compute_impacted_scenario.py index 79c80d584e..fd1f0ab4f3 100644 --- a/utils/scripts/compute_impacted_scenario.py +++ b/utils/scripts/compute_impacted_scenario.py @@ -97,7 +97,7 @@ def main(): # git diff --name-only HEAD ${{ github.event.pull_request.base.sha || github.sha }} >> modified_files.txt with open("modified_files.txt", "r", encoding="utf-8") as f: - modified_files = [line.strip() for line in f.readlines()] + modified_files = [line.strip() for line in f] for file in modified_files: @@ -105,7 +105,7 @@ def main(): if file.startswith("tests/auto_inject"): # Nothing to do, onboarding test run on gitlab nightly or manually pass - elif file.endswith("/utils.py") or file.endswith("/conftest.py"): + elif file.endswith(("/utils.py", "/conftest.py")): # particular use case for modification in tests/ of a file utils.py or conftest.py # in that situation, takes all scenarios executed in tests// diff --git a/utils/scripts/get-nightly-logs.py b/utils/scripts/get-nightly-logs.py index eab35aee64..d138ff84f5 100644 --- a/utils/scripts/get-nightly-logs.py +++ b/utils/scripts/get-nightly-logs.py @@ -21,7 +21,7 @@ def get_environ(): try: with open(".env", "r", encoding="utf-8") as f: - lines = [l.replace("export ", "").strip().split("=") for l in f.readlines() if l.strip()] + lines = [l.replace("export ", "").strip().split("=") for l in f if l.strip()] environ = {**environ, **dict(lines)} except FileNotFoundError: pass diff --git a/utils/scripts/get-workflow-summary.py b/utils/scripts/get-workflow-summary.py index 742ce3218d..7c610398fe 100644 --- a/utils/scripts/get-workflow-summary.py +++ b/utils/scripts/get-workflow-summary.py @@ -10,7 +10,7 @@ def get_environ(): try: with open(".env", "r", encoding="utf-8") as f: - lines = [l.replace("export ", "").strip().split("=") for l in f.readlines() if l.strip()] + lines = [l.replace("export ", "").strip().split("=") for l in f if l.strip()] environ = {**environ, **dict(lines)} except FileNotFoundError: pass diff --git a/utils/scripts/grep-nightly-logs.py b/utils/scripts/grep-nightly-logs.py index e518de2729..6f78f3fe22 100644 --- a/utils/scripts/grep-nightly-logs.py +++ b/utils/scripts/grep-nightly-logs.py @@ -18,7 +18,7 @@ def get_environ(): try: with open(".env", "r", encoding="utf-8") as f: - lines = [l.replace("export ", "").strip().split("=") for l in f.readlines() if l.strip()] + lines = [l.replace("export ", "").strip().split("=") for l in f if l.strip()] environ = {**environ, **dict(lines)} except FileNotFoundError: pass diff --git a/utils/scripts/merge_gitlab_aws_pipelines.py b/utils/scripts/merge_gitlab_aws_pipelines.py index 9bc5bda2cf..f4e8d8b25b 100644 --- a/utils/scripts/merge_gitlab_aws_pipelines.py +++ b/utils/scripts/merge_gitlab_aws_pipelines.py @@ -1,4 +1,3 @@ -import json import yaml import argparse import os.path diff --git a/utils/scripts/push-metrics.py b/utils/scripts/push-metrics.py 
index 2a6d2ca62b..df83010091 100644
--- a/utils/scripts/push-metrics.py
+++ b/utils/scripts/push-metrics.py
@@ -1,6 +1,6 @@
 # Successfully installed datadog_api_client-2.24.1
 
-from datetime import datetime, timezone, UTC
+from datetime import datetime, UTC
 
 import requests
 
diff --git a/utils/virtual_machine/utils.py b/utils/virtual_machine/utils.py
index c6dad21077..614afa13e3 100644
--- a/utils/virtual_machine/utils.py
+++ b/utils/virtual_machine/utils.py
@@ -6,9 +6,9 @@
 
 
 def parametrize_virtual_machines(bugs: list[dict] = None):
-    """ You can set multiple bugs for a single test case. 
-    If you want to set a bug for a specific VM, you can set the vm_name or vm_cpu or weblog_variant in the bug dictionary (using one or more fields). 
-    ie: 
+    """ You can set multiple bugs for a single test case.
+    If you want to set a bug for a specific VM, you can set the vm_name or vm_cpu or weblog_variant in the bug dictionary (using one or more fields).
+    ie:
     - Marks as bug for vm with name "vm1" and weblog_variant "app1"
       * @parametrize_virtual_machines(bugs=[{"vm_name":"vm1", "weblog_variant":"app1", "reason": "APMON-1576"}])
     - Marks as bug for vm with cpu type "amd64" and weblog_variant "app1"
@@ -95,7 +95,7 @@ def get_tested_apps_vms():
 
 
 def nginx_parser(nginx_config_file):
-    """ This function is used to parse the nginx config file and return the apps in the return block of the location block of the server block of the http block. 
+    """ This function is used to parse the nginx config file and return the apps in the return block of the location block of the server block of the http block.
     TODO: Improve this uggly code """
     import crossplane
     import json
diff --git a/utils/virtual_machine/vagrant_provider.py b/utils/virtual_machine/vagrant_provider.py
index acf3d1f2e5..5cc50768c0 100644
--- a/utils/virtual_machine/vagrant_provider.py
+++ b/utils/virtual_machine/vagrant_provider.py
@@ -42,7 +42,7 @@ def stack_up(self):
 
     def _set_vagrant_configuration(self, vm):
         """ Makes some configuration on the vagrant files
-        These configurations are relative to the provider and to port forwarding (for weblog) and port for ssh 
+        These configurations are relative to the provider and to port forwarding (for weblog) and port for ssh
         TODO Support for different vagrant providers. Currently only support for qemu
         """
@@ -58,7 +58,7 @@ def _set_vagrant_configuration(self, vm):
             qe.machine = "q35"
             qe.cpu = "max"
             qe.smp = "cpus=8,sockets=1,cores=8,threads=1"
-            qe.net_device = "virtio-net-pci" 
+            qe.net_device = "virtio-net-pci"
         """
         port_configuration = f"""
             config.vm.network "forwarded_port", guest: 5985, host: {vm.deffault_open_port}
@@ -170,7 +170,7 @@ def remote_copy_folders(
 
 class MySFTPClient(paramiko.SFTPClient):
     def put_dir(self, source, target):
         """ Uploads the contents of the source directory to the target path. The
-            target directory needs to exists. All subdirectories in source are 
+            target directory needs to exists. All subdirectories in source are
             created under target.
         """
         for item in os.listdir(source):
diff --git a/utils/virtual_machine/virtual_machine_provider.py b/utils/virtual_machine/virtual_machine_provider.py
index 88efb60d82..728826c19b 100644
--- a/utils/virtual_machine/virtual_machine_provider.py
+++ b/utils/virtual_machine/virtual_machine_provider.py
@@ -38,7 +38,7 @@ def configure(self, required_vms):
         self.vms = required_vms
 
     def stack_up(self):
-        """ Each provider should implement the method that start up all the machines. 
+        """ Each provider should implement the method that start up all the machines.
        After each machine is up, you will call the install_provision method for each machine. """
         raise NotImplementedError
 
@@ -47,7 +47,7 @@ def stack_destroy(self):
         raise NotImplementedError
 
     def install_provision(self, vm, server, server_connection):
-        """ 
+        """
         This method orchestrate the provision installation for a machine
         Vm object contains the provision for the machine.
         The provision structure must satisfy the class utils/virtual_machine/virtual_machine_provisioner.py#Provision
@@ -127,7 +127,7 @@ def install_provision(self, vm, server, server_connection):
         )
 
     def _remote_install(self, server_connection, vm, last_task, installation, logger_name=None, output_callback=None):
-        """ Manages a installation. 
+        """ Manages a installation.
         The installation must satisfy the class utils/virtual_machine/virtual_machine_provisioner.py#Installation """
         local_command = None
         command_environment = vm.get_command_environment()
@@ -218,15 +218,15 @@ class Commander:
     """ Run commands on the VMs. Each provider should implement this class."""
 
     def create_cache(self, vm, server, last_task):
-        """ Create a cache from existing server. 
-        Use vm.get_cache_name() to get the cache name. 
+        """ Create a cache from existing server.
+        Use vm.get_cache_name() to get the cache name.
         Server is the started server to create the cache from.
         Use last_task to depend on the last executed task.
         Return the current task executed."""
         return last_task
 
     def execute_local_command(self, local_command_id, local_command, env, last_task, logger_name):
-        """ Execute a local command in the current machine. 
+        """ Execute a local command in the current machine.
         Env contain environment variables to be used in the command.
         logger_name is the name of the logger to use to store the output of the command.
         Use last_task to depend on the last executed task.
@@ -234,7 +234,7 @@ def execute_local_command(self, local_command_id, local_command, env, last_task,
         Return the current task executed."""
         raise NotImplementedError
 
     def copy_file(self, id, local_path, remote_path, connection, last_task, vm=None):
-        """ Copy a file from local to remote. 
+        """ Copy a file from local to remote.
         Use last_task to depend on the last executed task.
         Return the current task executed."""
@@ -242,7 +242,7 @@ def copy_file(self, id, local_path, remote_path, connection, last_task, vm=None)
         raise NotImplementedError
 
     def remote_command(
         self, id, remote_command, connection, last_task, logger_name, output_callback=None, populate_env=True
     ):
-        """ Execute a command in the remote server. 
+        """ Execute a command in the remote server.
         Use last_task to depend on the last executed task.
         logger_name is the name of the logger to use to store the output of the command.
         output_callback is a function to be called with the output of the command.
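
The Provider and Commander base classes in the last hunks define their contract by raising NotImplementedError in the base methods. A stricter variant of the same pattern is abc.abstractmethod, which fails at instantiation time rather than at first call. A minimal sketch with hypothetical names that only illustrate the pattern, not the repo's real API:

    from abc import ABC, abstractmethod

    class BaseCommander(ABC):  # hypothetical, simplified stand-in
        @abstractmethod
        def remote_command(self, command: str) -> str:
            """Run a command on the remote machine and return its output."""

    class LocalEchoCommander(BaseCommander):
        def remote_command(self, command: str) -> str:
            return f"would run: {command}"  # placeholder transport, no real SSH

    print(LocalEchoCommander().remote_command("uname -a"))

With ABC, forgetting to override remote_command makes instantiation fail immediately, while the NotImplementedError style used above defers the failure until the method is called.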
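The utils/_decorators.py hunk collapses a chain of or-ed startswith calls into one call taking a tuple, and utils/scripts/compute_impacted_scenario.py does the same for endswith; this is the rewrite that ruff's PIE810 rule (un-ignored in the pyproject.toml hunk) enforces. A self-contained illustration, with made-up sample strings:

    SKIP_PREFIXES = ("missing_feature", "bug", "flaky", "irrelevant")

    def is_skip_declaration(declaration: str) -> bool:
        # str.startswith accepts a tuple: True if any prefix matches
        return declaration.startswith(SKIP_PREFIXES)

    assert is_skip_declaration("bug (SOME-TICKET)")
    assert not is_skip_declaration("v1.2.3")
    assert "tests/foo/conftest.py".endswith(("/utils.py", "/conftest.py"))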
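Several hunks (utils/_context/containers.py and the get_environ helpers in utils/scripts/*.py) replace `for line in f.readlines()` with `for line in f`: a text-mode file object is already an iterator over its lines, so readlines() only adds an extra list in memory, which is what ruff's FURB129 (also un-ignored above) flags. A runnable sketch of the .env-style parsing, using a throwaway temporary file and made-up values:

    import tempfile

    with tempfile.NamedTemporaryFile("w+") as f:
        f.write("export DD_SITE=example\n\nexport DD_ENV=dev\n")
        f.seek(0)
        # iterate the file object directly instead of calling f.readlines()
        lines = [l.replace("export ", "").strip().split("=") for l in f if l.strip()]

    assert dict(lines) == {"DD_SITE": "example", "DD_ENV": "dev"}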
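Re-enabling F401 (unused import) requires escape hatches for intentional re-exports: utils/__init__.py now declares __all__, while utils/dd_constants.py trades `# pylint: disable=W0611` for the ruff-visible `# noqa: F401`. A toy module showing both spellings, with stdlib names standing in for the real re-exports:

    # Marked re-export: noqa tells ruff this "unused" import is intentional.
    from json import dumps  # noqa: F401
    from pathlib import Path

    # Alternatively, names listed in __all__ count as used public API,
    # so linters stop reporting the imports as dead code.
    __all__ = ["Path", "dumps"]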
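The utils/scripts/push-metrics.py import shrinks because Python 3.11 added datetime.UTC as an alias of timezone.utc, and ruff's UP017 rule prefers the alias, making the timezone import redundant. Assuming a 3.11+ interpreter:

    from datetime import UTC, datetime

    now = datetime.now(UTC)  # equivalent to datetime.now(timezone.utc)
    assert now.tzinfo is UTC
    print(now.isoformat())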