From e0145a9b0b0dd559ac4e5d696b36e04ca5842fe0 Mon Sep 17 00:00:00 2001 From: Martin Slemr Date: Wed, 26 Jul 2023 12:58:29 +0200 Subject: [PATCH] Automation Analytics Export to Ingress It collects db, os and cluster data, packs them to 1+ tarballs and sends to console.redhat.com, if enabled. Jira AA-1757 No-Issue Signed-off-by: Martin Slemr --- CHANGES/aa-1757.feature | 2 + ...metrics-collection-automation-analytics.py | 45 ++++ ...s3.py => metrics-collection-lightspeed.py} | 12 +- .../__init__.py | 0 .../automation_analytics/__init__.py | 0 .../automation_analytics/collector.py | 69 ++++++ .../automation_analytics/data.py | 143 +++++++++++ .../automation_analytics/package.py | 57 +++++ galaxy_ng/app/metrics_collection/collector.py | 11 + .../app/metrics_collection/common_data.py | 170 +++++++++++++ .../metrics_collection/lightspeed/__init__.py | 0 .../lightspeed}/collector.py | 23 +- .../lightspeed/data.py} | 49 +--- .../lightspeed}/package.py | 5 +- galaxy_ng/app/settings.py | 17 ++ .../management/commands/analytics/__init__.py | 0 .../automation_analytics/__init__.py | 0 .../automation_analytics/test_collector.py | 223 ++++++++++++++++++ .../commands/test_analytics_export_s3.py | 41 ---- .../test_metrics_collection_lightspeed.py | 45 ++++ 20 files changed, 815 insertions(+), 97 deletions(-) create mode 100644 CHANGES/aa-1757.feature create mode 100644 galaxy_ng/app/management/commands/metrics-collection-automation-analytics.py rename galaxy_ng/app/management/commands/{analytics-export-s3.py => metrics-collection-lightspeed.py} (57%) rename galaxy_ng/app/{management/commands/analytics => metrics_collection}/__init__.py (100%) create mode 100644 galaxy_ng/app/metrics_collection/automation_analytics/__init__.py create mode 100644 galaxy_ng/app/metrics_collection/automation_analytics/collector.py create mode 100644 galaxy_ng/app/metrics_collection/automation_analytics/data.py create mode 100644 galaxy_ng/app/metrics_collection/automation_analytics/package.py create 
mode 100644 galaxy_ng/app/metrics_collection/collector.py create mode 100644 galaxy_ng/app/metrics_collection/common_data.py create mode 100644 galaxy_ng/app/metrics_collection/lightspeed/__init__.py rename galaxy_ng/app/{management/commands/analytics => metrics_collection/lightspeed}/collector.py (64%) rename galaxy_ng/app/{management/commands/analytics/galaxy_collector.py => metrics_collection/lightspeed/data.py} (88%) rename galaxy_ng/app/{management/commands/analytics => metrics_collection/lightspeed}/package.py (86%) create mode 100644 galaxy_ng/tests/unit/app/management/commands/analytics/__init__.py create mode 100644 galaxy_ng/tests/unit/app/management/commands/analytics/automation_analytics/__init__.py create mode 100644 galaxy_ng/tests/unit/app/management/commands/analytics/automation_analytics/test_collector.py delete mode 100644 galaxy_ng/tests/unit/app/management/commands/test_analytics_export_s3.py create mode 100644 galaxy_ng/tests/unit/app/management/commands/test_metrics_collection_lightspeed.py diff --git a/CHANGES/aa-1757.feature b/CHANGES/aa-1757.feature new file mode 100644 index 0000000000..77a2587f52 --- /dev/null +++ b/CHANGES/aa-1757.feature @@ -0,0 +1,2 @@ +Added management command `metrics-collection-automation-analytics`. +Renamed command `analytics-export-s3` to `metrics-collection-lightspeed`. 
diff --git a/galaxy_ng/app/management/commands/metrics-collection-automation-analytics.py b/galaxy_ng/app/management/commands/metrics-collection-automation-analytics.py new file mode 100644 index 0000000000..014aa81970 --- /dev/null +++ b/galaxy_ng/app/management/commands/metrics-collection-automation-analytics.py @@ -0,0 +1,45 @@ +import logging + +from django.core.management.base import BaseCommand +from galaxy_ng.app.metrics_collection.automation_analytics.collector import Collector +from galaxy_ng.app.metrics_collection.automation_analytics import data as automation_analytics_data + +logger = logging.getLogger("metrics_collection.export_automation_analytics") + + +class Command(BaseCommand): + help = ("Django management command to export collections data to " + "ingress -> automation metrics_collection") + + def add_arguments(self, parser): + parser.add_argument( + '--dry-run', dest='dry-run', action='store_true', + help='Gather metrics_collection without shipping' + ) + parser.add_argument( + '--ship', dest='ship', action='store_true', + help='Enable to ship metrics to the Red Hat Cloud' + ) + + def handle(self, *args, **options): + """Handle command""" + + opt_ship = options.get('ship') + opt_dry_run = options.get('dry-run') + + if opt_ship and opt_dry_run: + logger.error('Both --ship and --dry-run cannot be processed at the same time.') + return + + collector = Collector( + collector_module=automation_analytics_data, + collection_type=Collector.MANUAL_COLLECTION if opt_ship else Collector.DRY_RUN, + logger=logger + ) + + tgzfiles = collector.gather() + if tgzfiles: + for tgz in tgzfiles: + self.stdout.write(tgz) + else: + self.stdout.write("No metrics_collection tarballs collected") diff --git a/galaxy_ng/app/management/commands/analytics-export-s3.py b/galaxy_ng/app/management/commands/metrics-collection-lightspeed.py similarity index 57% rename from galaxy_ng/app/management/commands/analytics-export-s3.py rename to 
galaxy_ng/app/management/commands/metrics-collection-lightspeed.py index c1a3dfff74..1381834dcf 100644 --- a/galaxy_ng/app/management/commands/analytics-export-s3.py +++ b/galaxy_ng/app/management/commands/metrics-collection-lightspeed.py @@ -1,11 +1,11 @@ import logging from django.core.management.base import BaseCommand -from galaxy_ng.app.management.commands.analytics.collector import Collector -from galaxy_ng.app.management.commands.analytics import galaxy_collector +from galaxy_ng.app.metrics_collection.lightspeed.collector import Collector +from galaxy_ng.app.metrics_collection.lightspeed import data as lightspeed_data from django.utils.timezone import now, timedelta -logger = logging.getLogger("analytics") +logger = logging.getLogger("metrics_collection.export_lightspeed") class Command(BaseCommand): @@ -15,14 +15,14 @@ def handle(self, *args, **options): """Handle command""" collector = Collector( - collector_module=galaxy_collector, - collection_type="manual", + collector_module=lightspeed_data, + collection_type=Collector.MANUAL_COLLECTION, logger=logger, ) collector.gather(since=now() - timedelta(days=8), until=now() - timedelta(days=1)) - print("Completed ") + self.stdout.write("Gather Analytics => S3(Lightspeed): Completed ") if __name__ == "__main__": diff --git a/galaxy_ng/app/management/commands/analytics/__init__.py b/galaxy_ng/app/metrics_collection/__init__.py similarity index 100% rename from galaxy_ng/app/management/commands/analytics/__init__.py rename to galaxy_ng/app/metrics_collection/__init__.py diff --git a/galaxy_ng/app/metrics_collection/automation_analytics/__init__.py b/galaxy_ng/app/metrics_collection/automation_analytics/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/galaxy_ng/app/metrics_collection/automation_analytics/collector.py b/galaxy_ng/app/metrics_collection/automation_analytics/collector.py new file mode 100644 index 0000000000..bd9b488eaa --- /dev/null +++ 
b/galaxy_ng/app/metrics_collection/automation_analytics/collector.py @@ -0,0 +1,69 @@ +from django.conf import settings + +from galaxy_ng.app.metrics_collection.collector import Collector as BaseCollector +from galaxy_ng.app.metrics_collection.automation_analytics.package import Package + + +class Collector(BaseCollector): + @staticmethod + def _package_class(): + return Package + + def is_enabled(self): + if not settings.GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_ENABLED: + self.logger.log(self.log_level, + "Metrics Collection for Ansible Automation Platform not enabled.") + return False + return super().is_enabled() + + def _is_shipping_configured(self): + auth_valid = bool(settings.GALAXY_METRICS_COLLECTION_C_RH_C_UPLOAD_URL) + + # There are two possible types of authentication + # 1) RH account - user/password + # 2) X-RH-Identity header (inside cloud or testing) + if auth_valid: + auth_valid = settings.GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_AUTH_TYPE in [ + Package.SHIPPING_AUTH_USERPASS, + Package.SHIPPING_AUTH_IDENTITY] + if auth_valid: + if settings.GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_AUTH_TYPE == \ + Package.SHIPPING_AUTH_USERPASS: + auth_valid = bool(settings.GALAXY_METRICS_COLLECTION_REDHAT_USERNAME) and \ + bool(settings.GALAXY_METRICS_COLLECTION_REDHAT_PASSWORD) + + if settings.GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_AUTH_TYPE == \ + Package.SHIPPING_AUTH_IDENTITY: + auth_valid = bool(settings.GALAXY_METRICS_COLLECTION_ORG_ID) + if not auth_valid: + self.logger.log(self.log_level, "No metrics collection, configuration is invalid. 
" + "Use --dry-run to gather locally without sending.") + return auth_valid + + def _last_gathering(self): + # TODO: Waiting for persistent DB storage in Hub + # https://issues.redhat.com/browse/AAH-2009 + # return settings.AUTOMATION_ANALYTICS_LAST_GATHER + return None + + def _load_last_gathered_entries(self): + # TODO: Waiting for persistent DB storage in Hub + # https://issues.redhat.com/browse/AAH-2009 + # from awx.conf.models import Setting + # + # last_entries = Setting.objects.filter(key='AUTOMATION_ANALYTICS_LAST_ENTRIES').first() + # last_gathered_entries = \ + # json.loads((last_entries.value if last_entries is not None else '') or '{}', + # object_hook=datetime_hook) + last_gathered_entries = {} + return last_gathered_entries + + def _save_last_gathered_entries(self, last_gathered_entries): + # TODO: Waiting for persistent DB storage in Hub + # https://issues.redhat.com/browse/AAH-2009 + pass + + def _save_last_gather(self): + # TODO: Waiting for persistent DB storage in Hub + # https://issues.redhat.com/browse/AAH-2009 + pass diff --git a/galaxy_ng/app/metrics_collection/automation_analytics/data.py b/galaxy_ng/app/metrics_collection/automation_analytics/data.py new file mode 100644 index 0000000000..dc73a8fdcc --- /dev/null +++ b/galaxy_ng/app/metrics_collection/automation_analytics/data.py @@ -0,0 +1,143 @@ +import os +from django.db import connection +from insights_analytics_collector import CsvFileSplitter, register +import galaxy_ng.app.metrics_collection.common_data as data + + +@register("config", "1.0", description="General platform configuration.", config=True) +def config(since, **kwargs): + return data.config() + + +@register("instance_info", "1.0", description="Node information") +def instance_info(since, **kwargs): + return data.instance_info() + + +@register("collections", "1.0", format="csv", description="Data on ansible_collection") +def collections(since, full_path, until, **kwargs): + query = data.collections_query() + + return 
export_to_csv(full_path, "collections", query) + + +@register( + "collection_versions", + "1.0", + format="csv", + description="Data on ansible_collectionversion", +) +def collection_versions(since, full_path, until, **kwargs): + query = data.collection_versions_query() + + return export_to_csv(full_path, "collection_versions", query) + + +@register( + "collection_version_tags", + "1.0", + format="csv", + description="Full sync: Data on ansible_collectionversion_tags" +) +def collection_version_tags(since, full_path, **kwargs): + query = data.collection_version_tags_query() + return export_to_csv(full_path, "collection_version_tags", query) + + +@register( + "collection_tags", + "1.0", + format="csv", + description="Data on ansible_tag" +) +def collection_tags(since, full_path, **kwargs): + query = data.collection_tags_query() + return export_to_csv(full_path, "collection_tags", query) + + +@register( + "collection_version_signatures", + "1.0", + format="csv", + description="Data on ansible_collectionversionsignature", +) +def collection_version_signatures(since, full_path, **kwargs): + query = data.collection_version_signatures_query() + + return export_to_csv(full_path, "collection_version_signatures", query) + + +@register( + "signing_services", + "1.0", + format="csv", + description="Data on core_signingservice" +) +def signing_services(since, full_path, **kwargs): + query = data.signing_services_query() + return export_to_csv(full_path, "signing_services", query) + + +# @register( +# "collection_imports", +# "1.0", +# format="csv", +# description="Data on ansible_collectionimport", +# ) +# def collection_imports(since, full_path, until, **kwargs): +# # currently no rows in the table, so no objects to base a query off +# source_query = """COPY ( +# SELECT * FROM ansible_collectionimport +# ) TO STDOUT WITH CSV HEADER +# """ +# return _simple_csv(full_path, "ansible_collectionimport", source_query) +# + +@register( + "collection_download_logs", + "1.0", + 
format="csv", + description="Data from ansible_downloadlog" +) +def collection_download_logs(since, full_path, until, **kwargs): + query = data.collection_downloads_query() + return export_to_csv(full_path, "collection_download_logs", query) + + +@register( + "collection_download_counts", + "1.0", + format="csv", + description="Data from ansible_collectiondownloadcount" +) +def collection_download_counts(since, full_path, until, **kwargs): + query = data.collection_download_counts_query() + return export_to_csv(full_path, "collection_download_counts", query) + + +def _get_csv_splitter(file_path, max_data_size=209715200): + return CsvFileSplitter(filespec=file_path, max_file_size=max_data_size) + + +def export_to_csv(full_path, file_name, query): + copy_query = f"""COPY ( + {query} + ) TO STDOUT WITH CSV HEADER + """ + return _simple_csv(full_path, file_name, copy_query, max_data_size=209715200) + + +def _simple_csv(full_path, file_name, query, max_data_size=209715200): + file_path = _get_file_path(full_path, file_name) + tfile = _get_csv_splitter(file_path, max_data_size) + + with connection.cursor() as cursor: + with cursor.copy(query) as copy: + while data := copy.read(): + tfile.write(str(data, 'utf8')) + + return tfile.file_list() + + +def _get_file_path(path, table): + return os.path.join(path, table + ".csv") diff --git a/galaxy_ng/app/metrics_collection/automation_analytics/package.py b/galaxy_ng/app/metrics_collection/automation_analytics/package.py new file mode 100644 index 0000000000..00bae0ec33 --- /dev/null +++ b/galaxy_ng/app/metrics_collection/automation_analytics/package.py @@ -0,0 +1,57 @@ +import base64 +import json +from django.conf import settings + +from insights_analytics_collector import Package as InsightsAnalyticsPackage + + +class Package(InsightsAnalyticsPackage): + CERT_PATH = "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem" + PAYLOAD_CONTENT_TYPE = "application/vnd.redhat.automation-hub.hub_payload+tgz" + + def _tarname_base(self): + 
timestamp = self.collector.gather_until + return f'galaxy-hub-analytics-{timestamp.strftime("%Y-%m-%d-%H%M")}' + + def get_ingress_url(self): + return settings.GALAXY_METRICS_COLLECTION_C_RH_C_UPLOAD_URL + + def _get_rh_user(self): + return settings.GALAXY_METRICS_COLLECTION_REDHAT_USERNAME + + def _get_rh_password(self): + return settings.GALAXY_METRICS_COLLECTION_REDHAT_PASSWORD + + def _get_x_rh_identity(self): + """Auth: x-rh-identity header for HTTP POST request to cloud + Optional, if shipping_auth_mode() redefined to SHIPPING_AUTH_IDENTITY + """ + tenant_id = f"{int(settings.GALAXY_METRICS_COLLECTION_ORG_ID):07d}" + identity = { + "identity": { + "type": "User", + "account_number": tenant_id, + "user": {"is_org_admin": True}, + "internal": {"org_id": tenant_id} + } + } + identity = base64.b64encode(json.dumps(identity).encode("utf8")) + return identity + + def hub_version(self): + try: + config_data = self.collector.collections.get("config", {}).data or {} + parsed = json.loads(config_data) + return parsed.get('hub_version', '0.0') + except json.decoder.JSONDecodeError: + return "unknown version" + + def _get_http_request_headers(self): + headers = { + 'Content-Type': 'application/json', + 'User-Agent': f'GalaxyNG | Red Hat Ansible Automation Platform ({self.hub_version()})' + } + return headers + + def shipping_auth_mode(self): + return settings.GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_AUTH_TYPE diff --git a/galaxy_ng/app/metrics_collection/collector.py b/galaxy_ng/app/metrics_collection/collector.py new file mode 100644 index 0000000000..65544aa842 --- /dev/null +++ b/galaxy_ng/app/metrics_collection/collector.py @@ -0,0 +1,11 @@ +from django.db import connection +from insights_analytics_collector import Collector as BaseCollector + + +class Collector(BaseCollector): + def _is_valid_license(self): + return True + + @staticmethod + def db_connection(): + return connection diff --git a/galaxy_ng/app/metrics_collection/common_data.py 
b/galaxy_ng/app/metrics_collection/common_data.py new file mode 100644 index 0000000000..291f08ba7c --- /dev/null +++ b/galaxy_ng/app/metrics_collection/common_data.py @@ -0,0 +1,170 @@ +import requests +import logging +from urllib.parse import urljoin +import platform +import distro +from django.conf import settings +from pulpcore.plugin.models import system_id + +logger = logging.getLogger("metrics_collection.export_data") + + +def api_status(): + status_path = '/pulp/api/v3/status/' + try: + url = urljoin(settings.ANSIBLE_API_HOSTNAME, status_path) + response = requests.request("GET", url) + if response.status_code == 200: + return response.json() + else: + logger.error(f"export metrics_collection: failed API call {status_path}: " + f"HTTP status {response.status_code}") + return {} + except Exception as e: + logger.error(f"export metrics_collection: failed API call {status_path}: {e}") + return {} + + +def hub_version(): + status = api_status() + galaxy_version = '' + for version in status['versions']: + if version['component'] == 'galaxy': + galaxy_version = version['version'] + return galaxy_version + + +def config(): + + return { + "platform": { + "system": platform.system(), + "dist": distro.linux_distribution(), + "release": platform.release(), + "type": "openshift", # valid for GalaxyNG/Cloud Hub + }, + "authentication_backends": settings.AUTHENTICATION_BACKENDS, + "deployment_mode": settings.GALAXY_DEPLOYMENT_MODE, + "install_uuid": system_id(), # core_systemid.pulp_id + "instance_uuid": "", # instances in cluster not distinguished + "hub_url_base": settings.ANSIBLE_API_HOSTNAME, + "hub_version": hub_version() + } + + +def instance_info(): + status = api_status() + + return { + "versions": status.get('versions', {}), + "online_workers": status.get('online_workers', []), + "online_content_apps": status.get('online_content_apps', []), + "database_connection": status.get('database_connection', {}), + "redis_connection": status.get('redis_connection', {}), + 
"storage": status.get('storage', {}), + "content_settings": status.get('content_settings', {}), + "domain_enabled": status.get('domain_enabled', '') + } + + +def collections_query(): + return """ + SELECT "ansible_collection"."pulp_id" AS uuid, + "ansible_collection"."pulp_created", + "ansible_collection"."pulp_last_updated", + "ansible_collection"."namespace", + "ansible_collection"."name" + FROM "ansible_collection" + """ + + +def collection_versions_query(): + return """ + SELECT "ansible_collectionversion"."content_ptr_id" AS uuid, + "core_content"."pulp_created", + "core_content"."pulp_last_updated", + "ansible_collectionversion"."collection_id", + "ansible_collectionversion"."contents", + "ansible_collectionversion"."dependencies", + "ansible_collectionversion"."description", + "ansible_collectionversion"."license", + "ansible_collectionversion"."version", + "ansible_collectionversion"."requires_ansible", + "ansible_collectionversion"."is_highest", + "ansible_collectionversion"."repository" + FROM "ansible_collectionversion" + INNER JOIN "core_content" ON ( + "ansible_collectionversion"."content_ptr_id" = "core_content"."pulp_id" + ) + """ + + +def collection_version_tags_query(): + return """ + SELECT id, + collectionversion_id AS collection_version_id, + tag_id + FROM ansible_collectionversion_tags + """ + + +def collection_tags_query(): + return """ + SELECT pulp_id AS uuid, + pulp_created, + pulp_last_updated, + name + FROM ansible_tag + """ + + +def collection_version_signatures_query(): + return """ + SELECT "ansible_collectionversionsignature".content_ptr_id AS uuid, + "core_content".pulp_created, + "core_content".pulp_last_updated, + "ansible_collectionversionsignature".signed_collection_id AS collection_version_id, + "ansible_collectionversionsignature".data, + "ansible_collectionversionsignature".digest, + "ansible_collectionversionsignature".pubkey_fingerprint, + "ansible_collectionversionsignature".signing_service_id + FROM 
ansible_collectionversionsignature + INNER JOIN core_content + ON core_content.pulp_id = "ansible_collectionversionsignature".content_ptr_id + """ + + +def signing_services_query(): + return """ + SELECT pulp_id AS uuid, + pulp_created, + pulp_last_updated, + public_key, + name + FROM core_signingservice + """ + + +def collection_downloads_query(): + return """ + SELECT pulp_id AS uuid, + pulp_created, + pulp_last_updated, + content_unit_id AS collection_version_id, + ip, + extra_data->>'org_id' AS org_id, + user_agent + FROM ansible_downloadlog + """ + + +def collection_download_counts_query(): + return """ + SELECT pulp_id AS uuid, + pulp_created, + pulp_last_updated, + namespace, + name, + download_count + FROM ansible_collectiondownloadcount + """ diff --git a/galaxy_ng/app/metrics_collection/lightspeed/__init__.py b/galaxy_ng/app/metrics_collection/lightspeed/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/galaxy_ng/app/management/commands/analytics/collector.py b/galaxy_ng/app/metrics_collection/lightspeed/collector.py similarity index 64% rename from galaxy_ng/app/management/commands/analytics/collector.py rename to galaxy_ng/app/metrics_collection/lightspeed/collector.py index 8a9a6db82d..b0844b9611 100644 --- a/galaxy_ng/app/management/commands/analytics/collector.py +++ b/galaxy_ng/app/metrics_collection/lightspeed/collector.py @@ -1,7 +1,6 @@ -from django.db import connection - -from insights_analytics_collector import Collector as BaseCollector -from galaxy_ng.app.management.commands.analytics.package import Package +from django.conf import settings +from galaxy_ng.app.metrics_collection.collector import Collector as BaseCollector +from galaxy_ng.app.metrics_collection.lightspeed.package import Package class Collector(BaseCollector): @@ -10,23 +9,21 @@ def __init__(self, collection_type, collector_module, logger): collection_type=collection_type, collector_module=collector_module, logger=logger ) - @staticmethod - def 
db_connection(): - return connection - @staticmethod def _package_class(): return Package + def is_enabled(self): + if not settings.GALAXY_METRICS_COLLECTION_LIGHTSPEED_ENABLED: + self.logger.log(self.log_level, + "Metrics Collection for Ansible Lightspeed not enabled.") + return False + return super().is_enabled() + def get_last_gathering(self): return self._last_gathering() def _is_shipping_configured(self): - # TODO: need shipping configuration - return True - - def _is_valid_license(self): - # TODO: need license information and validation logics return True def _last_gathering(self): diff --git a/galaxy_ng/app/management/commands/analytics/galaxy_collector.py b/galaxy_ng/app/metrics_collection/lightspeed/data.py similarity index 88% rename from galaxy_ng/app/management/commands/analytics/galaxy_collector.py rename to galaxy_ng/app/metrics_collection/lightspeed/data.py index 4ad8253c89..0e8fbabee7 100644 --- a/galaxy_ng/app/management/commands/analytics/galaxy_collector.py +++ b/galaxy_ng/app/metrics_collection/lightspeed/data.py @@ -1,55 +1,32 @@ import os -import platform -import distro -from django.conf import settings +from django.db import connection from insights_analytics_collector import CsvFileSplitter, register - -from galaxy_ng.app.management.commands.analytics.collector import Collector +import galaxy_ng.app.metrics_collection.common_data as data @register("config", "1.0", description="General platform configuration.", config=True) def config(since, **kwargs): - # TODO: license_info = get_license() - license_info = {} + cfg = data.config() - return { - "platform": { - "system": platform.system(), - "dist": distro.linux_distribution(), - "release": platform.release(), - "type": "traditional", - }, - "external_logger_enabled": "todo", - "external_logger_type": "todo", - "install_uuid": "todo", - "instance_uuid": "todo", - "tower_url_base": "todo", - "tower_version": "todo", - "logging_aggregators": ["todo"], - "pendo_tracking": "todo", - 
"hub_url_base": "todo", - "hub_version": "todo", + # just for compatibility, remove when Wisdom team is aware of that + license_info = {} + compatibility_csv = { "license_type": license_info.get("license_type", "UNLICENSED"), "free_instances": license_info.get("free_instances", 0), "total_licensed_instances": license_info.get("instance_count", 0), "license_expiry": license_info.get("time_remaining", 0), - "authentication_backends": settings.AUTHENTICATION_BACKENDS, + "external_logger_enabled": "todo", + "external_logger_type": "todo", + "logging_aggregators": ["todo"], + "pendo_tracking": "todo" } + return cfg | compatibility_csv @register("instance_info", "1.0", description="Node information", config=True) def instance_info(since, **kwargs): - # TODO: - - return { - "versions": {"system": "todo"}, - "online_workers": "todo", - "online_content_apps": "todo", - "database_connection": "todo", - "redis_connection": "todo", - "storage": "todo", - } + return data.instance_info() @register("ansible_collection_table", "1.0", format="csv", description="Data on ansible_collection") @@ -222,7 +199,7 @@ def _simple_csv(full_path, file_name, query, max_data_size=209715200): file_path = _get_file_path(full_path, file_name) tfile = _get_csv_splitter(file_path, max_data_size) - with Collector.db_connection().cursor() as cursor: + with connection.cursor() as cursor: with cursor.copy(query) as copy: while data := copy.read(): tfile.write(str(data, 'utf8')) diff --git a/galaxy_ng/app/management/commands/analytics/package.py b/galaxy_ng/app/metrics_collection/lightspeed/package.py similarity index 86% rename from galaxy_ng/app/management/commands/analytics/package.py rename to galaxy_ng/app/metrics_collection/lightspeed/package.py index 1f5740adce..396504cd0f 100644 --- a/galaxy_ng/app/management/commands/analytics/package.py +++ b/galaxy_ng/app/metrics_collection/lightspeed/package.py @@ -5,6 +5,9 @@ class Package(InsightsAnalyticsPackage): + """Package is the class responsible to 
creating and sending of one tar.gz archive""" + # Ansible Lightspeed was originally named as wisdom, + # that's the reason for the content-type's name PAYLOAD_CONTENT_TYPE = "application/vnd.redhat.wisdom.filename+tgz" def _tarname_base(self): @@ -26,7 +29,7 @@ def _get_rh_region(self): def _get_rh_bucket(self): return os.environ["aws_bucket"] - def get_s3_configured(): + def get_s3_configured(self): return True def shipping_auth_mode(self): diff --git a/galaxy_ng/app/settings.py b/galaxy_ng/app/settings.py index 01398b523c..1afaf24895 100644 --- a/galaxy_ng/app/settings.py +++ b/galaxy_ng/app/settings.py @@ -249,3 +249,20 @@ # groups foo and bar login and only group foo exists in the system, # the user will be added to foo and bar will be ignored. GALAXY_LDAP_MIRROR_ONLY_EXISTING_GROUPS = False + +# Enables Metrics collection for Lightspeed/Wisdom +# - django command metrics-collection-lightspeed +GALAXY_METRICS_COLLECTION_LIGHTSPEED_ENABLED = True +# Enables Metrics collection for Automation Analytics +# - django command metrics-collection-automation-analytics +GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_ENABLED = False +# List of values has the insights_analytics_collector/package.py:SHIPPING_AUTH_* +GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_AUTH_TYPE = "user-pass" +# URL of Ingress upload API in console.redhat.com +GALAXY_METRICS_COLLECTION_C_RH_C_UPLOAD_URL = None +# RH account's user +GALAXY_METRICS_COLLECTION_REDHAT_USERNAME = None +# RH account's password +GALAXY_METRICS_COLLECTION_REDHAT_PASSWORD = None +# RH account's org id (required for x-rh-identity auth type) +GALAXY_METRICS_COLLECTION_ORG_ID = None diff --git a/galaxy_ng/tests/unit/app/management/commands/analytics/__init__.py b/galaxy_ng/tests/unit/app/management/commands/analytics/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/galaxy_ng/tests/unit/app/management/commands/analytics/automation_analytics/__init__.py 
b/galaxy_ng/tests/unit/app/management/commands/analytics/automation_analytics/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/galaxy_ng/tests/unit/app/management/commands/analytics/automation_analytics/test_collector.py b/galaxy_ng/tests/unit/app/management/commands/analytics/automation_analytics/test_collector.py new file mode 100644 index 0000000000..f6bad58705 --- /dev/null +++ b/galaxy_ng/tests/unit/app/management/commands/analytics/automation_analytics/test_collector.py @@ -0,0 +1,223 @@ +import importlib +import json +import logging +import os +import tarfile +from unittest.mock import MagicMock, patch, ANY +from insights_analytics_collector import register +import insights_analytics_collector.package +import galaxy_ng.app.metrics_collection.common_data +from galaxy_ng.app.metrics_collection.automation_analytics.collector import Collector +from galaxy_ng.app.metrics_collection.automation_analytics.package import Package +from django.test import TestCase, override_settings + + +@register('config', '1.0', config=True) +def config(since, **kwargs): + return {'hub_version': 'x.y'} + + +@register('example1', '1.0') +def example1(since, **kwargs): + return {'galaxy': 123} + + +@register('example2', '1.1') +def example2(since, **kwargs): + return {'galaxy': 123} + + +@register('example3', '1.3') +def example3(since, **kwargs): + return {'galaxy': 123} + + +@register('bad_json', '1.0') +def bad_json(since, **kwargs): + return set() + + +@register('json_exception', '1.0') +def json_exception(since, **kwargs): + raise ValueError('Json collection went wrong') + + +@register('bad_csv', '1.0', format='csv') +def bad_csv(since, **kwargs): + return None + + +@register('csv_exception', '1.0', format='csv') +def csv_exception(since, **kwargs): + raise ValueError('CSV collection went wrong') + + +@override_settings(GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_ENABLED=True) +class TestAutomationAnalyticsCollector(TestCase): + def setUp(self): + 
super().setUp() + self.api_status = MagicMock() + self.api_status.return_value = {} + galaxy_ng.app.metrics_collection.common_data.api_status = self.api_status + + self.logger = MagicMock() + self.log = MagicMock("log") + self.logger.log = self.log + self.logger.exception = MagicMock("exception") + + def test_no_config_gather(self): + """Config is missing, no data are collected""" + collector = Collector( + collector_module=importlib.import_module(__name__), + collection_type=Collector.DRY_RUN) + + tgzfiles = collector.gather(subset=['example1', 'example2']) + assert tgzfiles is None + + def test_wrong_collections(self): + collector = Collector( + collector_module=importlib.import_module(__name__), + collection_type=Collector.DRY_RUN, + logger=self.logger + ) + + tgzfiles = collector.gather(subset=['config', + 'bad_json', 'json_exception', + 'bad_csv', 'csv_exception']) + assert len(tgzfiles) == 1 + + files = {} + with tarfile.open(tgzfiles[0], "r:gz") as archive: + for member in archive.getmembers(): + files[member.name] = archive.extractfile(member) + + # files added automatically + assert './manifest.json' in files.keys() + assert './data_collection_status.csv' in files.keys() + + # required files + assert './config.json' in files.keys() # required file + + # Wrong data are not part of the tarball + assert './bad_json.json' not in files.keys() + assert './json_exception.json' not in files.keys() + assert './bad_csv.csv' not in files.keys() + assert './csv_exception.csv' not in files.keys() + + def test_correct_gather(self): + collector = Collector( + collector_module=importlib.import_module(__name__), + collection_type=Collector.DRY_RUN + ) + tgzfiles = collector.gather(subset=['config', + 'example1', + 'example2']) + assert len(tgzfiles) == 1 + + files = {} + with tarfile.open(tgzfiles[0], "r:gz") as archive: + for member in archive.getmembers(): + files[member.name] = archive.extractfile(member) + + # files added automatically + assert 
'./manifest.json' in files.keys() + assert './data_collection_status.csv' in files.keys() + + # files/data collected by @register decorator + assert './config.json' in files.keys() # required file + assert './example1.json' in files.keys() + assert json.loads(files['./example1.json'].read()) == {'galaxy': 123} + assert './example2.json' in files.keys() + + # not in chosen subset + assert './example3.json' not in files.keys() + + try: + for tgz in tgzfiles: + os.remove(tgz) + except Exception: + pass + + @override_settings(GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_ENABLED=False) + @override_settings(GALAXY_METRICS_COLLECTION_C_RH_C_UPLOAD_URL="https://www.example.com") + def test_collection_disabled(self): + collector = Collector( + collector_module=importlib.import_module(__name__), + collection_type=Collector.DRY_RUN, + logger=self.logger + ) + + tgzfiles = collector.gather(subset=['config', 'example1']) + assert tgzfiles is None + + self.log.assert_called_with(logging.ERROR, + "Metrics Collection for Ansible Automation Platform " + "not enabled.") + + @override_settings( + GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_AUTH_TYPE=Package.SHIPPING_AUTH_CERTIFICATES + ) + def test_invalid_auth(self): + self._test_shipping_error() + + @override_settings( + GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_AUTH_TYPE=Package.SHIPPING_AUTH_USERPASS + ) + def test_userpass_empty_user(self): + self._test_shipping_error() + + @override_settings( + GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_AUTH_TYPE=Package.SHIPPING_AUTH_USERPASS + ) + @override_settings(GALAXY_METRICS_COLLECTION_REDHAT_USERNAME="redhat") + @override_settings(GALAXY_METRICS_COLLECTION_REDHAT_PASSWORD="") + def test_userpass_empty_password(self): + self._test_shipping_error() + + @override_settings( + GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_AUTH_TYPE=Package.SHIPPING_AUTH_IDENTITY + ) + def test_identity_no_org_id(self): + self._test_shipping_error() + + @override_settings( + 
GALAXY_METRICS_COLLECTION_AUTOMATION_ANALYTICS_AUTH_TYPE=Package.SHIPPING_AUTH_USERPASS + ) + @override_settings(GALAXY_METRICS_COLLECTION_C_RH_C_UPLOAD_URL="https://www.example.com") + @override_settings(GALAXY_METRICS_COLLECTION_REDHAT_USERNAME="redhat") + @override_settings(GALAXY_METRICS_COLLECTION_REDHAT_PASSWORD="pass") + @patch.object(insights_analytics_collector.package.requests.Session, "post") + def test_valid_shipping(self, mock_post): + mock_post_response = MagicMock(name="post_response") + mock_post_response.status_code = 200 + mock_post.return_value = mock_post_response + + collector = Collector( + collector_module=importlib.import_module(__name__), + collection_type=Collector.MANUAL_COLLECTION, + logger=self.logger + ) + tgzfiles = collector.gather(subset=['config', 'example1']) + assert len(tgzfiles) == 1 + + expected_headers = {'User-Agent': 'GalaxyNG | Red Hat Ansible Automation Platform (x.y)'} + mock_post.assert_called_with("https://www.example.com", + files=ANY, + verify=Package.CERT_PATH, + auth=("redhat", "pass"), + headers=expected_headers, + timeout=(31, 31) + ) + + def _test_shipping_error(self): + collector = Collector( + collector_module=importlib.import_module(__name__), + collection_type=Collector.MANUAL_COLLECTION, + logger=self.logger + ) + tgzfiles = collector.gather(subset=['config', 'example1']) + assert tgzfiles is None + + self.log.assert_called_with(logging.ERROR, + "No metrics collection, configuration is invalid. 
" + "Use --dry-run to gather locally without sending.") diff --git a/galaxy_ng/tests/unit/app/management/commands/test_analytics_export_s3.py b/galaxy_ng/tests/unit/app/management/commands/test_analytics_export_s3.py deleted file mode 100644 index 137f16094c..0000000000 --- a/galaxy_ng/tests/unit/app/management/commands/test_analytics_export_s3.py +++ /dev/null @@ -1,41 +0,0 @@ -from unittest.mock import patch, mock_open, MagicMock -from django.core.management import call_command -from django.test import TestCase -import os - - -s3_details = { - "aws_access_key_id": "blah", - "aws_secret_access_key": "blah", - "aws_region": "blah", - "aws_bucket": "blah", -} - - -class TestAnalyticsExportS3Command(TestCase): - def setUp(self): - super().setUp() - - def test_command_output(self): - call_command("analytics-export-s3") - - @patch("galaxy_ng.app.management.commands.analytics.galaxy_collector._get_csv_splitter") - @patch("builtins.open", new_callable=mock_open, read_data="data") - @patch("boto3.client") - @patch.dict(os.environ, s3_details, clear=True) - def test_write_file_to_s3_success(self, boto3, mock_file, simpleCSVHelper): - assert os.getenv("aws_access_key_id") == "blah" - assert os.getenv("aws_secret_access_key") == "blah" - assert os.getenv("aws_region") == "blah" - assert os.getenv("aws_bucket") == "blah" - - csvsplitter = MagicMock() - csvsplitter.write = MagicMock(name="write") - csvsplitter.file_list = MagicMock(name="file_list") - simpleCSVHelper.return_value = csvsplitter - - call_command("analytics-export-s3") - - simpleCSVHelper.assert_called() - csvsplitter.file_list.assert_called() - csvsplitter.write.assert_called() diff --git a/galaxy_ng/tests/unit/app/management/commands/test_metrics_collection_lightspeed.py b/galaxy_ng/tests/unit/app/management/commands/test_metrics_collection_lightspeed.py new file mode 100644 index 0000000000..f981b08221 --- /dev/null +++ b/galaxy_ng/tests/unit/app/management/commands/test_metrics_collection_lightspeed.py @@ 
"""Unit tests for the ``metrics-collection-lightspeed`` management command."""
from unittest.mock import patch, mock_open, MagicMock
from django.core.management import call_command
from django.test import TestCase
import os
import galaxy_ng.app.metrics_collection.common_data

# Fake AWS environment the lightspeed exporter reads its S3 target from.
s3_details = {
    "aws_access_key_id": "blah",
    "aws_secret_access_key": "blah",
    "aws_region": "blah",
    "aws_bucket": "blah",
}


class TestMetricsCollectionLightspeedCommand(TestCase):
    """Exercise the lightspeed export command with mocked S3 and API status."""

    def setUp(self):
        super().setUp()
        # Replace the shared api_status helper so no live API call is made.
        self.api_status = MagicMock()
        self.api_status.return_value = {}
        galaxy_ng.app.metrics_collection.common_data.api_status = self.api_status

    def test_command_output(self):
        # The command must run to completion without any S3 credentials set.
        call_command("metrics-collection-lightspeed")

    @patch("galaxy_ng.app.metrics_collection.lightspeed.data._get_csv_splitter")
    @patch("builtins.open", new_callable=mock_open, read_data="data")
    @patch("boto3.client")
    @patch.dict(os.environ, s3_details, clear=True)
    def test_write_file_to_s3_success(self, boto3, mock_file, simple_csv_helper):
        # Sanity-check the patched environment before running the command.
        for env_key in s3_details:
            assert os.getenv(env_key) == "blah"

        splitter = MagicMock()
        splitter.write = MagicMock(name="write")
        splitter.file_list = MagicMock(name="file_list")
        simple_csv_helper.return_value = splitter

        call_command("metrics-collection-lightspeed")

        # The export must consult the API status and drive the CSV splitter.
        self.api_status.assert_called()
        simple_csv_helper.assert_called()
        splitter.file_list.assert_called()
        splitter.write.assert_called()