From 278020c4fcef2ba3dc8d93786e2c0c658de5ccbc Mon Sep 17 00:00:00 2001 From: Chris Doehring Date: Mon, 18 Nov 2024 17:22:54 -0800 Subject: [PATCH 01/16] Add ExcecutableMixin. --- app/actions/core.py | 4 + app/conftest.py | 1363 +++++++++++------- app/services/self_registration.py | 32 +- app/services/tests/test_self_registration.py | 279 ++-- 4 files changed, 1091 insertions(+), 587 deletions(-) diff --git a/app/actions/core.py b/app/actions/core.py index a7dbe09..1f7bd30 100644 --- a/app/actions/core.py +++ b/app/actions/core.py @@ -14,6 +14,10 @@ class PullActionConfiguration(ActionConfiguration): pass +class ExecutableActionMixin: + pass + + class PushActionConfiguration(ActionConfiguration): pass diff --git a/app/conftest.py b/app/conftest.py index ae20973..2fec482 100644 --- a/app/conftest.py +++ b/app/conftest.py @@ -24,11 +24,20 @@ WebhookExecutionFailed, IntegrationWebhookCustomLog, CustomWebhookLog, - LogLevel + LogLevel, +) +from app.actions import ( + PullActionConfiguration, + AuthActionConfiguration, + ExecutableActionMixin, ) -from app.actions import PullActionConfiguration from app.services.utils import GlobalUISchemaOptions, FieldWithUIOptions, UIOptions -from app.webhooks import GenericJsonTransformConfig, GenericJsonPayload, WebhookPayload, WebhookConfiguration +from app.webhooks import ( + GenericJsonTransformConfig, + GenericJsonPayload, + WebhookPayload, + WebhookConfiguration, +) class AsyncMock(MagicMock): @@ -52,7 +61,9 @@ def mock_redis(mocker, mock_integration_state): redis = MagicMock() redis_client = mocker.MagicMock() redis_client.set.return_value = async_return(MagicMock()) - redis_client.get.return_value = async_return(json.dumps(mock_integration_state, default=str)) + redis_client.get.return_value = async_return( + json.dumps(mock_integration_state, default=str) + ) redis_client.delete.return_value = async_return(MagicMock()) redis_client.setex.return_value = async_return(None) redis_client.incr.return_value = redis_client @@ -69,44 
+80,134 @@ def mock_redis(mocker, mock_integration_state): @pytest.fixture def integration_v2(): return Integration.parse_obj( - {'id': '779ff3ab-5589-4f4c-9e0a-ae8d6c9edff0', 'name': 'Gundi X', 'base_url': 'https://gundi-er.pamdas.org', - 'enabled': True, - 'type': {'id': '50229e21-a9fe-4caa-862c-8592dfb2479b', 'name': 'EarthRanger', 'value': 'earth_ranger', - 'description': 'Integration type for Integration X Sites', 'actions': [ - {'id': '80448d1c-4696-4b32-a59f-f3494fc949ac', 'type': 'auth', 'name': 'Authenticate', 'value': 'auth', - 'description': 'Authenticate against Integration X', - 'schema': {'type': 'object', 'required': ['token'], 'properties': {'token': {'type': 'string'}}}}, - {'id': '4b721b37-f4ca-4f20-b07c-2caadb095ecb', 'type': 'pull', 'name': 'Pull Events', - 'value': 'pull_events', 'description': 'Extract events from EarthRanger sites', - 'schema': {'type': 'object', 'title': 'PullObservationsConfig', 'required': ['start_datetime'], - 'properties': {'start_datetime': {'type': 'string', 'title': 'Start Datetime'}}}}, - {'id': '75b3040f-ab1f-42e7-b39f-8965c088b154', 'type': 'pull', 'name': 'Pull Observations', - 'value': 'pull_observations', 'description': 'Extract observations from an EarthRanger Site', - 'schema': {'type': 'object', 'title': 'PullObservationsConfig', 'required': ['start_datetime'], - 'properties': {'start_datetime': {'type': 'string', 'title': 'Start Datetime'}}}}, - {'id': '425a2e2f-ae71-44fb-9314-bc0116638e4f', 'type': 'push', 'name': 'Push Event Attachments', - 'value': 'push_event_attachments', - 'description': 'EarthRanger sites support adding attachments to events', 'schema': {}}, - {'id': '8e101f31-e693-404c-b6ee-20fde6019f16', 'type': 'push', 'name': 'Push Events', - 'value': 'push_events', 'description': 'EarthRanger sites support sending Events (a.k.a Reports)', - 'schema': {}}]}, - 'owner': {'id': 'a91b400b-482a-4546-8fcb-ee42b01deeb6', 'name': 'Test Org', 'description': ''}, - 'configurations': [ - {'id': 
'5577c323-b961-4277-9047-b1f27fd6a1b7', 'integration': '779ff3ab-5589-4f4c-9e0a-ae8d6c9edff0', - 'action': {'id': '75b3040f-ab1f-42e7-b39f-8965c088b154', 'type': 'pull', 'name': 'Pull Observations', - 'value': 'pull_observations'}, - 'data': {'end_datetime': '2023-11-10T06:00:00-00:00', 'start_datetime': '2023-11-10T05:30:00-00:00', - 'force_run_since_start': False}}, - {'id': '431af42b-c431-40af-8b57-a349253e15df', 'integration': '779ff3ab-5589-4f4c-9e0a-ae8d6c9edff0', - 'action': {'id': '4b721b37-f4ca-4f20-b07c-2caadb095ecb', 'type': 'pull', 'name': 'Pull Events', - 'value': 'pull_events'}, 'data': {'start_datetime': '2023-11-16T00:00:00-03:00'}}, - {'id': '30f8878c-4a98-4c95-88eb-79f73c40fb2f', 'integration': '779ff3ab-5589-4f4c-9e0a-ae8d6c9edff0', - 'action': {'id': '80448d1c-4696-4b32-a59f-f3494fc949ac', 'type': 'auth', 'name': 'Authenticate', - 'value': 'auth'}, 'data': {'token': 'testtoken2a97022f21732461ee103a08fac8a35'}}], - 'additional': {}, - 'default_route': {'id': '5abf3845-7c9f-478a-bc0f-b24d87038c4b', 'name': 'Gundi X Provider - Default Route'}, - 'status': 'healthy', - 'status_details': '', + { + "id": "779ff3ab-5589-4f4c-9e0a-ae8d6c9edff0", + "name": "Gundi X", + "base_url": "https://gundi-er.pamdas.org", + "enabled": True, + "type": { + "id": "50229e21-a9fe-4caa-862c-8592dfb2479b", + "name": "EarthRanger", + "value": "earth_ranger", + "description": "Integration type for Integration X Sites", + "actions": [ + { + "id": "80448d1c-4696-4b32-a59f-f3494fc949ac", + "type": "auth", + "name": "Authenticate", + "value": "auth", + "description": "Authenticate against Integration X", + "schema": { + "type": "object", + "required": ["token"], + "properties": {"token": {"type": "string"}}, + }, + }, + { + "id": "4b721b37-f4ca-4f20-b07c-2caadb095ecb", + "type": "pull", + "name": "Pull Events", + "value": "pull_events", + "description": "Extract events from EarthRanger sites", + "schema": { + "type": "object", + "title": "PullObservationsConfig", + "required": 
["start_datetime"], + "properties": { + "start_datetime": { + "type": "string", + "title": "Start Datetime", + } + }, + }, + }, + { + "id": "75b3040f-ab1f-42e7-b39f-8965c088b154", + "type": "pull", + "name": "Pull Observations", + "value": "pull_observations", + "description": "Extract observations from an EarthRanger Site", + "schema": { + "type": "object", + "title": "PullObservationsConfig", + "required": ["start_datetime"], + "properties": { + "start_datetime": { + "type": "string", + "title": "Start Datetime", + } + }, + }, + }, + { + "id": "425a2e2f-ae71-44fb-9314-bc0116638e4f", + "type": "push", + "name": "Push Event Attachments", + "value": "push_event_attachments", + "description": "EarthRanger sites support adding attachments to events", + "schema": {}, + }, + { + "id": "8e101f31-e693-404c-b6ee-20fde6019f16", + "type": "push", + "name": "Push Events", + "value": "push_events", + "description": "EarthRanger sites support sending Events (a.k.a Reports)", + "schema": {}, + }, + ], + }, + "owner": { + "id": "a91b400b-482a-4546-8fcb-ee42b01deeb6", + "name": "Test Org", + "description": "", + }, + "configurations": [ + { + "id": "5577c323-b961-4277-9047-b1f27fd6a1b7", + "integration": "779ff3ab-5589-4f4c-9e0a-ae8d6c9edff0", + "action": { + "id": "75b3040f-ab1f-42e7-b39f-8965c088b154", + "type": "pull", + "name": "Pull Observations", + "value": "pull_observations", + }, + "data": { + "end_datetime": "2023-11-10T06:00:00-00:00", + "start_datetime": "2023-11-10T05:30:00-00:00", + "force_run_since_start": False, + }, + }, + { + "id": "431af42b-c431-40af-8b57-a349253e15df", + "integration": "779ff3ab-5589-4f4c-9e0a-ae8d6c9edff0", + "action": { + "id": "4b721b37-f4ca-4f20-b07c-2caadb095ecb", + "type": "pull", + "name": "Pull Events", + "value": "pull_events", + }, + "data": {"start_datetime": "2023-11-16T00:00:00-03:00"}, + }, + { + "id": "30f8878c-4a98-4c95-88eb-79f73c40fb2f", + "integration": "779ff3ab-5589-4f4c-9e0a-ae8d6c9edff0", + "action": { + "id": 
"80448d1c-4696-4b32-a59f-f3494fc949ac", + "type": "auth", + "name": "Authenticate", + "value": "auth", + }, + "data": {"token": "testtoken2a97022f21732461ee103a08fac8a35"}, + }, + ], + "additional": {}, + "default_route": { + "id": "5abf3845-7c9f-478a-bc0f-b24d87038c4b", + "name": "Gundi X Provider - Default Route", + }, + "status": "healthy", + "status_details": "", } ) @@ -134,24 +235,28 @@ def integration_v2_with_webhook(): "title": "MockWebhookConfigModel", "type": "object", "properties": { - "allowed_devices_list": {"title": "Allowed Devices List", "type": "array", "items": {}}, - "deduplication_enabled": {"title": "Deduplication Enabled", "type": "boolean"}}, - "required": ["allowed_devices_list", "deduplication_enabled"] + "allowed_devices_list": { + "title": "Allowed Devices List", + "type": "array", + "items": {}, + }, + "deduplication_enabled": { + "title": "Deduplication Enabled", + "type": "boolean", + }, + }, + "required": ["allowed_devices_list", "deduplication_enabled"], }, "ui_schema": { - "allowed_devices_list": { - "ui:widget": "select" - }, - "deduplication_enabled": { - "ui:widget": "radio" - } - } - } + "allowed_devices_list": {"ui:widget": "select"}, + "deduplication_enabled": {"ui:widget": "radio"}, + }, + }, }, "owner": { "id": "a91b400b-482a-4546-8fcb-ee42b01deeb6", "name": "Test Org", - "description": "" + "description": "", }, "configurations": [], "webhook_configuration": { @@ -164,8 +269,8 @@ def integration_v2_with_webhook(): }, "data": { "allowed_devices_list": ["device1", "device2"], - "deduplication_enabled": True - } + "deduplication_enabled": True, + }, }, "additional": {}, "default_route": None, @@ -197,46 +302,34 @@ def integration_v2_with_webhook_generic(): "schema": { "type": "object", "title": "GenericJsonTransformConfig", - "required": [ - "json_schema", - "output_type" - ], + "required": ["json_schema", "output_type"], "properties": { "jq_filter": { "type": "string", "title": "Jq Filter", "default": ".", "example": ". 
| map(select(.isActive))", - "description": "JQ filter to transform JSON data." - }, - "json_schema": { - "type": "object", - "title": "Json Schema" + "description": "JQ filter to transform JSON data.", }, + "json_schema": {"type": "object", "title": "Json Schema"}, "output_type": { "type": "string", "title": "Output Type", - "description": "Output type for the transformed data: 'obv' or 'event'" - } - } + "description": "Output type for the transformed data: 'obv' or 'event'", + }, + }, }, "ui_schema": { - "jq_filter": { - "ui:widget": "textarea" - }, - "json_schema": { - "ui:widget": "textarea" - }, - "output_type": { - "ui:widget": "text" - } - } - } + "jq_filter": {"ui:widget": "textarea"}, + "json_schema": {"ui:widget": "textarea"}, + "output_type": {"ui:widget": "text"}, + }, + }, }, "owner": { "id": "a91b400b-482a-4546-8fcb-ee42b01deeb6", "name": "Test Org", - "description": "" + "description": "", }, "configurations": [], "webhook_configuration": { @@ -245,56 +338,41 @@ def integration_v2_with_webhook_generic(): "webhook": { "id": "3a42a1bb-6d26-4dde-9ecb-72cb208695c2", "name": "Onyesha Wh Webhook", - "value": "onyesha_wh_webhook" + "value": "onyesha_wh_webhook", }, "data": { - "jq_filter": "{ \"source\": .end_device_ids.device_id, \"source_name\": .end_device_ids.device_id, \"type\": .uplink_message.locations.\"frm-payload\".source, \"recorded_at\": .uplink_message.settings.time, \"location\": { \"lat\": .uplink_message.locations.\"frm-payload\".latitude, \"lon\": .uplink_message.locations.\"frm-payload\".longitude }, \"additional\": { \"application_id\": .end_device_ids.application_ids.application_id, \"dev_eui\": .end_device_ids.dev_eui, \"dev_addr\": .end_device_ids.dev_addr, \"batterypercent\": .uplink_message.decoded_payload.batterypercent, \"gps\": .uplink_message.decoded_payload.gps } }", + "jq_filter": '{ "source": .end_device_ids.device_id, "source_name": .end_device_ids.device_id, "type": .uplink_message.locations."frm-payload".source, 
"recorded_at": .uplink_message.settings.time, "location": { "lat": .uplink_message.locations."frm-payload".latitude, "lon": .uplink_message.locations."frm-payload".longitude }, "additional": { "application_id": .end_device_ids.application_ids.application_id, "dev_eui": .end_device_ids.dev_eui, "dev_addr": .end_device_ids.dev_addr, "batterypercent": .uplink_message.decoded_payload.batterypercent, "gps": .uplink_message.decoded_payload.gps } }', "json_schema": { "type": "object", "properties": { - "received_at": { - "type": "string", - "format": "date-time" - }, + "received_at": {"type": "string", "format": "date-time"}, "end_device_ids": { "type": "object", "properties": { - "dev_eui": { - "type": "string" - }, - "dev_addr": { - "type": "string" - }, - "device_id": { - "type": "string" - }, + "dev_eui": {"type": "string"}, + "dev_addr": {"type": "string"}, + "device_id": {"type": "string"}, "application_ids": { "type": "object", "properties": { - "application_id": { - "type": "string" - } + "application_id": {"type": "string"} }, - "additionalProperties": False - } + "additionalProperties": False, + }, }, - "additionalProperties": False + "additionalProperties": False, }, "uplink_message": { "type": "object", "properties": { - "f_cnt": { - "type": "integer" - }, - "f_port": { - "type": "integer" - }, + "f_cnt": {"type": "integer"}, + "f_port": {"type": "integer"}, "settings": { "type": "object", "properties": { "time": { "type": "string", - "format": "date-time" + "format": "date-time", }, "data_rate": { "type": "object", @@ -310,21 +388,17 @@ def integration_v2_with_webhook_generic(): }, "spreading_factor": { "type": "integer" - } + }, }, - "additionalProperties": False + "additionalProperties": False, } }, - "additionalProperties": False + "additionalProperties": False, }, - "frequency": { - "type": "string" - }, - "timestamp": { - "type": "integer" - } + "frequency": {"type": "string"}, + "timestamp": {"type": "integer"}, }, - "additionalProperties": False + 
"additionalProperties": False, }, "locations": { "type": "object", @@ -332,138 +406,92 @@ def integration_v2_with_webhook_generic(): "frm-payload": { "type": "object", "properties": { - "source": { - "type": "string" - }, - "latitude": { - "type": "number" - }, - "longitude": { - "type": "number" - } + "source": {"type": "string"}, + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, }, - "additionalProperties": False + "additionalProperties": False, } }, - "additionalProperties": False - }, - "frm_payload": { - "type": "string" + "additionalProperties": False, }, + "frm_payload": {"type": "string"}, "network_ids": { "type": "object", "properties": { - "ns_id": { - "type": "string" - }, - "net_id": { - "type": "string" - }, - "tenant_id": { - "type": "string" - }, - "cluster_id": { - "type": "string" - }, - "tenant_address": { - "type": "string" - }, - "cluster_address": { - "type": "string" - } + "ns_id": {"type": "string"}, + "net_id": {"type": "string"}, + "tenant_id": {"type": "string"}, + "cluster_id": {"type": "string"}, + "tenant_address": {"type": "string"}, + "cluster_address": {"type": "string"}, }, - "additionalProperties": False + "additionalProperties": False, }, "received_at": { "type": "string", - "format": "date-time" + "format": "date-time", }, "rx_metadata": { "type": "array", "items": { "type": "object", "properties": { - "snr": { - "type": "number" - }, - "rssi": { - "type": "integer" - }, + "snr": {"type": "number"}, + "rssi": {"type": "integer"}, "time": { "type": "string", - "format": "date-time" + "format": "date-time", }, "gps_time": { "type": "string", - "format": "date-time" - }, - "timestamp": { - "type": "integer" + "format": "date-time", }, + "timestamp": {"type": "integer"}, "gateway_ids": { "type": "object", "properties": { - "eui": { - "type": "string" - }, + "eui": {"type": "string"}, "gateway_id": { "type": "string" - } + }, }, - "additionalProperties": False + "additionalProperties": False, }, "received_at": { 
"type": "string", - "format": "date-time" + "format": "date-time", }, - "channel_rssi": { - "type": "integer" - }, - "uplink_token": { - "type": "string" - }, - "channel_index": { - "type": "integer" - } + "channel_rssi": {"type": "integer"}, + "uplink_token": {"type": "string"}, + "channel_index": {"type": "integer"}, }, - "additionalProperties": False - } + "additionalProperties": False, + }, }, "decoded_payload": { "type": "object", "properties": { - "gps": { - "type": "string" - }, - "latitude": { - "type": "number" - }, - "longitude": { - "type": "number" - }, - "batterypercent": { - "type": "integer" - } + "gps": {"type": "string"}, + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "batterypercent": {"type": "integer"}, }, - "additionalProperties": False + "additionalProperties": False, }, - "consumed_airtime": { - "type": "string" - } + "consumed_airtime": {"type": "string"}, }, - "additionalProperties": False + "additionalProperties": False, }, "correlation_ids": { "type": "array", - "items": { - "type": "string" - } - } + "items": {"type": "string"}, + }, }, - "additionalProperties": False + "additionalProperties": False, }, - "output_type": "obv" - } + "output_type": "obv", + }, }, "additional": {}, "default_route": None, @@ -476,83 +504,53 @@ def integration_v2_with_webhook_generic(): @pytest.fixture def mock_generic_webhook_config(): return { - "jq_filter": "{ \"source\": .end_device_ids.device_id, \"source_name\": .end_device_ids.device_id, \"type\": .uplink_message.locations.\"frm-payload\".source, \"recorded_at\": .uplink_message.settings.time, \"location\": { \"lat\": .uplink_message.locations.\"frm-payload\".latitude, \"lon\": .uplink_message.locations.\"frm-payload\".longitude }, \"additional\": { \"application_id\": .end_device_ids.application_ids.application_id, \"dev_eui\": .end_device_ids.dev_eui, \"dev_addr\": .end_device_ids.dev_addr, \"batterypercent\": .uplink_message.decoded_payload.batterypercent, \"gps\": 
.uplink_message.decoded_payload.gps } }", + "jq_filter": '{ "source": .end_device_ids.device_id, "source_name": .end_device_ids.device_id, "type": .uplink_message.locations."frm-payload".source, "recorded_at": .uplink_message.settings.time, "location": { "lat": .uplink_message.locations."frm-payload".latitude, "lon": .uplink_message.locations."frm-payload".longitude }, "additional": { "application_id": .end_device_ids.application_ids.application_id, "dev_eui": .end_device_ids.dev_eui, "dev_addr": .end_device_ids.dev_addr, "batterypercent": .uplink_message.decoded_payload.batterypercent, "gps": .uplink_message.decoded_payload.gps } }', "json_schema": { "type": "object", "properties": { - "received_at": { - "type": "string", - "format": "date-time" - }, + "received_at": {"type": "string", "format": "date-time"}, "end_device_ids": { "type": "object", "properties": { - "dev_eui": { - "type": "string" - }, - "dev_addr": { - "type": "string" - }, - "device_id": { - "type": "string" - }, + "dev_eui": {"type": "string"}, + "dev_addr": {"type": "string"}, + "device_id": {"type": "string"}, "application_ids": { "type": "object", - "properties": { - "application_id": { - "type": "string" - } - }, - "additionalProperties": False - } + "properties": {"application_id": {"type": "string"}}, + "additionalProperties": False, + }, }, - "additionalProperties": False + "additionalProperties": False, }, "uplink_message": { "type": "object", "properties": { - "f_cnt": { - "type": "integer" - }, - "f_port": { - "type": "integer" - }, + "f_cnt": {"type": "integer"}, + "f_port": {"type": "integer"}, "settings": { "type": "object", "properties": { - "time": { - "type": "string", - "format": "date-time" - }, + "time": {"type": "string", "format": "date-time"}, "data_rate": { "type": "object", "properties": { "lora": { "type": "object", "properties": { - "bandwidth": { - "type": "integer" - }, - "coding_rate": { - "type": "string" - }, - "spreading_factor": { - "type": "integer" - } + 
"bandwidth": {"type": "integer"}, + "coding_rate": {"type": "string"}, + "spreading_factor": {"type": "integer"}, }, - "additionalProperties": False + "additionalProperties": False, } }, - "additionalProperties": False - }, - "frequency": { - "type": "string" + "additionalProperties": False, }, - "timestamp": { - "type": "integer" - } + "frequency": {"type": "string"}, + "timestamp": {"type": "integer"}, }, - "additionalProperties": False + "additionalProperties": False, }, "locations": { "type": "object", @@ -560,137 +558,80 @@ def mock_generic_webhook_config(): "frm-payload": { "type": "object", "properties": { - "source": { - "type": "string" - }, - "latitude": { - "type": "number" - }, - "longitude": { - "type": "number" - } + "source": {"type": "string"}, + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, }, - "additionalProperties": False + "additionalProperties": False, } }, - "additionalProperties": False - }, - "frm_payload": { - "type": "string" + "additionalProperties": False, }, + "frm_payload": {"type": "string"}, "network_ids": { "type": "object", "properties": { - "ns_id": { - "type": "string" - }, - "net_id": { - "type": "string" - }, - "tenant_id": { - "type": "string" - }, - "cluster_id": { - "type": "string" - }, - "tenant_address": { - "type": "string" - }, - "cluster_address": { - "type": "string" - } + "ns_id": {"type": "string"}, + "net_id": {"type": "string"}, + "tenant_id": {"type": "string"}, + "cluster_id": {"type": "string"}, + "tenant_address": {"type": "string"}, + "cluster_address": {"type": "string"}, }, - "additionalProperties": False - }, - "received_at": { - "type": "string", - "format": "date-time" + "additionalProperties": False, }, + "received_at": {"type": "string", "format": "date-time"}, "rx_metadata": { "type": "array", "items": { "type": "object", "properties": { - "snr": { - "type": "number" - }, - "rssi": { - "type": "integer" - }, - "time": { - "type": "string", - "format": "date-time" - }, + "snr": 
{"type": "number"}, + "rssi": {"type": "integer"}, + "time": {"type": "string", "format": "date-time"}, "gps_time": { "type": "string", - "format": "date-time" - }, - "timestamp": { - "type": "integer" + "format": "date-time", }, + "timestamp": {"type": "integer"}, "gateway_ids": { "type": "object", "properties": { - "eui": { - "type": "string" - }, - "gateway_id": { - "type": "string" - } + "eui": {"type": "string"}, + "gateway_id": {"type": "string"}, }, - "additionalProperties": False + "additionalProperties": False, }, "received_at": { "type": "string", - "format": "date-time" - }, - "channel_rssi": { - "type": "integer" + "format": "date-time", }, - "uplink_token": { - "type": "string" - }, - "channel_index": { - "type": "integer" - } + "channel_rssi": {"type": "integer"}, + "uplink_token": {"type": "string"}, + "channel_index": {"type": "integer"}, }, - "additionalProperties": False - } + "additionalProperties": False, + }, }, "decoded_payload": { "type": "object", "properties": { - "gps": { - "type": "string" - }, - "latitude": { - "type": "number" - }, - "longitude": { - "type": "number" - }, - "batterypercent": { - "type": "integer" - } + "gps": {"type": "string"}, + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "batterypercent": {"type": "integer"}, }, - "additionalProperties": False + "additionalProperties": False, }, - "consumed_airtime": { - "type": "string" - } + "consumed_airtime": {"type": "string"}, }, - "additionalProperties": False + "additionalProperties": False, }, - "correlation_ids": { - "type": "array", - "items": { - "type": "string" - } - } + "correlation_ids": {"type": "array", "items": {"type": "string"}}, }, - "additionalProperties": False + "additionalProperties": False, }, - "output_type": "obv" + "output_type": "obv", } @@ -700,16 +641,12 @@ def pull_observations_config(): @pytest.fixture -def mock_gundi_client_v2( - mocker, - integration_v2, - mock_get_gundi_api_key -): +def mock_gundi_client_v2(mocker, 
integration_v2, mock_get_gundi_api_key): mock_client = mocker.MagicMock() - mock_client.get_integration_api_key.return_value = async_return(mock_get_gundi_api_key), - mock_client.get_integration_details.return_value = async_return( - integration_v2 + mock_client.get_integration_api_key.return_value = ( + async_return(mock_get_gundi_api_key), ) + mock_client.get_integration_details.return_value = async_return(integration_v2) mock_client.register_integration_type = AsyncMock() mock_client.__aenter__.return_value = mock_client return mock_client @@ -717,12 +654,12 @@ def mock_gundi_client_v2( @pytest.fixture def mock_gundi_client_v2_for_webhooks( - mocker, - integration_v2_with_webhook, - mock_get_gundi_api_key + mocker, integration_v2_with_webhook, mock_get_gundi_api_key ): mock_client = mocker.MagicMock() - mock_client.get_integration_api_key.return_value = async_return(mock_get_gundi_api_key), + mock_client.get_integration_api_key.return_value = ( + async_return(mock_get_gundi_api_key), + ) mock_client.get_integration_details.return_value = async_return( integration_v2_with_webhook ) @@ -733,12 +670,12 @@ def mock_gundi_client_v2_for_webhooks( @pytest.fixture def mock_gundi_client_v2_for_webhooks_generic( - mocker, - integration_v2_with_webhook_generic, - mock_get_gundi_api_key + mocker, integration_v2_with_webhook_generic, mock_get_gundi_api_key ): mock_client = mocker.MagicMock() - mock_client.get_integration_api_key.return_value = async_return(mock_get_gundi_api_key), + mock_client.get_integration_api_key.return_value = ( + async_return(mock_get_gundi_api_key), + ) mock_client.get_integration_details.return_value = async_return( integration_v2_with_webhook_generic ) @@ -756,10 +693,10 @@ def mock_gundi_client_v2_class(mocker, mock_gundi_client_v2): @pytest.fixture def mock_gundi_sensors_client_class( - mocker, - events_created_response, - event_attachment_created_response, - observations_created_response + mocker, + events_created_response, + 
event_attachment_created_response, + observations_created_response, ): mock_gundi_sensors_client_class = mocker.MagicMock() mock_gundi_sensors_client = mocker.MagicMock() @@ -784,29 +721,17 @@ def events_created_response(): "title": "Animal Sighting", "event_type": "wildlife_sighting_rep", "recorded_at": "2024-01-29 20:51:10-03:00", - "location": { - "lat": -51.688645, - "lon": -72.704421 - }, - "event_details": { - "site_name": "MM Spot", - "species": "lion" - } + "location": {"lat": -51.688645, "lon": -72.704421}, + "event_details": {"site_name": "MM Spot", "species": "lion"}, }, { "id": "e1e1e1e1-e1e1-e1e1-e1e1-e1e1e1e1e1e2", "title": "Animal Sighting", "event_type": "wildlife_sighting_rep", "recorded_at": "2024-01-29 20:51:25-03:00", - "location": { - "lat": -51.688646, - "lon": -72.704421 - }, - "event_details": { - "site_name": "MM Spot", - "species": "lion" - } - } + "location": {"lat": -51.688646, "lon": -72.704421}, + "event_details": {"site_name": "MM Spot", "species": "lion"}, + }, ] @@ -816,13 +741,13 @@ def event_attachment_created_response(): { "object_id": "af8e2946-bad6-4d02-8a26-99dde34bd9fa", "created_at": "2024-07-04T13:15:26.559894Z", - "updated_at": None + "updated_at": None, }, { "object_id": "gat51h73-dd71-dj88-91uh-jah7162hy6fs", "created_at": "2024-07-03T13:15:26.559894Z", - "updated_at": None - } + "updated_at": None, + }, ] @@ -835,13 +760,8 @@ def observations_created_response(): "type": "tracking-device", "subject_type": "puma", "recorded_at": "2024-01-24 09:03:00-0300", - "location": { - "lat": -51.748, - "lon": -72.720 - }, - "additional": { - "speed_kmph": 5 - } + "location": {"lat": -51.748, "lon": -72.720}, + "additional": {"speed_kmph": 5}, }, { "id": "e1e1e1e1-e1e1-e1e1-e1e1-e1e1e1e1e1e2", @@ -849,14 +769,9 @@ def observations_created_response(): "type": "tracking-device", "subject_type": "puma", "recorded_at": "2024-01-24 09:05:00-0300", - "location": { - "lat": -51.755, - "lon": -72.755 - }, - "additional": { - "speed_kmph": 5 
- } - } + "location": {"lat": -51.755, "lon": -72.755}, + "additional": {"speed_kmph": 5}, + }, ] @@ -864,7 +779,7 @@ def observations_created_response(): def mock_state_manager(mocker): mock_state_manager = mocker.MagicMock() mock_state_manager.get_state.return_value = async_return( - {'last_execution': '2023-11-17T11:20:00+0200'} + {"last_execution": "2023-11-17T11:20:00+0200"} ) mock_state_manager.set_state.return_value = async_return(None) return mock_state_manager @@ -872,7 +787,7 @@ def mock_state_manager(mocker): @pytest.fixture def mock_pubsub_client( - mocker, integration_event_pubsub_message, gcp_pubsub_publish_response + mocker, integration_event_pubsub_message, gcp_pubsub_publish_response ): mock_client = mocker.MagicMock() mock_publisher = mocker.MagicMock() @@ -913,7 +828,7 @@ class MockPullActionConfiguration(PullActionConfiguration): description="Number of days to look back for data.", ui_options=UIOptions( widget="range", - ) + ), ) force_fetch: bool = FieldWithUIOptions( False, @@ -921,7 +836,7 @@ class MockPullActionConfiguration(PullActionConfiguration): description="Force fetch even if in a quiet period.", ui_options=UIOptions( widget="select", - ) + ), ) ui_global_options = GlobalUISchemaOptions( order=[ @@ -931,19 +846,53 @@ class MockPullActionConfiguration(PullActionConfiguration): ) +class MockAuthenticateActionConfiguration( + AuthActionConfiguration, ExecutableActionMixin +): + username: str = FieldWithUIOptions( + ..., + title="Username", + ui_options=UIOptions( + widget="text", + ), + ) + password: pydantic.SecretStr = FieldWithUIOptions( + ..., + title="Password", + ui_options=UIOptions( + widget="password", + ), + ) + + @pytest.fixture def mock_action_handlers(mocker): mock_action_handler = AsyncMock() mock_action_handler.return_value = {"observations_extracted": 10} - mock_action_handlers = {"pull_observations": (mock_action_handler, MockPullActionConfiguration)} + mock_action_handlers = { + "pull_observations": 
(mock_action_handler, MockPullActionConfiguration) + } + return mock_action_handlers + + +@pytest.fixture +def mock_auth_action_handlers(mocker): + mock_action_handler = AsyncMock() + mock_action_handler.return_value = { + "username": "me@example.com", + "password": "something-fancy", + } + mock_action_handlers = { + "auth": (mock_action_handler, MockAuthenticateActionConfiguration) + } return mock_action_handlers @pytest.fixture def auth_headers_response(): return { - 'Accept-Type': 'application/json', - 'Authorization': 'Bearer testtoken2a97022f21732461ee103a08fac8a35' + "Accept-Type": "application/json", + "Authorization": "Bearer testtoken2a97022f21732461ee103a08fac8a35", } @@ -965,11 +914,12 @@ def event_v2_cloud_event_payload(): return { "message": { "data": "eyJpbnRlZ3JhdGlvbl9pZCI6ICI4NDNlMDgwMS1lODFhLTQ3ZTUtOWNlMi1iMTc2ZTQ3MzZhODUiLCAiYWN0aW9uX2lkIjogInB1bGxfb2JzZXJ2YXRpb25zIn0=", - "messageId": "10298788169291041", "message_id": "10298788169291041", + "messageId": "10298788169291041", + "message_id": "10298788169291041", "publishTime": timestamp, - "publish_time": timestamp + "publish_time": timestamp, }, - "subscription": "projects/cdip-stage-78ca/subscriptions/integrationx-actions-sub" + "subscription": "projects/cdip-stage-78ca/subscriptions/integrationx-actions-sub", } @@ -979,11 +929,12 @@ def event_v2_cloud_event_payload_with_config_overrides(): return { "message": { "data": "eyJpbnRlZ3JhdGlvbl9pZCI6ICI4NDNlMDgwMS1lODFhLTQ3ZTUtOWNlMi1iMTc2ZTQ3MzZhODUiLCAiYWN0aW9uX2lkIjogInB1bGxfb2JzZXJ2YXRpb25zIiwgImNvbmZpZ19vdmVycmlkZXMiOiB7Imxvb2tiYWNrX2RheXMiOiAzfX0=", - "messageId": "10298788169291041", "message_id": "10298788169291041", + "messageId": "10298788169291041", + "message_id": "10298788169291041", "publishTime": timestamp, - "publish_time": timestamp + "publish_time": timestamp, }, - "subscription": "projects/cdip-stage-78ca/subscriptions/integrationx-actions-sub" + "subscription": "projects/cdip-stage-78ca/subscriptions/integrationx-actions-sub", } 
@@ -1021,7 +972,7 @@ def action_started_event(): config_data={ "end_datetime": "2024-01-10T00:00:00-00:00", "start_datetime": "2024-01-01T23:59:59-00:00", - "force_run_since_start": True + "force_run_since_start": True, }, ) ) @@ -1036,9 +987,9 @@ def action_complete_event(): config_data={ "end_datetime": "2024-01-10T00:00:00-00:00", "start_datetime": "2024-01-01T23:59:59-00:00", - "force_run_since_start": True + "force_run_since_start": True, }, - result={"observations_extracted": 10} + result={"observations_extracted": 10}, ) ) @@ -1052,9 +1003,9 @@ def action_failed_event(): config_data={ "end_datetime": "2024-01-10T00:00:00-00:00", "start_datetime": "2024-01-01T23:59:59-00:00", - "force_run_since_start": True + "force_run_since_start": True, }, - error="ConnectionError: Error connecting to X system" + error="ConnectionError: Error connecting to X system", ) ) @@ -1068,13 +1019,13 @@ def custom_activity_log_event(): config_data={ "end_datetime": "2024-01-01T00:00:00-00:00", "start_datetime": "2024-01-10T23:59:59-00:00", - "force_run_since_start": True + "force_run_since_start": True, }, title="Invalid start_datetime for action pull_observations", level=LogLevel.ERROR, data={ "details": "start_datetime cannot be grater than end_datetime. Please fix the configuration." 
- } + }, ) ) @@ -1083,13 +1034,158 @@ def custom_activity_log_event(): def webhook_started_event(): return IntegrationWebhookStarted( payload=WebhookExecutionStarted( - integration_id='ed8ed116-efb4-4fb1-9d68-0ecc4b0996a1', - webhook_id='lionguards_webhook', + integration_id="ed8ed116-efb4-4fb1-9d68-0ecc4b0996a1", + webhook_id="lionguards_webhook", config_data={ - 'json_schema': {'type': 'object', 'properties': {'received_at': {'type': 'string', 'format': 'date-time'}, 'end_device_ids': {'type': 'object', 'properties': {'dev_eui': {'type': 'string'}, 'dev_addr': {'type': 'string'}, 'device_id': {'type': 'string'}, 'application_ids': {'type': 'object', 'properties': {'application_id': {'type': 'string'}}, 'additionalProperties': False}}, 'additionalProperties': False}, 'uplink_message': {'type': 'object', 'properties': {'f_cnt': {'type': 'integer'}, 'f_port': {'type': 'integer'}, 'settings': {'type': 'object', 'properties': {'time': {'type': 'string', 'format': 'date-time'}, 'data_rate': {'type': 'object', 'properties': {'lora': {'type': 'object', 'properties': {'bandwidth': {'type': 'integer'}, 'coding_rate': {'type': 'string'}, 'spreading_factor': {'type': 'integer'}}, 'additionalProperties': False}}, 'additionalProperties': False}, 'frequency': {'type': 'string'}, 'timestamp': {'type': 'integer'}}, 'additionalProperties': False}, 'locations': {'type': 'object', 'properties': {'frm-payload': {'type': 'object', 'properties': {'source': {'type': 'string'}, 'latitude': {'type': 'number'}, 'longitude': {'type': 'number'}}, 'additionalProperties': False}}, 'additionalProperties': False}, 'frm_payload': {'type': 'string'}, 'network_ids': {'type': 'object', 'properties': {'ns_id': {'type': 'string'}, 'net_id': {'type': 'string'}, 'tenant_id': {'type': 'string'}, 'cluster_id': {'type': 'string'}, 'tenant_address': {'type': 'string'}, 'cluster_address': {'type': 'string'}}, 'additionalProperties': False}, 'received_at': {'type': 'string', 'format': 'date-time'}, 
'rx_metadata': {'type': 'array', 'items': {'type': 'object', 'properties': {'snr': {'type': 'number'}, 'rssi': {'type': 'integer'}, 'time': {'type': 'string', 'format': 'date-time'}, 'gps_time': {'type': 'string', 'format': 'date-time'}, 'timestamp': {'type': 'integer'}, 'gateway_ids': {'type': 'object', 'properties': {'eui': {'type': 'string'}, 'gateway_id': {'type': 'string'}}, 'additionalProperties': False}, 'received_at': {'type': 'string', 'format': 'date-time'}, 'channel_rssi': {'type': 'integer'}, 'uplink_token': {'type': 'string'}, 'channel_index': {'type': 'integer'}}, 'additionalProperties': False}}, 'decoded_payload': {'type': 'object', 'properties': {'gps': {'type': 'string'}, 'latitude': {'type': 'number'}, 'longitude': {'type': 'number'}, 'batterypercent': {'type': 'integer'}}, 'additionalProperties': False}, 'consumed_airtime': {'type': 'string'}}, 'additionalProperties': False}, 'correlation_ids': {'type': 'array', 'items': {'type': 'string'}}}, 'additionalProperties': False}, - 'jq_filter': '{"source": .end_device_ids.device_id, "source_name": .end_device_ids.device_id, "type": .uplink_message.locations."frm-payload".source, "recorded_at": .uplink_message.settings.time, "location": { "lat": .uplink_message.locations."frm-payload".latitude, "lon": .uplink_message.locations."frm-payload".longitude}, "additional": {"application_id": .end_device_ids.application_ids.application_id, "dev_eui": .end_device_ids.dev_eui, "dev_addr": .end_device_ids.dev_addr, "batterypercent": .uplink_message.decoded_payload.batterypercent, "gps": .uplink_message.decoded_payload.gps}}', - 'output_type': 'obv' - } + "json_schema": { + "type": "object", + "properties": { + "received_at": {"type": "string", "format": "date-time"}, + "end_device_ids": { + "type": "object", + "properties": { + "dev_eui": {"type": "string"}, + "dev_addr": {"type": "string"}, + "device_id": {"type": "string"}, + "application_ids": { + "type": "object", + "properties": { + "application_id": {"type": 
"string"} + }, + "additionalProperties": False, + }, + }, + "additionalProperties": False, + }, + "uplink_message": { + "type": "object", + "properties": { + "f_cnt": {"type": "integer"}, + "f_port": {"type": "integer"}, + "settings": { + "type": "object", + "properties": { + "time": { + "type": "string", + "format": "date-time", + }, + "data_rate": { + "type": "object", + "properties": { + "lora": { + "type": "object", + "properties": { + "bandwidth": { + "type": "integer" + }, + "coding_rate": { + "type": "string" + }, + "spreading_factor": { + "type": "integer" + }, + }, + "additionalProperties": False, + } + }, + "additionalProperties": False, + }, + "frequency": {"type": "string"}, + "timestamp": {"type": "integer"}, + }, + "additionalProperties": False, + }, + "locations": { + "type": "object", + "properties": { + "frm-payload": { + "type": "object", + "properties": { + "source": {"type": "string"}, + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + }, + "additionalProperties": False, + } + }, + "additionalProperties": False, + }, + "frm_payload": {"type": "string"}, + "network_ids": { + "type": "object", + "properties": { + "ns_id": {"type": "string"}, + "net_id": {"type": "string"}, + "tenant_id": {"type": "string"}, + "cluster_id": {"type": "string"}, + "tenant_address": {"type": "string"}, + "cluster_address": {"type": "string"}, + }, + "additionalProperties": False, + }, + "received_at": { + "type": "string", + "format": "date-time", + }, + "rx_metadata": { + "type": "array", + "items": { + "type": "object", + "properties": { + "snr": {"type": "number"}, + "rssi": {"type": "integer"}, + "time": { + "type": "string", + "format": "date-time", + }, + "gps_time": { + "type": "string", + "format": "date-time", + }, + "timestamp": {"type": "integer"}, + "gateway_ids": { + "type": "object", + "properties": { + "eui": {"type": "string"}, + "gateway_id": {"type": "string"}, + }, + "additionalProperties": False, + }, + "received_at": { + 
"type": "string", + "format": "date-time", + }, + "channel_rssi": {"type": "integer"}, + "uplink_token": {"type": "string"}, + "channel_index": {"type": "integer"}, + }, + "additionalProperties": False, + }, + }, + "decoded_payload": { + "type": "object", + "properties": { + "gps": {"type": "string"}, + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "batterypercent": {"type": "integer"}, + }, + "additionalProperties": False, + }, + "consumed_airtime": {"type": "string"}, + }, + "additionalProperties": False, + }, + "correlation_ids": { + "type": "array", + "items": {"type": "string"}, + }, + }, + "additionalProperties": False, + }, + "jq_filter": '{"source": .end_device_ids.device_id, "source_name": .end_device_ids.device_id, "type": .uplink_message.locations."frm-payload".source, "recorded_at": .uplink_message.settings.time, "location": { "lat": .uplink_message.locations."frm-payload".latitude, "lon": .uplink_message.locations."frm-payload".longitude}, "additional": {"application_id": .end_device_ids.application_ids.application_id, "dev_eui": .end_device_ids.dev_eui, "dev_addr": .end_device_ids.dev_addr, "batterypercent": .uplink_message.decoded_payload.batterypercent, "gps": .uplink_message.decoded_payload.gps}}', + "output_type": "obv", + }, ) ) @@ -1098,14 +1194,159 @@ def webhook_started_event(): def webhook_complete_event(): return IntegrationWebhookComplete( payload=WebhookExecutionComplete( - integration_id='ed8ed116-efb4-4fb1-9d68-0ecc4b0996a1', - webhook_id='lionguards_webhook', + integration_id="ed8ed116-efb4-4fb1-9d68-0ecc4b0996a1", + webhook_id="lionguards_webhook", config_data={ - 'json_schema': {'type': 'object', 'properties': {'received_at': {'type': 'string', 'format': 'date-time'}, 'end_device_ids': {'type': 'object', 'properties': {'dev_eui': {'type': 'string'}, 'dev_addr': {'type': 'string'}, 'device_id': {'type': 'string'}, 'application_ids': {'type': 'object', 'properties': {'application_id': {'type': 'string'}}, 
'additionalProperties': False}}, 'additionalProperties': False}, 'uplink_message': {'type': 'object', 'properties': {'f_cnt': {'type': 'integer'}, 'f_port': {'type': 'integer'}, 'settings': {'type': 'object', 'properties': {'time': {'type': 'string', 'format': 'date-time'}, 'data_rate': {'type': 'object', 'properties': {'lora': {'type': 'object', 'properties': {'bandwidth': {'type': 'integer'}, 'coding_rate': {'type': 'string'}, 'spreading_factor': {'type': 'integer'}}, 'additionalProperties': False}}, 'additionalProperties': False}, 'frequency': {'type': 'string'}, 'timestamp': {'type': 'integer'}}, 'additionalProperties': False}, 'locations': {'type': 'object', 'properties': {'frm-payload': {'type': 'object', 'properties': {'source': {'type': 'string'}, 'latitude': {'type': 'number'}, 'longitude': {'type': 'number'}}, 'additionalProperties': False}}, 'additionalProperties': False}, 'frm_payload': {'type': 'string'}, 'network_ids': {'type': 'object', 'properties': {'ns_id': {'type': 'string'}, 'net_id': {'type': 'string'}, 'tenant_id': {'type': 'string'}, 'cluster_id': {'type': 'string'}, 'tenant_address': {'type': 'string'}, 'cluster_address': {'type': 'string'}}, 'additionalProperties': False}, 'received_at': {'type': 'string', 'format': 'date-time'}, 'rx_metadata': {'type': 'array', 'items': {'type': 'object', 'properties': {'snr': {'type': 'number'}, 'rssi': {'type': 'integer'}, 'time': {'type': 'string', 'format': 'date-time'}, 'gps_time': {'type': 'string', 'format': 'date-time'}, 'timestamp': {'type': 'integer'}, 'gateway_ids': {'type': 'object', 'properties': {'eui': {'type': 'string'}, 'gateway_id': {'type': 'string'}}, 'additionalProperties': False}, 'received_at': {'type': 'string', 'format': 'date-time'}, 'channel_rssi': {'type': 'integer'}, 'uplink_token': {'type': 'string'}, 'channel_index': {'type': 'integer'}}, 'additionalProperties': False}}, 'decoded_payload': {'type': 'object', 'properties': {'gps': {'type': 'string'}, 'latitude': {'type': 
'number'}, 'longitude': {'type': 'number'}, 'batterypercent': {'type': 'integer'}}, 'additionalProperties': False}, 'consumed_airtime': {'type': 'string'}}, 'additionalProperties': False}, 'correlation_ids': {'type': 'array', 'items': {'type': 'string'}}}, 'additionalProperties': False}, - 'jq_filter': '{"source": .end_device_ids.device_id, "source_name": .end_device_ids.device_id, "type": .uplink_message.locations."frm-payload".source, "recorded_at": .uplink_message.settings.time, "location": { "lat": .uplink_message.locations."frm-payload".latitude, "lon": .uplink_message.locations."frm-payload".longitude}, "additional": {"application_id": .end_device_ids.application_ids.application_id, "dev_eui": .end_device_ids.dev_eui, "dev_addr": .end_device_ids.dev_addr, "batterypercent": .uplink_message.decoded_payload.batterypercent, "gps": .uplink_message.decoded_payload.gps}}', - 'output_type': 'obv' + "json_schema": { + "type": "object", + "properties": { + "received_at": {"type": "string", "format": "date-time"}, + "end_device_ids": { + "type": "object", + "properties": { + "dev_eui": {"type": "string"}, + "dev_addr": {"type": "string"}, + "device_id": {"type": "string"}, + "application_ids": { + "type": "object", + "properties": { + "application_id": {"type": "string"} + }, + "additionalProperties": False, + }, + }, + "additionalProperties": False, + }, + "uplink_message": { + "type": "object", + "properties": { + "f_cnt": {"type": "integer"}, + "f_port": {"type": "integer"}, + "settings": { + "type": "object", + "properties": { + "time": { + "type": "string", + "format": "date-time", + }, + "data_rate": { + "type": "object", + "properties": { + "lora": { + "type": "object", + "properties": { + "bandwidth": { + "type": "integer" + }, + "coding_rate": { + "type": "string" + }, + "spreading_factor": { + "type": "integer" + }, + }, + "additionalProperties": False, + } + }, + "additionalProperties": False, + }, + "frequency": {"type": "string"}, + "timestamp": {"type": 
"integer"}, + }, + "additionalProperties": False, + }, + "locations": { + "type": "object", + "properties": { + "frm-payload": { + "type": "object", + "properties": { + "source": {"type": "string"}, + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + }, + "additionalProperties": False, + } + }, + "additionalProperties": False, + }, + "frm_payload": {"type": "string"}, + "network_ids": { + "type": "object", + "properties": { + "ns_id": {"type": "string"}, + "net_id": {"type": "string"}, + "tenant_id": {"type": "string"}, + "cluster_id": {"type": "string"}, + "tenant_address": {"type": "string"}, + "cluster_address": {"type": "string"}, + }, + "additionalProperties": False, + }, + "received_at": { + "type": "string", + "format": "date-time", + }, + "rx_metadata": { + "type": "array", + "items": { + "type": "object", + "properties": { + "snr": {"type": "number"}, + "rssi": {"type": "integer"}, + "time": { + "type": "string", + "format": "date-time", + }, + "gps_time": { + "type": "string", + "format": "date-time", + }, + "timestamp": {"type": "integer"}, + "gateway_ids": { + "type": "object", + "properties": { + "eui": {"type": "string"}, + "gateway_id": {"type": "string"}, + }, + "additionalProperties": False, + }, + "received_at": { + "type": "string", + "format": "date-time", + }, + "channel_rssi": {"type": "integer"}, + "uplink_token": {"type": "string"}, + "channel_index": {"type": "integer"}, + }, + "additionalProperties": False, + }, + }, + "decoded_payload": { + "type": "object", + "properties": { + "gps": {"type": "string"}, + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "batterypercent": {"type": "integer"}, + }, + "additionalProperties": False, + }, + "consumed_airtime": {"type": "string"}, + }, + "additionalProperties": False, + }, + "correlation_ids": { + "type": "array", + "items": {"type": "string"}, + }, + }, + "additionalProperties": False, + }, + "jq_filter": '{"source": .end_device_ids.device_id, 
"source_name": .end_device_ids.device_id, "type": .uplink_message.locations."frm-payload".source, "recorded_at": .uplink_message.settings.time, "location": { "lat": .uplink_message.locations."frm-payload".latitude, "lon": .uplink_message.locations."frm-payload".longitude}, "additional": {"application_id": .end_device_ids.application_ids.application_id, "dev_eui": .end_device_ids.dev_eui, "dev_addr": .end_device_ids.dev_addr, "batterypercent": .uplink_message.decoded_payload.batterypercent, "gps": .uplink_message.decoded_payload.gps}}', + "output_type": "obv", }, - result={'data_points_qty': 1} + result={"data_points_qty": 1}, ) ) @@ -1114,14 +1355,159 @@ def webhook_complete_event(): def webhook_failed_event(): return IntegrationWebhookFailed( payload=WebhookExecutionFailed( - integration_id='ed8ed116-efb4-4fb1-9d68-0ecc4b0996a1', - webhook_id='lionguards_webhook', + integration_id="ed8ed116-efb4-4fb1-9d68-0ecc4b0996a1", + webhook_id="lionguards_webhook", config_data={ - 'json_schema': {'type': 'object', 'properties': {'received_at': {'type': 'string', 'format': 'date-time'}, 'end_device_ids': {'type': 'object', 'properties': {'dev_eui': {'type': 'string'}, 'dev_addr': {'type': 'string'}, 'device_id': {'type': 'string'}, 'application_ids': {'type': 'object', 'properties': {'application_id': {'type': 'string'}}, 'additionalProperties': False}}, 'additionalProperties': False}, 'uplink_message': {'type': 'object', 'properties': {'f_cnt': {'type': 'integer'}, 'f_port': {'type': 'integer'}, 'settings': {'type': 'object', 'properties': {'time': {'type': 'string', 'format': 'date-time'}, 'data_rate': {'type': 'object', 'properties': {'lora': {'type': 'object', 'properties': {'bandwidth': {'type': 'integer'}, 'coding_rate': {'type': 'string'}, 'spreading_factor': {'type': 'integer'}}, 'additionalProperties': False}}, 'additionalProperties': False}, 'frequency': {'type': 'string'}, 'timestamp': {'type': 'integer'}}, 'additionalProperties': False}, 'locations': {'type': 
'object', 'properties': {'frm-payload': {'type': 'object', 'properties': {'source': {'type': 'string'}, 'latitude': {'type': 'number'}, 'longitude': {'type': 'number'}}, 'additionalProperties': False}}, 'additionalProperties': False}, 'frm_payload': {'type': 'string'}, 'network_ids': {'type': 'object', 'properties': {'ns_id': {'type': 'string'}, 'net_id': {'type': 'string'}, 'tenant_id': {'type': 'string'}, 'cluster_id': {'type': 'string'}, 'tenant_address': {'type': 'string'}, 'cluster_address': {'type': 'string'}}, 'additionalProperties': False}, 'received_at': {'type': 'string', 'format': 'date-time'}, 'rx_metadata': {'type': 'array', 'items': {'type': 'object', 'properties': {'snr': {'type': 'number'}, 'rssi': {'type': 'integer'}, 'time': {'type': 'string', 'format': 'date-time'}, 'gps_time': {'type': 'string', 'format': 'date-time'}, 'timestamp': {'type': 'integer'}, 'gateway_ids': {'type': 'object', 'properties': {'eui': {'type': 'string'}, 'gateway_id': {'type': 'string'}}, 'additionalProperties': False}, 'received_at': {'type': 'string', 'format': 'date-time'}, 'channel_rssi': {'type': 'integer'}, 'uplink_token': {'type': 'string'}, 'channel_index': {'type': 'integer'}}, 'additionalProperties': False}}, 'decoded_payload': {'type': 'object', 'properties': {'gps': {'type': 'string'}, 'latitude': {'type': 'number'}, 'longitude': {'type': 'number'}, 'batterypercent': {'type': 'integer'}}, 'additionalProperties': False}, 'consumed_airtime': {'type': 'string'}}, 'additionalProperties': False}, 'correlation_ids': {'type': 'array', 'items': {'type': 'string'}}}, 'additionalProperties': False}, - 'jq_filter': '{"source": .end_device_ids.device_id, "source_name": .end_device_ids.device_id, "type": .uplink_message.locations."frm-payload".source, "recorded_at": .uplink_message.settings.time, "location": { "lat": .uplink_message.locations."frm-payload".latitude, "lon": .uplink_message.locations."frm-payload".longitude}, "additional": {"application_id": 
.end_device_ids.application_ids.application_id, "dev_eui": .end_device_ids.dev_eui, "dev_addr": .end_device_ids.dev_addr, "batterypercent": .uplink_message.decoded_payload.batterypercent, "gps": .uplink_message.decoded_payload.gps}}', - 'output_type': 'patrol' + "json_schema": { + "type": "object", + "properties": { + "received_at": {"type": "string", "format": "date-time"}, + "end_device_ids": { + "type": "object", + "properties": { + "dev_eui": {"type": "string"}, + "dev_addr": {"type": "string"}, + "device_id": {"type": "string"}, + "application_ids": { + "type": "object", + "properties": { + "application_id": {"type": "string"} + }, + "additionalProperties": False, + }, + }, + "additionalProperties": False, + }, + "uplink_message": { + "type": "object", + "properties": { + "f_cnt": {"type": "integer"}, + "f_port": {"type": "integer"}, + "settings": { + "type": "object", + "properties": { + "time": { + "type": "string", + "format": "date-time", + }, + "data_rate": { + "type": "object", + "properties": { + "lora": { + "type": "object", + "properties": { + "bandwidth": { + "type": "integer" + }, + "coding_rate": { + "type": "string" + }, + "spreading_factor": { + "type": "integer" + }, + }, + "additionalProperties": False, + } + }, + "additionalProperties": False, + }, + "frequency": {"type": "string"}, + "timestamp": {"type": "integer"}, + }, + "additionalProperties": False, + }, + "locations": { + "type": "object", + "properties": { + "frm-payload": { + "type": "object", + "properties": { + "source": {"type": "string"}, + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + }, + "additionalProperties": False, + } + }, + "additionalProperties": False, + }, + "frm_payload": {"type": "string"}, + "network_ids": { + "type": "object", + "properties": { + "ns_id": {"type": "string"}, + "net_id": {"type": "string"}, + "tenant_id": {"type": "string"}, + "cluster_id": {"type": "string"}, + "tenant_address": {"type": "string"}, + "cluster_address": 
{"type": "string"}, + }, + "additionalProperties": False, + }, + "received_at": { + "type": "string", + "format": "date-time", + }, + "rx_metadata": { + "type": "array", + "items": { + "type": "object", + "properties": { + "snr": {"type": "number"}, + "rssi": {"type": "integer"}, + "time": { + "type": "string", + "format": "date-time", + }, + "gps_time": { + "type": "string", + "format": "date-time", + }, + "timestamp": {"type": "integer"}, + "gateway_ids": { + "type": "object", + "properties": { + "eui": {"type": "string"}, + "gateway_id": {"type": "string"}, + }, + "additionalProperties": False, + }, + "received_at": { + "type": "string", + "format": "date-time", + }, + "channel_rssi": {"type": "integer"}, + "uplink_token": {"type": "string"}, + "channel_index": {"type": "integer"}, + }, + "additionalProperties": False, + }, + }, + "decoded_payload": { + "type": "object", + "properties": { + "gps": {"type": "string"}, + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "batterypercent": {"type": "integer"}, + }, + "additionalProperties": False, + }, + "consumed_airtime": {"type": "string"}, + }, + "additionalProperties": False, + }, + "correlation_ids": { + "type": "array", + "items": {"type": "string"}, + }, + }, + "additionalProperties": False, + }, + "jq_filter": '{"source": .end_device_ids.device_id, "source_name": .end_device_ids.device_id, "type": .uplink_message.locations."frm-payload".source, "recorded_at": .uplink_message.settings.time, "location": { "lat": .uplink_message.locations."frm-payload".latitude, "lon": .uplink_message.locations."frm-payload".longitude}, "additional": {"application_id": .end_device_ids.application_ids.application_id, "dev_eui": .end_device_ids.dev_eui, "dev_addr": .end_device_ids.dev_addr, "batterypercent": .uplink_message.decoded_payload.batterypercent, "gps": .uplink_message.decoded_payload.gps}}', + "output_type": "patrol", }, - error='Invalid output type: patrol. Please review the configuration.' 
+ error="Invalid output type: patrol. Please review the configuration.", ) ) @@ -1130,37 +1516,44 @@ def webhook_failed_event(): def webhook_custom_activity_log_event(): return IntegrationWebhookCustomLog( payload=CustomWebhookLog( - integration_id='ed8ed116-efb4-4fb1-9d68-0ecc4b0996a1', - webhook_id='lionguards_webhook', + integration_id="ed8ed116-efb4-4fb1-9d68-0ecc4b0996a1", + webhook_id="lionguards_webhook", config_data={}, - title='Webhook data transformed successfully', + title="Webhook data transformed successfully", level=LogLevel.DEBUG, data={ - 'transformed_data': [ + "transformed_data": [ { - 'source': 'test-webhooks-mm', - 'source_name': 'test-webhooks-mm', - 'type': 'SOURCE_GPS', - 'recorded_at': '2024-06-07T15:08:19.841Z', - 'location': {'lat': -4.1234567, 'lon': 32.01234567890123}, - 'additional': { - 'application_id': 'lt10-globalsat', - 'dev_eui': '123456789ABCDEF0', - 'dev_addr': '12345ABC', - 'batterypercent': 100, - 'gps': '3D fix' - } + "source": "test-webhooks-mm", + "source_name": "test-webhooks-mm", + "type": "SOURCE_GPS", + "recorded_at": "2024-06-07T15:08:19.841Z", + "location": {"lat": -4.1234567, "lon": 32.01234567890123}, + "additional": { + "application_id": "lt10-globalsat", + "dev_eui": "123456789ABCDEF0", + "dev_addr": "12345ABC", + "batterypercent": 100, + "gps": "3D fix", + }, } ] - } + }, ) ) @pytest.fixture def system_event( - request, action_started_event, action_complete_event, action_failed_event, custom_activity_log_event, - webhook_started_event, webhook_complete_event, webhook_failed_event, webhook_custom_activity_log_event + request, + action_started_event, + action_complete_event, + action_failed_event, + custom_activity_log_event, + webhook_started_event, + webhook_complete_event, + webhook_failed_event, + webhook_custom_activity_log_event, ): if request.param == "action_started_event": return action_started_event @@ -1191,7 +1584,11 @@ def mock_get_webhook_handler_for_generic_json_payload(mocker, mock_webhook_handl 
mock_get_webhook_handler = mocker.MagicMock() payload_model = GenericJsonPayload config_model = GenericJsonTransformConfig - mock_get_webhook_handler.return_value = mock_webhook_handler, payload_model, config_model + mock_get_webhook_handler.return_value = ( + mock_webhook_handler, + payload_model, + config_model, + ) return mock_get_webhook_handler @@ -1208,14 +1605,14 @@ class MockWebhookConfigModel(WebhookConfiguration): title="Allowed Devices List", ui_options=UIOptions( widget="list", - ) + ), ) deduplication_enabled: bool = FieldWithUIOptions( ..., title="Deduplication Enabled", ui_options=UIOptions( widget="radio", - ) + ), ) @@ -1224,7 +1621,11 @@ def mock_get_webhook_handler_for_fixed_json_payload(mocker, mock_webhook_handler mock_get_webhook_handler = mocker.MagicMock() payload_model = MockWebhookPayloadModel config_model = MockWebhookConfigModel - mock_get_webhook_handler.return_value = mock_webhook_handler, payload_model, config_model + mock_get_webhook_handler.return_value = ( + mock_webhook_handler, + payload_model, + config_model, + ) return mock_get_webhook_handler @@ -1233,7 +1634,7 @@ def mock_webhook_request_headers_onyesha(): return { "apikey": "testapikey", "x-consumer-username": "integration:testintegrationid", - "x-gundi-integration-type": "onyesha_wh" + "x-gundi-integration-type": "onyesha_wh", } @@ -1242,15 +1643,11 @@ def mock_webhook_request_payload_for_dynamic_schema(): return { "end_device_ids": { "device_id": "lt10-1234", - "application_ids": { - "application_id": "lt10-myapp" - }, + "application_ids": {"application_id": "lt10-myapp"}, "dev_eui": "0123456789ABCDEF", - "dev_addr": "789ABCDE" + "dev_addr": "789ABCDE", }, - "correlation_ids": [ - "gs:uplink:FAKEWXYZK41B1ZE12346578ABC" - ], + "correlation_ids": ["gs:uplink:FAKEWXYZK41B1ZE12346578ABC"], "received_at": "2024-06-07T15:08:20.179713582Z", "uplink_message": { "f_port": 2, @@ -1260,13 +1657,13 @@ def mock_webhook_request_payload_for_dynamic_schema(): "batterypercent": 100, "gps": 
"3D fix", "latitude": -2.3828796, - "longitude": 37.338060999999996 + "longitude": 37.338060999999996, }, "rx_metadata": [ { "gateway_ids": { "gateway_id": "my-gateway-006", - "eui": "123ABCDEFF1234A1" + "eui": "123ABCDEFF1234A1", }, "time": "2024-06-07T15:08:19.841Z", "timestamp": 1569587228, @@ -1276,7 +1673,7 @@ def mock_webhook_request_payload_for_dynamic_schema(): "uplink_token": "FakeTokenlvbi1ndWFyZGlhbnMtMDA2Eghk13r//gFake123LjsBRoMCOPEjLMGELbnpsADIODawZXXgw4qDAjjxTestBhDAyIKRAw==", "channel_index": 7, "gps_time": "2024-06-07T15:08:19.841Z", - "received_at": "2024-06-07T15:08:19.880458765Z" + "received_at": "2024-06-07T15:08:19.880458765Z", } ], "settings": { @@ -1284,12 +1681,12 @@ def mock_webhook_request_payload_for_dynamic_schema(): "lora": { "bandwidth": 125000, "spreading_factor": 11, - "coding_rate": "4/5" + "coding_rate": "4/5", } }, "frequency": "867900000", "timestamp": 1569587228, - "time": "2024-06-07T15:08:19.841Z" + "time": "2024-06-07T15:08:19.841Z", }, "received_at": "2024-06-07T15:08:19.940799259Z", "consumed_airtime": "1.482752s", @@ -1297,7 +1694,7 @@ def mock_webhook_request_payload_for_dynamic_schema(): "frm-payload": { "latitude": -5.1234567, "longitude": 32.132456789999999, - "source": "SOURCE_GPS" + "source": "SOURCE_GPS", } }, "network_ids": { @@ -1306,9 +1703,9 @@ def mock_webhook_request_payload_for_dynamic_schema(): "tenant_id": "faketenant", "cluster_id": "eu1", "cluster_address": "eu1.cloud.thethings.industries", - "tenant_address": "faketenant.eu1.cloud.thethings.industries" - } - } + "tenant_address": "faketenant.eu1.cloud.thethings.industries", + }, + }, } @@ -1318,5 +1715,5 @@ def mock_webhook_request_payload_for_fixed_schema(): "device_id": "device1", "received_at": "2024-06-07T15:08:20.179713582Z", "lat": -2.3828796, - "lon": 35.3380609 + "lon": 35.3380609, } diff --git a/app/services/self_registration.py b/app/services/self_registration.py index 88e68a9..9b905b2 100644 --- a/app/services/self_registration.py +++ 
b/app/services/self_registration.py @@ -5,7 +5,13 @@ import stamina import httpx -from app.actions import action_handlers, AuthActionConfiguration, PullActionConfiguration, PushActionConfiguration +from app.actions import ( + action_handlers, + AuthActionConfiguration, + PullActionConfiguration, + PushActionConfiguration, + ExecutableActionMixin, +) from app.settings import INTEGRATION_TYPE_SLUG, INTEGRATION_SERVICE_URL from .core import ActionTypeEnum from app.webhooks.core import get_webhook_handler, GenericJsonTransformConfig @@ -17,7 +23,9 @@ async def register_integration_in_gundi(gundi_client, type_slug=None, service_ur # Prepare the integration name and value integration_type_slug = type_slug or INTEGRATION_TYPE_SLUG if not integration_type_slug: - raise ValueError("Please define a slug id for this integration type, either passing it in the type_slug argument or setting it in the INTEGRATION_TYPE_SLUG setting.") + raise ValueError( + "Please define a slug id for this integration type, either passing it in the type_slug argument or setting it in the INTEGRATION_TYPE_SLUG setting." 
+ ) integration_type_slug = integration_type_slug.strip().lower() integration_type_name = integration_type_slug.replace("_", " ").title() logger.info(f"Registering integration type '{integration_type_slug}'...") @@ -27,7 +35,9 @@ async def register_integration_in_gundi(gundi_client, type_slug=None, service_ur "description": f"Default type for integrations with {integration_type_name}", } if integration_service_url := service_url or INTEGRATION_SERVICE_URL: - logger.info(f"Registering '{integration_type_slug}' with service_url: '{integration_service_url}'") + logger.info( + f"Registering '{integration_type_slug}' with service_url: '{integration_service_url}'" + ) data["service_url"] = integration_service_url # Prepare the actions and schemas @@ -45,6 +55,10 @@ async def register_integration_in_gundi(gundi_client, type_slug=None, service_ur action_type = ActionTypeEnum.PUSH_DATA.value else: action_type = ActionTypeEnum.GENERIC.value + + if issubclass(config_model, ExecutableActionMixin): + action_schema["is_executable"] = True + actions.append( { "type": action_type, @@ -53,7 +67,9 @@ async def register_integration_in_gundi(gundi_client, type_slug=None, service_ur "description": f"{integration_type_name} {action_name} action", "schema": action_schema, "ui_schema": action_ui_schema, - "is_periodic_action": True if issubclass(config_model, PullActionConfiguration) else False, + "is_periodic_action": ( + True if issubclass(config_model, PullActionConfiguration) else False + ), } ) data["actions"] = actions @@ -63,7 +79,9 @@ async def register_integration_in_gundi(gundi_client, type_slug=None, service_ur except (ImportError, AttributeError, NotImplementedError) as e: logger.info(f"Webhook handler not found. Skipping webhook registration.") except Exception as e: - logger.warning(f"Error getting webhook handler: {e}. Skipping webhook registration.") + logger.warning( + f"Error getting webhook handler: {e}. Skipping webhook registration." 
+ ) else: data["webhook"] = { "name": f"{integration_type_name} Webhook", @@ -75,7 +93,9 @@ async def register_integration_in_gundi(gundi_client, type_slug=None, service_ur logger.info(f"Registering '{integration_type_slug}' with actions: '{actions}'") # Register the integration type and actions in Gundi - async for attempt in stamina.retry_context(on=httpx.HTTPError, wait_initial=datetime.timedelta(seconds=1),attempts=3): + async for attempt in stamina.retry_context( + on=httpx.HTTPError, wait_initial=datetime.timedelta(seconds=1), attempts=3 + ): with attempt: response = await gundi_client.register_integration_type(data) logger.info(f"Registering integration type '{integration_type_slug}'...DONE") diff --git a/app/services/tests/test_self_registration.py b/app/services/tests/test_self_registration.py index 302d2a9..2af00eb 100644 --- a/app/services/tests/test_self_registration.py +++ b/app/services/tests/test_self_registration.py @@ -8,11 +8,17 @@ @pytest.mark.asyncio async def test_register_integration_with_slug_setting( - mocker, mock_gundi_client_v2, mock_action_handlers, mock_get_webhook_handler_for_fixed_json_payload + mocker, + mock_gundi_client_v2, + mock_action_handlers, + mock_get_webhook_handler_for_fixed_json_payload, ): mocker.patch("app.services.self_registration.INTEGRATION_TYPE_SLUG", "x_tracker") mocker.patch("app.services.self_registration.action_handlers", mock_action_handlers) - mocker.patch("app.services.self_registration.get_webhook_handler", mock_get_webhook_handler_for_fixed_json_payload) + mocker.patch( + "app.services.self_registration.get_webhook_handler", + mock_get_webhook_handler_for_fixed_json_payload, + ) await register_integration_in_gundi(gundi_client=mock_gundi_client_v2) assert mock_gundi_client_v2.register_integration_type.called mock_gundi_client_v2.register_integration_type.assert_called_with( @@ -34,28 +40,25 @@ async def test_register_integration_with_slug_setting( "title": "Data lookback days", "description": "Number of 
days to look back for data.", "default": 30, - "minimum": 1, "maximum": 30, - "type": "integer" + "minimum": 1, + "maximum": 30, + "type": "integer", }, "force_fetch": { "title": "Force fetch", "description": "Force fetch even if in a quiet period.", "default": False, - "type": "boolean" - } + "type": "boolean", + }, }, - "definitions": {} + "definitions": {}, }, "ui_schema": { - "lookback_days": { - "ui:widget": "range" - }, - "force_fetch": { - "ui:widget": "select" - }, + "lookback_days": {"ui:widget": "range"}, + "force_fetch": {"ui:widget": "select"}, "ui:order": ["lookback_days", "force_fetch"], }, - "is_periodic_action": True + "is_periodic_action": True, } ], "webhook": { @@ -69,36 +72,41 @@ async def test_register_integration_with_slug_setting( "allowed_devices_list": { "title": "Allowed Devices List", "type": "array", - "items": {} + "items": {}, }, "deduplication_enabled": { "title": "Deduplication Enabled", - "type": "boolean" - } + "type": "boolean", + }, }, "definitions": {}, - "required": [ - "allowed_devices_list", - "deduplication_enabled" - ] + "required": ["allowed_devices_list", "deduplication_enabled"], }, "ui_schema": { "allowed_devices_list": {"ui:widget": "list"}, - "deduplication_enabled": {"ui:widget": "radio"} + "deduplication_enabled": {"ui:widget": "radio"}, }, - } + }, } ) @pytest.mark.asyncio async def test_register_integration_with_slug_arg( - mocker, mock_gundi_client_v2, mock_action_handlers, mock_get_webhook_handler_for_fixed_json_payload + mocker, + mock_gundi_client_v2, + mock_action_handlers, + mock_get_webhook_handler_for_fixed_json_payload, ): mocker.patch("app.services.action_runner.action_handlers", mock_action_handlers) mocker.patch("app.services.self_registration.action_handlers", mock_action_handlers) - mocker.patch("app.services.self_registration.get_webhook_handler", mock_get_webhook_handler_for_fixed_json_payload) - await register_integration_in_gundi(gundi_client=mock_gundi_client_v2, type_slug="x_tracker") + 
mocker.patch( + "app.services.self_registration.get_webhook_handler", + mock_get_webhook_handler_for_fixed_json_payload, + ) + await register_integration_in_gundi( + gundi_client=mock_gundi_client_v2, type_slug="x_tracker" + ) assert mock_gundi_client_v2.register_integration_type.called mock_gundi_client_v2.register_integration_type.assert_called_with( { @@ -119,29 +127,25 @@ async def test_register_integration_with_slug_arg( "title": "Data lookback days", "description": "Number of days to look back for data.", "default": 30, - "minimum": 1, "maximum": 30, - "type": "integer" + "minimum": 1, + "maximum": 30, + "type": "integer", }, "force_fetch": { "title": "Force fetch", "description": "Force fetch even if in a quiet period.", "default": False, - "type": "boolean" - } + "type": "boolean", + }, }, - "definitions": {} + "definitions": {}, }, "ui_schema": { - "lookback_days": { - "ui:widget": "range" - }, - "force_fetch": { - - "ui:widget": "select" - }, + "lookback_days": {"ui:widget": "range"}, + "force_fetch": {"ui:widget": "select"}, "ui:order": ["lookback_days", "force_fetch"], }, - "is_periodic_action": True + "is_periodic_action": True, } ], "webhook": { @@ -155,39 +159,41 @@ async def test_register_integration_with_slug_arg( "allowed_devices_list": { "title": "Allowed Devices List", "type": "array", - "items": {} + "items": {}, }, "deduplication_enabled": { "title": "Deduplication Enabled", - "type": "boolean" - } + "type": "boolean", + }, }, "definitions": {}, - "required": [ - "allowed_devices_list", - "deduplication_enabled" - ] + "required": ["allowed_devices_list", "deduplication_enabled"], }, "ui_schema": { "allowed_devices_list": {"ui:widget": "list"}, - "deduplication_enabled": {"ui:widget": "radio"} + "deduplication_enabled": {"ui:widget": "radio"}, }, - } + }, } ) @pytest.mark.asyncio async def test_register_integration_with_service_url_arg( - mocker, mock_gundi_client_v2, mock_action_handlers, mock_get_webhook_handler_for_fixed_json_payload + 
mocker, + mock_gundi_client_v2, + mock_action_handlers, + mock_get_webhook_handler_for_fixed_json_payload, ): mocker.patch("app.services.self_registration.INTEGRATION_TYPE_SLUG", "x_tracker") mocker.patch("app.services.self_registration.action_handlers", mock_action_handlers) - mocker.patch("app.services.self_registration.get_webhook_handler", mock_get_webhook_handler_for_fixed_json_payload) + mocker.patch( + "app.services.self_registration.get_webhook_handler", + mock_get_webhook_handler_for_fixed_json_payload, + ) service_url = "https://xtracker-actions-runner-jabcutl8yb-uc.a.run.app" await register_integration_in_gundi( - gundi_client=mock_gundi_client_v2, - service_url=service_url + gundi_client=mock_gundi_client_v2, service_url=service_url ) assert mock_gundi_client_v2.register_integration_type.called mock_gundi_client_v2.register_integration_type.assert_called_with( @@ -195,7 +201,7 @@ async def test_register_integration_with_service_url_arg( "name": "X Tracker", "value": "x_tracker", "description": f"Default type for integrations with X Tracker", - 'service_url': service_url, + "service_url": service_url, "actions": [ { "type": "pull", @@ -210,28 +216,25 @@ async def test_register_integration_with_service_url_arg( "title": "Data lookback days", "description": "Number of days to look back for data.", "default": 30, - "minimum": 1, "maximum": 30, - "type": "integer" + "minimum": 1, + "maximum": 30, + "type": "integer", }, "force_fetch": { "title": "Force fetch", "description": "Force fetch even if in a quiet period.", "default": False, - "type": "boolean" - } + "type": "boolean", + }, }, - "definitions": {} + "definitions": {}, }, "ui_schema": { - "lookback_days": { - "ui:widget": "range" - }, - "force_fetch": { - "ui:widget": "select" - }, + "lookback_days": {"ui:widget": "range"}, + "force_fetch": {"ui:widget": "select"}, "ui:order": ["lookback_days", "force_fetch"], }, - "is_periodic_action": True + "is_periodic_action": True, } ], "webhook": { @@ -245,39 
+248,44 @@ async def test_register_integration_with_service_url_arg( "allowed_devices_list": { "title": "Allowed Devices List", "type": "array", - "items": {} + "items": {}, }, "deduplication_enabled": { "title": "Deduplication Enabled", - "type": "boolean" - } + "type": "boolean", + }, }, "definitions": {}, - "required": [ - "allowed_devices_list", - "deduplication_enabled" - ] + "required": ["allowed_devices_list", "deduplication_enabled"], }, "ui_schema": { "allowed_devices_list": {"ui:widget": "list"}, - "deduplication_enabled": {"ui:widget": "radio"} + "deduplication_enabled": {"ui:widget": "radio"}, }, - } + }, } ) @pytest.mark.asyncio async def test_register_integration_with_service_url_setting( - mocker, mock_gundi_client_v2, mock_action_handlers, mock_get_webhook_handler_for_fixed_json_payload + mocker, + mock_gundi_client_v2, + mock_action_handlers, + mock_get_webhook_handler_for_fixed_json_payload, ): service_url = "https://xtracker-actions-runner-jabcutl8yb-uc.a.run.app" mocker.patch("app.services.self_registration.INTEGRATION_TYPE_SLUG", "x_tracker") mocker.patch("app.services.self_registration.INTEGRATION_SERVICE_URL", service_url) mocker.patch("app.services.self_registration.action_handlers", mock_action_handlers) - mocker.patch("app.services.self_registration.get_webhook_handler", mock_get_webhook_handler_for_fixed_json_payload) + mocker.patch( + "app.services.self_registration.get_webhook_handler", + mock_get_webhook_handler_for_fixed_json_payload, + ) - await register_integration_in_gundi(gundi_client=mock_gundi_client_v2, ) + await register_integration_in_gundi( + gundi_client=mock_gundi_client_v2, + ) assert mock_gundi_client_v2.register_integration_type.called mock_gundi_client_v2.register_integration_type.assert_called_with( @@ -285,7 +293,7 @@ async def test_register_integration_with_service_url_setting( "name": "X Tracker", "value": "x_tracker", "description": f"Default type for integrations with X Tracker", - 'service_url': service_url, + 
"service_url": service_url, "actions": [ { "type": "pull", @@ -300,28 +308,106 @@ async def test_register_integration_with_service_url_setting( "title": "Data lookback days", "description": "Number of days to look back for data.", "default": 30, - "minimum": 1, "maximum": 30, - "type": "integer" + "minimum": 1, + "maximum": 30, + "type": "integer", }, "force_fetch": { "title": "Force fetch", "description": "Force fetch even if in a quiet period.", "default": False, - "type": "boolean" - } + "type": "boolean", + }, }, - "definitions": {} + "definitions": {}, }, "ui_schema": { - "lookback_days": { - "ui:widget": "range" + "lookback_days": {"ui:widget": "range"}, + "force_fetch": {"ui:widget": "select"}, + "ui:order": ["lookback_days", "force_fetch"], + }, + "is_periodic_action": True, + } + ], + "webhook": { + "name": "X Tracker Webhook", + "value": "x_tracker_webhook", + "description": "Webhook Integration with X Tracker", + "schema": { + "title": "MockWebhookConfigModel", + "type": "object", + "properties": { + "allowed_devices_list": { + "title": "Allowed Devices List", + "type": "array", + "items": {}, }, - "force_fetch": { - "ui:widget": "select" + "deduplication_enabled": { + "title": "Deduplication Enabled", + "type": "boolean", }, - "ui:order": ["lookback_days", "force_fetch"], }, - "is_periodic_action": True + "definitions": {}, + "required": ["allowed_devices_list", "deduplication_enabled"], + }, + "ui_schema": { + "allowed_devices_list": {"ui:widget": "list"}, + "deduplication_enabled": {"ui:widget": "radio"}, + }, + }, + } + ) + + +@pytest.mark.asyncio +async def test_register_integration_with_executable_action( + mocker, + mock_gundi_client_v2, + mock_auth_action_handlers, + mock_get_webhook_handler_for_fixed_json_payload, +): + mocker.patch("app.services.self_registration.INTEGRATION_TYPE_SLUG", "x_tracker") + mocker.patch( + "app.services.self_registration.action_handlers", mock_auth_action_handlers + ) + mocker.patch( + 
"app.services.self_registration.get_webhook_handler", + mock_get_webhook_handler_for_fixed_json_payload, + ) + await register_integration_in_gundi(gundi_client=mock_gundi_client_v2) + assert mock_gundi_client_v2.register_integration_type.called + mock_gundi_client_v2.register_integration_type.assert_called_with( + { + "name": "X Tracker", + "value": "x_tracker", + "description": "Default type for integrations with X Tracker", + "actions": [ + { + "type": "auth", + "name": "Auth", + "value": "auth", + "description": "X Tracker Auth action", + "schema": { + "title": "MockAuthenticateActionConfiguration", + "type": "object", + "properties": { + "username": {"title": "Username", "type": "string"}, + "password": { + "title": "Password", + "type": "string", + "writeOnly": True, + "format": "password", + }, + }, + "required": ["username", "password"], + "definitions": {}, + "is_executable": True, + }, + "ui_schema": { + "username": {"ui:widget": "text"}, + "password": {"ui:widget": "password"}, + }, + "is_periodic_action": False, } ], "webhook": { @@ -335,23 +421,20 @@ async def test_register_integration_with_service_url_setting( "allowed_devices_list": { "title": "Allowed Devices List", "type": "array", - "items": {} + "items": {}, }, "deduplication_enabled": { "title": "Deduplication Enabled", - "type": "boolean" - } + "type": "boolean", + }, }, + "required": ["allowed_devices_list", "deduplication_enabled"], "definitions": {}, - "required": [ - "allowed_devices_list", - "deduplication_enabled" - ] }, "ui_schema": { "allowed_devices_list": {"ui:widget": "list"}, - "deduplication_enabled": {"ui:widget": "radio"} + "deduplication_enabled": {"ui:widget": "radio"}, }, - } + }, } ) From ede39387061452eb026a05bd8e9ba520d9e23ebe Mon Sep 17 00:00:00 2001 From: Chris Doehring Date: Fri, 22 Nov 2024 09:08:11 -0800 Subject: [PATCH 02/16] Update README.md --- README.md | 461 +----------------------------------------------------- 1 file changed, 2 insertions(+), 459 deletions(-) 
diff --git a/README.md b/README.md index 8f95e02..9d41dec 100644 --- a/README.md +++ b/README.md @@ -1,459 +1,2 @@ -# gundi-integration-action-runner -Template repo for integration in Gundi v2. - -## Usage -- Fork this repo -- Implement your own actions in `actions/handlers.py` -- Define configurations needed for your actions in `action/configurations.py` -- Or implement a webhooks handler in `webhooks/handlers.py` -- and define configurations needed for your webhooks in `webhooks/configurations.py` -- Optionally, add the `@activity_logger()` decorator in actions to log common events which you can later see in the portal: - - Action execution started - - Action execution complete - - Error occurred during action execution -- Optionally, add the `@webhook_activity_logger()` decorator in the webhook handler to log common events which you can later see in the portal: - - Webhook execution started - - Webhook execution complete - - Error occurred during webhook execution -- Optionally, use `log_action_activity()` or `log_webhook_activity()` to log custom messages which you can later see in the portal - - -## Action Examples: - -```python -# actions/configurations.py -from .core import PullActionConfiguration - - -class PullObservationsConfiguration(PullActionConfiguration): - lookback_days: int = 10 - - -``` - -```python -# actions/handlers.py -from app.services.activity_logger import activity_logger, log_activity -from app.services.gundi import send_observations_to_gundi -from gundi_core.events import LogLevel -from .configurations import PullObservationsConfiguration - - -@activity_logger() -async def action_pull_observations(integration, action_config: PullObservationsConfiguration): - - # Add your business logic to extract data here... 
- - # Optionally, log a custom messages to be shown in the portal - await log_activity( - integration_id=integration.id, - action_id="pull_observations", - level=LogLevel.INFO, - title="Extracting observations with filter..", - data={"start_date": "2024-01-01", "end_date": "2024-01-31"}, - config_data=action_config.dict() - ) - - # Normalize the extracted data into a list of observations following to the Gundi schema: - observations = [ - { - "source": "collar-xy123", - "type": "tracking-device", - "subject_type": "puma", - "recorded_at": "2024-01-24 09:03:00-0300", - "location": { - "lat": -51.748, - "lon": -72.720 - }, - "additional": { - "speed_kmph": 10 - } - } - ] - - # Send the extracted data to Gundi - await send_observations_to_gundi(observations=observations, integration_id=integration.id) - - # The result will be recorded in the portal if using the activity_logger decorator - return {"observations_extracted": 10} -``` - - -## Webhooks Usage: -This framework provides a way to handle incoming webhooks from external services. You can define a handler function in `webhooks/handlers.py` and define the expected payload schema and configurations in `webhooks/configurations.py`. Several base classes are provided in `webhooks/core.py` to help you define the expected schema and configurations. - - -### Fixed Payload Schema -If you expect to receive data with a fixed schema, you can define a Pydantic model for the payload and configurations. These models will be used for validating and parsing the incoming data. 
-```python -# webhooks/configurations.py -import pydantic -from .core import WebhookPayload, WebhookConfiguration - - -class MyWebhookPayload(WebhookPayload): - device_id: str - timestamp: str - lat: float - lon: float - speed_kmph: float - - -class MyWebhookConfig(WebhookConfiguration): - custom_setting: str - another_custom_setting: bool - -``` -### Webhook Handler -Your webhook handler function must be named webhook_handler and it must accept the payload and config as arguments. The payload will be validated and parsed using the annotated Pydantic model. The config will be validated and parsed using the annotated Pydantic model. You can then implement your business logic to extract the data and send it to Gundi. -```python -# webhooks/handlers.py -from app.services.activity_logger import webhook_activity_logger -from app.services.gundi import send_observations_to_gundi -from .configurations import MyWebhookPayload, MyWebhookConfig - - -@webhook_activity_logger() -async def webhook_handler(payload: MyWebhookPayload, integration=None, webhook_config: MyWebhookConfig = None): - # Implement your custom logic to process the payload here... - - # If the request is related to an integration, you can use the integration object to access the integration's data - - # Normalize the extracted data into a list of observations following to the Gundi schema: - transformed_data = [ - { - "source": payload.device_id, - "type": "tracking-device", - "recorded_at": payload.timestamp, - "location": { - "lat": payload.lat, - "lon": payload.lon - }, - "additional": { - "speed_kmph": payload.speed_kmph - } - } - ] - await send_observations_to_gundi( - observations=transformed_data, - integration_id=integration.id - ) - - return {"observations_extracted": 1} -``` - -### Dynamic Payload Schema -If you expect to receive data with different schemas, you can define a schema per integration using JSON schema. 
To do that, annotate the payload arg with the `GenericJsonPayload` model, and annotate the webhook_config arg with the `DynamicSchemaConfig` model or a subclass. Then you can define the schema in the Gundi portal, and the framework will build the Pydantic model on runtime based on that schema, to validate and parse the incoming data. -```python -# webhooks/configurations.py -import pydantic -from .core import DynamicSchemaConfig - - -class MyWebhookConfig(DynamicSchemaConfig): - custom_setting: str - another_custom_setting: bool - -``` -```python -# webhooks/handlers.py -from app.services.activity_logger import webhook_activity_logger -from .core import GenericJsonPayload -from .configurations import MyWebhookConfig - - -@webhook_activity_logger() -async def webhook_handler(payload: GenericJsonPayload, integration=None, webhook_config: MyWebhookConfig = None): - # Implement your custom logic to process the payload here... - return {"observations_extracted": 1} -``` - - -### Simple JSON Transformations -For simple JSON to JSON transformations, you can use the [JQ language](https://jqlang.github.io/jq/manual/#basic-filters) to transform the incoming data. To do that, annotate the webhook_config arg with the `GenericJsonTransformConfig` model or a subclass. Then you can specify the `jq_filter` and the `output_type` (`ev` for event or `obv` for observation) in Gundi. 
-```python -# webhooks/configurations.py -import pydantic -from .core import WebhookPayload, GenericJsonTransformConfig - - -class MyWebhookPayload(WebhookPayload): - device_id: str - timestamp: str - lat: float - lon: float - speed_kmph: float - - -class MyWebhookConfig(GenericJsonTransformConfig): - custom_setting: str - another_custom_setting: bool - - -``` -```python -# webhooks/handlers.py -import json -import pyjq -from app.services.activity_logger import webhook_activity_logger -from app.services.gundi import send_observations_to_gundi -from .configurations import MyWebhookPayload, MyWebhookConfig - - -@webhook_activity_logger() -async def webhook_handler(payload: MyWebhookPayload, integration=None, webhook_config: MyWebhookConfig = None): - # Sample implementation using the JQ language to transform the incoming data - input_data = json.loads(payload.json()) - transformation_rules = webhook_config.jq_filter - transformed_data = pyjq.all(transformation_rules, input_data) - print(f"Transformed Data:\n: {transformed_data}") - # webhook_config.output_type == "obv": - response = await send_observations_to_gundi( - observations=transformed_data, - integration_id=integration.id - ) - data_points_qty = len(transformed_data) if isinstance(transformed_data, list) else 1 - print(f"{data_points_qty} data point(s) sent to Gundi.") - return {"data_points_qty": data_points_qty} -``` - - -### Dynamic Payload Schema with JSON Transformations -You can combine the dynamic schema and JSON transformations by annotating the payload arg with the `GenericJsonPayload` model, and annotating the webhook_config arg with the `GenericJsonTransformConfig` models or their subclasses. Then you can define the schema and the JQ filter in the Gundi portal, and the framework will build the Pydantic model on runtime based on that schema, to validate and parse the incoming data, and apply a [JQ filter](https://jqlang.github.io/jq/manual/#basic-filters) to transform the data. 
-```python -# webhooks/handlers.py -import json -import pyjq -from app.services.activity_logger import webhook_activity_logger -from app.services.gundi import send_observations_to_gundi -from .core import GenericJsonPayload, GenericJsonTransformConfig - - -@webhook_activity_logger() -async def webhook_handler(payload: GenericJsonPayload, integration=None, webhook_config: GenericJsonTransformConfig = None): - # Sample implementation using the JQ language to transform the incoming data - input_data = json.loads(payload.json()) - filter_expression = webhook_config.jq_filter.replace("\n", ""). replace(" ", "") - transformed_data = pyjq.all(filter_expression, input_data) - print(f"Transformed Data:\n: {transformed_data}") - # webhook_config.output_type == "obv": - response = await send_observations_to_gundi( - observations=transformed_data, - integration_id=integration.id - ) - data_points_qty = len(transformed_data) if isinstance(transformed_data, list) else 1 - print(f"{data_points_qty} data point(s) sent to Gundi.") - return {"data_points_qty": data_points_qty} -``` - - -### Hex string payloads -If you expect to receive payloads containing binary data encoded as hex strings (e.g. ), you can use StructHexString, HexStringPayload and HexStringConfig which facilitate validation and parsing of hex strings. The user will define the name of the field containing the hex string and will define the structure of the data in the hex string, using Gundi. -The fields are defined in the hex_format attribute of the configuration, following the [struct module format string syntax](https://docs.python.org/3/library/struct.html#format-strings). The fields will be extracted from the hex string and made available as sub-fields in the data field of the payload. THey will be extracted in the order they are defined in the hex_format attribute. 
-```python -# webhooks/configurations.py -from app.services.utils import StructHexString -from .core import HexStringConfig, WebhookConfiguration - - -# Expected data: {"device": "BF170A","data": "6881631900003c20020000c3", "time": "1638201313", "type": "bove"} -class MyWebhookPayload(HexStringPayload, WebhookPayload): - device: str - time: str - type: str - data: StructHexString - - -class MyWebhookConfig(HexStringConfig, WebhookConfiguration): - custom_setting: str - another_custom_setting: bool - -""" -Sample configuration in Gundi: -{ - "hex_data_field": "data", - "hex_format": { - "byte_order": ">", - "fields": [ - { - "name": "start_bit", - "format": "B", - "output_type": "int" - }, - { - "name": "v", - "format": "I" - }, - { - "name": "interval", - "format": "H", - "output_type": "int" - }, - { - "name": "meter_state_1", - "format": "B" - }, - { - "name": "meter_state_2", - "format": "B", - "bit_fields": [ - { - "name": "meter_batter_alarm", - "end_bit": 0, - "start_bit": 0, - "output_type": "bool" - }, - { - "name": "empty_pipe_alarm", - "end_bit": 1, - "start_bit": 1, - "output_type": "bool" - }, - { - "name": "reverse_flow_alarm", - "end_bit": 2, - "start_bit": 2, - "output_type": "bool" - }, - { - "name": "over_range_alarm", - "end_bit": 3, - "start_bit": 3, - "output_type": "bool" - }, - { - "name": "temp_alarm", - "end_bit": 4, - "start_bit": 4, - "output_type": "bool" - }, - { - "name": "ee_error", - "end_bit": 5, - "start_bit": 5, - "output_type": "bool" - }, - { - "name": "transduce_in_error", - "end_bit": 6, - "start_bit": 6, - "output_type": "bool" - }, - { - "name": "transduce_out_error", - "end_bit": 7, - "start_bit": 7, - "output_type": "bool" - }, - { - "name": "transduce_out_error", - "end_bit": 7, - "start_bit": 7, - "output_type": "bool" - } - ] - }, - { - "name": "r1", - "format": "B", - "output_type": "int" - }, - { - "name": "r2", - "format": "B", - "output_type": "int" - }, - { - "name": "crc", - "format": "B" - } - ] - } -} -""" -# The 
data extracted from the hex string will be made available as new sub-fields as follows: -""" -{ - "device": "AB1234", - "time": "1638201313", - "type": "bove", - "data": { - "value": "6881631900003c20020000c3", - "format_spec": ">BIHBBBBB", - "unpacked_data": { - "start_bit": 104, - "v": 1663873, - "interval": 15360, - "meter_state_1": 32, - "meter_state_2": 2, - "r1": 0, - "r2": 0, - "crc": 195, - "meter_batter_alarm": True, - "empty_pipe_alarm": True, - "reverse_flow_alarm": False, - "over_range_alarm": False, - "temp_alarm": False, - "ee_error": False, - "transduce_in_error": False, - "transduce_out_error": False - } - } -} -""" -``` -Notice: This can also be combined with Dynamic Schema and JSON Transformations. In that case the hex string will be parsed first, adn then the JQ filter can be applied to the extracted data. - -### Custom UI for configurations (ui schema) -It's possible to customize how the forms for configurations are displayed in the Gundi portal. -To do that, use `FieldWithUIOptions` in your models. The `UIOptions` and `GlobalUISchemaOptions` will allow you to customize the appearance of the fields in the portal by setting any of the ["ui schema"](https://rjsf-team.github.io/react-jsonschema-form/docs/api-reference/uiSchema) supported options. 
- -```python -# Example -import pydantic -from app.services.utils import FieldWithUIOptions, GlobalUISchemaOptions, UIOptions -from .core import AuthActionConfiguration, PullActionConfiguration - - -class AuthenticateConfig(AuthActionConfiguration): - email: str # This will be rendered with default widget and settings - password: pydantic.SecretStr = FieldWithUIOptions( - ..., - format="password", - title="Password", - description="Password for the Global Forest Watch account.", - ui_options=UIOptions( - widget="password", # This will be rendered as a password input hiding the input - ) - ) - ui_global_options = GlobalUISchemaOptions( - order=["email", "password"], # This will set the order of the fields in the form - ) - - -class MyPullActionConfiguration(PullActionConfiguration): - lookback_days: int = FieldWithUIOptions( - 10, - le=30, - ge=1, - title="Data lookback days", - description="Number of days to look back for data.", - ui_options=UIOptions( - widget="range", # This will be rendered ad a range slider - ) - ) - force_fetch: bool = FieldWithUIOptions( - False, - title="Force fetch", - description="Force fetch even if in a quiet period.", - ui_options=UIOptions( - widget="radio", # This will be rendered as a radio button - ) - ) - ui_global_options = GlobalUISchemaOptions( - order=[ - "lookback_days", - "force_fetch", - ], - ) -``` +# gundi-integration-ats-v2 +Gundi Action Runner for [Advanced Telemetry Systems](https://www.atsdocs.tech/). 
From e87a5f568a0615c0bead16564e61411ba3cc3c13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Victor=20H=2E=20Garc=C3=ADa?= Date: Fri, 22 Nov 2024 12:52:24 -0600 Subject: [PATCH 03/16] Initial Release --- app/actions/client.py | 256 ++++++++++++++++++++++++++++++++++ app/actions/configurations.py | 13 ++ app/actions/handlers.py | 197 ++++++++++++++++++++++++++ requirements.in | 1 + requirements.txt | 126 +++++++++++++++++ 5 files changed, 593 insertions(+) create mode 100644 app/actions/client.py create mode 100644 requirements.txt diff --git a/app/actions/client.py b/app/actions/client.py new file mode 100644 index 0000000..7f327c4 --- /dev/null +++ b/app/actions/client.py @@ -0,0 +1,256 @@ +import logging +import httpx +import pydantic +import xmltodict + +from datetime import datetime +from app.actions.configurations import ( + AuthenticateConfig, + PullObservationsConfig +) +from app.services.errors import ConfigurationNotFound +from app.services.utils import find_config_for_action +from xml.parsers.expat import ExpatError +from typing import List, Optional + + +logger = logging.getLogger(__name__) + + +# Pydantic models +class DataResponse(pydantic.BaseModel): + ats_serial_num: str = pydantic.Field(..., alias="AtsSerialNum") + longitude: float = pydantic.Field(None, alias='Longitude', ge=-180.0, le=360.0) + latitude: float = pydantic.Field(None, alias='Latitude', ge=-90.0, le=90.0) + date_year_and_julian: datetime = pydantic.Field(..., alias="DateYearAndJulian") + num_sats: Optional[str] = pydantic.Field(None, alias='NumSats') + hdop: Optional[str] = pydantic.Field(None, alias='Hdop') + fix_time: Optional[str] = pydantic.Field(None, alias='FixTime') + dimension: Optional[str] = pydantic.Field(None, alias='Dimension') + activity: Optional[str] = pydantic.Field(None, alias='Activity') + temperature: Optional[str] = pydantic.Field(None, alias='Temperature') + mortality: Optional[bool] = pydantic.Field(None, alias='Mortality') + low_batt_voltage: Optional[bool] = 
pydantic.Field(None, alias='LowBattVoltage') + + class Config: + allow_population_by_field_name = True + + """ + @validator('DateYearAndJulian') + def parse_datetime(cls, v): + if not v.tzinfo: + return v.replace(tzinfo=timezone.utc) + return v + """ + + +class TransmissionsResponse(pydantic.BaseModel): + date_sent: datetime = pydantic.Field(..., alias="DateSent") + collar_serial_num: str = pydantic.Field(..., alias="CollarSerialNum") + number_fixes: Optional[int] = pydantic.Field(None, alias='NumberFixes') + batt_voltage: Optional[float] = pydantic.Field(None, alias='BattVoltage') + mortality: Optional[str] = pydantic.Field(None, alias='Mortality') + break_off: Optional[str] = pydantic.Field(None, alias='BreakOff') + sat_errors: Optional[str] = pydantic.Field(None, alias='SatErrors') + year_base: Optional[str] = pydantic.Field(None, alias='YearBase') + day_base: Optional[str] = pydantic.Field(None, alias='DayBase') + gmt_offset: Optional[int] = pydantic.Field(None, alias='GmtOffset') + low_batt_voltage: Optional[bool] = pydantic.Field(None, alias='LowBattVoltage') + + class Config: + allow_population_by_field_name = True + + """ + @validator('DateSent') + def parse_datetime(cls, v): + if not v.tzinfo: + return v.replace(tzinfo=timezone.utc) + return v + """ + + +class PullObservationsDataResponse(pydantic.BaseModel): + vehicles: List[DataResponse] + + @pydantic.validator("vehicles", pre=True) + def validate_vehicles(cls, val): + if isinstance(val, list): + return val + # val is not a valid list, return an empty list instead + return [] + + +class PullObservationsTransmissionsResponse(pydantic.BaseModel): + transmissions: List[TransmissionsResponse] + + @pydantic.validator("transmissions", pre=True) + def validate_transmissions(cls, val): + if isinstance(val, list): + return val + # val is not a valid list, return an empty list instead + return [] + + +class PullObservationsBadXMLException(Exception): + def __init__(self, error: Exception, message: str, 
class PullObservationsBadXMLException(Exception):
    """Raised when an ATS endpoint returns XML that cannot be parsed.

    Carries the original error plus an HTTP-ish status code (422 by
    default) so callers can surface a meaningful failure.
    """

    def __init__(self, error: Exception, message: str, status_code=422):
        self.status_code = status_code
        self.message = message
        self.error = error
        super().__init__(f"'{self.status_code}: {self.message}, Error: {self.error}'")


def get_auth_config(integration):
    """Return the parsed 'auth' action configuration for an integration.

    Raises ConfigurationNotFound when the integration has no auth settings
    configured in the portal.
    """
    auth_config = find_config_for_action(
        configurations=integration.configurations,
        action_id="auth"
    )
    if not auth_config:
        raise ConfigurationNotFound(
            f"Authentication settings for integration {str(integration.id)} "
            f"are missing. Please fix the integration setup in the portal."
        )
    return AuthenticateConfig.parse_obj(auth_config.data)


def get_pull_config(integration):
    """Return the parsed 'pull_observations' action configuration.

    Raises ConfigurationNotFound when the integration has no
    pull_observations settings configured in the portal.
    """
    # Look for the pull_observations settings (endpoints, batch size, etc.)
    pull_config = find_config_for_action(
        configurations=integration.configurations,
        action_id="pull_observations"
    )
    if not pull_config:
        # Fixed copy-pasted message: this error is about the
        # pull_observations settings, not the authentication settings.
        raise ConfigurationNotFound(
            f"Pull observations settings for integration {str(integration.id)} "
            f"are missing. Please fix the integration setup in the portal."
        )
    return PullObservationsConfig.parse_obj(pull_config.data)
def closest_transmission(transmissions, test_date):
    """Return the transmission whose date_sent is closest to test_date.

    Bug fixes versus the original implementation:
    - The original accessed ``t.DateSent`` (the XML alias); pydantic v1
      models expose the *field name* ``date_sent``, so every call raised
      AttributeError.
    - Distances were compared in whole days (``timedelta.days``), which
      truncates sub-day differences and could select the wrong row when
      candidates fall within the same day.

    ``transmissions`` must be non-empty; all ``date_sent`` values and
    ``test_date`` must be consistently naive or consistently tz-aware,
    otherwise the subtraction raises TypeError.
    """
    return min(transmissions, key=lambda t: abs(t.date_sent - test_date))
async def get_transmissions_endpoint_response(config, auth):
    """Fetch and parse the ATS 'transmissions' XML endpoint.

    Returns a list of TransmissionsResponse (possibly empty). Raises
    PullObservationsBadXMLException when the XML cannot be parsed or lacks
    the expected structure; non-2xx responses propagate from
    ``raise_for_status()``.

    Refactor note: the original nested three try/except/else levels and
    repeated the same logging boilerplate in each; flattened here with
    guard clauses and a shared helper. Behavior and messages unchanged.
    """
    endpoint = config.transmissions_endpoint

    def _log_and_raise(msg, error):
        # Log with the attention flag so operators see it in monitoring,
        # then surface a single exception type to the caller.
        logger.exception(
            msg,
            extra={
                "attention_needed": True,
                "endpoint": endpoint,
                "username": auth.username,
            },
        )
        raise PullObservationsBadXMLException(message=msg, error=error)

    async with httpx.AsyncClient(timeout=120) as session:
        response = await session.get(
            endpoint, auth=(auth.username, auth.password.get_secret_value())
        )
        response.raise_for_status()

        try:
            parsed_xml = xmltodict.parse(response.text)
        except (xmltodict.ParsingInterrupted, ExpatError) as e:
            _log_and_raise("Error while parsing XML from 'transmissions' endpoint", e)

        try:
            # ATS wraps the rows in a diffgram envelope.
            diffgram = parsed_xml["DataSet"].get("diffgr:diffgram", {})
            transmissions = diffgram.get("NewDataSet", {})
        except KeyError as e:
            _log_and_raise("Error while parsing 'transmissions' response from XML", e)

        if not transmissions:
            return []

        try:
            parsed_response = PullObservationsTransmissionsResponse.parse_obj(
                {"transmissions": transmissions.get("Table", [])}
            )
        except pydantic.ValidationError as e:
            _log_and_raise(
                "Error while parsing 'PullObservationsTransmissionsResponse' response from XML (data)",
                e,
            )

        return parsed_response.transmissions
class AuthenticateConfig(ActionConfiguration):
    """Credentials for the ATS web service (used as HTTP basic auth)."""
    username: str
    # SecretStr keeps the password masked in logs and serialized output;
    # format="password" renders a masked password input in the portal UI.
    password: pydantic.SecretStr = pydantic.Field(..., format="password")


class PullObservationsConfig(ActionConfiguration):
    """Settings for the pull_observations action."""
    # URLs of the ATS XML endpoints for GPS fixes and transmission summaries.
    data_endpoint: str
    transmissions_endpoint: str
    # Event type reported when a collar flags a mortality.
    mortality_event_type: str = "mortality_event"
    # Maximum observations per batch sent to the Sensors API.
    observations_per_request: int = 200
async def filter_and_transform(vehicles, transmissions, integration_id, action_id):
    """Convert ATS vehicle fixes into Gundi observation dicts.

    - Localizes each fix with the collar's GMT offset (taken from its
      transmissions; 0 when unknown).
    - Skips collars whose reported offset is invalid (|offset| > 24).
    - Drops fixes that are not newer than the device's persisted state.

    Returns the list of transformed observations (possibly empty).
    """
    # Fields promoted to top-level observation keys; everything else goes
    # into "additional".
    main_data = ["ats_serial_num", "date_year_and_julian", "latitude", "longitude"]

    # FOR NOW, collars with an invalid GMT offset are skipped entirely.
    vehicles_to_skip = []

    # Resolve one GMT offset per collar serial number.
    gmt_offsets = {}
    for serial_num in {v.ats_serial_num for v in vehicles}:
        offsets = {t.gmt_offset for t in transmissions if t.collar_serial_num == serial_num}
        if not offsets:
            # No transmissions for this collar (e.g. a new collar): assume UTC.
            gmt_offsets[serial_num] = 0
            continue
        # Arbitrary pick when a collar reports several distinct offsets,
        # matching the original behavior.
        gmt_offset = next(iter(offsets))
        if gmt_offset is None:
            gmt_offsets[serial_num] = 0
        elif abs(gmt_offset) <= 24:
            gmt_offsets[serial_num] = gmt_offset
        else:
            vehicles_to_skip.append(serial_num)
            logger.warning(
                f"GMT offset invalid for device '{serial_num}' value '{gmt_offset}'",
                extra={
                    'needs_attention': True,
                    'integration_id': integration_id,
                    'action_id': action_id
                }
            )

    transformed_data = []
    for vehicle in vehicles:
        if vehicle.ats_serial_num in vehicles_to_skip:
            continue

        # Localize the fix timestamp with the collar's GMT offset.
        time_delta = datetime.timedelta(hours=gmt_offsets.get(vehicle.ats_serial_num))
        vehicle.date_year_and_julian = vehicle.date_year_and_julian.replace(
            tzinfo=datetime.timezone(time_delta)
        )

        # Compare against the last observation persisted for this device.
        current_state = await state_manager.get_state(
            integration_id,
            action_id,
            vehicle.ats_serial_num
        )
        if current_state:
            # The state value was produced by str() of a tz-aware datetime
            # (e.g. '2024-01-01 00:00:00+00:00'). fromisoformat parses that
            # with or without microseconds, unlike the previous fixed
            # strptime format '%Y-%m-%d %H:%M:%S%z' which raised ValueError
            # whenever the timestamp carried microseconds.
            latest_device_timestamp = datetime.datetime.fromisoformat(
                current_state.get("latest_device_timestamp")
            )
            if vehicle.date_year_and_julian <= latest_device_timestamp:
                # Data is not new: skip transformation.
                logger.info(
                    f"Excluding device ID '{vehicle.ats_serial_num}' obs '{vehicle.date_year_and_julian}'"
                )
                continue

        transformed_data.append({
            "source": vehicle.ats_serial_num,
            "source_name": vehicle.ats_serial_num,
            'type': 'tracking-device',
            "recorded_at": vehicle.date_year_and_julian,
            "location": {
                "lat": vehicle.latitude,
                "lon": vehicle.longitude
            },
            "additional": {
                key: value for key, value in vehicle.dict().items()
                if key not in main_data and value is not None
            }
        })

    return transformed_data
@activity_logger()
async def action_pull_observations(integration, action_config: client.PullObservationsConfig):
    """Pull ATS observations and forward them to Gundi.

    Flow: fetch vehicle fixes (with retries on HTTP errors), fetch the
    matching transmissions, filter/transform them, then send the result to
    the Sensors API in batches, persisting per-device state after each
    batch Gundi accepts.

    Returns a summary dict; re-raises httpx.HTTPError once retries are
    exhausted so the activity logger records the failure.
    """
    logger.info(
        f"Executing pull_observations action with integration {integration} and action_config {action_config}..."
    )
    observations_extracted = []
    try:
        # Hoisted out of the retry loop: the configurations do not change
        # between attempts, so re-parsing them on each retry was redundant.
        pull_config = client.get_pull_config(integration)
        auth_config = client.get_auth_config(integration)

        async for attempt in stamina.retry_context(
            on=httpx.HTTPError,
            attempts=3,
            wait_initial=datetime.timedelta(seconds=10),
            wait_max=datetime.timedelta(seconds=10),
        ):
            with attempt:
                vehicles = await client.get_data_endpoint_response(
                    config=pull_config,
                    auth=auth_config
                )

        if not vehicles:
            logger.warning("No observations were pulled.")
            return {"message": "No observations pulled"}

        transmissions = await client.get_transmissions_endpoint_response(
            config=pull_config,
            auth=auth_config
        )
        if not transmissions:
            logger.warning("No transmissions were pulled.")
            return {"message": "No transmissions pulled"}

        logger.info("Observations pulled with success.")

        transformed_data = await filter_and_transform(
            vehicles,
            transmissions,
            str(integration.id),
            "pull_observations"
        )

        if transformed_data:
            # Send transformed data to Sensors API V2 in fixed-size batches.
            def generate_batches(iterable, n=action_config.observations_per_request):
                for i in range(0, len(iterable), n):
                    yield iterable[i: i + n]

            for i, batch in enumerate(generate_batches(transformed_data)):
                async for attempt in stamina.retry_context(
                    on=httpx.HTTPError,
                    attempts=3,
                    wait_initial=datetime.timedelta(seconds=10),
                    wait_max=datetime.timedelta(seconds=10),
                ):
                    with attempt:
                        try:
                            logger.info(
                                f'Sending observations batch #{i}: {len(batch)} observations'
                            )
                            response = await gundi_tools.send_observations_to_gundi(
                                observations=batch,
                                integration_id=integration.id
                            )
                        except httpx.HTTPError as e:
                            msg = f'Sensors API returned error for integration_id: {str(integration.id)}. Exception: {e}'
                            logger.exception(
                                msg,
                                extra={
                                    'needs_attention': True,
                                    'integration_id': str(integration.id),
                                    'action_id': "pull_observations"
                                }
                            )
                            raise
                        else:
                            if response:
                                # Persist per-device state only after Gundi
                                # accepted the batch, so a failed send is
                                # re-attempted on the next run.
                                for vehicle in batch:
                                    state = {
                                        "latest_device_timestamp": vehicle.get("recorded_at"),
                                        "mortality": vehicle["additional"].get("mortality")
                                    }
                                    await state_manager.set_state(
                                        str(integration.id),
                                        "pull_observations",
                                        state,
                                        vehicle.get("source"),
                                    )
                                observations_extracted.append(response)
        return {'observations_extracted': observations_extracted or []}

    except httpx.HTTPError:
        logger.exception(
            "pull_observations action returned error.",
            extra={
                "integration_id": str(integration.id),
                "attention_needed": True
            }
        )
        raise
+fastapi==0.103.2 + # via -r requirements-base.in +frozenlist==1.4.1 + # via + # aiohttp + # aiosignal +gcloud-aio-auth==5.2.0 + # via gcloud-aio-pubsub +gcloud-aio-pubsub==6.0.0 + # via -r requirements-base.in +gundi-client-v2==2.3.8 + # via -r requirements-base.in +gundi-core==1.7.1 + # via + # -r requirements-base.in + # gundi-client-v2 +h11==0.14.0 + # via + # httpcore + # uvicorn +httpcore==0.17.3 + # via httpx +httpx==0.24.1 + # via + # gundi-client-v2 + # respx +idna==3.6 + # via + # anyio + # httpx + # yarl +marshmallow==3.20.2 + # via environs +multidict==6.0.5 + # via + # aiohttp + # yarl +packaging==23.2 + # via marshmallow +prometheus-client==0.20.0 + # via gcloud-aio-pubsub +pycparser==2.21 + # via cffi +pydantic==1.10.19 + # via + # -r requirements-base.in + # fastapi + # gundi-client-v2 + # gundi-core +pyjq==2.6.0 + # via -r requirements-base.in +pyjwt==2.8.0 + # via gcloud-aio-auth +python-dotenv==1.0.1 + # via environs +python-json-logger==2.0.7 + # via -r requirements-base.in +redis==5.0.1 + # via -r requirements-base.in +respx==0.20.2 + # via gundi-client-v2 +sniffio==1.3.0 + # via + # anyio + # httpcore + # httpx +stamina==23.2.0 + # via -r requirements-base.in +starlette==0.27.0 + # via fastapi +tenacity==8.2.3 + # via stamina +typing-extensions==4.9.0 + # via + # fastapi + # pydantic + # uvicorn +uvicorn==0.23.2 + # via -r requirements-base.in +xmltodict==0.13.0 + # via -r requirements.in +yarl==1.9.4 + # via aiohttp From 575124a4c85d6be31c6a2b547fdb05d8106ca0f5 Mon Sep 17 00:00:00 2001 From: "Victor H. 
Garcia" Date: Fri, 22 Nov 2024 17:51:33 -0600 Subject: [PATCH 04/16] Revert "Initial release" --- README.md | 461 +++++++++++++++++++++++++++++++++- app/actions/client.py | 256 ------------------- app/actions/configurations.py | 13 - app/actions/handlers.py | 197 --------------- requirements.in | 1 - requirements.txt | 126 ---------- 6 files changed, 459 insertions(+), 595 deletions(-) delete mode 100644 app/actions/client.py delete mode 100644 requirements.txt diff --git a/README.md b/README.md index 9d41dec..8f95e02 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,459 @@ -# gundi-integration-ats-v2 -Gundi Action Runner for [Advanced Telemetry Systems](https://www.atsdocs.tech/). +# gundi-integration-action-runner +Template repo for integration in Gundi v2. + +## Usage +- Fork this repo +- Implement your own actions in `actions/handlers.py` +- Define configurations needed for your actions in `action/configurations.py` +- Or implement a webhooks handler in `webhooks/handlers.py` +- and define configurations needed for your webhooks in `webhooks/configurations.py` +- Optionally, add the `@activity_logger()` decorator in actions to log common events which you can later see in the portal: + - Action execution started + - Action execution complete + - Error occurred during action execution +- Optionally, add the `@webhook_activity_logger()` decorator in the webhook handler to log common events which you can later see in the portal: + - Webhook execution started + - Webhook execution complete + - Error occurred during webhook execution +- Optionally, use `log_action_activity()` or `log_webhook_activity()` to log custom messages which you can later see in the portal + + +## Action Examples: + +```python +# actions/configurations.py +from .core import PullActionConfiguration + + +class PullObservationsConfiguration(PullActionConfiguration): + lookback_days: int = 10 + + +``` + +```python +# actions/handlers.py +from app.services.activity_logger import activity_logger, 
log_activity +from app.services.gundi import send_observations_to_gundi +from gundi_core.events import LogLevel +from .configurations import PullObservationsConfiguration + + +@activity_logger() +async def action_pull_observations(integration, action_config: PullObservationsConfiguration): + + # Add your business logic to extract data here... + + # Optionally, log a custom messages to be shown in the portal + await log_activity( + integration_id=integration.id, + action_id="pull_observations", + level=LogLevel.INFO, + title="Extracting observations with filter..", + data={"start_date": "2024-01-01", "end_date": "2024-01-31"}, + config_data=action_config.dict() + ) + + # Normalize the extracted data into a list of observations following to the Gundi schema: + observations = [ + { + "source": "collar-xy123", + "type": "tracking-device", + "subject_type": "puma", + "recorded_at": "2024-01-24 09:03:00-0300", + "location": { + "lat": -51.748, + "lon": -72.720 + }, + "additional": { + "speed_kmph": 10 + } + } + ] + + # Send the extracted data to Gundi + await send_observations_to_gundi(observations=observations, integration_id=integration.id) + + # The result will be recorded in the portal if using the activity_logger decorator + return {"observations_extracted": 10} +``` + + +## Webhooks Usage: +This framework provides a way to handle incoming webhooks from external services. You can define a handler function in `webhooks/handlers.py` and define the expected payload schema and configurations in `webhooks/configurations.py`. Several base classes are provided in `webhooks/core.py` to help you define the expected schema and configurations. + + +### Fixed Payload Schema +If you expect to receive data with a fixed schema, you can define a Pydantic model for the payload and configurations. These models will be used for validating and parsing the incoming data. 
+```python +# webhooks/configurations.py +import pydantic +from .core import WebhookPayload, WebhookConfiguration + + +class MyWebhookPayload(WebhookPayload): + device_id: str + timestamp: str + lat: float + lon: float + speed_kmph: float + + +class MyWebhookConfig(WebhookConfiguration): + custom_setting: str + another_custom_setting: bool + +``` +### Webhook Handler +Your webhook handler function must be named webhook_handler and it must accept the payload and config as arguments. The payload will be validated and parsed using the annotated Pydantic model. The config will be validated and parsed using the annotated Pydantic model. You can then implement your business logic to extract the data and send it to Gundi. +```python +# webhooks/handlers.py +from app.services.activity_logger import webhook_activity_logger +from app.services.gundi import send_observations_to_gundi +from .configurations import MyWebhookPayload, MyWebhookConfig + + +@webhook_activity_logger() +async def webhook_handler(payload: MyWebhookPayload, integration=None, webhook_config: MyWebhookConfig = None): + # Implement your custom logic to process the payload here... + + # If the request is related to an integration, you can use the integration object to access the integration's data + + # Normalize the extracted data into a list of observations following to the Gundi schema: + transformed_data = [ + { + "source": payload.device_id, + "type": "tracking-device", + "recorded_at": payload.timestamp, + "location": { + "lat": payload.lat, + "lon": payload.lon + }, + "additional": { + "speed_kmph": payload.speed_kmph + } + } + ] + await send_observations_to_gundi( + observations=transformed_data, + integration_id=integration.id + ) + + return {"observations_extracted": 1} +``` + +### Dynamic Payload Schema +If you expect to receive data with different schemas, you can define a schema per integration using JSON schema. 
To do that, annotate the payload arg with the `GenericJsonPayload` model, and annotate the webhook_config arg with the `DynamicSchemaConfig` model or a subclass. Then you can define the schema in the Gundi portal, and the framework will build the Pydantic model on runtime based on that schema, to validate and parse the incoming data. +```python +# webhooks/configurations.py +import pydantic +from .core import DynamicSchemaConfig + + +class MyWebhookConfig(DynamicSchemaConfig): + custom_setting: str + another_custom_setting: bool + +``` +```python +# webhooks/handlers.py +from app.services.activity_logger import webhook_activity_logger +from .core import GenericJsonPayload +from .configurations import MyWebhookConfig + + +@webhook_activity_logger() +async def webhook_handler(payload: GenericJsonPayload, integration=None, webhook_config: MyWebhookConfig = None): + # Implement your custom logic to process the payload here... + return {"observations_extracted": 1} +``` + + +### Simple JSON Transformations +For simple JSON to JSON transformations, you can use the [JQ language](https://jqlang.github.io/jq/manual/#basic-filters) to transform the incoming data. To do that, annotate the webhook_config arg with the `GenericJsonTransformConfig` model or a subclass. Then you can specify the `jq_filter` and the `output_type` (`ev` for event or `obv` for observation) in Gundi. 
+```python +# webhooks/configurations.py +import pydantic +from .core import WebhookPayload, GenericJsonTransformConfig + + +class MyWebhookPayload(WebhookPayload): + device_id: str + timestamp: str + lat: float + lon: float + speed_kmph: float + + +class MyWebhookConfig(GenericJsonTransformConfig): + custom_setting: str + another_custom_setting: bool + + +``` +```python +# webhooks/handlers.py +import json +import pyjq +from app.services.activity_logger import webhook_activity_logger +from app.services.gundi import send_observations_to_gundi +from .configurations import MyWebhookPayload, MyWebhookConfig + + +@webhook_activity_logger() +async def webhook_handler(payload: MyWebhookPayload, integration=None, webhook_config: MyWebhookConfig = None): + # Sample implementation using the JQ language to transform the incoming data + input_data = json.loads(payload.json()) + transformation_rules = webhook_config.jq_filter + transformed_data = pyjq.all(transformation_rules, input_data) + print(f"Transformed Data:\n: {transformed_data}") + # webhook_config.output_type == "obv": + response = await send_observations_to_gundi( + observations=transformed_data, + integration_id=integration.id + ) + data_points_qty = len(transformed_data) if isinstance(transformed_data, list) else 1 + print(f"{data_points_qty} data point(s) sent to Gundi.") + return {"data_points_qty": data_points_qty} +``` + + +### Dynamic Payload Schema with JSON Transformations +You can combine the dynamic schema and JSON transformations by annotating the payload arg with the `GenericJsonPayload` model, and annotating the webhook_config arg with the `GenericJsonTransformConfig` models or their subclasses. Then you can define the schema and the JQ filter in the Gundi portal, and the framework will build the Pydantic model on runtime based on that schema, to validate and parse the incoming data, and apply a [JQ filter](https://jqlang.github.io/jq/manual/#basic-filters) to transform the data. 
+```python +# webhooks/handlers.py +import json +import pyjq +from app.services.activity_logger import webhook_activity_logger +from app.services.gundi import send_observations_to_gundi +from .core import GenericJsonPayload, GenericJsonTransformConfig + + +@webhook_activity_logger() +async def webhook_handler(payload: GenericJsonPayload, integration=None, webhook_config: GenericJsonTransformConfig = None): + # Sample implementation using the JQ language to transform the incoming data + input_data = json.loads(payload.json()) + filter_expression = webhook_config.jq_filter.replace("\n", ""). replace(" ", "") + transformed_data = pyjq.all(filter_expression, input_data) + print(f"Transformed Data:\n: {transformed_data}") + # webhook_config.output_type == "obv": + response = await send_observations_to_gundi( + observations=transformed_data, + integration_id=integration.id + ) + data_points_qty = len(transformed_data) if isinstance(transformed_data, list) else 1 + print(f"{data_points_qty} data point(s) sent to Gundi.") + return {"data_points_qty": data_points_qty} +``` + + +### Hex string payloads +If you expect to receive payloads containing binary data encoded as hex strings (e.g. ), you can use StructHexString, HexStringPayload and HexStringConfig which facilitate validation and parsing of hex strings. The user will define the name of the field containing the hex string and will define the structure of the data in the hex string, using Gundi. +The fields are defined in the hex_format attribute of the configuration, following the [struct module format string syntax](https://docs.python.org/3/library/struct.html#format-strings). The fields will be extracted from the hex string and made available as sub-fields in the data field of the payload. THey will be extracted in the order they are defined in the hex_format attribute. 
+```python +# webhooks/configurations.py +from app.services.utils import StructHexString +from .core import HexStringConfig, WebhookConfiguration + + +# Expected data: {"device": "BF170A","data": "6881631900003c20020000c3", "time": "1638201313", "type": "bove"} +class MyWebhookPayload(HexStringPayload, WebhookPayload): + device: str + time: str + type: str + data: StructHexString + + +class MyWebhookConfig(HexStringConfig, WebhookConfiguration): + custom_setting: str + another_custom_setting: bool + +""" +Sample configuration in Gundi: +{ + "hex_data_field": "data", + "hex_format": { + "byte_order": ">", + "fields": [ + { + "name": "start_bit", + "format": "B", + "output_type": "int" + }, + { + "name": "v", + "format": "I" + }, + { + "name": "interval", + "format": "H", + "output_type": "int" + }, + { + "name": "meter_state_1", + "format": "B" + }, + { + "name": "meter_state_2", + "format": "B", + "bit_fields": [ + { + "name": "meter_batter_alarm", + "end_bit": 0, + "start_bit": 0, + "output_type": "bool" + }, + { + "name": "empty_pipe_alarm", + "end_bit": 1, + "start_bit": 1, + "output_type": "bool" + }, + { + "name": "reverse_flow_alarm", + "end_bit": 2, + "start_bit": 2, + "output_type": "bool" + }, + { + "name": "over_range_alarm", + "end_bit": 3, + "start_bit": 3, + "output_type": "bool" + }, + { + "name": "temp_alarm", + "end_bit": 4, + "start_bit": 4, + "output_type": "bool" + }, + { + "name": "ee_error", + "end_bit": 5, + "start_bit": 5, + "output_type": "bool" + }, + { + "name": "transduce_in_error", + "end_bit": 6, + "start_bit": 6, + "output_type": "bool" + }, + { + "name": "transduce_out_error", + "end_bit": 7, + "start_bit": 7, + "output_type": "bool" + }, + { + "name": "transduce_out_error", + "end_bit": 7, + "start_bit": 7, + "output_type": "bool" + } + ] + }, + { + "name": "r1", + "format": "B", + "output_type": "int" + }, + { + "name": "r2", + "format": "B", + "output_type": "int" + }, + { + "name": "crc", + "format": "B" + } + ] + } +} +""" +# The 
data extracted from the hex string will be made available as new sub-fields as follows: +""" +{ + "device": "AB1234", + "time": "1638201313", + "type": "bove", + "data": { + "value": "6881631900003c20020000c3", + "format_spec": ">BIHBBBBB", + "unpacked_data": { + "start_bit": 104, + "v": 1663873, + "interval": 15360, + "meter_state_1": 32, + "meter_state_2": 2, + "r1": 0, + "r2": 0, + "crc": 195, + "meter_batter_alarm": True, + "empty_pipe_alarm": True, + "reverse_flow_alarm": False, + "over_range_alarm": False, + "temp_alarm": False, + "ee_error": False, + "transduce_in_error": False, + "transduce_out_error": False + } + } +} +""" +``` +Notice: This can also be combined with Dynamic Schema and JSON Transformations. In that case the hex string will be parsed first, adn then the JQ filter can be applied to the extracted data. + +### Custom UI for configurations (ui schema) +It's possible to customize how the forms for configurations are displayed in the Gundi portal. +To do that, use `FieldWithUIOptions` in your models. The `UIOptions` and `GlobalUISchemaOptions` will allow you to customize the appearance of the fields in the portal by setting any of the ["ui schema"](https://rjsf-team.github.io/react-jsonschema-form/docs/api-reference/uiSchema) supported options. 
+ +```python +# Example +import pydantic +from app.services.utils import FieldWithUIOptions, GlobalUISchemaOptions, UIOptions +from .core import AuthActionConfiguration, PullActionConfiguration + + +class AuthenticateConfig(AuthActionConfiguration): + email: str # This will be rendered with default widget and settings + password: pydantic.SecretStr = FieldWithUIOptions( + ..., + format="password", + title="Password", + description="Password for the Global Forest Watch account.", + ui_options=UIOptions( + widget="password", # This will be rendered as a password input hiding the input + ) + ) + ui_global_options = GlobalUISchemaOptions( + order=["email", "password"], # This will set the order of the fields in the form + ) + + +class MyPullActionConfiguration(PullActionConfiguration): + lookback_days: int = FieldWithUIOptions( + 10, + le=30, + ge=1, + title="Data lookback days", + description="Number of days to look back for data.", + ui_options=UIOptions( + widget="range", # This will be rendered ad a range slider + ) + ) + force_fetch: bool = FieldWithUIOptions( + False, + title="Force fetch", + description="Force fetch even if in a quiet period.", + ui_options=UIOptions( + widget="radio", # This will be rendered as a radio button + ) + ) + ui_global_options = GlobalUISchemaOptions( + order=[ + "lookback_days", + "force_fetch", + ], + ) +``` diff --git a/app/actions/client.py b/app/actions/client.py deleted file mode 100644 index 7f327c4..0000000 --- a/app/actions/client.py +++ /dev/null @@ -1,256 +0,0 @@ -import logging -import httpx -import pydantic -import xmltodict - -from datetime import datetime -from app.actions.configurations import ( - AuthenticateConfig, - PullObservationsConfig -) -from app.services.errors import ConfigurationNotFound -from app.services.utils import find_config_for_action -from xml.parsers.expat import ExpatError -from typing import List, Optional - - -logger = logging.getLogger(__name__) - - -# Pydantic models -class 
DataResponse(pydantic.BaseModel): - ats_serial_num: str = pydantic.Field(..., alias="AtsSerialNum") - longitude: float = pydantic.Field(None, alias='Longitude', ge=-180.0, le=360.0) - latitude: float = pydantic.Field(None, alias='Latitude', ge=-90.0, le=90.0) - date_year_and_julian: datetime = pydantic.Field(..., alias="DateYearAndJulian") - num_sats: Optional[str] = pydantic.Field(None, alias='NumSats') - hdop: Optional[str] = pydantic.Field(None, alias='Hdop') - fix_time: Optional[str] = pydantic.Field(None, alias='FixTime') - dimension: Optional[str] = pydantic.Field(None, alias='Dimension') - activity: Optional[str] = pydantic.Field(None, alias='Activity') - temperature: Optional[str] = pydantic.Field(None, alias='Temperature') - mortality: Optional[bool] = pydantic.Field(None, alias='Mortality') - low_batt_voltage: Optional[bool] = pydantic.Field(None, alias='LowBattVoltage') - - class Config: - allow_population_by_field_name = True - - """ - @validator('DateYearAndJulian') - def parse_datetime(cls, v): - if not v.tzinfo: - return v.replace(tzinfo=timezone.utc) - return v - """ - - -class TransmissionsResponse(pydantic.BaseModel): - date_sent: datetime = pydantic.Field(..., alias="DateSent") - collar_serial_num: str = pydantic.Field(..., alias="CollarSerialNum") - number_fixes: Optional[int] = pydantic.Field(None, alias='NumberFixes') - batt_voltage: Optional[float] = pydantic.Field(None, alias='BattVoltage') - mortality: Optional[str] = pydantic.Field(None, alias='Mortality') - break_off: Optional[str] = pydantic.Field(None, alias='BreakOff') - sat_errors: Optional[str] = pydantic.Field(None, alias='SatErrors') - year_base: Optional[str] = pydantic.Field(None, alias='YearBase') - day_base: Optional[str] = pydantic.Field(None, alias='DayBase') - gmt_offset: Optional[int] = pydantic.Field(None, alias='GmtOffset') - low_batt_voltage: Optional[bool] = pydantic.Field(None, alias='LowBattVoltage') - - class Config: - allow_population_by_field_name = True - - """ - 
@validator('DateSent') - def parse_datetime(cls, v): - if not v.tzinfo: - return v.replace(tzinfo=timezone.utc) - return v - """ - - -class PullObservationsDataResponse(pydantic.BaseModel): - vehicles: List[DataResponse] - - @pydantic.validator("vehicles", pre=True) - def validate_vehicles(cls, val): - if isinstance(val, list): - return val - # val is not a valid list, return an empty list instead - return [] - - -class PullObservationsTransmissionsResponse(pydantic.BaseModel): - transmissions: List[TransmissionsResponse] - - @pydantic.validator("transmissions", pre=True) - def validate_transmissions(cls, val): - if isinstance(val, list): - return val - # val is not a valid list, return an empty list instead - return [] - - -class PullObservationsBadXMLException(Exception): - def __init__(self, error: Exception, message: str, status_code=422): - self.status_code = status_code - self.message = message - self.error = error - super().__init__(f"'{self.status_code}: {self.message}, Error: {self.error}'") - - -def get_auth_config(integration): - # Look for the login credentials, needed for any action - auth_config = find_config_for_action( - configurations=integration.configurations, - action_id="auth" - ) - if not auth_config: - raise ConfigurationNotFound( - f"Authentication settings for integration {str(integration.id)} " - f"are missing. Please fix the integration setup in the portal." - ) - return AuthenticateConfig.parse_obj(auth_config.data) - - -def get_pull_config(integration): - # Look for the login credentials, needed for any action - pull_config = find_config_for_action( - configurations=integration.configurations, - action_id="pull_observations" - ) - if not pull_config: - raise ConfigurationNotFound( - f"Authentication settings for integration {str(integration.id)} " - f"are missing. Please fix the integration setup in the portal." 
- ) - return PullObservationsConfig.parse_obj(pull_config.data) - - -def closest_transmission(transmissions, test_date): - sorted_list = sorted([t.DateSent for t in transmissions]) - previous_date = sorted_list[-1] - for date in sorted_list: - if date >= test_date: - if abs((date - test_date).days) < abs((previous_date - test_date).days): - return [t for t in transmissions if t.DateSent == date][0] - else: - return [t for t in transmissions if t.DateSent == previous_date][0] - previous_date = date - return [t for t in transmissions if t.DateSent == sorted_list[-1]][0] - - -async def get_data_endpoint_response(config, auth): - endpoint = config.data_endpoint - async with httpx.AsyncClient(timeout=120) as session: - response = await session.get(endpoint, auth=(auth.username, auth.password.get_secret_value())) - response.raise_for_status() - try: - parsed_xml = xmltodict.parse(response.text) - except (xmltodict.ParsingInterrupted, ExpatError) as e: - msg = "Error while parsing XML from 'data' endpoint" - logger.exception( - msg, - extra={ - "attention_needed": True, - "endpoint": endpoint, - "username": auth.username - } - ) - raise PullObservationsBadXMLException(message=msg, error=e) - else: - try: - data_xml_tag = parsed_xml["DataSet"].get("diffgr:diffgram", {}) - data = data_xml_tag.get("NewDataSet", {}) - except KeyError as e: - msg = "Error while parsing 'data' response from XML" - logger.exception( - msg, - extra={ - "attention_needed": True, - "endpoint": endpoint, - "username": auth.username - } - ) - raise PullObservationsBadXMLException(message=msg, error=e) - else: - if data: - try: - parsed_response = PullObservationsDataResponse.parse_obj( - {"vehicles": data.get("Table", [])} - ) - except pydantic.ValidationError as e: - msg = "Error while parsing 'PullObservationsDataResponse' response from XML (data)" - logger.exception( - msg, - extra={ - "attention_needed": True, - "endpoint": endpoint, - "username": auth.username - } - ) - raise 
PullObservationsBadXMLException(message=msg, error=e) - else: - response = parsed_response.vehicles - else: - response = [] - - return response - - -async def get_transmissions_endpoint_response(config, auth): - endpoint = config.transmissions_endpoint - async with httpx.AsyncClient(timeout=120) as session: - response = await session.get(endpoint, auth=(auth.username, auth.password.get_secret_value())) - response.raise_for_status() - try: - parsed_xml = xmltodict.parse(response.text) - except (xmltodict.ParsingInterrupted, ExpatError) as e: - msg = "Error while parsing XML from 'transmissions' endpoint" - logger.exception( - msg, - extra={ - "attention_needed": True, - "endpoint": endpoint, - "username": auth.username - } - ) - raise PullObservationsBadXMLException(message=msg, error=e) - else: - try: - transmissions_xml_tag = parsed_xml["DataSet"].get("diffgr:diffgram", {}) - transmissions = transmissions_xml_tag.get("NewDataSet", {}) - except KeyError as e: - msg = "Error while parsing 'transmissions' response from XML" - logger.exception( - msg, - extra={ - "attention_needed": True, - "endpoint": endpoint, - "username": auth.username - } - ) - raise PullObservationsBadXMLException(message=msg, error=e) - else: - if transmissions: - try: - parsed_response = PullObservationsTransmissionsResponse.parse_obj( - {"transmissions": transmissions.get("Table", [])} - ) - except pydantic.ValidationError as e: - msg = "Error while parsing 'PullObservationsTransmissionsResponse' response from XML (data)" - logger.exception( - msg, - extra={ - "attention_needed": True, - "endpoint": endpoint, - "username": auth.username - } - ) - raise PullObservationsBadXMLException(message=msg, error=e) - else: - response = parsed_response.transmissions - else: - response = [] - - return response diff --git a/app/actions/configurations.py b/app/actions/configurations.py index c362154..e69de29 100644 --- a/app/actions/configurations.py +++ b/app/actions/configurations.py @@ -1,13 +0,0 @@ 
-from .core import ActionConfiguration -import pydantic - -class AuthenticateConfig(ActionConfiguration): - username: str - password: pydantic.SecretStr = pydantic.Field(..., format="password") - - -class PullObservationsConfig(ActionConfiguration): - data_endpoint: str - transmissions_endpoint: str - mortality_event_type: str = "mortality_event" - observations_per_request: int = 200 diff --git a/app/actions/handlers.py b/app/actions/handlers.py index 95390b4..e69de29 100644 --- a/app/actions/handlers.py +++ b/app/actions/handlers.py @@ -1,197 +0,0 @@ -import datetime -import httpx -import logging -import stamina -import app.actions.client as client -import app.services.gundi as gundi_tools -from app.services.activity_logger import activity_logger -from app.services.state import IntegrationStateManager - - -logger = logging.getLogger(__name__) - - -state_manager = IntegrationStateManager() - - -async def filter_and_transform(vehicles, transmissions, integration_id, action_id): - transformed_data = [] - main_data = ["ats_serial_num", "date_year_and_julian", "latitude", "longitude"] - - vehicles_to_skip = [] # FOR NOW, THESE VEHICLES WILL BE SKIPPED BECAUSE NO GMT OFFSET SET - - # Get GMT offset per serial num - serial_nums = set([v.ats_serial_num for v in vehicles]) - gmt_offsets = {} - for serial_num in serial_nums: - try: - gmt_offset = next(iter(set([t.gmt_offset for t in transmissions if t.collar_serial_num == serial_num]))) - # check for invalid offsets - if gmt_offset is None: - gmt_offsets[serial_num] = 0 - elif abs(gmt_offset) <= 24: - gmt_offsets[serial_num] = gmt_offset - else: - vehicles_to_skip.append(serial_num) - message = f"GMT offset invalid for device '{serial_num}' value '{gmt_offset}'" - logger.warning( - message, - extra={ - 'needs_attention': True, - 'integration_id': integration_id, - 'action_id': action_id - } - ) - except StopIteration: - # If no offset found, a 0 offset will be set (new collar) - gmt_offsets[serial_num] = 0 - - for vehicle 
in vehicles: - if vehicle.ats_serial_num not in vehicles_to_skip: - # Get GmtOffset for this device - gmt_offset_value = gmt_offsets.get(vehicle.ats_serial_num) - time_delta = datetime.timedelta(hours=gmt_offset_value) - timezone_object = datetime.timezone(time_delta) - - date_year_and_julian_with_tz = vehicle.date_year_and_julian.replace(tzinfo=timezone_object) - - vehicle.date_year_and_julian = date_year_and_julian_with_tz - - # Get current state for the device - current_state = await state_manager.get_state( - integration_id, - action_id, - vehicle.ats_serial_num - ) - - if current_state: - # Compare current state with new data - latest_device_timestamp = datetime.datetime.strptime( - current_state.get("latest_device_timestamp"), - '%Y-%m-%d %H:%M:%S%z' - ) - - if vehicle.date_year_and_julian <= latest_device_timestamp: - # Data is not new, not transform - logger.info( - f"Excluding device ID '{vehicle.ats_serial_num}' obs '{vehicle.date_year_and_julian}'" - ) - continue - - data = { - "source": vehicle.ats_serial_num, - "source_name": vehicle.ats_serial_num, - 'type': 'tracking-device', - "recorded_at": vehicle.date_year_and_julian, - "location": { - "lat": vehicle.latitude, - "lon": vehicle.longitude - }, - "additional": { - key: value for key, value in vehicle.dict().items() - if key not in main_data and value is not None - } - } - transformed_data.append(data) - - return transformed_data - - -@activity_logger() -async def action_pull_observations(integration, action_config: client.PullObservationsConfig): - logger.info( - f"Executing pull_observations action with integration {integration} and action_config {action_config}..." 
- ) - observations_extracted = [] - try: - async for attempt in stamina.retry_context( - on=httpx.HTTPError, - attempts=3, - wait_initial=datetime.timedelta(seconds=10), - wait_max=datetime.timedelta(seconds=10), - ): - with attempt: - vehicles = await client.get_data_endpoint_response( - config=client.get_pull_config(integration), - auth=client.get_auth_config(integration) - ) - - if vehicles: - transmissions = await client.get_transmissions_endpoint_response( - config=client.get_pull_config(integration), - auth=client.get_auth_config(integration) - ) - else: - logger.warning(f"No observations were pulled.") - return {"message": "No observations pulled"} - - if not transmissions: - logger.warning(f"No transmissions were pulled.") - return {"message": "No transmissions pulled"} - - logger.info(f"Observations pulled with success.") - - transformed_data = await filter_and_transform( - vehicles, - transmissions, - str(integration.id), - "pull_observations" - ) - - if transformed_data: - # Send transformed data to Sensors API V2 - def generate_batches(iterable, n=action_config.observations_per_request): - for i in range(0, len(iterable), n): - yield iterable[i: i + n] - - for i, batch in enumerate(generate_batches(transformed_data)): - async for attempt in stamina.retry_context( - on=httpx.HTTPError, - attempts=3, - wait_initial=datetime.timedelta(seconds=10), - wait_max=datetime.timedelta(seconds=10), - ): - with attempt: - try: - logger.info( - f'Sending observations batch #{i}: {len(batch)} observations' - ) - response = await gundi_tools.send_observations_to_gundi( - observations=batch, - integration_id=integration.id - ) - except httpx.HTTPError as e: - msg = f'Sensors API returned error for integration_id: {str(integration.id)}. 
Exception: {e}' - logger.exception( - msg, - extra={ - 'needs_attention': True, - 'integration_id': str(integration.id), - 'action_id': "pull_observations" - } - ) - raise e - else: - if response: - # Update states - for vehicle in batch: - state = { - "latest_device_timestamp": vehicle.get("recorded_at"), - "mortality": vehicle["additional"].get("mortality") - } - await state_manager.set_state( - str(integration.id), - "pull_observations", - state, - vehicle.get("source"), - ) - observations_extracted.append(response) - return {'observations_extracted': observations_extracted or []} - - except httpx.HTTPError as e: - message = f"pull_observations action returned error." - logger.exception(message, extra={ - "integration_id": str(integration.id), - "attention_needed": True - }) - raise e \ No newline at end of file diff --git a/requirements.in b/requirements.in index 2aa6137..973513d 100644 --- a/requirements.in +++ b/requirements.in @@ -1,2 +1 @@ # Add your integration-specific dependencies here -xmltodict \ No newline at end of file diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 3a468c9..0000000 --- a/requirements.txt +++ /dev/null @@ -1,126 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --output-file=requirements.txt requirements-base.in requirements.in -# -aiohttp==3.9.1 - # via gcloud-aio-auth -aiosignal==1.3.1 - # via aiohttp -anyio==3.7.1 - # via - # fastapi - # httpcore - # starlette -async-timeout==4.0.3 - # via - # aiohttp - # redis -attrs==23.2.0 - # via aiohttp -backoff==2.2.1 - # via gcloud-aio-auth -certifi==2024.2.2 - # via - # httpcore - # httpx -cffi==1.16.0 - # via cryptography -chardet==5.2.0 - # via gcloud-aio-auth -click==8.1.7 - # via - # -r requirements-base.in - # uvicorn -cryptography==42.0.3 - # via gcloud-aio-auth -environs==9.5.0 - # via - # -r requirements-base.in - # gundi-client-v2 -exceptiongroup==1.2.2 - # via anyio 
-fastapi==0.103.2 - # via -r requirements-base.in -frozenlist==1.4.1 - # via - # aiohttp - # aiosignal -gcloud-aio-auth==5.2.0 - # via gcloud-aio-pubsub -gcloud-aio-pubsub==6.0.0 - # via -r requirements-base.in -gundi-client-v2==2.3.8 - # via -r requirements-base.in -gundi-core==1.7.1 - # via - # -r requirements-base.in - # gundi-client-v2 -h11==0.14.0 - # via - # httpcore - # uvicorn -httpcore==0.17.3 - # via httpx -httpx==0.24.1 - # via - # gundi-client-v2 - # respx -idna==3.6 - # via - # anyio - # httpx - # yarl -marshmallow==3.20.2 - # via environs -multidict==6.0.5 - # via - # aiohttp - # yarl -packaging==23.2 - # via marshmallow -prometheus-client==0.20.0 - # via gcloud-aio-pubsub -pycparser==2.21 - # via cffi -pydantic==1.10.19 - # via - # -r requirements-base.in - # fastapi - # gundi-client-v2 - # gundi-core -pyjq==2.6.0 - # via -r requirements-base.in -pyjwt==2.8.0 - # via gcloud-aio-auth -python-dotenv==1.0.1 - # via environs -python-json-logger==2.0.7 - # via -r requirements-base.in -redis==5.0.1 - # via -r requirements-base.in -respx==0.20.2 - # via gundi-client-v2 -sniffio==1.3.0 - # via - # anyio - # httpcore - # httpx -stamina==23.2.0 - # via -r requirements-base.in -starlette==0.27.0 - # via fastapi -tenacity==8.2.3 - # via stamina -typing-extensions==4.9.0 - # via - # fastapi - # pydantic - # uvicorn -uvicorn==0.23.2 - # via -r requirements-base.in -xmltodict==0.13.0 - # via -r requirements.in -yarl==1.9.4 - # via aiohttp From 4db0c9b5148c7a12572ae71faf1d79e5ffd7b157 Mon Sep 17 00:00:00 2001 From: Mariano Martinez Grasso Date: Wed, 18 Dec 2024 11:15:46 -0300 Subject: [PATCH 05/16] Support setting action schedules by command or decorator --- app/register.py | 26 ++++++++++++-- app/services/action_runner.py | 7 +++- app/services/self_registration.py | 38 ++++++++++++--------- app/services/utils.py | 56 +++++++++++++++++++++++++++++-- 4 files changed, 107 insertions(+), 20 deletions(-) diff --git a/app/register.py b/app/register.py index 
c53332b..38bc7ba 100644 --- a/app/register.py +++ b/app/register.py @@ -2,13 +2,35 @@ import click from app.services.action_runner import _portal from app.services.self_registration import register_integration_in_gundi +from app.services.utils import CrontabSchedule @click.command() @click.option('--slug', default=None, help='Slug ID for the integration type') @click.option('--service-url', default=None, help='Service URL used to trigger actions or receive webhooks') -def register_integration(slug, service_url): - asyncio.run(register_integration_in_gundi(gundi_client=_portal, type_slug=slug, service_url=service_url)) +@click.option( + '--schedule', + multiple=True, + help='Schedules in the format "action_id:crontab schedule" (e.g., "pull_events:0 */4 * * *")' +) +def register_integration(slug, service_url, schedule): + schedules = {} + for item in schedule: + try: + action_id, cron_schedule = item.split(":", 1) + schedules[action_id.strip()] = CrontabSchedule.parse_obj_from_crontab(cron_schedule.strip()) + except ValueError: + raise click.BadParameter( + f"Invalid schedule format: {item}. Expected format is 'action_id:schedule'." 
+ ) + asyncio.run( + register_integration_in_gundi( + gundi_client=_portal, + type_slug=slug, + service_url=service_url, + action_schedules=schedules + ) + ) # Main diff --git a/app/services/action_runner.py b/app/services/action_runner.py index f3aea7a..c60b31b 100644 --- a/app/services/action_runner.py +++ b/app/services/action_runner.py @@ -1,9 +1,10 @@ -import datetime import logging + import httpx import pydantic import stamina from gundi_client_v2 import GundiClient + from app.actions import action_handlers from app import settings from fastapi import status @@ -137,3 +138,7 @@ async def execute_action(integration_id: str, action_id: str, config_overrides: ) else: return result + + + + diff --git a/app/services/self_registration.py b/app/services/self_registration.py index 9b905b2..c79a6e3 100644 --- a/app/services/self_registration.py +++ b/app/services/self_registration.py @@ -19,7 +19,7 @@ logger = logging.getLogger(__name__) -async def register_integration_in_gundi(gundi_client, type_slug=None, service_url=None): +async def register_integration_in_gundi(gundi_client, type_slug=None, service_url=None, action_schedules=None): # Prepare the integration name and value integration_type_slug = type_slug or INTEGRATION_TYPE_SLUG if not integration_type_slug: @@ -43,7 +43,7 @@ async def register_integration_in_gundi(gundi_client, type_slug=None, service_ur # Prepare the actions and schemas actions = [] for action_id, handler in action_handlers.items(): - _, config_model = handler + func, config_model = handler action_name = action_id.replace("_", " ").title() action_schema = json.loads(config_model.schema_json()) action_ui_schema = config_model.ui_schema() @@ -59,19 +59,27 @@ async def register_integration_in_gundi(gundi_client, type_slug=None, service_ur if issubclass(config_model, ExecutableActionMixin): action_schema["is_executable"] = True - actions.append( - { - "type": action_type, - "name": action_name, - "value": action_id, - "description": 
f"{integration_type_name} {action_name} action", - "schema": action_schema, - "ui_schema": action_ui_schema, - "is_periodic_action": ( - True if issubclass(config_model, PullActionConfiguration) else False - ), - } - ) + action = { + "type": action_type, + "name": action_name, + "value": action_id, + "description": f"{integration_type_name} {action_name} action", + "schema": action_schema, + "ui_schema": action_ui_schema, + } + + if issubclass(config_model, PullActionConfiguration): + action["is_periodic_action"] = True + # Schedules can be specified by argument or using a decorator + if action_schedules and action_id in action_schedules: + action["crontab_schedule"] = action_schedules[action_id].dict() + elif crontab_schedule := getattr(func, "crontab_schedule"): + action["crontab_schedule"] = crontab_schedule.dict() + else: + action["is_periodic_action"] = False + + actions.append(action) + data["actions"] = actions try: # Register webhook config if available diff --git a/app/services/utils.py b/app/services/utils.py index da4a1b5..5cea28e 100644 --- a/app/services/utils.py +++ b/app/services/utils.py @@ -1,9 +1,10 @@ import struct -from typing import Annotated import typing +from functools import wraps from pydantic import create_model, BaseModel from pydantic.fields import Field, FieldInfo, Undefined, NoArgAnyCallable -from typing import Any, Dict, Optional, Union, List +from pydantic.class_validators import validator +from typing import Any, Dict, Optional, Union, List, Annotated def find_config_for_action(configurations, action_id): @@ -373,3 +374,54 @@ def schema(cls, **kwargs): definitions.pop(field, None) json_schema_dict['definitions'] = definitions return json_schema_dict + + +class CrontabSchedule(BaseModel): + minute: str = Field("0", regex=r"^(\*|([0-5]?\d)(,([0-5]?\d)|-([0-5]?\d)|/([1-5]?\d))*)$") + hour: str = Field("6", regex=r"^(\*|([01]?\d|2[0-3])(,([01]?\d|2[0-3])|-(\d+)|/([1-9]\d?))*)$") + day_of_week: str = Field("*", 
regex=r"^(\*|[0-6](,[0-6]|-[0-6]|/[0-6])*)$") + day_of_month: str = Field("*", regex=r"^(\*|([1-9]|[12]\d|3[01])(,([1-9]|[12]\d|3[01])|-(\d+)|/(\d+))*)$") + month_of_year: str = Field("*", regex=r"^(\*|([1-9]|1[0-2])(,([1-9]|1[0-2])|-(\d+)|/(\d+))*)$") + tz_offset: int = Field(0, description="Timezone offset from UTC, e.g., 0 for UTC, -5 for UTC-5, +2 for UTC+2") + + @validator("tz_offset") + def validate_timezone(cls, value): + """Validate that timezone is an integer between -12 and +14.""" + if not (-12 <= value <= 14): + raise ValueError("Timezone offset must be between -12 and +14.") + return value + + @validator("minute", "hour", "day_of_week", "day_of_month", "month_of_year") + def validate_crontab_field(cls, value, field): + if not value: + raise ValueError(f"{field.name} cannot be empty.") + return value + + # build from crontab string + @classmethod + def parse_obj_from_crontab(cls, crontab: str): + minute, hour, day_of_month, month_of_year, day_of_week, tz_offset = crontab.split() + return cls( + minute=minute, + hour=hour, + day_of_month=day_of_month, + month_of_year=month_of_year, + day_of_week=day_of_week, + tz_offset=int(tz_offset) + ) + + +# Defines when a periodic action runs. 
Can receive a CrontabSchedule object or a string as an argument +def crontab_schedule(crontab: Union[CrontabSchedule, str]): + def decorator(func): + if isinstance(crontab, str): + schedule = CrontabSchedule.parse_obj_from_crontab(crontab) + else: + schedule = crontab + setattr(func, "crontab_schedule", schedule) + + @wraps(func) + async def wrapper(*args, **kwargs): + return await func(*args, **kwargs) + return wrapper + return decorator From 92e57a56de70a8e85a2c9b52a1009ee40f572714 Mon Sep 17 00:00:00 2001 From: Mariano Martinez Grasso Date: Thu, 19 Dec 2024 10:42:28 -0300 Subject: [PATCH 06/16] Make timezone optional --- app/register.py | 6 ++++-- app/services/self_registration.py | 3 ++- app/services/utils.py | 10 +++++++++- 3 files changed, 15 insertions(+), 4 deletions(-) diff --git a/app/register.py b/app/register.py index 38bc7ba..b36960b 100644 --- a/app/register.py +++ b/app/register.py @@ -1,5 +1,7 @@ import asyncio import click +import pydantic + from app.services.action_runner import _portal from app.services.self_registration import register_integration_in_gundi from app.services.utils import CrontabSchedule @@ -19,9 +21,9 @@ def register_integration(slug, service_url, schedule): try: action_id, cron_schedule = item.split(":", 1) schedules[action_id.strip()] = CrontabSchedule.parse_obj_from_crontab(cron_schedule.strip()) - except ValueError: + except (pydantic.ValidationError, ValueError) as e: raise click.BadParameter( - f"Invalid schedule format: {item}. Expected format is 'action_id:schedule'." + f"Invalid schedule format: {item}.\n Expected format is 'action_id:MIN HOUR DOM MON DOW [TZ]'. e.g., 'pull_events:0 */4 * * * -5'. 
\n {e}" ) asyncio.run( register_integration_in_gundi( diff --git a/app/services/self_registration.py b/app/services/self_registration.py index c79a6e3..068e92e 100644 --- a/app/services/self_registration.py +++ b/app/services/self_registration.py @@ -73,7 +73,8 @@ async def register_integration_in_gundi(gundi_client, type_slug=None, service_ur # Schedules can be specified by argument or using a decorator if action_schedules and action_id in action_schedules: action["crontab_schedule"] = action_schedules[action_id].dict() - elif crontab_schedule := getattr(func, "crontab_schedule"): + elif hasattr(func, "crontab_schedule"): + crontab_schedule = getattr(func, "crontab_schedule") action["crontab_schedule"] = crontab_schedule.dict() else: action["is_periodic_action"] = False diff --git a/app/services/utils.py b/app/services/utils.py index 5cea28e..97ae069 100644 --- a/app/services/utils.py +++ b/app/services/utils.py @@ -400,7 +400,15 @@ def validate_crontab_field(cls, value, field): # build from crontab string @classmethod def parse_obj_from_crontab(cls, crontab: str): - minute, hour, day_of_month, month_of_year, day_of_week, tz_offset = crontab.split() + parts = crontab.split() + if len(parts) == 6: + minute, hour, day_of_month, month_of_year, day_of_week, tz_offset = parts + elif len(parts) == 5: + minute, hour, day_of_month, month_of_year, day_of_week = parts + tz_offset = 0 + else: + raise ValueError("Invalid crontab format. 
Must have 5 or 6 fields.") + return cls( minute=minute, hour=hour, From cb7aa86b38e35cdab1b86fc7aed08cea00db5e9d Mon Sep 17 00:00:00 2001 From: Mariano Martinez Grasso Date: Thu, 19 Dec 2024 10:58:24 -0300 Subject: [PATCH 07/16] Fix regular expressions for crontab --- app/services/utils.py | 30 ++++++++++++++++++++++++------ 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/app/services/utils.py b/app/services/utils.py index 97ae069..c4fc364 100644 --- a/app/services/utils.py +++ b/app/services/utils.py @@ -377,12 +377,30 @@ def schema(cls, **kwargs): class CrontabSchedule(BaseModel): - minute: str = Field("0", regex=r"^(\*|([0-5]?\d)(,([0-5]?\d)|-([0-5]?\d)|/([1-5]?\d))*)$") - hour: str = Field("6", regex=r"^(\*|([01]?\d|2[0-3])(,([01]?\d|2[0-3])|-(\d+)|/([1-9]\d?))*)$") - day_of_week: str = Field("*", regex=r"^(\*|[0-6](,[0-6]|-[0-6]|/[0-6])*)$") - day_of_month: str = Field("*", regex=r"^(\*|([1-9]|[12]\d|3[01])(,([1-9]|[12]\d|3[01])|-(\d+)|/(\d+))*)$") - month_of_year: str = Field("*", regex=r"^(\*|([1-9]|1[0-2])(,([1-9]|1[0-2])|-(\d+)|/(\d+))*)$") - tz_offset: int = Field(0, description="Timezone offset from UTC, e.g., 0 for UTC, -5 for UTC-5, +2 for UTC+2") + minute: str = Field( + "*", + regex=r"^(\*|([0-5]?\d)(,([0-5]?\d))*|([0-5]?\d-[0-5]?\d)(/\d+)?|\*(/\d+)?)$" + ) + hour: str = Field( + "*", + regex=r"^(\*|([01]?\d|2[0-3])(,([01]?\d|2[0-3]))*|([01]?\d|2[0-3]-[01]?\d|2[0-3])(/\d+)?|\*(/\d+)?)$" + ) + day_of_week: str = Field( + "*", + regex=r"^(\*|[0-6](,[0-6])*|([0-6]-[0-6])(/\d+)?|\*(/\d+)?)$" + ) + day_of_month: str = Field( + "*", + regex=r"^(\*|([1-9]|[12]\d|3[01])(,([1-9]|[12]\d|3[01]))*|([1-9]|[12]\d|3[01]-[1-9]|[12]\d|3[01])(/\d+)?|\*(/\d+)?)$" + ) + month_of_year: str = Field( + "*", + regex=r"^(\*|([1-9]|1[0-2])(,([1-9]|1[0-2]))*|([1-9]|1[0-2]-[1-9]|1[0-2])(/\d+)?|\*(/\d+)?)$" + ) + tz_offset: int = Field( + 0, + description="Timezone offset from UTC, e.g., 0 for UTC, -5 for UTC-5, +2 for UTC+2" + ) @validator("tz_offset") def 
validate_timezone(cls, value): From a7794cbe6aaab5f23aefb6eb73a7fffe48fc532d Mon Sep 17 00:00:00 2001 From: Mariano Martinez Grasso Date: Thu, 19 Dec 2024 20:36:28 -0300 Subject: [PATCH 08/16] Add test coverage for regitration with custom action schedule --- app/conftest.py | 3 +- app/services/tests/test_self_registration.py | 56 ++++++++++++++++++++ 2 files changed, 58 insertions(+), 1 deletion(-) diff --git a/app/conftest.py b/app/conftest.py index 2fec482..e6a2199 100644 --- a/app/conftest.py +++ b/app/conftest.py @@ -31,7 +31,7 @@ AuthActionConfiguration, ExecutableActionMixin, ) -from app.services.utils import GlobalUISchemaOptions, FieldWithUIOptions, UIOptions +from app.services.utils import GlobalUISchemaOptions, FieldWithUIOptions, UIOptions, CrontabSchedule from app.webhooks import ( GenericJsonTransformConfig, GenericJsonPayload, @@ -869,6 +869,7 @@ class MockAuthenticateActionConfiguration( def mock_action_handlers(mocker): mock_action_handler = AsyncMock() mock_action_handler.return_value = {"observations_extracted": 10} + mock_action_handler.crontab_schedule = CrontabSchedule.parse_obj_from_crontab("*/10 * * * * -5") mock_action_handlers = { "pull_observations": (mock_action_handler, MockPullActionConfiguration) } diff --git a/app/services/tests/test_self_registration.py b/app/services/tests/test_self_registration.py index 2af00eb..239867d 100644 --- a/app/services/tests/test_self_registration.py +++ b/app/services/tests/test_self_registration.py @@ -2,6 +2,7 @@ from fastapi.testclient import TestClient from app.main import app from app.services.self_registration import register_integration_in_gundi +from app.services.utils import crontab_schedule, CrontabSchedule api_client = TestClient(app) @@ -59,6 +60,14 @@ async def test_register_integration_with_slug_setting( "ui:order": ["lookback_days", "force_fetch"], }, "is_periodic_action": True, + "crontab_schedule": { + "day_of_month": "*", + "day_of_week": "*", + "hour": "*", + "minute": "*/10", + 
"month_of_year": "*", + "tz_offset": -5 + }, } ], "webhook": { @@ -146,6 +155,14 @@ async def test_register_integration_with_slug_arg( "ui:order": ["lookback_days", "force_fetch"], }, "is_periodic_action": True, + "crontab_schedule": { + "day_of_month": "*", + "day_of_week": "*", + "hour": "*", + "minute": "*/10", + "month_of_year": "*", + "tz_offset": -5 + }, } ], "webhook": { @@ -235,6 +252,14 @@ async def test_register_integration_with_service_url_arg( "ui:order": ["lookback_days", "force_fetch"], }, "is_periodic_action": True, + "crontab_schedule": { + "day_of_month": "*", + "day_of_week": "*", + "hour": "*", + "minute": "*/10", + "month_of_year": "*", + "tz_offset": -5 + }, } ], "webhook": { @@ -327,6 +352,14 @@ async def test_register_integration_with_service_url_setting( "ui:order": ["lookback_days", "force_fetch"], }, "is_periodic_action": True, + "crontab_schedule": { + "day_of_month": "*", + "day_of_week": "*", + "hour": "*", + "minute": "*/10", + "month_of_year": "*", + "tz_offset": -5 + }, } ], "webhook": { @@ -438,3 +471,26 @@ async def test_register_integration_with_executable_action( }, } ) + + +@pytest.mark.asyncio +async def test_crontab_schedule_decorator( + mocker, mock_publish_event, integration_v2, pull_observations_config +): + + mocker.patch("app.services.activity_logger.publish_event", mock_publish_event) + + @crontab_schedule("5-55/10 * * * *") + async def action_pull_observations(integration, action_config): + return {"observations_extracted": 10} + + assert hasattr(action_pull_observations, "crontab_schedule") + expected_schedule = CrontabSchedule( + minute='5-55/10', + hour='*', + day_of_week='*', + day_of_month='*', + month_of_year='*', + tz_offset=0 + ) + assert action_pull_observations.crontab_schedule == expected_schedule From 5e0bf20ae6064a282f34a05714a68e85b46dd9c2 Mon Sep 17 00:00:00 2001 From: Mariano Martinez Grasso Date: Thu, 19 Dec 2024 21:24:17 -0300 Subject: [PATCH 09/16] Add instructions about custom schedule usage in the 
readme --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 8f95e02..3b60943 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,7 @@ Template repo for integration in Gundi v2. - Webhook execution complete - Error occurred during webhook execution - Optionally, use `log_action_activity()` or `log_webhook_activity()` to log custom messages which you can later see in the portal +- Optionally, use `@crontab_schedule()` or `register.py --schedule` to make an action to run on a custom schedule ## Action Examples: @@ -35,10 +36,12 @@ class PullObservationsConfiguration(PullActionConfiguration): # actions/handlers.py from app.services.activity_logger import activity_logger, log_activity from app.services.gundi import send_observations_to_gundi +from app.services.utils import crontab_schedule from gundi_core.events import LogLevel from .configurations import PullObservationsConfiguration +@crontab_schedule("0 */4 * * *") # Run every 4 hours @activity_logger() async def action_pull_observations(integration, action_config: PullObservationsConfiguration): From 87677f6437e74a1c9b3b1249a423ee85db7f06fe Mon Sep 17 00:00:00 2001 From: Mariano Martinez Grasso Date: Fri, 20 Dec 2024 16:14:45 -0300 Subject: [PATCH 10/16] Fix wrong mocks that may affect tests --- app/conftest.py | 42 ++++++++++++++---------------------------- 1 file changed, 14 insertions(+), 28 deletions(-) diff --git a/app/conftest.py b/app/conftest.py index e6a2199..54bbb67 100644 --- a/app/conftest.py +++ b/app/conftest.py @@ -717,20 +717,14 @@ def mock_gundi_sensors_client_class( def events_created_response(): return [ { - "id": "e1e1e1e1-e1e1-e1e1-e1e1-e1e1e1e1e1e1", - "title": "Animal Sighting", - "event_type": "wildlife_sighting_rep", - "recorded_at": "2024-01-29 20:51:10-03:00", - "location": {"lat": -51.688645, "lon": -72.704421}, - "event_details": {"site_name": "MM Spot", "species": "lion"}, + "object_id": "af8e2946-bad6-4d02-8a26-99dde34bd9fa", + "created_at": 
"2024-07-04T13:15:26.559894Z", + "updated_at": None, }, { - "id": "e1e1e1e1-e1e1-e1e1-e1e1-e1e1e1e1e1e2", - "title": "Animal Sighting", - "event_type": "wildlife_sighting_rep", - "recorded_at": "2024-01-29 20:51:25-03:00", - "location": {"lat": -51.688646, "lon": -72.704421}, - "event_details": {"site_name": "MM Spot", "species": "lion"}, + "object_id": "gat51h73-dd71-dj88-91uh-jah7162hy6fa", + "created_at": "2024-07-03T13:15:26.559894Z", + "updated_at": None, }, ] @@ -739,12 +733,12 @@ def events_created_response(): def event_attachment_created_response(): return [ { - "object_id": "af8e2946-bad6-4d02-8a26-99dde34bd9fa", + "object_id": "af8e2946-bad6-4d02-8a26-99dde34bd9fb", "created_at": "2024-07-04T13:15:26.559894Z", "updated_at": None, }, { - "object_id": "gat51h73-dd71-dj88-91uh-jah7162hy6fs", + "object_id": "gat51h73-dd71-dj88-91uh-jah7162hy6fb", "created_at": "2024-07-03T13:15:26.559894Z", "updated_at": None, }, @@ -755,22 +749,14 @@ def event_attachment_created_response(): def observations_created_response(): return [ { - "id": "e1e1e1e1-e1e1-e1e1-e1e1-e1e1e1e1e1e1", - "source": "device-xy123", - "type": "tracking-device", - "subject_type": "puma", - "recorded_at": "2024-01-24 09:03:00-0300", - "location": {"lat": -51.748, "lon": -72.720}, - "additional": {"speed_kmph": 5}, + "object_id": "af8e2946-bad6-4d02-8a26-99dde34bd9fc", + "created_at": "2024-07-04T13:15:26.559894Z", + "updated_at": None, }, { - "id": "e1e1e1e1-e1e1-e1e1-e1e1-e1e1e1e1e1e2", - "source": "test-device-mariano", - "type": "tracking-device", - "subject_type": "puma", - "recorded_at": "2024-01-24 09:05:00-0300", - "location": {"lat": -51.755, "lon": -72.755}, - "additional": {"speed_kmph": 5}, + "object_id": "gat51h73-dd71-dj88-91uh-jah7162hy6fc", + "created_at": "2024-07-03T13:15:26.559894Z", + "updated_at": None, }, ] From f8afb4514f1a9040b595500f4da17054ffd4df81 Mon Sep 17 00:00:00 2001 From: Mariano Martinez Grasso Date: Fri, 20 Dec 2024 20:53:14 -0300 Subject: [PATCH 11/16] Improve 
error handling in test auth --- app/actions/handlers.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/app/actions/handlers.py b/app/actions/handlers.py index edc1789..b574e0d 100644 --- a/app/actions/handlers.py +++ b/app/actions/handlers.py @@ -43,8 +43,9 @@ async def action_auth(integration: Integration, action_config: AuthenticateConfi valid_credentials = await er_client.login() else: return {"valid_credentials": False, "error": "Please provide either a token or username/password."} - except ERClientException: - valid_credentials = False + except ERClientException as e: + # ToDo. Differentiate ER errors from invalid credentials + return {"valid_credentials": False, "error": f"Error trying to check credentials: {e}"} return {"valid_credentials": valid_credentials} From 2ce9f21149a054fb7188f3db17f47cae608aac7d Mon Sep 17 00:00:00 2001 From: Mariano Martinez Grasso Date: Fri, 20 Dec 2024 20:54:01 -0300 Subject: [PATCH 12/16] [WIP] Improve config form for authentication using jsonschema conditionals --- app/actions/configurations.py | 36 +++++++++++++++++++++++++++++++++-- 1 file changed, 34 insertions(+), 2 deletions(-) diff --git a/app/actions/configurations.py b/app/actions/configurations.py index ecec8e8..5a7b2ef 100644 --- a/app/actions/configurations.py +++ b/app/actions/configurations.py @@ -7,15 +7,24 @@ from .core import AuthActionConfiguration, PullActionConfiguration, ExecutableActionMixin +class ERAuthenticationType(str, Enum): + TOKEN = "token" + USERNAME_PASSWORD = "username_password" + + class AuthenticateConfig(AuthActionConfiguration, ExecutableActionMixin): + authentication_type: ERAuthenticationType = Field( + ERAuthenticationType.TOKEN, + description="Type of authentication to use." 
+ ) username: Optional[str] = Field( "", - example="user@pamdas.org", + example="myuser", description="Username used to authenticate against Earth Ranger API", ) password: Optional[SecretStr] = Field( "", - example="passwd1234abc", + example="mypasswd1234abc", description="Password used to authenticate against Earth Ranger API", format="password" ) @@ -25,6 +34,29 @@ class AuthenticateConfig(AuthActionConfiguration, ExecutableActionMixin): description="Token used to authenticate against Earth Ranger API", ) + class Config: + schema_extra = { + "if": { + "properties": {"authentication_type": {"const": "token"}} + }, + "then": { + "required": ["token"], + "properties": { + "token": {"type": "string", "description": "Token is required if authentication_type is 'token'."} + }, + }, + "else": { + "required": ["username", "password"], + "properties": { + "username": {"type": "string", + "description": "Username is required if authentication_type is 'username_password'."}, + "password": {"type": "string", "format": "password", + "description": "Password is required if authentication_type is 'username_password'."}, + }, + }, + } + +print(AuthenticateConfig.schema_json(indent=2)) class PullObservationsConfig(PullActionConfiguration): start_datetime: str From ce1dbb2102251b2407bbae0e3e8fbb9ec7ca413b Mon Sep 17 00:00:00 2001 From: Mariano Martinez Grasso Date: Mon, 23 Dec 2024 08:54:18 -0300 Subject: [PATCH 13/16] Update the er-client --- requirements.in | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.in b/requirements.in index 04bb8e0..e300e87 100644 --- a/requirements.in +++ b/requirements.in @@ -1,3 +1,3 @@ # Add your integration-specific dependencies here -https://github.com/PADAS/er-client/releases/download/v1.3.2/earthranger_client-1.3.2-py3-none-any.whl +https://github.com/PADAS/er-client/releases/download/v1.3.3/earthranger_client-1.3.3-py3-none-any.whl backports.zoneinfo==0.2.1;python_version<"3.9" diff --git 
a/requirements.txt b/requirements.txt index a567fb7..e9182d6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -46,7 +46,7 @@ cryptography==43.0.3 # via gcloud-aio-auth dateparser==1.2.0 # via earthranger-client -earthranger-client @ https://github.com/PADAS/er-client/releases/download/v1.3.2/earthranger_client-1.3.2-py3-none-any.whl +earthranger-client @ https://github.com/PADAS/er-client/releases/download/v1.3.3/earthranger_client-1.3.3-py3-none-any.whl # via -r requirements.in environs==9.5.0 # via From 6d136ff8d1ed91560eaf590b39d06d6bf61292b0 Mon Sep 17 00:00:00 2001 From: Mariano Martinez Grasso Date: Mon, 23 Dec 2024 17:27:48 -0300 Subject: [PATCH 14/16] Improve auth config to show either token or usr n psw fields --- app/actions/configurations.py | 78 ++++++++++++++++++++++++++--------- 1 file changed, 59 insertions(+), 19 deletions(-) diff --git a/app/actions/configurations.py b/app/actions/configurations.py index 5a7b2ef..2785435 100644 --- a/app/actions/configurations.py +++ b/app/actions/configurations.py @@ -4,6 +4,7 @@ from pydantic import Field, SecretStr +from app.services.utils import GlobalUISchemaOptions from .core import AuthActionConfiguration, PullActionConfiguration, ExecutableActionMixin @@ -34,37 +35,76 @@ class AuthenticateConfig(AuthActionConfiguration, ExecutableActionMixin): description="Token used to authenticate against Earth Ranger API", ) + ui_global_options: GlobalUISchemaOptions = GlobalUISchemaOptions( + order=["authentication_type", "token", "username", "password"], + ) + class Config: - schema_extra = { - "if": { - "properties": {"authentication_type": {"const": "token"}} - }, - "then": { - "required": ["token"], - "properties": { - "token": {"type": "string", "description": "Token is required if authentication_type is 'token'."} + @staticmethod + def schema_extra(schema: dict): + # Remove token, username, and password from the root properties + schema["properties"].pop("token", None) + schema["properties"].pop("username", 
None) + schema["properties"].pop("password", None) + + # Show token OR username & password based on authentication_type + schema.update({ + "if": { + "properties": { + "authentication_type": {"const": "token"} + } }, - }, - "else": { - "required": ["username", "password"], - "properties": { - "username": {"type": "string", - "description": "Username is required if authentication_type is 'username_password'."}, - "password": {"type": "string", "format": "password", - "description": "Password is required if authentication_type is 'username_password'."}, + "then": { + "required": ["token"], + "properties": { + "token": { + "title": "Token", + "description": "Token used to authenticate against Earth Ranger API", + "default": "", + "example": "1b4c1e9c-5ee0-44db-c7f1-177ede2f854a", + "type": "string" + } + } }, - }, - } + "else": { + "required": ["username", "password"], + "properties": { + "username": { + "title": "Username", + "description": "Username used to authenticate against Earth Ranger API", + "default": "", + "example": "myuser", + "type": "string" + }, + "password": { + "title": "Password", + "description": "Password used to authenticate against Earth Ranger API", + "default": "", + "example": "mypasswd1234abc", + "format": "password", + "type": "string", + "writeOnly": True + } + } + } + }) -print(AuthenticateConfig.schema_json(indent=2)) class PullObservationsConfig(PullActionConfiguration): start_datetime: str end_datetime: Optional[str] = None force_run_since_start: bool = False + ui_global_options: GlobalUISchemaOptions = GlobalUISchemaOptions( + order=["start_datetime", "end_datetime", "force_run_since_start"], + ) + class PullEventsConfig(PullActionConfiguration): start_datetime: str end_datetime: Optional[str] = None force_run_since_start: bool = False + + ui_global_options: GlobalUISchemaOptions = GlobalUISchemaOptions( + order=["start_datetime", "end_datetime", "force_run_since_start"], + ) \ No newline at end of file From 
4ca83711c753caa31d7974f79f5a422e2aa829da Mon Sep 17 00:00:00 2001 From: Mariano Martinez Grasso Date: Tue, 24 Dec 2024 11:33:14 -0300 Subject: [PATCH 15/16] Improve error messages on invalid auth and add ToDo for teh ER client --- app/actions/handlers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/actions/handlers.py b/app/actions/handlers.py index b574e0d..486c6e9 100644 --- a/app/actions/handlers.py +++ b/app/actions/handlers.py @@ -44,8 +44,8 @@ async def action_auth(integration: Integration, action_config: AuthenticateConfi else: return {"valid_credentials": False, "error": "Please provide either a token or username/password."} except ERClientException as e: - # ToDo. Differentiate ER errors from invalid credentials - return {"valid_credentials": False, "error": f"Error trying to check credentials: {e}"} + # ToDo. Differentiate ER errors from invalid credentials in the ER client + return {"valid_credentials": False, "error": str(e)} return {"valid_credentials": valid_credentials} From 4976d177212b6d66e1973954eb7ba7dff277ea83 Mon Sep 17 00:00:00 2001 From: Mariano Martinez Grasso Date: Tue, 24 Dec 2024 12:00:45 -0300 Subject: [PATCH 16/16] Test coverage for test credentials --- app/actions/tests/conftest.py | 101 ++++++++++++++++++++++++++++++ app/actions/tests/test_actions.py | 33 +++++++++- 2 files changed, 132 insertions(+), 2 deletions(-) diff --git a/app/actions/tests/conftest.py b/app/actions/tests/conftest.py index 526e5aa..2ad6aa2 100644 --- a/app/actions/tests/conftest.py +++ b/app/actions/tests/conftest.py @@ -1,5 +1,6 @@ import asyncio import pytest +from erclient import ERClientException from gundi_core.schemas.v2 import Integration @@ -105,6 +106,106 @@ def mock_erclient_class( return mocked_erclient_class + +@pytest.fixture +def er_401_exception(): + return ERClientException( + 'Failed to GET to ER web service. provider_key: None, service: https://gundi-dev.staging.pamdas.org/api/v1.0, path: user/me,\n\t 401 from ER. 
Message: Authentication credentials were not provided. {"status":{"code":401,"message":"Unauthorized","detail":"Authentication credentials were not provided."}}' + ) + + +@pytest.fixture +def er_500_exception(): + return ERClientException( + 'Failed to GET to ER web service. provider_key: None, service: https://gundi-dev.staging.pamdas.org/api/v1.0, path: user/me,\n\t 500 from ER. Message: duplicate key value violates unique constraint "observations_observation_tenant_source_at_unique"' + ) + + +@pytest.fixture +def er_generic_exception(): + return ERClientException( + 'Failed to GET to ER web service. provider_key: None, service: https://gundi-dev.staging.pamdas.org/api/v1.0, path: user/me,\n\t Error from ER. Message: Something went wrong' + ) + + +@pytest.fixture +def mock_erclient_class_with_error( + request, + mocker, + er_401_exception, + er_500_exception, + er_generic_exception, + er_client_close_response +): + + if request.param == "er_401_exception": + er_error = er_401_exception + elif request.param == "er_500_exception": + er_error = er_500_exception + else: + er_error = er_generic_exception + mocked_erclient_class = mocker.MagicMock() + erclient_mock = mocker.MagicMock() + erclient_mock.get_me.side_effect = er_error + erclient_mock.auth_headers.side_effect = er_error + erclient_mock.get_events.side_effect = er_error + erclient_mock.get_observations.side_effect = er_error + erclient_mock.close.return_value = async_return( + er_client_close_response + ) + erclient_mock.__aenter__.return_value = erclient_mock + erclient_mock.__aexit__.return_value = er_client_close_response + mocked_erclient_class.return_value = erclient_mock + return mocked_erclient_class + + + +@pytest.fixture +def mock_erclient_class_with_auth_401( + mocker, + auth_headers_response, + er_401_exception, + +): + mocked_erclient_class = mocker.MagicMock() + erclient_mock = mocker.MagicMock() + erclient_mock.get_me.side_effect = er_401_exception + erclient_mock.auth_headers.side_effect = 
er_401_exception + erclient_mock.get_events.side_effect = er_401_exception + erclient_mock.get_observations.side_effect = er_401_exception + erclient_mock.close.return_value = async_return( + er_client_close_response + ) + erclient_mock.__aenter__.return_value = erclient_mock + erclient_mock.__aexit__.return_value = er_client_close_response + mocked_erclient_class.return_value = erclient_mock + return mocked_erclient_class + + +@pytest.fixture +def mock_erclient_class_with_auth_500( + mocker, + auth_headers_response, + er_500_exception, + get_events_response, + get_observations_response, + er_client_close_response +): + mocked_erclient_class = mocker.MagicMock() + erclient_mock = mocker.MagicMock() + erclient_mock.get_me.side_effect = er_500_exception + erclient_mock.auth_headers.side_effect = er_500_exception + erclient_mock.get_events.side_effect = er_500_exception + erclient_mock.get_observations.side_effect = er_500_exception + erclient_mock.close.return_value = async_return( + er_client_close_response + ) + erclient_mock.__aenter__.return_value = erclient_mock + erclient_mock.__aexit__.return_value = er_client_close_response + mocked_erclient_class.return_value = erclient_mock + return mocked_erclient_class + + @pytest.fixture def mock_gundi_sensors_client_class(mocker, events_created_response, observations_created_response): mock_gundi_sensors_client_class = mocker.MagicMock() diff --git a/app/actions/tests/test_actions.py b/app/actions/tests/test_actions.py index 0f61122..9d9e54a 100644 --- a/app/actions/tests/test_actions.py +++ b/app/actions/tests/test_actions.py @@ -3,7 +3,7 @@ @pytest.mark.asyncio -async def test_execute_auth_action( +async def test_execute_auth_action_with_valid_credentials( mocker, mock_gundi_client_v2, mock_erclient_class, er_integration_v2, mock_publish_event ): @@ -19,7 +19,36 @@ async def test_execute_auth_action( assert mock_gundi_client_v2.get_integration_details.called assert mock_erclient_class.return_value.get_me.called - 
assert response == {"valid_credentials": True} + assert response.get("valid_credentials") == True + + +@pytest.mark.parametrize( + "mock_erclient_class_with_error", + [ + "er_401_exception", + "er_500_exception", + "er_generic_exception", + ], + indirect=["mock_erclient_class_with_error"]) +@pytest.mark.asyncio +async def test_execute_auth_action_with_invalid_credentials( + mocker, mock_gundi_client_v2, er_integration_v2, + mock_publish_event, mock_erclient_class_with_error +): + mocker.patch("app.services.action_runner._portal", mock_gundi_client_v2) + mocker.patch("app.services.activity_logger.publish_event", mock_publish_event) + mocker.patch("app.services.action_runner.publish_event", mock_publish_event) + mocker.patch("app.actions.handlers.AsyncERClient", mock_erclient_class_with_error) + + response = await execute_action( + integration_id=str(er_integration_v2.id), + action_id="auth" + ) + + assert mock_gundi_client_v2.get_integration_details.called + assert mock_erclient_class_with_error.return_value.get_me.called + assert response.get("valid_credentials") == False + assert "error" in response @pytest.mark.asyncio