From 42a66943e8250edda3f605816a4913c6cfa2191e Mon Sep 17 00:00:00 2001 From: maiyerlee Date: Tue, 8 Oct 2024 08:07:42 -0500 Subject: [PATCH 01/10] Create system user on data import --- backend/data_tools/initial_data/015-ops_user.sql | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/data_tools/initial_data/015-ops_user.sql b/backend/data_tools/initial_data/015-ops_user.sql index d06b4eeae5..cfee84a788 100644 --- a/backend/data_tools/initial_data/015-ops_user.sql +++ b/backend/data_tools/initial_data/015-ops_user.sql @@ -1,3 +1,4 @@ INSERT INTO ops.ops_user (id, oidc_id, hhs_id, email, first_name, last_name, division, status, created_on, updated_on) VALUES (520, '00000000-0000-1111-a111-000000000018', null, 'admin.demo@email.com', 'Admin', 'Demo', 3, 'ACTIVE', current_timestamp, current_timestamp); INSERT INTO ops.ops_user (id, oidc_id, hhs_id, email, first_name, last_name, division, status, created_on, updated_on) VALUES (521, '00000000-0000-1111-a111-000000000019', null, 'user.demo@email.com', 'User', 'Demo', 3, 'ACTIVE', current_timestamp, current_timestamp); INSERT INTO ops.ops_user (id, oidc_id, hhs_id, email, first_name, last_name, division, status, created_on, updated_on) VALUES (523, '00000000-0000-1111-a111-000000000021', null, 'budget.team@email.com', 'Budget', 'Team', 1, 'ACTIVE', current_timestamp, current_timestamp); +INSERT INTO ops.ops_user (id, oidc_id, hhs_id, email, first_name, last_name, division, status, created_on, updated_on) VALUES (524, '00000000-0000-1111-a111-000000000022', null, 'system.admin@email.com', null, null, null, 'LOCKED', current_timestamp, current_timestamp); From 7fa1be37cc895a90c1a01d983252efcc949db2da Mon Sep 17 00:00:00 2001 From: maiyerlee Date: Tue, 8 Oct 2024 08:11:29 -0500 Subject: [PATCH 02/10] Add script queries --- .../src/disable_inactive_users/queries.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 backend/data_tools/src/disable_inactive_users/queries.py diff --git a/backend/data_tools/src/disable_inactive_users/queries.py b/backend/data_tools/src/disable_inactive_users/queries.py new file mode 100644 index 0000000000..27ddb54eae --- /dev/null +++ b/backend/data_tools/src/disable_inactive_users/queries.py @@ -0,0 +1,25 @@ +EXCLUDED_USER_IDS = [520, 521, 522, 523] + +SYSTEM_USER_ID = 524 +SYSTEM_USER_OIDC_ID = "00000000-0000-1111-a111-000000000022" +SYSTEM_USER_EMAIL = "system.admin@email.com" + +INACTIVE_USER_QUERY = ( + "SELECT id " + "FROM ops_user " + "WHERE id IN ( " + " SELECT ou.id " + " FROM user_session JOIN ops_user ou ON user_session.user_id = ou.id " + " WHERE ou.status = 'ACTIVE' " + " AND user_session.last_active_at < CURRENT_TIMESTAMP - INTERVAL '60 days'" + ");" +) + +ALL_ACTIVE_USER_SESSIONS_QUERY = ( + "SELECT * " + "FROM user_session " + "WHERE user_id = :user_id AND is_active = TRUE " + "ORDER BY created_on DESC" +) + +SYSTEM_USER_QUERY = "SELECT id FROM ops_user WHERE oidc_id = :oidc_id" From 32e25362b477c7fda412b819c3ca8bdf40e4e45c Mon Sep 17 00:00:00 2001 From: maiyerlee Date: Tue, 8 Oct 2024 08:13:58 -0500 Subject: [PATCH 03/10] Add disable inactive users ADR --- ...-apps-jobs-for-disabling-inactive-users.md | 67 +++++++++++++++++++ 1 file changed, 67 insertions(+) create mode 100644 docs/adr/026-use-container-apps-jobs-for-disabling-inactive-users.md diff --git a/docs/adr/026-use-container-apps-jobs-for-disabling-inactive-users.md b/docs/adr/026-use-container-apps-jobs-for-disabling-inactive-users.md new file mode 100644 index 0000000000..85b1b37825 --- /dev/null +++ 
b/docs/adr/026-use-container-apps-jobs-for-disabling-inactive-users.md
@@ -0,0 +1,67 @@
+# 26. Container Apps Jobs for Disabling Inactive Users
+
+Date: 09/30/2024
+
+## Status
+Accepted
+
+## Context
+
+We want to set up a process to automatically disable user accounts after 60 days of inactivity.
+This pertains to the security compliance Access Control AC-02(03) Disable Accounts.
+
+## Context and explanation of the tech concepts
+
+Each row in the ops_user table represents a user account with a status of ACTIVE, INACTIVE, or LOCKED.
+An INACTIVE status indicates a disabled account.
+
+To check for inactivity, we'll use the last_active_at timestamp from the user_session table, which is updated whenever a user logs into OPS.
+If the timestamp shows that a user hasn't logged in or started a session in 60 days or more, their account will be flagged for disabling.
+
+OPS runs in a Container App that includes a Data Import process executed through a Container Apps Job.
+The image deployed to this Container App is built from the Dockerfile.data-tools-import defined in the OPRE-OPS repository.
+
+## Options Considered
+- Container Apps Jobs
+- [Celery](https://flask.palletsprojects.com/en/3.0.x/patterns/celery/)
+
+### Tradeoffs
+#### Container Apps Jobs
+- Pros
+  - We already have a process in place to run containerized tasks within the OPS environment.
+  - The process can be scheduled to run at specific times.
+  - The process can be monitored and logged.
+  - OPS engineers can be notified if the process fails.
+- Cons
+  - Container Apps Jobs has limited [metrics](https://learn.microsoft.com/en-us/azure/container-apps/metrics).
+  - Creating notifications for Container Apps Jobs is new to OPS.
+#### Celery
+- Pros
+  - Celery is a distributed task queue that can run tasks in the background.
+  - It can be used to schedule tasks to run at specific times.
+  - It supports various message brokers.
+  - Fits well with the Python and Flask tech stack.
+- Cons
+  - Getting Celery set up was challenging.
+  - Configuring the message broker required multiple attempts to establish communication.
+  - Ensuring the tasks executed properly added to the difficulty.
+
+## Decision
+* We will use Container Apps Jobs to disable inactive user accounts.
+* The process will be scheduled hourly and ignore FakeAuth users.
+* Database changes executed by this process will be implemented with a SQLAlchemy script.
+* OPS engineers will be notified if the process fails.
+
+* The following ensures changes in user account status made by this process are recorded and logged in the same way as if a System Admin had manually disabled the user account:
+  * An UPDATE_USER event record is added to the ops_events table.
+  * All user sessions are deactivated by setting the active column to False.
+  * The ops_db_history table is updated with the changes made to the user account.
+
+## Consequences
+The decision to utilize Container Apps Jobs for managing inactive user accounts presents several positive consequences, such as streamlining the process and improving monitoring capabilities.
+This integration will ensure consistent application of access controls with minimal manual intervention, while hourly scheduling will facilitate timely management of user accounts, reducing security risks.
+Additionally, implementing a notification system will allow OPS engineers to promptly respond to any process failures, helping to ensure system integrity.
+Leveraging the existing infrastructure minimizes the need for retraining and maximizes the use of current resources. + +Overall, while using Container Apps Jobs meets immediate operational needs, it’s important to consider its limitations and risks for long-term success. +Issues like limited metrics, potential issues in deploying new monitoring alerts, and reliance on changing infrastructure could pose challenges. From e6c795266ac6335ef1ae8840284abffbbd164e5e Mon Sep 17 00:00:00 2001 From: maiyerlee Date: Tue, 8 Oct 2024 08:16:41 -0500 Subject: [PATCH 04/10] Add disable inactive users script and tests --- .../src/disable_inactive_users/__init__.py | 0 .../disable_inactive_users.py | 110 ++++++++++++++++++ .../tests/disable_inactive_users/__init__.py | 0 .../test_disable_inactive_users.py | 102 ++++++++++++++++ 4 files changed, 212 insertions(+) create mode 100644 backend/data_tools/src/disable_inactive_users/__init__.py create mode 100644 backend/data_tools/src/disable_inactive_users/disable_inactive_users.py create mode 100644 backend/data_tools/tests/disable_inactive_users/__init__.py create mode 100644 backend/data_tools/tests/disable_inactive_users/test_disable_inactive_users.py diff --git a/backend/data_tools/src/disable_inactive_users/__init__.py b/backend/data_tools/src/disable_inactive_users/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/data_tools/src/disable_inactive_users/disable_inactive_users.py b/backend/data_tools/src/disable_inactive_users/disable_inactive_users.py new file mode 100644 index 0000000000..cf344ea937 --- /dev/null +++ b/backend/data_tools/src/disable_inactive_users/disable_inactive_users.py @@ -0,0 +1,110 @@ +import logging +import os + +from data_tools.src.disable_inactive_users.queries import ( + ALL_ACTIVE_USER_SESSIONS_QUERY, + EXCLUDED_USER_IDS, + INACTIVE_USER_QUERY, + SYSTEM_USER_EMAIL, + SYSTEM_USER_ID, + SYSTEM_USER_OIDC_ID, + SYSTEM_USER_QUERY, +) +from data_tools.src.import_static_data.import_data import get_config, init_db +from sqlalchemy import text +from sqlalchemy.orm import Mapper, Session + +from models import * # noqa: F403, F401 + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +def create_system_user(se): + """Create system user if it doesn't exist.""" + system_user = se.execute( + text(SYSTEM_USER_QUERY), + {"oidc_id": SYSTEM_USER_OIDC_ID} + ).fetchone() + + if system_user is None: + sys_user = User( + id=SYSTEM_USER_ID, + email=SYSTEM_USER_EMAIL, + oidc_id=SYSTEM_USER_OIDC_ID, + status=UserStatus.LOCKED + ) + se.add(sys_user) + se.commit() + return sys_user.id + + return system_user[0] + + +def deactivate_user(se, user_id, system_user_id): + """Deactivate a single user and log the change.""" + updated_user = User(id=user_id, status=UserStatus.INACTIVE, updated_by=system_user_id) + se.merge(updated_user) + + db_audit = build_audit(updated_user, OpsDBHistoryType.UPDATED) + ops_db_history = OpsDBHistory( + event_type=OpsDBHistoryType.UPDATED, + created_by=system_user_id, + class_name=updated_user.__class__.__name__, + row_key=db_audit.row_key, + changes=db_audit.changes, + ) + se.add(ops_db_history) + + ops_event = OpsEvent( + event_type=OpsEventType.UPDATE_USER, + event_status=OpsEventStatus.SUCCESS, + created_by=system_user_id, + ) + se.add(ops_event) + + all_user_sessions = se.execute(text(ALL_ACTIVE_USER_SESSIONS_QUERY), {"user_id": user_id}) + for session in all_user_sessions: + updated_user_session = UserSession( + id=session[0], + is_active=False, + 
updated_by=system_user_id + ) + se.merge(updated_user_session) + + +def update_disabled_users_status(conn: sqlalchemy.engine.Engine): + """Update the status of disabled users in the database.""" + with Session(conn) as se: + logger.info("Checking for System User.") + system_user_id = create_system_user(se) + + logger.info("Fetching inactive users.") + results = se.execute(text(INACTIVE_USER_QUERY)).scalars().all() + user_ids = [uid for uid in results if uid not in EXCLUDED_USER_IDS] + + if not user_ids: + logger.info("No inactive users found.") + return + + logger.info("Inactive users found:", user_ids) + + for user_id in user_ids: + logger.info("Deactivating user", user_id) + deactivate_user(se, user_id, system_user_id) + + se.commit() + + +if __name__ == "__main__": + logger.info("Starting Disable Inactive Users process.") + + script_env = os.getenv("ENV") + script_config = get_config(script_env) + db_engine, db_metadata_obj = init_db(script_config) + + event.listen(Mapper, "after_configured", setup_schema(BaseModel)) + + update_disabled_users_status(db_engine) + + logger.info("Disable Inactive Users process complete.") diff --git a/backend/data_tools/tests/disable_inactive_users/__init__.py b/backend/data_tools/tests/disable_inactive_users/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/data_tools/tests/disable_inactive_users/test_disable_inactive_users.py b/backend/data_tools/tests/disable_inactive_users/test_disable_inactive_users.py new file mode 100644 index 0000000000..f19932c82e --- /dev/null +++ b/backend/data_tools/tests/disable_inactive_users/test_disable_inactive_users.py @@ -0,0 +1,102 @@ +from unittest.mock import MagicMock, patch + +from data_tools.src.disable_inactive_users.disable_inactive_users import ( + create_system_user, + deactivate_user, + update_disabled_users_status, +) +from data_tools.src.disable_inactive_users.queries import SYSTEM_USER_EMAIL, SYSTEM_USER_ID, SYSTEM_USER_OIDC_ID + +from models import OpsDBHistoryType, OpsEventStatus, OpsEventType, UserStatus + +system_user_id = 111 + +def test_create_system_user(): + mock_se = MagicMock() + mock_se.execute.return_value.fetchone.return_value = None + + result = create_system_user(mock_se) + + se_add = mock_se.add.call_args[0][0] + mock_se.execute.assert_called_once() + mock_se.add.assert_called_once() + mock_se.commit.assert_called_once() + assert result == SYSTEM_USER_ID + assert se_add.id == SYSTEM_USER_ID + assert se_add.email == SYSTEM_USER_EMAIL + assert se_add.oidc_id == SYSTEM_USER_OIDC_ID + assert se_add.first_name is None + assert se_add.last_name is None + + +def test_return_existing_system_user(): + mock_se = MagicMock() + mock_se.execute.return_value.fetchone.return_value = (SYSTEM_USER_ID,) + + result = create_system_user(mock_se) + + assert result == SYSTEM_USER_ID + mock_se.add.assert_not_called() + mock_se.commit.assert_not_called() + +def test_deactivate_user(): + user_id = 1 + db_history_changes = { + "id": { + "new": user_id, + "old": None + }, + "status": { + "new": "INACTIVE", + "old": None + }, + "updated_by": { + "new": system_user_id, + "old": None} + } + + mock_se = MagicMock() + mock_se.execute.return_value = [(1,), (2,)] + + deactivate_user(mock_se, user_id, system_user_id) + + assert mock_se.merge.call_count == 3 + assert mock_se.add.call_count == 2 + + user_call = mock_se.merge.call_args_list[0] + assert user_call[0][0].id == user_id + assert user_call[0][0].status == UserStatus.INACTIVE + assert user_call[0][0].updated_by == system_user_id + + 
user_session_call_1 = mock_se.merge.call_args_list[1] + assert user_session_call_1[0][0].id == user_id + assert user_session_call_1[0][0].is_active == False + assert user_session_call_1[0][0].updated_by == system_user_id + + user_session_call_2 = mock_se.merge.call_args_list[1] + assert user_session_call_2[0][0].id == user_id + assert user_session_call_2[0][0].is_active == False + assert user_session_call_2[0][0].updated_by == system_user_id + + ops_db_history_call = mock_se.add.call_args_list[0] + assert ops_db_history_call[0][0].event_type == OpsDBHistoryType.UPDATED + assert ops_db_history_call[0][0].created_by == system_user_id + assert ops_db_history_call[0][0].class_name == 'User' + assert ops_db_history_call[0][0].row_key == str(user_id) + assert ops_db_history_call[0][0].changes == db_history_changes + + ops_events_call = mock_se.add.call_args_list[1] + assert ops_events_call[0][0].event_type == OpsEventType.UPDATE_USER + assert ops_events_call[0][0].event_status == OpsEventStatus.SUCCESS + assert ops_events_call[0][0].created_by == system_user_id + +@patch("data_tools.src.disable_inactive_users.disable_inactive_users.logger") +def test_no_inactive_users(mock_logger): + mock_conn = MagicMock() + mock_se = MagicMock() + mock_se.execute.return_value.all.return_value = None + update_disabled_users_status(mock_conn) + + mock_logger.info.assert_any_call("Checking for System User.") + mock_logger.info.assert_any_call("Fetching inactive users.") + mock_logger.info.assert_any_call("No inactive users found.") From 88a7000e710cc743c2a311f304914ed02307e579 Mon Sep 17 00:00:00 2001 From: maiyerlee Date: Tue, 8 Oct 2024 08:22:56 -0500 Subject: [PATCH 05/10] Expose auditing into history model --- backend/models/history.py | 84 +++++++++++++++++++++++++++++++- backend/ops_api/ops/history.py | 87 +--------------------------------- 2 files changed, 84 insertions(+), 87 deletions(-) diff --git a/backend/models/history.py b/backend/models/history.py index 5a8b9a2dbe..f9d293074f 100644 --- a/backend/models/history.py +++ b/backend/models/history.py @@ -1,10 +1,16 @@ +from collections import namedtuple +from datetime import date, datetime +from decimal import Decimal from enum import Enum +from types import NoneType -from sqlalchemy import Column, Index, String, desc +from sqlalchemy import Column, Index, String, desc, inspect from sqlalchemy.dialects.postgresql import ENUM, JSONB +from sqlalchemy.orm.attributes import get_history from .base import BaseModel +DbRecordAudit = namedtuple("DbRecordAudit", "row_key changes") class OpsDBHistoryType(Enum): NEW = 1 @@ -30,3 +36,79 @@ class OpsDBHistory(BaseModel): OpsDBHistory.row_key, desc(OpsDBHistory.created_on), ) + + +def build_audit(obj, event_type: OpsDBHistoryType) -> DbRecordAudit: # noqa: C901 + row_key = "|".join([str(getattr(obj, pk.name)) for pk in inspect(obj.__table__).primary_key.columns.values()]) + + changes = {} + + mapper = obj.__mapper__ + + # collect changes in column values + auditable_columns = list(filter(lambda c: c.key in obj.__dict__, mapper.columns)) + for col in auditable_columns: + key = col.key + hist = get_history(obj, key) + if hist.has_changes(): + # this assumes columns are primitives, not lists + old_val = convert_for_jsonb(hist.deleted[0]) if hist.deleted else None + new_val = convert_for_jsonb(hist.added[0]) if hist.added else None + # exclude Enums that didn't really change + if hist.deleted and isinstance(hist.deleted[0], Enum) and old_val == new_val: + continue + if event_type == OpsDBHistoryType.NEW: + if new_val: + 
changes[key] = { + "new": new_val, + } + else: + changes[key] = { + "new": new_val, + "old": old_val, + } + + # collect changes in relationships, such as agreement.team_members + # limit this to relationships that aren't being logged as their own Classes + # and only include them on the editable side + auditable_relationships = list( + filter( + lambda rel: rel.secondary is not None and not rel.viewonly, + mapper.relationships, + ) + ) + + for relationship in auditable_relationships: + key = relationship.key + hist = get_history(obj, key) + if hist.has_changes(): + related_class_name = ( + relationship.argument if isinstance(relationship.argument, str) else relationship.argument.__name__ + ) + changes[key] = { + "collection_of": related_class_name, + "added": convert_for_jsonb(hist.added), + } + if event_type != OpsDBHistoryType.NEW: + changes[key]["deleted"] = convert_for_jsonb(hist.deleted) + return DbRecordAudit(row_key, changes) + + +def convert_for_jsonb(value): + if isinstance(value, (str, bool, int, float, NoneType)): + return value + if isinstance(value, Enum): + return value.name + if isinstance(value, Decimal): + return float(value) + if isinstance(value, datetime): + return value.isoformat() + if isinstance(value, date): + return value.isoformat() + if isinstance(value, BaseModel): + if callable(getattr(value, "to_slim_dict", None)): + return value.to_slim_dict() + return value.to_dict() + if isinstance(value, (list, tuple)): + return [convert_for_jsonb(item) for item in value] + return str(value) diff --git a/backend/ops_api/ops/history.py b/backend/ops_api/ops/history.py index edba2a050b..4890812d55 100644 --- a/backend/ops_api/ops/history.py +++ b/backend/ops_api/ops/history.py @@ -1,51 +1,22 @@ import json import logging -from collections import namedtuple -from datetime import date, datetime -from decimal import Decimal -from enum import Enum -from types import NoneType from flask import current_app from flask_jwt_extended import current_user -from sqlalchemy import inspect from sqlalchemy.cyextension.collections import IdentitySet from sqlalchemy.orm import Session -from sqlalchemy.orm.attributes import get_history from models import ( Agreement, AgreementChangeRequest, AgreementOpsDbHistory, - BaseModel, BudgetLineItem, OpsDBHistory, OpsDBHistoryType, OpsEvent, + build_audit, ) -DbRecordAudit = namedtuple("DbRecordAudit", "row_key changes") - - -def convert_for_jsonb(value): - if isinstance(value, (str, bool, int, float, NoneType)): - return value - if isinstance(value, Enum): - return value.name - if isinstance(value, Decimal): - return float(value) - if isinstance(value, datetime): - return value.isoformat() - if isinstance(value, date): - return value.isoformat() - if isinstance(value, BaseModel): - if callable(getattr(value, "to_slim_dict", None)): - return value.to_slim_dict() - return value.to_dict() - if isinstance(value, (list, tuple)): - return [convert_for_jsonb(item) for item in value] - return str(value) - def find_relationship_by_fk(obj, col_key): for rel in obj.__mapper__.relationships: @@ -57,62 +28,6 @@ def find_relationship_by_fk(obj, col_key): return None -def build_audit(obj, event_type: OpsDBHistoryType) -> DbRecordAudit: # noqa: C901 - row_key = "|".join([str(getattr(obj, pk.name)) for pk in inspect(obj.__table__).primary_key.columns.values()]) - - changes = {} - - mapper = obj.__mapper__ - - # collect changes in column values - auditable_columns = list(filter(lambda c: c.key in obj.__dict__, mapper.columns)) - for col in auditable_columns: - key = 
col.key - hist = get_history(obj, key) - if hist.has_changes(): - # this assumes columns are primitives, not lists - old_val = convert_for_jsonb(hist.deleted[0]) if hist.deleted else None - new_val = convert_for_jsonb(hist.added[0]) if hist.added else None - # exclude Enums that didn't really change - if hist.deleted and isinstance(hist.deleted[0], Enum) and old_val == new_val: - continue - if event_type == OpsDBHistoryType.NEW: - if new_val: - changes[key] = { - "new": new_val, - } - else: - changes[key] = { - "new": new_val, - "old": old_val, - } - - # collect changes in relationships, such as agreement.team_members - # limit this to relationships that aren't being logged as their own Classes - # and only include them on the editable side - auditable_relationships = list( - filter( - lambda rel: rel.secondary is not None and not rel.viewonly, - mapper.relationships, - ) - ) - - for relationship in auditable_relationships: - key = relationship.key - hist = get_history(obj, key) - if hist.has_changes(): - related_class_name = ( - relationship.argument if isinstance(relationship.argument, str) else relationship.argument.__name__ - ) - changes[key] = { - "collection_of": related_class_name, - "added": convert_for_jsonb(hist.added), - } - if event_type != OpsDBHistoryType.NEW: - changes[key]["deleted"] = convert_for_jsonb(hist.deleted) - return DbRecordAudit(row_key, changes) - - def track_db_history_before(session: Session): session.add_all(add_obj_to_db_history(session.deleted, OpsDBHistoryType.DELETED)) session.add_all(add_obj_to_db_history(session.dirty, OpsDBHistoryType.UPDATED)) From c12e19ead47897947eed512dce12b89ce28c9a9b Mon Sep 17 00:00:00 2001 From: maiyerlee Date: Tue, 8 Oct 2024 08:44:07 -0500 Subject: [PATCH 06/10] Add disabling inactive user service to docker compose for local dev --- docker-compose.yml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index 2d3b11ca9e..b4f6bd9b0d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -45,6 +45,24 @@ services: db: condition: service_healthy + disable-inactive-users: + profiles: + - '' # This is the default profile, which is used for development + build: + context: ./backend/ + dockerfile: Dockerfile.data-tools + platform: linux/amd64 + container_name: disable-inactive-users + environment: + - ENV=local + - SQLALCHEMY_DATABASE_URI=postgresql://ops:ops@db:5432/postgres + command: ["/home/app/.venv/bin/python", "./data_tools/src/disable_inactive_users/disable_inactive_users.py"] + depends_on: + db: + condition: service_healthy + data-import: + condition: service_completed_successfully + backend: profiles: - '' # This is the default profile, which is used for development From affb75a678b008963aeac433b4010db62065ac34 Mon Sep 17 00:00:00 2001 From: maiyerlee Date: Tue, 8 Oct 2024 23:50:18 -0500 Subject: [PATCH 07/10] Rename to match container app job name --- .../__init__.py | 0 .../disable_users.py} | 6 +++--- .../queries.py | 0 .../__init__.py | 0 .../test_disable_users.py} | 12 ++++-------- docker-compose.yml | 6 +++--- 6 files changed, 10 insertions(+), 14 deletions(-) rename backend/data_tools/src/{disable_inactive_users => disable_users}/__init__.py (100%) rename backend/data_tools/src/{disable_inactive_users/disable_inactive_users.py => disable_users/disable_users.py} (94%) rename backend/data_tools/src/{disable_inactive_users => disable_users}/queries.py (100%) rename backend/data_tools/tests/{disable_inactive_users => disable_users}/__init__.py (100%) rename 
backend/data_tools/tests/{disable_inactive_users/test_disable_inactive_users.py => disable_users/test_disable_users.py} (88%) diff --git a/backend/data_tools/src/disable_inactive_users/__init__.py b/backend/data_tools/src/disable_users/__init__.py similarity index 100% rename from backend/data_tools/src/disable_inactive_users/__init__.py rename to backend/data_tools/src/disable_users/__init__.py diff --git a/backend/data_tools/src/disable_inactive_users/disable_inactive_users.py b/backend/data_tools/src/disable_users/disable_users.py similarity index 94% rename from backend/data_tools/src/disable_inactive_users/disable_inactive_users.py rename to backend/data_tools/src/disable_users/disable_users.py index cf344ea937..398cccd33b 100644 --- a/backend/data_tools/src/disable_inactive_users/disable_inactive_users.py +++ b/backend/data_tools/src/disable_users/disable_users.py @@ -1,7 +1,7 @@ import logging import os -from data_tools.src.disable_inactive_users.queries import ( +from data_tools.src.disable_users.queries import ( ALL_ACTIVE_USER_SESSIONS_QUERY, EXCLUDED_USER_IDS, INACTIVE_USER_QUERY, @@ -41,7 +41,7 @@ def create_system_user(se): return system_user[0] -def deactivate_user(se, user_id, system_user_id): +def disable_user(se, user_id, system_user_id): """Deactivate a single user and log the change.""" updated_user = User(id=user_id, status=UserStatus.INACTIVE, updated_by=system_user_id) se.merge(updated_user) @@ -91,7 +91,7 @@ def update_disabled_users_status(conn: sqlalchemy.engine.Engine): for user_id in user_ids: logger.info("Deactivating user", user_id) - deactivate_user(se, user_id, system_user_id) + disable_user(se, user_id, system_user_id) se.commit() diff --git a/backend/data_tools/src/disable_inactive_users/queries.py b/backend/data_tools/src/disable_users/queries.py similarity index 100% rename from backend/data_tools/src/disable_inactive_users/queries.py rename to backend/data_tools/src/disable_users/queries.py diff --git a/backend/data_tools/tests/disable_inactive_users/__init__.py b/backend/data_tools/tests/disable_users/__init__.py similarity index 100% rename from backend/data_tools/tests/disable_inactive_users/__init__.py rename to backend/data_tools/tests/disable_users/__init__.py diff --git a/backend/data_tools/tests/disable_inactive_users/test_disable_inactive_users.py b/backend/data_tools/tests/disable_users/test_disable_users.py similarity index 88% rename from backend/data_tools/tests/disable_inactive_users/test_disable_inactive_users.py rename to backend/data_tools/tests/disable_users/test_disable_users.py index f19932c82e..6367f37f74 100644 --- a/backend/data_tools/tests/disable_inactive_users/test_disable_inactive_users.py +++ b/backend/data_tools/tests/disable_users/test_disable_users.py @@ -1,11 +1,7 @@ from unittest.mock import MagicMock, patch -from data_tools.src.disable_inactive_users.disable_inactive_users import ( - create_system_user, - deactivate_user, - update_disabled_users_status, -) -from data_tools.src.disable_inactive_users.queries import SYSTEM_USER_EMAIL, SYSTEM_USER_ID, SYSTEM_USER_OIDC_ID +from data_tools.src.disable_users.disable_users import create_system_user, disable_user, update_disabled_users_status +from data_tools.src.disable_users.queries import SYSTEM_USER_EMAIL, SYSTEM_USER_ID, SYSTEM_USER_OIDC_ID from models import OpsDBHistoryType, OpsEventStatus, OpsEventType, UserStatus @@ -58,7 +54,7 @@ def test_deactivate_user(): mock_se = MagicMock() mock_se.execute.return_value = [(1,), (2,)] - deactivate_user(mock_se, user_id, 
system_user_id) + disable_user(mock_se, user_id, system_user_id) assert mock_se.merge.call_count == 3 assert mock_se.add.call_count == 2 @@ -90,7 +86,7 @@ def test_deactivate_user(): assert ops_events_call[0][0].event_status == OpsEventStatus.SUCCESS assert ops_events_call[0][0].created_by == system_user_id -@patch("data_tools.src.disable_inactive_users.disable_inactive_users.logger") +@patch("data_tools.src.disable_users.disable_users.logger") def test_no_inactive_users(mock_logger): mock_conn = MagicMock() mock_se = MagicMock() diff --git a/docker-compose.yml b/docker-compose.yml index b4f6bd9b0d..9c0f4c7d39 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -45,18 +45,18 @@ services: db: condition: service_healthy - disable-inactive-users: + disable-users: profiles: - '' # This is the default profile, which is used for development build: context: ./backend/ dockerfile: Dockerfile.data-tools platform: linux/amd64 - container_name: disable-inactive-users + container_name: disable-users environment: - ENV=local - SQLALCHEMY_DATABASE_URI=postgresql://ops:ops@db:5432/postgres - command: ["/home/app/.venv/bin/python", "./data_tools/src/disable_inactive_users/disable_inactive_users.py"] + command: ["/home/app/.venv/bin/python", "./data_tools/src/disable_users/disable_users.py"] depends_on: db: condition: service_healthy From c89eb5d2bca0b155dcac37688994cd1ea381bcc8 Mon Sep 17 00:00:00 2001 From: maiyerlee Date: Mon, 14 Oct 2024 10:09:02 -0500 Subject: [PATCH 08/10] Update system admin creation --- backend/data_tools/data/user_data.json5 | 7 +++++++ backend/data_tools/src/disable_users/queries.py | 6 +++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/backend/data_tools/data/user_data.json5 b/backend/data_tools/data/user_data.json5 index 8dcd0443b1..d19a4cc099 100644 --- a/backend/data_tools/data/user_data.json5 +++ b/backend/data_tools/data/user_data.json5 @@ -556,6 +556,13 @@ oidc_id: "00000000-0000-1111-a111-000000000022", groups: [], status: "ACTIVE" + }, + { // 526 + first_name: "System", + last_name: "Admin", + email: "system.admin@email.com", + oidc_id: "00000000-0000-1111-a111-000000000026", + status: "LOCKED" } ], notification: [ diff --git a/backend/data_tools/src/disable_users/queries.py b/backend/data_tools/src/disable_users/queries.py index 27ddb54eae..f4ad496975 100644 --- a/backend/data_tools/src/disable_users/queries.py +++ b/backend/data_tools/src/disable_users/queries.py @@ -1,7 +1,7 @@ -EXCLUDED_USER_IDS = [520, 521, 522, 523] +EXCLUDED_USER_IDS = [520, 521, 522, 523, 525, 526] -SYSTEM_USER_ID = 524 -SYSTEM_USER_OIDC_ID = "00000000-0000-1111-a111-000000000022" +SYSTEM_USER_ID = 526 +SYSTEM_USER_OIDC_ID = "00000000-0000-1111-a111-000000000026" SYSTEM_USER_EMAIL = "system.admin@email.com" INACTIVE_USER_QUERY = ( From c48ff191cda4f7bc0b1b2655a5e125d27af95e30 Mon Sep 17 00:00:00 2001 From: maiyerlee Date: Thu, 17 Oct 2024 12:15:04 -0500 Subject: [PATCH 09/10] fix: use oidc_id instead of id --- .../src/disable_users/disable_users.py | 58 +++++--- .../data_tools/src/disable_users/queries.py | 16 ++- .../tests/disable_users/test_disable_users.py | 128 ++++++++++-------- 3 files changed, 116 insertions(+), 86 deletions(-) diff --git a/backend/data_tools/src/disable_users/disable_users.py b/backend/data_tools/src/disable_users/disable_users.py index 398cccd33b..b6fe16bda3 100644 --- a/backend/data_tools/src/disable_users/disable_users.py +++ b/backend/data_tools/src/disable_users/disable_users.py @@ -3,12 +3,11 @@ from data_tools.src.disable_users.queries 
import ( ALL_ACTIVE_USER_SESSIONS_QUERY, - EXCLUDED_USER_IDS, + EXCLUDED_USER_OIDC_IDS, + GET_USER_ID_BY_OIDC_QUERY, INACTIVE_USER_QUERY, - SYSTEM_USER_EMAIL, - SYSTEM_USER_ID, - SYSTEM_USER_OIDC_ID, - SYSTEM_USER_QUERY, + SYSTEM_ADMIN_EMAIL, + SYSTEM_ADMIN_OIDC_ID, ) from data_tools.src.import_static_data.import_data import get_config, init_db from sqlalchemy import text @@ -20,36 +19,50 @@ logger = logging.getLogger(__name__) -def create_system_user(se): +def get_ids_from_oidc_ids(se, oidc_ids: list): + """Retrieve user IDs corresponding to a list of OIDC IDs.""" + if not all(isinstance(oidc_id, str) for oidc_id in oidc_ids): + raise ValueError("All oidc_ids must be strings.") + + ids = [] + for oidc_id in oidc_ids: + user_id = se.execute(text(GET_USER_ID_BY_OIDC_QUERY), {"oidc_id": oidc_id}).scalar() + + if user_id is not None: + ids.append(user_id) + + return ids + + +def create_system_admin(se): """Create system user if it doesn't exist.""" - system_user = se.execute( - text(SYSTEM_USER_QUERY), - {"oidc_id": SYSTEM_USER_OIDC_ID} + system_admin = se.execute( + text(GET_USER_ID_BY_OIDC_QUERY), + {"oidc_id": SYSTEM_ADMIN_OIDC_ID} ).fetchone() - if system_user is None: + if system_admin is None: sys_user = User( - id=SYSTEM_USER_ID, - email=SYSTEM_USER_EMAIL, - oidc_id=SYSTEM_USER_OIDC_ID, + email=SYSTEM_ADMIN_EMAIL, + oidc_id=SYSTEM_ADMIN_OIDC_ID, status=UserStatus.LOCKED ) se.add(sys_user) se.commit() return sys_user.id - return system_user[0] + return system_admin[0] -def disable_user(se, user_id, system_user_id): +def disable_user(se, user_id, system_admin_id): """Deactivate a single user and log the change.""" - updated_user = User(id=user_id, status=UserStatus.INACTIVE, updated_by=system_user_id) + updated_user = User(id=user_id, status=UserStatus.INACTIVE, updated_by=system_admin_id) se.merge(updated_user) db_audit = build_audit(updated_user, OpsDBHistoryType.UPDATED) ops_db_history = OpsDBHistory( event_type=OpsDBHistoryType.UPDATED, - created_by=system_user_id, + created_by=system_admin_id, class_name=updated_user.__class__.__name__, row_key=db_audit.row_key, changes=db_audit.changes, @@ -59,7 +72,7 @@ def disable_user(se, user_id, system_user_id): ops_event = OpsEvent( event_type=OpsEventType.UPDATE_USER, event_status=OpsEventStatus.SUCCESS, - created_by=system_user_id, + created_by=system_admin_id, ) se.add(ops_event) @@ -68,7 +81,7 @@ def disable_user(se, user_id, system_user_id): updated_user_session = UserSession( id=session[0], is_active=False, - updated_by=system_user_id + updated_by=system_admin_id ) se.merge(updated_user_session) @@ -77,11 +90,12 @@ def update_disabled_users_status(conn: sqlalchemy.engine.Engine): """Update the status of disabled users in the database.""" with Session(conn) as se: logger.info("Checking for System User.") - system_user_id = create_system_user(se) + system_admin_id = create_system_admin(se) logger.info("Fetching inactive users.") results = se.execute(text(INACTIVE_USER_QUERY)).scalars().all() - user_ids = [uid for uid in results if uid not in EXCLUDED_USER_IDS] + excluded_ids = get_ids_from_oidc_ids(se, EXCLUDED_USER_OIDC_IDS) + user_ids = [uid for uid in results if uid not in excluded_ids] if not user_ids: logger.info("No inactive users found.") @@ -91,7 +105,7 @@ def update_disabled_users_status(conn: sqlalchemy.engine.Engine): for user_id in user_ids: logger.info("Deactivating user", user_id) - disable_user(se, user_id, system_user_id) + disable_user(se, user_id, system_admin_id) se.commit() diff --git 
a/backend/data_tools/src/disable_users/queries.py b/backend/data_tools/src/disable_users/queries.py index f4ad496975..a43397f922 100644 --- a/backend/data_tools/src/disable_users/queries.py +++ b/backend/data_tools/src/disable_users/queries.py @@ -1,8 +1,14 @@ -EXCLUDED_USER_IDS = [520, 521, 522, 523, 525, 526] +SYSTEM_ADMIN_OIDC_ID = "00000000-0000-1111-a111-000000000026" +SYSTEM_ADMIN_EMAIL = "system.admin@email.com" -SYSTEM_USER_ID = 526 -SYSTEM_USER_OIDC_ID = "00000000-0000-1111-a111-000000000026" -SYSTEM_USER_EMAIL = "system.admin@email.com" +EXCLUDED_USER_OIDC_IDS = [ + "00000000-0000-1111-a111-000000000018", # Admin Demo + "00000000-0000-1111-a111-000000000019", # User Demo + "00000000-0000-1111-a111-000000000020", # Director Dave + "00000000-0000-1111-a111-000000000021", # Budget Team + "00000000-0000-1111-a111-000000000022", # Director Derrek + SYSTEM_ADMIN_OIDC_ID # System Admin +] INACTIVE_USER_QUERY = ( "SELECT id " @@ -22,4 +28,4 @@ "ORDER BY created_on DESC" ) -SYSTEM_USER_QUERY = "SELECT id FROM ops_user WHERE oidc_id = :oidc_id" +GET_USER_ID_BY_OIDC_QUERY = "SELECT id FROM ops_user WHERE oidc_id = :oidc_id" diff --git a/backend/data_tools/tests/disable_users/test_disable_users.py b/backend/data_tools/tests/disable_users/test_disable_users.py index 6367f37f74..8e171baadd 100644 --- a/backend/data_tools/tests/disable_users/test_disable_users.py +++ b/backend/data_tools/tests/disable_users/test_disable_users.py @@ -1,98 +1,108 @@ from unittest.mock import MagicMock, patch -from data_tools.src.disable_users.disable_users import create_system_user, disable_user, update_disabled_users_status -from data_tools.src.disable_users.queries import SYSTEM_USER_EMAIL, SYSTEM_USER_ID, SYSTEM_USER_OIDC_ID +import pytest +from data_tools.src.disable_users.disable_users import ( + create_system_admin, + disable_user, + get_ids_from_oidc_ids, + update_disabled_users_status, +) +from data_tools.src.disable_users.queries import SYSTEM_ADMIN_EMAIL, SYSTEM_ADMIN_OIDC_ID from models import OpsDBHistoryType, OpsEventStatus, OpsEventType, UserStatus -system_user_id = 111 +system_admin_id = 111 -def test_create_system_user(): - mock_se = MagicMock() - mock_se.execute.return_value.fetchone.return_value = None +@pytest.fixture +def mock_session(): + """Fixture for creating a mock SQLAlchemy session.""" + session = MagicMock() + session.execute.return_value.fetchone.return_value = None + return session - result = create_system_user(mock_se) +def test_create_system_admin(mock_session): + create_system_admin(mock_session) - se_add = mock_se.add.call_args[0][0] - mock_se.execute.assert_called_once() - mock_se.add.assert_called_once() - mock_se.commit.assert_called_once() - assert result == SYSTEM_USER_ID - assert se_add.id == SYSTEM_USER_ID - assert se_add.email == SYSTEM_USER_EMAIL - assert se_add.oidc_id == SYSTEM_USER_OIDC_ID + se_add = mock_session.add.call_args[0][0] + mock_session.execute.assert_called_once() + mock_session.add.assert_called_once() + mock_session.commit.assert_called_once() + assert se_add.email == SYSTEM_ADMIN_EMAIL + assert se_add.oidc_id == SYSTEM_ADMIN_OIDC_ID assert se_add.first_name is None assert se_add.last_name is None +def test_return_existing_system_admin(mock_session): + mock_session.execute.return_value.fetchone.return_value = (system_admin_id,) -def test_return_existing_system_user(): - mock_se = MagicMock() - mock_se.execute.return_value.fetchone.return_value = (SYSTEM_USER_ID,) + result = create_system_admin(mock_session) - result = create_system_user(mock_se) + assert 
result == system_admin_id + mock_session.add.assert_not_called() + mock_session.commit.assert_not_called() - assert result == SYSTEM_USER_ID - mock_se.add.assert_not_called() - mock_se.commit.assert_not_called() - -def test_deactivate_user(): +def test_deactivate_user(mock_session): user_id = 1 db_history_changes = { - "id": { - "new": user_id, - "old": None - }, - "status": { - "new": "INACTIVE", - "old": None - }, - "updated_by": { - "new": system_user_id, - "old": None} + "id": {"new": user_id, "old": None}, + "status": {"new": "INACTIVE", "old": None}, + "updated_by": {"new": system_admin_id, "old": None} } - mock_se = MagicMock() - mock_se.execute.return_value = [(1,), (2,)] + mock_session.execute.return_value = [(1,), (2,)] - disable_user(mock_se, user_id, system_user_id) + disable_user(mock_session, user_id, system_admin_id) - assert mock_se.merge.call_count == 3 - assert mock_se.add.call_count == 2 + assert mock_session.merge.call_count == 3 + assert mock_session.add.call_count == 2 - user_call = mock_se.merge.call_args_list[0] + user_call = mock_session.merge.call_args_list[0] assert user_call[0][0].id == user_id assert user_call[0][0].status == UserStatus.INACTIVE - assert user_call[0][0].updated_by == system_user_id + assert user_call[0][0].updated_by == system_admin_id - user_session_call_1 = mock_se.merge.call_args_list[1] + user_session_call_1 = mock_session.merge.call_args_list[1] assert user_session_call_1[0][0].id == user_id - assert user_session_call_1[0][0].is_active == False - assert user_session_call_1[0][0].updated_by == system_user_id - - user_session_call_2 = mock_se.merge.call_args_list[1] - assert user_session_call_2[0][0].id == user_id - assert user_session_call_2[0][0].is_active == False - assert user_session_call_2[0][0].updated_by == system_user_id + assert user_session_call_1[0][0].is_active is False + assert user_session_call_1[0][0].updated_by == system_admin_id - ops_db_history_call = mock_se.add.call_args_list[0] + ops_db_history_call = mock_session.add.call_args_list[0] assert ops_db_history_call[0][0].event_type == OpsDBHistoryType.UPDATED - assert ops_db_history_call[0][0].created_by == system_user_id + assert ops_db_history_call[0][0].created_by == system_admin_id assert ops_db_history_call[0][0].class_name == 'User' assert ops_db_history_call[0][0].row_key == str(user_id) assert ops_db_history_call[0][0].changes == db_history_changes - ops_events_call = mock_se.add.call_args_list[1] + ops_events_call = mock_session.add.call_args_list[1] assert ops_events_call[0][0].event_type == OpsEventType.UPDATE_USER assert ops_events_call[0][0].event_status == OpsEventStatus.SUCCESS - assert ops_events_call[0][0].created_by == system_user_id + assert ops_events_call[0][0].created_by == system_admin_id @patch("data_tools.src.disable_users.disable_users.logger") -def test_no_inactive_users(mock_logger): - mock_conn = MagicMock() - mock_se = MagicMock() - mock_se.execute.return_value.all.return_value = None - update_disabled_users_status(mock_conn) +def test_no_inactive_users(mock_logger, mock_session): + mock_session.execute.return_value.all.return_value = None + update_disabled_users_status(mock_session) mock_logger.info.assert_any_call("Checking for System User.") mock_logger.info.assert_any_call("Fetching inactive users.") mock_logger.info.assert_any_call("No inactive users found.") + +def test_valid_oidc_ids(mock_session): + mock_session.execute.return_value.scalar.side_effect = [1, 2, None] # Mock responses for OIDC IDs + + oidc_ids = ["oidc_1", "oidc_2", 
"oidc_3"] + expected_ids = [1, 2] + + result = get_ids_from_oidc_ids(mock_session, oidc_ids) + assert result == expected_ids + + empty_result = get_ids_from_oidc_ids(mock_session, []) + assert empty_result == [] + +def test_invalid_oidc_id_type(mock_session): + oidc_ids = ["valid_oidc", 123, "another_valid_oidc"] + + with pytest.raises(ValueError) as context: + get_ids_from_oidc_ids(mock_session, oidc_ids) + + assert str(context.value) == "All oidc_ids must be strings." From 9c1059d8c4b3778ac45f06f586d2ab9c71e2a1dd Mon Sep 17 00:00:00 2001 From: maiyerlee Date: Mon, 21 Oct 2024 11:51:43 -0500 Subject: [PATCH 10/10] Improve ADR for readability --- ...-apps-jobs-for-disabling-inactive-users.md | 33 ++++++++++--------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/docs/adr/026-use-container-apps-jobs-for-disabling-inactive-users.md b/docs/adr/026-use-container-apps-jobs-for-disabling-inactive-users.md index 85b1b37825..1c992d5b7c 100644 --- a/docs/adr/026-use-container-apps-jobs-for-disabling-inactive-users.md +++ b/docs/adr/026-use-container-apps-jobs-for-disabling-inactive-users.md @@ -6,16 +6,14 @@ Date: 09\30\2024 Accepted ## Context - We want to set up a process to automatically disable user accounts after 60 days of inactivity. This pertains to the security compliance Access Control AC-02(03) Disable Accounts. ## Context and explanation of the tech concepts +Each row in the **ops_user** table represents a user account with a status of `ACTIVE, INACTIVE, or LOCKED`. +An `INACTIVE` status indicates a disabled account. -Each row in the ops_user table represents a user account with a status of ACTIVE, INACTIVE, or LOCKED. -An INACTIVE status indicates a disabled account. - -To check for inactivity, we'll use the last_active_at timestamp from the user_session table, which is updated whenever a user logs into OPS. +To check for inactivity, we'll use the _last_active_at_ timestamp from the **user_session** table, which is updated whenever a user logs into OPS. If the timestamp shows that a user hasn't logged in or started a session in 60 days or more, their account will be flagged for disabling. OPS runs in a Container App that includes a Data Import process executed through a Container Apps Job. @@ -50,18 +48,23 @@ The image deployed to this Container App is built from the Dockerfile.data-tools * We will use Container Apps Jobs to disable inactive user accounts. * The process will be scheduled hourly and ignore FakeAuth users. * Database changes executed by this process will be implemented with a SQLAlchemy script. +* Automatic processes will use the System Admin as the user. * OPS engineers will be notified if the process fails. + * The following ensures changes in user account status made by this process are recorded and logged in the same way as if a System Admin had manually disabled the user account: - * A UPDATE_USER event record is added to the ops_events table. - * All user sessions are deactivate by setting the active column to False. - * The ops_db_history table is updated with the changes made to the user account. + * A `UPDATE_USER` event record is added to the ops_events table. + * All user sessions are deactivate by setting the _active_ column to `False`. + * The **ops_db_history** table is updated with the changes made to the user account. ## Consequences -The decision to utilize Container Apps Jobs for managing inactive user accounts presents several positive consequences, such as streamlining the process and improving monitoring capabilities. 
-This integration will ensure consistent application of access controls with minimal manual intervention, while hourly scheduling will facilitate timely management of user accounts, reducing security risks. -Additionally, implementing a notification system will allow OPS engineers to promptly respond to any process failures, helping to ensure system integrity. -Leveraging the existing infrastructure minimizes the need for retraining and maximizes the use of current resources. - -Overall, while using Container Apps Jobs meets immediate operational needs, it’s important to consider its limitations and risks for long-term success. -Issues like limited metrics, potential issues in deploying new monitoring alerts, and reliance on changing infrastructure could pose challenges. +| **Category** | **Details** | **Impact** | +|-----------------|-----------------------------------------------------------------|------------------------------------------------| +| **Benefits** | Easier to implement | Increases efficiency | +| **Benefits** | Applies access controls consistently with minimal manual work | Reduces mistakes and adds security | +| **Benefits** | Hourly scheduling helps manage user accounts on time | Reduces potential security risk | +| **Benefits** | Notification aspect allows quick responses to process failures | Improves reliability and response time | +| **Benefits** | Uses existing resources | Saves time and money, no upskilling/retraining | +| **Limitations** | Limited metrics might lower effectiveness | May make decision-making harder | +| **Limitations** | Possible issues with setting up new monitoring alerts | Could result in missing important events | +| **Limitations** | Dependence on changing infrastructure might create challenges | Risks stability of operations |
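The core flow introduced by this series (find users whose sessions have been idle for 60+ days, then flip the account and its sessions to inactive) can be exercised in isolation. The sketch below is a simplified, standalone approximation only: it uses an in-memory SQLite database with throwaway ops_user / user_session tables instead of the project's models and Postgres, and it binds the 60-day cutoff as a parameter because SQLite has no `INTERVAL '60 days'` syntax. Treat it as an illustration of the INACTIVE_USER_QUERY / disable_user pattern, not the production script.

```python
from datetime import datetime, timedelta, timezone

from sqlalchemy import create_engine, text

engine = create_engine("sqlite+pysqlite:///:memory:")

with engine.begin() as conn:
    # Throwaway stand-ins for the real ops_user / user_session tables.
    conn.execute(text("CREATE TABLE ops_user (id INTEGER PRIMARY KEY, status TEXT)"))
    conn.execute(text(
        "CREATE TABLE user_session (id INTEGER PRIMARY KEY, user_id INTEGER, "
        "is_active BOOLEAN, last_active_at TEXT)"
    ))
    conn.execute(text("INSERT INTO ops_user (id, status) VALUES (1, 'ACTIVE')"))
    # One session whose last activity is well past the 60-day window.
    stale = (datetime.now(timezone.utc) - timedelta(days=90)).isoformat()
    conn.execute(
        text("INSERT INTO user_session (id, user_id, is_active, last_active_at) "
             "VALUES (10, 1, 1, :stale)"),
        {"stale": stale},
    )

# SQLite lacks INTERVAL arithmetic, so compute the cutoff in Python instead.
cutoff = (datetime.now(timezone.utc) - timedelta(days=60)).isoformat()

with engine.begin() as conn:
    # Analogue of INACTIVE_USER_QUERY: active accounts with stale session activity.
    stale_ids = conn.execute(
        text(
            "SELECT ou.id FROM ops_user ou "
            "JOIN user_session us ON us.user_id = ou.id "
            "WHERE ou.status = 'ACTIVE' AND us.last_active_at < :cutoff"
        ),
        {"cutoff": cutoff},
    ).scalars().all()

    for user_id in stale_ids:
        # Analogue of disable_user: mark the account INACTIVE and end its sessions.
        conn.execute(text("UPDATE ops_user SET status = 'INACTIVE' WHERE id = :id"), {"id": user_id})
        conn.execute(text("UPDATE user_session SET is_active = 0 WHERE user_id = :id"), {"id": user_id})

    print(stale_ids)  # -> [1]
```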
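Patch 05 moves build_audit into the models package so the script can record OpsDBHistory rows the same way the API does. The `changes` dictionary it produces is driven by SQLAlchemy's attribute history; the following is a minimal sketch of that mechanism on a hypothetical toy model (assuming SQLAlchemy 2.0-style declarative mapping, which may differ from the project's BaseModel), showing where the `{"new": ..., "old": ...}` pairs come from.

```python
from sqlalchemy import String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column
from sqlalchemy.orm.attributes import get_history


class Base(DeclarativeBase):
    pass


class Account(Base):
    # Hypothetical toy model; the real script audits the project's User model.
    __tablename__ = "account"
    id: Mapped[int] = mapped_column(primary_key=True)
    status: Mapped[str] = mapped_column(String(20))


engine = create_engine("sqlite+pysqlite:///:memory:")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Account(id=1, status="ACTIVE"))
    session.commit()

    account = session.get(Account, 1)
    assert account.status == "ACTIVE"  # load the current value so history has an "old" side
    account.status = "INACTIVE"

    # get_history exposes the pending old/new values before the flush; build_audit
    # walks the mapped columns and folds these into its `changes` dictionary.
    hist = get_history(account, "status")
    changes = {"status": {"new": hist.added[0], "old": hist.deleted[0]}}
    print(changes)  # {'status': {'new': 'INACTIVE', 'old': 'ACTIVE'}}
```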