From 55a6c255a0a2d6cc821cb9872f9fa4bcfb5005a0 Mon Sep 17 00:00:00 2001 From: David Mateo Date: Fri, 19 Feb 2021 13:49:40 +0100 Subject: [PATCH 01/21] Update 2.0 - more recent pip ansible, molecule and python - test-requirements version update - molecule file updated - update tests - update install tasks - update config tasks - update templates and new one for 2.0 airflow cfg --- .travis.yml | 13 +- .yamlint | 31 +- Pipfile | 6 +- defaults/main.yml | 550 ++++++--- .../default/{playbook.yml => converge.yml} | 2 +- molecule/default/molecule.yml | 34 +- molecule/default/tests/test_airflow.yml | 74 +- molecule/extra_packages/molecule.yml | 35 +- tasks/config.yml | 43 +- tasks/install.yml | 41 +- templates/airflow-environment-file.j2 | 5 +- templates/airflow2.cfg.j2 | 1013 +++++++++++++++++ test-requirements.txt | 6 +- 13 files changed, 1620 insertions(+), 233 deletions(-) rename molecule/default/{playbook.yml => converge.yml} (70%) create mode 100644 templates/airflow2.cfg.j2 diff --git a/.travis.yml b/.travis.yml index c54bf9a..81f6cbe 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,14 +1,17 @@ --- +dist: xenial language: python -python: "3.8.6" -sudo: required - +python: "3.8" +os: linux services: - docker - install: - - pip3 install pipenv + - pip install pipenv - pipenv sync +env: + jobs: + - MOLECULE_DISTRO=debian:stretch-slim + - MOLECULE_DISTRO=debian:buster-slim script: - pipenv run molecule test --all diff --git a/.yamlint b/.yamlint index ee83f1d..84510f6 100644 --- a/.yamlint +++ b/.yamlint @@ -1,9 +1,11 @@ +--- +# Based on ansible-lint config extends: default ignore: | .travis.yml - /molecule/ - /tests/ + molecule/ + tests/ rules: braces: @@ -12,7 +14,26 @@ rules: brackets: max-spaces-inside: 1 level: error + colons: + max-spaces-after: -1 + level: error + commas: + max-spaces-after: -1 + level: error + comments: disable + comments-indentation: disable + document-start: disable + empty-lines: + max: 3 + level: error + hyphens: + level: error + indentation: disable + key-duplicates: enable line-length: disable - # NOTE(retr0h): Templates no longer fail this lint rule. - # Uncomment if running old Molecule templates. 
- truthy: disable \ No newline at end of file + new-line-at-end-of-file: disable + new-lines: + type: unix + trailing-spaces: disable + truthy: disable + \ No newline at end of file diff --git a/Pipfile b/Pipfile index f5353af..b7b655f 100644 --- a/Pipfile +++ b/Pipfile @@ -6,10 +6,10 @@ verify_ssl = true [dev-packages] [packages] -ansible = "==2.8.8" -molecule = "==3.0.1" +ansible = "==2.9.9" +molecule = "==3.0.6" docker = "==4.1.0" ansible-lint = "==4.2.0" [requires] -python_version = "3.7" +python_version = "3.8" diff --git a/defaults/main.yml b/defaults/main.yml index 7720918..1079f57 100644 --- a/defaults/main.yml +++ b/defaults/main.yml @@ -1,19 +1,117 @@ --- ## General -airflow_version: 1.9.0 +airflow_app_name: airflow +airflow_version: 2.0.1 +airflow_package: apache-airflow +airflow_python_version: 3.7 +airflow_constraint_url: "https://raw.githubusercontent.com/apache/airflow/constraints-{{ airflow_version }}/constraints-{{ airflow_python_version }}.txt" # Available extra packages: -# https://airflow.apache.org/installation.html#extra-packages +# https://airflow.apache.org/docs/apache-airflow/stable/extra-packages-ref.html # List should follow Ansible's YAML basics: # https://docs.ansible.com/ansible/latest/reference_appendices/YAMLSyntax.html#yaml-basics -airflow_extra_packages: [] - -airflow_pip_executable: "pip" -airflow_executable: "/usr/local/bin/airflow" -airflow_root_path: "/opt/airflow" +airflow_bundle_package: # all|all_dbs|devel|devel_hadoop|devel_all|devel_ci See bundle extras section +# airflow_extra_packages: + # [Apache] + # - apache.atlas + # - apache.beam + # - apache.cassandra + # - apache.druid + # - apache.hdfs + # - apache.hive + # - apache.kylin + # - apache.livy + # - apache.pig + # - apache.pinot + # - apache.spark + # - apache.sqoop + # - apache.webhdfs + # [Services] + # - amazon + # - microsoft.azure + # - cloudant + # - databricks + # - datadog + # - dask + # - dingding + # - discord + # - facebook + # - google + # - github_enterprise + # - google_auth + # - hashicorp + # - jira + # - opsgenie + # - pagerduty + # - plexus + # - qubole + # - salesforce + # - sendgrid + # - segment + # - sentry + # - slack + # - snowflake + # - telegram + # - vertica + # - yandex + # - zendesk + # [Software] + # - async + # - celery + # - cncf.kubernetes + # - docker + # - elasticsearch + # - exasol + # - jenkins + # - ldap + # - mongo + # - microsoft.mssql + # - mysql + # - odbc + # - openfaas + # - oracle + # - postgres + # - password + # - presto + # - rabbitmq + # - redis + # - samba + # - singularity + # - statsd + # - tableau + # - virtualenv + # [Others] + # - cgroups + # - ftp + # - grpc + # - http + # - imap + # - jdbc + # - kerberos + # - papermill + # - sftp + # - sqlite + # - ssh + # - microsoft.winrm airflow_required_libs: + - python3 + - python3-dev + - python3-pip - python-pip - acl +# Airflow 2.0 system level requirements https://airflow.apache.org/docs/apache-airflow/stable/installation.html#system-dependencies + - freetds-bin + - krb5-user + - ldap-utils + - libffi6 + - libsasl2-2 + - libsasl2-modules + - libssl1.1 + - locales + - lsb-release + - sasl2-bin + - sqlite3 + - unixodbc airflow_required_python_packages: - {name: werkzeug, version: 0.16.1} @@ -24,6 +122,15 @@ airflow_required_python_packages: airflow_user: airflow airflow_group: airflow +# Admin user +airflow_admin_user: + username: admin + password: admin + role: Admin + firstname: admin + lastname: admin + email: email@domain.com + ## Service options airflow_scheduler_runs: 5 @@ -48,237 
+155,330 @@ airflow_services: path: airflow-flower.service.j2 # Files & Paths -airflow_home: /etc/airflow -airflow_dags_folder: "{{ airflow_home }}/dags" -airflow_logs_folder: /var/log/airflow -airflow_child_process_log_folder: "{{ airflow_logs_folder }}/scheduler" -airflow_pidfile_folder: "/run/airflow" +airflow_executable: "/usr/local/bin/airflow" +airflow_pip_executable: "pip3" +airflow_app_home: "/opt/{{ airflow_app_name }}" +airflow_home: "/home/{{ airflow_user }}" +airflow_conf_path: "/etc/{{ airflow_app_name }}" +airflow_pidfile_folder: "/run/{{ airflow_app_name }}" airflow_environment_file_folder: /etc/sysconfig airflow_environment_extra_vars: [] # - name: PATH # value: "/custom/path/bin:$PATH" +airflow_skeleton_paths: + - "{{ airflow_app_home }}" + - "{{ airflow_home }}" + - "{{ airflow_conf_path }}" + - "{{ airflow_dags_folder }}" + - "{{ airflow_logs_folder }}" + - "{{ airflow_child_process_log_folder }}" + - "{{ airflow_pidfile_folder }}" + - "{{ airflow_environment_file_folder }}" + - "{{ airflow_plugins_folder }}" # Allowing playbooks to provide external config files&templates airflow_extra_conf_path: "{{ playbook_dir }}/files/airflow" airflow_extra_conf_template_path: "{{ playbook_dir }}/templates/airflow" airflow_config_template_path: airflow.cfg.j2 +airflow_config_template_path_v2: airflow2.cfg.j2 -# AIRFLOW CONFIGURATION -# --------------------- -airflow_logging_level: "INFO" -airflow_fab_logging_level: "WARN" +# DICT +celery_extra_packages: -airflow_logging_config_class: +# This Celery version is guaranteed to work with Airflow 1.8.x +celery_version: 3.1.17 -airflow_log_format: > - "[%%(asctime)s] {{ '{' }}%%(filename)s:%%(lineno)d} %%(levelname)s - %%(message)s" -airflow_simple_log_format: "%%(asctime)s %%(levelname)s - %%(message)s" +# DAGs +# Python dependencies needed by the DAGs. This variable is expected to be a +# list of items following the structure provided in the example comment +dags_dependencies: +# - {name: pip_package, version: version_needed} -airflow_log_filename_template: > - "{{ '{{' }} ti.dag_id {{ '}}' }}/{{ '{{' }} ti.task_id {{ '}}' }}/{{ '{{' }} ts {{ '}}' }}/{{ '{{' }} try_number {{ '}}' }}.log" -airflow_log_processor_filename_template: "{{ '{{' }} filename {{ '}}' }}.log" +# AIRFLOW CONFIGURATION https://airflow.apache.org/docs/apache-airflow/stable/configurations-ref.html +# --------------------- +# [core] +airflow_dags_folder: "{{ airflow_home }}/dags" airflow_hostname_callable: "socket:getfqdn" - airflow_default_timezone: "utc" - -airflow_task_log_reader: "task" -airflow_enable_xcom_pickling: "True" -airflow_killed_task_cleanup_time: 60 -airflow_dag_run_conf_overrides_params: "False" - -airflow_load_examples: true - # The executor class that airflow should use. 
Choices include # SequentialExecutor, LocalExecutor, CeleryExecutor, DaskExecutor airflow_executor: SequentialExecutor - +airflow_database_conn: "sqlite:///{{ airflow_app_home }}/airflow.db" +airflow_database_engine_encoding: utf-8 +airflow_database_engine_collation_for_ids: +airflow_database_pool_enabled: True +airflow_database_pool_size: 5 +airflow_database_max_overflow: 10 +airflow_database_pool_recycle: 2000 +airflow_database_pool_pre_ping: True +airflow_database_schema: +airflow_database_connect_args: airflow_parallelism: 32 airflow_dag_concurrency: 16 airflow_dags_are_paused_at_creation: true -airflow_non_pooled_task_slot_count: 128 airflow_max_active_runs_per_dag: 16 - +airflow_load_examples: False +airflow_load_default_connections: False +airflow_plugins_folder: "{{ airflow_app_home }}/plugins" +airflow_execute_tasks_new_python_interpreter: False airflow_fernet_key: - airflow_donot_pickle: false airflow_dagbag_import_timeout: 30 - +airflow_dagbag_import_error_tracebacks: True +airflow_dagbag_import_error_traceback_depth: 2 +airflow_dag_file_processor_timeout: 50 airflow_task_runner: BashTaskRunner - airflow_default_impersonation: +airflow_security: airflow_unit_test_mode: false +airflow_enable_xcom_pickling: "True" +airflow_killed_task_cleanup_time: 60 +airflow_dag_run_conf_overrides_params: False +airflow_dag_discovery_safe_mode: True +airflow_default_task_retries: 0 +airflow_min_serialized_dag_update_interval: 30 +airflow_min_serialized_dag_fetch_interval: 10 +airflow_store_dag_code: False +airflow_max_num_rendered_ti_fields_per_task: 30 +airflow_check_slas: True +airflow_xcom_backend: airflow.models.xcom.BaseXCom +airflow_lazy_load_plugins: True +airflow_lazy_discover_providers: True +airflow_max_db_retries: 3 -airflow_plugins_folder: "{{ airflow_home }}/plugins" +airflow_non_pooled_task_slot_count: 128 -## REMOTE LOGS -airflow_remote_base_log_folder: +# [logging] +airflow_logs_folder: /var/log/airflow +airflow_remote_logging: False airflow_remote_log_conn_id: +airflow_google_key_path: +airflow_remote_base_log_folder: airflow_encrypt_s3_logs: false -# DEPRECATED -airflow_s3_log_folder: +airflow_logging_level: "INFO" +airflow_fab_logging_level: "WARN" +airflow_logging_config_class: +airflow_colored_console_log: True +colored_log_format: "[%%(blue)s%%(asctime)s%%(reset)s] {{ '{{' }}%%(blue)s%%(filename)s:%%(reset)s%%(lineno)d{{ '}}' }} %%(log_color)s%%(levelname)s%%(reset)s - %%(log_color)s%%(message)s%%(reset)s" +colored_formatter_class: airflow.utils.log.colored_log.CustomTTYColoredFormatter +airflow_log_format: > + "[%%(asctime)s] {{ '{' }}%%(filename)s:%%(lineno)d} %%(levelname)s - %%(message)s" +airflow_simple_log_format: "%%(asctime)s %%(levelname)s - %%(message)s" +airflow_task_log_prefix_template: +airflow_log_filename_template: > + "{{ '{{' }} ti.dag_id {{ '}}' }}/{{ '{{' }} ti.task_id {{ '}}' }}/{{ '{{' }} ts {{ '}}' }}/{{ '{{' }} try_number {{ '}}' }}.log" +airflow_log_processor_filename_template: "{{ '{{' }} filename {{ '}}' }}.log" +airflow_dag_processor_manager_log_location: "{airflow_logs_folder}/dag_processor_manager/dag_processor_manager.log" +airflow_task_log_reader: "task" +airflow_extra_loggers: -## DB -airflow_database_conn: sqlite:///{{ airflow_home }}/airflow.db -airflow_database_pool_size: 5 -airflow_database_pool_recycle: 2000 +# [metrics] +# STASTD +airflow_statsd_on: false +airflow_statsd_host: localhost +airflow_statsd_port: 8125 +airflow_statsd_prefix: airflow +airflow_statsd_allow_list: +airflow_stat_name_handler: +airflow_statsd_datadog_enabled: 
False +airflow_statsd_datadog_tags: +airflow_statsd_custom_client_path: + +# [secrets] +airflow_secrets_backend: +airflow_backend_kwargs: -## CLI +# [cli] airflow_cli_api_client: airflow.api.client.local_client airflow_cli_api_endpoint_url: http://localhost:8080 -## API +# [debug] +airflow_fail_fast: False + +# [api] +airflow_enable_experimental_api: False airflow_auth_backend: airflow.api.auth.backend.default +airflow_maximum_page_limit: 100 +airflow_fallback_page_limit: 100 +airflow_google_oauth2_audience: -## LINEAGE +# [lineage] airflow_lineage_backend: -## ATLAS -airflow_atlas_sasl_enabled: "False" +# [atlas] +airflow_atlas_sasl_enabled: False airflow_atlas_host: airflow_atlas_port: 21000 airflow_atlas_username: airflow_atlas_password: -## OPERATORS -airflow_operator_default_owner: Airflow +# [operators] +airflow_operator_default_owner: airflow airflow_operator_default_cpus: 1 airflow_operator_default_ram: 512 airflow_operator_default_disk: 512 airflow_operator_default_gpus: 0 +airflow_allow_illegal_arguments: False -## HIVE +# [hive] airflow_default_hive_mapred_queue: +airflow_mapred_job_name_template: -## WEBSERVER -airflow_webserver_base_url: http://localhost:8080 +# [webserver] +airflow_webserver_base_url: "http://localhost:{{ airflow_webserver_port }}" +airflow_webserver_default_ui_timezone: UTC airflow_webserver_host: 0.0.0.0 airflow_webserver_port: 8080 -airflow_webserver_workers: 4 -airflow_webserver_worker_timeout: 120 airflow_webserver_ssl_cert: airflow_webserver_ssl_key: airflow_webserver_master_timeout: 120 +airflow_webserver_worker_timeout: 120 airflow_webserver_worker_refresh_batch_size: 1 airflow_webserver_worker_refresh_interval: 30 +airflow_webserver_reload_on_plugin_change: False airflow_webserver_secret_key: temporary_key +airflow_webserver_workers: 4 airflow_webserver_worker_class: sync -airflow_webserver_expose_config: false -airflow_webserver_filter_by_owner: false -airflow_webserver_owner_mode: user +airflow_webserver_access_logfile: "{{ airflow_logs_folder }}/gunicorn-access.log" +airflow_webserver_error_logfile: "{{ airflow_logs_folder }}/gunicorn-error.log" +airflow_webserver_access_logformat: +airflow_webserver_expose_config: False +airflow_webserver_expose_hostname: True +airflow_webserver_expose_stacktrace: True +airflow_webserver_dag_default_view: tree # Valid values are: tree, graph, duration, gantt, landing_times airflow_webserver_dag_orientation: LR -airflow_webserver_demo_mode: false +airflow_webserver_demo_mode: False airflow_webserver_log_fetch_timeout_sec: 5 -airflow_webserver_hide_paused_dags_by_default: false +airflow_webserver_log_fetch_delay_sec: 2 +airflow_webserver_log_auto_tailing_offset: 30 +airflow_webserver_log_animation_speed: 1000 +airflow_webserver_hide_paused_dags_by_default: False airflow_webserver_page_size: 100 -airflow_webserver_rbac: "False" airflow_webserver_navbar_color: "#007A87" airflow_webserver_default_dag_run_display_number: 25 +airflow_webserver_enable_proxy_fix: False +airflow_webserver_proxy_fix_x_for: 1 +airflow_webserver_proxy_fix_x_proto: 1 +airflow_webserver_proxy_fix_x_host: 1 +airflow_webserver_proxy_fix_x_port: 1 +airflow_webserver_proxy_fix_x_prefix: 1 +airflow_webserver_cookie_secure: False +airflow_webserver_cookie_samesite: Lax +airflow_webserver_default_wrap: False +airflow_webserver_x_frame_enabled: True +airflow_webserver_analytics_tool: +airflow_webserver_analytics_id: +airflow_webserver_show_recent_stats_for_completed_runs: True +airflow_webserver_update_fab_perms: True 
+airflow_webserver_session_lifetime_minutes: 43200 + +airflow_webserver_filter_by_owner: False +airflow_webserver_owner_mode: user +airflow_webserver_rbac: "False" -## Webserver Authentication -# (http://pythonhosted.org/airflow/security.html#web-authentication) - -# Choices of auth_backend include: -# - airflow.contrib.auth.backends.password_auth -# - airflow.contrib.auth.backends.ldap_auth -# - airflow.contrib.auth.backends.github_enterprise_auth -# - others? :) -airflow_webserver_authenticate: false -airflow_webserver_auth_backend: - -## LDAP (only applies if airflow_webserver_auth_backend == "airflow.contrib.auth.backends.ldap_auth") -# Bear in mind that, starting with Airflow 1.10.0, PyPi package pyasn1 v0.4.4 is needed -airflow_ldap_uri: -airflow_ldap_user_filter: -airflow_ldap_user_name_attr: -airflow_ldap_superuser_filter: -airflow_ldap_data_profiler_filter: -airflow_ldap_bind_user: -airflow_ldap_bind_password: -airflow_ldap_basedn: -airflow_ldap_cacert: -airflow_ldap_search_scope: - -## MAIL +# [email] airflow_email_backend: airflow.utils.email.send_email_smtp +airflow_email_default_email_on_retry: True +airflow_email_default_email_on_failure: True +airflow_email_subject_template: +airflow_email_html_content_template: -## SMTP +# [smtp] airflow_smtp_host: localhost -airflow_smtp_starttls: true -airflow_smtp_ssl: true +airflow_smtp_starttls: True +airflow_smtp_ssl: True airflow_smtp_port: 25 airflow_smtp_mail_from: airflow@airflow.com airflow_smtp_user: airflow_smtp_passwd: -## DASK -airflow_dask_cluster_address: 127.0.0.1:8786 -airflow_dask_tls_ca: -airflow_dask_tls_cert: -airflow_dask_tls_key: +# [sentry] +airflow_sentry_on: false +airflow_sentry_dsn: -## SCHEDULER -airflow_scheduler_job_heartbeat_sec: 5 -airflow_scheduler_heartbeat_sec: 5 -airflow_scheduler_run_duration: -1 -airflow_scheduler_min_file_process_interval: 0 -airflow_scheduler_min_file_parsing_loop_time: 1 -airflow_scheduler_dag_dir_list_interval: 300 -airflow_scheduler_print_stats_interval: 30 -airflow_scheduler_zombie_task_threshold: 300 -airflow_scheduler_catchup_by_default: true -airflow_scheduler_max_threads: 2 -airflow_scheduler_authenticate: true -airflow_scheduler_max_tis_per_query: 512 - -## STASTD -airflow_statsd_on: false -airflow_statsd_host: localhost -airflow_statsd_port: 8125 -airflow_statsd_prefix: airflow +# [celery_kubernetes_executor] +airflow_kubernetes_queue: kubernetes -## CELERY +# [celery] airflow_celery_app_name: airflow.executors.celery_executor airflow_celery_concurrency: 16 +airflow_celery_worker_autoscale: +airflow_celery_worker_prefetch_multiplier: airflow_celery_worker_log_server_port: 8793 +airflow_celery_worker_umask: 0o077 airflow_celery_broker_url: sqla+mysql://airflow:airflow@localhost:3306/airflow airflow_celery_result_backend: db+mysql://airflow:airflow@localhost:3306/airflow airflow_celery_default_queue: default -airflow_celery_ssl_active: "False" +airflow_celery_sync_parallelism: 0 +airflow_celery_config_options: airflow.config_templates.default_celery.DEFAULT_CELERY_CONFIG +airflow_celery_ssl_active: False airflow_celery_ssl_key: airflow_celery_ssl_cert: airflow_celery_ssl_cacert: +airflow_celery_pool: prefork +airflow_celery_operation_timeout: 1.0 +airflow_celery_task_track_started: True +airflow_celery_task_adoption_timeout: 600 +airflow_celery_task_publish_max_retries: 3 +airflow_celery_worker_precheck: False +# [flower] +airflow_flower_host: 0.0.0.0 +airflow_flower_url_prefix: +airflow_flower_port: 5555 +airflow_flower_basic_auth: -# DICT -celery_extra_packages: +# 
[celery_broker_transport_options] +airflow_celery_broker_visibility_timeout: -# This Celery version is guaranteed to work with Airflow 1.8.x -celery_version: 3.1.17 +# [dask] +airflow_dask_cluster_address: 127.0.0.1:8786 +airflow_dask_tls_ca: +airflow_dask_tls_cert: +airflow_dask_tls_key: -## FLOWER -airflow_flower_host: 0.0.0.0 -airflow_flower_port: 5555 +# [scheduler] +airflow_scheduler_job_heartbeat_sec: 5 +airflow_scheduler_clean_tis_without_dagrun_interval: 15.0 +airflow_scheduler_heartbeat_sec: 5 +airflow_scheduler_num_runs: -1 +airflow_scheduler_processor_poll_interval: 1 +airflow_scheduler_min_file_process_interval: 30 +airflow_scheduler_dag_dir_list_interval: 300 +airflow_scheduler_print_stats_interval: 30 +airflow_scheduler_pool_metrics_interval: 5.0 +airflow_scheduler_scheduler_health_check_threshold: 30 +airflow_scheduler_orphaned_tasks_check_interval: 300.0 +airflow_child_process_log_folder: "{{ airflow_logs_folder }}/scheduler" +airflow_scheduler_zombie_task_threshold: 300 +airflow_scheduler_catchup_by_default: True +airflow_scheduler_max_tis_per_query: 512 +airflow_scheduler_use_row_level_locking: True +airflow_scheduler_max_dagruns_to_create_per_loop: 10 +airflow_scheduler_max_dagruns_per_loop_to_schedule: 20 +airflow_scheduler_schedule_after_task_execution: True +airflow_scheduler_parsing_processes: 2 +airflow_scheduler_use_job_schedule: True +airflow_scheduler_allow_trigger_in_future: False -## MESOS -airflow_mesos_master_host: localhost:5050 -airflow_mesos_framework_name: Airflow -airflow_mesos_task_cpu: 1 -airflow_mesos_task_memory: 256 -airflow_mesos_checkpoint: false -airflow_mesos_authenticate: false +airflow_scheduler_run_duration: -1 +airflow_scheduler_min_file_parsing_loop_time: 1 +airflow_scheduler_max_threads: 2 +airflow_scheduler_authenticate: True -## KERBEROS +# [kerberos] airflow_kerberos_ccache: /tmp/airflow_krb5_ccache airflow_kerberos_principal: airflow airflow_kerberos_reinit_frequency: 3600 airflow_kerberos_kinit_path: kinit airflow_kerberos_keytab: airflow.keytab -# GITHUB ENTEPRISE +# [github_enterprise] airflow_github_enterprise_api_rev: v3 -## ADMIN -airflow_admin_hide_sensitive_variable_fields: true +# [admin] +airflow_admin_hide_sensitive_variable_fields: True +airflow_admin_sensitive_variable_fields: airflow_admin_variables: [] # - key: bucket_name # value: logs_foo @@ -289,17 +489,39 @@ airflow_admin_connections: [] # - conn_id: pg_conn # conn_uri: postgres://user:passwordh@host:5432/dbname -## ELASTICSEARCH +# [elasticsearch] airflow_elasticsearch_host: airflow_elasticsearch_log_id_template: > "{dag_id}-{task_id}-{execution_date}-{try_number}" airflow_elasticsearch_end_of_log_mark: "end_of_log" +airflow_elasticsearch_frontend: +airflow_elasticsearch_write_stdout: False +airflow_elasticsearch_json_format: False +airflow_elasticsearch_json_fields: asctime, filename, lineno, levelname, message + +# [elasticsearch_configs] +airflow_elasticsearch_configs_use_ssl: False +airflow_elasticsearch_configs_verify_certs: True -## KUBERNETES +# [kubernetes] +airflow_kubernetes_pod_template_file: airflow_kubernetes_worker_container_repository: airflow_kubernetes_worker_container_tag: -airflow_kubernetes_delete_worker_pods: "True" -airflow_kubernetes_namespace: "default" +airflow_kubernetes_namespace: default +airflow_kubernetes_delete_worker_pods: True +airlfow_kubernetes_delete_worker_pods_on_failure: False +airflow_kubernetes_worker_pods_creation_batch_size: 1 +airflow_kubernetes_multi_namespace_mode: False +airflow_kubernetes_in_cluster: True 
+airflow_kubernetes_cluster_context: +airflow_kubernetes_config_file: +airflow_kubernetes_kube_client_request_args: +airflow_kubernetes_delete_option_kwargs: +airflow_kubernetes_enable_tcp_keepalive: False +airflow_kubernetes_tcp_keep_idle: 120 +airflow_kubernetes_tcp_keep_intvl: 30 +airflow_kubernetes_tcp_keep_cnt: 6 + airflow_kubernetes_airflow_configmap: airflow_kubernetes_dags_volume_subpath: airflow_kubernetes_dags_volume_claim: @@ -317,10 +539,44 @@ airflow_kubernetes_git_sync_init_container_name: "git-sync-clone" airflow_kubernetes_worker_service_account_name: airflow_kubernetes_image_pull_secrets: airflow_kubernetes_gcp_service_account_keys: -airflow_kubernetes_in_cluster: "True" -## DAGs -# Python dependencies needed by the DAGs. This variable is expected to be a -# list of items following the structure provided in the example comment -dags_dependencies: -# - {name: pip_package, version: version_needed} +# [smart_sensor] +airflow_use_smart_sensor: False +airflow_shard_code_upper_limit: 10000 +airflow_shards: 5 +airflow_sensors_enabled: NamedHivePartitionSensor + +# [Webserver Authentication] +# (http://pythonhosted.org/airflow/security.html#web-authentication) + +# Choices of auth_backend include: +# - airflow.contrib.auth.backends.password_auth +# - airflow.contrib.auth.backends.ldap_auth +# - airflow.contrib.auth.backends.github_enterprise_auth +# - others? :) +airflow_webserver_authenticate: false +airflow_webserver_auth_backend: + +# [LDAP] (only applies if airflow_webserver_auth_backend == "airflow.contrib.auth.backends.ldap_auth") +# Bear in mind that, starting with Airflow 1.10.0, PyPi package pyasn1 v0.4.4 is needed +airflow_ldap_uri: +airflow_ldap_user_filter: +airflow_ldap_user_name_attr: +airflow_ldap_superuser_filter: +airflow_ldap_data_profiler_filter: +airflow_ldap_bind_user: +airflow_ldap_bind_password: +airflow_ldap_basedn: +airflow_ldap_cacert: +airflow_ldap_search_scope: + +# [mesos] +airflow_mesos_master_host: localhost:5050 +airflow_mesos_framework_name: Airflow +airflow_mesos_task_cpu: 1 +airflow_mesos_task_memory: 256 +airflow_mesos_checkpoint: false +airflow_mesos_authenticate: false + +# DEPRECATED +airflow_s3_log_folder: diff --git a/molecule/default/playbook.yml b/molecule/default/converge.yml similarity index 70% rename from molecule/default/playbook.yml rename to molecule/default/converge.yml index f0c41cb..7968f82 100644 --- a/molecule/default/playbook.yml +++ b/molecule/default/converge.yml @@ -1,6 +1,6 @@ --- - name: Converge - hosts: all + hosts: airflow_group roles: - role: airflow-role diff --git a/molecule/default/molecule.yml b/molecule/default/molecule.yml index 58b5f66..ab56ab0 100644 --- a/molecule/default/molecule.yml +++ b/molecule/default/molecule.yml @@ -6,26 +6,36 @@ driver: lint: | yamllint . ansible-lint . 
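With the converge playbook now targeting the `airflow_group` inventory group, a consuming playbook follows the same shape as the scenario below. A minimal sketch (the version pin, executor, and extras here are illustrative choices, not role defaults beyond what defaults/main.yml already sets):

```yaml
---
# Example playbook: apply the role to hosts grouped under airflow_group,
# pinning Airflow 2.0.1 and pulling in two of the documented extras.
- name: Provision Airflow 2.0
  hosts: airflow_group
  become: true
  roles:
    - role: airflow-role
      vars:
        airflow_version: 2.0.1
        airflow_executor: LocalExecutor
        airflow_extra_packages:
          - postgres
          - ssh
```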
-
 platforms:
-  - name: airflow-${MOLECULE_DISTRO:-stretch}
-    image: debian:${MOLECULE_DISTRO:-stretch}
-    privileged: true
+  - name: airflow
+    groups:
+      - airflow_group
+    image: ${MOLECULE_DISTRO:-debian:buster-slim}
+    privileged: false
     capabilities:
       - SYS_ADMIN
+    tmpfs:
+      - /tmp
+      - /run
+      - /run/lock
     volumes:
       - '/sys/fs/cgroup:/sys/fs/cgroup:ro'
-    groups:
-      - airflow
     command: '/lib/systemd/systemd'
-
+    stop_signal: 'RTMIN+3'
+    exposed_ports:
+      - 8088/tcp
+      - 8080/tcp
+      - 8081/tcp
+      - 5000/tcp
+    published_ports:
+      - 0.0.0.0:8088:8088/tcp
+      - 0.0.0.0:8080:8080/tcp
+      - 0.0.0.0:8081:8081/tcp
+      - 0.0.0.0:5000:5000/tcp
 provisioner:
   name: ansible
-  lint:
-    name: ansible-lint
-    enabled: false
-scenario:
-  name: default
+# scenario:
+#   name: default
 verifier:
   name: ansible
diff --git a/molecule/default/tests/test_airflow.yml b/molecule/default/tests/test_airflow.yml
index b0c4e4e..034e1cd 100644
--- a/molecule/default/tests/test_airflow.yml
+++ b/molecule/default/tests/test_airflow.yml
@@ -1,25 +1,73 @@
 ---
+
+command:
+  /usr/local/bin/airflow version:
+    exit-status: 0
+    stdout:
+      - "{{ airflow_version }}"
 service:
-{% for airflow_service in airflow_services %}
-{% if airflow_services[airflow_service]["enabled"] %}
-  {{ airflow_service }}:
-    enabled: true
+  airflow-webserver:
+    enabled: "{{ airflow_services['airflow-webserver'].enabled }}"
+    state: "{{ airflow_services['airflow-webserver'].state }}"
+    running: true
+  airflow-scheduler:
+    enabled: "{{ airflow_services['airflow-scheduler'].enabled }}"
+    state: "{{ airflow_services['airflow-scheduler'].state }}"
+    running: true
+  airflow-worker:
+    enabled: "{{ airflow_services['airflow-worker'].enabled }}"
+    state: "{{ airflow_services['airflow-worker'].state }}"
+  airflow-flower:
+    enabled: "{{ airflow_services['airflow-flower'].enabled }}"
+    state: "{{ airflow_services['airflow-flower'].state }}"
+process:
+  {{ airflow_exec_name }}:
     running: true
-{% endif %}
-{% endfor %}
-
 user:
   {{ airflow_user }}:
     exists: true
     groups:
       - {{ airflow_group }}
-
+    shell: /usr/sbin/nologin
 group:
   {{ airflow_group }}:
    exists: true
+port:
+  tcp:{{ airflow_webserver_port }}:
+    listening: true
+    ip:
+      - {{ airflow_webserver_host }}
+  tcp:{{ airflow_flower_port }}:
+    listening: true
+    ip:
+      - {{ airflow_flower_host }}
+http:
+  {{ airflow_health_url }}:
+    status: 200
+# file:
+#   {{ airflow_bin_path }}:
+#     exists: true
+#     filetype: directory
+#     owner: {{ airflow_user }}
+#     group: {{ airflow_group }}
+#   {{ airflow_metadatadb_path }}:
+#     exists: true
+#     filetype: directory
+#     owner: {{ airflow_user }}
+#     group: {{ airflow_group }}
+#   {{ airflow_conf_path }}:
+#     exists: true
+#     filetype: directory
+#     owner: {{ airflow_user }}
+#     group: {{ airflow_group }}
+#   {{ airflow_config_file_path }}:
+#     exists: true
+#     filetype: file
+#     owner: {{ airflow_user }}
+#     group: {{ airflow_group }}
+#     contains:
+#       - "/APP_NAME\\s*=\\s.+/"
+#       - "/airflow_WEBSERVER_PORT\\s*=\\s\\d+/"
+#       - "/SECRET_KEY\\s*=\\s'.+'/"
+#       - "/SQLALCHEMY_DATABASE_URI\\s*=\\s'.+'/"
 
-command:
-  /usr/local/bin/airflow version:
-    exit-status: 0
-    stdout:
-      - "{{ airflow_version }}"
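The rewritten goss template reads `enabled` and `state` for each unit straight out of `airflow_services` (hyphenated keys need the bracket lookup shown above), so the verifier asserts exactly what a playbook configures. A sketch of an override for a LocalExecutor host, assuming the `enabled`/`state`/`path` keys that the tests and the daemon-script task reference (the exact default values live in defaults/main.yml):

```yaml
---
# Keep all four units defined so lookups such as
# airflow_services['airflow-worker'].enabled still resolve,
# but only run the webserver and scheduler.
airflow_services:
  airflow-webserver:
    enabled: true
    state: started
    path: airflow-webserver.service.j2
  airflow-scheduler:
    enabled: true
    state: started
    path: airflow-scheduler.service.j2
  airflow-worker:
    enabled: false
    state: stopped
    path: airflow-worker.service.j2
  airflow-flower:
    enabled: false
    state: stopped
    path: airflow-flower.service.j2
```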
diff --git a/molecule/extra_packages/molecule.yml b/molecule/extra_packages/molecule.yml
index f6d4c98..946eb39 100644
--- a/molecule/extra_packages/molecule.yml
+++ b/molecule/extra_packages/molecule.yml
@@ -6,25 +6,34 @@ driver:
 lint: |
   yamllint .
   ansible-lint .
-
 platforms:
-  - name: airflow_extra_packages-${MOLECULE_DISTRO:-stretch}
-    image: debian:${MOLECULE_DISTRO:-stretch}
-    privileged: true
+  - name: airflow-extra-packages
+    image: ${MOLECULE_DISTRO:-debian:buster-slim}
+    privileged: false
     capabilities:
       - SYS_ADMIN
+    tmpfs:
+      - /tmp
+      - /run
+      - /run/lock
     volumes:
       - '/sys/fs/cgroup:/sys/fs/cgroup:ro'
-    groups:
-      - airflow
     command: '/lib/systemd/systemd'
-
+    stop_signal: 'RTMIN+3'
+    exposed_ports:
+      - 8088/tcp
+      - 8080/tcp
+      - 8081/tcp
+      - 5000/tcp
+    published_ports:
+      - 0.0.0.0:8088:8088/tcp
+      - 0.0.0.0:8080:8080/tcp
+      - 0.0.0.0:8081:8081/tcp
+      - 0.0.0.0:5000:5000/tcp
 provisioner:
   name: ansible
-  lint:
-    name: ansible-lint
-    enabled: false
-scenario:
-  name: extra_packages
+# scenario:
+#   name: extra_packages
 verifier:
-  name: ansible
\ No newline at end of file
+  name: ansible
+
diff --git a/tasks/config.yml b/tasks/config.yml
index 32705e5..7b5cac8 100644
--- a/tasks/config.yml
+++ b/tasks/config.yml
@@ -20,14 +20,30 @@
     owner: root
     group: root
 
-- name: Airflow | Copy basic airflow config file
+- name: Airflow | Copy basic airflow config file (< 2.0)
   template:
     src: "{{ airflow_config_template_path }}"
-    dest: "{{ airflow_home }}/airflow.cfg"
+    dest: "{{ airflow_conf_path }}/airflow.cfg"
     owner: "{{ airflow_user }}"
     group: "{{ airflow_group }}"
     mode: 0640
   register: airflow_config
+  when: airflow_version is version( '2.0.0', '<')
+  notify:
+    - restart airflow-webserver
+    - restart airflow-scheduler
+    - restart airflow-worker
+    - restart airflow-flower
+
+- name: Airflow | Copy basic airflow config file (>= 2.0)
+  template:
+    src: "{{ airflow_config_template_path_v2 }}"
+    dest: "{{ airflow_conf_path }}/airflow.cfg"
+    owner: "{{ airflow_user }}"
+    group: "{{ airflow_group }}"
+    mode: 0640
+  register: airflow_config
+  when: airflow_version is version( '2.0.0', '>=')
   notify:
     - restart airflow-webserver
     - restart airflow-scheduler
@@ -37,7 +53,7 @@
 - name: Airflow | Initializing DB < 2.0
   command: "{{ airflow_executable }} initdb"
   environment:
-    AIRFLOW_HOME: "{{ airflow_home }}"
+    AIRFLOW_HOME: "{{ airflow_app_home }}"
   become: true
   become_user: "{{ airflow_user }}"
   tags:
@@ -52,11 +68,11 @@
 - name: Airflow | Initializing DB > 2.0
   command: "{{ airflow_executable }} db init"
   environment:
-    AIRFLOW_HOME: "{{ airflow_home }}"
+    AIRFLOW_HOME: "{{ airflow_app_home }}"
   become: true
   become_user: "{{ airflow_user }}"
-  tags:
-    skip_ansible_lint
+  # tags:
+  #   skip_ansible_lint
   when: airflow_version is version( '2.0.0', '>=') and (airflow_install.changed or airflow_config.changed)
   notify:
     - restart airflow-webserver
@@ -64,10 +80,14 @@
     - restart airflow-worker
     - restart airflow-flower
 
+- name: Airflow | Create Admin user (>= 2.0)
+  command: "{{ airflow_executable }} users create -u {{ airflow_admin_user.username }} -p {{ airflow_admin_user.password }} -f {{ airflow_admin_user.firstname }} -l {{ airflow_admin_user.lastname }} -r {{ airflow_admin_user.role }} -e {{ airflow_admin_user.email }}"
+  when: airflow_version is version( '2.0.0', '>=')
+
 - name: Airflow | Copy extra airflow config files (provided by playbooks)
   copy:
     src: "{{ item }}"
-    dest: "{{ airflow_home }}/{{ item | basename }}"
+    dest: "{{ airflow_conf_path }}/{{ item | basename }}"
     owner: "{{ airflow_user }}"
     group: "{{ airflow_group }}"
     mode: 0640
@@ -82,7 +102,7 @@
 - name: Airflow | Copy extra airflow config templates (provided by playbooks)
   template:
     src: "{{ item }}"
-    dest: "{{ airflow_home }}/{{ item | basename }}"
+    dest: "{{ airflow_conf_path }}/{{ item | basename }}"
     owner: "{{ airflow_user }}"
     group: "{{ airflow_group }}"
     mode: 0640
@@ -95,10 +115,9 @@
     - restart airflow-flower
 
 - name: Airflow | Add variables from configuration file
-  command: "{{ airflow_executable }} variables -s \
-    {{ item.key }} {{ item.value }}"
+  command: "{{ airflow_executable }} variables -s {{ item.key }} {{ item.value }}"
   environment:
-    AIRFLOW_HOME: "{{ airflow_home }}"
+    AIRFLOW_HOME: "{{ airflow_app_home }}"
   become: true
   become_user: "{{ airflow_user }}"
   with_items: "{{ airflow_admin_variables }}"
@@ -110,7 +129,7 @@
     {% for key, value in item.iteritems() %}--{{ key }} '{{ value }}' \
     {% endfor %}"
   environment:
-    AIRFLOW_HOME: "{{ airflow_home }}"
+    AIRFLOW_HOME: "{{ airflow_app_home }}"
   become: true
   become_user: "{{ airflow_user }}"
   with_items: "{{ airflow_admin_connections }}"
diff --git a/tasks/install.yml b/tasks/install.yml
index c58930c..8c53a9b 100644
--- a/tasks/install.yml
+++ b/tasks/install.yml
@@ -8,21 +8,18 @@
   user:
     name: "{{ airflow_user }}"
     group: "{{ airflow_group }}"
-    shell: /bin/bash
+    system: yes
+    shell: /usr/sbin/nologin
+    # shell: /bin/bash
+    createhome: yes
 
-- name: Airflow | Create path for Airflow home
+- name: Airflow | Ensure airflow skeleton paths
   file:
-    path: "{{ airflow_home }}"
-    state: directory
+    path: "{{ item }}"
     owner: "{{ airflow_user }}"
     group: "{{ airflow_group }}"
-
-- name: Airflow | Create path for configuration files
-  file:
-    path: "{{ airflow_environment_file_folder }}"
     state: directory
-    owner: "{{ airflow_user }}"
-    group: "{{ airflow_group }}"
+  with_items: "{{ airflow_skeleton_paths }}"
 
 - name: Airflow | Installing dependencies
   apt:
@@ -30,6 +27,13 @@
     state: present
     update_cache: true
 
+# See https://airflow.apache.org/docs/apache-airflow/stable/installation.html#installation-tools
+- name: Airflow | Install pip 20.2.4 version
+  pip:
+    executable: "{{ airflow_pip_executable }}"
+    name: pip
+    version: 20.2.4
+
 - name: Airflow | Installing Python pip dependencies
   pip:
     executable: "{{ airflow_pip_executable }}"
@@ -52,16 +56,17 @@
   with_items: "{{ celery_extra_packages }}"
   when: airflow_executor == "CeleryExecutor" and celery_extra_packages
 
-- name: Airflow | Set AIRFLOW_HOME environment variable in /etc/environment
+- name: Airflow | Set AIRFLOW_HOME environment variable to "{{ airflow_home }}"
   lineinfile:
     path: /etc/environment
-    line: 'AIRFLOW_HOME={{ airflow_home }}'
+    line: "AIRFLOW_HOME={{ airflow_home }}"
 
 - name: Airflow | Installing Airflow
   pip:
     executable: "{{ airflow_pip_executable }}"
-    name: apache-airflow
+    name: "{{ airflow_package }}{% if airflow_bundle_package %}[{{ airflow_bundle_package }}]{% endif %}"
     version: "{{ airflow_version }}"
+    state: present
     extra_args: --no-cache-dir
   register: airflow_install
   environment:
@@ -70,10 +75,11 @@
 - name: Airflow | Installing Airflow Extra Packages (prior to Ansible 2.7)
   pip:
     executable: "{{ airflow_pip_executable }}"
-    name: "apache-airflow[{{ item }}]=={{ airflow_version }}"
+    name: "{{ airflow_package }}[{{ item }}]=={{ airflow_version }}"
   with_items: "{{ airflow_extra_packages }}"
   when:
-    - airflow_extra_packages
+    - airflow_extra_packages is defined
+    - not airflow_bundle_package
     - ansible_version.full is version_compare('2.7', '<')
 
 - name: Airflow | Installing Airflow Extra Packages
   pip:
     executable: "{{ airflow_pip_executable }}"
     name: "apache-airflow[{{ airflow_extra_packages | join(', ') }}]"
     version: "{{ airflow_version }}"
   when:
-    - airflow_extra_packages
+    - airflow_extra_packages is defined
+    - not airflow_bundle_package
     - ansible_version.full is version_compare('2.7', '>=')
 
 - name: Airflow | Installing DAGs
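The new admin-user task shells out to `airflow users create` with whatever `airflow_admin_user` contains, so the admin/admin pair in defaults/main.yml is only a placeholder. A sketch of a safer override, assuming a group_vars file and a vaulted variable (both names are illustrative, not part of the role):

```yaml
---
# group_vars/airflow_group.yml (illustrative location)
airflow_admin_user:
  username: admin
  # keep the real secret in ansible-vault; vault_airflow_admin_password
  # is a made-up variable name for this example
  password: "{{ vault_airflow_admin_password }}"
  role: Admin
  firstname: Jane
  lastname: Admin
  email: airflow-admins@example.com
```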
dependencies @@ -109,7 +116,7 @@ - name: Airflow | Copy Daemon scripts template: src: "{{ item.value.path }}" - dest: /lib/systemd/system/{{ item.key }}.service + dest: "/lib/systemd/system/{{ item.key }}.service" mode: 0644 notify: restart {{ item.key }} with_dict: "{{ airflow_services }}" diff --git a/templates/airflow-environment-file.j2 b/templates/airflow-environment-file.j2 index 4a3115f..4e6ac27 100644 --- a/templates/airflow-environment-file.j2 +++ b/templates/airflow-environment-file.j2 @@ -21,8 +21,9 @@ # required setting, 0 sets it to unlimited. Scheduler will get restart after every X runs SCHEDULER_RUNS={{ airflow_scheduler_runs }} -AIRFLOW_HOME={{ airflow_home }} -PATH=$PATH:{{ airflow_root_path }}/airflow_venv/bin +AIRFLOW_HOME={{ airflow_app_home }} +AIRFLOW_CONFIG={{ airflow_conf_path }}/airflow.cfg +PATH=$PATH:{{ airflow_app_home }}/airflow_venv/bin HOSTALIASES=/etc/host.aliases diff --git a/templates/airflow2.cfg.j2 b/templates/airflow2.cfg.j2 new file mode 100644 index 0000000..8c51a5f --- /dev/null +++ b/templates/airflow2.cfg.j2 @@ -0,0 +1,1013 @@ +[core] +# The folder where your airflow pipelines live, most likely a +# subfolder in a code repository. This path must be absolute. +dags_folder = {{ airflow_dags_folder }} + +# Hostname by providing a path to a callable, which will resolve the hostname. +# The format is "package.function". +# +# For example, default value "socket.getfqdn" means that result from getfqdn() of "socket" +# package will be used as hostname. +# +# No argument should be required in the function specified. +# If using IP address as hostname is preferred, use value ``airflow.utils.net.get_host_ip_address`` +hostname_callable = {{ airflow_hostname_callable }} + +# Default timezone in case supplied date times are naive +# can be utc (default), system, or any IANA timezone string (e.g. Europe/Amsterdam) +default_timezone = {{ airflow_default_timezone }} + +# The executor class that airflow should use. Choices include +# ``SequentialExecutor``, ``LocalExecutor``, ``CeleryExecutor``, ``DaskExecutor``, +# ``KubernetesExecutor``, ``CeleryKubernetesExecutor`` or the +# full import path to the class when using a custom executor. +executor = {{ airflow_executor }} + +# The SqlAlchemy connection string to the metadata database. +# SqlAlchemy supports many different database engine, more information +# their website +sql_alchemy_conn = {{ airflow_database_conn }} + +# The encoding for the databases +sql_engine_encoding = {{ airflow_database_engine_encoding }} + +# Collation for ``dag_id``, ``task_id``, ``key`` columns in case they have different encoding. +# This is particularly useful in case of mysql with utf8mb4 encoding because +# primary keys for XCom table has too big size and ``sql_engine_collation_for_ids`` should +# be set to ``utf8mb3_general_ci``. +sql_engine_collation_for_ids = {{ airflow_database_engine_collation_for_ids }} + +# If SqlAlchemy should pool database connections. +sql_alchemy_pool_enabled = {{ airflow_database_pool_enabled }} + +# The SqlAlchemy pool size is the maximum number of database connections +# in the pool. 0 indicates no limit. +sql_alchemy_pool_size = {{ airflow_database_pool_size }} + +# The maximum overflow size of the pool. +# When the number of checked-out connections reaches the size set in pool_size, +# additional connections will be returned up to this limit. +# When those additional connections are returned to the pool, they are disconnected and discarded. 
+# It follows then that the total number of simultaneous connections the pool will allow
+# is pool_size + max_overflow,
+# and the total number of "sleeping" connections the pool will allow is pool_size.
+# max_overflow can be set to ``-1`` to indicate no overflow limit;
+# no limit will be placed on the total number of concurrent connections. Defaults to ``10``.
+sql_alchemy_max_overflow = {{ airflow_database_max_overflow }}
+
+# The SqlAlchemy pool recycle is the number of seconds a connection
+# can be idle in the pool before it is invalidated. This config does
+# not apply to sqlite. If the number of DB connections is ever exceeded,
+# a lower config value will allow the system to recover faster.
+sql_alchemy_pool_recycle = {{ airflow_database_pool_recycle }}
+
+# Check connection at the start of each connection pool checkout.
+# Typically, this is a simple statement like "SELECT 1".
+# More information here:
+# https://docs.sqlalchemy.org/en/13/core/pooling.html#disconnect-handling-pessimistic
+sql_alchemy_pool_pre_ping = {{ airflow_database_pool_pre_ping }}
+
+# The schema to use for the metadata database.
+# SqlAlchemy supports databases with the concept of multiple schemas.
+sql_alchemy_schema = {{ airflow_database_schema }}
+
+# Import path for connect args in SqlAlchemy. Defaults to an empty dict.
+# This is useful when you want to configure db engine args that SqlAlchemy won't parse
+# in connection string.
+# See https://docs.sqlalchemy.org/en/13/core/engines.html#sqlalchemy.create_engine.params.connect_args
+sql_alchemy_connect_args = {{ airflow_database_connect_args }}
+
+# The amount of parallelism as a setting to the executor. This defines
+# the max number of task instances that should run simultaneously
+# on this airflow installation
+parallelism = {{ airflow_parallelism }}
+
+# The number of task instances allowed to run concurrently by the scheduler
+# in one DAG. Can be overridden by ``concurrency`` on DAG level.
+dag_concurrency = {{ airflow_dag_concurrency }}
+
+# Are DAGs paused by default at creation
+dags_are_paused_at_creation = {{ airflow_dags_are_paused_at_creation }}
+
+# The maximum number of active DAG runs per DAG
+max_active_runs_per_dag = {{ airflow_max_active_runs_per_dag }}
+
+# Whether to load the DAG examples that ship with Airflow. It's good to
+# get started, but you probably want to set this to ``False`` in a production
+# environment
+load_examples = {{ airflow_load_examples }}
+
+# Whether to load the default connections that ship with Airflow.
It's good to +# get started, but you probably want to set this to ``False`` in a production +# environment +load_default_connections = {{ airflow_load_default_connections }} + +# Path to the folder containing Airflow plugins +plugins_folder = {{ airflow_plugins_folder }} + +# Should tasks be executed via forking of the parent process ("False", +# the speedier option) or by spawning a new python process ("True" slow, +# but means plugin changes picked up by tasks straight away) +execute_tasks_new_python_interpreter = {{ airflow_execute_tasks_new_python_interpreter }} + +# Secret key to save connection passwords in the db +fernet_key = {{ airflow_fernet_key }} + +# Whether to disable pickling dags +donot_pickle = {{ airflow_donot_pickle }} + +# How long before timing out a python file import +dagbag_import_timeout = {{ airflow_dagbag_import_timeout }} + +# Should a traceback be shown in the UI for dagbag import errors, +# instead of just the exception message +dagbag_import_error_tracebacks = {{ airflow_dagbag_import_error_tracebacks }} + +# If tracebacks are shown, how many entries from the traceback should be shown +dagbag_import_error_traceback_depth = {{ airflow_dagbag_import_error_traceback_depth }} + +# How long before timing out a DagFileProcessor, which processes a dag file +dag_file_processor_timeout = {{ airflow_dag_file_processor_timeout }} + +# The class to use for running task instances in a subprocess. +# Choices include StandardTaskRunner, CgroupTaskRunner or the full import path to the class +# when using a custom task runner. +task_runner = {{ airflow_task_runner }} + +# If set, tasks without a ``run_as_user`` argument will be run with this user +# Can be used to de-elevate a sudo user running Airflow when executing tasks +default_impersonation = {{ airflow_default_impersonation }} + +# What security module to use (for example kerberos) +security = {{ airflow_security }} + +# Turn unit test mode on (overwrites many configuration options with test +# values at runtime) +unit_test_mode = {{ airflow_unit_test_mode }} + +# Whether to enable pickling for xcom (note that this is insecure and allows for +# RCE exploits). +enable_xcom_pickling = {{ airflow_enable_xcom_pickling }} + +# When a task is killed forcefully, this is the amount of time in seconds that +# it has to cleanup after it is sent a SIGTERM, before it is SIGKILLED +killed_task_cleanup_time = {{ airflow_killed_task_cleanup_time }} + +# Whether to override params with dag_run.conf. If you pass some key-value pairs +# through ``airflow dags backfill -c`` or +# ``airflow dags trigger -c``, the key-value pairs will override the existing ones in params. +dag_run_conf_overrides_params = {{ airflow_dag_run_conf_overrides_params }} + +# When discovering DAGs, ignore any files that don't contain the strings ``DAG`` and ``airflow``. +dag_discovery_safe_mode = {{ airflow_dag_discovery_safe_mode }} + +# The number of retries each task is going to have by default. Can be overridden at dag or task level. +default_task_retries = {{ airflow_default_task_retries }} + +# Updating serialized DAG can not be faster than a minimum interval to reduce database write rate. +min_serialized_dag_update_interval = {{ airflow_min_serialized_dag_update_interval }} + +# Fetching serialized DAG can not be faster than a minimum interval to reduce database +# read rate. 
This config controls when your DAGs are updated in the Webserver +min_serialized_dag_fetch_interval = {{ airflow_min_serialized_dag_fetch_interval }} + +# Whether to persist DAG files code in DB. +# If set to True, Webserver reads file contents from DB instead of +# trying to access files in a DAG folder. +# Example: store_dag_code = False +store_dag_code = {{ airflow_store_dag_code }} + +# Maximum number of Rendered Task Instance Fields (Template Fields) per task to store +# in the Database. +# All the template_fields for each of Task Instance are stored in the Database. +# Keeping this number small may cause an error when you try to view ``Rendered`` tab in +# TaskInstance view for older tasks. +max_num_rendered_ti_fields_per_task = {{ airflow_max_num_rendered_ti_fields_per_task }} + +# On each dagrun check against defined SLAs +check_slas = {{ airflow_check_slas }} + +# Path to custom XCom class that will be used to store and resolve operators results +# Example: xcom_backend = path.to.CustomXCom +xcom_backend = {{ airflow_xcom_backend }} + +# By default Airflow plugins are lazily-loaded (only loaded when required). Set it to ``False``, +# if you want to load plugins whenever 'airflow' is invoked via cli or loaded from module. +lazy_load_plugins = {{ airflow_lazy_load_plugins }} + +# By default Airflow providers are lazily-discovered (discovery and imports happen only when required). +# Set it to False, if you want to discover providers whenever 'airflow' is invoked via cli or +# loaded from module. +lazy_discover_providers = {{ airflow_lazy_discover_providers }} + +# Number of times the code should be retried in case of DB Operational Errors. +# Not all transactions will be retried as it can cause undesired state. +# Currently it is only used in ``DagFileProcessor.process_file`` to retry ``dagbag.sync_to_db``. +max_db_retries = {{ airflow_max_db_retries }} + +[logging] +# The folder where airflow should store its log files +# This path must be absolute +base_log_folder = {{ airflow_logs_folder }} + +# Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elastic Search. +# Set this to True if you want to enable remote logging. +remote_logging = {{ airflow_remote_logging }} + +# Users must supply an Airflow connection id that provides access to the storage +# location. +remote_log_conn_id = {{ airflow_remote_log_conn_id }} + +# Path to Google Credential JSON file. If omitted, authorization based on `the Application Default +# Credentials +# `__ will +# be used. 
+google_key_path = {{ airflow_google_key_path }} + +# Storage bucket URL for remote logging +# S3 buckets should start with "s3://" +# Cloudwatch log groups should start with "cloudwatch://" +# GCS buckets should start with "gs://" +# WASB buckets should start with "wasb" just to help Airflow select correct handler +# Stackdriver logs should start with "stackdriver://" +remote_base_log_folder = {{ airflow_remote_base_log_folder }} + +# Use server-side encryption for logs stored in S3 +encrypt_s3_logs = {{ airflow_encrypt_s3_logs }} + +# Logging level +logging_level = {{ airflow_logging_level }} + +# Logging level for Flask-appbuilder UI +fab_logging_level = {{ airflow_fab_logging_level }} + +# Logging class +# Specify the class that will specify the logging configuration +# This class has to be on the python classpath +# Example: logging_config_class = my.path.default_local_settings.LOGGING_CONFIG +logging_config_class = {{ airflow_logging_config_class }} + +# Flag to enable/disable Colored logs in Console +# Colour the logs when the controlling terminal is a TTY. +colored_console_log = {{ airflow_colored_console_log }} + +# Log format for when Colored logs is enabled +colored_log_format = {{ colored_log_format }} +colored_formatter_class = {{ colored_formatter_class }} + +# Format of Log line +log_format = {{ airflow_log_format }} +simple_log_format = {{ airflow_simple_log_format }} + +# Specify prefix pattern like mentioned below with stream handler TaskHandlerWithCustomFormatter +task_log_prefix_template = {{ airflow_task_log_prefix_template }} + +# Formatting for how airflow generates file names/paths for each task run. +log_filename_template = {{ airflow_log_filename_template }} + +# Formatting for how airflow generates file names for log +log_processor_filename_template = {{ airflow_log_processor_filename_template }} + +# full path of dag_processor_manager logfile +dag_processor_manager_log_location = {{ airflow_dag_processor_manager_log_location }} + +# Name of handler to read task instance logs. +# Defaults to use ``task`` handler. +task_log_reader = {{ airflow_task_log_reader }} + +# A comma\-separated list of third-party logger names that will be configured to print messages to +# consoles\. +# Example: extra_loggers = connexion,sqlalchemy +extra_loggers = {{ airflow_extra_loggers }} + +[metrics] + +# StatsD (https://github.com/etsy/statsd) integration settings. +# Enables sending metrics to StatsD. +statsd_on = {{ airflow_statsd_on }} +statsd_host = {{ airflow_statsd_host }} +statsd_port = {{ airflow_statsd_port }} +statsd_prefix = {{ airflow_statsd_prefix }} + +# If you want to avoid sending all the available metrics to StatsD, +# you can configure an allow list of prefixes (comma separated) to send only the metrics that +# start with the elements of the list (e.g: "scheduler,executor,dagrun") +statsd_allow_list = {{ airflow_statsd_allow_list }} + +# A function that validate the statsd stat name, apply changes to the stat name if necessary and return +# the transformed stat name. +# +# The function should have the following signature: +# def func_name(stat_name: str) -> str: +stat_name_handler = {{ airflow_stat_name_handler }} + +# To enable datadog integration to send airflow metrics. +statsd_datadog_enabled = {{ airflow_statsd_datadog_enabled }} + +# List of datadog tags attached to all metrics(e.g: key1:value1,key2:value2) +statsd_datadog_tags = {{ airflow_statsd_datadog_tags }} + +# If you want to utilise your own custom Statsd client set the relevant +# module path below. 
+# Note: The module path must exist on your PYTHONPATH for Airflow to pick it up +statsd_custom_client_path = {{ airflow_statsd_custom_client_path }} + +[secrets] +# Full class name of secrets backend to enable (will precede env vars and metastore in search path) +# Example: backend = airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend +backend = {{ airflow_secrets_backend }} + +# The backend_kwargs param is loaded into a dictionary and passed to __init__ of secrets backend class. +# See documentation for the secrets backend you are using. JSON is expected. +# Example for AWS Systems Manager ParameterStore: +backend_kwargs = {{ airflow_backend_kwargs }} + +[cli] +# In what way should the cli access the API. The LocalClient will use the +# database directly, while the json_client will use the api running on the +# webserver +api_client = {{ airflow_cli_api_client }} + +# If you set web_server_url_prefix, do NOT forget to append it here, ex: +# ``endpoint_url = http://localhost:8080/myroot`` +# So api will look like: ``http://localhost:8080/myroot/api/experimental/...`` +endpoint_url = {{ airflow_cli_api_endpoint_url }} + +[debug] +# Used only with ``DebugExecutor``. If set to ``True`` DAG will fail with first +# failed task. Helpful for debugging purposes. +fail_fast = {{ airflow_fail_fast }} + +[api] +# Enables the deprecated experimental API. Please note that these APIs do not have access control. +# The authenticated user has full access. +# +# .. warning:: +# +# This `Experimental REST API `__ is +# deprecated since version 2.0. Please consider using +# `the Stable REST API `__. +# For more information on migration, see +# `UPDATING.md `_ +enable_experimental_api = {{ airflow_enable_experimental_api }} + +# How to authenticate users of the API. See +# https://airflow.apache.org/docs/stable/security.html for possible values. +# ("airflow.api.auth.backend.default" allows all requests for historic reasons) +auth_backend = {{ airflow_auth_backend }} + +# Used to set the maximum page limit for API requests +maximum_page_limit = {{ airflow_maximum_page_limit }} + +# Used to set the default page limit when limit is zero. A default limit +# of 100 is set on OpenApi spec. However, this particular default limit +# only work when limit is set equal to zero(0) from API requests. +# If no limit is supplied, the OpenApi spec default is used. +fallback_page_limit = {{ airflow_fallback_page_limit }} + +# The intended audience for JWT token credentials used for authorization. This value must match on the client and server sides. If empty, audience will not be tested. +# Example: google_oauth2_audience = project-id-random-value.apps.googleusercontent.com +google_oauth2_audience = {{ airflow_google_oauth2_audience }} + +# Path to Google Cloud Service Account key file (JSON). If omitted, authorization based on +# `the Application Default Credentials +# `__ will +# be used. 
+# Example: google_key_path = /files/service-account-json +google_key_path = {{ airflow_google_key_path}} + +[lineage] +# what lineage backend to use +backend = {{ airflow_lineage_backend }} + +[atlas] +sasl_enabled = {{ airflow_atlas_sasl_enabled }} +host = {{ airflow_atlas_host }} +port = {{ airflow_atlas_port }} +username = {{ airflow_atlas_username }} +password = {{ airflow_atlas_password }} + +[operators] +# The default owner assigned to each new operator, unless +# provided explicitly or passed via ``default_args`` +default_owner = {{ airflow_operator_default_owner }} +default_cpus = {{ airflow_operator_default_cpus }} +default_ram = {{ airflow_operator_default_ram }} +default_disk = {{ airflow_operator_default_disk }} +default_gpus = {{ airflow_operator_default_gpus }} + +# Is allowed to pass additional/unused arguments (args, kwargs) to the BaseOperator operator. +# If set to False, an exception will be thrown, otherwise only the console message will be displayed. +allow_illegal_arguments = {{ airflow_allow_illegal_arguments }} + +[hive] +# Default mapreduce queue for HiveOperator tasks +default_hive_mapred_queue = {{ airflow_default_hive_mapred_queue }} + +# Template for mapred_job_name in HiveOperator, supports the following named parameters +# hostname, dag_id, task_id, execution_date +mapred_job_name_template = {{ airflow_mapred_job_name_template }} + +[webserver] +# The base url of your website as airflow cannot guess what domain or +# cname you are using. This is used in automated emails that +# airflow sends to point links to the right web server +base_url = {{ airflow_webserver_base_url }} + +# Default timezone to display all dates in the UI, can be UTC, system, or +# any IANA timezone string (e.g. Europe/Amsterdam). If left empty the +# default value of core/default_timezone will be used +# Example: default_ui_timezone = America/New_York +default_ui_timezone = {{ airflow_webserver_default_ui_timezone }} + +# The ip specified when starting the web server +web_server_host = {{ airflow_webserver_host }} + +# The port on which to run the web server +web_server_port = {{ airflow_webserver_port }} + +# Paths to the SSL certificate and key for the web server. When both are +# provided SSL will be enabled. This does not change the web server port. +web_server_ssl_cert = {{ airflow_webserver_ssl_cert }} + +# Paths to the SSL certificate and key for the web server. When both are +# provided SSL will be enabled. This does not change the web server port. +web_server_ssl_key = {{ airflow_webserver_ssl_key }} + +# Number of seconds the webserver waits before killing gunicorn master that doesn't respond +web_server_master_timeout = {{ airflow_webserver_master_timeout }} + +# Number of seconds the gunicorn webserver waits before timing out on a worker +web_server_worker_timeout = {{ airflow_webserver_worker_timeout }} + +# Number of workers to refresh at a time. When set to 0, worker refresh is +# disabled. When nonzero, airflow periodically refreshes webserver workers by +# bringing up new ones and killing old ones. +worker_refresh_batch_size = {{ airflow_webserver_worker_refresh_batch_size }} + +# Number of seconds to wait before refreshing a batch of workers. +worker_refresh_interval = {{ airflow_webserver_worker_refresh_interval }} + +# If set to True, Airflow will track files in plugins_folder directory. When it detects changes, +# then reload the gunicorn. 
+reload_on_plugin_change = {{ airflow_webserver_reload_on_plugin_change }} + +# Secret key used to run your flask app +# It should be as random as possible +secret_key = {{ airflow_webserver_secret_key }} + +# Number of workers to run the Gunicorn web server +workers = {{ airflow_webserver_workers }} + +# The worker class gunicorn should use. Choices include +# sync (default), eventlet, gevent +worker_class = {{ airflow_webserver_worker_class }} + +# Log files for the gunicorn webserver. '-' means log to stderr. +access_logfile = {{ airflow_webserver_access_logfile }} + +# Log files for the gunicorn webserver. '-' means log to stderr. +error_logfile = {{ airflow_webserver_error_logfile }} + +# Access log format for gunicorn webserver. +# default format is %%(h)s %%(l)s %%(u)s %%(t)s "%%(r)s" %%(s)s %%(b)s "%%(f)s" "%%(a)s" +# documentation - https://docs.gunicorn.org/en/stable/settings.html#access-log-format +access_logformat = {{ airflow_webserver_access_logformat }} + +# Expose the configuration file in the web server +expose_config = {{ airflow_webserver_expose_config }} + +# Expose hostname in the web server +expose_hostname = {{ airflow_webserver_expose_hostname }} + +# Expose stacktrace in the web server +expose_stacktrace = {{ airflow_webserver_expose_stacktrace }} + +# Default DAG view. Valid values are: ``tree``, ``graph``, ``duration``, ``gantt``, ``landing_times`` +dag_default_view = {{ airflow_webserver_dag_default_view }} + +# Default DAG orientation. Valid values are: +# ``LR`` (Left->Right), ``TB`` (Top->Bottom), ``RL`` (Right->Left), ``BT`` (Bottom->Top) +dag_orientation = {{ airflow_webserver_dag_orientation }} + +# Puts the webserver in demonstration mode; blurs the names of Operators for +# privacy. +demo_mode = {{ airflow_webserver_demo_mode }} + +# The amount of time (in secs) webserver will wait for initial handshake +# while fetching logs from other worker machine +log_fetch_timeout_sec = {{ airflow_webserver_log_fetch_timeout_sec }} + +# Time interval (in secs) to wait before next log fetching. +log_fetch_delay_sec = {{ airflow_webserver_log_fetch_delay_sec }} + +# Distance away from page bottom to enable auto tailing. +log_auto_tailing_offset = {{ airflow_webserver_log_auto_tailing_offset }} + +# Animation speed for auto tailing log display. +log_animation_speed = {{ airflow_webserver_log_animation_speed }} + +# By default, the webserver shows paused DAGs. Flip this to hide paused +# DAGs by default +hide_paused_dags_by_default = {{ airflow_webserver_hide_paused_dags_by_default }} + +# Consistent page size across all listing views in the UI +page_size = {{ airflow_webserver_page_size }} + +# Define the color of navigation bar +navbar_color = {{ airflow_webserver_navbar_color }} + +# Default dagrun to show in UI +default_dag_run_display_number = {{ airflow_webserver_default_dag_run_display_number }} + +# Enable werkzeug ``ProxyFix`` middleware for reverse proxy +enable_proxy_fix = {{ airflow_webserver_enable_proxy_fix }} + +# Number of values to trust for ``X-Forwarded-For``. 
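+# (Illustrative guidance: with werkzeug's ProxyFix, each of the counts below should match
+# the number of trusted reverse proxies in front of the webserver, e.g. 1 when a single
+# nginx instance sits in front; treat this as an example, not a recommendation.)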
+# More info: https://werkzeug.palletsprojects.com/en/0.16.x/middleware/proxy_fix/
+proxy_fix_x_for = {{ airflow_webserver_proxy_fix_x_for }}
+
+# Number of values to trust for ``X-Forwarded-Proto``
+proxy_fix_x_proto = {{ airflow_webserver_proxy_fix_x_proto }}
+
+# Number of values to trust for ``X-Forwarded-Host``
+proxy_fix_x_host = {{ airflow_webserver_proxy_fix_x_host }}
+
+# Number of values to trust for ``X-Forwarded-Port``
+proxy_fix_x_port = {{ airflow_webserver_proxy_fix_x_port }}
+
+# Number of values to trust for ``X-Forwarded-Prefix``
+proxy_fix_x_prefix = {{ airflow_webserver_proxy_fix_x_prefix }}
+
+# Set secure flag on session cookie
+cookie_secure = {{ airflow_webserver_cookie_secure }}
+
+# Set samesite policy on session cookie
+cookie_samesite = {{ airflow_webserver_cookie_samesite }}
+
+# Default setting for wrap toggle on DAG code and TI log views.
+default_wrap = {{ airflow_webserver_default_wrap }}
+
+# Allow the UI to be rendered in a frame
+x_frame_enabled = {{ airflow_webserver_x_frame_enabled }}
+
+# Send anonymous user activity to your analytics tool
+# choose from google_analytics, segment, or metarouter
+analytics_tool = {{ airflow_webserver_analytics_tool }}
+
+# Unique ID of your account in the analytics tool
+analytics_id = {{ airflow_webserver_analytics_id }}
+
+# 'Recent Tasks' stats will show for old DagRuns if set
+show_recent_stats_for_completed_runs = {{ airflow_webserver_show_recent_stats_for_completed_runs }}
+
+# Update FAB permissions and sync security manager roles
+# on webserver startup
+update_fab_perms = {{ airflow_webserver_update_fab_perms }}
+
+# The UI cookie lifetime in minutes. User will be logged out from UI after
+# ``session_lifetime_minutes`` of non-activity
+session_lifetime_minutes = {{ airflow_webserver_session_lifetime_minutes }}
+
+[email]
+
+# Configuration email backend and whether to
+# send email alerts on retry or failure
+# Email backend to use
+email_backend = {{ airflow_email_backend }}
+
+# Whether email alerts should be sent when a task is retried
+default_email_on_retry = {{ airflow_email_default_email_on_retry }}
+
+# Whether email alerts should be sent when a task has failed
+default_email_on_failure = {{ airflow_email_default_email_on_failure }}
+
+# File that will be used as the template for Email subject (which will be rendered using Jinja2).
+# If not set, Airflow uses a base template.
+# Example: subject_template = /path/to/my_subject_template_file
+subject_template = {{ airflow_email_subject_template }}
+
+# File that will be used as the template for Email content (which will be rendered using Jinja2).
+# If not set, Airflow uses a base template.
+# Example: html_content_template = /path/to/my_html_content_template_file
+html_content_template = {{ airflow_email_html_content_template }}
+
+[smtp]
+
+# If you want airflow to send emails on retries and failures, and you want to use
+# the airflow.utils.email.send_email_smtp function, you have to configure an
+# smtp server here
+smtp_host = {{ airflow_smtp_host }}
+smtp_starttls = {{ airflow_smtp_starttls }}
+smtp_ssl = {{ airflow_smtp_ssl }}
+smtp_port = 25
+smtp_mail_from = airflow@example.com
+smtp_timeout = 30
+smtp_retry_limit = 5
+{% if airflow_smtp_user %}
+smtp_user = {{ airflow_smtp_user }}
+{% endif %}
+{% if airflow_smtp_passwd %}
+smtp_password = {{ airflow_smtp_passwd }}
+{% endif %}
+
+[sentry]
+
+# Sentry (https://docs.sentry.io) integration. Here you can supply
+# additional configuration options based on the Python platform. See:
+# https://docs.sentry.io/error-reporting/configuration/?platform=python.
+# Unsupported options: ``integrations``, ``in_app_include``, ``in_app_exclude``,
+# ``ignore_errors``, ``before_breadcrumb``, ``before_send``, ``transport``.
+# Enable error reporting to Sentry
+sentry_on = {{ airflow_sentry_on }}
+sentry_dsn =
+
+[celery_kubernetes_executor]
+
+# This section only applies if you are using the ``CeleryKubernetesExecutor`` in
+# the ``[core]`` section above.
+# Define when to send a task to ``KubernetesExecutor`` when using ``CeleryKubernetesExecutor``.
+# When the queue of a task is the value of ``kubernetes_queue`` (default ``kubernetes``),
+# the task is executed via ``KubernetesExecutor``,
+# otherwise via ``CeleryExecutor``
+kubernetes_queue = kubernetes
+
+[celery]
+
+# This section only applies if you are using the CeleryExecutor in
+# the ``[core]`` section above.
+# The app name that will be used by celery
+celery_app_name = {{ airflow_celery_app_name }}
+
+# The concurrency that will be used when starting workers with the
+# ``airflow celery worker`` command. This defines the number of task instances that
+# a worker will take, so size up your workers based on the resources on
+# your worker box and the nature of your tasks
+worker_concurrency = {{ airflow_celery_concurrency }}
+
+# The maximum and minimum concurrency that will be used when starting workers with the
+# ``airflow celery worker`` command (always keep minimum processes, but grow
+# to maximum if necessary). Note the value should be max_concurrency,min_concurrency
+# Pick these numbers based on resources on worker box and the nature of the task.
+# If autoscale option is available, worker_concurrency will be ignored.
+# http://docs.celeryproject.org/en/latest/reference/celery.bin.worker.html#cmdoption-celery-worker-autoscale
+# Example: worker_autoscale = 16,12
+worker_autoscale = {{ airflow_celery_worker_autoscale }}
+
+# Used to increase the number of tasks that a worker prefetches, which can improve performance.
+# The number of processes multiplied by worker_prefetch_multiplier is the number of tasks
+# that are prefetched by a worker. A value greater than 1 can result in tasks being unnecessarily
+# blocked if there are multiple workers and one worker prefetches tasks that sit behind long
+# running tasks while another worker has unutilized processes that are unable to process the already
+# claimed blocked tasks.
+# https://docs.celeryproject.org/en/stable/userguide/optimizing.html#prefetch-limits
+# Example: worker_prefetch_multiplier = 1
+worker_prefetch_multiplier = {{ airflow_celery_worker_prefetch_multiplier }}
+
+# When you start an airflow worker, airflow starts a tiny web server
+# subprocess to serve the worker's local log files to the airflow main
+# web server, which then builds pages and sends them to users. This defines
+# the port on which the logs are served. It needs to be unused, and open and
+# visible from the main web server so it can connect to the workers.
+worker_log_server_port = {{ airflow_celery_worker_log_server_port }}
+
+# Umask that will be used when starting workers with the ``airflow celery worker``
+# in daemon mode. This controls the file-creation mode mask which determines the initial
+# value of file permission bits for newly created files.
+worker_umask = {{ airflow_celery_worker_umask }}
+
+# The Celery broker URL. Celery supports RabbitMQ, Redis and, experimentally,
+# a sqlalchemy database. Refer to the Celery documentation for more information.
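+# (Illustrative examples only; hostnames and credentials are placeholders, not role
+# defaults: a Redis broker would look like `redis://redis-host:6379/0`, a RabbitMQ
+# broker like `amqp://user:password@rabbitmq-host:5672/airflow`.)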
+broker_url = {{ airflow_celery_broker_url }}
+
+# The Celery result_backend. When a job finishes, it needs to update the
+# metadata of the job. Therefore it will post a message on a message bus,
+# or insert it into a database (depending on the backend).
+# This status is used by the scheduler to update the state of the task.
+# The use of a database is highly recommended.
+# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-result-backend-settings
+result_backend = {{ airflow_celery_result_backend }}
+
+# Celery Flower is a sweet UI for Celery. Airflow has a shortcut to start
+# it: ``airflow celery flower``. This defines the IP that Celery Flower runs on
+flower_host = {{ airflow_flower_host }}
+
+# The root URL for Flower
+# Example: flower_url_prefix = /flower
+flower_url_prefix = {{ airflow_flower_url_prefix }}
+
+# This defines the port that Celery Flower runs on
+flower_port = {{ airflow_flower_port }}
+
+# Securing Flower with Basic Authentication
+# Accepts user:password pairs separated by a comma
+# Example: flower_basic_auth = user1:password1,user2:password2
+flower_basic_auth = {{ airflow_flower_basic_auth }}
+
+# Default queue that tasks get assigned to and that workers listen on.
+default_queue = {{ airflow_celery_default_queue }}
+
+# How many processes CeleryExecutor uses to sync task state.
+# 0 means to use max(1, number of cores - 1) processes.
+sync_parallelism = {{ airflow_celery_sync_parallelism }}
+
+# Import path for celery configuration options
+celery_config_options = {{ airflow_celery_config_options }}
+ssl_active = {{ airflow_celery_ssl_active }}
+ssl_key = {{ airflow_celery_ssl_key }}
+ssl_cert = {{ airflow_celery_ssl_cert }}
+ssl_cacert = {{ airflow_celery_ssl_cacert }}
+
+# Celery Pool implementation.
+# Choices include: ``prefork`` (default), ``eventlet``, ``gevent`` or ``solo``.
+# See:
+# https://docs.celeryproject.org/en/latest/userguide/workers.html#concurrency
+# https://docs.celeryproject.org/en/latest/userguide/concurrency/eventlet.html
+pool = {{ airflow_celery_pool }}
+
+# The number of seconds to wait before timing out ``send_task_to_executor`` or
+# ``fetch_celery_task_state`` operations.
+operation_timeout = {{ airflow_celery_operation_timeout }}
+
+# Celery task will report its status as 'started' when the task is executed by a worker.
+# This is used in Airflow to keep track of the running tasks, and if a Scheduler is restarted
+# or run in HA mode, it can adopt the orphan tasks launched by the previous SchedulerJob.
+task_track_started = {{ airflow_celery_task_track_started }}
+
+# Time in seconds after which adopted tasks are cleared by CeleryExecutor. This is helpful to clear
+# stalled tasks.
+task_adoption_timeout = {{ airflow_celery_task_adoption_timeout }}
+
+# The maximum number of retries for publishing task messages to the broker when failing
+# due to an ``AirflowTaskTimeout`` error before giving up and marking the task as failed.
+task_publish_max_retries = {{ airflow_celery_task_publish_max_retries }}
+
+# Worker initialisation check to validate Metadata Database connection
+worker_precheck = {{ airflow_celery_worker_precheck }}
+
+[celery_broker_transport_options]
+
+# This section is for specifying options which can be passed to the
+# underlying celery broker transport. See:
+# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-broker_transport_options
+# The visibility timeout defines the number of seconds to wait for the worker
+# to acknowledge the task before the message is redelivered to another worker.
+# Make sure to increase the visibility timeout to match the time of the longest
+# ETA you're planning to use.
+# visibility_timeout is only supported for Redis and SQS celery brokers.
+# See:
+# http://docs.celeryproject.org/en/master/userguide/configuration.html#std:setting-broker_transport_options
+# Example: visibility_timeout = 21600
+visibility_timeout = {{ airflow_celery_broker_visibility_timeout }}
+
+[dask]
+{% if airflow_executor == "DaskExecutor" %}
+# This section only applies if you are using the DaskExecutor in
+# the [core] section above.
+# The IP address and port of the Dask cluster's scheduler.
+cluster_address = {{ airflow_dask_cluster_address }}
+
+# TLS/SSL settings to access a secured Dask scheduler.
+tls_ca = {{ airflow_dask_tls_ca }}
+tls_cert = {{ airflow_dask_tls_cert }}
+tls_key = {{ airflow_dask_tls_key }}
+{% endif %}
+
+[scheduler]
+# Task instances listen for external kill signal (when you clear tasks
+# from the CLI or the UI), this defines the frequency at which they should
+# listen (in seconds).
+job_heartbeat_sec = {{ airflow_scheduler_job_heartbeat_sec }}
+
+# How often (in seconds) to check and tidy up 'running' TaskInstances
+# that no longer have a matching DagRun
+clean_tis_without_dagrun_interval = {{ airflow_scheduler_clean_tis_without_dagrun_interval }}
+
+# The scheduler constantly tries to trigger new tasks (look at the
+# scheduler section in the docs for more information). This defines
+# how often the scheduler should run (in seconds).
+scheduler_heartbeat_sec = {{ airflow_scheduler_heartbeat_sec }}
+
+# The number of times to try to schedule each DAG file
+# -1 indicates unlimited number
+num_runs = {{ airflow_scheduler_num_runs }}
+
+# The number of seconds to wait between consecutive DAG file processing
+processor_poll_interval = {{ airflow_scheduler_processor_poll_interval }}
+
+# Number of seconds after which a DAG file is parsed. The DAG file is parsed every
+# ``min_file_process_interval`` number of seconds. Updates to DAGs are reflected after
+# this interval. Keeping this number low will increase CPU usage.
+min_file_process_interval = {{ airflow_scheduler_min_file_process_interval }}
+
+# How often (in seconds) to scan the DAGs directory for new files. Defaults to 5 minutes.
+dag_dir_list_interval = {{ airflow_scheduler_dag_dir_list_interval }}
+
+# How often should stats be printed to the logs. Setting to 0 will disable printing stats
+print_stats_interval = {{ airflow_scheduler_print_stats_interval }}
+
+# How often (in seconds) should pool usage stats be sent to statsd (if statsd_on is enabled)
+pool_metrics_interval = {{ airflow_scheduler_pool_metrics_interval }}
+
+# If the last scheduler heartbeat happened more than scheduler_health_check_threshold
+# ago (in seconds), the scheduler is considered unhealthy.
+# This is used by the health check in the "/health" endpoint
+scheduler_health_check_threshold = {{ airflow_scheduler_scheduler_health_check_threshold }}
+
+# How often (in seconds) should the scheduler check for orphaned tasks and SchedulerJobs
+orphaned_tasks_check_interval = {{ airflow_scheduler_orphaned_tasks_check_interval }}
+child_process_log_directory = {{ airflow_child_process_log_folder }}
+
+# Local task jobs periodically heartbeat to the DB. If the job has
+# not sent a heartbeat in this many seconds, the scheduler will mark the
+# associated task instance as failed and will re-schedule the task.
+scheduler_zombie_task_threshold = {{ airflow_scheduler_zombie_task_threshold }}
+
+# Turn off scheduler catchup by setting this to ``False``.
+# Default behavior is unchanged and
+# Command Line Backfills still work, but the scheduler
+# will not do scheduler catchup if this is ``False``,
+# however it can be set on a per-DAG basis in the
+# DAG definition (catchup)
+catchup_by_default = {{ airflow_scheduler_catchup_by_default }}
+
+# This changes the batch size of queries in the scheduling main loop.
+# If this is too high, SQL query performance may be impacted by one
+# or more of the following:
+# - reversion to full table scan
+# - complexity of query predicate
+# - excessive locking
+# Additionally, you may hit the maximum allowable query length for your db.
+# Set this to 0 for no limit (not advised)
+max_tis_per_query = {{ airflow_scheduler_max_tis_per_query }}
+
+# Should the scheduler issue ``SELECT ... FOR UPDATE`` in relevant queries.
+# If this is set to False then you should not run more than a single
+# scheduler at once
+use_row_level_locking = {{ airflow_scheduler_use_row_level_locking }}
+
+# Max number of DAGs to create DagRuns for per scheduler loop
+#
+# Default: 10
+max_dagruns_to_create_per_loop = {{ airflow_scheduler_max_dagruns_to_create_per_loop }}
+
+# How many DagRuns should a scheduler examine (and lock) when scheduling
+# and queuing tasks.
+#
+# Default: 20
+max_dagruns_per_loop_to_schedule = {{ airflow_scheduler_max_dagruns_per_loop_to_schedule }}
+
+# Should the Task supervisor process perform a "mini scheduler" to attempt to schedule more tasks of the
+# same DAG. Leaving this on will mean tasks in the same DAG execute quicker, but might starve out other
+# dags in some circumstances
+#
+# Default: True
+schedule_after_task_execution = {{ airflow_scheduler_schedule_after_task_execution }}
+
+# The scheduler can run multiple processes in parallel to parse dags.
+# This defines how many processes will run.
+parsing_processes = {{ airflow_scheduler_parsing_processes }}
+
+# Turn off scheduler use of cron intervals by setting this to False.
+# DAGs submitted manually in the web UI or with trigger_dag will still run.
+use_job_schedule = {{ airflow_scheduler_use_job_schedule }}
+
+# Allow externally triggered DagRuns for Execution Dates in the future
+# Only has effect if schedule_interval is set to None in DAG
+allow_trigger_in_future = {{ airflow_scheduler_allow_trigger_in_future }}
+
+[kerberos]
+ccache = {{ airflow_kerberos_ccache }}
+
+# gets augmented with fqdn
+principal = {{ airflow_kerberos_principal }}
+reinit_frequency = {{ airflow_kerberos_reinit_frequency }}
+kinit_path = {{ airflow_kerberos_kinit_path }}
+keytab = {{ airflow_kerberos_keytab }}
+
+[github_enterprise]
+api_rev = {{ airflow_github_enterprise_api_rev }}
+
+[admin]
+# UI to hide sensitive variable fields when set to True
+hide_sensitive_variable_fields = {{ airflow_admin_hide_sensitive_variable_fields }}
+
+# A comma-separated list of sensitive keywords to look for in variable names.
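+# (Hypothetical example, not a shipped default: `sensitive_variable_fields = api_key,token,private_key`.)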
+sensitive_variable_fields = {{ airflow_admin_sensitive_variable_fields }}
+
+[elasticsearch]
+# Elasticsearch host
+host = {{ airflow_elasticsearch_host }}
+
+# Format of the log_id, which is used to query for a given task's logs
+log_id_template = {{ airflow_elasticsearch_log_id_template }}
+
+# Used to mark the end of a log stream for a task
+end_of_log_mark = {{ airflow_elasticsearch_end_of_log_mark }}
+
+# Qualified URL for an elasticsearch frontend (like Kibana) with a template argument for log_id
+# Code will construct log_id using the log_id template from the argument above.
+# NOTE: The code will prefix the https:// automatically, don't include that here.
+frontend = {{ airflow_elasticsearch_frontend }}
+
+# Write the task logs to the stdout of the worker, rather than the default files
+write_stdout = {{ airflow_elasticsearch_write_stdout }}
+
+# Instead of the default log formatter, write the log lines as JSON
+json_format = {{ airflow_elasticsearch_json_format }}
+
+# Log fields to also attach to the json output, if enabled
+json_fields = {{ airflow_elasticsearch_json_fields }}
+
+[elasticsearch_configs]
+use_ssl = {{ airflow_elasticsearch_configs_use_ssl }}
+verify_certs = {{ airflow_elasticsearch_configs_verify_certs }}
+
+[kubernetes]
+# Path to the YAML pod file. If set, all other kubernetes-related fields are ignored.
+pod_template_file = {{ airflow_kubernetes_pod_template_file }}
+
+# The repository of the Kubernetes Image for the Worker to Run
+worker_container_repository = {{ airflow_kubernetes_worker_container_repository }}
+
+# The tag of the Kubernetes Image for the Worker to Run
+worker_container_tag = {{ airflow_kubernetes_worker_container_tag }}
+
+# The Kubernetes namespace where airflow workers should be created. Defaults to ``default``
+namespace = {{ airflow_kubernetes_namespace }}
+
+# If True, all worker pods will be deleted upon termination
+delete_worker_pods = {{ airflow_kubernetes_delete_worker_pods }}
+
+# If False (and delete_worker_pods is True),
+# failed worker pods will not be deleted so users can investigate them.
+delete_worker_pods_on_failure = {{ airflow_kubernetes_delete_worker_pods_on_failure }}
+
+# Number of Kubernetes Worker Pod creation calls per scheduler loop.
+# Note that the current default of "1" will only launch a single pod
+# per-heartbeat. It is HIGHLY recommended that users increase this
+# number to match the tolerance of their kubernetes cluster for
+# better performance.
+worker_pods_creation_batch_size = {{ airflow_kubernetes_worker_pods_creation_batch_size }}
+
+# Allows users to launch pods in multiple namespaces.
+# Will require creating a cluster-role for the scheduler
+multi_namespace_mode = {{ airflow_kubernetes_multi_namespace_mode }}
+
+# Use the service account kubernetes gives to pods to connect to the kubernetes cluster.
+# It's intended for clients that expect to be running inside a pod running on kubernetes.
+# It will raise an exception if called from a process not running in a kubernetes environment.
+in_cluster = {{ airflow_kubernetes_in_cluster }}
+
+# When running with in_cluster=False change the default cluster_context or config_file
+# options to Kubernetes client. Leave these blank to use the default behaviour like ``kubectl`` has.
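+# (Hypothetical example: `cluster_context = my-staging-cluster`, i.e. a context name
+# taken from the kubeconfig file referenced below.)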
+cluster_context = {{ airflow_kubernetes_cluster_context }}
+
+# Path to the kubernetes configfile to be used when ``in_cluster`` is set to False
+config_file = {{ airflow_kubernetes_config_file }}
+
+# Keyword parameters to pass while calling a kubernetes client core_v1_api methods
+# from Kubernetes Executor provided as a single line formatted JSON dictionary string.
+# List of supported params are similar for all core_v1_apis, hence a single config
+# variable for all apis. See:
+# https://raw.githubusercontent.com/kubernetes-client/python/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/api/core_v1_api.py
+kube_client_request_args = {{ airflow_kubernetes_kube_client_request_args }}
+
+# Optional keyword arguments to pass to the ``delete_namespaced_pod`` kubernetes client
+# ``core_v1_api`` method when using the Kubernetes Executor.
+# This should be an object and can contain any of the options listed in the ``v1DeleteOptions``
+# class defined here:
+# https://github.com/kubernetes-client/python/blob/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/models/v1_delete_options.py#L19
+delete_option_kwargs = {{ airflow_kubernetes_delete_option_kwargs }}
+
+# Enables the TCP keepalive mechanism. This prevents Kubernetes API requests from hanging
+# indefinitely when an idle connection is timed out by services like cloud load balancers or firewalls.
+enable_tcp_keepalive = {{ airflow_kubernetes_enable_tcp_keepalive }}
+
+# When the `enable_tcp_keepalive` option is enabled, TCP probes a connection that has
+# been idle for `tcp_keep_idle` seconds.
+tcp_keep_idle = {% if airflow_kubernetes_enable_tcp_keepalive %}{{ airflow_kubernetes_tcp_keep_idle }}{% endif %}
+
+# When the `enable_tcp_keepalive` option is enabled, if the Kubernetes API does not respond
+# to a keepalive probe, TCP retransmits the probe after `tcp_keep_intvl` seconds.
+tcp_keep_intvl = {{ airflow_kubernetes_tcp_keep_intvl }}
+
+# When the `enable_tcp_keepalive` option is enabled, if the Kubernetes API does not respond
+# to a keepalive probe, TCP retransmits the probe `tcp_keep_cnt` number of times before
+# a connection is considered to be broken.
+tcp_keep_cnt = {{ airflow_kubernetes_tcp_keep_cnt }}
+
+[smart_sensor]
+# When `use_smart_sensor` is True, Airflow redirects multiple qualified sensor tasks to
+# smart sensor task.
+use_smart_sensor = {{ airflow_use_smart_sensor }}
+
+# `shard_code_upper_limit` is the upper limit of `shard_code` value. The `shard_code` is generated
+# by `hashcode % shard_code_upper_limit`.
+shard_code_upper_limit = {{ airflow_shard_code_upper_limit }}
+
+# The number of running smart sensor processes for each service.
+shards = {{ airflow_shards }}
+
+# Comma-separated list of sensor classes supported in smart_sensor.
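+# (For reference, the upstream 2.0 default here is believed to be
+# `sensors_enabled = NamedHivePartitionSensor`.)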
+sensors_enabled = {{ airflow_sensors_enabled }} diff --git a/test-requirements.txt b/test-requirements.txt index 3793162..357a06a 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,4 +1,4 @@ -molecule==3.0.1 +ansible==2.9.9 +molecule==3.0.6 docker==4.1.0 -ansible-lint==4.2.0 -ansible==2.8.8 \ No newline at end of file +ansible-lint==4.2.0 \ No newline at end of file From 0e44c490eab34051b56b8412cc40ce7d6b45fe1e Mon Sep 17 00:00:00 2001 From: David Mateo Date: Fri, 19 Feb 2021 14:02:19 +0100 Subject: [PATCH 02/21] Pipfile.lock update --- Pipfile.lock | 367 ++++++++++++++++++++++----------------------------- 1 file changed, 156 insertions(+), 211 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index 502fda3..30147f1 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "f8bc581fa040d1bdc745af45fea8fd51d5a186b2f6980f246384c111f4d8cf97" + "sha256": "3b1830187ce6d7d5296835882968e728c774d799303d0646367caf20ecbf3e9f" }, "pipfile-spec": 6, "requires": { - "python_version": "3.7" + "python_version": "3.8" }, "sources": [ { @@ -18,10 +18,10 @@ "default": { "ansible": { "hashes": [ - "sha256:c364ff5807cb88af29b161a3a1d88ff737f10b930a24be66d88769ee204f4536" + "sha256:e83d84ae8bf131c0499d8a4c0e1144bf969454c43086e61cca3c224227df29d1" ], "index": "pypi", - "version": "==2.8.8" + "version": "==2.9.9" }, "ansible-lint": { "hashes": [ @@ -31,13 +31,6 @@ "index": "pypi", "version": "==4.2.0" }, - "appdirs": { - "hashes": [ - "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", - "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" - ], - "version": "==1.4.4" - }, "arrow": { "hashes": [ "sha256:e098abbd9af3665aea81bdd6c869e93af4feb078e98468dd351c383af187aac5", @@ -45,13 +38,6 @@ ], "version": "==0.17.0" }, - "aspy.yaml": { - "hashes": [ - "sha256:463372c043f70160a9ec950c3f1e4c3a82db5fca01d334b6bc89c7164d744bdc", - "sha256:e7c742382eff2caed61f87a39d13f99109088e5e93f04d76eb8d4b28aa143f45" - ], - "version": "==1.3.0" - }, "bcrypt": { "hashes": [ "sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29", @@ -79,65 +65,59 @@ }, "certifi": { "hashes": [ - "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3", - "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41" + "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", + "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" ], - "version": "==2020.6.20" + "version": "==2020.12.5" }, "cffi": { "hashes": [ - "sha256:005f2bfe11b6745d726dbb07ace4d53f057de66e336ff92d61b8c7e9c8f4777d", - "sha256:09e96138280241bd355cd585148dec04dbbedb4f46128f340d696eaafc82dd7b", - "sha256:0b1ad452cc824665ddc682400b62c9e4f5b64736a2ba99110712fdee5f2505c4", - "sha256:0ef488305fdce2580c8b2708f22d7785ae222d9825d3094ab073e22e93dfe51f", - "sha256:15f351bed09897fbda218e4db5a3d5c06328862f6198d4fb385f3e14e19decb3", - "sha256:22399ff4870fb4c7ef19fff6eeb20a8bbf15571913c181c78cb361024d574579", - "sha256:23e5d2040367322824605bc29ae8ee9175200b92cb5483ac7d466927a9b3d537", - "sha256:2791f68edc5749024b4722500e86303a10d342527e1e3bcac47f35fbd25b764e", - "sha256:2f9674623ca39c9ebe38afa3da402e9326c245f0f5ceff0623dccdac15023e05", - "sha256:3363e77a6176afb8823b6e06db78c46dbc4c7813b00a41300a4873b6ba63b171", - "sha256:33c6cdc071ba5cd6d96769c8969a0531be2d08c2628a0143a10a7dcffa9719ca", - "sha256:3b8eaf915ddc0709779889c472e553f0d3e8b7bdf62dab764c8921b09bf94522", - 
"sha256:3cb3e1b9ec43256c4e0f8d2837267a70b0e1ca8c4f456685508ae6106b1f504c", - "sha256:3eeeb0405fd145e714f7633a5173318bd88d8bbfc3dd0a5751f8c4f70ae629bc", - "sha256:44f60519595eaca110f248e5017363d751b12782a6f2bd6a7041cba275215f5d", - "sha256:4d7c26bfc1ea9f92084a1d75e11999e97b62d63128bcc90c3624d07813c52808", - "sha256:529c4ed2e10437c205f38f3691a68be66c39197d01062618c55f74294a4a4828", - "sha256:6642f15ad963b5092d65aed022d033c77763515fdc07095208f15d3563003869", - "sha256:85ba797e1de5b48aa5a8427b6ba62cf69607c18c5d4eb747604b7302f1ec382d", - "sha256:8f0f1e499e4000c4c347a124fa6a27d37608ced4fe9f7d45070563b7c4c370c9", - "sha256:a624fae282e81ad2e4871bdb767e2c914d0539708c0f078b5b355258293c98b0", - "sha256:b0358e6fefc74a16f745afa366acc89f979040e0cbc4eec55ab26ad1f6a9bfbc", - "sha256:bbd2f4dfee1079f76943767fce837ade3087b578aeb9f69aec7857d5bf25db15", - "sha256:bf39a9e19ce7298f1bd6a9758fa99707e9e5b1ebe5e90f2c3913a47bc548747c", - "sha256:c11579638288e53fc94ad60022ff1b67865363e730ee41ad5e6f0a17188b327a", - "sha256:c150eaa3dadbb2b5339675b88d4573c1be3cb6f2c33a6c83387e10cc0bf05bd3", - "sha256:c53af463f4a40de78c58b8b2710ade243c81cbca641e34debf3396a9640d6ec1", - "sha256:cb763ceceae04803adcc4e2d80d611ef201c73da32d8f2722e9d0ab0c7f10768", - "sha256:cc75f58cdaf043fe6a7a6c04b3b5a0e694c6a9e24050967747251fb80d7bce0d", - "sha256:d80998ed59176e8cba74028762fbd9b9153b9afc71ea118e63bbf5d4d0f9552b", - "sha256:de31b5164d44ef4943db155b3e8e17929707cac1e5bd2f363e67a56e3af4af6e", - "sha256:e66399cf0fc07de4dce4f588fc25bfe84a6d1285cc544e67987d22663393926d", - "sha256:f0620511387790860b249b9241c2f13c3a80e21a73e0b861a2df24e9d6f56730", - "sha256:f4eae045e6ab2bb54ca279733fe4eb85f1effda392666308250714e01907f394", - "sha256:f92cdecb618e5fa4658aeb97d5eb3d2f47aa94ac6477c6daf0f306c5a3b9e6b1", - "sha256:f92f789e4f9241cd262ad7a555ca2c648a98178a953af117ef7fad46aa1d5591" - ], - "version": "==1.14.3" - }, - "cfgv": { - "hashes": [ - "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d", - "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1" - ], - "version": "==3.2.0" + "sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e", + "sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d", + "sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a", + "sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec", + "sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362", + "sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668", + "sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c", + "sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b", + "sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06", + "sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698", + "sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2", + "sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c", + "sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7", + "sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009", + "sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03", + "sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b", + "sha256:7ef7d4ced6b325e92eb4d3502946c78c5367bc416398d387b39591532536734e", + "sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909", + "sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53", + 
"sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35", + "sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26", + "sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b", + "sha256:a5ed8c05548b54b998b9498753fb9cadbfd92ee88e884641377d8a8b291bcc01", + "sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb", + "sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293", + "sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd", + "sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d", + "sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3", + "sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d", + "sha256:d5ff0621c88ce83a28a10d2ce719b2ee85635e85c515f12bac99a95306da4b2e", + "sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca", + "sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d", + "sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775", + "sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375", + "sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b", + "sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b", + "sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f" + ], + "version": "==1.14.4" }, "chardet": { "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", + "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" ], - "version": "==3.0.4" + "version": "==4.0.0" }, "click": { "hashes": [ @@ -154,10 +134,10 @@ }, "click-help-colors": { "hashes": [ - "sha256:0d841a4058ec88c47f93ff6f32547a055f8e0a0273f6bd6cb3e08430f195131d", - "sha256:119e5faf69cfc919c995c5962326ac8fd87f11e56a371af594e3dfd8458f4c6e" + "sha256:eb037a2dd95a9e20b3897c2b3ca57e7f6797f76a8d93f7eeedda7fcdcbc9b635", + "sha256:f6c3d1fe86b07790e6ef0339f458196a8814de90946d876774ea4b4f30a5a539" ], - "version": "==0.8" + "version": "==0.9" }, "colorama": { "hashes": [ @@ -175,37 +155,22 @@ }, "cryptography": { "hashes": [ - "sha256:07ca431b788249af92764e3be9a488aa1d39a0bc3be313d826bbec690417e538", - "sha256:13b88a0bd044b4eae1ef40e265d006e34dbcde0c2f1e15eb9896501b2d8f6c6f", - "sha256:32434673d8505b42c0de4de86da8c1620651abd24afe91ae0335597683ed1b77", - "sha256:3cd75a683b15576cfc822c7c5742b3276e50b21a06672dc3a800a2d5da4ecd1b", - "sha256:4e7268a0ca14536fecfdf2b00297d4e407da904718658c1ff1961c713f90fd33", - "sha256:545a8550782dda68f8cdc75a6e3bf252017aa8f75f19f5a9ca940772fc0cb56e", - "sha256:55d0b896631412b6f0c7de56e12eb3e261ac347fbaa5d5e705291a9016e5f8cb", - "sha256:5849d59358547bf789ee7e0d7a9036b2d29e9a4ddf1ce5e06bb45634f995c53e", - "sha256:6dc59630ecce8c1f558277ceb212c751d6730bd12c80ea96b4ac65637c4f55e7", - "sha256:7117319b44ed1842c617d0a452383a5a052ec6aa726dfbaffa8b94c910444297", - "sha256:75e8e6684cf0034f6bf2a97095cb95f81537b12b36a8fedf06e73050bb171c2d", - "sha256:7b8d9d8d3a9bd240f453342981f765346c87ade811519f98664519696f8e6ab7", - "sha256:a035a10686532b0587d58a606004aa20ad895c60c4d029afa245802347fab57b", - "sha256:a4e27ed0b2504195f855b52052eadcc9795c59909c9d84314c5408687f933fc7", - "sha256:a733671100cd26d816eed39507e585c156e4498293a907029969234e5e634bc4", - "sha256:a75f306a16d9f9afebfbedc41c8c2351d8e61e818ba6b4c40815e2b5740bb6b8", - 
"sha256:bd717aa029217b8ef94a7d21632a3bb5a4e7218a4513d2521c2a2fd63011e98b", - "sha256:d25cecbac20713a7c3bc544372d42d8eafa89799f492a43b79e1dfd650484851", - "sha256:d26a2557d8f9122f9bf445fc7034242f4375bd4e95ecda007667540270965b13", - "sha256:d3545829ab42a66b84a9aaabf216a4dce7f16dbc76eb69be5c302ed6b8f4a29b", - "sha256:d3d5e10be0cf2a12214ddee45c6bd203dab435e3d83b4560c03066eda600bfe3", - "sha256:efe15aca4f64f3a7ea0c09c87826490e50ed166ce67368a68f315ea0807a20df" - ], - "version": "==3.2.1" - }, - "distlib": { - "hashes": [ - "sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb", - "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1" - ], - "version": "==0.3.1" + "sha256:287032b6a7d86abc98e8e977b20138c53fea40e5b24e29090d5a675a973dcd10", + "sha256:288c65eea20bd89b11102c47b118bc1e0749386b0a0dfebba414076c5d4c8188", + "sha256:7eed937ad9b53280a5f53570d3a7dc93cb4412b6a3d58d4c6bb78cc26319c729", + "sha256:dab437c2e84628703e3358f0f06555a6259bc5039209d51aa3b05af667ff4fd0", + "sha256:ee5e19f0856b6fbbdbab15c2787ca65d203801d2d65d0b8de6218f424206c848", + "sha256:f21be9ec6b44c223b2024bbe59d394fadc7be320d18a8d595419afadb6cd5620", + "sha256:f6ea140d2736b7e1f0de4f988c43f76b0b3f3d365080e091715429ba218dce28" + ], + "version": "==3.4.4" + }, + "distro": { + "hashes": [ + "sha256:0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92", + "sha256:df74eed763e18d10d0da624258524ae80486432cd17392d9c3d96f5e83cd2799" + ], + "version": "==1.5.0" }, "docker": { "hashes": [ @@ -217,24 +182,10 @@ }, "fasteners": { "hashes": [ - "sha256:007e4d2b2d4a10093f67e932e5166722d2eab83b77724156e92ad013c6226574", - "sha256:3a176da6b70df9bb88498e1a18a9e4a8579ed5b9141207762368a1017bf8f5ef" - ], - "version": "==0.15" - }, - "filelock": { - "hashes": [ - "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59", - "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836" - ], - "version": "==3.0.12" - }, - "identify": { - "hashes": [ - "sha256:3139bf72d81dfd785b0a464e2776bd59bdc725b4cc10e6cf46b56a0db931c82e", - "sha256:969d844b7a85d32a5f9ac4e163df6e846d73c87c8b75847494ee8f4bd2186421" + "sha256:74b6847e0a6bb3b56c8511af8e24c40e4cf7a774dfff5b251c260ed338096a4b", + "sha256:c995d8c26b017c5d6a6de9ad29a0f9cdd57de61ae1113d28fac26622b06a0933" ], - "version": "==1.5.6" + "version": "==0.16" }, "idna": { "hashes": [ @@ -243,20 +194,12 @@ ], "version": "==2.10" }, - "importlib-metadata": { - "hashes": [ - "sha256:77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da", - "sha256:cefa1a2f919b866c5beb7c9f7b0ebb4061f30a8a9bf16d609b000e2dfaceb9c3" - ], - "markers": "python_version < '3.8'", - "version": "==2.0.0" - }, "jinja2": { "hashes": [ - "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", - "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" + "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", + "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" ], - "version": "==2.11.2" + "version": "==2.11.3" }, "jinja2-time": { "hashes": [ @@ -272,8 +215,12 @@ "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", + "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f", + "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39", 
"sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", + "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014", + "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f", "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", @@ -282,48 +229,49 @@ "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", + "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85", + "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1", "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", + "sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850", + "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0", "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", + "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb", "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", + "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1", + "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2", "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", + "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7", "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", + "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8", "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", + "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193", "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", + "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b", "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", + "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5", + "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c", + "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032", "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", - "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" + "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", + "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" ], "version": "==1.1.1" }, "molecule": { "hashes": [ - "sha256:e181522ff30b1212ad29225f94652dc71bafdcb847c9375eeee1fb169c1c94ef", - 
"sha256:f451af7a8793bea7e12d9f6a1b627af23a299f31063281fc401cbe17ad196a41" + "sha256:b83d73b3e34ad9bc7cc62f1869a2c9e4c4cc5e5c5a2d9de963e838d5f37fb47b", + "sha256:e3ba0fb0bab1f60b6f0aef768a3759b383267773be47ee942adadf27d9f6fa35" ], "index": "pypi", - "version": "==3.0.1" - }, - "monotonic": { - "hashes": [ - "sha256:23953d55076df038541e648a53676fb24980f7a1be290cdda21300b3bc21dfb0", - "sha256:552a91f381532e33cbd07c6a2655a21908088962bb8fa7239ecbcc6ad1140cc7" - ], - "version": "==1.5" - }, - "nodeenv": { - "hashes": [ - "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9", - "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c" - ], - "version": "==1.5.0" + "version": "==3.0.6" }, "paramiko": { "hashes": [ @@ -334,10 +282,10 @@ }, "pathspec": { "hashes": [ - "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0", - "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061" + "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd", + "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d" ], - "version": "==0.8.0" + "version": "==0.8.1" }, "pexpect": { "hashes": [ @@ -360,19 +308,12 @@ ], "version": "==0.5.0" }, - "pre-commit": { - "hashes": [ - "sha256:8f48d8637bdae6fa70cc97db9c1dd5aa7c5c8bf71968932a380628c25978b850", - "sha256:f92a359477f3252452ae2e8d3029de77aec59415c16ae4189bcfba40b757e029" - ], - "version": "==1.21.0" - }, "ptyprocess": { "hashes": [ - "sha256:923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0", - "sha256:d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f" + "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", + "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220" ], - "version": "==0.6.0" + "version": "==0.7.0" }, "pycparser": { "hashes": [ @@ -425,26 +366,36 @@ }, "pyyaml": { "hashes": [ - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", - "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", - "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", - "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", - "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", - "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", - "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" - ], - "version": "==5.3.1" + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + 
"sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" + ], + "version": "==5.4.1" }, "requests": { "hashes": [ - "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b", - "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898" + "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", + "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" ], - "version": "==2.24.0" + "version": "==2.25.1" }, "ruamel.yaml": { "hashes": [ @@ -457,20 +408,27 @@ "ruamel.yaml.clib": { "hashes": [ "sha256:058a1cc3df2a8aecc12f983a48bda99315cebf55a3b3a5463e37bb599b05727b", + "sha256:1236df55e0f73cd138c0eca074ee086136c3f16a97c2ac719032c050f7e0622f", + "sha256:1f8c0a4577c0e6c99d208de5c4d3fd8aceed9574bb154d7a2b21c16bb924154c", "sha256:2602e91bd5c1b874d6f93d3086f9830f3e907c543c7672cf293a97c3fabdcd91", "sha256:28116f204103cb3a108dfd37668f20abe6e3cafd0d3fd40dba126c732457b3cc", "sha256:2d24bd98af676f4990c4d715bcdc2a60b19c56a3fb3a763164d2d8ca0e806ba7", + "sha256:2fd336a5c6415c82e2deb40d08c222087febe0aebe520f4d21910629018ab0f3", "sha256:30dca9bbcbb1cc858717438218d11eafb78666759e5094dd767468c0d577a7e7", "sha256:44c7b0498c39f27795224438f1a6be6c5352f82cb887bc33d962c3a3acc00df6", "sha256:464e66a04e740d754170be5e740657a3b3b6d2bcc567f0c3437879a6e6087ff6", + "sha256:46d6d20815064e8bb023ea8628cfb7402c0f0e83de2c2227a88097e239a7dffd", "sha256:4df5019e7783d14b79217ad9c56edf1ba7485d614ad5a385d1b3c768635c81c0", "sha256:4e52c96ca66de04be42ea2278012a2342d89f5e82b4512fb6fb7134e377e2e62", "sha256:5254af7d8bdf4d5484c089f929cb7f5bafa59b4f01d4f48adda4be41e6d29f99", "sha256:52ae5739e4b5d6317b52f5b040b1b6639e8af68a5b8fd606a8b08658fbd0cab5", "sha256:53b9dd1abd70e257a6e32f934ebc482dac5edb8c93e23deb663eac724c30b026", + "sha256:6c0a5dc52fc74eb87c67374a4e554d4761fd42a4d01390b7e868b30d21f4b8bb", "sha256:73b3d43e04cc4b228fa6fa5d796409ece6fcb53a6c270eb2048109cbcbc3b9c2", "sha256:74161d827407f4db9072011adcfb825b5258a5ccb3d2cd518dd6c9edea9e30f1", + "sha256:75f0ee6839532e52a3a53f80ce64925ed4aed697dd3fa890c4c918f3304bd4f4", "sha256:839dd72545ef7ba78fd2aa1a5dd07b33696adf3e68fae7f31327161c1093001b", + "sha256:8be05be57dc5c7b4a0b24edcaa2f7275866d9c907725226cdde46da09367d923", "sha256:8e8fd0a22c9d92af3a34f91e8a2594eeb35cba90ab643c5e0e643567dc8be43e", "sha256:a873e4d4954f865dcb60bdc4914af7eaae48fb56b60ed6daa1d6251c72f5337c", "sha256:ab845f1f51f7eb750a78937be9f79baea4a42c7960f5a94dde34e69f3cce1988", @@ -484,19 +442,27 @@ "markers": "platform_python_implementation == 'CPython' and python_version < '3.9'", "version": "==0.2.2" }, + "selinux": { + "hashes": [ + "sha256:820adcf1b4451c9cc7759848797703263ba0eb6a4cad76d73548a9e0d57b7926", + 
"sha256:d435f514e834e3fdc0941f6a29d086b80b2ea51b28112aee6254bd104ee42a74" + ], + "markers": "sys_platform == 'linux'", + "version": "==0.2.1" + }, "sh": { "hashes": [ - "sha256:39aa9af22f6558a0c5d132881cf43e34828ca03e4ae11114852ca6a55c7c1d8e", - "sha256:75e86a836f47de095d4531718fe8489e6f7446c75ddfa5596f632727b919ffae" + "sha256:6f792e45b45d039b423081558904981e8ab49572b0c38009fcc65feaab06bcda", + "sha256:97a3d2205e3c6a842d87ebbc9ae93acae5a352b1bc4609b428d0fd5bb9e286a3" ], - "version": "==1.14.1" + "version": "==1.13.1" }, "shellingham": { "hashes": [ - "sha256:576c1982bea0ba82fb46c36feb951319d7f42214a82634233f58b40d858a751e", - "sha256:7f6206ae169dc1a03af8a138681b3f962ae61cc93ade84d0585cca3aaf770044" + "sha256:4855c2458d6904829bd34c299f11fdeed7cfefbf8a2c522e4caea6cd76b3171e", + "sha256:536b67a0697f2e4af32ab176c00a50ac2899c5a05e0d8e2dadac8e58888283f9" ], - "version": "==1.3.2" + "version": "==1.4.0" }, "six": { "hashes": [ @@ -519,13 +485,6 @@ ], "version": "==1.3" }, - "toml": { - "hashes": [ - "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f", - "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88" - ], - "version": "==0.10.1" - }, "tree-format": { "hashes": [ "sha256:a538523aa78ae7a4b10003b04f3e1b37708e0e089d99c9d3b9e1c71384c9a7f9", @@ -535,17 +494,10 @@ }, "urllib3": { "hashes": [ - "sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2", - "sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e" + "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", + "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" ], - "version": "==1.25.11" - }, - "virtualenv": { - "hashes": [ - "sha256:b0011228208944ce71052987437d3843e05690b2f23d1c7da4263fde104c97a2", - "sha256:b8d6110f493af256a40d65e29846c69340a947669eec8ce784fcf3dd3af28380" - ], - "version": "==20.1.0" + "version": "==1.26.3" }, "websocket-client": { "hashes": [ @@ -556,17 +508,10 @@ }, "yamllint": { "hashes": [ - "sha256:b1549cbe5b47b6ba67bdeea31720f5c51431a4d0c076c1557952d841f7223519", - "sha256:c7be4d0d2584a1b561498fa9acb77ad22eb434a109725c7781373ae496d823b3" - ], - "version": "==1.25.0" - }, - "zipp": { - "hashes": [ - "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108", - "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb" + "sha256:8a5f8e442f49309eaf3e9d7232ce76f2fc8026f5c0c0b164b83f33fed1399637", + "sha256:b0e4c89985c7f5f8451c2eb8c67d804d10ac13a4abe031cbf49bdf3465d01087" ], - "version": "==3.4.0" + "version": "==1.26.0" } }, "develop": {} From cd70ab4f16e9ccbe52ae378fdf615504dc2d57aa Mon Sep 17 00:00:00 2001 From: David Mateo Date: Tue, 2 Mar 2021 17:34:17 +0100 Subject: [PATCH 03/21] Many many changes - Commented vars that by default are empty main.yml - Add new necessary vars main.yml - New v2 vars due uncompatible config between airflow 1.x and 2.x like the hostname_callable - Add AIRFLOW_HOME and AIRFLOW_CONFIG to environment vars in converge - Updated verify.yml - Updated test airfow_tests.yml - Updated deprecated and warning ways when conditionally running tasks - New tasks to perform checks for admin user existence - Now airflow will be installed in a virtualenv for better dependencies management - Updated env file - Updated RuntimeDirectory on services - airflow.cfg template update to conditionally set parameters if ansible vars are defined - yamllint updated --- .yamlint => .yamllint | 3 +- defaults/main.yml | 15 +++-- molecule/default/converge.yml | 3 + 
molecule/default/tests/test_airflow.yml | 42 +++++--------- molecule/default/verify.yml | 15 +++-- tasks/config.yml | 38 +++++++++++-- tasks/install.yml | 74 ++++++++++++++++++++----- templates/airflow-environment-file.j2 | 2 +- templates/airflow-flower.service.j2 | 4 +- templates/airflow-scheduler.service.j2 | 4 +- templates/airflow-webserver.service.j2 | 4 +- templates/airflow-worker.service.j2 | 4 +- templates/airflow2.cfg.j2 | 30 +++++----- 13 files changed, 155 insertions(+), 83 deletions(-) rename .yamlint => .yamllint (96%) diff --git a/.yamlint b/.yamllint similarity index 96% rename from .yamlint rename to .yamllint index 84510f6..fe1769f 100644 --- a/.yamlint +++ b/.yamllint @@ -28,7 +28,7 @@ rules: level: error hyphens: level: error - indentation: disable + indentation: enable key-duplicates: enable line-length: disable new-line-at-end-of-file: disable @@ -36,4 +36,3 @@ rules: type: unix trailing-spaces: disable truthy: disable - \ No newline at end of file diff --git a/defaults/main.yml b/defaults/main.yml index 1079f57..6e1710c 100644 --- a/defaults/main.yml +++ b/defaults/main.yml @@ -97,6 +97,7 @@ airflow_required_libs: - python3 - python3-dev - python3-pip + - python3-setuptools - python-pip - acl # Airflow 2.0 system level requirements https://airflow.apache.org/docs/apache-airflow/stable/installation.html#system-dependencies @@ -113,8 +114,8 @@ airflow_required_libs: - sqlite3 - unixodbc -airflow_required_python_packages: - - {name: werkzeug, version: 0.16.1} +# airflow_required_python_packages: + # - {name: werkzeug, version: 0.16.1} # Version is not mandatory # - {name: pyasn1, version: 0.4.4} @@ -155,7 +156,7 @@ airflow_services: path: airflow-flower.service.j2 # Files & Paths -airflow_executable: "/usr/local/bin/airflow" +airflow_executable: "{{ airflow_app_home }}/bin/airflow" airflow_pip_executable: "pip3" airflow_app_home: "/opt/{{ airflow_app_name }}" airflow_home: "/home/{{ airflow_user }}" @@ -163,6 +164,7 @@ airflow_conf_path: "/etc/{{ airflow_app_name }}" airflow_pidfile_folder: "/run/{{ airflow_app_name }}" airflow_environment_file_folder: /etc/sysconfig airflow_environment_extra_vars: [] +airflow_health_url: "{{ airflow_webserver_host }}:{{ airflow_webserver_port }}/health" # - name: PATH # value: "/custom/path/bin:$PATH" airflow_skeleton_paths: @@ -172,7 +174,6 @@ airflow_skeleton_paths: - "{{ airflow_dags_folder }}" - "{{ airflow_logs_folder }}" - "{{ airflow_child_process_log_folder }}" - - "{{ airflow_pidfile_folder }}" - "{{ airflow_environment_file_folder }}" - "{{ airflow_plugins_folder }}" @@ -191,7 +192,7 @@ celery_version: 3.1.17 # DAGs # Python dependencies needed by the DAGs. This variable is expected to be a # list of items following the structure provided in the example comment -dags_dependencies: +# dags_dependencies: # - {name: pip_package, version: version_needed} @@ -200,6 +201,7 @@ dags_dependencies: # [core] airflow_dags_folder: "{{ airflow_home }}/dags" airflow_hostname_callable: "socket:getfqdn" +airflow_hostname_callable_v2: "socket.getfqdn" airflow_default_timezone: "utc" # The executor class that airflow should use. 
Choices include # SequentialExecutor, LocalExecutor, CeleryExecutor, DaskExecutor @@ -337,7 +339,7 @@ airflow_webserver_worker_timeout: 120 airflow_webserver_worker_refresh_batch_size: 1 airflow_webserver_worker_refresh_interval: 30 airflow_webserver_reload_on_plugin_change: False -airflow_webserver_secret_key: temporary_key +airflow_webserver_secret_key: "replace_me" airflow_webserver_workers: 4 airflow_webserver_worker_class: sync airflow_webserver_access_logfile: "{{ airflow_logs_folder }}/gunicorn-access.log" @@ -356,6 +358,7 @@ airflow_webserver_log_animation_speed: 1000 airflow_webserver_hide_paused_dags_by_default: False airflow_webserver_page_size: 100 airflow_webserver_navbar_color: "#007A87" +airflow_webserver_navbar_color_v2: "#fff" airflow_webserver_default_dag_run_display_number: 25 airflow_webserver_enable_proxy_fix: False airflow_webserver_proxy_fix_x_for: 1 diff --git a/molecule/default/converge.yml b/molecule/default/converge.yml index 7968f82..431fe99 100644 --- a/molecule/default/converge.yml +++ b/molecule/default/converge.yml @@ -4,3 +4,6 @@ hosts: airflow_group roles: - role: airflow-role + environment: + AIRFLOW_HOME: "{{ airflow_app_home }}" + AIRFLOW_CONFIG: "{{ airflow_conf_path }}/airflow.cfg" diff --git a/molecule/default/tests/test_airflow.yml b/molecule/default/tests/test_airflow.yml index 034e1cd..e994151 100644 --- a/molecule/default/tests/test_airflow.yml +++ b/molecule/default/tests/test_airflow.yml @@ -1,7 +1,7 @@ --- command: - /usr/local/bin/airflow version: + "{{ airflow_executable }} version": exit-status: 0 stdout: - "{{ airflow_version }}" @@ -21,7 +21,7 @@ service: enabled: "{{ airflow_services.airflow-flower.enabled }}" state: "{{ airflow_services.airflow-flower.state }}" process: - {{ airflow_exec_name }}: + {{ airflow_executable }}: running: true user: {{ airflow_user }}: @@ -44,30 +44,14 @@ port: http: {{ airflow_health_url }}: status: 200 -# file: -# {{ airflow_bin_path }}: -# exists: true -# filetype: directory -# owner: {{ airflow_user }} -# group: {{ airflow_group }} -# {{ airflow_metadatadb_path }}: -# exists: true -# filetype: directory -# owner: {{ airflow_user }} -# group: {{ airflow_group }} -# {{ airflow_conf_path }}: -# exists: true -# filetype: directory -# owner: {{ airflow_user }} -# group: {{ airflow_group }} -# {{ airflow_config_file_path }}: -# exists: true -# filetype: file -# owner: {{ airflow_user }} -# group: {{ airflow_group }} -# contains: -# - "/APP_NAME\\s*=\\s.+/" -# - "/airflow_WEBSERVER_PORT\\s*=\\s\\d+/" -# - "/SECRET_KEY\\s*=\\s'.+'/" -# - "/SQLALCHEMY_DATABASE_URI\\s*=\\s'.+'/" - +file: + "{{ airflow_app_home }}/bin": + exists: true + filetype: directory + owner: {{ airflow_user }} + group: {{ airflow_group }} + {{ airflow_conf_path }}: + exists: true + filetype: directory + owner: {{ airflow_user }} + group: {{ airflow_group }} diff --git a/molecule/default/verify.yml b/molecule/default/verify.yml index 311c989..80f10b7 100644 --- a/molecule/default/verify.yml +++ b/molecule/default/verify.yml @@ -3,13 +3,15 @@ # Tests need distributed to the appropriate ansible host/groups # prior to execution by `goss validate`. 
-- name: Verify - hosts: all +- name: Verify airflow + hosts: + - airflow become: true vars: - goss_version: v0.3.7 + goss_version: v0.3.16 goss_arch: amd64 goss_dst: /usr/local/bin/goss + goss_sha256sum: 827e354b48f93bce933f5efcd1f00dc82569c42a179cf2d384b040d8a80bfbfb goss_url: "https://github.com/aelsabbahy/goss/releases/download/{{ goss_version }}/goss-linux-{{ goss_arch }}" goss_test_directory: /tmp goss_format: documentation @@ -23,24 +25,25 @@ get_url: url: "{{ goss_url }}" dest: "{{ goss_dst }}" + sha256sum: "{{ goss_sha256sum }}" mode: 0755 register: download_goss until: download_goss is succeeded - retries: 6 + retries: 3 - name: Copy Goss tests to remote template: src: "{{ item }}" dest: "{{ goss_test_directory }}/{{ item | basename }}" with_fileglob: - - "{{ playbook_dir }}/tests/test_*.yml" + - "tests/test_*.yml" - name: Register test files shell: "ls {{ goss_test_directory }}/test_*.yml" register: test_files - name: Execute Goss tests - command: "{{ goss_dst }} -g {{ item }} validate --format {{ goss_format }} --retry-timeout 10s" + command: "{{ goss_dst }} -g {{ item }} validate --format {{ goss_format }}" register: test_results with_items: "{{ test_files.stdout_lines }}" diff --git a/tasks/config.yml b/tasks/config.yml index 7b5cac8..3ce845e 100644 --- a/tasks/config.yml +++ b/tasks/config.yml @@ -58,7 +58,9 @@ become_user: "{{ airflow_user }}" tags: skip_ansible_lint - when: airflow_version is version( '2.0.0', '<') and ( airflow_install.changed or airflow_config.changed) + when: + - airflow_version is version( '2.0.0', '<') + - (airflow_install.changed or airflow_config.changed) notify: - restart airflow-webserver - restart airflow-scheduler @@ -73,16 +75,44 @@ become_user: "{{ airflow_user }}" # tags: # skip_ansible_lint - when: airflow_version is version( '2.0.0', '>=') and (airflow_install.changed or airflow_config.changed) + when: + - airflow_version is version( '2.0.0', '>=') + - (airflow_install.changed or airflow_config.changed) notify: - restart airflow-webserver - restart airflow-scheduler - restart airflow-worker - restart airflow-flower -- name: Airflow | Create Admin user (> 2.0) - command: "{{ airflow_executable }} users create -u {{ airflow_admin_user.username }} -p {{ airflow_admin_user.password }} -f {{ airflow_admin_user.firstname }} -l {{ airflow_admin_user.lastname }} -r {{ airflow_admin_user.role }} -e {{ airflow_admin_user.email }}" +- name: Airflow | Check Admin user (> 2.0) + command: "{{ airflow_executable }} users list" + register: airflow_check_admin + changed_when: false when: airflow_version is version( '2.0.0', '>=') + no_log: true + +- name: Airflow | Create Admin user (> 2.0) + command: + argv: + - "{{ airflow_executable }}" + - users + - create + - --username + - "{{ airflow_admin_user.username }}" + - --password + - "{{ airflow_admin_user.password }}" + - --firstname + - "{{ airflow_admin_user.firstname }}" + - --lastname + - "{{ airflow_admin_user.lastname }}" + - --role + - "{{ airflow_admin_user.role }}" + - --email + - "{{ airflow_admin_user.email }}" + no_log: true + when: + - airflow_version is version( '2.0.0', '>=') + - "airflow_admin_user.email not in airflow_check_admin.stdout" - name: Airflow | Copy extra airflow config files (provided by playbooks) copy: diff --git a/tasks/install.yml b/tasks/install.yml index 8c53a9b..637b132 100644 --- a/tasks/install.yml +++ b/tasks/install.yml @@ -8,10 +8,10 @@ user: name: "{{ airflow_user }}" group: "{{ airflow_group }}" - system: yes - shell: /usr/sbin/nologin + system: true + shell: 
/usr/sbin/nologin
     # shell: /bin/bash
-    createhome: yes
+    createhome: true
 
 - name: Airflow | Ensure airflow skeleton paths
   file:
@@ -34,48 +34,90 @@
     name: pip
     version: 20.2.4
 
-- name: Airflow | Installing Python pip dependencies
+- name: Airflow | Install virtualenv
   pip:
     executable: "{{ airflow_pip_executable }}"
+    name: virtualenv
+
+- name: Airflow | Check if virtualenv exists
+  stat:
+    path: "{{ airflow_app_home }}/pyvenv.cfg"
+  register: virtualenv_check
+
+- name: Airflow | Set up a virtualenv
+  become: true
+  become_user: "{{ airflow_user }}"
+  command: "virtualenv -p python{{ airflow_python_version }} {{ airflow_app_home }}"
+  when: not virtualenv_check.stat.exists
+
+- name: Airflow | Installing Python pip dependencies
+  become: true
+  become_user: "{{ airflow_user }}"
+  pip:
     name: "{{ item.name }}"
     version: "{{ item.version | default(omit) }}"
+    virtualenv: "{{ airflow_app_home }}"
+    virtualenv_python: "python{{ airflow_python_version }}"
   with_items: "{{ airflow_required_python_packages }}"
+  when: airflow_required_python_packages is defined
 
 - name: Airflow | Installing proper Celery version
+  become: true
+  become_user: "{{ airflow_user }}"
   pip:
-    executable: "{{ airflow_pip_executable }}"
     name: celery
     version: "{{ celery_version }}"
+    virtualenv: "{{ airflow_app_home }}"
+    virtualenv_python: "python{{ airflow_python_version }}"
   when: airflow_executor == "CeleryExecutor"
 
 - name: Airflow | Installing extra Celery packages
+  become: true
+  become_user: "{{ airflow_user }}"
   pip:
-    executable: "{{ airflow_pip_executable }}"
     name: celery[{{ item }}]
     version: "{{ celery_version }}"
+    virtualenv: "{{ airflow_app_home }}"
+    virtualenv_python: "python{{ airflow_python_version }}"
   with_items: "{{ celery_extra_packages }}"
   when: airflow_executor == "CeleryExecutor" and celery_extra_packages
 
-- name: Airflow | Set AIRFLOW_HOME environment variable in "{{ airflow_home }}"
+- name: Airflow | Add Airflow bin path to the PATH env
+  lineinfile:
+    path: /etc/environment
+    line: "PATH=$PATH:{{ airflow_app_home }}/bin"
+
+- name: Airflow | Set AIRFLOW_HOME environment variable in app "{{ airflow_app_home }}"
+  lineinfile:
+    path: /etc/environment
+    line: "AIRFLOW_HOME={{ airflow_app_home }}"
+
+- name: Airflow | Set AIRFLOW_CONFIG environment variable in app "{{ airflow_conf_path }}"
   lineinfile:
     path: /etc/environment
-    line: "AIRFLOW_HOME={{ airflow_home }}"
+    line: "AIRFLOW_CONFIG={{ airflow_conf_path }}/airflow.cfg"
 
 - name: Airflow | Installing Airflow
+  become: true
+  become_user: "{{ airflow_user }}"
   pip:
-    executable: "{{ airflow_pip_executable }}"
     name: "{{ airflow_package }}[{{ airflow_bundle_package | default(omit) }}]"
     version: "{{ airflow_version }}"
     state: present
+    virtualenv: "{{ airflow_app_home }}"
+    virtualenv_python: "python{{ airflow_python_version }}"
     extra_args: --no-cache-dir
   register: airflow_install
   environment:
     SLUGIFY_USES_TEXT_UNIDECODE: "yes"
 
 - name: Airflow | Installing Airflow Extra Packages (prior to Ansible 2.7)
+  become: true
+  become_user: "{{ airflow_user }}"
   pip:
-    executable: "{{ airflow_pip_executable }}"
     name: "{{ airflow_package }}[{{ item }}]=={{ airflow_version }}"
+    virtualenv: "{{ airflow_app_home }}"
+    virtualenv_python: "python{{ airflow_python_version }}"
   with_items: "{{ airflow_extra_packages }}"
   when:
     - airflow_extra_packages is defined
@@ -83,22 +125,28 @@
     - ansible_version.full is version_compare('2.7', '<')
 
 - name: Airflow | Installing Airflow Extra Packages
+  become: true
+  become_user: "{{ airflow_user }}"
   pip:
-    executable: "{{
airflow_pip_executable }}" name: "apache-airflow[{{ airflow_extra_packages | join(', ') }}]" version: "{{ airflow_version }}" + virtualenv: "{{ airflow_app_home }}" + virtualenv_python: "python{{ airflow_python_version }}" when: - airflow_extra_packages is defined - not airflow_bundle_package - ansible_version.full is version_compare('2.7', '>=') - name: Airflow | Installing DAGs dependencies + become: true + become_user: "{{ airflow_user }}" pip: - executable: "{{ airflow_pip_executable }}" name: "{{ item.name }}" version: "{{ item.version }}" + virtualenv: "{{ airflow_app_home }}" + virtualenv_python: "python{{ airflow_python_version }}" with_items: "{{ dags_dependencies }}" - when: dags_dependencies + when: dags_dependencies is defined notify: - restart airflow-webserver - restart airflow-scheduler diff --git a/templates/airflow-environment-file.j2 b/templates/airflow-environment-file.j2 index 4e6ac27..63c3deb 100644 --- a/templates/airflow-environment-file.j2 +++ b/templates/airflow-environment-file.j2 @@ -23,7 +23,7 @@ SCHEDULER_RUNS={{ airflow_scheduler_runs }} AIRFLOW_HOME={{ airflow_app_home }} AIRFLOW_CONFIG={{ airflow_conf_path }}/airflow.cfg -PATH=$PATH:{{ airflow_app_home }}/airflow_venv/bin +PATH=$PATH:{{ airflow_app_home }}/bin HOSTALIASES=/etc/host.aliases diff --git a/templates/airflow-flower.service.j2 b/templates/airflow-flower.service.j2 index 988977e..325fe6b 100644 --- a/templates/airflow-flower.service.j2 +++ b/templates/airflow-flower.service.j2 @@ -17,14 +17,14 @@ After=network.target postgresql.service mysql.service redis.service rabbitmq-ser Wants=postgresql.service mysql.service redis.service rabbitmq-server.service [Service] +RuntimeDirectory={{ airflow_app_name }}-flower EnvironmentFile={{ airflow_environment_file_folder }}/airflow User={{ airflow_user }} Group={{ airflow_group }} Type=simple -ExecStart={{ airflow_executable }} flower --pid {{ airflow_pidfile_folder }}/flower.pid +ExecStart={{ airflow_executable }} flower --pid {{ airflow_pidfile_folder }}-flower/flower.pid Restart=on-failure RestartSec=10s -RuntimeDirectory=airflow PrivateTmp={{ airflow_private_tmp }} [Install] diff --git a/templates/airflow-scheduler.service.j2 b/templates/airflow-scheduler.service.j2 index 7bf161b..531050b 100644 --- a/templates/airflow-scheduler.service.j2 +++ b/templates/airflow-scheduler.service.j2 @@ -17,14 +17,14 @@ After=network.target postgresql.service mysql.service redis.service rabbitmq-ser Wants=postgresql.service mysql.service redis.service rabbitmq-server.service [Service] +RuntimeDirectory={{ airflow_app_name }}-scheduler EnvironmentFile={{ airflow_environment_file_folder }}/airflow User={{ airflow_user }} Group={{ airflow_group }} Type=simple -ExecStart={{ airflow_executable }} scheduler -n ${SCHEDULER_RUNS} --pid {{ airflow_pidfile_folder }}/scheduler.pid +ExecStart={{ airflow_executable }} scheduler -n ${SCHEDULER_RUNS} --pid {{ airflow_pidfile_folder }}-scheduler/scheduler.pid Restart=always RestartSec=5s -RuntimeDirectory=airflow PrivateTmp={{ airflow_private_tmp }} [Install] diff --git a/templates/airflow-webserver.service.j2 b/templates/airflow-webserver.service.j2 index f09dcef..540400c 100644 --- a/templates/airflow-webserver.service.j2 +++ b/templates/airflow-webserver.service.j2 @@ -17,15 +17,15 @@ After=network.target postgresql.service mysql.service redis.service rabbitmq-ser Wants=postgresql.service mysql.service redis.service rabbitmq-server.service [Service] +RuntimeDirectory={{ airflow_app_name }}-webserver EnvironmentFile={{ 
airflow_environment_file_folder }}/airflow User={{ airflow_user }} Group={{ airflow_group }} Type=simple -ExecStart={{ airflow_executable }} webserver --pid {{ airflow_pidfile_folder }}/webserver.pid +ExecStart={{ airflow_executable }} webserver --pid {{ airflow_pidfile_folder }}-webserver/webserver.pid Restart=on-failure RestartSec=5s PrivateTmp={{ airflow_private_tmp }} -RuntimeDirectory=airflow [Install] WantedBy=multi-user.target diff --git a/templates/airflow-worker.service.j2 b/templates/airflow-worker.service.j2 index eb91bd5..ae95cbd 100644 --- a/templates/airflow-worker.service.j2 +++ b/templates/airflow-worker.service.j2 @@ -17,16 +17,16 @@ After=network.target postgresql.service mysql.service redis.service rabbitmq-ser Wants=postgresql.service mysql.service redis.service rabbitmq-server.service [Service] +RuntimeDirectory={{ airflow_app_name }}-worker EnvironmentFile={{ airflow_environment_file_folder }}/airflow User={{ airflow_user }} Group={{ airflow_group }} Type=simple -ExecStart={{ airflow_executable }} worker --pid {{ airflow_pidfile_folder }}/worker.pid +ExecStart={{ airflow_executable }} worker --pid {{ airflow_pidfile_folder }}-worker/worker.pid KillSignal=SIGINT Restart=on-failure RestartSec=10s PrivateTmp={{ airflow_private_tmp }} -RuntimeDirectory=airflow [Install] WantedBy=multi-user.target diff --git a/templates/airflow2.cfg.j2 b/templates/airflow2.cfg.j2 index 8c51a5f..7b14c05 100644 --- a/templates/airflow2.cfg.j2 +++ b/templates/airflow2.cfg.j2 @@ -11,7 +11,7 @@ dags_folder = {{ airflow_dags_folder }} # # No argument should be required in the function specified. # If using IP address as hostname is preferred, use value ``airflow.utils.net.get_host_ip_address`` -hostname_callable = {{ airflow_hostname_callable }} +hostname_callable = {{ airflow_hostname_callable_v2 }} # Default timezone in case supplied date times are naive # can be utc (default), system, or any IANA timezone string (e.g. Europe/Amsterdam) @@ -35,7 +35,8 @@ sql_engine_encoding = {{ airflow_database_engine_encoding }} # This is particularly useful in case of mysql with utf8mb4 encoding because # primary keys for XCom table has too big size and ``sql_engine_collation_for_ids`` should # be set to ``utf8mb3_general_ci``. -sql_engine_collation_for_ids = {{ airflow_database_engine_collation_for_ids }} +# sql_engine_collation_for_ids = +{% if airflow_database_engine_collation_for_ids %}sql_engine_collation_for_ids = {{ airflow_database_engine_collation_for_ids }}{% endif %} # If SqlAlchemy should pool database connections. sql_alchemy_pool_enabled = {{ airflow_database_pool_enabled }} @@ -75,7 +76,8 @@ sql_alchemy_schema = {{ airflow_database_schema }} # This is useful when you want to configure db engine args that SqlAlchemy won't parse # in connection string. # See https://docs.sqlalchemy.org/en/13/core/engines.html#sqlalchemy.create_engine.params.connect_args -sql_alchemy_connect_args = {{ airflow_database_connect_args }} +# sql_alchemy_connect_args = +{% if airflow_database_connect_args %}sql_alchemy_connect_args = {{ airflow_database_connect_args }}{% endif %} # The amount of parallelism as a setting to the executor. This defines # the max number of task instances that should run simultaneously @@ -175,7 +177,7 @@ min_serialized_dag_fetch_interval = {{ airflow_min_serialized_dag_fetch_interval # If set to True, Webserver reads file contents from DB instead of # trying to access files in a DAG folder. 
# Example: store_dag_code = False -store_dag_code = {{ airflow_store_dag_code }} +{% if airflow_store_dag_code %}store_dag_code = {{ airflow_store_dag_code }}{% endif %} # Maximum number of Rendered Task Instance Fields (Template Fields) per task to store # in the Database. @@ -515,7 +517,7 @@ hide_paused_dags_by_default = {{ airflow_webserver_hide_paused_dags_by_default } page_size = {{ airflow_webserver_page_size }} # Define the color of navigation bar -navbar_color = {{ airflow_webserver_navbar_color }} +navbar_color = {{ airflow_webserver_navbar_color_v2 }} # Default dagrun to show in UI default_dag_run_display_number = {{ airflow_webserver_default_dag_run_display_number }} @@ -553,10 +555,10 @@ x_frame_enabled = {{ airflow_webserver_x_frame_enabled }} # Send anonymous user activity to your analytics tool # choose from google_analytics, segment, or metarouter -analytics_tool = {{ airflow_webserver_analytics_tool }} +{% if airflow_webserver_analytics_tool %}analytics_tool = {{ airflow_webserver_analytics_tool }}{% endif %} # Unique ID of your account in the analytics tool -analytics_id = {{ airflow_webserver_analytics_id }} +{% if airflow_webserver_analytics_id %}analytics_id = {{ airflow_webserver_analytics_id }}{% endif %} # 'Recent Tasks' stats will show for old DagRuns if set show_recent_stats_for_completed_runs = {{ airflow_webserver_show_recent_stats_for_completed_runs }} @@ -585,12 +587,12 @@ default_email_on_failure = {{ airflow_email_default_email_on_failure }} # File that will be used as the template for Email subject (which will be rendered using Jinja2). # If not set, Airflow uses a base template. # Example: subject_template = /path/to/my_subject_template_file -subject_template = {{airflow_email_subject_template }} +{% if airflow_email_subject_template %}subject_template = {{ airflow_email_subject_template }}{% endif %} # File that will be used as the template for Email content (which will be rendered using Jinja2). # If not set, Airflow uses a base template. # Example: html_content_template = /path/to/my_html_content_template_file -html_content_template = {{ airflow_email_html_content_template }} +{% if airflow_email_html_content_template %}html_content_template = {{ airflow_email_html_content_template }}{% endif %} [smtp] @@ -652,7 +654,7 @@ worker_concurrency = {{ airflow_celery_concurrency }} # If autoscale option is available, worker_concurrency will be ignored. # http://docs.celeryproject.org/en/latest/reference/celery.bin.worker.html#cmdoption-celery-worker-autoscale # Example: worker_autoscale = 16,12 -worker_autoscale = {{ airflow_celery_worker_autoscale }} +{% if airflow_celery_worker_autoscale %}worker_autoscale = {{ airflow_celery_worker_autoscale }}{% endif %} # Used to increase the number of tasks that a worker prefetches which can improve performance. # The number of processes multiplied by worker_prefetch_multiplier is the number of tasks @@ -662,7 +664,7 @@ worker_autoscale = {{ airflow_celery_worker_autoscale }} # claimed blocked tasks. 
# https://docs.celeryproject.org/en/stable/userguide/optimizing.html#prefetch-limits # Example: worker_prefetch_multiplier = 1 -worker_prefetch_multiplier = {{ airflow_celery_worker_prefetch_multiplier }} +{% if airflow_celery_worker_prefetch_multiplier %}worker_prefetch_multiplier = {{ airflow_celery_worker_prefetch_multiplier }}{% endif %} # When you start an airflow worker, airflow starts a tiny web server # subprocess to serve the workers local log files to the airflow main @@ -758,7 +760,7 @@ worker_precheck = {{ airflow_celery_worker_precheck }} # See: # http://docs.celeryproject.org/en/master/userguide/configuration.html#std:setting-broker_transport_options # Example: visibility_timeout = 21600 -visibility_timeout = {{ airflow_celery_broker_visibility_timeout }} +{% if airflow_celery_broker_visibility_timeout %}visibility_timeout = {{ airflow_celery_broker_visibility_timeout }}{% endif %} [dask] {% if airflow_executor == "DaskExecutor" %} @@ -961,10 +963,10 @@ in_cluster = {{ airflow_kubernetes_in_cluster }} # When running with in_cluster=False change the default cluster_context or config_file # options to Kubernetes client. Leave blank these to use default behaviour like ``kubectl`` has. -cluster_context = {{ airflow_kubernetes_cluster_context }} +{% if airflow_kubernetes_cluster_context %}cluster_context = {{ airflow_kubernetes_cluster_context }}{% endif %} # Path to the kubernetes configfile to be used when ``in_cluster`` is set to False -config_file = {{ airflow_kubernetes_config_file }} +{% if airflow_kubernetes_config_file %}config_file = {{ airflow_kubernetes_config_file }}{% endif %} # Keyword parameters to pass while calling a kubernetes client core_v1_api methods # from Kubernetes Executor provided as a single line formatted JSON dictionary string. 
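For reference, the check-then-create admin user tasks introduced in tasks/config.yml above reduce to roughly the following shell equivalent on the target host (a sketch only: the executable path assumes the role's default virtualenv layout under /opt/airflow, and the credential values are the illustrative defaults, not real ones):

    /opt/airflow/bin/airflow users list | grep -q 'email@domain.com' || \
      /opt/airflow/bin/airflow users create \
        --username admin --password admin \
        --firstname admin --lastname admin \
        --role Admin --email email@domain.com

Checking for the email in the `users list` output before calling `users create` is what keeps the task idempotent across converges.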
From 3c6924553ef48ee2b735abca2d78c1880383b084 Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Wed, 3 Mar 2021 18:44:42 +0100
Subject: [PATCH 04/21] Tests and service vars updated

- Goss tests completed
- airflow_services var updated to avoid ansible problems with the "-" character
- Updated the tasks related to the airflow_services var

---
 defaults/main.yml | 21 ++++++++++++++-------
 handlers/main.yml | 16 ++++++++--------
 molecule/default/tests/test_airflow.yml | 25 ++++++++++---------------
 molecule/default/verify.yml | 1 +
 tasks/install.yml | 4 ++--
 tasks/service.yml | 2 +-
 6 files changed, 36 insertions(+), 33 deletions(-)

diff --git a/defaults/main.yml b/defaults/main.yml
index 6e1710c..b377dc8 100644
--- a/defaults/main.yml
+++ b/defaults/main.yml
@@ -133,25 +133,32 @@
   email: email@domain.com
 
 ## Service options
-airflow_scheduler_runs: 5
-
+airflow_scheduler_runs: 1000
 airflow_private_tmp: false
 
 airflow_services:
-  airflow-webserver:
+  airflow_webserver:
+    service_name: airflow-webserver
     enabled: true
+    running: true
     state: started
     path: airflow-webserver.service.j2
-  airflow-scheduler:
+  airflow_scheduler:
+    service_name: airflow-scheduler
     enabled: true
+    running: true
     state: started
     path: airflow-scheduler.service.j2
-  airflow-worker:
+  airflow_worker:
+    service_name: airflow-worker
     enabled: false
+    running: false
     state: started
     path: airflow-worker.service.j2
-  airflow-flower:
+  airflow_flower:
+    service_name: airflow-flower
     enabled: false
+    running: false
     state: started
     path: airflow-flower.service.j2
 
@@ -164,7 +171,7 @@
 airflow_conf_path: "/etc/{{ airflow_app_name }}"
 airflow_pidfile_folder: "/run/{{ airflow_app_name }}"
 airflow_environment_file_folder: /etc/sysconfig
 airflow_environment_extra_vars: []
-airflow_health_url: "{{ airflow_webserver_host }}:{{ airflow_webserver_port }}/health"
+airflow_health_url: "http://{{ airflow_webserver_host }}:{{ airflow_webserver_port }}/health"
 # - name: PATH
 #   value: "/custom/path/bin:$PATH"
 airflow_skeleton_paths:
diff --git a/handlers/main.yml b/handlers/main.yml
index 31e7ad7..2cb6358 100644
--- a/handlers/main.yml
+++ b/handlers/main.yml
@@ -6,29 +6,29 @@
     name: airflow-webserver
     state: restarted
   when: >-
-    airflow_services['airflow-webserver']
-    and airflow_services['airflow-webserver']['enabled']
+    airflow_services['airflow_webserver']
+    and airflow_services['airflow_webserver']['enabled']
 
 - name: restart airflow-scheduler
   systemd:
     name: airflow-scheduler
     state: restarted
   when: >-
-    airflow_services['airflow-scheduler']
-    and airflow_services['airflow-scheduler']['enabled']
+    airflow_services['airflow_scheduler']
+    and airflow_services['airflow_scheduler']['enabled']
 
 - name: restart airflow-worker
   systemd:
     name: airflow-worker
     state: restarted
   when: >-
-    airflow_services['airflow-worker']
-    and airflow_services['airflow-worker']['enabled']
+    airflow_services['airflow_worker']
+    and airflow_services['airflow_worker']['enabled']
 
 - name: restart airflow-flower
   systemd:
     name: airflow-flower
     state: restarted
   when: >-
-    airflow_services['airflow-flower']
-    and airflow_services['airflow-flower']['enabled']
+    airflow_services['airflow_flower']
+    and airflow_services['airflow_flower']['enabled']
diff --git a/molecule/default/tests/test_airflow.yml b/molecule/default/tests/test_airflow.yml
index e994151..9b76526 100644
--- a/molecule/default/tests/test_airflow.yml
+++ b/molecule/default/tests/test_airflow.yml
@@ -7,22 +7,17 @@
     - "{{ airflow_version }}"
 service:
   airflow-webserver:
-    enabled: "{{ airflow_services.airflow-webserver.enabled }}"
-    state: "{{ airflow_services.airflow-webserver.state }}"
-    running: true
+    enabled: {{ airflow_services.airflow_webserver.enabled }}
+    running: {{ airflow_services.airflow_webserver.running }}
   airflow-scheduler:
-    enabled: "{{ airflow_services.airflow-scheduler.enabled }}"
-    state: "{{ airflow_services.airflow-scheduler.state }}"
-    running: true
+    enabled: {{ airflow_services.airflow_scheduler.enabled }}
+    running: {{ airflow_services.airflow_scheduler.running }}
   airflow-worker:
-    enabled: "{{ airflow_services.airflow-worker.enabled }}"
-    state: "{{ airflow_services.airflow-worker.state }}"
+    enabled: {{ airflow_services.airflow_worker.enabled }}
+    running: {{ airflow_services.airflow_worker.running }}
   airflow-flower:
-    enabled: "{{ airflow_services.airflow-flower.enabled }}"
-    state: "{{ airflow_services.airflow-flower.state }}"
-process:
-  {{ airflow_executable }}:
-    running: true
+    enabled: {{ airflow_services.airflow_flower.enabled }}
+    running: {{ airflow_services.airflow_flower.running }}
 user:
   {{ airflow_user }}:
     exists: true
@@ -34,11 +29,11 @@
     exists: true
 port:
   tcp:{{ airflow_webserver_port }}:
-    listening: true
+    listening: {{ airflow_services.airflow_webserver.running }}
     ip:
       - {{ airflow_webserver_host }}
   tcp:{{ airflow_flower_port }}:
-    listening: true
+    listening: {{ airflow_services.airflow_flower.running }}
     ip:
       - {{ airflow_flower_host }}
 http:
diff --git a/molecule/default/verify.yml b/molecule/default/verify.yml
index 80f10b7..364493c 100644
--- a/molecule/default/verify.yml
+++ b/molecule/default/verify.yml
@@ -46,6 +46,7 @@
       command: "{{ goss_dst }} -g {{ item }} validate --format {{ goss_format }}"
       register: test_results
       with_items: "{{ test_files.stdout_lines }}"
+      ignore_errors: true
 
     - name: Display details about the Goss results
       debug:
diff --git a/tasks/install.yml b/tasks/install.yml
index 637b132..4ae74e0 100644
--- a/tasks/install.yml
+++ b/tasks/install.yml
@@ -164,8 +164,8 @@
 - name: Airflow | Copy Daemon scripts
   template:
     src: "{{ item.value.path }}"
-    dest: "/lib/systemd/system/{{ item.key }}.service"
+    dest: "/lib/systemd/system/{{ item.value.service_name }}.service"
     mode: 0644
-  notify: restart {{ item.key }}
+  notify: restart {{ item.value.service_name }}
   with_dict: "{{ airflow_services }}"
   when: item.value.enabled
diff --git a/tasks/service.yml b/tasks/service.yml
index e2af96c..cda9426 100644
--- a/tasks/service.yml
+++ b/tasks/service.yml
@@ -2,7 +2,7 @@
 
 - name: Airflow | Configuring service
   systemd:
-    name: "{{ item.key }}"
+    name: "{{ item.value.service_name }}"
     state: "{{ item.value.state }}"
     enabled: "{{ item.value.enabled }}"
     daemon_reload: true
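The Verify play above boils down to running the Goss binary against the rendered test file on the instance. A sketch of the resulting command, with the paths taken from the play vars (goss_dst, goss_test_directory, goss_format):

    # run one rendered test suite with human-readable output
    /usr/local/bin/goss -g /tmp/test_airflow.yml validate --format documentation

Templating the Goss YAML through Ansible first is what lets the tests reuse role vars such as airflow_services and airflow_webserver_port.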
From 4cc0c18b61dc003501fd089e78c655b9f86e01de Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Thu, 11 Mar 2021 11:56:14 +0100
Subject: [PATCH 05/21] Environment var exports updated and more

- removed unnecessary airflow required libs
- removed airflow user home
- changed default dag folder
- "replace me" fernet key placeholder
- set retries to 6 when downloading Goss, and restored the Goss retry timeout
- extra_packages scenario updated
- environment removed in tasks, using global at converge play level
- Replaced exports to /etc/environment with a profile.d script exporting env vars
- profile.d airflow.sh template added

---
 defaults/main.yml | 13 +++--
 molecule/default/verify.yml | 4 +-
 molecule/extra_packages/converge.yml | 12 ++++
 .../group_vars/airflow/main.yml | 15 +++--
 molecule/extra_packages/molecule.yml | 7 ++-
 molecule/extra_packages/playbook.yml | 6 --
 .../extra_packages/tests/test_airflow.yml | 56 ++++++++++++++-----
 molecule/extra_packages/verify.yml | 12 ++--
 tasks/config.yml | 12 +---
 tasks/install.yml | 23 +++-----
 templates/airflow-profile.d-file.j2 | 31 ++++++++++
 templates/airflow.cfg.j2 | 2 +-
 12 files changed, 122 insertions(+), 71 deletions(-)
 create mode 100644 molecule/extra_packages/converge.yml
 delete mode 100644 molecule/extra_packages/playbook.yml
 create mode 100644 templates/airflow-profile.d-file.j2

diff --git a/defaults/main.yml b/defaults/main.yml
index b377dc8..62ccd35 100644
--- a/defaults/main.yml
+++ b/defaults/main.yml
@@ -3,8 +3,12 @@
 airflow_app_name: airflow
 airflow_version: 2.0.1
 airflow_package: apache-airflow
+
+# The default buster python version; if you want another version, make sure it is
+# installed beforehand
 airflow_python_version: 3.7
 airflow_constraint_url: "https://raw.githubusercontent.com/apache/airflow/constraints-{{ airflow_version }}/constraints-{{ airflow_python_version }}.txt"
+
 # Available extra packages:
 # https://airflow.apache.org/docs/apache-airflow/stable/extra-packages-ref.html
 # List should follow Ansible's YAML basics:
@@ -94,12 +98,11 @@ airflow_bundle_package: # all|all_dbs|devel|devel_hadoop|devel_all|devel_ci See
   # - microsoft.winrm
 
 airflow_required_libs:
-  - python3
   - python3-dev
   - python3-pip
   - python3-setuptools
   - python-pip
-  - acl
+  # - acl
 # Airflow 2.0 system level requirements https://airflow.apache.org/docs/apache-airflow/stable/installation.html#system-dependencies
   - freetds-bin
   - krb5-user
@@ -166,7 +169,6 @@ airflow_services:
 airflow_executable: "{{ airflow_app_home }}/bin/airflow"
 airflow_pip_executable: "pip3"
 airflow_app_home: "/opt/{{ airflow_app_name }}"
-airflow_home: "/home/{{ airflow_user }}"
 airflow_conf_path: "/etc/{{ airflow_app_name }}"
 airflow_pidfile_folder: "/run/{{ airflow_app_name }}"
 airflow_environment_file_folder: /etc/sysconfig
@@ -176,7 +178,6 @@ airflow_health_url: "http://{{ airflow_webserver_host }}:{{ airflow_webserver_po
 # - name: PATH
 #   value: "/custom/path/bin:$PATH"
 airflow_skeleton_paths:
   - "{{ airflow_app_home }}"
-  - "{{ airflow_home }}"
   - "{{ airflow_conf_path }}"
   - "{{ airflow_dags_folder }}"
   - "{{ airflow_logs_folder }}"
@@ -206,7 +207,7 @@ celery_version: 3.1.17
 # AIRFLOW CONFIGURATION https://airflow.apache.org/docs/apache-airflow/stable/configurations-ref.html
 # ---------------------
 # [core]
-airflow_dags_folder: "{{ airflow_home }}/dags"
+airflow_dags_folder: "{{ airflow_app_home }}/dags"
 airflow_hostname_callable: "socket:getfqdn"
 airflow_hostname_callable_v2: "socket.getfqdn"
 airflow_default_timezone: "utc"
@@ -231,7 +232,7 @@ airflow_load_examples: False
 airflow_load_default_connections: False
 airflow_plugins_folder: "{{ airflow_app_home }}/plugins"
 airflow_execute_tasks_new_python_interpreter: False
-airflow_fernet_key:
+airflow_fernet_key: nOtAfErNeTkEyRePlAcEmE____ItfURHlEDxrt-bBQw=
 airflow_donot_pickle: false
 airflow_dagbag_import_timeout: 30
 airflow_dagbag_import_error_tracebacks: True
diff --git a/molecule/default/verify.yml b/molecule/default/verify.yml
index 364493c..a9befbf 100644
--- a/molecule/default/verify.yml
+++ b/molecule/default/verify.yml
@@ -29,7
+29,7 @@ mode: 0755 register: download_goss until: download_goss is succeeded - retries: 3 + retries: 6 - name: Copy Goss tests to remote template: @@ -43,7 +43,7 @@ register: test_files - name: Execute Goss tests - command: "{{ goss_dst }} -g {{ item }} validate --format {{ goss_format }}" + command: "{{ goss_dst }} -g {{ item }} validate --format {{ goss_format }} --retry-timeout 10s" register: test_results with_items: "{{ test_files.stdout_lines }}" ignore_errors: true diff --git a/molecule/extra_packages/converge.yml b/molecule/extra_packages/converge.yml new file mode 100644 index 0000000..bcf504d --- /dev/null +++ b/molecule/extra_packages/converge.yml @@ -0,0 +1,12 @@ +--- + +- name: Converge + hosts: airflow_group + roles: + - role: airflow-role + environment: + AIRFLOW_HOME: "{{ airflow_app_home }}" + AIRFLOW_CONFIG: "{{ airflow_conf_path }}/airflow.cfg" + vars_files: + - ../../defaults/main.yml + - ./group_vars/airflow/main.yml diff --git a/molecule/extra_packages/group_vars/airflow/main.yml b/molecule/extra_packages/group_vars/airflow/main.yml index 1ba0c6b..e05cabd 100644 --- a/molecule/extra_packages/group_vars/airflow/main.yml +++ b/molecule/extra_packages/group_vars/airflow/main.yml @@ -1,11 +1,12 @@ --- -airflow_fernet_key: xKy13nPFfDflJ0DYGVTwf_DEmbItfURHlEDxrt-bBQw= - -airflow_required_libs: - - acl - - python-pip - - python-mysqldb +airflow_fernet_key: nOtAfErNeTkEyRePlAcEmE____ItfURHlEDxrt-bBQw= airflow_extra_packages: - celery - - mysql + - cgroups + - ftp + - http + - jdbc + - sftp + - sqlite + - ssh diff --git a/molecule/extra_packages/molecule.yml b/molecule/extra_packages/molecule.yml index 946eb39..db7f9b1 100644 --- a/molecule/extra_packages/molecule.yml +++ b/molecule/extra_packages/molecule.yml @@ -7,8 +7,10 @@ lint: | yamllint . ansible-lint . 
platforms: - - name: airflow-extra-packages-${MOLECULE_DISTRO} - image: ${MOLECULE_DISTRO:-debian:MOLECULE_DISTRO-slim} + - name: airflow-extra-packages + groups: + - airflow_group + image: ${MOLECULE_DISTRO:-debian:buster-slim} privileged: false capabilities: - SYS_ADMIN @@ -36,4 +38,3 @@ provisioner: # name: default verifier: name: ansible - diff --git a/molecule/extra_packages/playbook.yml b/molecule/extra_packages/playbook.yml deleted file mode 100644 index f0c41cb..0000000 --- a/molecule/extra_packages/playbook.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- - -- name: Converge - hosts: all - roles: - - role: airflow-role diff --git a/molecule/extra_packages/tests/test_airflow.yml b/molecule/extra_packages/tests/test_airflow.yml index 8f26ec8..ac9c0eb 100644 --- a/molecule/extra_packages/tests/test_airflow.yml +++ b/molecule/extra_packages/tests/test_airflow.yml @@ -1,26 +1,52 @@ --- +command: + "{{ airflow_executable }} version": + exit-status: 0 + stdout: + - "{{ airflow_version }}" service: -{% for airflow_service in airflow_services %} -{% if airflow_services[airflow_service]["enabled"] %} - {{ airflow_service }}: - enabled: true - running: true -{% endif %} -{% endfor %} - + airflow-webserver: + enabled: {{ airflow_services.airflow_webserver.enabled }} + running: {{ airflow_services.airflow_webserver.running }} + airflow-scheduler: + enabled: {{ airflow_services.airflow_scheduler.enabled }} + running: {{ airflow_services.airflow_scheduler.running }} + airflow-worker: + enabled: {{ airflow_services.airflow_worker.enabled }} + running: {{ airflow_services.airflow_worker.running }} + airflow-flower: + enabled: {{ airflow_services.airflow_flower.enabled }} + running: {{ airflow_services.airflow_flower.running }} user: {{ airflow_user }}: exists: true groups: - {{ airflow_group }} - + shell: /usr/sbin/nologin group: {{ airflow_group }}: exists: true - -command: - /usr/local/bin/airflow version: - exit-status: 0 - stdout: - - "{{ airflow_version }}" +port: + tcp:{{ airflow_webserver_port }}: + listening: {{ airflow_services.airflow_webserver.running }} + ip: + - {{ airflow_webserver_host }} + tcp:{{ airflow_flower_port }}: + listening: {{ airflow_services.airflow_flower.running }} + ip: + - {{ airflow_flower_host }} +http: + {{ airflow_health_url }}: + status: 200 +file: + "{{ airflow_app_home }}/bin": + exists: true + filetype: directory + owner: {{ airflow_user }} + group: {{ airflow_group }} + {{ airflow_conf_path }}: + exists: true + filetype: directory + owner: {{ airflow_user }} + group: {{ airflow_group }} \ No newline at end of file diff --git a/molecule/extra_packages/verify.yml b/molecule/extra_packages/verify.yml index 311c989..a9befbf 100644 --- a/molecule/extra_packages/verify.yml +++ b/molecule/extra_packages/verify.yml @@ -3,13 +3,15 @@ # Tests need distributed to the appropriate ansible host/groups # prior to execution by `goss validate`. 
-- name: Verify
-  hosts: all
+- name: Verify airflow
+  hosts:
+    - airflow
   become: true
   vars:
-    goss_version: v0.3.7
+    goss_version: v0.3.16
     goss_arch: amd64
     goss_dst: /usr/local/bin/goss
+    goss_sha256sum: 827e354b48f93bce933f5efcd1f00dc82569c42a179cf2d384b040d8a80bfbfb
     goss_url: "https://github.com/aelsabbahy/goss/releases/download/{{ goss_version }}/goss-linux-{{ goss_arch }}"
     goss_test_directory: /tmp
     goss_format: documentation
@@ -23,6 +25,7 @@
       get_url:
         url: "{{ goss_url }}"
         dest: "{{ goss_dst }}"
+        sha256sum: "{{ goss_sha256sum }}"
         mode: 0755
       register: download_goss
       until: download_goss is succeeded
@@ -33,7 +36,7 @@
         src: "{{ item }}"
         dest: "{{ goss_test_directory }}/{{ item | basename }}"
       with_fileglob:
-        - "{{ playbook_dir }}/tests/test_*.yml"
+        - "tests/test_*.yml"
 
     - name: Register test files
       shell: "ls {{ goss_test_directory }}/test_*.yml"
@@ -43,6 +46,7 @@
       command: "{{ goss_dst }} -g {{ item }} validate --format {{ goss_format }} --retry-timeout 10s"
       register: test_results
       with_items: "{{ test_files.stdout_lines }}"
+      ignore_errors: true
 
     - name: Display details about the Goss results
       debug:
diff --git a/tasks/config.yml b/tasks/config.yml
index 3ce845e..c942e05 100644
--- a/tasks/config.yml
+++ b/tasks/config.yml
@@ -52,8 +52,6 @@
 - name: Airflow | Initializing DB < 2.0
   command: "{{ airflow_executable }} initdb"
-  environment:
-    AIRFLOW_HOME: "{{ airflow_app_home }}"
   become: true
   become_user: "{{ airflow_user }}"
   tags:
@@ -69,15 +67,11 @@
 - name: Airflow | Initializing DB > 2.0
   command: "{{ airflow_executable }} db init"
-  environment:
-    AIRFLOW_HOME: "{{ airflow_app_home }}"
   become: true
   become_user: "{{ airflow_user }}"
-  # tags:
-  #  skip_ansible_lint
   when:
     - airflow_version is version( '2.0.0', '>=')
-    - (airflow_install.changed or airflow_config.changed)
+    - airflow_install.changed or airflow_config.changed
   notify:
     - restart airflow-webserver
     - restart airflow-scheduler
@@ -146,8 +140,6 @@
 - name: Airflow | Add variables from configuration file
   command: "{{ airflow_executable }} variables -s {{ item.key }} {{ item.value }}"
-  environment:
-    AIRFLOW_HOME: "{{ airflow_app_home }}"
   become: true
   become_user: "{{ airflow_user }}"
   with_items: "{{ airflow_admin_variables }}"
@@ -158,8 +150,6 @@
   command: "{{ airflow_executable }} connections -a \
     {% for key, value in item.iteritems() %}--{{ key }} '{{ value }}' \
     {% endfor %}"
-  environment:
-    AIRFLOW_HOME: "{{ airflow_app_home }}"
   become: true
   become_user: "{{ airflow_user }}"
   with_items: "{{ airflow_admin_connections }}"
diff --git a/tasks/install.yml b/tasks/install.yml
index 4ae74e0..026efc6 100644
--- a/tasks/install.yml
+++ b/tasks/install.yml
@@ -11,7 +11,7 @@
     system: true
     shell: /usr/sbin/nologin
     # shell: /bin/bash
-    createhome: true
+    createhome: false
 
 - name: Airflow | Ensure airflow skeleton paths
   file:
@@ -21,6 +21,12 @@
     state: directory
   with_items: "{{ airflow_skeleton_paths }}"
 
+- name: Airflow | Copy profile.d file
+  template:
+    src: airflow-profile.d-file.j2
+    dest: "/etc/profile.d/airflow.sh"
+    mode: 0644
+
 - name: Airflow | Installing dependencies
   apt:
     pkg: "{{ airflow_required_libs }}"
@@ -82,21 +88,6 @@
   with_items: "{{ celery_extra_packages }}"
   when: airflow_executor == "CeleryExecutor" and celery_extra_packages
 
-- name: Airflow | Add Airflow bin path to the PATH env
-  lineinfile:
-    path: /etc/environment
-    line: "PATH=$PATH:{{ airflow_app_home }}/bin"
-
-- name: Airflow | Set AIRFLOW_HOME environment variable in app "{{ airflow_app_home }}"
-  lineinfile:
-    path: /etc/environment
-    line: "AIRFLOW_HOME={{ airflow_app_home }}"
-
-- name: Airflow | Set AIRFLOW_CONFIG environment variable in app "{{ airflow_conf_path }}"
-  lineinfile:
-    path: /etc/environment
-    line: "AIRFLOW_CONFIG={{ airflow_conf_path }}/airflow.cfg"
-
 - name: Airflow | Installing Airflow
   become: true
   become_user: "{{ airflow_user }}"
diff --git a/templates/airflow-profile.d-file.j2 b/templates/airflow-profile.d-file.j2
new file mode 100644
index 0000000..bf1bb04
--- /dev/null
+++ b/templates/airflow-profile.d-file.j2
@@ -0,0 +1,31 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file exports Airflow's environment variables for login shells. It is installed as
+# /etc/profile.d/airflow.sh; the systemd units keep reading their EnvironmentFile instead.
+#
+# AIRFLOW_CONFIG=
+# AIRFLOW_HOME=
+#
+# required setting, 0 sets it to unlimited. The scheduler gets restarted after every X runs
+export SCHEDULER_RUNS={{ airflow_scheduler_runs }}
+
+if [ -d "{{ airflow_app_home }}/bin" ] ; then
+    export PATH="{{ airflow_app_home }}/bin:$PATH"
+fi
+if [ -d "{{ airflow_app_home }}" ] ; then
+    export AIRFLOW_HOME={{ airflow_app_home }}
+fi
+if [ -d "{{ airflow_conf_path }}" ] ; then
+    export AIRFLOW_CONFIG={{ airflow_conf_path }}/airflow.cfg
+fi
diff --git a/templates/airflow.cfg.j2 b/templates/airflow.cfg.j2
index fe61722..ec2687d 100644
--- a/templates/airflow.cfg.j2
+++ b/templates/airflow.cfg.j2
@@ -1,6 +1,6 @@
 [core]
 # The home folder for airflow, default is ~/airflow
-airflow_home = {{ airflow_home }}
+airflow_home = {{ airflow_app_home }}
 
 # The folder where your airflow pipelines live, most likely a
 # subfolder in a code repository
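Since this patch moves the exports from /etc/environment into /etc/profile.d/airflow.sh, a quick manual smoke test on a provisioned host could look like the following sketch (paths assume the role defaults airflow_app_home=/opt/airflow and airflow_conf_path=/etc/airflow):

    source /etc/profile.d/airflow.sh
    echo "$AIRFLOW_HOME"      # expected: /opt/airflow
    echo "$AIRFLOW_CONFIG"    # expected: /etc/airflow/airflow.cfg
    airflow version           # resolved via the virtualenv bin dir prepended to PATH

Note the systemd units are unaffected by this change: they keep reading the EnvironmentFile referenced in the service templates.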
airflow_app_home }}" - -- name: Airflow | Set AIRFLOW_CONFIG environment variable in app "{{ airflow_conf_path }}" - lineinfile: - path: /etc/environment - line: "AIRFLOW_CONFIG={{ airflow_conf_path }}/airflow.cfg" - - name: Airflow | Installing Airflow become: true become_user: "{{ airflow_user }}" diff --git a/templates/airflow-profile.d-file.j2 b/templates/airflow-profile.d-file.j2 new file mode 100644 index 0000000..bf1bb04 --- /dev/null +++ b/templates/airflow-profile.d-file.j2 @@ -0,0 +1,31 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This file is the environment file for Airflow. Put this file in /etc/sysconfig/airflow per default +# configuration of the systemd unit files. +# +# AIRFLOW_CONFIG= +# AIRFLOW_HOME= +# +# required setting, 0 sets it to unlimited. Scheduler will get restart after every X runs +export SCHEDULER_RUNS={{ airflow_scheduler_runs }} + +if [ -d "{{ airflow_app_home }}/bin" ] ; then + export PATH="{{ airflow_app_home }}/bin:$PATH" +fi +if [ -d "{{ airflow_app_home }}" ] ; then + export AIRFLOW_HOME={{ airflow_app_home }} +fi +if [ -d "{{ airflow_conf_path }}" ] ; then + export AIRFLOW_CONFIG={{ airflow_conf_path }}/airflow.cfg +fi diff --git a/templates/airflow.cfg.j2 b/templates/airflow.cfg.j2 index fe61722..ec2687d 100644 --- a/templates/airflow.cfg.j2 +++ b/templates/airflow.cfg.j2 @@ -1,6 +1,6 @@ [core] # The home folder for airflow, default is ~/airflow -airflow_home = {{ airflow_home }} +airflow_home = {{ airflow_app_home }} # The folder where your airflow pipelines live, most likely a # subfolder in a code repository From 584bddf33e9f6d7ab3e5912a7e50bf7b01c98cc4 Mon Sep 17 00:00:00 2001 From: David Mateo Date: Thu, 11 Mar 2021 13:22:34 +0100 Subject: [PATCH 06/21] Travis typo and cleaning vars - fixed .travis.yml python version typo - unsetting unnecesary values from vars and setting up in scenario vars --- .travis.yml | 2 +- defaults/main.yml | 16 ++++++++-------- molecule/default/group_vars/airflow/main.yml | 9 +++++++++ 3 files changed, 18 insertions(+), 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index 81f6cbe..66af49a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,7 @@ --- dist: xenial language: python -python: "3.8" +python: "3.7" os: linux services: - docker diff --git a/defaults/main.yml b/defaults/main.yml index 62ccd35..b9d1ffb 100644 --- a/defaults/main.yml +++ b/defaults/main.yml @@ -128,12 +128,12 @@ airflow_group: airflow # Admin user airflow_admin_user: - username: admin - password: admin - role: Admin - firstname: admin - lastname: admin - email: email@domain.com + username: + password: + role: + firstname: + lastname: + email: ## Service options airflow_scheduler_runs: 1000 @@ -232,7 +232,7 @@ airflow_load_examples: False airflow_load_default_connections: False airflow_plugins_folder: "{{ airflow_app_home }}/plugins" airflow_execute_tasks_new_python_interpreter: False -airflow_fernet_key: nOtAfErNeTkEyRePlAcEmE____ItfURHlEDxrt-bBQw= +airflow_fernet_key: airflow_donot_pickle: false airflow_dagbag_import_timeout: 30 
From 0000f58a61c20e199785068d8753825193f037e0 Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Mon, 15 Mar 2021 10:27:55 +0100
Subject: [PATCH 07/21] Update outdated logrotate folder

---
 templates/gunicorn-logrotate.j2 | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/templates/gunicorn-logrotate.j2 b/templates/gunicorn-logrotate.j2
index 8a132a5..00b235b 100644
--- a/templates/gunicorn-logrotate.j2
+++ b/templates/gunicorn-logrotate.j2
@@ -8,6 +8,6 @@
     create 644 {{ airflow_user }} {{ airflow_group }}
     sharedscripts
     postrotate
-        [ -f {{ airflow_pidfile_folder }}/webserver.pid ] && kill -USR1 `cat {{ airflow_pidfile_folder }}/webserver.pid`
+        [ -f {{ airflow_pidfile_folder }}-webserver/webserver.pid ] && kill -USR1 `cat {{ airflow_pidfile_folder }}-webserver/webserver.pid`
     endscript
 }

From 06213177c3b7a99d9990296668067f3df9b37d72 Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Fri, 2 Apr 2021 12:24:23 +0200
Subject: [PATCH 08/21] Split default vars

- Split default vars into multiple files for better understanding
- Add vars for webserver_config.py config
- Add webserver_config.py template
- Basic LDAP templated
- Celery install tasks "deprecated": install celery from extra packages instead
- New users task file for user-related tasks, depending on the type of auth

---
 defaults/{main.yml => main/airflow-cfg.yml} | 210 +-------------------
 defaults/main/main.yml | 209 +++++++++++++++++++
 defaults/main/webserver-py.yml | 78 ++++++++
 tasks/config.yml | 45 ++---
 tasks/install.yml | 41 ++--
 tasks/main.yml | 6 +
 tasks/users.yml | 58 ++++++
 templates/webserver_config.j2 | 169 ++++++++++++++++
 8 files changed, 558 insertions(+), 258 deletions(-)
 rename defaults/{main.yml => main/airflow-cfg.yml} (73%)
 create mode 100644 defaults/main/main.yml
 create mode 100644 defaults/main/webserver-py.yml
 create mode 100644 tasks/users.yml
 create mode 100644 templates/webserver_config.j2

diff --git a/defaults/main.yml b/defaults/main/airflow-cfg.yml
similarity index 73%
rename from defaults/main.yml
rename to defaults/main/airflow-cfg.yml
index b9d1ffb..f8f46ca 100644
--- a/defaults/main.yml
+++ b/defaults/main/airflow-cfg.yml
@@ -1,211 +1,6 @@
 ---
-## General
-airflow_app_name: airflow
-airflow_version: 2.0.1
-airflow_package: apache-airflow
-
-# The default buster python version; if you want another version, make sure it is
-# installed beforehand
-airflow_python_version: 3.7
-airflow_constraint_url: "https://raw.githubusercontent.com/apache/airflow/constraints-{{ airflow_version }}/constraints-{{ airflow_python_version
}}.txt" - -# Available extra packages: -# https://airflow.apache.org/docs/apache-airflow/stable/extra-packages-ref.html -# List should follow Ansible's YAML basics: -# https://docs.ansible.com/ansible/latest/reference_appendices/YAMLSyntax.html#yaml-basics -airflow_bundle_package: # all|all_dbs|devel|devel_hadoop|devel_all|devel_ci See bundle extras section -# airflow_extra_packages: - # [Apache] - # - apache.atlas - # - apache.beam - # - apache.cassandra - # - apache.druid - # - apache.hdfs - # - apache.hive - # - apache.kylin - # - apache.livy - # - apache.pig - # - apache.pinot - # - apache.spark - # - apache.sqoop - # - apache.webhdfs - # [Services] - # - amazon - # - microsoft.azure - # - cloudant - # - databricks - # - datadog - # - dask - # - dingding - # - discord - # - facebook - # - google - # - github_enterprise - # - google_auth - # - hashicorp - # - jira - # - opsgenie - # - pagerduty - # - plexus - # - qubole - # - salesforce - # - sendgrid - # - segment - # - sentry - # - slack - # - snowflake - # - telegram - # - vertica - # - yandex - # - zendesk - # [Software] - # - async - # - celery - # - cncf.kubernetes - # - docker - # - elasticsearch - # - exasol - # - jenkins - # - ldap - # - mongo - # - microsoft.mssql - # - mysql - # - odbc - # - openfaas - # - oracle - # - postgres - # - password - # - presto - # - rabbitmq - # - redis - # - samba - # - singularity - # - statsd - # - tableau - # - virtualenv - # [Others] - # - cgroups - # - ftp - # - grpc - # - http - # - imap - # - jdbc - # - kerberos - # - papermill - # - sftp - # - sqlite - # - ssh - # - microsoft.winrm - -airflow_required_libs: - - python3-dev - - python3-pip - - python3-setuptools - - python-pip - # - acl -# Airflow 2.0 system level requirements https://airflow.apache.org/docs/apache-airflow/stable/installation.html#system-dependencies - - freetds-bin - - krb5-user - - ldap-utils - - libffi6 - - libsasl2-2 - - libsasl2-modules - - libssl1.1 - - locales - - lsb-release - - sasl2-bin - - sqlite3 - - unixodbc - -# airflow_required_python_packages: - # - {name: werkzeug, version: 0.16.1} -# Version is not mandatory -# - {name: pyasn1, version: 0.4.4} - -# Owner -airflow_user: airflow -airflow_group: airflow - -# Admin user -airflow_admin_user: - username: - password: - role: - firstname: - lastname: - email: - -## Service options -airflow_scheduler_runs: 1000 -airflow_private_tmp: false - -airflow_services: - airflow_webserver: - service_name: airflow-webserver - enabled: true - running: true - state: started - path: airflow-webserver.service.j2 - airflow_scheduler: - service_name: airflow-scheduler - enabled: true - running: true - state: started - path: airflow-scheduler.service.j2 - airflow_worker: - service_name: airflow-worker - enabled: false - running: false - state: started - path: airflow-worker.service.j2 - airflow_flower: - service_name: airflow-flower - enabled: false - running: false - state: started - path: airflow-flower.service.j2 - -# Files & Paths -airflow_executable: "{{ airflow_app_home }}/bin/airflow" -airflow_pip_executable: "pip3" -airflow_app_home: "/opt/{{ airflow_app_name }}" -airflow_conf_path: "/etc/{{ airflow_app_name }}" -airflow_pidfile_folder: "/run/{{ airflow_app_name }}" -airflow_environment_file_folder: /etc/sysconfig -airflow_environment_extra_vars: [] -airflow_health_url: "http://{{ airflow_webserver_host }}:{{ airflow_webserver_port }}/health" -# - name: PATH -# value: "/custom/path/bin:$PATH" -airflow_skeleton_paths: - - "{{ airflow_app_home }}" - - "{{ airflow_conf_path 
}}" - - "{{ airflow_dags_folder }}" - - "{{ airflow_logs_folder }}" - - "{{ airflow_child_process_log_folder }}" - - "{{ airflow_environment_file_folder }}" - - "{{ airflow_plugins_folder }}" - -# Allowing playbooks to provide external config files&templates -airflow_extra_conf_path: "{{ playbook_dir }}/files/airflow" -airflow_extra_conf_template_path: "{{ playbook_dir }}/templates/airflow" -airflow_config_template_path: airflow.cfg.j2 -airflow_config_template_path_v2: airflow2.cfg.j2 - -# DICT -celery_extra_packages: - -# This Celery version is guaranteed to work with Airflow 1.8.x -celery_version: 3.1.17 - -# DAGs -# Python dependencies needed by the DAGs. This variable is expected to be a -# list of items following the structure provided in the example comment -# dags_dependencies: -# - {name: pip_package, version: version_needed} - - # AIRFLOW CONFIGURATION https://airflow.apache.org/docs/apache-airflow/stable/configurations-ref.html -# --------------------- +# ------------------------------------------------------------------------------ # [core] airflow_dags_folder: "{{ airflow_app_home }}/dags" airflow_hostname_callable: "socket:getfqdn" @@ -386,7 +181,7 @@ airflow_webserver_session_lifetime_minutes: 43200 airflow_webserver_filter_by_owner: False airflow_webserver_owner_mode: user -airflow_webserver_rbac: "False" +airflow_webserver_rbac: False # [email] airflow_email_backend: airflow.utils.email.send_email_smtp @@ -433,6 +228,7 @@ airflow_celery_task_track_started: True airflow_celery_task_adoption_timeout: 600 airflow_celery_task_publish_max_retries: 3 airflow_celery_worker_precheck: False + # [flower] airflow_flower_host: 0.0.0.0 airflow_flower_url_prefix: diff --git a/defaults/main/main.yml b/defaults/main/main.yml new file mode 100644 index 0000000..aeed9f3 --- /dev/null +++ b/defaults/main/main.yml @@ -0,0 +1,209 @@ +--- +# [General] +airflow_app_name: airflow +airflow_version: 2.0.1 +airflow_package: apache-airflow + +# The default buster's python version, if you want other version you should make sure it is +# installed before +airflow_python_version: 3.7 +airflow_constraint_url: "https://raw.githubusercontent.com/apache/airflow/constraints-{{ airflow_version }}/constraints-{{ airflow_python_version }}.txt" + +# Available extra packages: +# https://airflow.apache.org/docs/apache-airflow/stable/extra-packages-ref.html +# List should follow Ansible's YAML basics: +# https://docs.ansible.com/ansible/latest/reference_appendices/YAMLSyntax.html#yaml-basics +airflow_bundle_package: # all|all_dbs|devel|devel_hadoop|devel_all|devel_ci See bundle extras section +# airflow_extra_packages: + # [Apache] + # - apache.atlas + # - apache.beam + # - apache.cassandra + # - apache.druid + # - apache.hdfs + # - apache.hive + # - apache.kylin + # - apache.livy + # - apache.pig + # - apache.pinot + # - apache.spark + # - apache.sqoop + # - apache.webhdfs + # [Services] + # - amazon + # - microsoft.azure + # - cloudant + # - databricks + # - datadog + # - dask + # - dingding + # - discord + # - facebook + # - google + # - github_enterprise + # - google_auth + # - hashicorp + # - jira + # - opsgenie + # - pagerduty + # - plexus + # - qubole + # - salesforce + # - sendgrid + # - segment + # - sentry + # - slack + # - snowflake + # - telegram + # - vertica + # - yandex + # - zendesk + # [Software] + # - async + # - celery + # - cncf.kubernetes + # - docker + # - elasticsearch + # - exasol + # - jenkins + # - ldap + # - mongo + # - microsoft.mssql + # - mysql + # - odbc + # - openfaas + # - oracle + # 
- postgres + # - password + # - presto + # - rabbitmq + # - redis + # - samba + # - singularity + # - statsd + # - tableau + # - virtualenv + # [Others] + # - cgroups + # - ftp + # - grpc + # - http + # - imap + # - jdbc + # - kerberos + # - papermill + # - sftp + # - sqlite + # - ssh + # - microsoft.winrm + +airflow_default_required_libs: + - python3-dev + - python3-pip + - python3-setuptools + - python-pip + # - acl +# Airflow 2.0 system level requirements https://airflow.apache.org/docs/apache-airflow/stable/installation.html#system-dependencies + - freetds-bin + - krb5-user + - ldap-utils + - libffi6 + - libsasl2-2 + - libsasl2-modules + - libssl1.1 + - locales + - lsb-release + - sasl2-bin + - sqlite3 + - unixodbc + +airflow_additional_required_libs: [] + +airflow_required_libs: "{{ airflow_default_required_libs + airflow_additional_required_libs }}" + +# airflow_required_python_packages: + # - { name: SQLAlchemy, version: 1.3.23 } # v1.4 Breaks airflow installation +# Version is not mandatory +# - {name: pyasn1, version: 0.4.4} + +# Owner +airflow_user: airflow +airflow_group: airflow + +# Admin user +airflow_admin_users: + - name: default + username: admin + password: admin + role: Admin + firstname: Admin + lastname: Admin + email: admin@email.com + +# Service options +airflow_scheduler_runs: 1000 +airflow_private_tmp: false + +airflow_services: + airflow_webserver: + service_name: airflow-webserver + enabled: true + running: true + state: started + path: airflow-webserver.service.j2 + airflow_scheduler: + service_name: airflow-scheduler + enabled: true + running: true + state: started + path: airflow-scheduler.service.j2 + airflow_worker: + service_name: airflow-worker + enabled: false + running: false + state: started + path: airflow-worker.service.j2 + airflow_flower: + service_name: airflow-flower + enabled: false + running: false + state: started + path: airflow-flower.service.j2 + +# Files & Paths +airflow_executable: "{{ airflow_app_home }}/bin/airflow" +airflow_pip_executable: "pip3" +airflow_app_home: "/opt/{{ airflow_app_name }}" +airflow_conf_path: "/etc/{{ airflow_app_name }}" +airflow_pidfile_folder: "/run/{{ airflow_app_name }}" +airflow_environment_file_folder: /etc/sysconfig +airflow_environment_extra_vars: [] +airflow_health_url: "http://{{ airflow_webserver_host }}:{{ airflow_webserver_port }}/health" +# - name: PATH +# value: "/custom/path/bin:$PATH" +airflow_skeleton_paths: + - "{{ airflow_app_home }}" + - "{{ airflow_conf_path }}" + - "{{ airflow_dags_folder }}" + - "{{ airflow_logs_folder }}" + - "{{ airflow_child_process_log_folder }}" + - "{{ airflow_environment_file_folder }}" + - "{{ airflow_plugins_folder }}" + +# Allowing playbooks to provide external config files&templates +airflow_extra_conf_path: "{{ playbook_dir }}/files/airflow" +airflow_extra_conf_template_path: "{{ playbook_dir }}/templates/airflow" +airflow_config_template_path: airflow.cfg.j2 +airflow_config_template_path_v2: airflow2.cfg.j2 + +# DICT +celery_extra_packages: + +# This Celery version is guaranteed to work with Airflow 1.8.x +celery_version: 3.1.17 + +# DAGs +# Python dependencies needed by the DAGs. 
This variable is expected to be a
+# list of items following the structure provided in the example comment
+# dags_dependencies:
+#   - {name: pip_package, version: version_needed}
diff --git a/defaults/main/webserver-py.yml b/defaults/main/webserver-py.yml
new file mode 100644
index 0000000..a6c9dae
--- /dev/null
+++ b/defaults/main/webserver-py.yml
@@ -0,0 +1,78 @@
+---
+# AIRFLOW webserver_config.py
+# ------------------------------------------------------------------------------
+# Flask-WTF flag for CSRF
+airflow_WTF_CSRF_ENABLED: True
+
+# AUTHENTICATION CONFIG
+# ----------------------------------------------------
+# For details on how to set up each of the following authentication methods, see
+# http://flask-appbuilder.readthedocs.io/en/latest/security.html#authentication-methods
+# in the Flask AppBuilder documentation.
+
+# The authentication type must be one of: AUTH_OID, AUTH_DB, AUTH_LDAP, AUTH_REMOTE_USER, AUTH_OAUTH
+airflow_AUTH_TYPE: AUTH_DB
+
+# When using LDAP Auth, set up the LDAP server
+airflow_AUTH_LDAP_SERVER:
+airflow_AUTH_LDAP_USE_TLS: False
+
+# registration configs
+airflow_AUTH_LDAP_FIRSTNAME_FIELD: givenName
+airflow_AUTH_LDAP_LASTNAME_FIELD: sn
+airflow_AUTH_LDAP_EMAIL_FIELD: mail  # if null in LDAP, email is set to: "{username}@email.notfound"
+
+# search configs
+airflow_AUTH_LDAP_SEARCH:  # the LDAP search base
+airflow_AUTH_LDAP_UID_FIELD:  # the username field
+airflow_AUTH_LDAP_BIND_USER:  # the special bind username for search
+airflow_AUTH_LDAP_BIND_PASSWORD:  # the special bind password for search
+
+airflow_AUTH_LDAP_SEARCH_FILTER:  # limit the LDAP search scope
+
+# Full admin role name
+airflow_AUTH_ROLE_ADMIN: Admin
+
+# Public role name, no authentication needed
+airflow_AUTH_ROLE_PUBLIC: Public
+
+# Whether to allow user self-registration
+airflow_AUTH_USER_REGISTRATION: False
+
+# Whether to replace ALL of the user's roles on each login, or only on registration
+airflow_AUTH_ROLES_SYNC_AT_LOGIN: True
+
+# Force users to re-auth after 30min of inactivity (to keep roles in sync)
+airflow_PERMANENT_SESSION_LIFETIME: 1800
+
+# The reCAPTCHA is enabled automatically when user self-registration is active and both keys are set
+airflow_RECAPTCHA_PRIVATE_KEY:
+airflow_RECAPTCHA_PUBLIC_KEY:
+
+# Config for Flask-Mail, necessary for user self-registration
+airflow_MAIL_SERVER:
+airflow_MAIL_PORT:
+airflow_MAIL_USE_TLS:
+airflow_MAIL_USE_SSL:
+airflow_MAIL_USERNAME:
+airflow_MAIL_PASSWORD:
+airflow_MAIL_DEFAULT_SENDER:
+airflow_MAIL_MAX_EMAILS:
+airflow_MAIL_ASCII_ATTACHMENTS:
+
+
+# The default user self-registration role
+airflow_AUTH_USER_REGISTRATION_ROLE: Public
+
+# When using OAuth Auth, set up provider(s) info
+airflow_OAUTH_PROVIDERS:
+
+# When using OpenID Auth, set up the OpenID providers
+airflow_OPENID_PROVIDERS:
+
+# Theme CONFIG
+# Flask App Builder comes up with a number of predefined themes
+# that you can use for Apache Airflow.
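
A playbook that switches the webserver to LDAP authentication only needs to override a handful of the defaults above. A minimal sketch (the server URL, bind DN, search base, and the vaulted password variable are illustrative placeholders, not values shipped with the role):

airflow_AUTH_TYPE: AUTH_LDAP
airflow_AUTH_LDAP_SERVER: ldap://ldap.example.com:389
airflow_AUTH_LDAP_UID_FIELD: uid
airflow_AUTH_LDAP_SEARCH: ou=people,dc=example,dc=com
airflow_AUTH_LDAP_BIND_USER: cn=airflow,ou=services,dc=example,dc=com
airflow_AUTH_LDAP_BIND_PASSWORD: "{{ vault_ldap_bind_password }}"
# with LDAP, self-registration is usually left on so accounts are created on first login
airflow_AUTH_USER_REGISTRATION: True
airflow_AUTH_USER_REGISTRATION_ROLE: Viewer
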
+# http://flask-appbuilder.readthedocs.io/en/latest/customizing.html#changing-themes +# Valid themes will be: "bootstrap-theme.css" (default bootstrap), "amelia.css", "cerulean.css", "cosmo.css", "cyborg.css", "darkly.css", "flatly.css", "journal.css", "lumen.css", "paper.css", "readable.css", "sandstone.css", "simplex.css", "slate.css", "solar.css", "spacelab.css", "superhero.css", "united.css", "yeti.css" +airflow_APP_THEME: diff --git a/tasks/config.yml b/tasks/config.yml index c942e05..bfe5ccf 100644 --- a/tasks/config.yml +++ b/tasks/config.yml @@ -20,6 +20,21 @@ owner: root group: root +- name: Airflow | Copy webserver_config.py file + template: + src: webserver_config.j2 + dest: "{{ airflow_app_home }}/webserver_config.py" + owner: "{{ airflow_user }}" + group: "{{ airflow_group }}" + mode: 0644 + register: airflow_config + when: airflow_version is version( '2.0.0', '>=') + notify: + - restart airflow-webserver + - restart airflow-scheduler + - restart airflow-worker + - restart airflow-flower + - name: Airflow | Copy basic airflow config file (< 2.0) template: src: "{{ airflow_config_template_path }}" @@ -78,36 +93,6 @@ - restart airflow-worker - restart airflow-flower -- name: Airflow | Check Admin user (> 2.0) - command: "{{ airflow_executable }} users list" - register: airflow_check_admin - changed_when: false - when: airflow_version is version( '2.0.0', '>=') - no_log: true - -- name: Airflow | Create Admin user (> 2.0) - command: - argv: - - "{{ airflow_executable }}" - - users - - create - - --username - - "{{ airflow_admin_user.username }}" - - --password - - "{{ airflow_admin_user.password }}" - - --firstname - - "{{ airflow_admin_user.firstname }}" - - --lastname - - "{{ airflow_admin_user.lastname }}" - - --role - - "{{ airflow_admin_user.role }}" - - --email - - "{{ airflow_admin_user.email }}" - no_log: true - when: - - airflow_version is version( '2.0.0', '>=') - - "airflow_admin_user.email not in airflow_check_admin.stdout" - - name: Airflow | Copy extra airflow config files (provided by playbooks) copy: src: "{{ item }}" diff --git a/tasks/install.yml b/tasks/install.yml index 026efc6..c190a69 100644 --- a/tasks/install.yml +++ b/tasks/install.yml @@ -36,7 +36,6 @@ # See https://airflow.apache.org/docs/apache-airflow/stable/installation.html#installation-tools - name: Airflow | Install pip 20.2.4 version pip: - executable: "{{ airflow_pip_executable }}" name: pip version: 20.2.4 @@ -67,26 +66,26 @@ with_items: "{{ airflow_required_python_packages }}" when: airflow_required_python_packages is defined -- name: Airflow | Installing proper Celery version - become: true - become_user: "{{ airflow_user }}" - pip: - name: celery - version: "{{ celery_version }}" - virtualenv: "{{ airflow_app_home }}" - virtualenv_python: "python{{ airflow_python_version }}" - when: airflow_executor == "CeleryExecutor" - -- name: Airflow | Installing extra Celery packages - become: true - become_user: "{{ airflow_user }}" - pip: - name: celery[{{ item }}] - version: "{{ celery_version }}" - virtualenv: "{{ airflow_app_home }}" - virtualenv_python: "python{{ airflow_python_version }}" - with_items: "{{ celery_extra_packages }}" - when: airflow_executor == "CeleryExecutor" and celery_extra_packages +# - name: Airflow | Installing proper Celery version +# become: true +# become_user: "{{ airflow_user }}" +# pip: +# name: celery +# version: "{{ celery_version }}" +# virtualenv: "{{ airflow_app_home }}" +# virtualenv_python: "python{{ airflow_python_version }}" +# when: airflow_executor == 
"CeleryExecutor" + +# - name: Airflow | Installing extra Celery packages +# become: true +# become_user: "{{ airflow_user }}" +# pip: +# name: celery[{{ item }}] +# version: "{{ celery_version }}" +# virtualenv: "{{ airflow_app_home }}" +# virtualenv_python: "python{{ airflow_python_version }}" +# with_items: "{{ celery_extra_packages }}" +# when: airflow_executor == "CeleryExecutor" and celery_extra_packages - name: Airflow | Installing Airflow become: true diff --git a/tasks/main.yml b/tasks/main.yml index 42b52f8..f8155ad 100644 --- a/tasks/main.yml +++ b/tasks/main.yml @@ -12,6 +12,12 @@ - config - airflow_config +- name: Airflow | Users + import_tasks: users.yml + tags: + - users + - airflow_users + - name: Airflow | Service import_tasks: service.yml tags: diff --git a/tasks/users.yml b/tasks/users.yml new file mode 100644 index 0000000..7eba0b5 --- /dev/null +++ b/tasks/users.yml @@ -0,0 +1,58 @@ +--- + +- name: Airflow | Check Admin user (> 2.0) + command: "{{ airflow_executable }} users list" + register: airflow_check_admin + changed_when: false + when: airflow_version is version( '2.0.0', '>=') + no_log: true + +- name: Airflow | Create Admin user AUTH_DB (> 2.0) + command: + argv: + - "{{ airflow_executable }}" + - users + - create + - --username + - "{{ item.username }}" + - --password + - "{{ item.password }}" + - --firstname + - "{{ item.firstname }}" + - --lastname + - "{{ item.lastname }}" + - --role + - "{{ item.role }}" + - --email + - "{{ item.email }}" + no_log: true + with_items: "{{ airflow_admin_users }}" + when: + - airflow_version is version( '2.0.0', '>=') + - "item.email not in airflow_check_admin.stdout" + - "airflow_AUTH_TYPE == 'AUTH_DB'" + +- name: Airflow | Create Admin users AUTH_LDAP (> 2.0) + command: + argv: + - "{{ airflow_executable }}" + - users + - create + - --username + - "{{ item.username }}" + - --password + - "{{ item.password }}" + - --firstname + - "{{ item.firstname }}" + - --lastname + - "{{ item.lastname }}" + - --role + - "{{ item.role }}" + - --email + - "{{ item.email }}" + no_log: true + with_items: "{{ airflow_admin_users }}" + when: + - airflow_version is version( '2.0.0', '>=') + - "item.email not in airflow_check_admin.stdout" + - "airflow_AUTH_TYPE == 'AUTH_LDAP'" \ No newline at end of file diff --git a/templates/webserver_config.j2 b/templates/webserver_config.j2 new file mode 100644 index 0000000..794ff39 --- /dev/null +++ b/templates/webserver_config.j2 @@ -0,0 +1,169 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+"""Default configuration for the Airflow webserver""" +import os + +from flask_appbuilder.security.manager import ( + AUTH_DB, + AUTH_LDAP, + AUTH_OAUTH, + AUTH_OID, + AUTH_REMOTE_USER +) + +basedir = os.path.abspath(os.path.dirname(__file__)) + +# Flask-WTF flag for CSRF +WTF_CSRF_ENABLED = {{ airflow_WTF_CSRF_ENABLED }} + +# ---------------------------------------------------- +# AUTHENTICATION CONFIG +# ---------------------------------------------------- +# For details on how to set up each of the following authentication, see +# http://flask-appbuilder.readthedocs.io/en/latest/security.html# authentication-methods +# for details. + +# The authentication type +# AUTH_OID : Is for OpenID +# AUTH_DB : Is for database +# AUTH_LDAP : Is for LDAP +# AUTH_REMOTE_USER : Is for using REMOTE_USER from web server +# AUTH_OAUTH : Is for OAuth +# AUTH_TYPE = AUTH_DB +AUTH_TYPE = {{ airflow_AUTH_TYPE }} + +{% if airflow_AUTH_TYPE == "AUTH_LDAP" %} +# When using LDAP Auth, setup the ldap server +AUTH_LDAP_SERVER = '{{ airflow_AUTH_LDAP_SERVER }}' +AUTH_LDAP_USE_TLS = {{ airflow_AUTH_LDAP_USE_TLS }} + +# registration configs +AUTH_LDAP_FIRSTNAME_FIELD = '{{ airflow_AUTH_LDAP_FIRSTNAME_FIELD }}' +AUTH_LDAP_LASTNAME_FIELD = '{{ airflow_AUTH_LDAP_LASTNAME_FIELD }}' +AUTH_LDAP_EMAIL_FIELD = '{{ airflow_AUTH_LDAP_EMAIL_FIELD }}' # if null in LDAP, email is set to: "{username}@email.notfound" + +# search configs +AUTH_LDAP_SEARCH = '{{ airflow_AUTH_LDAP_SEARCH }}' # the LDAP search base +AUTH_LDAP_UID_FIELD = '{{ airflow_AUTH_LDAP_UID_FIELD }}' # the username field +AUTH_LDAP_BIND_USER = '{{ airflow_AUTH_LDAP_BIND_USER }}' # the special bind username for search +AUTH_LDAP_BIND_PASSWORD = '{{ airflow_AUTH_LDAP_BIND_PASSWORD }}' # the special bind password for search + +# only allow users with memberOf="cn=myTeam,ou=teams,dc=example,dc=com" +# AUTH_LDAP_SEARCH_FILTER = "(memberOf=cn=myTeam,ou=teams,dc=example,dc=com)" +AUTH_LDAP_SEARCH_FILTER = '{{ airflow_AUTH_LDAP_SEARCH_FILTER }}' +{% endif %} + +# Setup Full admin role name +AUTH_ROLE_ADMIN = '{{ airflow_AUTH_ROLE_ADMIN }}' + +# Setup Public role name, no authentication needed +AUTH_ROLE_PUBLIC = '{{ airflow_AUTH_ROLE_PUBLIC }}' + +# Will allow user self registration +AUTH_USER_REGISTRATION = {{ airflow_AUTH_USER_REGISTRATION }} + +{% if airflow_AUTH_USER_REGISTRATION == True %} +# The default user self registration role +AUTH_USER_REGISTRATION_ROLE = '{{ airflow_AUTH_USER_REGISTRATION_ROLE }}' + +{% if airflow_RECAPTCHA_PRIVATE_KEY and airflow_RECAPTCHA_PUBLIC_KEY %} +# The recaptcha it's automatically enabled for user self registration is active and the keys are necessary +RECAPTCHA_PRIVATE_KEY = {{ airflow_RECAPTCHA_PRIVATE_KEY }} +RECAPTCHA_PUBLIC_KEY = {{ airflow_RECAPTCHA_PUBLIC_KEY }} +{% endif %} + +# Config for Flask-Mail necessary for user self registration +{% if airflow_MAIL_SERVER %} +MAIL_SERVER = '{{ airflow_MAIL_SERVER }}' +{% endif %} +{% if airflow_MAIL_PORT %} +MAIL_PORT = {{ airflow_MAIL_PORT }} +{% endif %} +{% if airflow_MAIL_USE_TLS %} +MAIL_USE_TLS = {{ airflow_MAIL_USE_TLS }} +{% endif %} +{% if airflow_MAIL_USE_SSL %} +MAIL_USE_SSL = {{ airflow_MAIL_USE_SSL }} +{% endif %} +{% if airflow_MAIL_USERNAME %} +MAIL_USERNAME = '{{ airflow_MAIL_USERNAME }}' +{% endif %} +{% if airflow_MAIL_PASSWORD %} +MAIL_PASSWORD = '{{ airflow_MAIL_PASSWORD }}' +{% endif %} +{% if airflow_MAIL_DEFAULT_SENDER %} +MAIL_DEFAULT_SENDER = '{{ airflow_MAIL_DEFAULT_SENDER }}' +{% endif %} +{% if airflow_MAIL_MAX_EMAILS %} +MAIL_MAX_EMAILS = {{ 
airflow_MAIL_MAX_EMAILS }}
+{% endif %}
+{% if airflow_MAIL_ASCII_ATTACHMENTS %}
+MAIL_ASCII_ATTACHMENTS = {{ airflow_MAIL_ASCII_ATTACHMENTS }}
+{% endif %}
+{% endif %}
+
+# Whether to replace ALL of the user's roles on each login, or only on registration
+AUTH_ROLES_SYNC_AT_LOGIN = {{ airflow_AUTH_ROLES_SYNC_AT_LOGIN }}
+
+# Force users to re-auth after 30min of inactivity (to keep roles in sync)
+PERMANENT_SESSION_LIFETIME = {{ airflow_PERMANENT_SESSION_LIFETIME }}
+
+# When using OAuth Auth, uncomment to set up provider(s) info
+# Google OAuth example:
+# OAUTH_PROVIDERS = [{
+#     'name':'google',
+#     'token_key':'access_token',
+#     'icon':'fa-google',
+#     'remote_app': {
+#         'api_base_url':'https://www.googleapis.com/oauth2/v2/',
+#         'client_kwargs':{
+#             'scope': 'email profile'
+#         },
+#         'access_token_url':'https://accounts.google.com/o/oauth2/token',
+#         'authorize_url':'https://accounts.google.com/o/oauth2/auth',
+#         'request_token_url': None,
+#         'client_id': GOOGLE_KEY,
+#         'client_secret': GOOGLE_SECRET_KEY,
+#     }
+# }]
+{% if airflow_OAUTH_PROVIDERS %}
+OAUTH_PROVIDERS = {{ airflow_OAUTH_PROVIDERS }}
+{% endif %}
+
+# When using OpenID Auth, uncomment to set up OpenID providers.
+# example for OpenID authentication
+# OPENID_PROVIDERS = [
+#    { 'name': 'Yahoo', 'url': 'https://me.yahoo.com' },
+#    { 'name': 'AOL', 'url': 'http://openid.aol.com/' },
+#    { 'name': 'Flickr', 'url': 'http://www.flickr.com/' },
+#    { 'name': 'MyOpenID', 'url': 'https://www.myopenid.com' }]
+{% if airflow_OPENID_PROVIDERS %}
+OPENID_PROVIDERS = {{ airflow_OPENID_PROVIDERS }}
+{% endif %}
+
+# ----------------------------------------------------
+# Theme CONFIG
+# ----------------------------------------------------
+# Flask App Builder comes up with a number of predefined themes
+# that you can use for Apache Airflow.
+# http://flask-appbuilder.readthedocs.io/en/latest/customizing.html#changing-themes
+# Please make sure to remove "navbar_color" configuration from airflow.cfg
+# in order to fully utilize the theme. 
(or use that property in conjunction with theme)
+{% if airflow_APP_THEME %}
+APP_THEME = '{{ airflow_APP_THEME }}'
+{% endif %}

From b00b8128244a462b46ac66453af1733a98c11395 Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Mon, 5 Apr 2021 14:49:15 +0200
Subject: [PATCH 09/21] Template editing

- flower and worker templates updated to work with Airflow 2.0

---
 templates/airflow-flower.service.j2 | 4 ++++
 templates/airflow-worker.service.j2 | 4 ++++
 2 files changed, 8 insertions(+)

diff --git a/templates/airflow-flower.service.j2 b/templates/airflow-flower.service.j2
index 325fe6b..40c31d8 100644
--- a/templates/airflow-flower.service.j2
+++ b/templates/airflow-flower.service.j2
@@ -22,7 +22,11 @@ EnvironmentFile={{ airflow_environment_file_folder }}/airflow
 User={{ airflow_user }}
 Group={{ airflow_group }}
 Type=simple
+{% if airflow_version is version( '2.0.0', '>=') %}
+ExecStart={{ airflow_executable }} celery flower --pid {{ airflow_pidfile_folder }}-flower/flower.pid
+{% else %}
 ExecStart={{ airflow_executable }} flower --pid {{ airflow_pidfile_folder }}-flower/flower.pid
+{% endif %}
 Restart=on-failure
 RestartSec=10s
 PrivateTmp={{ airflow_private_tmp }}
diff --git a/templates/airflow-worker.service.j2 b/templates/airflow-worker.service.j2
index ae95cbd..0c81d8a 100644
--- a/templates/airflow-worker.service.j2
+++ b/templates/airflow-worker.service.j2
@@ -22,7 +22,11 @@ EnvironmentFile={{ airflow_environment_file_folder }}/airflow
 User={{ airflow_user }}
 Group={{ airflow_group }}
 Type=simple
+{% if airflow_version is version( '2.0.0', '>=') %}
+ExecStart={{ airflow_executable }} celery worker --pid {{ airflow_pidfile_folder }}-worker/worker.pid
+{% else %}
 ExecStart={{ airflow_executable }} worker --pid {{ airflow_pidfile_folder }}-worker/worker.pid
+{% endif %}
 KillSignal=SIGINT
 Restart=on-failure
 RestartSec=10s

From c25bb13ce96829dbeeee7ed850c32b352aadb138 Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Tue, 6 Apr 2021 09:53:32 +0200
Subject: [PATCH 10/21] Molecule group fixed

- Renamed the molecule group to fix molecule/group_vars not being loaded

---
 molecule/default/group_vars/airflow/main.yml  |   11 -
 .../default/group_vars/airflow_group/main.yml |    8 +
 templates/airflow.cfg2.j2                     | 1028 -----------------
 3 files changed, 8 insertions(+), 1039 deletions(-)
 delete mode 100644 molecule/default/group_vars/airflow/main.yml
 create mode 100644 molecule/default/group_vars/airflow_group/main.yml
 delete mode 100644 templates/airflow.cfg2.j2

diff --git a/molecule/default/group_vars/airflow/main.yml b/molecule/default/group_vars/airflow/main.yml
deleted file mode 100644
index 193f6e6..0000000
--- a/molecule/default/group_vars/airflow/main.yml
+++ /dev/null
@@ -1,11 +0,0 @@
----
-airflow_fernet_key: xKy13nPFfDflJ0DYGVTwf_DEmbItfURHlEDxrt-bBQw=
-airflow_webserver_secret_key: "ihadsf908auw0340684"
-
-airflow_admin_user:
-  username: admin
-  password: admin
-  role: Admin
-  firstname: admin
-  lastname: admin
-  email: email@domain.com
\ No newline at end of file
diff --git a/molecule/default/group_vars/airflow_group/main.yml b/molecule/default/group_vars/airflow_group/main.yml
new file mode 100644
index 0000000..8a0bdf9
--- /dev/null
+++ b/molecule/default/group_vars/airflow_group/main.yml
@@ -0,0 +1,8 @@
+---
+# Example keys
+airflow_fernet_key: xKy13nPFfDflJ0DYGVTwf_DEmbItfURHlEDxrt-bBQw=
+airflow_webserver_secret_key: "ihadsf908auw0340684"
+
+airflow_required_python_packages:
+  - { name: SQLAlchemy, version: 1.3.23 }
+  - { name: psycopg2 }
\ No newline at end of file
diff --git a/templates/airflow.cfg2.j2 
b/templates/airflow.cfg2.j2 deleted file mode 100644 index d546aca..0000000 --- a/templates/airflow.cfg2.j2 +++ /dev/null @@ -1,1028 +0,0 @@ -{{ ansible_managed | comment }} -[core] -# The folder where your airflow pipelines live, most likely a -# subfolder in a code repository. This path must be absolute. -dags_folder = {{ airflow_dags_folder }} - -# Hostname by providing a path to a callable, which will resolve the hostname. -# The format is "package.function". -# -# For example, default value "socket.getfqdn" means that result from getfqdn() of "socket" -# package will be used as hostname. -# -# No argument should be required in the function specified. -# If using IP address as hostname is preferred, use value ``airflow.utils.net.get_host_ip_address`` -hostname_callable = {{ airflow_hostname_callable }} - -# Default timezone in case supplied date times are naive -# can be utc (default), system, or any IANA timezone string (e.g. Europe/Amsterdam) -default_timezone = {{ airflow_default_timezone }} - -# The executor class that airflow should use. Choices include -# ``SequentialExecutor``, ``LocalExecutor``, ``CeleryExecutor``, ``DaskExecutor``, -# ``KubernetesExecutor``, ``CeleryKubernetesExecutor`` or the -# full import path to the class when using a custom executor. -executor = {{ airflow_executor }} - -# The SqlAlchemy connection string to the metadata database. -# SqlAlchemy supports many different database engine, more information -# their website -sql_alchemy_conn = {{ airflow_database_conn }} - -# The encoding for the databases -sql_engine_encoding = utf-8 - -# Collation for ``dag_id``, ``task_id``, ``key`` columns in case they have different encoding. -# This is particularly useful in case of mysql with utf8mb4 encoding because -# primary keys for XCom table has too big size and ``sql_engine_collation_for_ids`` should -# be set to ``utf8mb3_general_ci``. -# sql_engine_collation_for_ids = - -# If SqlAlchemy should pool database connections. -sql_alchemy_pool_enabled = True - -# The SqlAlchemy pool size is the maximum number of database connections -# in the pool. 0 indicates no limit. -sql_alchemy_pool_size = {{ airflow_database_pool_size }} - -# The maximum overflow size of the pool. -# When the number of checked-out connections reaches the size set in pool_size, -# additional connections will be returned up to this limit. -# When those additional connections are returned to the pool, they are disconnected and discarded. -# It follows then that the total number of simultaneous connections the pool will allow -# is pool_size + max_overflow, -# and the total number of "sleeping" connections the pool will allow is pool_size. -# max_overflow can be set to ``-1`` to indicate no overflow limit; -# no limit will be placed on the total number of concurrent connections. Defaults to ``10``. -sql_alchemy_max_overflow = 10 - -# The SqlAlchemy pool recycle is the number of seconds a connection -# can be idle in the pool before it is invalidated. This config does -# not apply to sqlite. If the number of DB connections is ever exceeded, -# a lower config value will allow the system to recover faster. -sql_alchemy_pool_recycle = {{ airflow_database_pool_recycle }} - -# Check connection at the start of each connection pool checkout. -# Typically, this is a simple statement like "SELECT 1". -# More information here: -# https://docs.sqlalchemy.org/en/13/core/pooling.html#disconnect-handling-pessimistic -sql_alchemy_pool_pre_ping = True - -# The schema to use for the metadata database. 
-# SqlAlchemy supports databases with the concept of multiple schemas. -sql_alchemy_schema = - -# Import path for connect args in SqlAlchemy. Defaults to an empty dict. -# This is useful when you want to configure db engine args that SqlAlchemy won't parse -# in connection string. -# See https://docs.sqlalchemy.org/en/13/core/engines.html#sqlalchemy.create_engine.params.connect_args -# sql_alchemy_connect_args = - -# The amount of parallelism as a setting to the executor. This defines -# the max number of task instances that should run simultaneously -# on this airflow installation -parallelism = {{ airflow_parallelism }} - -# The number of task instances allowed to run concurrently by the scheduler -# in one DAG. Can be overridden by ``concurrency`` on DAG level. -dag_concurrency = {{ airflow_dag_concurrency }} - -# Are DAGs paused by default at creation -dags_are_paused_at_creation = {{ airflow_dags_are_paused_at_creation }} - -# The maximum number of active DAG runs per DAG -max_active_runs_per_dag = {{ airflow_max_active_runs_per_dag }} - -# Whether to load the DAG examples that ship with Airflow. It's good to -# get started, but you probably want to set this to ``False`` in a production -# environment -load_examples = {{ airflow_load_examples }} - -# Whether to load the default connections that ship with Airflow. It's good to -# get started, but you probably want to set this to ``False`` in a production -# environment -load_default_connections = True - -# Path to the folder containing Airflow plugins -plugins_folder = {{ airflow_plugins_folder }} - -# Should tasks be executed via forking of the parent process ("False", -# the speedier option) or by spawning a new python process ("True" slow, -# but means plugin changes picked up by tasks straight away) -execute_tasks_new_python_interpreter = False - -# Secret key to save connection passwords in the db -fernet_key = {{ airflow_fernet_key }} - -# Whether to disable pickling dags -donot_pickle = {{ airflow_donot_pickle }} - -# How long before timing out a python file import -dagbag_import_timeout = {{ airflow_dagbag_import_timeout }} - -# Should a traceback be shown in the UI for dagbag import errors, -# instead of just the exception message -dagbag_import_error_tracebacks = True - -# If tracebacks are shown, how many entries from the traceback should be shown -dagbag_import_error_traceback_depth = 2 - -# How long before timing out a DagFileProcessor, which processes a dag file -dag_file_processor_timeout = 50 - -# The class to use for running task instances in a subprocess. -# Choices include StandardTaskRunner, CgroupTaskRunner or the full import path to the class -# when using a custom task runner. -task_runner = {{ airflow_task_runner }} - -# If set, tasks without a ``run_as_user`` argument will be run with this user -# Can be used to de-elevate a sudo user running Airflow when executing tasks -default_impersonation = {{ airflow_default_impersonation }} - -# What security module to use (for example kerberos) -security = - -# Turn unit test mode on (overwrites many configuration options with test -# values at runtime) -unit_test_mode = False - -# Whether to enable pickling for xcom (note that this is insecure and allows for -# RCE exploits). 
-enable_xcom_pickling = {{ airflow_enable_xcom_pickling }} - -# When a task is killed forcefully, this is the amount of time in seconds that -# it has to cleanup after it is sent a SIGTERM, before it is SIGKILLED -killed_task_cleanup_time = {{ airflow_killed_task_cleanup_time }} - -# Whether to override params with dag_run.conf. If you pass some key-value pairs -# through ``airflow dags backfill -c`` or -# ``airflow dags trigger -c``, the key-value pairs will override the existing ones in params. -dag_run_conf_overrides_params = {{ airflow_dag_run_conf_overrides_params }} - -# When discovering DAGs, ignore any files that don't contain the strings ``DAG`` and ``airflow``. -dag_discovery_safe_mode = True - -# The number of retries each task is going to have by default. Can be overridden at dag or task level. -default_task_retries = 0 - -# Updating serialized DAG can not be faster than a minimum interval to reduce database write rate. -min_serialized_dag_update_interval = 30 - -# Fetching serialized DAG can not be faster than a minimum interval to reduce database -# read rate. This config controls when your DAGs are updated in the Webserver -min_serialized_dag_fetch_interval = 10 - -# Whether to persist DAG files code in DB. -# If set to True, Webserver reads file contents from DB instead of -# trying to access files in a DAG folder. -# Example: store_dag_code = False -# store_dag_code = - -# Maximum number of Rendered Task Instance Fields (Template Fields) per task to store -# in the Database. -# All the template_fields for each of Task Instance are stored in the Database. -# Keeping this number small may cause an error when you try to view ``Rendered`` tab in -# TaskInstance view for older tasks. -max_num_rendered_ti_fields_per_task = 30 - -# On each dagrun check against defined SLAs -check_slas = True - -# Path to custom XCom class that will be used to store and resolve operators results -# Example: xcom_backend = path.to.CustomXCom -xcom_backend = airflow.models.xcom.BaseXCom - -# By default Airflow plugins are lazily-loaded (only loaded when required). Set it to ``False``, -# if you want to load plugins whenever 'airflow' is invoked via cli or loaded from module. -lazy_load_plugins = True - -# By default Airflow providers are lazily-discovered (discovery and imports happen only when required). -# Set it to False, if you want to discover providers whenever 'airflow' is invoked via cli or -# loaded from module. -lazy_discover_providers = True - -# Number of times the code should be retried in case of DB Operational Errors. -# Not all transactions will be retried as it can cause undesired state. -# Currently it is only used in ``DagFileProcessor.process_file`` to retry ``dagbag.sync_to_db``. -max_db_retries = 3 - -[logging] -# The folder where airflow should store its log files -# This path must be absolute -base_log_folder = {{ airflow_logs_folder }} - -# Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elastic Search. -# Set this to True if you want to enable remote logging. -remote_logging = False - -# Users must supply an Airflow connection id that provides access to the storage -# location. -remote_log_conn_id = {{ airflow_remote_log_conn_id }} - -# Path to Google Credential JSON file. If omitted, authorization based on `the Application Default -# Credentials -# `__ will -# be used. 
-google_key_path = - -# Storage bucket URL for remote logging -# S3 buckets should start with "s3://" -# Cloudwatch log groups should start with "cloudwatch://" -# GCS buckets should start with "gs://" -# WASB buckets should start with "wasb" just to help Airflow select correct handler -# Stackdriver logs should start with "stackdriver://" -remote_base_log_folder = {{ airflow_s3_log_folder }} - -# Use server-side encryption for logs stored in S3 -encrypt_s3_logs = {{ airflow_encrypt_s3_logs }} - -# Logging level -logging_level = INFO - -# Logging level for Flask-appbuilder UI -fab_logging_level = WARN - -# Logging class -# Specify the class that will specify the logging configuration -# This class has to be on the python classpath -# Example: logging_config_class = my.path.default_local_settings.LOGGING_CONFIG -logging_config_class = {{ airflow_logging_config_class }} - -# Flag to enable/disable Colored logs in Console -# Colour the logs when the controlling terminal is a TTY. -colored_console_log = False - -# Log format for when Colored logs is enabled -colored_formatter_class = {{ airflow_simple_log_format }} - -# Format of Log line -log_format = {{ airflow_log_format }} -simple_log_format = %%(asctime)s %%(levelname)s - %%(message)s - -# Specify prefix pattern like mentioned below with stream handler TaskHandlerWithCustomFormatter -# Example: task_log_prefix_template = -task_log_prefix_template = - -# Formatting for how airflow generates file names/paths for each task run. -log_filename_template = {{ airflow_log_filename_template }} - -# Formatting for how airflow generates file names for log -log_processor_filename_template = {{ airflow_log_processor_filename_template }} - -# full path of dag_processor_manager logfile -dag_processor_manager_log_location = /etc/airflow/logs/dag_processor_manager/dag_processor_manager.log - -# Name of handler to read task instance logs. -# Defaults to use ``task`` handler. -task_log_reader = task - -# A comma\-separated list of third-party logger names that will be configured to print messages to -# consoles\. -# Example: extra_loggers = connexion,sqlalchemy -extra_loggers = - -[metrics] - -# StatsD (https://github.com/etsy/statsd) integration settings. -# Enables sending metrics to StatsD. -statsd_on = False -statsd_host = localhost -statsd_port = 8125 -statsd_prefix = airflow - -# If you want to avoid sending all the available metrics to StatsD, -# you can configure an allow list of prefixes (comma separated) to send only the metrics that -# start with the elements of the list (e.g: "scheduler,executor,dagrun") -statsd_allow_list = - -# A function that validate the statsd stat name, apply changes to the stat name if necessary and return -# the transformed stat name. -# -# The function should have the following signature: -# def func_name(stat_name: str) -> str: -stat_name_handler = - -# To enable datadog integration to send airflow metrics. -statsd_datadog_enabled = False - -# List of datadog tags attached to all metrics(e.g: key1:value1,key2:value2) -statsd_datadog_tags = - -# If you want to utilise your own custom Statsd client set the relevant -# module path below. 
-# Note: The module path must exist on your PYTHONPATH for Airflow to pick it up -# statsd_custom_client_path = - -[secrets] -# Full class name of secrets backend to enable (will precede env vars and metastore in search path) -# Example: backend = airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend -backend = {{ airflow_lineage_backend }} - -# The backend_kwargs param is loaded into a dictionary and passed to __init__ of secrets backend class. -# See documentation for the secrets backend you are using. JSON is expected. -# Example for AWS Systems Manager ParameterStore: -# ``{"connections_prefix": "/airflow/connections", "profile_name": "default"}`` -backend_kwargs = - -[cli] -# In what way should the cli access the API. The LocalClient will use the -# database directly, while the json_client will use the api running on the -# webserver -api_client = {{ airflow_cli_api_client }} - -# If you set web_server_url_prefix, do NOT forget to append it here, ex: -# ``endpoint_url = http://localhost:8080/myroot`` -# So api will look like: ``http://localhost:8080/myroot/api/experimental/...`` -endpoint_url = {{ airflow_cli_api_endpoint_url }} - -[debug] -# Used only with ``DebugExecutor``. If set to ``True`` DAG will fail with first -# failed task. Helpful for debugging purposes. -fail_fast = False - -[api] -# Enables the deprecated experimental API. Please note that these APIs do not have access control. -# The authenticated user has full access. -# -# .. warning:: -# -# This `Experimental REST API `__ is -# deprecated since version 2.0. Please consider using -# `the Stable REST API `__. -# For more information on migration, see -# `UPDATING.md `_ -enable_experimental_api = False - -# How to authenticate users of the API. See -# https://airflow.apache.org/docs/stable/security.html for possible values. -# ("airflow.api.auth.backend.default" allows all requests for historic reasons) -{% if airflow_webserver_auth_backend %} -auth_backend = {{ airflow_webserver_auth_backend }} -{% endif %} - -# Used to set the maximum page limit for API requests -maximum_page_limit = 100 - -# Used to set the default page limit when limit is zero. A default limit -# of 100 is set on OpenApi spec. However, this particular default limit -# only work when limit is set equal to zero(0) from API requests. -# If no limit is supplied, the OpenApi spec default is used. -fallback_page_limit = 100 - -# The intended audience for JWT token credentials used for authorization. This value must match on the client and server sides. If empty, audience will not be tested. -# Example: google_oauth2_audience = project-id-random-value.apps.googleusercontent.com -google_oauth2_audience = - -# Path to Google Cloud Service Account key file (JSON). If omitted, authorization based on -# `the Application Default Credentials -# `__ will -# be used. 
-# Example: google_key_path = /files/service-account-json -google_key_path = - -[lineage] -# what lineage backend to use -backend = {{ airflow_lineage_backend }} - -[atlas] -sasl_enabled = {{ airflow_atlas_sasl_enabled }} -host = {{ airflow_atlas_host }} -port = port = {{ airflow_atlas_port }} -username = {{ airflow_atlas_username }} -password = {{ airflow_atlas_password }} - -[operators] -# The default owner assigned to each new operator, unless -# provided explicitly or passed via ``default_args`` -default_owner = {{ airflow_operator_default_owner }} -default_cpus = {{ airflow_operator_default_cpus }} -default_ram = {{ airflow_operator_default_ram }} -default_disk = {{ airflow_operator_default_disk }} -default_gpus = {{ airflow_operator_default_gpus }} - -# Is allowed to pass additional/unused arguments (args, kwargs) to the BaseOperator operator. -# If set to False, an exception will be thrown, otherwise only the console message will be displayed. -allow_illegal_arguments = False - -[hive] -# Default mapreduce queue for HiveOperator tasks -default_hive_mapred_queue = {{ airflow_default_hive_mapred_queue }} - -# Template for mapred_job_name in HiveOperator, supports the following named parameters -# hostname, dag_id, task_id, execution_date -# mapred_job_name_template = - -[webserver] -# The base url of your website as airflow cannot guess what domain or -# cname you are using. This is used in automated emails that -# airflow sends to point links to the right web server -base_url = {{ airflow_webserver_base_url }} - -# Default timezone to display all dates in the UI, can be UTC, system, or -# any IANA timezone string (e.g. Europe/Amsterdam). If left empty the -# default value of core/default_timezone will be used -# Example: default_ui_timezone = America/New_York -default_ui_timezone = {{ airflow_default_timezone }} - -# The ip specified when starting the web server -web_server_host = {{ airflow_webserver_host }} - -# The port on which to run the web server -web_server_port = {{ airflow_webserver_port }} - -# Paths to the SSL certificate and key for the web server. When both are -# provided SSL will be enabled. This does not change the web server port. -web_server_ssl_cert = {{ airflow_webserver_ssl_cert }} - -# Paths to the SSL certificate and key for the web server. When both are -# provided SSL will be enabled. This does not change the web server port. -web_server_ssl_key = {{ airflow_webserver_ssl_key }} - -# Number of seconds the webserver waits before killing gunicorn master that doesn't respond -web_server_master_timeout = {{ airflow_webserver_master_timeout }} - -# Number of seconds the gunicorn webserver waits before timing out on a worker -web_server_worker_timeout = {{ airflow_webserver_worker_timeout }} - -# Number of workers to refresh at a time. When set to 0, worker refresh is -# disabled. When nonzero, airflow periodically refreshes webserver workers by -# bringing up new ones and killing old ones. -worker_refresh_batch_size = {{ airflow_webserver_worker_refresh_batch_size }} - -# Number of seconds to wait before refreshing a batch of workers. -worker_refresh_interval = {{ airflow_webserver_worker_refresh_interval }} - -# If set to True, Airflow will track files in plugins_folder directory. When it detects changes, -# then reload the gunicorn. 
-reload_on_plugin_change = False - -# Secret key used to run your flask app -# It should be as random as possible -secret_key = {{ airflow_webserver_secret_key }} - -# Number of workers to run the Gunicorn web server -workers = {{ airflow_webserver_workers }} - -# The worker class gunicorn should use. Choices include -# sync (default), eventlet, gevent -worker_class = {{ airflow_webserver_worker_class }} - -# Log files for the gunicorn webserver. '-' means log to stderr. -access_logfile = {{ airflow_logs_folder }}/gunicorn-access.log - -# Log files for the gunicorn webserver. '-' means log to stderr. -error_logfile = {{ airflow_logs_folder }}/gunicorn-error.log - -# Access log format for gunicorn webserver. -# default format is %%(h)s %%(l)s %%(u)s %%(t)s "%%(r)s" %%(s)s %%(b)s "%%(f)s" "%%(a)s" -# documentation - https://docs.gunicorn.org/en/stable/settings.html#access-log-format -access_logformat = - -# Expose the configuration file in the web server -expose_config = {{ airflow_webserver_expose_config }} - -# Expose hostname in the web server -expose_hostname = True - -# Expose stacktrace in the web server -expose_stacktrace = True - -# Default DAG view. Valid values are: ``tree``, ``graph``, ``duration``, ``gantt``, ``landing_times`` -dag_default_view = tree - -# Default DAG orientation. Valid values are: -# ``LR`` (Left->Right), ``TB`` (Top->Bottom), ``RL`` (Right->Left), ``BT`` (Bottom->Top) -dag_orientation = {{ airflow_webserver_dag_orientation }} - -# Puts the webserver in demonstration mode; blurs the names of Operators for -# privacy. -demo_mode = {{ airflow_webserver_demo_mode }} - -# The amount of time (in secs) webserver will wait for initial handshake -# while fetching logs from other worker machine -log_fetch_timeout_sec = {{ airflow_webserver_log_fetch_timeout_sec }} - -# Time interval (in secs) to wait before next log fetching. -log_fetch_delay_sec = 2 - -# Distance away from page bottom to enable auto tailing. -log_auto_tailing_offset = 30 - -# Animation speed for auto tailing log display. -log_animation_speed = 1000 - -# By default, the webserver shows paused DAGs. Flip this to hide paused -# DAGs by default -hide_paused_dags_by_default = {{ airflow_webserver_hide_paused_dags_by_default }} - -# Consistent page size across all listing views in the UI -page_size = {{ airflow_webserver_page_size }} - -# Define the color of navigation bar -navbar_color = {{ airflow_webserver_navbar_color }} - -# Default dagrun to show in UI -default_dag_run_display_number = {{ airflow_webserver_default_dag_run_display_number }} - -# Enable werkzeug ``ProxyFix`` middleware for reverse proxy -enable_proxy_fix = False - -# Number of values to trust for ``X-Forwarded-For``. -# More info: https://werkzeug.palletsprojects.com/en/0.16.x/middleware/proxy_fix/ -proxy_fix_x_for = 1 - -# Number of values to trust for ``X-Forwarded-Proto`` -proxy_fix_x_proto = 1 - -# Number of values to trust for ``X-Forwarded-Host`` -proxy_fix_x_host = 1 - -# Number of values to trust for ``X-Forwarded-Port`` -proxy_fix_x_port = 1 - -# Number of values to trust for ``X-Forwarded-Prefix`` -proxy_fix_x_prefix = 1 - -# Set secure flag on session cookie -cookie_secure = False - -# Set samesite policy on session cookie -cookie_samesite = Lax - -# Default setting for wrap toggle on DAG code and TI log views. 
-default_wrap = False - -# Allow the UI to be rendered in a frame -x_frame_enabled = True - -# Send anonymous user activity to your analytics tool -# choose from google_analytics, segment, or metarouter -# analytics_tool = - -# Unique ID of your account in the analytics tool -# analytics_id = - -# 'Recent Tasks' stats will show for old DagRuns if set -show_recent_stats_for_completed_runs = True - -# Update FAB permissions and sync security manager roles -# on webserver startup -update_fab_perms = True - -# The UI cookie lifetime in minutes. User will be logged out from UI after -# ``session_lifetime_minutes`` of non-activity -session_lifetime_minutes = 43200 - -[email] - -# Configuration email backend and whether to -# send email alerts on retry or failure -# Email backend to use -email_backend = {{ airflow_email_backend }} - -# Whether email alerts should be sent when a task is retried -default_email_on_retry = True - -# Whether email alerts should be sent when a task failed -default_email_on_failure = True - -[smtp] - -# If you want airflow to send emails on retries, failure, and you want to use -# the airflow.utils.email.send_email_smtp function, you have to configure an -# smtp server here -smtp_host = {{ airflow_smtp_host }} -smtp_starttls = {{ airflow_smtp_starttls }} -smtp_ssl = {{ airflow_smtp_ssl }} -{% if airflow_smtp_user %} -smtp_user = {{ airflow_smtp_user }} -{% endif %} -{% if airflow_smtp_passwd %} -smtp_password = {{ airflow_smtp_passwd }} -{% endif %} -smtp_port = {{ airflow_smtp_port }} -smtp_mail_from = {{ airflow_smtp_mail_from }} -smtp_timeout = 30 -smtp_retry_limit = 5 - -[sentry] - -# Sentry (https://docs.sentry.io) integration. Here you can supply -# additional configuration options based on the Python platform. See: -# https://docs.sentry.io/error-reporting/configuration/?platform=python. -# Unsupported options: ``integrations``, ``in_app_include``, ``in_app_exclude``, -# ``ignore_errors``, ``before_breadcrumb``, ``before_send``, ``transport``. -# Enable error reporting to Sentry -sentry_on = false -sentry_dsn = - -[celery_kubernetes_executor] - -# This section only applies if you are using the ``CeleryKubernetesExecutor`` in -# ``[core]`` section above -# Define when to send a task to ``KubernetesExecutor`` when using ``CeleryKubernetesExecutor``. -# When the queue of a task is ``kubernetes_queue``, the task is executed via ``KubernetesExecutor``, -# otherwise via ``CeleryExecutor`` -kubernetes_queue = kubernetes - -{% if airflow_executor == "CeleryExecutor" %} -[celery] - -# This section only applies if you are using the CeleryExecutor in -# ``[core]`` section above -# The app name that will be used by celery -celery_app_name = {{ airflow_celery_app_name }} - -# The concurrency that will be used when starting workers with the -# ``airflow celery worker`` command. This defines the number of task instances that -# a worker will take, so size up your workers based on the resources on -# your worker box and the nature of your tasks -worker_concurrency = {{ airflow_celery_concurrency }} - -# The maximum and minimum concurrency that will be used when starting workers with the -# ``airflow celery worker`` command (always keep minimum processes, but grow -# to maximum if necessary). Note the value should be max_concurrency,min_concurrency -# Pick these numbers based on resources on worker box and the nature of the task. -# If autoscale option is available, worker_concurrency will be ignored. 
-# http://docs.celeryproject.org/en/latest/reference/celery.bin.worker.html#cmdoption-celery-worker-autoscale -# Example: worker_autoscale = 16,12 -# worker_autoscale = - -# Used to increase the number of tasks that a worker prefetches which can improve performance. -# The number of processes multiplied by worker_prefetch_multiplier is the number of tasks -# that are prefetched by a worker. A value greater than 1 can result in tasks being unnecessarily -# blocked if there are multiple workers and one worker prefetches tasks that sit behind long -# running tasks while another worker has unutilized processes that are unable to process the already -# claimed blocked tasks. -# https://docs.celeryproject.org/en/stable/userguide/optimizing.html#prefetch-limits -# Example: worker_prefetch_multiplier = 1 -# worker_prefetch_multiplier = - -# When you start an airflow worker, airflow starts a tiny web server -# subprocess to serve the workers local log files to the airflow main -# web server, who then builds pages and sends them to users. This defines -# the port on which the logs are served. It needs to be unused, and open -# visible from the main web server to connect into the workers. -worker_log_server_port = {{ airflow_celery_worker_log_server_port }} - -# Umask that will be used when starting workers with the ``airflow celery worker`` -# in daemon mode. This control the file-creation mode mask which determines the initial -# value of file permission bits for newly created files. -worker_umask = 0o077 - -# The Celery broker URL. Celery supports RabbitMQ, Redis and experimentally -# a sqlalchemy database. Refer to the Celery documentation for more information. -broker_url = {{ airflow_celery_broker_url }} - -# The Celery result_backend. When a job finishes, it needs to update the -# metadata of the job. Therefore it will post a message on a message bus, -# or insert it into a database (depending of the backend) -# This status is used by the scheduler to update the state of the task -# The use of a database is highly recommended -# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-result-backend-settings -result_backend = {{ airflow_celery_result_backend }} - -# Celery Flower is a sweet UI for Celery. Airflow has a shortcut to start -# it ``airflow celery flower``. This defines the IP that Celery Flower runs on -flower_host = {{ airflow_flower_host }} - -# The root URL for Flower -# Example: flower_url_prefix = /flower -flower_url_prefix = - -# This defines the port that Celery Flower runs on -flower_port = {{ airflow_flower_port }} - -# Securing Flower with Basic Authentication -# Accepts user:password pairs separated by a comma -# Example: flower_basic_auth = user1:password1,user2:password2 -flower_basic_auth = - -# Default queue that tasks get assigned to and that worker listen on. -default_queue = {{ airflow_celery_default_queue }} - -# How many processes CeleryExecutor uses to sync task state. -# 0 means to use max(1, number of cores - 1) processes. -sync_parallelism = 0 - -# Import path for celery configuration options -celery_config_options = airflow.config_templates.default_celery.DEFAULT_CELERY_CONFIG -ssl_active = {{ airflow_celery_ssl_active }} -ssl_key = {{ airflow_celery_ssl_key }} -ssl_cert = {{ airflow_celery_ssl_cert }} -ssl_cacert = {{ airflow_celery_ssl_cacert }} - -# Celery Pool implementation. -# Choices include: ``prefork`` (default), ``eventlet``, ``gevent`` or ``solo``. 
-# See: -# https://docs.celeryproject.org/en/latest/userguide/workers.html#concurrency -# https://docs.celeryproject.org/en/latest/userguide/concurrency/eventlet.html -pool = prefork - -# The number of seconds to wait before timing out ``send_task_to_executor`` or -# ``fetch_celery_task_state`` operations. -operation_timeout = 1.0 - -# Celery task will report its status as 'started' when the task is executed by a worker. -# This is used in Airflow to keep track of the running tasks and if a Scheduler is restarted -# or run in HA mode, it can adopt the orphan tasks launched by previous SchedulerJob. -task_track_started = True - -# Time in seconds after which Adopted tasks are cleared by CeleryExecutor. This is helpful to clear -# stalled tasks. -task_adoption_timeout = 600 - -# The Maximum number of retries for publishing task messages to the broker when failing -# due to ``AirflowTaskTimeout`` error before giving up and marking Task as failed. -task_publish_max_retries = 3 - -# Worker initialisation check to validate Metadata Database connection -worker_precheck = False - -[celery_broker_transport_options] - -# This section is for specifying options which can be passed to the -# underlying celery broker transport. See: -# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-broker_transport_options -# The visibility timeout defines the number of seconds to wait for the worker -# to acknowledge the task before the message is redelivered to another worker. -# Make sure to increase the visibility timeout to match the time of the longest -# ETA you're planning to use. -# visibility_timeout is only supported for Redis and SQS celery brokers. -# See: -# http://docs.celeryproject.org/en/master/userguide/configuration.html#std:setting-broker_transport_options -# Example: visibility_timeout = 21600 -# visibility_timeout = -{% elif airflow_executor == "DaskExecutor" %} -[dask] - -# This section only applies if you are using the DaskExecutor in -# [core] section above -# The IP address and port of the Dask cluster's scheduler. -cluster_address = {{ airflow_dask_cluster_address }} - -# TLS/ SSL settings to access a secured Dask scheduler. -tls_ca = {{ airflow_dask_tls_ca }} -tls_cert = {{ airflow_dask_tls_cert }} -tls_key = {{ airflow_dask_tls_key }} -{% endif %} - -[scheduler] -# Task instances listen for external kill signal (when you clear tasks -# from the CLI or the UI), this defines the frequency at which they should -# listen (in seconds). -job_heartbeat_sec = {{ airflow_scheduler_job_heartbeat_sec }} - -# How often (in seconds) to check and tidy up 'running' TaskInstancess -# that no longer have a matching DagRun -clean_tis_without_dagrun_interval = 15.0 - -# The scheduler constantly tries to trigger new tasks (look at the -# scheduler section in the docs for more information). This defines -# how often the scheduler should run (in seconds). -scheduler_heartbeat_sec = {{ airflow_scheduler_heartbeat_sec }} - -# The number of times to try to schedule each DAG file -# -1 indicates unlimited number -num_runs = -1 - -# The number of seconds to wait between consecutive DAG file processing -processor_poll_interval = 1 - -# after how much time (seconds) a new DAGs should be picked up from the filesystem -min_file_process_interval = {{ airflow_scheduler_min_file_process_interval }} - -# How often (in seconds) to scan the DAGs directory for new files. Default to 5 minutes. 
-dag_dir_list_interval = {{ airflow_scheduler_dag_dir_list_interval }}
-
-# How often should stats be printed to the logs. Setting to 0 will disable printing stats
-print_stats_interval = {{ airflow_scheduler_print_stats_interval }}
-
-# How often (in seconds) should pool usage stats be sent to statsd (if statsd_on is enabled)
-pool_metrics_interval = 5.0
-
-# If the last scheduler heartbeat happened more than scheduler_health_check_threshold
-# ago (in seconds), scheduler is considered unhealthy.
-# This is used by the health check in the "/health" endpoint
-scheduler_health_check_threshold = 30
-
-# How often (in seconds) should the scheduler check for orphaned tasks and SchedulerJobs
-orphaned_tasks_check_interval = 300.0
-child_process_log_directory = {{ airflow_child_process_log_folder }}
-
-# Local task jobs periodically heartbeat to the DB. If the job has
-# not heartbeat in this many seconds, the scheduler will mark the
-# associated task instance as failed and will re-schedule the task.
-scheduler_zombie_task_threshold = {{ airflow_scheduler_zombie_task_threshold }}
-
-# Turn off scheduler catchup by setting this to ``False``.
-# Default behavior is unchanged and
-# Command Line Backfills still work, but the scheduler
-# will not do scheduler catchup if this is ``False``,
-# however it can be set on a per DAG basis in the
-# DAG definition (catchup)
-catchup_by_default = {{ airflow_scheduler_catchup_by_default }}
-
-# This changes the batch size of queries in the scheduling main loop.
-# If this is too high, SQL query performance may be impacted by one
-# or more of the following:
-# - reversion to full table scan
-# - complexity of query predicate
-# - excessive locking
-# Additionally, you may hit the maximum allowable query length for your db.
-# Set this to 0 for no limit (not advised)
-max_tis_per_query = {{ airflow_scheduler_max_tis_per_query }}
-
-# Should the scheduler issue ``SELECT ... FOR UPDATE`` in relevant queries.
-# If this is set to False then you should not run more than a single
-# scheduler at once
-use_row_level_locking = True
-
-# Max number of DAGs to create DagRuns for per scheduler loop
-#
-# Default: 10
-# max_dagruns_to_create_per_loop =
-
-# How many DagRuns should a scheduler examine (and lock) when scheduling
-# and queuing tasks.
-#
-# Default: 20
-# max_dagruns_per_loop_to_schedule =
-
-# Should the Task supervisor process perform a "mini scheduler" to attempt to schedule more tasks of the
-# same DAG. Leaving this on will mean tasks in the same DAG execute quicker, but might starve out other
-# dags in some circumstances
-#
-# Default: True
-# schedule_after_task_execution =
-
-# The scheduler can run multiple processes in parallel to parse dags.
-# This defines how many processes will run.
-parsing_processes = 2
-
-# Turn off scheduler use of cron intervals by setting this to False.
-# DAGs submitted manually in the web UI or with trigger_dag will still run.
-use_job_schedule = True
-
-# Allow externally triggered DagRuns for Execution Dates in the future
-# Only has effect if schedule_interval is set to None in DAG
-allow_trigger_in_future = False
-
-[kerberos]
-ccache = {{ airflow_kerberos_ccache }}
-
-# gets augmented with fqdn
-principal = {{ airflow_kerberos_principal }}
-reinit_frequency = {{ airflow_kerberos_reinit_frequency }}
-kinit_path = {{ airflow_kerberos_kinit_path }}
-keytab = {{ airflow_kerberos_keytab }}
-
-[github_enterprise]
-api_rev = {{ airflow_github_enterprise_api_rev }}
-
-[admin]
-# UI to hide sensitive variable fields when set to True
-hide_sensitive_variable_fields = {{ airflow_admin_hide_sensitive_variable_fields }}
-
-# A comma-separated list of sensitive keywords to look for in variables names.
-sensitive_variable_fields =
-
-{% if airflow_webserver_auth_backend == "airflow.api.auth.backend.basic_auth" %}
-[ldap]
-# set a connection without encryption: uri = ldap://<your.ldap.server>:<port>
-uri = {{ airflow_ldap_uri }}
-user_filter = {{ airflow_ldap_user_filter }}
-# in case of Active Directory you would use: user_name_attr = sAMAccountName
-user_name_attr = {{ airflow_ldap_user_name_attr }}
-{% if airflow_ldap_superuser_filter %}
-superuser_filter = {{ airflow_ldap_superuser_filter }}
-{% endif %}
-{% if airflow_ldap_data_profiler_filter %}
-data_profiler_filter = {{ airflow_ldap_data_profiler_filter }}
-{% endif %}
-bind_user = {{ airflow_ldap_bind_user }}
-bind_password = {{ airflow_ldap_bind_password }}
-basedn = {{ airflow_ldap_basedn }}
-cacert = {{ airflow_ldap_cacert }}
-# Set search_scope to one of them: BASE, LEVEL , SUBTREE
-# Set search_scope to SUBTREE if using Active Directory, and not specifying an Organizational Unit
-search_scope = {{ airflow_ldap_search_scope }}
-{% endif %}
-
-[elasticsearch]
-# Elasticsearch host
-host =
-
-# Format of the log_id, which is used to query for a given tasks logs
-log_id_template = {dag_id}-{task_id}-{execution_date}-{try_number}
-
-# Used to mark the end of a log stream for a task
-end_of_log_mark = end_of_log
-
-# Qualified URL for an elasticsearch frontend (like Kibana) with a template argument for log_id
-# Code will construct log_id using the log_id template from the argument above.
-# NOTE: The code will prefix the https:// automatically, don't include that here.
-frontend =
-
-# Write the task logs to the stdout of the worker, rather than the default files
-write_stdout = False
-
-# Instead of the default log formatter, write the log lines as JSON
-json_format = False
-
-# Log fields to also attach to the json output, if enabled
-json_fields = asctime, filename, lineno, levelname, message
-
-[elasticsearch_configs]
-use_ssl = False
-verify_certs = True
-
-[kubernetes]
-# Path to the YAML pod file. If set, all other kubernetes-related fields are ignored.
-pod_template_file =
-
-# The repository of the Kubernetes Image for the Worker to Run
-worker_container_repository =
-
-# The tag of the Kubernetes Image for the Worker to Run
-worker_container_tag =
-
-# The Kubernetes namespace where airflow workers should be created. Defaults to ``default``
-namespace = default
-
-# If True, all worker pods will be deleted upon termination
-delete_worker_pods = True
-
-# If False (and delete_worker_pods is True),
-# failed worker pods will not be deleted so users can investigate them.
-delete_worker_pods_on_failure = False
-
-# Number of Kubernetes Worker Pod creation calls per scheduler loop.
-# Note that the current default of "1" will only launch a single pod
-# per-heartbeat. It is HIGHLY recommended that users increase this
-# number to match the tolerance of their kubernetes cluster for
-# better performance.
-worker_pods_creation_batch_size = 1
-
-# Allows users to launch pods in multiple namespaces.
-# Will require creating a cluster-role for the scheduler
-multi_namespace_mode = False
-
-# Use the service account kubernetes gives to pods to connect to kubernetes cluster.
-# It's intended for clients that expect to be running inside a pod running on kubernetes.
-# It will raise an exception if called from a process not running in a kubernetes environment.
-in_cluster = True
-
-# When running with in_cluster=False change the default cluster_context or config_file
-# options to Kubernetes client. Leave blank these to use default behaviour like ``kubectl`` has.
-# cluster_context =
-
-# Path to the kubernetes configfile to be used when ``in_cluster`` is set to False
-# config_file =
-
-# Keyword parameters to pass while calling a kubernetes client core_v1_api methods
-# from Kubernetes Executor provided as a single line formatted JSON dictionary string.
-# List of supported params are similar for all core_v1_apis, hence a single config
-# variable for all apis. See:
-# https://raw.githubusercontent.com/kubernetes-client/python/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/api/core_v1_api.py
-kube_client_request_args =
-
-# Optional keyword arguments to pass to the ``delete_namespaced_pod`` kubernetes client
-# ``core_v1_api`` method when using the Kubernetes Executor.
-# This should be an object and can contain any of the options listed in the ``v1DeleteOptions``
-# class defined here:
-# https://github.com/kubernetes-client/python/blob/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/models/v1_delete_options.py#L19
-# Example: delete_option_kwargs = {"grace_period_seconds": 10}
-delete_option_kwargs =
-
-# Enables TCP keepalive mechanism. This prevents Kubernetes API requests to hang indefinitely
-# when idle connection is time-outed on services like cloud load balancers or firewalls.
-enable_tcp_keepalive = False
-
-# When the `enable_tcp_keepalive` option is enabled, TCP probes a connection that has
-# been idle for `tcp_keep_idle` seconds.
-tcp_keep_idle = 120
-
-# When the `enable_tcp_keepalive` option is enabled, if Kubernetes API does not respond
-# to a keepalive probe, TCP retransmits the probe after `tcp_keep_intvl` seconds.
-tcp_keep_intvl = 30
-
-# When the `enable_tcp_keepalive` option is enabled, if Kubernetes API does not respond
-# to a keepalive probe, TCP retransmits the probe `tcp_keep_cnt number` of times before
-# a connection is considered to be broken.
-tcp_keep_cnt = 6
-
-[smart_sensor]
-# When `use_smart_sensor` is True, Airflow redirects multiple qualified sensor tasks to
-# smart sensor task.
-use_smart_sensor = False
-
-# `shard_code_upper_limit` is the upper limit of `shard_code` value. The `shard_code` is generated
-# by `hashcode % shard_code_upper_limit`.
-shard_code_upper_limit = 10000
-
-# The number of running smart sensor processes for each service.
-shards = 5
-
-# comma separated sensor classes support in smart_sensor.
-sensors_enabled = NamedHivePartitionSensor
\ No newline at end of file

From 1cbc79bb9450e6f69cb5ffcd7e028390e51bc28e Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Wed, 7 Apr 2021 11:38:15 +0200
Subject: [PATCH 11/21] Python version usage corrected

- Unset python version used, default case omitted

---
 Pipfile                |  2 +-
 defaults/main/main.yml |  2 +-
 tasks/install.yml      | 56 ++++++++++++++++++++++--------------------
 3 files changed, 32 insertions(+), 28 deletions(-)

diff --git a/Pipfile b/Pipfile
index b7b655f..d7c1798 100644
--- a/Pipfile
+++ b/Pipfile
@@ -12,4 +12,4 @@ docker = "==4.1.0"
 ansible-lint = "==4.2.0"
 
 [requires]
-python_version = "3.8"
+python_version = "3.7"
diff --git a/defaults/main/main.yml b/defaults/main/main.yml
index aeed9f3..fabee3e 100644
--- a/defaults/main/main.yml
+++ b/defaults/main/main.yml
@@ -6,7 +6,7 @@ airflow_package: apache-airflow
 
 # The default buster's python version, if you want other version you should make sure it is
 # installed before
-airflow_python_version: 3.7
+airflow_python_version:
 airflow_constraint_url: "https://raw.githubusercontent.com/apache/airflow/constraints-{{ airflow_version }}/constraints-{{ airflow_python_version }}.txt"
 
 # Available extra packages:
diff --git a/tasks/install.yml b/tasks/install.yml
index c190a69..9dab84e 100644
--- a/tasks/install.yml
+++ b/tasks/install.yml
@@ -52,7 +52,7 @@
 - name: Airflow | Set a virtualenv
   become: true
   become_user: "{{ airflow_user }}"
-  command: "virtualenv -p python{{ airflow_python_version }} {{ airflow_app_home }}"
+  command: "virtualenv -p python{{ airflow_python_version | default(omit) }} {{ airflow_app_home }}"
   when: not virtualenv_check.stat.exists
 
 - name: Airflow | Installing Python pip dependencies
@@ -62,30 +62,34 @@
     name: "{{ item.name }}"
     version: "{{ item.version | default(omit) }}"
     virtualenv: "{{ airflow_app_home }}"
-    virtualenv_python: "python{{ airflow_python_version }}"
+    virtualenv_python: "python{{ airflow_python_version | default(omit) }}"
   with_items: "{{ airflow_required_python_packages }}"
   when: airflow_required_python_packages is defined
 
-# - name: Airflow | Installing proper Celery version
-#   become: true
-#   become_user: "{{ airflow_user }}"
-#   pip:
-#     name: celery
-#     version: "{{ celery_version }}"
-#     virtualenv: "{{ airflow_app_home }}"
-#     virtualenv_python: "python{{ airflow_python_version }}"
-#   when: airflow_executor == "CeleryExecutor"
-
-# - name: Airflow | Installing extra Celery packages
-#   become: true
-#   become_user: "{{ airflow_user }}"
-#   pip:
-#     name: celery[{{ item }}]
-#     version: "{{ celery_version }}"
-#     virtualenv: "{{ airflow_app_home }}"
-#     virtualenv_python: "python{{ airflow_python_version }}"
-#   with_items: "{{ celery_extra_packages }}"
-#   when: airflow_executor == "CeleryExecutor" and celery_extra_packages
+- name: Airflow | Installing proper Celery version
+  become: true
+  become_user: "{{ airflow_user }}"
+  pip:
+    name: celery
+    version: "{{ celery_version }}"
+    virtualenv: "{{ airflow_app_home }}"
+    virtualenv_python: "python{{ airflow_python_version | default(omit) }}"
+  when:
+    - airflow_executor == "CeleryExecutor"
+    - airflow_version is version( '2.0.0', '<')
+
+- name: Airflow | Installing extra Celery packages
+  become: true
+  become_user: "{{ airflow_user }}"
+  pip:
+    name: celery[{{ item }}]
+    version: "{{ celery_version }}"
+    virtualenv: "{{ airflow_app_home }}"
+    virtualenv_python: "python{{ airflow_python_version | default(omit) }}"
+  with_items: "{{ celery_extra_packages }}"
+  when:
+    - airflow_executor == "CeleryExecutor" and celery_extra_packages
+    - airflow_version is version( '2.0.0', '<')
 
 - name: Airflow | Installing Airflow
   become: true
@@ -95,7 +99,7 @@
   pip:
     name: "{{ airflow_package }}"
     version: "{{ airflow_version }}"
    state: present
     virtualenv: "{{ airflow_app_home }}"
-    virtualenv_python: "python{{ airflow_python_version }}"
+    virtualenv_python: "python{{ airflow_python_version | default(omit) }}"
    extra_args: --no-cache-dir
   register: airflow_install
   environment:
@@ -107,7 +111,7 @@
   pip:
     name: "{{ airflow_package }}[{{ item }}]=={{ airflow_version }}"
     virtualenv: "{{ airflow_app_home }}"
-    virtualenv_python: "python{{ airflow_python_version }}"
+    virtualenv_python: "python{{ airflow_python_version | default(omit) }}"
   with_items: "{{ airflow_extra_packages }}"
   when:
     - airflow_extra_packages is defined
@@ -121,7 +125,7 @@
     name: "apache-airflow[{{ airflow_extra_packages | join(', ') }}]"
     version: "{{ airflow_version }}"
     virtualenv: "{{ airflow_app_home }}"
-    virtualenv_python: "python{{ airflow_python_version }}"
+    virtualenv_python: "python{{ airflow_python_version | default(omit) }}"
   when:
     - airflow_extra_packages is defined
     - not airflow_bundle_package
@@ -134,7 +138,7 @@
     name: "{{ item.name }}"
     version: "{{ item.version }}"
     virtualenv: "{{ airflow_app_home }}"
-    virtualenv_python: "python{{ airflow_python_version }}"
+    virtualenv_python: "python{{ airflow_python_version | default(omit) }}"
   with_items: "{{ dags_dependencies }}"
   when: dags_dependencies is defined
   notify:

From c65eb71a6af3f975dd44f59bb05f75235ffdf8d6 Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Wed, 7 Apr 2021 17:38:53 +0200
Subject: [PATCH 12/21] Fix forgotten paths

- Fixed paths missed after the group_vars reorder

---
 molecule/default/verify.yml | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/molecule/default/verify.yml b/molecule/default/verify.yml
index a9befbf..a5bb792 100644
--- a/molecule/default/verify.yml
+++ b/molecule/default/verify.yml
@@ -17,8 +17,10 @@
       goss_format: documentation
 
   vars_files:
-    - ../../defaults/main.yml
-    - ./group_vars/airflow/main.yml
+    - ../../defaults/main/main.yml
+    - ../../defaults/main/airflow-cfg.yml
+    - ../../defaults/main/webserver-py.yml
+    - ./group_vars/airflow_group/main.yml
 
   tasks:
     - name: Download and install Goss

From ae1d3eb21a14bec5138a7790385144e121564545 Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Thu, 8 Apr 2021 17:07:57 +0200
Subject: [PATCH 13/21] Build fixes and Readme updated

- Updated README.md
- .travis debian9 "support" removed (check the readme)
- Pip version parameterized
- Renamed webserver-config-py.yml file

---
 .gitignore                                    |   2 +
 .travis.yml                                   |   1 -
 README.md                                     | 107 +++++++++++-------
 defaults/main/main.yml                        |   6 +
 ...bserver-py.yml => webserver-config-py.yml} |   0
 molecule/default/molecule.yml                 |  10 +-
 molecule/default/verify.yml                   |   2 +-
 molecule/extra_packages/converge.yml          |   3 -
 .../group_vars/airflow/main.yml               |  12 --
 .../group_vars/airflow_group/main.yml         |  17 +++
 molecule/extra_packages/molecule.yml          |  10 +-
 molecule/extra_packages/verify.yml            |   6 +-
 tasks/install.yml                             |   5 +-
 13 files changed, 115 insertions(+), 66 deletions(-)
 rename defaults/main/{webserver-py.yml => webserver-config-py.yml} (100%)
 delete mode 100644 molecule/extra_packages/group_vars/airflow/main.yml
 create mode 100644 molecule/extra_packages/group_vars/airflow_group/main.yml

diff --git a/.gitignore b/.gitignore
index b31d4b9..76758fc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,3 +5,5 @@ tests/__pycache__
 .cache
 .vagrant
 tests/roles
+.vscode/*
+*.vt100
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
index 66af49a..2bc6d73 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -10,7 +10,6 @@ install:
   - pipenv sync
 env:
   jobs:
-    - MOLECULE_DISTRO=debian:stretch-slim
     - MOLECULE_DISTRO=debian:buster-slim
 script:
   - pipenv run molecule test --all
diff --git a/README.md b/README.md
index c631cfe..b7a01d4 100644
--- a/README.md
+++ b/README.md
@@ -1,112 +1,137 @@
-![Logo](https://raw.githubusercontent.com/idealista/airflow-role/master/logo.gif)
-
-[![Build Status](https://travis-ci.org/idealista/airflow-role.png)](https://travis-ci.org/idealista/airflow-role)
+# Apache Airflow Ansible role [![Build Status](https://travis-ci.org/idealista/airflow-role.png)](https://travis-ci.org/idealista/airflow-role)
 
-# Apache Airflow Ansible role
+![Logo](https://raw.githubusercontent.com/idealista/airflow-role/master/logo.gif)
 
 This ansible role installs a Apache Airflow server in a Debian/Ubuntu environment.
 
 - [Getting Started](#getting-started)
-  - [Prerequisities](#prerequisities)
-  - [Installing](#installing)
-- [Usage](#usage)
-- [Testing](#testing)
-- [Built With](#built-with)
-- [Versioning](#versioning)
-- [Authors](#authors)
-- [License](#license)
-- [Contributing](#contributing)
+  - [Prerequisites](#prerequisites-☑️)
+  - [Installing](#Installing-📥 )
+- [Usage](#usage-🏃)
+- [Testing](#testing-🧪)
+- [Built With](#built-with-🏗️)
+- [Versioning](#versioning-🗂️)
+- [Authors](#authors-:🦸)
+- [License](#license-🗒️)
+- [Contributing](#contributing-👷)
 
 ## Getting Started
 
 These instructions will get you a copy of the role for your ansible playbook. Once launched, it will install a [Apache Airflow](https://airflow.apache.org/) server in a Debian or Ubuntu system.
 
-### Prerequisities
+### Prerequisites ☑️
 
-Ansible 2.8.8 version installed.
-Inventory destination should be a Debian or Ubuntu environment.
+Ansible 2.9.9 version installed.
+Inventory destination should be a Debian (preferably Debian 10 Buster) or Ubuntu environment.
+
+This role should work with older versions of Debian but you need to know that due to Airflow minimum requirements you should check that Python 3.6 (or higher) is installed beforehand (See: [Airflow prerequisites](https://airflow.apache.org/docs/apache-airflow/stable/installation.html#prerequisites)). By default this role uses the predefined installation of Python that comes with the distro.
 
 For testing purposes, [Molecule](https://molecule.readthedocs.io/) with [Docker](https://www.docker.com/) as driver.
 
-### Installing
+### Installing 📥
 
 Create or add to your roles dependency file (e.g requirements.yml) from GitHub:
 
-```
+```yml
 - src: http://github.com/idealista/airflow-role.git
   scm: git
-  version: 1.7.3
+  version: 2.0.0
  name: airflow
 ```
 
 or using [Ansible Galaxy](https://galaxy.ansible.com/idealista/airflow-role/) as origin if you prefer:
 
-```
+```yml
 - src: idealista.airflow-role
-  version: 1.7.3
+  version: 2.0.0
  name: airflow
 ```
 
 Install the role with ansible-galaxy command:
 
-```
+```shell
 ansible-galaxy install -p roles -r requirements.yml -f
 ```
 
 Use in a playbook:
 
-```
+```yml
 ---
 - hosts: someserver
   roles:
     - { role: airflow }
 ```
 
-## Usage
+## Usage 🏃
+
+Look at the defaults properties files to see the possible configuration properties:
 
-Look to the defaults properties file to see the possible configuration properties.
+- [`main.yml`](./defaults/main/main.yml) for airflow general purpose
+- [`airflow-cfg.yml`](./defaults/main/airflow-cfg.yml) for all the related airflow.cfg config parameters
+- [`webserver-config-py.yml`](./defaults/main/webserver-config-py.yml) for all the related webserver_config.py config parameters
 
-Bear in mind that, starting with Airflow v1.10.0, PyPi package `pyasn1` v0.4.4 is needed. To install it:
-``` yml
+👉 Don't forget to set your:
+
+- 🔑 Fernet key
+- 🔑 Webserver secret key
+- 🐍 Python and pip version
+- 📦 [Extra packages](#Extra-packages) if you need additional operators, hooks, sensors...
+- 📦 [Required Python packages](#Required-Python-packages) with specific versions, like SQLAlchemy in the example below (to avoid known Airflow bugs❗️), or because they are necessary
+- ⚠️ With Airflow v1.10.0, PyPi package `pyasn1` v0.4.4 is needed. To install it:
+
+### 📦 Required Python packages
+
+[`airflow_required_python_packages`](./defaults/main/main.yml) should be a list following this format:
+
+```yml
 airflow_required_python_packages:
+  - { name: SQLAlchemy, version: 1.3.23 }
+  - { name: psycopg2 }
   - {name: pyasn1, version: 0.4.4}
 ```
 
-`airflow_extra_packages` (available at: https://airflow.apache.org/installation.html#extra-packages) should be a list following this format:
-``` yml
+### 📦 Extra packages
+
+[`airflow_extra_packages`](./defaults/main/main.yml) should be a list following this format:
+
+```yml
 airflow_extra_packages:
+  - apache.atlas
   - celery
-  - mysql
+  - ssh
 ```
 
-## Testing
-```
-pipenv install -r test-requirements.txt --python 3.8
+👉 For more info about these extra packages see: [Airflow extra packages](https://airflow.apache.org/docs/apache-airflow/stable/extra-packages-ref.html)
+
+## Testing 🧪
+
+```shell
+pipenv install -r test-requirements.txt --python 3.7
 pipenv run molecule test
 ```
 
-## Built With
+## Built With 🏗️
 
-![Ansible](https://img.shields.io/badge/ansible-2.4.5.0-green.svg)
+![Ansible](https://img.shields.io/badge/ansible-2.9.9-green.svg)
 
-## Versioning
+## Versioning 🗂️
 
 For the versions available, see the [tags on this repository](https://github.com/idealista/airflow-role/tags).
 
-Additionaly you can see what change in each version in the [CHANGELOG.md](CHANGELOG.md) file.
+Additionally you can see what changed in each version in the [CHANGELOG.md](CHANGELOG.md) file.
 
-## Authors
+## Authors 🦸
 
-* **Idealista** - *Work with* - [idealista](https://github.com/idealista)
+- **Idealista** - *Work with* - [idealista](https://github.com/idealista)
 
 See also the list of [contributors](https://github.com/idealista/airflow-role/contributors) who participated in this project.
 
-## License
+## License 🗒️
 
 ![Apache 2.0 License](https://img.shields.io/hexpm/l/plug.svg)
 
 This project is licensed under the [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) license - see the [LICENSE](LICENSE) file for details.
 
-## Contributing
+## Contributing 👷
 
 Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details on our code of conduct, and the process for submitting pull requests to us.
diff --git a/defaults/main/main.yml b/defaults/main/main.yml
index fabee3e..e398c88 100644
--- a/defaults/main/main.yml
+++ b/defaults/main/main.yml
@@ -7,6 +7,12 @@ airflow_package: apache-airflow
 # The default buster's python version, if you want other version you should make sure it is
 # installed before
 airflow_python_version:
+
+# Because of problems with some pip versions, we added this option to choose which version the role should install.
+# See https://airflow.apache.org/docs/apache-airflow/stable/installation.html#installation-tools
+airflow_pip_version: 20.2.4
+
+# Airflow url to download the correct version
 airflow_constraint_url: "https://raw.githubusercontent.com/apache/airflow/constraints-{{ airflow_version }}/constraints-{{ airflow_python_version }}.txt"
 
 # Available extra packages:
diff --git a/defaults/main/webserver-py.yml b/defaults/main/webserver-config-py.yml
similarity index 100%
rename from defaults/main/webserver-py.yml
rename to defaults/main/webserver-config-py.yml
diff --git a/molecule/default/molecule.yml b/molecule/default/molecule.yml
index ab56ab0..6fa135f 100644
--- a/molecule/default/molecule.yml
+++ b/molecule/default/molecule.yml
@@ -1,11 +1,14 @@
 ---
 dependency:
   name: galaxy
+
 driver:
   name: docker
+
 lint: |
   yamllint .
   ansible-lint .
+
 platforms:
   - name: airflow
     groups:
@@ -32,10 +35,13 @@ platforms:
       - 0.0.0.0:8080:8080/tcp
       - 0.0.0.0:8081:8081/tcp
      - 0.0.0.0:5000:5000/tcp
+
 provisioner:
   name: ansible
-# scenario:
-#   name: default
+
+scenario:
+  name: default
+
 verifier:
   name: ansible
diff --git a/molecule/default/verify.yml b/molecule/default/verify.yml
index a5bb792..8c745b5 100644
--- a/molecule/default/verify.yml
+++ b/molecule/default/verify.yml
@@ -19,7 +19,7 @@
   vars_files:
     - ../../defaults/main/main.yml
     - ../../defaults/main/airflow-cfg.yml
-    - ../../defaults/main/webserver-py.yml
+    - ../../defaults/main/webserver-config-py.yml
    - ./group_vars/airflow_group/main.yml
 
   tasks:
     - name: Download and install Goss
diff --git a/molecule/extra_packages/converge.yml b/molecule/extra_packages/converge.yml
index bcf504d..431fe99 100644
--- a/molecule/extra_packages/converge.yml
+++ b/molecule/extra_packages/converge.yml
@@ -7,6 +7,3 @@
   environment:
     AIRFLOW_HOME: "{{ airflow_app_home }}"
     AIRFLOW_CONFIG: "{{ airflow_conf_path }}/airflow.cfg"
-  vars_files:
-    - ../../defaults/main.yml
-    - ./group_vars/airflow/main.yml
diff --git a/molecule/extra_packages/group_vars/airflow/main.yml b/molecule/extra_packages/group_vars/airflow/main.yml
deleted file mode 100644
index e05cabd..0000000
--- a/molecule/extra_packages/group_vars/airflow/main.yml
+++ /dev/null
@@ -1,12 +0,0 @@
----
-airflow_fernet_key: nOtAfErNeTkEyRePlAcEmE____ItfURHlEDxrt-bBQw=
-
-airflow_extra_packages:
-  - celery
-  - cgroups
-  - ftp
-  - http
-  - jdbc
-  - sftp
-  - sqlite
-  - ssh
diff --git a/molecule/extra_packages/group_vars/airflow_group/main.yml b/molecule/extra_packages/group_vars/airflow_group/main.yml
new file mode 100644
index 0000000..2a81f33
--- /dev/null
+++ b/molecule/extra_packages/group_vars/airflow_group/main.yml
@@ -0,0 +1,17 @@
+---
+# Example keys
+airflow_fernet_key: xKy13nPFfDflJ0DYGVTwf_DEmbItfURHlEDxrt-bBQw=
+airflow_webserver_secret_key: "ihadsf908auw0340684"
+
+airflow_required_python_packages:
+  - { name: SQLAlchemy, version: 1.3.23 }
+  - { name: psycopg2 }
+
+airflow_extra_packages:
+  - celery
+  - cgroups
+  - ftp
+  - http
+  - jdbc
+  - sftp
+  - ssh
diff --git a/molecule/extra_packages/molecule.yml b/molecule/extra_packages/molecule.yml
index db7f9b1..6214d1f 100644
--- a/molecule/extra_packages/molecule.yml
+++ b/molecule/extra_packages/molecule.yml
@@ -1,11 +1,14 @@
 ---
 dependency:
   name: galaxy
+
 driver:
   name: docker
+
 lint: |
   yamllint .
   ansible-lint .
+
 platforms:
   - name: airflow-extra-packages
     groups:
@@ -32,9 +35,12 @@ platforms:
       - 0.0.0.0:8080:8080/tcp
       - 0.0.0.0:8081:8081/tcp
       - 0.0.0.0:5000:5000/tcp
+
 provisioner:
   name: ansible
-# scenario:
-#   name: default
+
+scenario:
+  name: extra_packages
+
 verifier:
   name: ansible
diff --git a/molecule/extra_packages/verify.yml b/molecule/extra_packages/verify.yml
index a9befbf..8c745b5 100644
--- a/molecule/extra_packages/verify.yml
+++ b/molecule/extra_packages/verify.yml
@@ -17,8 +17,10 @@
       goss_format: documentation
 
   vars_files:
-    - ../../defaults/main.yml
-    - ./group_vars/airflow/main.yml
+    - ../../defaults/main/main.yml
+    - ../../defaults/main/airflow-cfg.yml
+    - ../../defaults/main/webserver-config-py.yml
+    - ./group_vars/airflow_group/main.yml
 
   tasks:
     - name: Download and install Goss
diff --git a/tasks/install.yml b/tasks/install.yml
index 9dab84e..95c575d 100644
--- a/tasks/install.yml
+++ b/tasks/install.yml
@@ -34,10 +34,11 @@
     update_cache: true
 
 # See https://airflow.apache.org/docs/apache-airflow/stable/installation.html#installation-tools
-- name: Airflow | Install pip 20.2.4 version
+- name: Airflow | Install pip "{{ airflow_pip_version }}" version
   pip:
     name: pip
-    version: 20.2.4
+    version: "{{ airflow_pip_version }}"
+  when: airflow_pip_version is defined
 
 - name: Airflow | Install virtualenv
   pip:

From e5bbef147c0901be65eaff5cbebc67e3e90894f9 Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Thu, 8 Apr 2021 18:37:11 +0200
Subject: [PATCH 14/21] README update

---
 README.md | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/README.md b/README.md
index b7a01d4..3267042 100644
--- a/README.md
+++ b/README.md
@@ -24,7 +24,9 @@ These instructions will get you a copy of the role for your ansible playbook. On
 Ansible 2.9.9 version installed.
 Inventory destination should be a Debian (preferably Debian 10 Buster) or Ubuntu environment.
 
-This role should work with older versions of Debian but you need to know that due to Airflow minimum requirements you should check that Python 3.6 (or higher) is installed beforehand (See: [Airflow prerequisites](https://airflow.apache.org/docs/apache-airflow/stable/installation.html#prerequisites)). By default this role uses the predefined installation of Python that comes with the distro.
+ℹ️ This role should work with older versions of Debian but you need to know that due to Airflow minimum requirements you should check that 🐍 Python 3.6 (or higher) is installed beforehand (👉 See: [Airflow prerequisites](https://airflow.apache.org/docs/apache-airflow/stable/installation.html#prerequisites)).
+
+ℹ️ By default this role uses the predefined installation of Python that comes with the distro.
 
 For testing purposes, [Molecule](https://molecule.readthedocs.io/) with [Docker](https://www.docker.com/) as driver.
 
@@ -70,14 +72,17 @@ Look at the defaults properties files to see the possible configuration properti
 - [`airflow-cfg.yml`](./defaults/main/airflow-cfg.yml) for all the related airflow.cfg config parameters
 - [`webserver-config-py.yml`](./defaults/main/webserver-config-py.yml) for all the related webserver_config.py config parameters
 
-👉 Don't forget to set your:
+👉 Don't forget:
 
-- 🔑 Fernet key
-- 🔑 Webserver secret key
-- 🐍 Python and pip version
+- 🔑 To set Fernet key.
+- 🔑 To set webserver secret key.
+- 📝 To set your AIRFLOW_HOME and AIRFLOW_CONFIG at your own discretion.
+- 📝 To set your installation and config skeleton paths at your own discretion.
+  - 👉 See `airflow_skeleton_paths` in [`main.yml`](./defaults/main/main.yml)
+- 🐍 Python and pip version.
 - 📦 [Extra packages](#Extra-packages) if you need additional operators, hooks, sensors...
 - 📦 [Required Python packages](#Required-Python-packages) with specific versions, like SQLAlchemy in the example below (to avoid known Airflow bugs❗️), or because they are necessary
-- ⚠️ With Airflow v1.10.0, PyPi package `pyasn1` v0.4.4 is needed. To install it:
+- ⚠️ With Airflow v1.10.0, PyPi package `pyasn1` v0.4.4 is needed. See examples below
 
 ### 📦 Required Python packages
 

From 5e57486c459b03cf442bcd774b3ad045a843fc9f Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Fri, 9 Apr 2021 12:44:59 +0200
Subject: [PATCH 15/21] Fix typo in config and gracefully stop services

- Fixed typo in airflow-cfg template causing log troubles
- Use SIGINT kill signal in all service templates

---
 README.md                              | 2 +-
 defaults/main/airflow-cfg.yml          | 2 +-
 tasks/install.yml                      | 3 ++-
 templates/airflow-flower.service.j2    | 1 +
 templates/airflow-scheduler.service.j2 | 1 +
 templates/airflow-webserver.service.j2 | 1 +
 6 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 3267042..cf17e9a 100644
--- a/README.md
+++ b/README.md
@@ -17,7 +17,7 @@
 ## Getting Started
 
-These instructions will get you a copy of the role for your ansible playbook. Once launched, it will install a [Apache Airflow](https://airflow.apache.org/) server in a Debian or Ubuntu system.
+These instructions will get you a copy of the role for your ansible playbook. Once launched, it will install [Apache Airflow](https://airflow.apache.org/) in a Debian or Ubuntu system.
 
 ### Prerequisites ☑️
 
diff --git a/defaults/main/airflow-cfg.yml b/defaults/main/airflow-cfg.yml
index f8f46ca..4c4f615 100644
--- a/defaults/main/airflow-cfg.yml
+++ b/defaults/main/airflow-cfg.yml
@@ -74,7 +74,7 @@ airflow_task_log_prefix_template:
 airflow_log_filename_template: >
   "{{ '{{' }} ti.dag_id {{ '}}' }}/{{ '{{' }} ti.task_id {{ '}}' }}/{{ '{{' }} ts {{ '}}' }}/{{ '{{' }} try_number {{ '}}' }}.log"
 airflow_log_processor_filename_template: "{{ '{{' }} filename {{ '}}' }}.log"
-airflow_dag_processor_manager_log_location: "{airflow_logs_folder}/dag_processor_manager/dag_processor_manager.log"
+airflow_dag_processor_manager_log_location: "{{ airflow_logs_folder }}/dag_processor_manager/dag_processor_manager.log"
 airflow_task_log_reader: "task"
 
 airflow_extra_loggers:
diff --git a/tasks/install.yml b/tasks/install.yml
index 95c575d..7011d1e 100644
--- a/tasks/install.yml
+++ b/tasks/install.yml
@@ -3,6 +3,7 @@
 - name: Airflow | Ensure Airflow group
   group:
     name: "{{ airflow_group }}"
+  become: true
 
 - name: Airflow | Ensure Airflow user
   user:
@@ -10,8 +11,8 @@
     group: "{{ airflow_group }}"
     system: true
     shell: /usr/sbin/nologin
-    # shell: /bin/bash
     createhome: false
+  become: true
 
 - name: Airflow | Ensure airflow skeleton paths
   file:
diff --git a/templates/airflow-flower.service.j2 b/templates/airflow-flower.service.j2
index 40c31d8..684c642 100644
--- a/templates/airflow-flower.service.j2
+++ b/templates/airflow-flower.service.j2
@@ -27,6 +27,7 @@ ExecStart={{ airflow_executable }} celery flower --pid {{ airflow_pidfile_folder
 {% else %}
 ExecStart={{ airflow_executable }} flower --pid {{ airflow_pidfile_folder }}-flower/flower.pid
 {% endif %}
+KillSignal=SIGINT
 Restart=on-failure
 RestartSec=10s
 PrivateTmp={{ airflow_private_tmp }}
diff --git a/templates/airflow-scheduler.service.j2 b/templates/airflow-scheduler.service.j2
index 531050b..bbdd1ff 100644
--- a/templates/airflow-scheduler.service.j2
+++ b/templates/airflow-scheduler.service.j2
@@ -23,6 +23,7 @@ User={{ airflow_user }}
 Group={{ airflow_group }}
 Type=simple
 ExecStart={{ airflow_executable }} scheduler -n ${SCHEDULER_RUNS} --pid {{ airflow_pidfile_folder }}-scheduler/scheduler.pid
+KillSignal=SIGINT
 Restart=always
 RestartSec=5s
 PrivateTmp={{ airflow_private_tmp }}
diff --git a/templates/airflow-webserver.service.j2 b/templates/airflow-webserver.service.j2
index 540400c..b185113 100644
--- a/templates/airflow-webserver.service.j2
+++ b/templates/airflow-webserver.service.j2
@@ -23,6 +23,7 @@ User={{ airflow_user }}
 Group={{ airflow_group }}
 Type=simple
 ExecStart={{ airflow_executable }} webserver --pid {{ airflow_pidfile_folder }}-webserver/webserver.pid
+KillSignal=SIGINT
 Restart=on-failure
 RestartSec=5s
 PrivateTmp={{ airflow_private_tmp }}

From 5cec842ab26dc3c51cf6c39fdada477b5a12b534 Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Fri, 9 Apr 2021 15:05:56 +0200
Subject: [PATCH 16/21] Restore missed extra_args for DAGs dependencies

---
 tasks/install.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tasks/install.yml b/tasks/install.yml
index 7011d1e..a690546 100644
--- a/tasks/install.yml
+++ b/tasks/install.yml
@@ -138,9 +138,10 @@
   become_user: "{{ airflow_user }}"
   pip:
     name: "{{ item.name }}"
-    version: "{{ item.version }}"
+    version: "{{ item.version | default(omit) }}"
     virtualenv: "{{ airflow_app_home }}"
     virtualenv_python: "python{{ airflow_python_version | default(omit) }}"
+     extra_args: "{{ item.extra_args | default(omit) }}"
   with_items: "{{ dags_dependencies }}"
   when: dags_dependencies is defined
   notify:

From d8b057c62973c9250d303d7222b7e4e8d24f5ad4 Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Fri, 9 Apr 2021 15:18:39 +0200
Subject: [PATCH 17/21] Hotfix last commit

- Wrong indentation on extra_args committed by error

---
 tasks/install.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tasks/install.yml b/tasks/install.yml
index a690546..0ea34d5 100644
--- a/tasks/install.yml
+++ b/tasks/install.yml
@@ -141,7 +141,7 @@
     version: "{{ item.version | default(omit) }}"
     virtualenv: "{{ airflow_app_home }}"
     virtualenv_python: "python{{ airflow_python_version | default(omit) }}"
-     extra_args: "{{ item.extra_args | default(omit) }}"
+    extra_args: "{{ item.extra_args | default(omit) }}"
   with_items: "{{ dags_dependencies }}"
   when: dags_dependencies is defined
   notify:

From 221b9569247a397c05c89342fdf5fdb8c0c09ac5 Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Mon, 12 Apr 2021 09:45:52 +0200
Subject: [PATCH 18/21] Update .git things

- Update .gitignore to remove vagrant entry
- Add new .gitattributes for better yml detection

---
 .gitattributes | 3 +++
 .gitignore     | 1 -
 2 files changed, 3 insertions(+), 1 deletion(-)
 create mode 100644 .gitattributes

diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..4ba765e
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,3 @@
+*.yml linguist-detectable=true
+*.yaml linguist-detectable=true
+*.html linguist-detectable=false
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 76758fc..f6d6ae3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,7 +3,6 @@ tests/playbook.retry
 tests/__pycache__
 .molecule
 .cache
-.vagrant
 tests/roles
 .vscode/*
 *.vt100
\ No newline at end of file

From c8564c9954ee0d00f2b758f79baf287cd2cc043c Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Mon, 12 Apr 2021 12:04:40 +0200
Subject: [PATCH 19/21] Admin default values and readme update

- Remove default admin values from main.yml (thanks to @xtianae7)
- Add these default admin values to molecule group_vars
- Readme updated to warn about setting admin values
- Readme emoji anchors fixed (thanks to @blalop)

---
 README.md                                     | 51 ++++++++++---------
 defaults/main/main.yml                        | 14 ++---
 .../default/group_vars/airflow_group/main.yml |  9 ++++
 .../group_vars/airflow_group/main.yml         |  9 ++++
 4 files changed, 51 insertions(+), 32 deletions(-)

diff --git a/README.md b/README.md
index cf17e9a..043b705 100644
--- a/README.md
+++ b/README.md
@@ -5,21 +5,21 @@ This ansible role installs a Apache Airflow server in a Debian/Ubuntu environment.
 
 - [Getting Started](#getting-started)
-  - [Prerequisites](#prerequisites-☑️)
-  - [Installing](#Installing-📥 )
-- [Usage](#usage-🏃)
-- [Testing](#testing-🧪)
-- [Built With](#built-with-🏗️)
-- [Versioning](#versioning-🗂️)
-- [Authors](#authors-:🦸)
-- [License](#license-🗒️)
-- [Contributing](#contributing-👷)
+  - [Prerequisites](#prerequisites-ballot_box_with_check)
+  - [Installing](#Installing-inbox_tray )
+- [Usage](#usage-runner)
+- [Testing](#testing-test_tube)
+- [Built With](#built-with-building_construction)
+- [Versioning](#versioning-card_file_box)
+- [Authors](#authors-superhero)
+- [License](#license-spiral_notepad)
+- [Contributing](#contributing-construction_worker)
 
 ## Getting Started
 
 These instructions will get you a copy of the role for your ansible playbook. Once launched, it will install [Apache Airflow](https://airflow.apache.org/) in a Debian or Ubuntu system.
 
-### Prerequisites ☑️
+### Prerequisites :ballot_box_with_check:
 
 Ansible 2.9.9 version installed.
 Inventory destination should be a Debian (preferably Debian 10 Buster) or Ubuntu environment.
@@ -30,7 +30,7 @@ Inventory destination should be a Debian (preferably Debian 10 Buster) or Ubunt
 
 For testing purposes, [Molecule](https://molecule.readthedocs.io/) with [Docker](https://www.docker.com/) as driver.
 
-### Installing 📥
+### Installing :inbox_tray:
 
 Create or add to your roles dependency file (e.g requirements.yml) from GitHub:
 
@@ -64,27 +64,28 @@
   - { role: airflow }
 ```
 
-## Usage 🏃
+## Usage :runner:
 
 Look at the defaults properties files to see the possible configuration properties:
 
-- [`main.yml`](./defaults/main/main.yml) for airflow general purpose
-- [`airflow-cfg.yml`](./defaults/main/airflow-cfg.yml) for all the related airflow.cfg config parameters
-- [`webserver-config-py.yml`](./defaults/main/webserver-config-py.yml) for all the related webserver_config.py config parameters
+- [`main.yml`](./defaults/main/main.yml) for airflow general purpose.
+- [`airflow-cfg.yml`](./defaults/main/airflow-cfg.yml) for all the related airflow.cfg config parameters.
+- [`webserver-config-py.yml`](./defaults/main/webserver-config-py.yml) for all the related webserver_config.py config parameters.
 
 👉 Don't forget:
 
+- 🦸 To set your Admin user.
 - 🔑 To set Fernet key.
 - 🔑 To set webserver secret key.
 - 📝 To set your AIRFLOW_HOME and AIRFLOW_CONFIG at your own discretion.
 - 📝 To set your installation and config skeleton paths at your own discretion.
   - 👉 See `airflow_skeleton_paths` in [`main.yml`](./defaults/main/main.yml)
 - 🐍 Python and pip version.
-- 📦 [Extra packages](#Extra-packages) if you need additional operators, hooks, sensors...
-- 📦 [Required Python packages](#Required-Python-packages) with specific versions, like SQLAlchemy in the example below (to avoid known Airflow bugs❗️), or because they are necessary
+- 📦 [Extra packages](#package-Extra-packages) if you need additional operators, hooks, sensors...
+- 📦 [Required Python packages](#package-Required-Python-packages) with specific versions, like SQLAlchemy in the example below (to avoid known Airflow bugs❗️), or because they are necessary
 - ⚠️ With Airflow v1.10.0, PyPi package `pyasn1` v0.4.4 is needed. See examples below
 
-### 📦 Required Python packages
+### :package: Required Python packages
 
 [`airflow_required_python_packages`](./defaults/main/main.yml) should be a list following this format:
 
@@ -95,7 +96,7 @@ airflow_required_python_packages:
   - {name: pyasn1, version: 0.4.4}
 ```
 
-### 📦 Extra packages
+### :package: Extra packages
 
 [`airflow_extra_packages`](./defaults/main/main.yml) should be a list following this format:
 
@@ -108,35 +109,35 @@ airflow_extra_packages:
 
 👉 For more info about these extra packages see: [Airflow extra packages](https://airflow.apache.org/docs/apache-airflow/stable/extra-packages-ref.html)
 
-## Testing 🧪
+## Testing :test_tube:
 
 ```shell
 pipenv install -r test-requirements.txt --python 3.7
 pipenv run molecule test
 ```
 
-## Built With 🏗️
+## Built With :building_construction:
 
 ![Ansible](https://img.shields.io/badge/ansible-2.9.9-green.svg)
 
-## Versioning 🗂️
+## Versioning :card_file_box:
 
 For the versions available, see the [tags on this repository](https://github.com/idealista/airflow-role/tags).
 
 Additionally you can see what changed in each version in the [CHANGELOG.md](CHANGELOG.md) file.
 
-## Authors 🦸
+## Authors :superhero:
 
 - **Idealista** - *Work with* - [idealista](https://github.com/idealista)
 
 See also the list of [contributors](https://github.com/idealista/airflow-role/contributors) who participated in this project.
 
-## License 🗒️
+## License :spiral_notepad:
 
 ![Apache 2.0 License](https://img.shields.io/hexpm/l/plug.svg)
 
 This project is licensed under the [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) license - see the [LICENSE](LICENSE) file for details.
 
-## Contributing 👷
+## Contributing :construction_worker:
 
 Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details on our code of conduct, and the process for submitting pull requests to us.
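The README guidance above, combined with the admin defaults this patch moves into the molecule scenarios, lands in practice in a single group_vars file. A minimal sketch of such a file follows; every value here is a placeholder lifted from the role's own examples, not a real secret:

```yml
---
# Placeholder keys: generate your own before deploying.
airflow_fernet_key: xKy13nPFfDflJ0DYGVTwf_DEmbItfURHlEDxrt-bBQw=
airflow_webserver_secret_key: "ihadsf908auw0340684"

# First admin user the role creates.
airflow_admin_users:
  - name: default
    username: admin
    password: admin
    role: Admin
    firstname: Admin
    lastname: Admin
    email: admin@email.com

# Pinned Python packages, e.g. SQLAlchemy to dodge known Airflow bugs.
airflow_required_python_packages:
  - { name: SQLAlchemy, version: 1.3.23 }
  - { name: psycopg2 }

# Extra packages for additional operators, hooks and sensors.
airflow_extra_packages:
  - celery
  - ssh
```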
diff --git a/defaults/main/main.yml b/defaults/main/main.yml
index e398c88..cda464b 100644
--- a/defaults/main/main.yml
+++ b/defaults/main/main.yml
@@ -138,13 +138,13 @@ airflow_group: airflow
 
 # Admin user
 airflow_admin_users:
-  - name: default
-    username: admin
-    password: admin
-    role: Admin
-    firstname: Admin
-    lastname: Admin
-    email: admin@email.com
+  - name:
+    username:
+    password:
+    role:
+    firstname:
+    lastname:
+    email:
 
 # Service options
 airflow_scheduler_runs: 1000
diff --git a/molecule/default/group_vars/airflow_group/main.yml b/molecule/default/group_vars/airflow_group/main.yml
index 8a0bdf9..f7ca69f 100644
--- a/molecule/default/group_vars/airflow_group/main.yml
+++ b/molecule/default/group_vars/airflow_group/main.yml
@@ -3,6 +3,15 @@
 airflow_fernet_key: xKy13nPFfDflJ0DYGVTwf_DEmbItfURHlEDxrt-bBQw=
 airflow_webserver_secret_key: "ihadsf908auw0340684"
 
+airflow_admin_users:
+  - name: default
+    username: admin
+    password: admin
+    role: Admin
+    firstname: Admin
+    lastname: Admin
+    email: admin@email.com
+
 airflow_required_python_packages:
   - { name: SQLAlchemy, version: 1.3.23 }
   - { name: psycopg2 }
\ No newline at end of file
diff --git a/molecule/extra_packages/group_vars/airflow_group/main.yml b/molecule/extra_packages/group_vars/airflow_group/main.yml
index 2a81f33..d1bf123 100644
--- a/molecule/extra_packages/group_vars/airflow_group/main.yml
+++ b/molecule/extra_packages/group_vars/airflow_group/main.yml
@@ -3,6 +3,15 @@
 airflow_fernet_key: xKy13nPFfDflJ0DYGVTwf_DEmbItfURHlEDxrt-bBQw=
 airflow_webserver_secret_key: "ihadsf908auw0340684"
 
+airflow_admin_users:
+  - name: default
+    username: admin
+    password: admin
+    role: Admin
+    firstname: Admin
+    lastname: Admin
+    email: admin@email.com
+
 airflow_required_python_packages:
   - { name: SQLAlchemy, version: 1.3.23 }
   - { name: psycopg2 }

From ecd6744a65f2a855b1e042286b344c542ee3a951 Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Mon, 12 Apr 2021 16:54:36 +0200
Subject: [PATCH 20/21] Changelog update

---
 CHANGELOG.md | 89 ++++++++++++++++++++++++++++++++++++++++++++++------
 1 file changed, 80 insertions(+), 9 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9ed300f..7df7fa0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,122 +1,193 @@
 # Change Log
+
 All notable changes to this project will be documented in this file.
 This project adheres to [Semantic Versioning](http://semver.org/) and [Keep a changelog](https://github.com/olivierlacan/keep-a-changelog).
 
 ## [Unreleased](https://github.com/idealista/airflow-role/tree/develop)
+
+[Full Changelog](https://github.com/idealista/airflow-role/compare/1.8.4...2.0.0)
+
+### Added
+
+- Support for new Apache Airflow 2.0 and its new config files
+- New templates and new group_vars files to fit better with new Apache Airflow 2.0
+- New user-related tasks
+- [#82](https://github.com/idealista/airflow-role/issues/82) Add support to 2.0 airflow version @lorientedev
+
 ### Changed
-- *[#85](https://github.com/idealista/airflow-role/issues/85) Add extra args in pip install to allow different repositories @lorientedev
+
+- Project cleaning
+- Updated ansible and molecule test requirements
+- Updated molecule tests
+- Updated travis file
+- Updated yamllint
+- Updated service templates
+- Updated config and install tasks
+- Updated [`README.md`](./README.md) to give more information about the role and advise on some settings
+- [#54](https://github.com/idealista/airflow-role/issues/54) Install tasks fail when run without escalated privileges
 
-
-## [1.8.4](https://github.com/idealista/airflow-role/tree/1.8.4)
+### Fixed
 
-- *[#82](https://github.com/idealista/airflow-role/issues/82) Add support to airflow 2.0 version. @lorientedev
+- [#61](https://github.com/idealista/airflow-role/issues/61) Better worker restarts
+- [#85](https://github.com/idealista/airflow-role/issues/85) Add extra args in pip install to allow different repositories @lorientedev
 
-## [1.8.3](https://github.com/idealista/airflow-role/tree/1.8.3)
+## [1.8.4](https://github.com/idealista/airflow-role/tree/1.8.4)
 
-- *[#78](https://github.com/idealista/airflow-role/issues/78) Add path for services templates to allow overwrite from playbook @lorientedev
+- [#82](https://github.com/idealista/airflow-role/issues/82) Add support to airflow 2.0 version. @lorientedev
+
+## [1.8.3](https://github.com/idealista/airflow-role/tree/1.8.3)
+
+- [#78](https://github.com/idealista/airflow-role/issues/78) Add path for services templates to allow overwrite from playbook @lorientedev
 
 ## [1.8.2](https://github.com/idealista/airflow-role/tree/1.8.2)
 
-- *[#75](https://github.com/idealista/airflow-role/issues/75) Added variable to change private_tmp value in service config @lorientedev
+- [#75](https://github.com/idealista/airflow-role/issues/75) Added variable to change private_tmp value in service config @lorientedev
 
 ## [1.8.1](https://github.com/idealista/airflow-role/tree/1.8.1)
 
-- *[#68](https://github.com/idealista/airflow-role/issues/68) Update dependencies versions and solve some lint errors @lorientedev
+- [#68](https://github.com/idealista/airflow-role/issues/68) Update dependencies versions and solve some lint errors @lorientedev
 
 ## [1.8.0](https://github.com/idealista/airflow-role/tree/1.8.0)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.7.3...1.8.0)
+
 ### Added
-- *[#61](https://github.com/idealista/airflow-role/issues/61) Add KillSignal=SIGINT to workers service file* @jnogol
+
+- [#61](https://github.com/idealista/airflow-role/issues/61) Add KillSignal=SIGINT to workers service file @jnogol
+
 ### Fixed
-- *[#50](https://github.com/idealista/airflow-role/issues/50) Fix deprecation warning from jinja templates* @adrimarteau @jnogol
+
+- [#50](https://github.com/idealista/airflow-role/issues/50) Fix deprecation warning from jinja templates @adrimarteau @jnogol
 
 ## [1.7.3](https://github.com/idealista/airflow-role/tree/1.7.3)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.7.2...1.7.3)
+
 ### Fixed
+
 - *[#55](https://github.com/idealista/airflow-role/pull/55) Use `{{ airflow_home }}` to set the default `airflow_database_conn` in defaults/main.yml* @davestern
 
 ## [1.7.2](https://github.com/idealista/airflow-role/tree/1.7.2)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.7.1...1.7.2)
+
 ### Fixed
+
 - *[#47](https://github.com/idealista/airflow-role/issues/47) Fix web UI when using LDAP and Airflow>=1.10* @jnogol
 
 ## [1.7.1](https://github.com/idealista/airflow-role/tree/1.7.1)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.7.0...1.7.1)
+
 ### Changed
+
 - *[#44](https://github.com/idealista/airflow-role/issues/44) Make role compatible with Airflow 1.10.0* @jnogol
 - *Update Goss version to 0.3.6* @jnogol
 
 ## [1.7.0](https://github.com/idealista/airflow-role/tree/1.7.0)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.6.0...1.7.0)
+
 ### Changed
+
 - *Update default version to 1.9.0* @jnogol
 - *Ability to provide `airflow.cfg` template via playbooks* @jnogol
 - *[#41](https://github.com/idealista/airflow-role/issues/41) Update `airflow.cfg` template with 1.9.0 features* @jnogol
 
 ### Added
+
 - *Add sample DAG in tests to avoid scheduler issues* @jnogol
 - *Add `airflow_` tags in `main.yml`* @jnogol
 
 ## [1.6.0](https://github.com/idealista/airflow-role/tree/1.6.0)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.5.0...1.6.0)
+
 ### Changed
+
 - *[#38](https://github.com/idealista/airflow-role/pull/38) Parametrized PID files location, and create it on startup if it doesn't already exist.* @fhalim
 
 ## [1.5.0](https://github.com/idealista/airflow-role/tree/1.5.0)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.4.0...1.5.0)
+
 ### Added
+
 - *[#36](https://github.com/idealista/airflow-role/issues/32) Add tasks in config to create variables and connections used in Airflow DAGs.* @deytao
 
 ## [1.4.0](https://github.com/idealista/airflow-role/tree/1.4.0)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.3.2...1.4.0)
+
 ### Added
+
 - *[#32](https://github.com/idealista/airflow-role/issues/32) Use Goss instead of Testinfra* @jnogol
 
 ### Fixed
+
 - *[#33](https://github.com/idealista/airflow-role/pull/33) Travis working* @jnogol
 
 ### Changed
+
 - *[#31](https://github.com/idealista/airflow-role/pull/31) Using import_tasks instead of include and avoiding pip cache* @sschaetz
 - *[#34](https://github.com/idealista/airflow-role/pull/34) Add config to have customizable pip and airflow executables* @deytao
 
 ## [1.3.2](https://github.com/idealista/airflow-role/tree/1.3.2)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.3.1...1.3.2)
+
 ### Fixed
+
 - *[#28](https://github.com/idealista/airflow-role/issues/28) Fix Init DB task hang* @jnogol
 
 ## [1.3.1](https://github.com/idealista/airflow-role/tree/1.3.1)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.3.0...1.3.1)
+
 ### Fixed
+
 - *Deleted DAGs automatic management: better do it with CI tools* @jnogol
 - *Deleted port bindings in molecule.yml: they weren't necessary* @jnogol
 - *Better and more understandable format for dags_dependencies variable* @jnogol
 
 ## [1.3.0](https://github.com/idealista/airflow-role/tree/1.3.0)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.2.0...1.3.0)
+
 ### Added
+
 - *Testinfra tests in Travis CI* @jnogol
 - *DAGs and plugins automatic management via repositories and cron job* @jnogol
 - *Docker environment in Molecule* @jnogol
 
 ### Fixed
+
 - *Test if service is running in test_ansible.py now working* @jnogol
 
 ## [1.2.0](https://github.com/idealista/airflow-role/tree/1.2.0)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.1.0...1.2.0)
+
 ### Added
+
 - *Travis CI integration added* @jnogol
 
 ### Fixed
+
 - *Authentication via LDAP now working* @jnogol
 - *Tiny bugs in tasks/install.yml fixed* @jnogol
 
 ## [1.1.0](https://github.com/idealista/airflow-role/tree/1.1.0)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.0.0...1.1.0)
+
 ### Added
+
 - *Added Celery Worker optional installation* @jnogol
 - *Added Celery Flower service configuration* @jnogol
 
 ## [1.0.0](https://github.com/idealista/airflow-role/tree/1.0.0)
+
 ### Added
+
 - *First release*

From 64bae95c4396c071ac0a8b17848c8f8e4ba98e7b Mon Sep 17 00:00:00 2001
From: David Mateo
Date: Tue, 13 Apr 2021 13:22:46 +0200
Subject: [PATCH 21/21] Update changelog

---
 CHANGELOG.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7df7fa0..d9bd77a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,8 @@ This project adheres to [Semantic Versioning](http://semver.org/) and [Keep a ch
 
 ## [Unreleased](https://github.com/idealista/airflow-role/tree/develop)
 
+## [2.0.0](https://github.com/idealista/airflow-role/tree/2.0.0)
+
 [Full Changelog](https://github.com/idealista/airflow-role/compare/1.8.4...2.0.0)
 
 ### Added
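The example Fernet keys shipped in the molecule group_vars above are placeholders; a real deployment needs its own. One common way to generate a valid key, assuming the `cryptography` package (an Airflow dependency) is available, is:

```shell
# Prints a 32-byte url-safe base64 key suitable for airflow_fernet_key.
python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"
```

The printed string is what the `airflow_fernet_key` variable expects.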