diff --git a/.github/workflows/molecule_tests.yml b/.github/workflows/molecule_tests.yml
index abb151d691..e31f7656cc 100644
--- a/.github/workflows/molecule_tests.yml
+++ b/.github/workflows/molecule_tests.yml
@@ -96,6 +96,7 @@ jobs:
           - timezone
           - tippecanoe
           - towerdeploy
+          - ufw_firewall
           - video_reserves
           - vips
           # - zookeeper
diff --git a/group_vars/abid/staging.yml b/group_vars/abid/staging.yml
index 33fef19eb1..cd6f49c8e4 100644
--- a/group_vars/abid/staging.yml
+++ b/group_vars/abid/staging.yml
@@ -1,4 +1,21 @@
 ---
+# firewall
+ufw_firewall_rules:
+  - service: ssh
+    port: 22
+    protocol: tcp
+    allowed_cidrs:
+      - 10.249.64.0/18
+      - 10.249.0.0/18
+      - 128.112.0.0/16
+      - 172.20.95.0/24
+      - 172.20.192.0/19
+  - service: http
+    port: 80
+    protocol: tcp
+    allowed_cidrs:
+      - 128.112.200.0/21
+      - 128.112.0.0/16
 postgres_host: "lib-postgres-staging1.princeton.edu"
 postgres_version: 15
 postgresql_is_local: false
diff --git a/group_vars/crowdstrike/vault.yml b/group_vars/crowdstrike/vault.yml
index d50e3150f9..4cc2240d47 100644
--- a/group_vars/crowdstrike/vault.yml
+++ b/group_vars/crowdstrike/vault.yml
@@ -1,8 +1,18 @@
 $ANSIBLE_VAULT;1.1;AES256
-66313262353430383663643734616264316630346662373630326264643261663737373836313764
-6534396535383733383834376634383031633832643931320a623536616134353333643932303436
-65666331306166306232663234623163653339316635336634313961306338356630613034393761
-3163336562393435660a396235363835353534623838666166613237646164663962363365353166
-63666131626136656632636265336133323836613035613465393066643666363837356533343865
-37383237333066373965616437373865336238653533373162643964346138636238646535623763
-313935613765663631376533386234636665
+32343739356133353166333634656236623262653930633561396335653933396232396530653737
+3266643139643339626334343335346336616166363738640a366262343862393233626462376662
+37326132396636633561346562636532663037376666643865386535613965353135383464396536
+6335613037643339630a333132313636376437626530323031346237666665373239353439363337
+38666538653331366166623062306430646666633839623362623231633763386464313466333164
+64636134383461646133336665646130363364363436623234383134336661656637353339666666
+65623531393339366630646332633937623162363261303261373962383234373832373765623433
+38343030623432393030306433363336366261643134333336633330313063373833666136326235
+62313963353636356638313531353133393136386431633337663538386135663536636336336234
+39313762363633656130313765376263643863646434376537316662326338613237333063306662
+65306465663961383264303864616363653631623538636632613033643136386636633366356465
+38666165356132353763653565666332616438643865636437646230613862333335343561653464
+61303832323831613262623631616335303534313366653232663463636536623832326565636537
+32626662343364393334616466373631303136666431326336376165366238333632376338363136
+62356164663734666631393038613961633139323063343166346230393361623536353666336233
+65396133363030393333333962616666393632303366326134303035383961386135353233333633
+63316161623637633035653139363964393862333831363334623733646238393930
diff --git a/group_vars/gitlab/vault.yml b/group_vars/gitlab/vault.yml
index f54bbfe513..0991d9b6ff 100644
--- a/group_vars/gitlab/vault.yml
+++ b/group_vars/gitlab/vault.yml
@@ -1,29 +1,29 @@
 $ANSIBLE_VAULT;1.1;AES256
-34393763653934633532386366376138386134313266653464643836653235323436393566643937
-6433373830646435393434316566326262613961366139390a353138356661313935333533643632
-66393932323939666638633138646665323964653236633563653164633566643962343565613833
-6263366666623131360a333633643064633639626334623933353964643431626335373666653537
-64336565626166323135343934636665396230313732636232386334366664316130366232303866
-38333437623763346137336437313365366334613232616636656663336363336365336164386532
-64636235653334636233363638363061623433653031663063353833356562373965333763636336
-30613439313734666438393662323533326233376463643539653537386239613663386231633535
-32663732336232393762303731653438353064613236646564326265666235616534316530663161
-33343264393035623465666636346561386466303038313336393239643630333963386339343632
-36353036323764383466613432663137646634646162636564396439393034656239383464396438
-33366266653430353964643734646261663835643132616331316365663239623035346431323761
-65386561613566636530633132376436323832666438323231656239636563386136656565636361
-38393934353536373562646332643336383333353634313630346662653162303635356362396165
-63346434386164633762663261333435663435636531663263306337643565633365323364636132
-34386436356364643339346235366634336636306465316263653832333835303138663031343433
-63383663316535323361643432393136613738616535653339376133316164313330663262303833
-33373737666331343935613963366266396138633837626238326263616466333136313834373038
-63333031313432633163646361353961336132376433653936333039656235393163346133343430
-34323037336264643766643364363063303031613163653535393761396365623039346230393261
-33313566373030353535313164363538393763613036366665663132303233313631386565633461
-38396331363138646432363337336630356237343963376231373732633238653263383861303730
-63306331616163333237366364373665303834386266313261646534643630363861646565326565
-30616437663937373433333832323633306335383061653938393762333938626632333633386639
-36346265343831386566373432336134663035636333333765633566613364383233633233376433
-62663432363634643639346533393964336432636565633733366430363238323036613530373864
-39393533653437646433326231303466366166343933613130653132356166303635306632343061
-35353838316432653565
+32376636666536333665633561653865653234316465303639633862643639386630353166333533
+3237636633353232333933643239323431623961663836340a346633633030323836343530643133
+37653737626539346134643665633165346637363834323532626463323861653163363637306164
+6435326230363031340a316134326163383035383736666338336362376332383237616464633664
+65326530633235643730353264633235613164633433376636383734353861303630613465303134
+65393264613262623061326661393232316139343335666330303166346131383561386532383761
+31316639303737623562323232376239323862646361336466363937393830303338626266626337
+33383038373532323635303337396538373962383338616662366634653936663464343439663331
+38616130616533373330656462663164353130346438396562663235373732383934646562353835
+35333862643837396435333330656461623430396261636333343636313865336433633866366430
+65383438393839396634313361623535316635626463373065616534336635366434313138306131
+38656663613839303336323431323832373238616163626366363932303437373434383537363636
+39306162366565323366636462366461653165333339306262376332643863343966323733343230
+31643762353363623132336562626437393064616163636463633831633964326539626132313937
+31646231643633303532386533393662626466303062653937356537643661356236613938323961
+61643139333765323265343837656531336430663132653661376665343965353332353865383230
+64396431366538363462316637343863333762396133633732616539386430666664396139306631
+64646165316563663031383836356633343239376533616331636166303530613961303831653632
+31626439356135333161306132623339626534396132613633306363356630653931363562306636
+64343830376338623835616635353834303463326333376562313364636562303334643361336362
+64383739636562346332323132386439663061376438623964373266636330663337393337346233
+66373730366335396130323231326330323331663930616333346339376661323639363131653731
+36353139613334303337396338353263356362663864633636376663633061363632626337666238
+32636139643263336133323665356236643034633235666131333364346466643361373261383039
+62643230396330383366326265346564306637616638666538643063623134306462643466333239
+39373936666339653331613061356631333437623236643233653162663464663632333936343334
+66616461626131386634636564303538633861626462346366636663353832376566346239386461
+35656436633332626438
diff --git a/group_vars/nfsserver/staging.yml b/group_vars/nfsserver/staging.yml
index 6e208421ec..699a627d88 100644
--- a/group_vars/nfsserver/staging.yml
+++ b/group_vars/nfsserver/staging.yml
@@ -32,7 +32,7 @@ prds_staging1: "128.112.200.237"
 prds_staging2: "128.112.203.122"
 recap_www_staging1: "128.112.203.230"
 recap_www_staging2: "128.112.203.131"
-slavery_staging1: "128.112.202.236"
+slavery_staging1: "172.20.80.74"
 slavery_staging2: "128.112.203.152"
 slavery_staging_upgrade1: "172.20.80.68"
 slavery_staging_upgrade2: "172.20.80.69"
diff --git a/group_vars/nomad/dpulc/staging.yml b/group_vars/nomad/dpulc/staging.yml
index 59a9444909..44fa965278 100644
--- a/group_vars/nomad/dpulc/staging.yml
+++ b/group_vars/nomad/dpulc/staging.yml
@@ -8,6 +8,9 @@ dpul_c_postgres_admin_user: "postgres"
 dpul_c_staging_figgy_db_host: "figgy-db-staging1.princeton.edu"
 dpul_c_staging_figgy_db_name: "figgy_staging"
 dpul_c_staging_figgy_db_password: "{{ vault_dpul_c_staging_figgy_db_password }}"
+dpul_c_production_figgy_db_host: "figgy-db-prod1.princeton.edu"
+dpul_c_production_figgy_db_name: "figgy_production"
+dpul_c_production_figgy_db_password: "{{ vault_dpul_c_production_figgy_db_password }}"
 dpul_c_nomad_env_vars:
   DB_NAME: '{{ dpul_c_db_name }}'
   DB_USER: '{{ dpul_c_db_user }}'
@@ -15,4 +18,4 @@ dpul_c_nomad_env_vars:
   POSTGRES_HOST: '{{ dpul_c_postgres_host }}'
   SECRET_KEY_BASE: '{{ vault_dpul_c_secret_key_base }}'
   SOLR_URL: 'http://lib-solr8d-staging.princeton.edu:8983/solr/dpulc-staging'
-  FIGGY_DATABASE_URL: 'ecto://dpulc_staging:{{ dpul_c_staging_figgy_db_password }}@{{ dpul_c_staging_figgy_db_host }}/{{ dpul_c_staging_figgy_db_name }}'
+  FIGGY_DATABASE_URL: 'ecto://dpulc_staging:{{ dpul_c_production_figgy_db_password }}@{{ dpul_c_production_figgy_db_host }}/{{ dpul_c_production_figgy_db_name }}'
diff --git a/group_vars/nomad/dpulc/vault.yml b/group_vars/nomad/dpulc/vault.yml
index 0235adb31d..c848207edb 100644
--- a/group_vars/nomad/dpulc/vault.yml
+++ b/group_vars/nomad/dpulc/vault.yml
@@ -1,17 +1,21 @@
 $ANSIBLE_VAULT;1.1;AES256
-63636438363930626363653233343036616165326137643766666434353866366364353534393063
-6434383035643037393439353537356438393337316465630a346539343536373065316362353433
-38656261316139623364373064366439643937616466616230303538333235303562303035373364
-3461663163663461360a366134363531656234633663396235643962343530333964653733646136
-62373532356534643264336538386335346239343035666535646638333739316639316466633164
-62663761333136306463623861346665316165343561363461316664356233313630333630333433
-62626234623938663934643239653733366234636236386637396463663635386666643938313263
-37643038653238646363313537386162383634336365363066646432386134303630393563303765
-64636337653433393130343035373861396165623463333837333734356331323432346330663564
-35376362363338613862366561653233636661323662353036346165353732323635396364373065
-65333737313934346165336661633035666564306336626563643035633434333361336131333133
-35633631393236666236353033333439613335653562383766646334366337653430616538306633
-32373636636361316233336433326331663335323734363364376533353866363363333436363462
-35373539303333633936386162633336363330393032653733656233303630636665356664663834
-35333235383865323531303962653961356661373233353731303232363437633436376364646538
-39663432346562343637
+33366231336536626433396436666233376138363135333133376332383538613837343330393031
+3764373330613038623135393661323131316639363333330a383630396365393762663033313138
+61396238353364653339343166656139363166646365383633623932353334623230663866623231
+6363663530336435320a636331656530623138613835306239626636396261326531313661393466
+37326334323332656430326561626134346434343032363632386637633466636130396261373333
+65633838383037353432343337653835383333383637373965616464613263323733613066363661
+36326163343263633939356132633636326438316266303262383837653034373539386266323730
+64323034643466393565316135386630346135373763373037346338383439353765336439376262
+62636562386430346364353563646166353835616165373237383539633230336530326165383131
+31623766363133666333636538396363623930323666353166616535306435353265353661356633
+62363534333234353030316663306464393963313362316663323362316564373933613862643735
+66636433633663313033353763663833633433626436333037323831356339626131303738383961
+61653835333839303463363538386632323536336138356338316537633033653832383165333262
+31373935373633663732633934303237636533353365373236336261363135666466383830643566
+61626536326139646435303764316565343161323835663839383136666132333161363666663562
+66303361653161633533306132366263316264353531353038373865373933383166376336383865
+31626335376431363363396432613137316132386534653763326566366664353234633139633066
+61326565373762636633333738326539323063633431343038613839646238333533396465626161
+30663838383362646235373532363865356630333334646538383939656363313738636336396233
+32313830373264653164
diff --git a/group_vars/nomad_cluster.yml b/group_vars/nomad_cluster.yml
index 13af1e5ee9..d21b6c96ba 100644
--- a/group_vars/nomad_cluster.yml
+++ b/group_vars/nomad_cluster.yml
@@ -27,3 +27,15 @@ deploy_user_uid: 1004
 nomad_podman_version: '0.5.2'
 nomad_server_consul_token: "{{ vault_nomad_server_consul_token }}"
 nomad_client_consul_token: "{{ vault_nomad_client_consul_token }}"
+nomad_plugins:
+  nomad-driver-podman:
+    config:
+      volumes:
+        enabled: true
+        selinuxlabel: "z"
+      extra_labels:
+        - "job_name"
+        - "task_group_name"
+        - "task_name"
+        - "namespace"
+        - "node_name"
diff --git a/group_vars/pdc_describe/main.yml b/group_vars/pdc_describe/main.yml
index 2729052a2c..2218369c81 100644
--- a/group_vars/pdc_describe/main.yml
+++ b/group_vars/pdc_describe/main.yml
@@ -34,7 +34,7 @@ bundler_version: "2.5.14"
 passenger_extra_http_config:
   - "passenger_preload_bundler on;"
 
-sidekiq_worker_threads: 10
+sidekiq_worker_threads: 15
 
 rails_app_vars:
   - name: SECRET_KEY_BASE
@@ -92,4 +92,4 @@ rails_app_vars:
   - name: EZID_DEFAULT_SHOULDER
     value: "ark:/88435/"
   - name: RAILS_MAX_THREADS
-    value: 10
+    value: 15
diff --git a/playbooks/abid.yml b/playbooks/abid.yml
index f25b9e8fa5..2ef507c824 100644
--- a/playbooks/abid.yml
+++ b/playbooks/abid.yml
@@ -10,6 +10,8 @@
     - ../group_vars/abid/{{ runtime_env | default('staging') }}.yml
     - ../group_vars/abid/vault.yml
   roles:
+    - role: roles/ufw_firewall
+      when: runtime_env == "staging"
     - role: roles/abid
 
   post_tasks:
diff --git a/playbooks/utils/replace_vm_host.yml b/playbooks/utils/replace_vm_host.yml
index c58c50768d..c9101e3a02 100644
--- a/playbooks/utils/replace_vm_host.yml
+++ b/playbooks/utils/replace_vm_host.yml
@@ -35,9 +35,14 @@
 
     - name: set the vm network to the private network if the IP starts with 172.20
       ansible.builtin.set_fact:
-        vm_network: "VM Network - LibNetPvt"
+        vm_network: "VM Network - ip4-library-servers"
      when: old_vm_info.virtual_machines[0].ip_address is match("172.20.*")
 
+    - name: set the vm network to the public network if the IP starts with 128.112
+      ansible.builtin.set_fact:
+        vm_network: "Virtual Machine Network"
+      when: old_vm_info.virtual_machines[0].ip_address is match("128.112.*")
+
     - name: Print out warning
       ansible.builtin.debug:
         msg: Ansible will now move and power off the current {{ old_vm_info.virtual_machines[0].guest_name }} VM, then create a replacement in the {{ vm_network }} network.
@@ -92,7 +97,7 @@
           unit_number: 0
         state: present
         networks:
-          - name: "{{ vm_network | default('Virtual Machine Network')}}"
+          - name: "{{ vm_network }}"
         wait_for_ip_address: true
       register: new_vm_deets
 
@@ -120,7 +125,7 @@
         datacenter: "{{ vcenter_datacenter }}"
         # new VM var does not include UUID; use moid, which is unique in each vCenter instance
         moid: "{{ new_vm_deets.instance.moid }}"
-        network_name: "{{ vm_network | default('Virtual Machine Network')}}"
+        network_name: "{{ vm_network }}"
         state: absent
         mac_address: "{{ new_vm_deets.instance.hw_eth0.macaddress }}"
 
@@ -134,7 +139,7 @@
         # new VM var does not include UUID; use moid, which is unique in each vCenter instance
         moid: "{{ new_vm_deets.instance.moid }}"
         folder: "{{ old_vm_info.virtual_machines[0].folder }}"
-        network_name: "{{ vm_network | default('Virtual Machine Network')}}"
+        network_name: "{{ vm_network }}"
         device_type: "vmxnet3"
         connected: true
         mac_address: "{{ old_vm_info.virtual_machines[0].mac_address[0] }}"
@@ -170,6 +175,7 @@
           The VM you replaced had an UUID of {{ old_vm_info.virtual_machines[0].uuid }}
           a mac address of {{ old_vm_info.virtual_machines[0].mac_address[0] }}
+          in the {{ vm_network }} network
           {{ old_vm_info.virtual_machines[0].allocated.cpu }} CPUs
           {{ (old_vm_info.virtual_machines[0].allocated.memory | int / 1024) }} GB of memory
           {{ old_vm_info.virtual_machines[0].allocated.storage | human_readable }} of disk allocated
 
@@ -182,6 +188,7 @@
           The VM you just created has an UUID of {{ new_vm_info.virtual_machines[0].uuid }}
           a mac address of {{ new_vm_info.virtual_machines[0].mac_address[0] }}
+          in the {{ vm_network }} network
           {{ new_vm_info.virtual_machines[0].allocated.cpu }} CPUs
           {{ (new_vm_info.virtual_machines[0].allocated.memory | int / 1024) }} GB of memory
           {{ new_vm_info.virtual_machines[0].allocated.storage | human_readable }} of disk allocated
diff --git a/playbooks/utils/security_theater.yml b/playbooks/utils/security_theater.yml
index 4df8d1fde4..085339de33 100644
--- a/playbooks/utils/security_theater.yml
+++ b/playbooks/utils/security_theater.yml
@@ -3,16 +3,29 @@
 # By default this playbook runs on all hosts in the three environment groups. To run against a single host or group, use '--limit '. For example '--limit qa' or '--limit figgy-web-staging1.princeton.edu'."
 #
 - name: install OIT Security Tools on a host
-  hosts: staging:qa:production
+  hosts: all
   remote_user: pulsys
   serial: "{{ concurrent_vms | default('5') }}"
-  become: true
   vars_files:
     - ../../group_vars/crowdstrike/vault.yml
     - ../../group_vars/crowdstrike/vars.yml
     - ../../group_vars/all/vars.yml
     - ../../group_vars/all/vault.yml
+  roles:
+    - role: crowdstrike.falcon.falcon_install
+      vars:
+        falcon_client_id: "{{ vault_crowdstrike_client_id }}"
+        falcon_client_secret: "{{ vault_crowdstrike_secret }}"
+        falcon_sensor_version_decrement: 2
+        # be sure to add this so we don't download if we don't need to
+        falcon_api_sensor_download_path: /opt/
+
+    - role: crowdstrike.falcon.falcon_configure
+      vars:
+        falcon_client_id: "{{ vault_crowdstrike_client_id }}"
+        falcon_client_secret: "{{ vault_crowdstrike_secret }}"
+
   tasks:
     - name: Populate service facts
       ansible.builtin.service_facts:
 
@@ -66,28 +79,6 @@
         - "ansible_facts.services['besclient.service'] is not defined"
         - ansible_os_family == "RedHat"
 
-    - name: Download the Falcon sensor deb file (Ubuntu)
-      ansible.builtin.get_url:
-        url: "https://isoshare.cpaneldev.princeton.edu/isoShares/Agents/Falcon/Latest/linux/Ubuntu/14_16_18_20_22/falcon-sensor_7.05.0-16004_amd64.deb"
-        dest: "/tmp/falcon-sensor_7.05.0-16004_amd64.deb"
-        owner: pulsys
-        group: pulsys
-        mode: "0644"
-      when:
-        - "'falcon-sensor' not in ansible_facts.packages"
-        - ansible_os_family == "Debian"
-
-    - name: Download the Falcon sensor rpm file (RedHat)
-      ansible.builtin.get_url:
-        url: "https://isoshare.cpaneldev.princeton.edu/isoShares/Agents/Falcon/Latest/linux/RHEL/Oracle/9/falcon-sensor-7.02.0-15705.el9.x86_64.rpm"
-        dest: "/tmp/falcon-sensor_7.05.0-16004_el9.x86_64.rpm"
-        owner: pulsys
-        group: pulsys
-        mode: "0644"
-      when:
-        - "'falcon-sensor' not in ansible_facts.packages"
-        - ansible_os_family == "RedHat"
-
     - name: install BESClient agent (Ubuntu)
       ansible.builtin.apt:
         deb: "/tmp/BESAgent-10.0.7.52-debian6.amd64.deb"
@@ -107,36 +98,6 @@
       ansible.builtin.command: /etc/init.d/besclient start
       when: "ansible_facts.services['besclient.service'] is not defined"
 
-    - name: install crowdstrike falcon sensor agent (Ubuntu)
-      ansible.builtin.apt:
-        deb: "/tmp/falcon-sensor_7.05.0-16004_amd64.deb"
-      when:
-        - "'falcon-sensor' not in ansible_facts.packages"
-        - ansible_os_family == "Debian"
-
-    - name: install crowdstrike falcon sensor agent (RedHat)
-      ansible.builtin.dnf:
-        name: "/tmp/falcon-sensor_7.05.0-16004_el9.x86_64.rpm"
-        disable_gpg_check: true
-        state: present
-      when:
-        - "'falcon-sensor' not in ansible_facts.packages"
-        - ansible_os_family == "RedHat"
-
-    - name: launch crowdstrike falcon agent
-      command: /opt/CrowdStrike/falconctl -s --cid={{ princeton_cid }}
-      become: true
-      when:
-        - "'falcon-sensor' not in ansible_facts.packages"
-
-    - name: start and enable crowdstrike falcon agent
-      ansible.builtin.systemd_service:
-        name: "falcon-sensor"
-        enabled: true
-        state: started
-      when:
-        - "'falcon-sensor' not in ansible_facts.packages"
-
     - name: Check for rapid7 path
       ansible.builtin.stat:
         path: /opt/rapid7
@@ -172,7 +133,7 @@
       when:
        - not rapid7_home.stat.exists
 
-  post_tasks:
-    - name: send information to slack
-      ansible.builtin.include_tasks:
-        file: slack_tasks_end_of_playbook.yml
+  # post_tasks:
+  #   - name: send information to slack
+  #     ansible.builtin.include_tasks:
+  #       file: slack_tasks_end_of_playbook.yml
diff --git a/playbooks/utils/ufw_firewall.yml b/playbooks/utils/ufw_firewall.yml
new file mode 100644
index 0000000000..d0d0abc125
--- /dev/null
+++ b/playbooks/utils/ufw_firewall.yml
@@ -0,0 +1,20 @@
+---
+# You MUST run this playbook against a host or group with '--limit', for example: `ansible-playbook -v --limit pdc_describe_staging playbooks/utils/ufw_firewall.yml`
+#
+- name: Run to install ufw
+  hosts: all
+  # hosts: staging:qa:production
+  remote_user: pulsys
+  become: true
+  vars:
+    running_on_server: true
+
+  pre_tasks:
+    - name: stop playbook if you didn't pass --limit
+      fail:
+        msg: "you must use -l or --limit"
+      when: ansible_limit is not defined
+      run_once: true
+
+  roles:
+    - role: roles/ufw_firewall
diff --git a/roles/datadog/molecule/default/ansible.cfg b/roles/datadog/molecule/default/ansible.cfg
new file mode 100644
index 0000000000..ef0ee75b54
--- /dev/null
+++ b/roles/datadog/molecule/default/ansible.cfg
@@ -0,0 +1,2 @@
+[defaults]
+remote_tmp = /root/tmp/.ansible/tmp
diff --git a/roles/example/meta/main.yml b/roles/example/meta/main.yml
index bf4512d21a..82248f8112 100644
--- a/roles/example/meta/main.yml
+++ b/roles/example/meta/main.yml
@@ -12,6 +12,6 @@ galaxy_info:
   platforms:
     - name: Ubuntu
       versions:
-        - bionic
+        - jammy
 dependencies:
   - role: ""
diff --git a/roles/pul_nomad/tasks/dpul_c.yml b/roles/pul_nomad/tasks/dpul_c.yml
index ef4f040b00..e30ffd0535 100644
--- a/roles/pul_nomad/tasks/dpul_c.yml
+++ b/roles/pul_nomad/tasks/dpul_c.yml
@@ -21,7 +21,17 @@
   become_user: '{{ dpul_c_postgres_admin_user }}'
   run_once: true
 
-- name: 'pul_nomad_dpulc | create figgy postgresql db user'
+- name: 'pul_nomad_dpulc | ensure access to postgres server for client connections'
+  ansible.builtin.lineinfile:
+    path: '/etc/postgresql/{{ dpul_c_postgres_version }}/main/pg_hba.conf'
+    line: 'host all all {{ ansible_default_ipv4.address }}/32 md5'
+  delegate_to: '{{ dpul_c_postgres_host }}'
+  register: remote_postgres_configured
+  throttle: 1 # have this task run one machine at a time to avoid race condition
+  when:
+    - "nomad_node_role == 'client'"
+
+- name: 'pul_nomad_dpulc | create figgy staging postgresql db user'
   community.postgresql.postgresql_user:
     name: 'dpulc_staging'
     password: '{{ dpul_c_staging_figgy_db_password }}'
@@ -32,7 +42,7 @@
   become_user: '{{ dpul_c_postgres_admin_user }}'
   run_once: true
 
-- name: 'pul_nomad_dpulc | grant read privileges for read-only user'
+- name: 'pul_nomad_dpulc | grant read privileges for figgy staging read-only user'
   delegate_to: '{{ dpul_c_staging_figgy_db_host }}'
   become: true
   become_user: '{{ dpul_c_postgres_admin_user }}'
@@ -46,21 +56,46 @@
       roles: 'dpulc_staging'
       grant_option: true
 
-- name: 'pul_nomad_dpulc | ensure access to postgres server for client connections'
+- name: 'pul_nomad_dpulc | ensure access to figgy staging postgres server for client connections'
   ansible.builtin.lineinfile:
     path: '/etc/postgresql/{{ dpul_c_postgres_version }}/main/pg_hba.conf'
     line: 'host all all {{ ansible_default_ipv4.address }}/32 md5'
-  delegate_to: '{{ dpul_c_postgres_host }}'
-  register: remote_postgres_configured
+  delegate_to: '{{ dpul_c_staging_figgy_db_host }}'
+  register: remote_figgy_postgres_configured
   throttle: 1 # have this task run one machine at a time to avoid race condition
   when:
     - "nomad_node_role == 'client'"
 
-- name: 'pul_nomad_dpulc | ensure access to figgy postgres server for client connections'
+- name: 'pul_nomad_dpulc | create figgy production postgresql db user'
+  community.postgresql.postgresql_user:
+    name: 'dpulc_staging'
+    password: '{{ dpul_c_production_figgy_db_password }}'
+    encrypted: true
+    state: 'present'
+  delegate_to: '{{ dpul_c_production_figgy_db_host }}'
+  become: true
+  become_user: '{{ dpul_c_postgres_admin_user }}'
+  run_once: true
+
+- name: 'pul_nomad_dpulc | grant read privileges for figgy production read-only user'
+  delegate_to: '{{ dpul_c_production_figgy_db_host }}'
+  become: true
+  become_user: '{{ dpul_c_postgres_admin_user }}'
+  run_once: true
+  community.postgresql.postgresql_privs:
+    database: '{{ dpul_c_production_figgy_db_name }}'
+    state: present
+    privs: SELECT
+    type: table
+    objs: ALL_IN_SCHEMA
+    roles: 'dpulc_staging'
+    grant_option: true
+
+- name: 'pul_nomad_dpulc | ensure access to figgy production postgres server for client connections'
   ansible.builtin.lineinfile:
     path: '/etc/postgresql/{{ dpul_c_postgres_version }}/main/pg_hba.conf'
     line: 'host all all {{ ansible_default_ipv4.address }}/32 md5'
-  delegate_to: '{{ dpul_c_staging_figgy_db_host }}'
+  delegate_to: '{{ dpul_c_production_figgy_db_host }}'
   register: remote_figgy_postgres_configured
   throttle: 1 # have this task run one machine at a time to avoid race condition
   when:
@@ -74,7 +109,7 @@
     - remote_postgres_configured.changed
   delegate_to: '{{ dpul_c_postgres_host }}'
 
-- name: 'pul_nomad_dpulc | reload figgy postgresql'
+- name: 'pul_nomad_dpulc | reload figgy staging postgresql'
   ansible.builtin.service:
     name: postgresql
     state: reloaded
@@ -82,6 +117,14 @@
     - remote_postgres_configured.changed
   delegate_to: '{{ dpul_c_staging_figgy_db_host }}'
 
+- name: 'pul_nomad_dpulc | reload production figgy postgresql'
+  ansible.builtin.service:
+    name: postgresql
+    state: reloaded
+  when:
+    - remote_postgres_configured.changed
+  delegate_to: '{{ dpul_c_production_figgy_db_host }}'
+
 - name: 'pul_nomad_dpulc | add the application environment variables'
   ansible.builtin.shell:
     cmd: '/usr/local/bin/nomad var put -force nomad/jobs/dpulc-staging {{ dpul_c_nomad_env_vars.keys() | zip(dpul_c_nomad_env_vars.values()) | map("join", "=") | join(" ") }}'
diff --git a/roles/pul_nomad/tasks/main.yml b/roles/pul_nomad/tasks/main.yml
index 47435389e8..946331b828 100644
--- a/roles/pul_nomad/tasks/main.yml
+++ b/roles/pul_nomad/tasks/main.yml
@@ -215,16 +215,14 @@
     - "consul_node_role == 'client'"
     - created_agent_token_client
 
+# We need to get DNSMasq's 127.0.0.1 above the Princeton nameservers, but
+# NetworkManager manages DNS on RedHat machines. This updates the connection to
+# prepend the DNS server.
 - name: 'pul_nomad | Configure DNS for consul'
   become: true
-  ansible.builtin.lineinfile:
-    path: /etc/resolv.conf
-    regexp: ^nameserver 127.0.0.1
-    line: nameserver 127.0.0.1
-    create: true
-    owner: root
-    group: root
-    mode: u=rw,g=r,o=r
+  ansible.builtin.shell: '/bin/nmcli connection modify {{ ansible_default_ipv4.interface }} ipv4.dns "127.0.0.1" && /bin/nmcli connection up {{ ansible_default_ipv4.interface }}'
+  when:
+    - "ansible_os_family == 'RedHat'"
   tags:
     - notest
 
diff --git a/roles/ufw_firewall/README.md b/roles/ufw_firewall/README.md
new file mode 100644
index 0000000000..0711a6d25c
--- /dev/null
+++ b/roles/ufw_firewall/README.md
@@ -0,0 +1,47 @@
+# UFW Firewall Role
+
+This Ansible role configures the Uncomplicated Firewall (UFW) on our Linux systems. It allows you to define allowed and denied networks and ports, making it easy to manage your firewall rules.
+
+## Requirements
+
+- Ansible 2.9 or higher
+- Supported Operating Systems:
+  - Rocky Linux (tested on 9)
+  - Ubuntu (tested on jammy)
+
+## Role Variables
+
+The examples below allow ssh, http, and redis from the listed CIDR subnets. For ssh, make sure you use the [defaults/main.yml](defaults/main.yml) example or you will lose access to your VM.
+
+```yaml
+ufw_firewall_rules:
+  - service: ssh
+    port: 22
+    protocol: tcp
+    allowed_cidrs:
+      - 128.112.200.0/21
+  - service: http
+    port: 80
+    protocol: tcp
+    allowed_cidrs:
+      - 128.112.200.0/21
+      - 128.112.0.0/16
+  - service: redis
+    port: 6379
+    protocol: tcp
+    allowed_cidrs:
+      - 128.112.200.0/21
+      - 128.112.0.0/16
+```
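+
+## Example Playbook
+
+A minimal sketch of applying the role from a play; the host group below is illustrative, and `playbooks/utils/ufw_firewall.yml` in this repository wraps the same pattern behind a required `--limit`:
+
+```yaml
+- hosts: my_app_staging
+  remote_user: pulsys
+  become: true
+  roles:
+    - role: roles/ufw_firewall
+```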
diff --git a/roles/ufw_firewall/defaults/main.yml b/roles/ufw_firewall/defaults/main.yml
new file mode 100644
index 0000000000..769ef64226
--- /dev/null
+++ b/roles/ufw_firewall/defaults/main.yml
@@ -0,0 +1,19 @@
+---
+# defaults file for roles/ufw_firewall
+ufw_firewall_rules:
+  - service: ssh
+    port: 22
+    protocol: tcp
+    allowed_cidrs:
+      - 10.249.64.0/18
+      - 10.249.0.0/18
+      - 128.112.0.0/16
+      - 172.20.95.0/24
+      - 172.20.192.0/19
+# example of http in your group_vars/project
+  # - service: http
+  #   port: 80
+  #   protocol: tcp
+  #   allowed_cidrs:
+  #     - 128.112.200.0/21
+  #     - 128.112.0.0/16
diff --git a/roles/ufw_firewall/handlers/main.yml b/roles/ufw_firewall/handlers/main.yml
new file mode 100644
index 0000000000..9e8700c26b
--- /dev/null
+++ b/roles/ufw_firewall/handlers/main.yml
@@ -0,0 +1,6 @@
+---
+# handlers file for roles/ufw_firewall
+- name: restart ufw
+  ansible.builtin.service:
+    name: ufw
+    state: restarted
diff --git a/roles/ufw_firewall/meta/main.yml b/roles/ufw_firewall/meta/main.yml
new file mode 100644
index 0000000000..41eaaca095
--- /dev/null
+++ b/roles/ufw_firewall/meta/main.yml
@@ -0,0 +1,16 @@
+---
+galaxy_info:
+  role_name: ufw_firewall
+  company: Princeton University Library
+  description: UFW Firewall
+  author: pulibrary
+
+  license: MIT
+
+  min_ansible_version: "2.9"
+
+  platforms:
+    - name: Ubuntu
+      versions:
+        - jammy
+dependencies: []
diff --git a/roles/ufw_firewall/molecule/default/converge.yml b/roles/ufw_firewall/molecule/default/converge.yml
new file mode 100644
index 0000000000..80948325c9
--- /dev/null
+++ b/roles/ufw_firewall/molecule/default/converge.yml
@@ -0,0 +1,15 @@
+---
+- name: Converge
+  hosts: all
+  vars:
+    - running_on_server: false
+  become: true
+  pre_tasks:
+    - name: Update cache
+      ansible.builtin.apt:
+        update_cache: true
+        cache_valid_time: 600
+  tasks:
+    - name: "Include ufw_firewall"
+      ansible.builtin.include_role:
+        name: ufw_firewall
diff --git a/roles/ufw_firewall/molecule/default/molecule.yml b/roles/ufw_firewall/molecule/default/molecule.yml
new file mode 100644
index 0000000000..fc1de4aea1
--- /dev/null
+++ b/roles/ufw_firewall/molecule/default/molecule.yml
@@ -0,0 +1,22 @@
+---
+scenario:
+  name: default
+driver:
+  name: docker
+lint: |
+  set -e
+  yamllint .
+  ansible-lint
+platforms:
+  - name: instance
+    image: "ghcr.io/pulibrary/pul_containers:jammy_multi"
+    command: "sleep infinity"
+    volumes:
+      - /sys/fs/cgroup:/sys/fs/cgroup:ro
+    privileged: true
+    pre_build_image: true
+provisioner:
+  name: ansible
+  log: true
+verifier:
+  name: ansible
diff --git a/roles/ufw_firewall/molecule/default/verify.yml b/roles/ufw_firewall/molecule/default/verify.yml
new file mode 100644
index 0000000000..e1bdd1b9bd
--- /dev/null
+++ b/roles/ufw_firewall/molecule/default/verify.yml
@@ -0,0 +1,14 @@
+---
+- name: Verify
+  hosts: all
+  gather_facts: false
+  tasks:
+    - name: Check UFW status
+      ansible.builtin.command: ufw status
+      register: ufw_status
+      changed_when: false
+
+    - name: Assert UFW is active
+      ansible.builtin.assert:
+        that:
+          - "'Status: active' in ufw_status.stdout"
diff --git a/roles/ufw_firewall/tasks/main.yml b/roles/ufw_firewall/tasks/main.yml
new file mode 100644
index 0000000000..efd113bf75
--- /dev/null
+++ b/roles/ufw_firewall/tasks/main.yml
@@ -0,0 +1,22 @@
+---
+# tasks file for roles/ufw_firewall
+
+- name: UFW | Install UFW
+  ansible.builtin.package:
+    name: ufw
+    state: present
+
+- name: UFW | Enable UFW
+  community.general.ufw:
+    state: enabled
+
+- name: UFW | generic rules
+  ansible.builtin.include_tasks: rules.yml
+  loop: "{{ ufw_firewall_rules }}"
+  loop_control:
+    loop_var: rule
+
+- name: UFW | Deny all other incoming traffic
+  community.general.ufw:
+    rule: deny
+    direction: in
diff --git a/roles/ufw_firewall/tasks/rules.yml b/roles/ufw_firewall/tasks/rules.yml
new file mode 100644
index 0000000000..a3e5937f14
--- /dev/null
+++ b/roles/ufw_firewall/tasks/rules.yml
@@ -0,0 +1,8 @@
+---
+- name: Firewall | Allow variable defined rule on ufw
+  community.general.ufw:
+    rule: allow
+    src: "{{ item }}"
+    port: "{{ rule.port }}"
+    proto: "{{ rule.protocol }}"
+  loop: "{{ rule.allowed_cidrs }}"
diff --git a/roles/ufw_firewall/tests/inventory b/roles/ufw_firewall/tests/inventory
new file mode 100644
index 0000000000..878877b077
--- /dev/null
+++ b/roles/ufw_firewall/tests/inventory
@@ -0,0 +1,2 @@
+localhost
+
diff --git a/roles/ufw_firewall/tests/test.yml b/roles/ufw_firewall/tests/test.yml
new file mode 100644
index 0000000000..c2a6fe5c92
--- /dev/null
+++ b/roles/ufw_firewall/tests/test.yml
@@ -0,0 +1,5 @@
+---
+- hosts: localhost
+  remote_user: root
+  roles:
+    - roles/ufw_firewall
diff --git a/roles/ufw_firewall/vars/main.yml b/roles/ufw_firewall/vars/main.yml
new file mode 100644
index 0000000000..00391dd1a9
--- /dev/null
+++ b/roles/ufw_firewall/vars/main.yml
@@ -0,0 +1,2 @@
+---
+# vars file for roles/ufw_firewall
diff --git a/tower_ees/security-tools-ee.yml b/tower_ees/security-tools-ee.yml
new file mode 100644
index 0000000000..43fa5e0c61
--- /dev/null
+++ b/tower_ees/security-tools-ee.yml
@@ -0,0 +1,55 @@
+---
+# EE definition for our 'security-theater' playbook
+# to build an EE, run `ansible-builder build`
+
+# builder schema version
+version: 3
+
+images:
+  # build a new image based on this image
+  base_image:
+    name: registry.redhat.io/ansible-automation-platform-24/ee-supported-rhel8:1.0.0-543
+    # name: registry.redhat.io/ansible-automation-platform-24/ee-supported-rhel9
+
+dependencies:
+  ansible_core:
+    package_pip: ansible-core==2.15.7
+  ansible_runner:
+    package_pip: ansible-runner
+  python_interpreter:
+    python_path: "/usr/bin/python3"
+  galaxy:
+    # includes all collections used in the prancible repo
+    # versions taken from the Ansible 8 inclusion list:
+    #   https://github.com/ansible-community/ansible-build-data/blob/main/8/ansible-8.7.0.yaml
+    collections:
+      - name: community.general
+        version: 7.5.2
+      # these collections are not in Ansible package
+      # latest versions from Galaxy as of Oct. 2024
+      - name: crowdstrike.falcon
+        version: 4.6.0
+
+  # python packages, stuff you install with 'pip install'
+  python:
+    - six
+    - psutil
+
+  # system packages, stuff you install with 'apt/yum/dnf install'
+  # ansible-builder writes this list into the 'context/bindep.txt' file
+  system:
+    - git
+    - python3-dnf
+    - python3-pip
+options:
+  # default path is /usr/bin/dnf, which does not exist on the base image
+  package_manager_path: /usr/bin/microdnf
+
+# if you need to run more commands on the EE, use this section:
+# additional_build_steps:
+  # items in the list of append_base steps are expressed
+  # as containerfile directives
+  # prepend_base:
+  #   - RUN /usr/bin/apt-get update
+  # append_base:
+  #   - RUN /usr/bin/python3 -m pip install --upgrade pip