diff --git a/.ansible-lint b/.ansible-lint index e582a588..057c65e0 100755 --- a/.ansible-lint +++ b/.ansible-lint @@ -1,14 +1,18 @@ +--- + parseable: true quiet: true skip_list: - 'schema' - 'no-changed-when' - 'var-spacing' + - 'fqcn-builtins' - 'experimental' - 'name[play]' - 'name[casing]' - 'name[template]' - 'fqcn[action]' + - 'key-order[task]' - '204' - '305' - '303' diff --git a/.config/.gitleaks-report.json b/.config/.gitleaks-report.json new file mode 100644 index 00000000..ba634848 --- /dev/null +++ b/.config/.gitleaks-report.json @@ -0,0 +1,122 @@ +[ + { + "Description": "Generic API Key", + "StartLine": 133, + "EndLine": 133, + "StartColumn": 18, + "EndColumn": 68, + "Match": "secret\": \"0f5b530255e5a064cc73699e4fa44ba8b2ad399f\"", + "Secret": "0f5b530255e5a064cc73699e4fa44ba8b2ad399f", + "File": ".config/.secrets.baseline", + "SymlinkFile": "", + "Commit": "358016009cd8ec06f468d091aba4e92e984a8c4b", + "Entropy": 3.7561984, + "Author": "Mark Bolwell", + "Email": "mark.bollyuk@gmail.com", + "Date": "2023-09-11T10:19:54Z", + "Message": "updated secrets\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e", + "Tags": [], + "RuleID": "generic-api-key", + "Fingerprint": "358016009cd8ec06f468d091aba4e92e984a8c4b:.config/.secrets.baseline:generic-api-key:133" + }, + { + "Description": "Generic API Key", + "StartLine": 9, + "EndLine": 9, + "StartColumn": 5, + "EndColumn": 39, + "Match": "Secret\": \"grub.pbkdf2.sha512.10000\"", + "Secret": "grub.pbkdf2.sha512.10000", + "File": ".config/.gitleaks-report.json", + "SymlinkFile": "", + "Commit": "f046ed0c486cba258a6d50e7124566a314b87c8e", + "Entropy": 3.8035088, + "Author": "Mark Bolwell", + "Email": "mark.bollyuk@gmail.com", + "Date": "2023-09-11T09:06:43Z", + "Message": "added pre-commit setup\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e", + "Tags": [], + "RuleID": "generic-api-key", + "Fingerprint": "f046ed0c486cba258a6d50e7124566a314b87c8e:.config/.gitleaks-report.json:generic-api-key:9" + }, + { + "Description": "Generic API Key", + "StartLine": 125, + "EndLine": 125, + "StartColumn": 18, + "EndColumn": 68, + "Match": "secret\": \"4fae1797297d5c73819a504516f2de7740e4b52d\"", + "Secret": "4fae1797297d5c73819a504516f2de7740e4b52d", + "File": ".config/.secrets.baseline", + "SymlinkFile": "", + "Commit": "f046ed0c486cba258a6d50e7124566a314b87c8e", + "Entropy": 3.7898228, + "Author": "Mark Bolwell", + "Email": "mark.bollyuk@gmail.com", + "Date": "2023-09-11T09:06:43Z", + "Message": "added pre-commit setup\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e", + "Tags": [], + "RuleID": "generic-api-key", + "Fingerprint": "f046ed0c486cba258a6d50e7124566a314b87c8e:.config/.secrets.baseline:generic-api-key:125" + }, + { + "Description": "Generic API Key", + "StartLine": 135, + "EndLine": 135, + "StartColumn": 18, + "EndColumn": 68, + "Match": "secret\": \"f395ee0a2d842bfcf81da0aad13591e2a9311fe1\"", + "Secret": "f395ee0a2d842bfcf81da0aad13591e2a9311fe1", + "File": ".config/.secrets.baseline", + "SymlinkFile": "", + "Commit": "f046ed0c486cba258a6d50e7124566a314b87c8e", + "Entropy": 3.618454, + "Author": "Mark Bolwell", + "Email": "mark.bollyuk@gmail.com", + "Date": "2023-09-11T09:06:43Z", + "Message": "added pre-commit setup\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e", + "Tags": [], + "RuleID": "generic-api-key", + "Fingerprint": "f046ed0c486cba258a6d50e7124566a314b87c8e:.config/.secrets.baseline:generic-api-key:135" + }, + { + "Description": "Generic API Key", + 
"StartLine": 145, + "EndLine": 145, + "StartColumn": 18, + "EndColumn": 68, + "Match": "secret\": \"2aaf9f2a51d8fe89e48cb9cc7d04a991ceb7f360\"", + "Secret": "2aaf9f2a51d8fe89e48cb9cc7d04a991ceb7f360", + "File": ".config/.secrets.baseline", + "SymlinkFile": "", + "Commit": "f046ed0c486cba258a6d50e7124566a314b87c8e", + "Entropy": 3.8439426, + "Author": "Mark Bolwell", + "Email": "mark.bollyuk@gmail.com", + "Date": "2023-09-11T09:06:43Z", + "Message": "added pre-commit setup\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e", + "Tags": [], + "RuleID": "generic-api-key", + "Fingerprint": "f046ed0c486cba258a6d50e7124566a314b87c8e:.config/.secrets.baseline:generic-api-key:145" + }, + { + "Description": "Generic API Key", + "StartLine": 479, + "EndLine": 479, + "StartColumn": 23, + "EndColumn": 63, + "Match": "password_hash: \"grub.pbkdf2.sha512.10000\"", + "Secret": "grub.pbkdf2.sha512.10000", + "File": "defaults/main.yml", + "SymlinkFile": "", + "Commit": "ea067d7f8f12f2a81d7b2b99449799b1fae1ae51", + "Entropy": 3.8035088, + "Author": "Mark Bolwell", + "Email": "mark.bollyuk@gmail.com", + "Date": "2023-07-10T15:12:00Z", + "Message": "updated default vars\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e", + "Tags": [], + "RuleID": "generic-api-key", + "Fingerprint": "ea067d7f8f12f2a81d7b2b99449799b1fae1ae51:defaults/main.yml:generic-api-key:479" + } +] diff --git a/.config/.secrets.baseline b/.config/.secrets.baseline new file mode 100644 index 00000000..23ed11cf --- /dev/null +++ b/.config/.secrets.baseline @@ -0,0 +1,153 @@ +{ + "version": "1.4.0", + "plugins_used": [ + { + "name": "ArtifactoryDetector" + }, + { + "name": "AWSKeyDetector" + }, + { + "name": "AzureStorageKeyDetector" + }, + { + "name": "Base64HighEntropyString", + "limit": 4.5 + }, + { + "name": "BasicAuthDetector" + }, + { + "name": "CloudantDetector" + }, + { + "name": "DiscordBotTokenDetector" + }, + { + "name": "GitHubTokenDetector" + }, + { + "name": "HexHighEntropyString", + "limit": 3.0 + }, + { + "name": "IbmCloudIamDetector" + }, + { + "name": "IbmCosHmacDetector" + }, + { + "name": "JwtTokenDetector" + }, + { + "name": "KeywordDetector", + "keyword_exclude": "" + }, + { + "name": "MailchimpDetector" + }, + { + "name": "NpmDetector" + }, + { + "name": "PrivateKeyDetector" + }, + { + "name": "SendGridDetector" + }, + { + "name": "SlackDetector" + }, + { + "name": "SoftlayerDetector" + }, + { + "name": "SquareOAuthDetector" + }, + { + "name": "StripeDetector" + }, + { + "name": "TwilioKeyDetector" + } + ], + "filters_used": [ + { + "path": "detect_secrets.filters.allowlist.is_line_allowlisted" + }, + { + "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies", + "min_level": 2 + }, + { + "path": "detect_secrets.filters.heuristic.is_indirect_reference" + }, + { + "path": "detect_secrets.filters.heuristic.is_likely_id_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_lock_file" + }, + { + "path": "detect_secrets.filters.heuristic.is_not_alphanumeric_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_potential_uuid" + }, + { + "path": "detect_secrets.filters.heuristic.is_prefixed_with_dollar_sign" + }, + { + "path": "detect_secrets.filters.heuristic.is_sequential_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_swagger_file" + }, + { + "path": "detect_secrets.filters.heuristic.is_templated_secret" + }, + { + "path": "detect_secrets.filters.regex.should_exclude_file", + "pattern": [ + ".config/.gitleaks-report.json" + ] + } + ], 
+ "results": { + "defaults/main.yml": [ + { + "type": "Secret Keyword", + "filename": "defaults/main.yml", + "hashed_secret": "4fae1797297d5c73819a504516f2de7740e4b52d", + "is_verified": false, + "line_number": 480 + }, + { + "type": "Secret Keyword", + "filename": "defaults/main.yml", + "hashed_secret": "0f5b530255e5a064cc73699e4fa44ba8b2ad399f", + "is_verified": false, + "line_number": 623 + } + ], + "tasks/main.yml": [ + { + "type": "Secret Keyword", + "filename": "tasks/main.yml", + "hashed_secret": "f395ee0a2d842bfcf81da0aad13591e2a9311fe1", + "is_verified": false, + "line_number": 54 + } + ], + "tasks/parse_etc_password.yml": [ + { + "type": "Secret Keyword", + "filename": "tasks/parse_etc_password.yml", + "hashed_secret": "2aaf9f2a51d8fe89e48cb9cc7d04a991ceb7f360", + "is_verified": false, + "line_number": 16 + } + ] + }, + "generated_at": "2023-09-13T11:09:17Z" +} diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 3a19c72b..00000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -name: Report Issue -about: Create a bug issue ticket to help us improve -title: '' -labels: bug -assignees: '' - ---- - -**Describe the Issue** -A clear and concise description of what the bug is. - -**Expected Behavior** -A clear and concise description of what you expected to happen. - -**Actual Behavior** -A clear and concise description of what's happening. - -**Control(s) Affected** -What controls are being affected by the issue - -**Environment (please complete the following information):** - - Ansible Version: [e.g. 2.10] - - Host Python Version: [e.g. Python 3.7.6] - - Ansible Server Python Version: [e.g. Python 3.7.6] - - Additional Details: - -**Additional Notes** -Anything additional goes here - -**Possible Solution** -Enter a suggested fix here diff --git a/.github/ISSUE_TEMPLATE/feature-request-or-enhancement.md b/.github/ISSUE_TEMPLATE/feature-request-or-enhancement.md deleted file mode 100644 index bf457005..00000000 --- a/.github/ISSUE_TEMPLATE/feature-request-or-enhancement.md +++ /dev/null @@ -1,21 +0,0 @@ ---- -name: Feature Request or Enhancement -about: Suggest an idea for this project -title: '' -labels: enhancement -assignees: '' - ---- - -**Feature Request or Enhancement** - - Feature [] - - Enhancement [] - -**Summary of Request** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Suggested Code** -Please provide any code you have in mind to fulfill the request diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md deleted file mode 100644 index cbab6e73..00000000 --- a/.github/ISSUE_TEMPLATE/question.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -name: Question -about: Ask away....... -title: '' -labels: question -assignees: '' - ---- - -**Question** -Pose question here. - -**Environment (please complete the following information):** - - Ansible Version: [e.g. 2.10] - - Host Python Version: [e.g. Python 3.7.6] - - Ansible Server Python Version: [e.g. 
Python 3.7.6] - - Additional Details: diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md deleted file mode 100644 index 1bf89d37..00000000 --- a/.github/pull_request_template.md +++ /dev/null @@ -1,11 +0,0 @@ -**Overall Review of Changes:** -A general description of the changes made that are being requested for merge - -**Issue Fixes:** -Please list (using linking) any open issues this PR addresses - -**Enhancements:** -Please list any enhancements/features that are not open issue tickets - -**How has this been tested?:** -Please give an overview of how these changes were tested. If they were not please use N/A diff --git a/.github/workflows/OS.tfvars b/.github/workflows/OS.tfvars deleted file mode 100644 index 7a8a067c..00000000 --- a/.github/workflows/OS.tfvars +++ /dev/null @@ -1,9 +0,0 @@ -#Ami ubuntu 2004 -ami_id = "ami-05fe5907b25984493" -ami_os = "ubuntu20" -ami_username = "ubuntu" -ami_user_home = "/home/ubuntu" -instance_tags = { - Name = "UBUNTU20-CIS" - Environment = "Ansible_Lockdown_GH_PR_Pipeline" -} diff --git a/.github/workflows/devel_pipeline_validation.yml b/.github/workflows/devel_pipeline_validation.yml new file mode 100644 index 00000000..a4e7d48a --- /dev/null +++ b/.github/workflows/devel_pipeline_validation.yml @@ -0,0 +1,138 @@ +--- + + name: Devel pipeline + + on: # yamllint disable-line rule:truthy + pull_request_target: + types: [opened, reopened, synchronize] + branches: + - devel + paths: + - '**.yml' + - '**.sh' + - '**.j2' + - '**.ps1' + - '**.cfg' + + # A workflow run is made up of one or more jobs + # that can run sequentially or in parallel + jobs: + # This will create messages for first time contributers and direct them to the Discord server + welcome: + runs-on: ubuntu-latest + + steps: + - uses: actions/first-interaction@main + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + pr-message: |- + Congrats on opening your first pull request and thank you for taking the time to help improve Ansible-Lockdown! + Please join in the conversation happening on the [Discord Server](https://discord.io/ansible-lockdown) as well. 
+ + # This workflow contains a single job which tests the playbook + playbook-test: + # The type of runner that the job will run on + runs-on: ubuntu-latest + env: + ENABLE_DEBUG: ${{ vars.ENABLE_DEBUG }} + # Imported as a variable by terraform + TF_VAR_repository: ${{ github.event.repository.name }} + defaults: + run: + shell: bash + working-directory: .github/workflows/github_linux_IaC + + steps: + - name: Clone ${{ github.event.repository.name }} + uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha }} + + # Pull in terraform code for linux servers + - name: Clone github IaC plan + uses: actions/checkout@v3 + with: + repository: ansible-lockdown/github_linux_IaC + path: .github/workflows/github_linux_IaC + + - name: Add_ssh_key + working-directory: .github/workflows + env: + SSH_AUTH_SOCK: /tmp/ssh_agent.sock + PRIVATE_KEY: "${{ secrets.SSH_PRV_KEY }}" + run: | + mkdir .ssh + chmod 700 .ssh + echo $PRIVATE_KEY > .ssh/github_actions.pem + chmod 600 .ssh/github_actions.pem + + - name: DEBUG - Show IaC files + if: env.ENABLE_DEBUG == 'true' + run: | + echo "OSVAR = $OSVAR" + echo "benchmark_type = $benchmark_type" + pwd + ls + env: + # Imported from github variables this is used to load the relvent OS.tfvars file + OSVAR: ${{ vars.OSVAR }} + benchmark_type: ${{ vars.BENCHMARK_TYPE }} + + - name: Terraform_Init + id: init + run: terraform init + env: + # Imported from github variables this is used to load the relvent OS.tfvars file + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + + - name: Terraform_Validate + id: validate + run: terraform validate + env: + # Imported from github variables this is used to load the relvent OS.tfvars file + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + + - name: Terraform_Apply + id: apply + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + run: terraform apply -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false + + ## Debug Section + - name: DEBUG - Show Ansible hostfile + if: env.ENABLE_DEBUG == 'true' + run: cat hosts.yml + + # Aws deployments taking a while to come up insert sleep or playbook fails + + - name: Sleep for 60 seconds + run: sleep 60s + + # Run the ansible playbook + - name: Run_Ansible_Playbook + uses: arillso/action.playbook@master + with: + playbook: site.yml + inventory: .github/workflows/github_linux_IaC/hosts.yml + galaxy_file: collections/requirements.yml + private_key: ${{ secrets.SSH_PRV_KEY }} + # verbose: 3 + env: + ANSIBLE_HOST_KEY_CHECKING: "false" + ANSIBLE_DEPRECATION_WARNINGS: "false" + + # Remove test system - User secrets to keep if necessary + + - name: Terraform_Destroy + if: always() && env.ENABLE_DEBUG == 'false' + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + run: terraform destroy -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false diff --git a/.github/workflows/github_networks.tf b/.github/workflows/github_networks.tf deleted file mode 100644 index ba777642..00000000 --- a/.github/workflows/github_networks.tf +++ /dev/null @@ -1,53 +0,0 @@ -resource "aws_vpc" "Main" { - cidr_block = var.main_vpc_cidr - instance_tenancy = "default" - tags = { - Environment 
= "${var.environment}" - Name = "${var.namespace}-VPC" - } -} - -resource "aws_internet_gateway" "IGW" { - vpc_id = aws_vpc.Main.id - tags = { - Environment = "${var.environment}" - Name = "${var.namespace}-IGW" - } -} - -resource "aws_subnet" "publicsubnets" { - vpc_id = aws_vpc.Main.id - cidr_block = var.public_subnets - availability_zone = var.availability_zone - tags = { - Environment = "${var.environment}" - Name = "${var.namespace}-pubsub" - } -} - -resource "aws_subnet" "Main" { - vpc_id = aws_vpc.Main.id - cidr_block = var.private_subnets - availability_zone = var.availability_zone - tags = { - Environment = "${var.environment}" - Name = "${var.namespace}-prvsub" - } -} - -resource "aws_route_table" "PublicRT" { - vpc_id = aws_vpc.Main.id - route { - cidr_block = "0.0.0.0/0" - gateway_id = aws_internet_gateway.IGW.id - } - tags = { - Environment = "${var.environment}" - Name = "${var.namespace}-publicRT" - } -} - -resource "aws_route_table_association" "rt_associate_public" { - subnet_id = aws_subnet.Main.id - route_table_id = aws_route_table.PublicRT.id -} diff --git a/.github/workflows/github_vars.tfvars b/.github/workflows/github_vars.tfvars deleted file mode 100644 index b79af632..00000000 --- a/.github/workflows/github_vars.tfvars +++ /dev/null @@ -1,13 +0,0 @@ -// github_actions variables -// Resourced in github_networks.tf -// Declared in variables.tf -// - -namespace = "Ansible_Lockdown_GH_PR_Actions" -environment = "Ansible_Lockdown_GH_PR_Pipeline" - -// Matching pair name found in AWS for keypairs PEM key -ami_key_pair_name = "github_actions" -main_vpc_cidr = "172.22.0.0/24" -public_subnets = "172.22.0.128/26" -private_subnets = "172.22.0.192/26" diff --git a/.github/workflows/linux_benchmark_testing.yml b/.github/workflows/linux_benchmark_testing.yml deleted file mode 100644 index eed42b3d..00000000 --- a/.github/workflows/linux_benchmark_testing.yml +++ /dev/null @@ -1,111 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: linux_benchmark_pipeline - -# Controls when the action will run. -# Triggers the workflow on push or pull request -# events but only for the devel branch -on: # yamllint disable-line rule:truthy - pull_request_target: - types: [opened, reopened, synchronize] - branches: - - devel - - main - paths: - - '**.yml' - - '**.sh' - - '**.j2' - - '**.ps1' - - '**.cfg' - -# A workflow run is made up of one or more jobs -# that can run sequentially or in parallel -jobs: - # This will create messages for first time contributers and direct them to the Discord server - welcome: - runs-on: ubuntu-latest - - steps: - - uses: actions/first-interaction@main - with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - pr-message: |- - Congrats on opening your first pull request and thank you for taking the time to help improve Ansible-Lockdown! - Please join in the conversation happening on the [Discord Server](https://discord.io/ansible-lockdown) as well. 
- # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: ubuntu-latest - - env: - ENABLE_DEBUG: false - - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - # Checks-out your repository under $GITHUB_WORKSPACE, - # so your job can access it - - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.sha }} - - - name: Add_ssh_key - working-directory: .github/workflows - env: - SSH_AUTH_SOCK: /tmp/ssh_agent.sock - PRIVATE_KEY: "${{ secrets.SSH_PRV_KEY }}" - run: | - mkdir .ssh - chmod 700 .ssh - echo $PRIVATE_KEY > .ssh/github_actions.pem - chmod 600 .ssh/github_actions.pem - -### Build out the server - - name: Terraform_Init - working-directory: .github/workflows - run: terraform init - - - name: Terraform_Validate - working-directory: .github/workflows - run: terraform validate - - - name: Terraform_Apply - working-directory: .github/workflows - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - run: terraform apply -var-file "github_vars.tfvars" -var-file "OS.tfvars" --auto-approve -input=false - - ## Debug Section - - name: DEBUG - Show Ansible hostfile - if: env.ENABLE_DEBUG == 'true' - working-directory: .github/workflows - run: cat hosts.yml - - # Aws deployments taking a while to come up insert sleep or playbook fails - - - name: Sleep for 60 seconds - run: sleep 60s - shell: bash - - # Run the ansible playbook - - name: Run_Ansible_Playbook - uses: arillso/action.playbook@master - with: - playbook: site.yml - inventory: .github/workflows/hosts.yml - galaxy_file: collections/requirements.yml - private_key: ${{ secrets.SSH_PRV_KEY }} - # verbose: 3 - env: - ANSIBLE_HOST_KEY_CHECKING: "false" - ANSIBLE_DEPRECATION_WARNINGS: "false" - - # Remove test system - User secrets to keep if necessary - - - name: Terraform_Destroy - working-directory: .github/workflows - if: always() && env.ENABLE_DEBUG == 'false' - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - run: terraform destroy -var-file "github_vars.tfvars" -var-file "OS.tfvars" --auto-approve -input=false diff --git a/.github/workflows/main.tf b/.github/workflows/main.tf deleted file mode 100644 index abac688c..00000000 --- a/.github/workflows/main.tf +++ /dev/null @@ -1,75 +0,0 @@ -provider "aws" { - profile = "" - region = var.aws_region -} - -// Create a security group with access to port 22 - -resource "random_id" "server" { - keepers = { - # Generate a new id each time we switch to a new AMI id - ami_id = "${var.ami_id}" - } - - byte_length = 8 -} - -resource "aws_security_group" "github_actions" { - name = "${var.namespace}-${random_id.server.hex}-SG" - vpc_id = aws_vpc.Main.id - - ingress { - from_port = 22 - to_port = 22 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0"] - } - - egress { - from_port = 0 - to_port = 0 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0"] - } - tags = { - Environment = "${var.environment}" - Name = "${var.namespace}-SG" - } -} - -// instance setup - -resource "aws_instance" "testing_vm" { - ami = var.ami_id - availability_zone = var.availability_zone - associate_public_ip_address = true - key_name = var.ami_key_pair_name # This is the key as known in the ec2 key_pairs - instance_type = var.instance_type - tags = var.instance_tags - vpc_security_group_ids = [aws_security_group.github_actions.id] - subnet_id = aws_subnet.Main.id - 
root_block_device { - delete_on_termination = true - } -} - -// generate inventory file -resource "local_file" "inventory" { - filename = "./hosts.yml" - directory_permission = "0755" - file_permission = "0644" - content = < .ssh/github_actions.pem + chmod 600 .ssh/github_actions.pem + + - name: DEBUG - Show IaC files + if: env.ENABLE_DEBUG == 'true' + run: | + echo "OSVAR = $OSVAR" + echo "benchmark_type = $benchmark_type" + pwd + ls + env: + # Imported from github variables this is used to load the relvent OS.tfvars file + OSVAR: ${{ vars.OSVAR }} + benchmark_type: ${{ vars.BENCHMARK_TYPE }} + + - name: Terraform_Init + id: init + run: terraform init + env: + # Imported from github variables this is used to load the relvent OS.tfvars file + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + + - name: Terraform_Validate + id: validate + run: terraform validate + env: + # Imported from github variables this is used to load the relvent OS.tfvars file + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + + - name: Terraform_Apply + id: apply + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + run: terraform apply -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false + + ## Debug Section + - name: DEBUG - Show Ansible hostfile + if: env.ENABLE_DEBUG == 'true' + run: cat hosts.yml + + # Aws deployments taking a while to come up insert sleep or playbook fails + + - name: Sleep for 60 seconds + run: sleep 60s + + # Run the ansible playbook + - name: Run_Ansible_Playbook + uses: arillso/action.playbook@master + with: + playbook: site.yml + inventory: .github/workflows/github_linux_IaC/hosts.yml + galaxy_file: collections/requirements.yml + private_key: ${{ secrets.SSH_PRV_KEY }} + # verbose: 3 + env: + ANSIBLE_HOST_KEY_CHECKING: "false" + ANSIBLE_DEPRECATION_WARNINGS: "false" + + # Remove test system - User secrets to keep if necessary + + - name: Terraform_Destroy + if: always() && env.ENABLE_DEBUG == 'false' + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + OSVAR: ${{ vars.OSVAR }} + TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }} + run: terraform destroy -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false diff --git a/.github/workflows/terraform.tfvars b/.github/workflows/terraform.tfvars deleted file mode 100644 index 31113784..00000000 --- a/.github/workflows/terraform.tfvars +++ /dev/null @@ -1,6 +0,0 @@ -// vars should be loaded by OSname.tfvars -availability_zone = "us-east-1b" -aws_region = "us-east-1" -ami_os = var.ami_os -ami_username = var.ami_username -instance_tags = var.instance_tags diff --git a/.github/workflows/test.sh b/.github/workflows/test.sh deleted file mode 100644 index 1a7202a8..00000000 --- a/.github/workflows/test.sh +++ /dev/null @@ -1,6 +0,0 @@ -RHEL7=$(grep -c RHEL7 OS.tfvars) -if [ `echo $?` != 0 ]; then - exit 0 -fi - - diff --git a/.github/workflows/update_galaxy.yml b/.github/workflows/update_galaxy.yml index 2052b0a8..951a53cb 100644 --- a/.github/workflows/update_galaxy.yml +++ b/.github/workflows/update_galaxy.yml @@ -6,7 +6,7 @@ name: update galaxy # Controls when the action will run. 
# Triggers the workflow on merge request events to the main branch -on: # yamllint disable-line rule:truthy +on: push: branches: - main diff --git a/.github/workflows/variables.tf b/.github/workflows/variables.tf deleted file mode 100644 index b579ac00..00000000 --- a/.github/workflows/variables.tf +++ /dev/null @@ -1,76 +0,0 @@ -// Taken from the OSname.tfvars - -variable "aws_region" { - description = "AWS region" - default = "us-east-1" - type = string -} - -variable "availability_zone" { - description = "List of availability zone in the region" - default = "us-east-1b" - type = string -} - -variable "instance_type" { - description = "EC2 Instance Type" - default = "t3.micro" - type = string -} - -variable "instance_tags" { - description = "Tags to set for instances" - type = map(string) -} - -variable "ami_key_pair_name" { - description = "Name of key pair in AWS thats used" - type = string -} - -variable "ami_os" { - description = "AMI OS Type" - type = string -} - -variable "ami_id" { - description = "AMI ID reference" - type = string -} - -variable "ami_username" { - description = "Username for the ami id" - type = string -} - -variable "ami_user_home" { - description = "home dir for the username" - type = string -} - -variable "namespace" { - description = "Name used across all tags" - type = string -} - -variable "environment" { - description = "Env Name used across all tags" - type = string -} - -// taken from github_vars.tfvars & - -variable "main_vpc_cidr" { - description = "Private cidr block to be used for vpc" - type = string -} - -variable "public_subnets" { - description = "public subnet cidr block" - type = string -} - -variable "private_subnets" { - description = "private subnet cidr block" - type = string -} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..97c79434 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,67 @@ +--- +##### CI for use by github no need for action to be added +##### Inherited +ci: + autofix_prs: false + skip: [detect-aws-credentials, ansible-lint ] + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.2.0 + hooks: + # Safety + - id: detect-aws-credentials + - id: detect-private-key + + # git checks + - id: check-merge-conflict + - id: check-added-large-files + - id: check-case-conflict + + # General checks + - id: trailing-whitespace + name: Trim Trailing Whitespace + description: This hook trims trailing whitespace. + entry: trailing-whitespace-fixer + language: python + types: [text] + args: [--markdown-linebreak-ext=md] + - id: end-of-file-fixer + +# Scan for passwords +- repo: https://github.com/Yelp/detect-secrets + rev: v1.4.0 + hooks: + - id: detect-secrets + args: [ '--baseline', '.config/.secrets.baseline' ] + exclude: .config/.gitleaks-report.json + +- repo: https://github.com/gitleaks/gitleaks + rev: v8.17.0 + hooks: + - id: gitleaks + args: ['--baseline-path', '.config/.gitleaks-report.json'] + +- repo: https://github.com/ansible-community/ansible-lint + rev: v6.17.2 + hooks: + - id: ansible-lint + name: Ansible-lint + description: This hook runs ansible-lint. 
+ entry: python3 -m ansiblelint --force-color site.yml -c .ansible-lint + language: python + # do not pass files to ansible-lint, see: + # https://github.com/ansible/ansible-lint/issues/611 + pass_filenames: false + always_run: true + additional_dependencies: + # https://github.com/pre-commit/pre-commit/issues/1526 + # If you want to use a specific version of ansible-core or ansible, feel + # free to override `additional_dependencies` in your own hook config + # file. + - ansible-core>=2.10.1 + +- repo: https://github.com/adrienverge/yamllint.git + rev: v1.32.0 # or higher tag + hooks: + - id: yamllint diff --git a/.yamllint b/.yamllint index 7dc754f0..65faae6c 100755 --- a/.yamllint +++ b/.yamllint @@ -1,24 +1,29 @@ --- -# Based on ansible-lint config extends: default +ignore: | + tests/ + molecule/ + .github/ + .gitlab-ci.yml + *molecule.yml + rules: - braces: {max-spaces-inside: 1, level: error} - brackets: {max-spaces-inside: 1, level: error} - colons: {max-spaces-after: -1, level: error} - commas: {max-spaces-after: -1, level: error} - comments: disable - comments-indentation: disable - document-start: disable - empty-lines: {max: 3, level: error} - hyphens: {level: error} indentation: - # Requiring 4 space indentation + # Requiring 4 space indentation spaces: 4 - # Requiring consistent indentation within a file, either indented or not + # Requiring consistent indentation within a file, either indented or not indent-sequences: consistent - key-duplicates: enable + braces: + max-spaces-inside: 1 + level: error + brackets: + max-spaces-inside: 1 + level: error + empty-lines: + max: 1 line-length: disable + key-duplicates: enable new-line-at-end-of-file: enable new-lines: type: unix diff --git a/Changelog.md b/Changelog.md index 2abcaed6..41f93930 100644 --- a/Changelog.md +++ b/Changelog.md @@ -1,9 +1,28 @@ # Change log for Ubuntu 2004 +## V2.0 based upon CIS 2.0.1 + +- v2.0.1 - refer to the change history in the official CIS PDF. + - ReWrite of many rules + - Ordering and numbering of rules + - many title updates +- timesync options increased + - default systemd-timesyncd + - chrony options updated +- idempotency improvements +- new discoveries + - interactive users + - uid min value + - is_container discovery and default var +- pre-commit added to setup +- README new layout + +- Added test for rule 4.3.4 to check that the user using sudo has a password set before NOPASSWD is removed from sudoers + ## V1.0.1 - based upon CIS 1.1.0 thanks to ikthomas -[#84](https://github.com/ansible-lockdown/UBUNTU20-CIS/issues/84) +[#84](https://github.com/ansible-lockdown/UBUNTU20-CIS/issues/84) ## v1.0.0 @@ -12,13 +31,13 @@ thanks to ikthomas - ansible version ## April 2023 Updates -- Addressed Bugs +- Addressed Bugs - [#73](https://github.com/ansible-lockdown/UBUNTU20-CIS/issues/73) - Thanks @fnschroeder (Fix Taken From @uk-bolly issue_73 branch) - [#80](https://github.com/ansible-lockdown/UBUNTU20-CIS/issues/80) - Thanks @kdebisschop - Added Fixes For Outstanding PR's - [#81](https://github.com/ansible-lockdown/UBUNTU20-CIS/pull/81) - Thanks @kdebisschop - Fixed Linting Errors For Yamllint & Ansbile-Lint - - Adjusted Builtin to Posix For sysctl module. + - Adjusted Builtin to Posix For sysctl module.
## Feb 23 updates - Initial diff --git a/README.md b/README.md index cf8d0e77..2d31a775 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,41 @@ # Ubuntu 20 CIS -![Build Status](https://img.shields.io/github/workflow/status/ansible-lockdown/UBUNTU20-CIS/CommunityToDevel?label=Devel%20Build%20Status&style=plastic) -![Build Status](https://img.shields.io/github/workflow/status/ansible-lockdown/UBUNTU20-CIS/DevelToMaster?label=Main%20Build%20Status&style=plastic) -![Release](https://img.shields.io/github/v/release/ansible-lockdown/UBUNTU20-CIS?style=plastic) +## Configure a Ubuntu 20 machine to be [CIS](https://www.cisecurity.org/cis-benchmarks/) compliant -Configure Ubuntu 20 machine to be [CIS](https://www.cisecurity.org/cis-benchmarks/) v1.1.0 compliant. There are some intrusive tasks that have a toggle in defaults main.yml to disable to automated fix +### Based on CIS Ubuntu Linux 20.04 LTS Benchmark v2.0.1 [Release](https://learn.cisecurity.org/l/799323/2023-06-27/4t1gmr) -## Join us +![Org Stars](https://img.shields.io/github/stars/ansible-lockdown?label=Org%20Stars&style=social) +![Stars](https://img.shields.io/github/stars/ansible-lockdown/ubuntu20-cis?label=Repo%20Stars&style=social) +![Forks](https://img.shields.io/github/forks/ansible-lockdown/ubuntu20-cis?style=social) +![followers](https://img.shields.io/github/followers/ansible-lockdown?style=social) +[![Twitter URL](https://img.shields.io/twitter/url/https/twitter.com/AnsibleLockdown.svg?style=social&label=Follow%20%40AnsibleLockdown)](https://twitter.com/AnsibleLockdown) + +![Ansible Galaxy Quality](https://img.shields.io/ansible/quality/54777?label=Quality&&logo=ansible) +![Discord Badge](https://img.shields.io/discord/925818806838919229?logo=discord) + +![Devel Build Status](https://img.shields.io/github/actions/workflow/status/ansible-lockdown/ubuntu20-cis/linux_benchmark_testing.yml?label=Devel%20Build%20Status) +![Devel Commits](https://img.shields.io/github/commit-activity/m/ansible-lockdown/ubuntu20-cis/devel?color=dark%20green&label=Devel%20Branch%20Commits) + +![Release Branch](https://img.shields.io/badge/Release%20Branch-Main-brightgreen) +![Main Build Status](https://img.shields.io/github/actions/workflow/status/ansible-lockdown/ubuntu20-cis/linux_benchmark_testing.yml?label=Build%20Status) +![Main Release Date](https://img.shields.io/github/release-date/ansible-lockdown/ubuntu20-cis?label=Release%20Date) +![Release Tag](https://img.shields.io/github/v/tag/ansible-lockdown/ubuntu20-cis?label=Release%20Tag&&color=success) + +![Issues Open](https://img.shields.io/github/issues-raw/ansible-lockdown/ubuntu20-cis?label=Open%20Issues) +![Issues Closed](https://img.shields.io/github/issues-closed-raw/ansible-lockdown/ubuntu20-cis?label=Closed%20Issues&&color=success) +![Pull Requests](https://img.shields.io/github/issues-pr/ansible-lockdown/ubuntu20-cis?label=Pull%20Requests) + +![License](https://img.shields.io/github/license/ansible-lockdown/ubuntu20-cis?label=License) + +--- + +## Looking for support? + +[Lockdown Enterprise](https://www.lockdownenterprise.com#GH_AL_UB20_cis) + +[Ansible support](https://www.mindpointgroup.com/cybersecurity-products/ansible-counselor#GH_AL_UB20_cis) + +### Community On our [Discord Server](https://discord.io/ansible-lockdown) to ask questions, discuss features, or just chat with other Ansible-Lockdown users @@ -16,18 +45,13 @@ This role **will make changes to the system** that could break things. This is n This role was developed against a clean install of the Operating System. 
If you are implimenting to an existing system please review this role for any site specific changes that are needed. -NFtables is not supported in the role. - -Based on -[CIS_Ubuntu_Linux_20.04_LTS_Benchmark](https://community.cisecurity.org/collab/public/index.php). - ## Documentation -- [Readthedocs](https://ansible-lockdown.readthedocs.io/en/latest/) -- [Getting Started](https://www.lockdownenterprise.com/docs/getting-started-with-lockdown) -- [Customizing Roles](https://www.lockdownenterprise.com/docs/customizing-lockdown-enterprise) -- [Per-Host Configuration](https://www.lockdownenterprise.com/docs/per-host-lockdown-enterprise-configuration) -- [Getting the Most Out of the Role](https://www.lockdownenterprise.com/docs/get-the-most-out-of-lockdown-enterprise) +- [Read The Docs](https://ansible-lockdown.readthedocs.io/en/latest/) +- [Getting Started](https://www.lockdownenterprise.com/docs/getting-started-with-lockdown#GH_AL_UB20_cis) +- [Customizing Roles](https://www.lockdownenterprise.com/docs/customizing-lockdown-enterprise#GH_AL_UB20_cis) +- [Per-Host Configuration](https://www.lockdownenterprise.com/docs/per-host-lockdown-enterprise-configuration#GH_AL_UB20_cis) +- [Getting the Most Out of the Role](https://www.lockdownenterprise.com/docs/get-the-most-out-of-lockdown-enterprise#GH_AL_UB20_cis) ## Requirements @@ -39,11 +63,9 @@ Based on - [Tower User Guide](https://docs.ansible.com/ansible-tower/latest/html/userguide/index.html) - [Ansible Community Info](https://docs.ansible.com/ansible/latest/community/index.html) - Functioning Ansible and/or Tower Installed, configured, and running. This includes all of the base Ansible/Tower configurations, needed packages installed, and infrastructure setup. - +- Please read through the tasks in this role to gain an understanding of what each control is doing. Some of the tasks are disruptive and can have unintended consequences in a live production system. Also familiarize yourself with the variables in the defaults/main.yml file. **Technical Dependencies:** -- It is suggested that the server is patched in the normal manner before running this role. - - Running Ansible/Tower setup (this role is tested against Ansible version 2.9.1 and newer) - Python3 Ansible run environment @@ -53,14 +75,16 @@ This can be turned on or off within the defaults/main.yml file with the variable This is a much quicker, very lightweight, checking (where possible) config compliance and live/running settings. -A new form of auditing has been developed, by using a small (12MB) go binary called [goss](https://github.com/aelsabbahy/goss) along with the relevant configurations to check. Without the need for infrastructure or other tooling. +A new form of auditing has been developed, using a small (12MB) go binary called [goss](https://github.com/aelsabbahy/goss) along with the relevant configurations to check, without the need for infrastructure or other tooling. This audit will not only check the config has the correct setting but aims to capture if it is running with that configuration also trying to remove [false positives](https://www.mindpointgroup.com/blog/is-compliance-scanning-still-relevant/) in the process. Refer to [UBUNTU20-CIS-Audit](https://github.com/ansible-lockdown/UBUNTU20-CIS-Audit). +Further audit documentation can be found at [Read The Docs](https://ansible-lockdown.readthedocs.io/en/latest/) + ## Role Variables -This role is designed that the end user should not have to edit the tasks themselves.
All customizing should be done via the defaults/main.yml file or with extra vars within the project, job, workflow, etc. These variables can be found [here](https://github.com/ansible-lockdown/UBUNTU20-CIS/wiki/Main-Variables) in the Main Variables Wiki page. All variables are listed there along with descriptions. +This role is designed that the end user should not have to edit the tasks themselves. All customizing should be done via the defaults/main.yml file or with extra vars within the project, job, workflow, etc. ## Branches @@ -88,11 +112,10 @@ uses: - runs the audit using the devel branch - This is an automated test that occurs on pull requests into devel -## Support - -This is a community project at its core and will be managed as such. +## Added Extras -If you would are interested in dedicated support to assist or provide bespoke setups +- [pre-commit](https://pre-commit.com) can be tested and can be run from within the directory -- [Ansible Counselor](https://www.mindpointgroup.com/products/ansible-counselor-on-demand-ansible-services-and-consulting/) -- [Try us out](https://engage.mindpointgroup.com/try-ansible-counselor) +```sh +pre-commit run +``` diff --git a/defaults/main.yml b/defaults/main.yml index c4a1a9e3..3b2b64f9 100644 --- a/defaults/main.yml +++ b/defaults/main.yml @@ -1,9 +1,4 @@ --- -# If you would like a report at the end accordin to OpenSCAP as to the report results -# then you should set ubtu20cis_oscap_scan to true/yes. -# NOTE: This requires the python_xmltojson package on the control host. -ubtu20cis_oscap_scan: false -ubtu20cis_report_dir: /tmp ubtu20cis_section1_patch: true ubtu20cis_section2_patch: true @@ -15,11 +10,13 @@ ubtu20cis_section6_patch: true # System will reboot if false, can give better audit results skip_reboot: true +system_is_container: false + ## Benchmark name used by auditing control role # The audit variable found at the base benchmark: UBUNTU20-CIS ## metadata for Audit benchmark -benchmark_version: 'v1.1.0' +benchmark_version: 'v2.0.1' ### Audit Binary is required on the remote host setup_audit: false @@ -86,41 +83,46 @@ ubtu20cis_rule_1_1_1_4: true ubtu20cis_rule_1_1_1_5: true ubtu20cis_rule_1_1_1_6: true ubtu20cis_rule_1_1_1_7: true -ubtu20cis_rule_1_1_2: true -ubtu20cis_rule_1_1_3: true -ubtu20cis_rule_1_1_4: true -ubtu20cis_rule_1_1_5: true -ubtu20cis_rule_1_1_6: true -ubtu20cis_rule_1_1_7: true -ubtu20cis_rule_1_1_8: true +ubtu20cis_rule_1_1_2_1: true +ubtu20cis_rule_1_1_2_2: true +ubtu20cis_rule_1_1_2_3: true +ubtu20cis_rule_1_1_2_4: true +ubtu20cis_rule_1_1_3_1: true +ubtu20cis_rule_1_1_3_2: true +ubtu20cis_rule_1_1_3_3: true +ubtu20cis_rule_1_1_4_1: true +ubtu20cis_rule_1_1_4_2: true +ubtu20cis_rule_1_1_4_3: true +ubtu20cis_rule_1_1_4_4: true +ubtu20cis_rule_1_1_5_1: true +ubtu20cis_rule_1_1_5_2: true +ubtu20cis_rule_1_1_5_3: true +ubtu20cis_rule_1_1_5_4: true +ubtu20cis_rule_1_1_6_1: true +ubtu20cis_rule_1_1_6_2: true +ubtu20cis_rule_1_1_6_3: true +ubtu20cis_rule_1_1_6_4: true +ubtu20cis_rule_1_1_7_1: true +ubtu20cis_rule_1_1_7_2: true +ubtu20cis_rule_1_1_7_3: true +ubtu20cis_rule_1_1_8_1: true +ubtu20cis_rule_1_1_8_2: true +ubtu20cis_rule_1_1_8_3: true ubtu20cis_rule_1_1_9: true ubtu20cis_rule_1_1_10: true -ubtu20cis_rule_1_1_11: true -ubtu20cis_rule_1_1_12: true -ubtu20cis_rule_1_1_13: true -ubtu20cis_rule_1_1_14: true -ubtu20cis_rule_1_1_15: true -ubtu20cis_rule_1_1_16: true -ubtu20cis_rule_1_1_17: true -ubtu20cis_rule_1_1_18: true -ubtu20cis_rule_1_1_19: true -ubtu20cis_rule_1_1_20: true -ubtu20cis_rule_1_1_21: true 
-ubtu20cis_rule_1_1_22: true -ubtu20cis_rule_1_1_23: true -ubtu20cis_rule_1_1_24: true ubtu20cis_rule_1_2_1: true ubtu20cis_rule_1_2_2: true ubtu20cis_rule_1_3_1: true ubtu20cis_rule_1_3_2: true +ubtu20cis_rule_1_3_3: true ubtu20cis_rule_1_4_1: true ubtu20cis_rule_1_4_2: true ubtu20cis_rule_1_4_3: true -ubtu20cis_rule_1_4_4: true ubtu20cis_rule_1_5_1: true ubtu20cis_rule_1_5_2: true ubtu20cis_rule_1_5_3: true ubtu20cis_rule_1_5_4: true +ubtu20cis_rule_1_5_5: true ubtu20cis_rule_1_6_1_1: true ubtu20cis_rule_1_6_1_2: true ubtu20cis_rule_1_6_1_3: true @@ -135,42 +137,61 @@ ubtu20cis_rule_1_8_1: true ubtu20cis_rule_1_8_2: true ubtu20cis_rule_1_8_3: true ubtu20cis_rule_1_8_4: true -ubtu20cis_rule_1_9: true +ubtu20cis_rule_1_8_5: true +ubtu20cis_rule_1_8_6: true +ubtu20cis_rule_1_8_7: true +ubtu20cis_rule_1_8_8: true +ubtu20cis_rule_1_8_9: true +ubtu20cis_rule_1_8_10: true # Section 2 Fixes # Section 2 is Services (Special Purpose Services, and service clients) ubtu20cis_rule_2_1_1_1: true -ubtu20cis_rule_2_1_1_2: true -ubtu20cis_rule_2_1_1_3: true -ubtu20cis_rule_2_1_1_4: true -ubtu20cis_rule_2_1_2: true -ubtu20cis_rule_2_1_3: true -ubtu20cis_rule_2_1_4: true -ubtu20cis_rule_2_1_5: true -ubtu20cis_rule_2_1_6: true -ubtu20cis_rule_2_1_7: true -ubtu20cis_rule_2_1_8: true -ubtu20cis_rule_2_1_9: true -ubtu20cis_rule_2_1_10: true -ubtu20cis_rule_2_1_11: true -ubtu20cis_rule_2_1_12: true -ubtu20cis_rule_2_1_13: true -ubtu20cis_rule_2_1_14: true -ubtu20cis_rule_2_1_15: true -ubtu20cis_rule_2_1_16: true -ubtu20cis_rule_2_1_17: true +ubtu20cis_rule_2_1_2_1: true +ubtu20cis_rule_2_1_2_2: true +ubtu20cis_rule_2_1_2_3: true +ubtu20cis_rule_2_1_3_1: true +ubtu20cis_rule_2_1_3_2: true +ubtu20cis_rule_2_1_4_1: true +ubtu20cis_rule_2_1_4_2: true +ubtu20cis_rule_2_1_4_3: true +ubtu20cis_rule_2_1_4_4: true + ubtu20cis_rule_2_2_1: true ubtu20cis_rule_2_2_2: true ubtu20cis_rule_2_2_3: true ubtu20cis_rule_2_2_4: true ubtu20cis_rule_2_2_5: true ubtu20cis_rule_2_2_6: true -ubtu20cis_rule_2_3: true +ubtu20cis_rule_2_2_7: true +ubtu20cis_rule_2_2_8: true +ubtu20cis_rule_2_2_9: true +ubtu20cis_rule_2_2_10: true +ubtu20cis_rule_2_2_11: true +ubtu20cis_rule_2_2_12: true +ubtu20cis_rule_2_2_13: true +ubtu20cis_rule_2_2_14: true +ubtu20cis_rule_2_2_15: true +ubtu20cis_rule_2_2_16: true +ubtu20cis_rule_2_2_17: true + +ubtu20cis_rule_2_3_1: true +ubtu20cis_rule_2_3_2: true +ubtu20cis_rule_2_3_3: true +ubtu20cis_rule_2_3_4: true +ubtu20cis_rule_2_3_5: true +ubtu20cis_rule_2_3_6: true +ubtu20cis_rule_2_4: true # Section 3 Fixes # Section 3 is Network Configuration (Disable Unused Networks, Network Parameters (Host Only), Network Parameters (Host and Router), Uncommon Network Protocols, and Firewall Configuration) ubtu20cis_rule_3_1_1: true ubtu20cis_rule_3_1_2: true +ubtu20cis_rule_3_1_3: true +ubtu20cis_rule_3_1_4: true +ubtu20cis_rule_3_1_5: true +ubtu20cis_rule_3_1_6: true +ubtu20cis_rule_3_1_7: true ubtu20cis_rule_3_2_1: true ubtu20cis_rule_3_2_2: true ubtu20cis_rule_3_3_1: true @@ -186,44 +207,42 @@ ubtu20cis_rule_3_4_1: true ubtu20cis_rule_3_4_2: true ubtu20cis_rule_3_4_3: true ubtu20cis_rule_3_4_4: true -ubtu20cis_rule_3_5_1_1: true -ubtu20cis_rule_3_5_1_2: true -ubtu20cis_rule_3_5_1_3: true -ubtu20cis_rule_3_5_1_4: true -ubtu20cis_rule_3_5_1_5: true -ubtu20cis_rule_3_5_1_6: true -ubtu20cis_rule_3_5_1_7: true -ubtu20cis_rule_3_5_2_1: true -ubtu20cis_rule_3_5_2_2: true -ubtu20cis_rule_3_5_2_3: true -ubtu20cis_rule_3_5_2_4: true -ubtu20cis_rule_3_5_2_5: true -ubtu20cis_rule_3_5_2_6: true -ubtu20cis_rule_3_5_2_7: true 
-ubtu20cis_rule_3_5_2_8: true -ubtu20cis_rule_3_5_2_9: true -ubtu20cis_rule_3_5_2_10: true -ubtu20cis_rule_3_5_3_1_1: true -ubtu20cis_rule_3_5_3_1_2: true -ubtu20cis_rule_3_5_3_1_3: true -ubtu20cis_rule_3_5_3_2_1: true -ubtu20cis_rule_3_5_3_2_2: true -ubtu20cis_rule_3_5_3_2_3: true -ubtu20cis_rule_3_5_3_2_4: true -ubtu20cis_rule_3_5_3_3_1: true -ubtu20cis_rule_3_5_3_3_2: true -ubtu20cis_rule_3_5_3_3_3: true -ubtu20cis_rule_3_5_3_3_4: true +# ufw +ubtu20cis_rule_3_4_1_1: true +ubtu20cis_rule_3_4_1_2: true +ubtu20cis_rule_3_4_1_3: true +ubtu20cis_rule_3_4_1_4: true +ubtu20cis_rule_3_4_1_5: true +ubtu20cis_rule_3_4_1_6: true +ubtu20cis_rule_3_4_1_7: true +# nftables +ubtu20cis_rule_3_4_2_1: true +ubtu20cis_rule_3_4_2_2: true +ubtu20cis_rule_3_4_2_3: true +ubtu20cis_rule_3_4_2_4: true +ubtu20cis_rule_3_4_2_5: true +ubtu20cis_rule_3_4_2_6: true +ubtu20cis_rule_3_4_2_7: true +ubtu20cis_rule_3_4_2_8: true +ubtu20cis_rule_3_4_2_9: true +ubtu20cis_rule_3_4_2_10: true +# iptables +ubtu20cis_rule_3_4_3_1_1: true +ubtu20cis_rule_3_4_3_1_2: true +ubtu20cis_rule_3_4_3_1_3: true +ubtu20cis_rule_3_4_3_2_1: true +ubtu20cis_rule_3_4_3_2_2: true +ubtu20cis_rule_3_4_3_2_3: true +ubtu20cis_rule_3_4_3_2_4: true +ubtu20cis_rule_3_4_3_3_1: true +ubtu20cis_rule_3_4_3_3_2: true +ubtu20cis_rule_3_4_3_3_3: true +ubtu20cis_rule_3_4_3_3_4: true # Section 4 Fixes -# Section 4 is Logging and Auditing (Configure System Accounting (auditd), Configure Data Retention, and Configure Logging) -ubtu20cis_rule_4_1_1_1: true -ubtu20cis_rule_4_1_1_2: true -ubtu20cis_rule_4_1_1_3: true -ubtu20cis_rule_4_1_1_4: true -ubtu20cis_rule_4_1_2_1: true -ubtu20cis_rule_4_1_2_2: true -ubtu20cis_rule_4_1_2_3: true +# Section 4 is Access Authentication and job schedulers +ubtu20cis_rule_4_1_1: true +ubtu20cis_rule_4_1_2: true ubtu20cis_rule_4_1_3: true ubtu20cis_rule_4_1_4: true ubtu20cis_rule_4_1_5: true @@ -231,79 +250,125 @@ ubtu20cis_rule_4_1_6: true ubtu20cis_rule_4_1_7: true ubtu20cis_rule_4_1_8: true ubtu20cis_rule_4_1_9: true -ubtu20cis_rule_4_1_10: true -ubtu20cis_rule_4_1_11: true -ubtu20cis_rule_4_1_12: true -ubtu20cis_rule_4_1_13: true -ubtu20cis_rule_4_1_14: true -ubtu20cis_rule_4_1_15: true -ubtu20cis_rule_4_1_16: true -ubtu20cis_rule_4_1_17: true -ubtu20cis_rule_4_2_1_1: true -ubtu20cis_rule_4_2_1_2: true -ubtu20cis_rule_4_2_1_3: true -ubtu20cis_rule_4_2_1_4: true -ubtu20cis_rule_4_2_1_5: true -ubtu20cis_rule_4_2_1_6: true -ubtu20cis_rule_4_2_2_1: true -ubtu20cis_rule_4_2_2_2: true -ubtu20cis_rule_4_2_2_3: true +ubtu20cis_rule_4_2_1: true +ubtu20cis_rule_4_2_2: true ubtu20cis_rule_4_2_3: true -ubtu20cis_rule_4_3: true -ubtu20cis_rule_4_4: true +ubtu20cis_rule_4_2_4: true +ubtu20cis_rule_4_2_5: true +ubtu20cis_rule_4_2_6: true +ubtu20cis_rule_4_2_7: true +ubtu20cis_rule_4_2_8: true +ubtu20cis_rule_4_2_9: true +ubtu20cis_rule_4_2_10: true +ubtu20cis_rule_4_2_11: true +ubtu20cis_rule_4_2_12: true +ubtu20cis_rule_4_2_13: true +ubtu20cis_rule_4_2_14: true +ubtu20cis_rule_4_2_15: true +ubtu20cis_rule_4_2_16: true +ubtu20cis_rule_4_2_17: true +ubtu20cis_rule_4_2_18: true +ubtu20cis_rule_4_2_19: true +ubtu20cis_rule_4_2_20: true +ubtu20cis_rule_4_2_21: true +ubtu20cis_rule_4_2_22: true +ubtu20cis_rule_4_3_1: true +ubtu20cis_rule_4_3_2: true +ubtu20cis_rule_4_3_3: true +ubtu20cis_rule_4_3_4: true +ubtu20cis_rule_4_3_5: true +ubtu20cis_rule_4_3_6: true +ubtu20cis_rule_4_3_7: true +ubtu20cis_rule_4_4_1: true +ubtu20cis_rule_4_4_2: true +ubtu20cis_rule_4_4_3: true +ubtu20cis_rule_4_4_4: true +ubtu20cis_rule_4_4_5: true + 
+ubtu20cis_rule_4_5_1_1: true +ubtu20cis_rule_4_5_1_2: true +ubtu20cis_rule_4_5_1_3: true +ubtu20cis_rule_4_5_1_4: true +ubtu20cis_rule_4_5_1_5: true +ubtu20cis_rule_4_5_1_6: true +ubtu20cis_rule_4_5_1_7: true +ubtu20cis_rule_4_5_1: true +ubtu20cis_rule_4_5_2: true +ubtu20cis_rule_4_5_3: true +ubtu20cis_rule_4_5_4: true +ubtu20cis_rule_4_5_5: true +ubtu20cis_rule_4_5_6: true +ubtu20cis_rule_4_5_7: true # Section 5 Fixes -# Section 5 is Access, Authentication, and Authorization (Configure time-based job schedulers, Configure sudo, Configure SSH Server, Configure PAM -# and User Accounts and Environment) -ubtu20cis_rule_5_1_1: true -ubtu20cis_rule_5_1_2: true +# Section 5 is Logging and Auditing + +# journald +ubtu20cis_rule_5_1_1_1_1: true +ubtu20cis_rule_5_1_1_1_2: true +ubtu20cis_rule_5_1_1_1_3: true +ubtu20cis_rule_5_1_1_1_4: true +ubtu20cis_rule_5_1_1_2: true +ubtu20cis_rule_5_1_1_3: true +ubtu20cis_rule_5_1_1_4: true +ubtu20cis_rule_5_1_1_5: true +ubtu20cis_rule_5_1_1_6: true +ubtu20cis_rule_5_1_1_7: true + +# rsyslog + +ubtu20cis_rule_5_1_2_1: true +ubtu20cis_rule_5_1_2_2: true +ubtu20cis_rule_5_1_2_3: true +ubtu20cis_rule_5_1_2_4: true +ubtu20cis_rule_5_1_2_5: true +ubtu20cis_rule_5_1_2_6: true +ubtu20cis_rule_5_1_2_7: true + ubtu20cis_rule_5_1_3: true -ubtu20cis_rule_5_1_4: true -ubtu20cis_rule_5_1_5: true -ubtu20cis_rule_5_1_6: true -ubtu20cis_rule_5_1_7: true -ubtu20cis_rule_5_1_8: true -ubtu20cis_rule_5_1_9: true -ubtu20cis_rule_5_2_1: true -ubtu20cis_rule_5_2_2: true -ubtu20cis_rule_5_2_3: true -ubtu20cis_rule_5_3_1: true -ubtu20cis_rule_5_3_2: true -ubtu20cis_rule_5_3_3: true -ubtu20cis_rule_5_3_4: true -ubtu20cis_rule_5_3_5: true -ubtu20cis_rule_5_3_6: true -ubtu20cis_rule_5_3_7: true -ubtu20cis_rule_5_3_8: true -ubtu20cis_rule_5_3_9: true -ubtu20cis_rule_5_3_10: true -ubtu20cis_rule_5_3_11: true -ubtu20cis_rule_5_3_12: true -ubtu20cis_rule_5_3_13: true -ubtu20cis_rule_5_3_14: true -ubtu20cis_rule_5_3_15: true -ubtu20cis_rule_5_3_16: true -ubtu20cis_rule_5_3_17: true -ubtu20cis_rule_5_3_18: true -ubtu20cis_rule_5_3_19: true -ubtu20cis_rule_5_3_20: true -ubtu20cis_rule_5_3_21: true -ubtu20cis_rule_5_3_22: true -ubtu20cis_rule_5_4_1: true -ubtu20cis_rule_5_4_2: true -ubtu20cis_rule_5_4_3: true -ubtu20cis_rule_5_4_4: true -ubtu20cis_rule_5_5_1_1: true -ubtu20cis_rule_5_5_1_2: true -ubtu20cis_rule_5_5_1_3: true -ubtu20cis_rule_5_5_1_4: true -ubtu20cis_rule_5_5_1_5: true -ubtu20cis_rule_5_5_2: true -ubtu20cis_rule_5_5_3: true -ubtu20cis_rule_5_5_4: true -ubtu20cis_rule_5_5_5: true -ubtu20cis_rule_5_6: true -ubtu20cis_rule_5_7: true + +# Auditd +ubtu20cis_rule_5_2_1_1: true +ubtu20cis_rule_5_2_1_2: true +ubtu20cis_rule_5_2_1_3: true +ubtu20cis_rule_5_2_1_4: true +ubtu20cis_rule_5_2_2_1: true +ubtu20cis_rule_5_2_2_2: true +ubtu20cis_rule_5_2_2_3: true + +ubtu20cis_rule_5_2_3_1: true +ubtu20cis_rule_5_2_3_2: true +ubtu20cis_rule_5_2_3_3: true +ubtu20cis_rule_5_2_3_4: true +ubtu20cis_rule_5_2_3_5: true +ubtu20cis_rule_5_2_3_6: true +ubtu20cis_rule_5_2_3_7: true +ubtu20cis_rule_5_2_3_8: true +ubtu20cis_rule_5_2_3_9: true +ubtu20cis_rule_5_2_3_10: true +ubtu20cis_rule_5_2_3_11: true +ubtu20cis_rule_5_2_3_12: true +ubtu20cis_rule_5_2_3_13: true +ubtu20cis_rule_5_2_3_14: true +ubtu20cis_rule_5_2_3_15: true +ubtu20cis_rule_5_2_3_16: true +ubtu20cis_rule_5_2_3_17: true +ubtu20cis_rule_5_2_3_18: true +ubtu20cis_rule_5_2_3_19: true +ubtu20cis_rule_5_2_3_20: true +ubtu20cis_rule_5_2_3_21: true + +ubtu20cis_rule_5_2_4_1: true +ubtu20cis_rule_5_2_4_2: true +ubtu20cis_rule_5_2_4_3: true 
+ubtu20cis_rule_5_2_4_4: true +ubtu20cis_rule_5_2_4_5: true +ubtu20cis_rule_5_2_4_6: true +ubtu20cis_rule_5_2_4_7: true +ubtu20cis_rule_5_2_4_8: true +ubtu20cis_rule_5_2_4_9: true +ubtu20cis_rule_5_2_4_10: true +ubtu20cis_rule_5_2_4_11: true # Section 6 Fixes # Section is Systme Maintenance (System File Permissions and User and Group Settings) @@ -320,7 +385,7 @@ ubtu20cis_rule_6_1_10: true ubtu20cis_rule_6_1_11: true ubtu20cis_rule_6_1_12: true ubtu20cis_rule_6_1_13: true -ubtu20cis_rule_6_1_14: true + ubtu20cis_rule_6_2_1: true ubtu20cis_rule_6_2_2: true ubtu20cis_rule_6_2_3: true @@ -333,11 +398,6 @@ ubtu20cis_rule_6_2_9: true ubtu20cis_rule_6_2_10: true ubtu20cis_rule_6_2_11: true ubtu20cis_rule_6_2_12: true -ubtu20cis_rule_6_2_13: true -ubtu20cis_rule_6_2_14: true -ubtu20cis_rule_6_2_15: true -ubtu20cis_rule_6_2_16: true -ubtu20cis_rule_6_2_17: true # Service configuration variables, set to true to keep service ubtu20cis_allow_autofs: false @@ -345,6 +405,7 @@ ubtu20cis_allow_usb_storage: false ubtu20cis_avahi_server: false ubtu20cis_cups_server: false ubtu20cis_dhcp_server: false +ubtu20cis_dnsmasq_server: false ubtu20cis_ldap_server: false ubtu20cis_nfs_server: false ubtu20cis_dns_server: false @@ -379,22 +440,16 @@ ubtu20cis_ipv6_required: false # false means you do not require a desktop ubtu20cis_desktop_required: false +# UID settings for interactive users +# These are discovered via logins.def if set true +discover_int_uid: false +min_int_uid: 1000 +max_int_uid: 65533 + # Section 1 Control Variables -# Control 1.1.2/1.1.3/1.1.4/1.1.5 -# ubtu20cis_tmp_fstab_options are the file system options for the fstabs configuration -# To conform to CIS cotnrol 1.1.2 could use any settings -# To conform to CIS control 1.1.3 nodev needs to be present -# To conform to CIS control 1.1.4 nosuid needs to be present -# To conform to CIS control 1.1.5 noexec needs to present -ubtu20cis_tmp_fstab_options: "defaults,rw,nosuid,nodev,noexec,relatime" - -# Control 1.1.6/1.1.7/1.1.8/1.1.9 -# ubtu20cis_dev_shm_fstab_options are the fstab file system options for /dev/shm -# To conform to CIS control 1.1.6 could use any settings -# To conform to CIS control 1.1.7 nodev needs to be present -# To conform to CIS control 1.1.8 nosuid needs to be present -# To conform to CIS control 1.1.9 noexec needs to be present -ubtu20cis_dev_shm_fstab_options: "defaults,noexec,nodev,nosuid" +# This will force the tmp_mnt type if not correctly discovered +# either tmp_systemd or fstab +expected_tmp_mnt: fstab # Control 1.3.1 - allow aide to be configured ubtu20cis_config_aide: true @@ -411,16 +466,19 @@ ubtu20cis_aide_cron: aide_month: '*' aide_weekday: '*' -# Control 1.4.2 +# Control 1.4.1 # THIS VARIABLE SHOULD BE CHANGED # This will fail assertion if not changed and rule 1.4.2 is enabled # insert password as per output of running grub-mkpasswd-pbkdf2 # refers to https://help.ubuntu.com/community/Grub2/Passwords +ubtu20cis_set_boot_pass: false ubtu20cis_set_grub_password: true ubtu20cis_grub_user_file: /etc/grub.d/40_custom -ubtu20cis_grub_users: root -ubtu20cis_grub_pw: "grub.pbkdf2.sha512.10000" +ubtu20cis_grub_user: root +ubtu20cis_grub_file: /boot/grub/grub.cfg +ubtu20cis_bootloader_password_hash: "grub.pbkdf2.sha512.10000" + # Change the following value to true if you wish to be prompted to get past grub bootloader ubtu20cis_ask_passwd_to_boot: false @@ -431,7 +489,7 @@ ubtu20cis_root_pw: $6$ # 1.7.1 # disable dynamic motd to stop extra sshd message from appearing -ubtu20cis_disable_dynamic_motd: false 
+ubtu20cis_disable_dynamic_motd: true # Control 1.8.2 # This will be the motd banner must not contain the below items in order to be compliant with Ubuntu 20 CIS @@ -443,32 +501,27 @@ ubtu20cis_warning_banner: | # Control 2.1.1.1 # ubtu20cis_time_sync_tool is the tool in which to synchronize time # The two options are chrony, ntp, or systemd-timesyncd -ubtu20cis_time_sync_tool: "chrony" - -# Control 2.1.1.2 -# ubtu20cis_ntp_server_list is the list ntp servers -# ubtu20cis_ntp_fallback_server_list is the list of fallback NTP servers -ubtu20cis_ntp_server_list: "0.debian.pool.ntp.org 1.debian.pool.ntp.org" -ubtu20cis_ntp_fallback_server_list: "2.debian.pool.ntp.org 3.debian.pool.ntp.org" - -# Control 2.1.1.3/2.1.1.4 -# ubtu20cis_chrony_server_options is the server options for chrony -ubtu20cis_chrony_server_options: "minpoll 8" -# ubtu20cis_time_synchronization_servers are the synchronization servers -ubtu20cis_time_synchronization_servers: - - 0.pool.ntp.org - - 1.pool.ntp.org - - 2.pool.ntp.org - - 3.pool.ntp.org -# ubtu20cis_chrony_user is the user that chrony will use, default is _chrony -ubtu20cis_chrony_user: "_chrony" -# ubtu20cis_ntp_server_options is the server options for ntp -ubtu20cis_ntp_server_options: "iburst" - -# Control 2.1.15 -# ubtu20_cis_mail_transfer_agent is the mail transfer agent in use -# The options are exim4, postfix or other -# ubtu20_cis_mail_transfer_agent: "other" +ubtu20cis_time_sync_tool: "systemd-timesyncd" + +# This setting allows either the 'pool' or 'server' option to be used with the values below. +# Only one will be applied +ubtu20cis_chrony_timesource: pool + +# Control 2.1.2.1 2.1.3.1 +# Time settings used for all versions +ubtu20cis_time_pool: + - name: time.nist.gov + options: iburst maxsources 4 + +ubtu20cis_time_servers: + - name: time-a-g.nist.gov + options: iburst + - name: time-b-g.nist.gov + options: iburst + - name: time-c-g.nist.gov + options: iburst + +ubtu20cis_is_mail_server: false # Section 3 Control Variables # Control 3.1.2 @@ -479,7 +532,7 @@ ubtu20cis_install_network_manager: true # The valid options to use are ufw, nftables, or iptables # Warning!!
nftables is not supported in this role and will only message out if nftables is selected # If using nftables please manually adjust firewall settings -ubtu20cis_firewall_package: "iptables" +ubtu20cis_firewall_package: "ufw" # Control 3.5.1.5 # ubtu20cis_ufw_allow_out_ports are the ports for the firewall to allow @@ -504,7 +557,6 @@ ubtu20cis_ufw_allow_in: # nftables configs are applied to # ubtu20cis_nftables_table_name: "inet filter" - # Controls 3.5.3.2.1 through 3.5.3.3.4 # The iptables module only writes to memory which means a reboot could revert settings # The below toggle will install iptables-persistent and save the rules in memory (/etc/iptables/rules.v4 or rules.v6) @@ -512,84 +564,34 @@ ubtu20cis_ufw_allow_in: ubtu20cis_save_iptables_cis_rules: true # Section 4 Control Variables -# Control 4.1.1.4 -# ubtu20cis_audit_back_log_limit is the audit_back_log limit and should be set to a sufficient value -# The example from CIS uses 8192 -ubtu20cis_audit_back_log_limit: 8192 - -# Control 4.1.2.1 -# ubtu20cis_max_log_file_size is largest the log file will become in MB -# This shoudl be set based on your sites policy -ubtu20cis_max_log_file_size: 10 - -# Control 4.1.2.2 -ubtu20cis_auditd: - admin_space_left_action: halt - max_log_file_action: keep_logs - -# Control 4.2.1.3 -# ubtu20cis_rsyslog_ansible_managed will toggle ansible automated configurations of rsyslog -# You should set the rsyslog to your side specific needs. This toggle will use the example from -# page 347 to set rsyslog loggin based on those configuration suggestions. Settings can be seen -# in control 4.2.1.3 -ubtu20cis_rsyslog_ansible_managed: true - -# Control 4.2.1.5 -# ubtu20cis_remote_log_server is the remote logging server -ubtu20cis_remote_log_server: 192.168.2.100 - -# Control 4.2.1.6 -ubtu20cis_system_is_log_server: true - -# Control 4.3 -# ubtu20cis_logrotate is the log rotate frequencey. Options are daily, weekly, monthly, and yearly -ubtu20cis_logrotate: "daily" - -# Control 4.3 -# ubtu20cis_logrotate_create_settings are the settings for the create parameter in /etc/logrotate.conf -# The permissions need to be 640 or more restrictive. -# If you would like to include user/group settings to this parameter format the var as below -# ubtu20cis_logrotate_create_settings: "0640 root utmp" -ubtu20cis_logrotate_create_settings: "0640" - -# Section 5 Control Variables -# Control 5.2.1 -# ubtu20cis_sudo_package is the name of the sudo package to install -# The possible values are "sudo" or "sudo-ldap" -ubtu20cis_sudo_package: "sudo" - -# Control 5.2.3 -# ubtu20cis_sudo_logfile is the path and file name of the sudo log file -ubtu20cis_sudo_logfile: "/var/log/sudo.log" # ubtu20cis_sshd will contain all sshd variables. The task association and variable descriptions for each section are listed below -# Control 5.3.4 +# Control 4.2.x # allow_users, allow_groups, deny_users, and deny_groups. These are lists of users and groups to allow or deny ssh access to # These are lists that are just space delimited, for example allow_users: "vagrant ubuntu" for the vagrant and ubuntu users -# Control 5.3.5 + # log_level is the log level variable. This needs to be set to VERBOSE or INFO to conform to CIS standards -# Control 5.3.7 + # max_auth_tries is the max number of authentication attampts per connection. 
-# This value should be 4 or less to conform to CIS standards -# Control 5.3.13 +# This value should be 10 or less to conform to CIS standards + # ciphers is a comma seperated list of site approved ciphers # ONLY USE STRONG CIPHERS. Weak ciphers are listed below # DO NOT USE: 3des-cbc, aes128-cbc, aes192-cbc, and aes256-cbc -# Control 5.3.14 + # MACs is the comma seperated list of site approved MAC algorithms that SSH can use during communication # ONLY USE STRONG ALGORITHMS. Weak algorithms are listed below # DO NOT USE: hmac-md5, hmac-md5-96, hmac-ripemd160, hmac-sha1, hmac-sha1-96, umac-64@openssh.com, umac-128@openssh.com, hmac-md5-etm@openssh.com, # hmac-md5-96-etm@openssh.com, hmac-ripemd160-etm@openssh.com, hmac-sha1-etm@openssh.com, hmac-sha1-96-etm@openssh.com, umac-64-etm@openssh.com, umac-128-etm@openssh.com -# Control 5.3.15 + # kex_algorithms is comma seperated list of the algorithms for key exchange methods # ONLY USE STRONG ALGORITHMS. Weak algorithms are listed below # DO NOT USE: diffie-hellman-group1-sha1, diffie-hellman-group14-sha1, diffie-hellman-group-exchange-sha1 -# Control 5.3.16 + # client_alive_interval is the amount of time idle before ssh session terminated. Set to 300 or less to conform to CIS standards # client_alive_count_max will send client alive messages at the configured interval. Set to 3 or less to conform to CIS standards -# Control 5.3.17 + # login_grace_time is the time allowed for successful authentication to the SSH server. This needs to be set to 60 seconds or less to conform to CIS standards -# Control 5.3.22 # max_sessions is the max number of open sessions permitted. Set the value to 4 or less to conform to CIS standards ubtu20cis_sshd: log_level: "INFO" @@ -607,70 +609,126 @@ ubtu20cis_sshd: # deny_users: # deny_groups: -# 5.4.2 pam_tally2 login options allows for audit to be removed if required +# Control 4.3.x +# ubtu20cis_sudo_package is the name of the sudo package to install +# The possible values are "sudo" or "sudo-ldap" +ubtu20cis_sudo_package: "sudo" + +# ubtu20cis_sudo_logfile is the path and file name of the sudo log file +ubtu20cis_sudo_logfile: "/var/log/sudo.log" +ubtu20cis_sudo_timestamp_timeout: 15 +ubtu20cis_sugroup: nosugroup + +# Controls 4.4.x +ubtu20cis_passwd_hash_algo: sha512 +# pam_tally2 login options allows for audit to be removed if required ubtu20cis_pamtally2_login_opts: 'onerr=fail audit silent deny=5 unlock_time=900' -# Control 5.4.3 # ubtu20cis_pamd_pwhistory_remember is number of password chnage cycles a user can re-use a password # This needs to be 5 or more to conform to CIS standards ubtu20cis_pamd_pwhistory_remember: 5 +# Control 4.5.x # ubtu20cis_pass will be password based variables -# Control 5.5.1.1 # pass_min_days is the min number of days allowed between changing passwords. Set to 1 or more to conform to CIS standards -# Control 5.5.1.2 # max_days forces passwords to expire in configured number of days. Set to 365 or less to conform to CIS standards -# Control 5.5.1.3 # warn_age is how many days before pw expiry the user will be warned. Set to 7 or more to conform to CIS standards -# Control 5.5.1.4 # inactive the number of days of inactivity before the account will lock. Set to 30 day sor less to conform to CIS standards ubtu20cis_pass: max_days: 365 min_days: 1 warn_age: 7 inactive: 30 + character_changed: 5 + max_repeat_letters: 3 -# Control 5.5.4 # ubtu120cis_bash_umask is the umask to set in the /etc/bash.bashrc and /etc/profile. 
# The value needs to be 027 or more restrictive to comply with CIS standards ubtu20cis_bash_umask: '027' -# Control 5.5.5 # Session timeout setting file (TMOUT setting can be set in multiple files) # Timeout value is in seconds. Set value to 900 seconds or less ubtu20cis_shell_session_timeout: file: /etc/profile.d/tmout.sh timeout: 900 -# Control 5.7 -# ubtu20cis_su_group is the su group to use with pam_wheel -ubtu20cis_su_group: "wheel" +# Section 5 Control Variables - Logging and Auditing + +# Logging +# Can be rsyslog or journald - anything else is not supported and will skip these steps +ubtu20cis_syslog_service: 'rsyslog' +# Set to true if this host is the journald log server that clients upload to +journald_log_server: false + +# These entries will need to be populated by the user +# Journald Settings +ubtu20cis_journal_upload: + remote_url: + serverkeyfile: + servercertfile: + ubtu20cis_journald: + +ubtu20cis_journald_log_rotate: + systemmaxuse: + systemkeepfree: + runtimemaxuse: + runtimekeepfree: + maxfilesec: + +## rsyslog +ubtu20cis_system_is_log_server: false +# ubtu20cis_rsyslog_ansible_managed will toggle ansible automated configurations of rsyslog +# You should set the rsyslog configuration to your site-specific needs. +ubtu20cis_rsyslog_ansible_managed: true + +# ubtu20cis_remote_log_server is the remote logging server +ubtu20cis_remote_log_server: 192.168.2.100 + +# ubtu20cis_audit_back_log_limit is the audit backlog limit and should be set to a sufficient value +# The example from CIS uses 8192 +ubtu20cis_audit_back_log_limit: 8192 + +# ubtu20cis_max_log_file_size is the largest the log file will grow to, in MB +# This should be set based on your site's policy +ubtu20cis_max_log_file_size: 10 + +# auditd settings +ubtu20cis_auditd: + admin_space_left_action: halt + max_log_file_action: keep_logs + +# ubtu20cis_logrotate is the log rotate frequency. Options are daily, weekly, monthly, and yearly +ubtu20cis_logrotate: "daily" + +# Control 4.3 +# ubtu20cis_logrotate_create_settings are the settings for the create parameter in /etc/logrotate.conf +# The permissions need to be 640 or more restrictive.
+# If you would like to include user/group settings to this parameter format the var as below +# ubtu20cis_logrotate_create_settings: "0640 root utmp" +ubtu20cis_logrotate_create_settings: "0640" # Section 6 Control Variables -# Control 6.1.10 + # ubtu20cis_no_world_write_adjust will toggle the automated fix to remove world-writable perms from all files # Setting to true will remove all world-writable permissions, and false will leave as-is ubtu20cis_no_world_write_adjust: true -# Control 6.1.11 # ubtu20cis_un_owned_owner is the owner to set files to that have no owner ubtu20cis_unowned_owner: root # ubtu20cis_no_owner_adjust will toggle the automated fix to give a user to unowned files/directories # true will give the owner from ubtu20cis_un_owned_owner to all unowned files/directories and false will skip ubtu20cis_no_owner_adjust: true -# Control 6.1.12 + # ubtu20cis_ungrouped_group is the group to set files to that have no group ubtu20cis_ungrouped_group: root # ubtu20cis_no_group_adjust will toggle the automated fix to give a group to ungrouped files/directories # true will give the group from ubtu20cis_un_owned_group to all ungrouped files/directories and false will skip ubtu20cis_no_group_adjust: true -# Control 6.1.13 # ubtu20cis_suid_adjust is the toggle to remove the SUID bit from all files on all mounts # Set to true this role will remove that bit, set to false we will just warn about the files ubtu20cis_suid_adjust: false -# Control 6.1.14 # ubtu20cis_sgid_adjust is the toggle to remove the GUID bit from all files on all mounts # Set to true this role will remove that bit, set to false we will just warn about the files ubtu20cis_sgid_adjust: false @@ -679,13 +737,12 @@ ubtu20cis_sgid_adjust: false # ubtu20cis_passwd_label: "{{ (this_item | default(item)).id }}: {{ (this_item | default(item)).dir }}" ubtu20cis_passwd_label: "{{ (this_item | default(item)).id }}: {{ (this_item | default(item)).dir }}" - #### Audit Configuration Settings #### ### Audit binary settings ### audit_bin_version: - release: v0.3.21 - checksum: 'sha256:9a9200779603acf0353d2c0e85ae46e083596c10838eaf4ee050c924678e4fe3' + release: v0.3.23 + checksum: 'sha256:9e9f24e25f86d6adf2e669a9ffbe8c3d7b9b439f5f877500dea02ba837e10e4d' audit_bin_path: /usr/local/bin/ audit_bin: "{{ audit_bin_path }}goss" audit_format: json diff --git a/handlers/main.yml b/handlers/main.yml index a4ac7add..925cc021 100644 --- a/handlers/main.yml +++ b/handlers/main.yml @@ -1,6 +1,6 @@ --- -- name: remount tmp +- name: Remount tmp ansible.posix.mount: name: /tmp state: remounted @@ -10,12 +10,32 @@ name: /dev/shm state: remounted -- name: remount var_tmp +- name: Remount var + ansible.posix.mount: + name: /var + state: remounted + +- name: Remount var_tmp ansible.posix.mount: name: /var/tmp state: remounted -- name: grub update +- name: Remount var_log + ansible.posix.mount: + name: /var/log + state: remounted + +- name: Remount var_log_audit + ansible.posix.mount: + name: /var/log/audit + state: remounted + +- name: Remount home + ansible.posix.mount: + name: /home + state: remounted + +- name: Grub update ansible.builtin.shell: update-grub notify: change_requires_reboot failed_when: false @@ -25,14 +45,19 @@ failed_when: false - name: restart postfix - ansible.builtin.service: + ansible.builtin.systemd: name: postfix state: restarted - name: restart exim4 - ansible.builtin.service: + ansible.builtin.systemd: name: exim4 state: restarted + +- name: restart timeservice + ansible.builtin.systemd: + name: "{{ ubtu20cis_time_sync_tool }}" + 
state: restarted # --------------- # --------------- # This is not a control however using the iptables module only writes to memory @@ -84,13 +109,18 @@ tags: - skip_ansible_lint -- name: restart rsyslog - ansible.builtin.service: - name: rsyslog +- name: restart syslog service + ansible.builtin.systemd: + name: "{{ ubtu20cis_syslog_service }}" + state: restarted + +- name: restart journal-upload + ansible.builtin.systemd: + name: systemd-journal-upload + state: restarted - name: restart sshd - ansible.builtin.service: + ansible.builtin.systemd: name: sshd state: restarted diff --git a/meta/main.yml b/meta/main.yml index fe9b5c4b..45ec8eff 100644 --- a/meta/main.yml +++ b/meta/main.yml @@ -7,7 +7,7 @@ galaxy_info: license: MIT role_name: ubuntu20_cis namespace: mindpointgroup - min_ansible_version: 2.9.0 + min_ansible_version: 2.10.1 platforms: - name: Ubuntu versions: diff --git a/tasks/main.yml b/tasks/main.yml index 062d3c55..5afb0e98 100644 --- a/tasks/main.yml +++ b/tasks/main.yml @@ -33,7 +33,52 @@ fail_msg: "The root password set does not pass checks" when: - ubtu20cis_set_root_password - - ubtu20cis_rule_1_4_4 + - ubtu20cis_rule_1_4_3 + +# This check should always run, as skipping it can allow unintended issues to slip through. +- name: "Check password set for connecting user" + block: + - name: "Capture current password state of connecting user" + ansible.builtin.shell: "grep {{ ansible_env.SUDO_USER }} /etc/shadow | awk -F: '{print $2}'" + changed_when: false + failed_when: false + check_mode: false + register: ansible_user_password_set + + - name: "Assert that password set for {{ ansible_env.SUDO_USER }} and account not locked" + ansible.builtin.assert: + that: ansible_user_password_set.stdout | length != 0 and ansible_user_password_set.stdout != "!!" + fail_msg: "You have {{ sudo_password_rule }} enabled but the user = {{ ansible_env.SUDO_USER }} has no password set - this can break access" + success_msg: "You have a password set for sudo user {{ ansible_env.SUDO_USER }}" + vars: + sudo_password_rule: ubtu20cis_rule_4_3_4 + when: + - ubtu20cis_rule_4_3_4 + - ansible_env.SUDO_USER is defined + tags: + - always + +- name: Setup rules if container + block: + - name: Discover and set container variable if required + ansible.builtin.set_fact: + system_is_container: true + + - name: Load variable for container + ansible.builtin.include_vars: + file: "{{ container_vars_file }}" + + - name: Output if discovered is a container + ansible.builtin.debug: + msg: system has been discovered as a container + when: + - system_is_container + when: + - ansible_connection == 'docker' or + ansible_virtualization_type in ["docker", "lxc", "openvz", "podman", "container"] + tags: + - container_discovery + - always - name: Import preliminary tasks ansible.builtin.import_tasks: prelim.yml @@ -105,20 +150,6 @@ - name: flush handlers ansible.builtin.meta: flush_handlers -- name: reboot system - block: - - name: reboot system if not skipped - ansible.builtin.reboot: - when: - - not skip_reboot - - - name: Warning a reboot required but skip option set - ansible.builtin.debug: - msg: "Warning!!
changes have been made that require a reboot to be implemented but skip reboot was set - Can affect compliance check results" - changed_when: true - when: - - skip_reboot - - name: run post remediation tasks ansible.builtin.import_tasks: post.yml tags: diff --git a/tasks/prelim.yml b/tasks/prelim.yml index 44672b91..e1a9eeda 100644 --- a/tasks/prelim.yml +++ b/tasks/prelim.yml @@ -1,5 +1,43 @@ --- +- name: Gather the package facts + ansible.builtin.package_facts: + manager: auto + tags: + - always + +- name: "PRELIM | Section 1.1 | Create list of mount points" + ansible.builtin.set_fact: + mount_names: "{{ ansible_mounts | map(attribute='mount') | list }}" + tags: + - always + +- name: PRELIM | Capture tmp mount type | discover mount tmp type + block: + - name: PRELIM | Capture tmp mount type | discover mount tmp type + ansible.builtin.shell: systemctl is-enabled tmp.mount + register: discover_tmp_mnt_type + changed_when: false + failed_when: discover_tmp_mnt_type.rc not in [ 0, 1 ] + + - name: PRELIM | Capture tmp mount type | Set to expected_tmp_mnt variable + ansible.builtin.set_fact: + tmp_mnt_type: "{{ expected_tmp_mnt }}" + when: "'generated' in discover_tmp_mnt_type.stdout" + + - name: PRELIM | Capture tmp mount type | Set systemd service + ansible.builtin.set_fact: + tmp_mnt_type: tmp_systemd + when: "'generated' not in discover_tmp_mnt_type.stdout" + when: + - "'/tmp' in mount_names" + - ubtu20cis_rule_1_1_2_1 or + ubtu20cis_rule_1_1_2_2 or + ubtu20cis_rule_1_1_2_3 or + ubtu20cis_rule_1_1_2_4 + tags: + - always + - name: "PRELIM | register if snap being used" ansible.builtin.shell: df -h | grep -wc "/snap" changed_when: false @@ -7,7 +45,7 @@ register: snap_pkg_mgr tags: - rule_1.1.1.6 - - section1 + - always - name: "PRELIM | Run apt update" ansible.builtin.package: @@ -18,7 +56,7 @@ tags: - rule_1.3.1 - rule_1.9 - - section1 + - always - name: "PRELIM | Check for autofs service" ansible.builtin.shell: "systemctl show autofs | grep LoadState | cut -d = -f 2" @@ -26,10 +64,11 @@ changed_when: false check_mode: false when: - - ubtu20cis_rule_1_1_23 + - ubtu20cis_rule_1_1_9 tags: - - rule_1.1.23 + - rule_1.1.9 - section1 + - always - name: "PRELIM | Check for avahi-daemon service" ansible.builtin.shell: "systemctl show avahi-daemon | grep LoadState | cut -d = -f 2" @@ -37,10 +76,10 @@ changed_when: false check_mode: false when: - - ubtu20cis_rule_2_1_3 + - ubtu20cis_rule_2_2_2 tags: - - rule_2.1.3 - - section2 + - rule_2.2.2 + - always - name: "PRELIM | Find wireless network devices" ansible.builtin.shell: find /sys/class/net/*/wireless | awk -F'/' awk '{print $5}' @@ -51,7 +90,69 @@ - ubtu20cis_rule_3_1_2 tags: - rule_3.1.2 - - section3 + - always + +- name: "PRELIM | PATCH | Ensure auditd is installed" + block: + - name: "PRELIM | PATCH | Ensure auditd is installed" + ansible.builtin.package: + name: ['auditd', 'audispd-plugins'] + state: present + + - name: "PRELIM | 5.2.4.5 | Audit conf and rules files | list files" + ansible.builtin.find: + path: /etc/audit/ + file_type: file + recurse: true + patterns: '*.conf,*.rules' + register: auditd_conf_files + when: + - ubtu20cis_rule_5_2_1_1 + - "'auditd' not in ansible_facts.packages or + 'auditd-plugins' not in ansible_facts.packages" + - ubtu20cis_rule_5_2_4_5 or + ubtu20cis_rule_5_2_4_6 or + ubtu20cis_rule_5_2_4_7 + tags: + - level2-server + - level2-workstation + - patch + - auditd + - always + +- name: "PRELIM | Discover Interactive UID MIN and MIN from logins.def" + block: + - name: "PRELIM | Capture UID_MIN information from 
logins.def" + ansible.builtin.shell: grep -w "^UID_MIN" /etc/login.defs | awk '{print $NF}' + changed_when: false + register: uid_min_id + + - name: "PRELIM | Capture UID_MAX information from logins.def" + ansible.builtin.shell: grep -w "^UID_MAX" /etc/login.defs | awk '{print $NF}' + changed_when: false + register: uid_max_id + + - name: "PRELIM | Capture GID_MIN information from logins.def" + ansible.builtin.shell: grep -w "^GID_MIN" /etc/login.defs | awk '{print $NF}' + changed_when: false + register: gid_min_id + + - name: "PRELIM | set_facts for interactive uid/gid" + ansible.builtin.set_fact: + min_int_uid: "{{ uid_min_id.stdout }}" + max_int_uid: "{{ uid_max_id.stdout }}" + min_int_gid: "{{ gid_min_id.stdout }}" + when: + - not discover_int_uid + tags: + - always + +- name: "PRELIM | Interactive User accounts" + ansible.builtin.shell: 'cat /etc/passwd | cut -d: -f5,6,7 | grep -Ev ":(/usr|)/(s|)bin/nologin|/root|:/bin:" | cut -d: -f2' + changed_when: false + register: interactive_users_home + tags: + - always - name: "PRELIM | Install Network-Manager" ansible.builtin.package: @@ -65,7 +166,19 @@ - not ubtu20cis_system_is_container tags: - rule_3.1.2 - - section3 + - always + +- name: "PRELIM | 4.3.4 | Find all sudoers files." + ansible.builtin.shell: "find /etc/sudoers /etc/sudoers.d/ -type f ! -name '*~' ! -name '*.*'" + changed_when: false + failed_when: false + check_mode: false + register: ubtu20cis_sudoers_files + when: + - ubtu20cis_rule_4_3_4 or + ubtu20cis_rule_4_3_5 + tags: + - always - name: "PRELIM | Install ACL" ansible.builtin.package: @@ -75,12 +188,6 @@ - ubtu20cis_rule_6_2_6 tags: - rule_6.2.6 - - section6 - -- name: "PRELIM | Section 1.1 | Create list of mount points" - ansible.builtin.set_fact: - mount_names: "{{ ansible_mounts | map(attribute='mount') | list }}" - tags: - always - name: "PRELIM | List users accounts" diff --git a/tasks/section_1/cis_1.1.1.x.yml b/tasks/section_1/cis_1.1.1.x.yml index a2dbcae3..d0f61a3d 100644 --- a/tasks/section_1/cis_1.1.1.x.yml +++ b/tasks/section_1/cis_1.1.1.x.yml @@ -8,6 +8,14 @@ line: install cramfs /bin/true create: true + - name: "1.1.1.1 | PATCH | Ensure mounting of cramfs filesystems is disabled | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist cramfs(\\s|$)" + line: "blacklist cramfs" + create: true + mode: '0600' + - name: "1.1.1.1 | PATCH | Ensure mounting of cramfs filesystems is disabled | Disable cramfs" community.general.modprobe: name: cramfs @@ -37,6 +45,14 @@ name: freevxfs state: absent when: ansible_connection != 'docker' + + - name: "1.1.1.2 | PATCH | Ensure mounting of freevxfs filesystems is disabled | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist freevxfs(\\s|$)" + line: "blacklist freevxfs" + create: true + mode: '0600' when: - ubtu20cis_rule_1_1_1_2 tags: @@ -61,6 +77,14 @@ name: jffs2 state: absent when: ansible_connection != 'docker' + + - name: "1.1.1.3 | PATCH | Ensure mounting of jffs2 filesystems is disabled | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist jffs2(\\s|$)" + line: "blacklist jffs2" + create: true + mode: '0600' when: - ubtu20cis_rule_1_1_1_3 tags: @@ -85,6 +109,14 @@ name: hfs state: absent when: ansible_connection != 'docker' + + - name: "1.1.1.4 | PATCH | Ensure mounting of hfs filesystems is disabled | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist 
hfs(\\s|$)" + line: "blacklist hfs" + create: true + mode: '0600' when: - ubtu20cis_rule_1_1_1_4 tags: @@ -109,6 +141,14 @@ name: hfsplus state: absent when: ansible_connection != 'docker' + + - name: "1.1.1.5 | PATCH | Ensure mounting of hfsplus filesystems is disabled | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist hfsplus(\\s|$)" + line: "blacklist hfsplus" + create: true + mode: '0600' when: - ubtu20cis_rule_1_1_1_5 tags: @@ -132,8 +172,16 @@ community.general.modprobe: name: squashfs state: absent - ignore_errors: true + ignore_errors: true # noqa: ignore-errors when: ansible_connection != 'docker' + + - name: "1.1.1.6 | PATCH | Ensure mounting of squashfs filesystems is disabled | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist squashfs(\\s|$)" + line: "blacklist squashfs" + create: true + mode: '0600' when: - ubtu20cis_rule_1_1_1_6 - snap_pkg_mgr.stdout == 0 @@ -159,11 +207,19 @@ name: udf state: absent when: ansible_connection != 'docker' + + - name: "1.1.1.7 | PATCH | Ensure mounting of udf filesystems is disabled | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist udf(\\s|$)" + line: "blacklist udf" + create: true + mode: '0600' when: - ubtu20cis_rule_1_1_1_7 tags: - - level1-server - - level1-workstation + - level2-server + - level2-workstation - automated - patch - rule_1.1.1.7 diff --git a/tasks/section_1/cis_1.1.10.yml b/tasks/section_1/cis_1.1.10.yml new file mode 100644 index 00000000..a36efa4f --- /dev/null +++ b/tasks/section_1/cis_1.1.10.yml @@ -0,0 +1,33 @@ +--- + +- name: "1.1.10 | PATCH | Disable USB Storage" + block: + - name: "1.1.10 | PATCH | Disable USB Storage | Set modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/usb_storage.conf + regexp: '^install usb-storage' + line: 'install usb-storage /bin/true' + create: true + + - name: "1.1.10 | PATCH | Disable USB Storage | Remove usb-storage module" + community.general.modprobe: + name: usb-storage + state: absent + when: ansible_connection != 'docker' + + - name: "1.1.10 | PATCH | Disable USB Storage | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: '^blacklist usb-storage' + line: 'blacklist usb-storage' + create: true + when: + - ubtu20cis_rule_1_1_10 + - not ubtu20cis_allow_usb_storage + tags: + - level1-server + - level2-workstation + - automated + - patch + - rule_1.1.10 + - usb_storage diff --git a/tasks/section_1/cis_1.1.2.x.yml b/tasks/section_1/cis_1.1.2.x.yml new file mode 100644 index 00000000..7a1be4cd --- /dev/null +++ b/tasks/section_1/cis_1.1.2.x.yml @@ -0,0 +1,88 @@ +--- + +- name: "1.1.2.1 | AUDIT | Ensure /tmp is a separate partition" + block: + - name: "1.1.2.1 | AUDIT | Ensure /tmp is a separate partition | Absent" + ansible.builtin.debug: + msg: "Warning!! {{ required_mount }} doesn't exist. 
This is a manual task" + + - name: "1.1.2.1 | WARN | Ensure /tmp is a separate partition | warn_count" + ansible.builtin.import_tasks: warning_facts.yml + vars: + warn_control_id: '1.1.2.1' + required_mount: '/tmp' + when: + - required_mount not in mount_names + - ubtu20cis_rule_1_1_2_1 + tags: + - level1-server + - level1-workstation + - audit + - mounts + - rule_1.1.2.1 + - tmp + +- name: | + "1.1.2.2 | PATCH | Ensure nodev option set on /tmp partition | tmp_systemd" + "1.1.2.3 | PATCH | Ensure noexec option set on /tmp partition | tmp_systemd" + "1.1.2.4 | PATCH | Ensure nosuid option set on /tmp partition | tmp_systemd" + ansible.builtin.template: + src: etc/systemd/system/tmp.mount.j2 + dest: /etc/systemd/system/tmp.mount + owner: root + group: root + mode: 0644 + notify: Remount tmp + with_items: + - "{{ ansible_mounts }}" + loop_control: + label: "{{ item.device }}" + when: + - item.mount == "/tmp" + - tmp_mnt_type == 'tmp_systemd' + - ubtu20cis_rule_1_1_2_1 or + ubtu20cis_rule_1_1_2_2 or + ubtu20cis_rule_1_1_2_3 or + ubtu20cis_rule_1_1_2_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.2.2 + - rule_1.1.2.3 + - rule_1.1.2.4 + - tmp + +- name: | + "1.1.2.2 | PATCH | Ensure nodev option set on /tmp partition | fstab" + "1.1.2.3 | PATCH | Ensure noexec option set on /tmp partition | fstab" + "1.1.2.4 | PATCH | Ensure nosuid option set on /tmp partition | fstab" + ansible.posix.mount: + path: /tmp + src: "{{ item.device }}" + state: present + fstype: "{{ item.fstype }}" + opts: defaults,{% if ubtu20cis_rule_1_1_2_2 %}nodev,{% endif %}{% if ubtu20cis_rule_1_1_2_3 %}noexec,{% endif %}{% if ubtu20cis_rule_1_1_2_4 %}nosuid{% endif %} + notify: remount tmp + with_items: + - "{{ ansible_mounts }}" + loop_control: + label: "{{ item.device }}" + when: + - item.mount == "/tmp" + - tmp_mnt_type == 'fstab' + - ubtu20cis_rule_1_1_2_1 or + ubtu20cis_rule_1_1_2_2 or + ubtu20cis_rule_1_1_2_3 or + ubtu20cis_rule_1_1_2_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.1.2 + - rule_1.1.2.2 + - rule_1.1.2.3 + - rule_1.1.2.4 + - tmp diff --git a/tasks/section_1/cis_1.1.3.x.yml b/tasks/section_1/cis_1.1.3.x.yml new file mode 100644 index 00000000..c959abea --- /dev/null +++ b/tasks/section_1/cis_1.1.3.x.yml @@ -0,0 +1,49 @@ +--- + +- name: "1.1.3.1 | AUDIT | Ensure separate partition exists for /var" + block: + - name: "1.1.3.1 | AUDIT | Ensure separate partition exists for /var | Absent" + ansible.builtin.debug: + msg: "Warning!! {{ required_mount }} doesn't exist. 
This is a manual task" + + - name: "1.1.3.1 | WARN | Ensure separate partition exists for /var | warn_count" + ansible.builtin.import_tasks: warning_facts.yml + vars: + warn_control_id: '1.1.3.1' + required_mount: '/var' + when: + - required_mount not in mount_names + - ubtu20cis_rule_1_1_3_1 + tags: + - level2-server + - level2-workstation + - automated + - audit + - rule_1.1.3.1 + - var + +- name: | + "1.1.3.2 | PATCH | Ensure /var partition includes the nodev option" + "1.1.3.3 | PATCH | Ensure /var partition includes the nosuid option" + ansible.posix.mount: + path: /var + src: "{{ item.device }}" + state: present + fstype: "{{ item.fstype }}" + opts: defaults,{% if ubtu20cis_rule_1_1_3_2 %}nodev,{% endif %}{% if ubtu20cis_rule_1_1_3_3 %}nosuid{% endif %} + notify: Remount var + loop: "{{ ansible_mounts }}" + loop_control: + label: "{{ item.device }}" + when: + - item.mount == "/var" + - ubtu20cis_rule_1_1_3_2 or + ubtu20cis_rule_1_1_3_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.3.2 + - rule_1.1.3.3 + - var diff --git a/tasks/section_1/cis_1.1.4.x.yml b/tasks/section_1/cis_1.1.4.x.yml new file mode 100644 index 00000000..f5ac21a1 --- /dev/null +++ b/tasks/section_1/cis_1.1.4.x.yml @@ -0,0 +1,52 @@ +--- + +- name: "1.1.4.1 | AUDIT | Ensure separate partition exists for /var/tmp " + block: + - name: "1.1.4.1 | AUDIT | Ensure separate partition exists for /var/tmp | Absent" + ansible.builtin.debug: + msg: "Warning!! {{ required_mount }} doesn't exist. This is a manual task" + + - name: "1.1.4.1 | WARN | Ensure separate partition exists for /var/tmp | warn_count" + ansible.builtin.import_tasks: warning_facts.yml + vars: + warn_control_id: '1.1.4.1' + required_mount: '/var/tmp' + when: + - required_mount not in mount_names + - ubtu20cis_rule_1_1_4_1 + tags: + - level2-server + - level2-workstation + - automated + - audit + - rule_1.1.4.1 + - var + +- name: | + "1.1.4.2 | PATCH | Ensure /var/tmp partition includes the nodev option" + "1.1.4.3 | PATCH | Ensure /var/tmp partition includes the noexec option" + "1.1.4.4 | PATCH | Ensure /var/tmp partition includes the nosuid option" + ansible.posix.mount: + path: /var/tmp + src: "{{ item.device }}" + state: present + fstype: "{{ item.fstype }}" + opts: defaults,{% if ubtu20cis_rule_1_1_4_2 %}nodev,{% endif %}{% if ubtu20cis_rule_1_1_4_3 %}noexec,{% endif %}{% if ubtu20cis_rule_1_1_4_4 %}nosuid{% endif %} + notify: Remount var_tmp + with_items: "{{ ansible_mounts }}" + loop_control: + label: "{{ item.device }}" + when: + - item.mount == "/var/tmp" + - ubtu20cis_rule_1_1_4_2 or + ubtu20cis_rule_1_1_4_3 or + ubtu20cis_rule_1_1_4_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.4.2 + - rule_1.1.4.3 + - rule_1.1.4.4 + - var diff --git a/tasks/section_1/cis_1.1.5.x.yml b/tasks/section_1/cis_1.1.5.x.yml new file mode 100644 index 00000000..e6abba62 --- /dev/null +++ b/tasks/section_1/cis_1.1.5.x.yml @@ -0,0 +1,52 @@ +--- + +- name: "1.1.5.1 | AUDIT | Ensure separate partition exists for /var/log" + block: + - name: "1.1.5.1 | AUDIT | Ensure separate partition exists for /var/log| Absent" + ansible.builtin.debug: + msg: "Warning!! {{ required_mount }} doesn't exist. 
This is a manual task" + + - name: "1.1.5.1 | WARN | Ensure separate partition exists for /var/log | warn_count" + ansible.builtin.import_tasks: warning_facts.yml + vars: + warn_control_id: '1.1.5.1' + required_mount: '/var/log' + when: + - required_mount not in mount_names + - ubtu20cis_rule_1_1_5_1 + tags: + - level2-server + - level2-workstation + - automated + - audit + - rule_1.1.5.1 + - varlog + +- name: | + "1.1.5.2 | PATCH | Ensure /var/log partition includes the nodev option" + "1.1.5.3 | PATCH | Ensure /var/log partition includes the noexec option" + "1.1.5.4 | PATCH | Ensure /var/log partition includes the nosuid option" + ansible.posix.mount: + path: /var/log + src: "{{ item.device }}" + state: present + fstype: "{{ item.fstype }}" + opts: defaults,{% if ubtu20cis_rule_1_1_5_2 %}nodev,{% endif %}{% if ubtu20cis_rule_1_1_5_3 %}noexec,{% endif %}{% if ubtu20cis_rule_1_1_5_4 %}nosuid{% endif %} + notify: Remount var_log + loop: "{{ ansible_mounts }}" + loop_control: + label: "{{ item.device }}" + when: + - item.mount == "/var/log" + - ubtu20cis_rule_1_1_5_2 or + ubtu20cis_rule_1_1_5_3 or + ubtu20cis_rule_1_1_5_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.5.2 + - rule_1.1.5.3 + - rule_1.1.5.4 + - varlog diff --git a/tasks/section_1/cis_1.1.6.x.yml b/tasks/section_1/cis_1.1.6.x.yml new file mode 100644 index 00000000..567368cc --- /dev/null +++ b/tasks/section_1/cis_1.1.6.x.yml @@ -0,0 +1,52 @@ +--- + +- name: "1.1.6.1 | AUDIT | Ensure separate partition exists for /var/log/audit" + block: + - name: "1.1.6.1 | AUDIT | Ensure separate partition exists for /var/log/audit | Absent" + ansible.builtin.debug: + msg: "Warning!! {{ required_mount }} doesn't exist. This is a manual task" + + - name: "1.1.6.1 | WARN | Ensure separate partition exists for /var/log/audit | warn_count" + ansible.builtin.import_tasks: warning_facts.yml + vars: + warn_control_id: '1.1.6.1' + required_mount: '/var/log/audit' + when: + - required_mount not in mount_names + - ubtu20cis_rule_1_1_6_1 + tags: + - level2-server + - level2-workstation + - automated + - audit + - rule_1.1.6.1 + - varlogaudit + +- name: | + "1.1.6.2 | PATCH | Ensure /var/log/audit partition includes the nodev option" + "1.1.6.3 | PATCH | Ensure /var/log/audit partition includes the noexec option" + "1.1.6.4 | PATCH | Ensure /var/log/audit partition includes the nosuid option" + ansible.posix.mount: + path: /var/log/audit + src: "{{ item.device }}" + state: present + fstype: "{{ item.fstype }}" + opts: defaults,{% if ubtu20cis_rule_1_1_6_2 %}nodev,{% endif %}{% if ubtu20cis_rule_1_1_6_3 %}noexec,{% endif %}{% if ubtu20cis_rule_1_1_6_4 %}nosuid{% endif %} + notify: Remount var_log_audit + loop: "{{ ansible_mounts }}" + loop_control: + label: "{{ item.device }}" + when: + - item.mount == "/var/log/audit" + - ubtu20cis_rule_1_1_6_2 or + ubtu20cis_rule_1_1_6_3 or + ubtu20cis_rule_1_1_6_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.6.2 + - rule_1.1.6.3 + - rule_1.1.6.4 + - varlogaudit diff --git a/tasks/section_1/cis_1.1.7.x.yml b/tasks/section_1/cis_1.1.7.x.yml new file mode 100644 index 00000000..6decf9fd --- /dev/null +++ b/tasks/section_1/cis_1.1.7.x.yml @@ -0,0 +1,49 @@ +--- + +- name: "1.1.7.1 | AUDIT | Ensure /home is a separate partition" + block: + - name: "1.1.7.1 | AUDIT | Ensure separate partition exists for /home | Absent" + ansible.builtin.debug: + msg: "Warning!! {{ required_mount }} doesn't exist. 
This is a manual task" + + - name: "1.1.7.1 | WARN | Ensure separate partition exists for /home | warn_count" + ansible.builtin.import_tasks: warning_facts.yml + vars: + warn_control_id: '1.1.7.1' + required_mount: '/home' + when: + - required_mount not in mount_names + - ubtu20cis_rule_1_1_7_1 + tags: + - level2-server + - level2-workstation + - automated + - audit + - rule_1.1.7.1 + - home + +- name: | + "1.1.7.2 | PATCH | Ensure /home partition includes the nodev option" + "1.1.7.3 | PATCH | Ensure /home partition includes the nosuid option" + ansible.posix.mount: + path: /home + src: "{{ item.device }}" + state: present + fstype: "{{ item.fstype }}" + opts: defaults,{% if ubtu20cis_rule_1_1_7_2 %}nodev,{% endif %}{% if ubtu20cis_rule_1_1_7_3 %}nosuid,{% endif %} + notify: Remount home + loop: "{{ ansible_mounts }}" + loop_control: + label: "{{ item.device }}" + when: + - item.mount == "/home" + - ubtu20cis_rule_1_1_7_2 or + ubtu20cis_rule_1_1_7_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.7.2 + - rule_1.1.7.3 + - home diff --git a/tasks/section_1/cis_1.1.8.x.yml b/tasks/section_1/cis_1.1.8.x.yml new file mode 100644 index 00000000..5466378c --- /dev/null +++ b/tasks/section_1/cis_1.1.8.x.yml @@ -0,0 +1,26 @@ +--- + +- name: | + "1.1.8.1 | PATCH | Ensure nodev option set on /dev/shm partition" + "1.1.8.2 | PATCH | Ensure noexec option set on /dev/shm partition" + "1.1.8.3 | PATCH | Ensure nosuid option set on /dev/shm partition" + ansible.posix.mount: + path: /dev/shm + src: /dev/shm + fstype: tmpfs + state: present + opts: "defaults,{% if ubtu20cis_rule_1_1_8_1 %}nodev,{% endif %}{% if ubtu20cis_rule_1_1_8_2 %}noexec,{% endif %}{% if ubtu20cis_rule_1_1_8_3 %}nosuid{% endif %}" + notify: remount dev_shm + when: + - ubtu20cis_rule_1_1_8_1 or + ubtu20cis_rule_1_1_8_2 or + ubtu20cis_rule_1_1_8_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.1.8.1 + - rule_1.1.8.2 + - rule_1.1.8.3 + - dev_shm diff --git a/tasks/section_1/cis_1.1.9.yml b/tasks/section_1/cis_1.1.9.yml new file mode 100644 index 00000000..f4ab77ba --- /dev/null +++ b/tasks/section_1/cis_1.1.9.yml @@ -0,0 +1,18 @@ +--- + +- name: "1.1.9 | PATCH | Disable Automounting" + ansible.builtin.service: + name: autofs + state: stopped + enabled: false + when: + - ubtu20cis_rule_1_1_9 + - ubtu20cis_autofs_service_status.stdout == "loaded" + - not ubtu20cis_allow_autofs + tags: + - level1-server + - level2-workstation + - automated + - patch + - rule_1.1.9 + - automounting diff --git a/tasks/section_1/cis_1.1.x.yml b/tasks/section_1/cis_1.1.x.yml deleted file mode 100644 index 71fd2375..00000000 --- a/tasks/section_1/cis_1.1.x.yml +++ /dev/null @@ -1,348 +0,0 @@ ---- - -- name: "1.1.2 | PATCH | Ensure /tmp is configured" - ansible.posix.mount: - path: /tmp - src: /tmp - state: mounted - fstype: tmpfs - opts: "{{ ubtu20cis_tmp_fstab_options }}" - when: - - ubtu20cis_rule_1_1_2 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_1.1.2 - - tmp - -- name: | - "1.1.3 | PATCH | Ensure nodev option set on /tmp partition" - "1.1.4 | PATCH | Ensure nosuid option set on /tmp partition" - "1.1.5 | PATCH | Ensure noexec option set on /tmp partition" - ansible.posix.mount: - name: /tmp - src: /tmp - state: present - fstype: tmpfs - opts: "{{ ubtu20cis_tmp_fstab_options }}" - notify: remount tmp - when: - - ubtu20cis_rule_1_1_3 or - ubtu20cis_rule_1_1_4 or - ubtu20cis_rule_1_1_5 - # - ubtu20cis_vartmp['enabled'] - tags: - - level1-server - - 
level1-workstation - - automated - - patch - - rule_1.1.3 - - rule_1.1.4 - - rule_1.1.5 - - tmp - -- name: "1.1.6 | PATCH | Ensure /dev/shm is configured" - ansible.posix.mount: - name: /dev/shm - src: /dev/shm - state: mounted - fstype: tmpfs - opts: "{{ ubtu20cis_dev_shm_fstab_options }}" - when: - - ubtu20cis_rule_1_1_6 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_1.1.6 - - dev_shm - -- name: | - "1.1.7 | PATCH | Ensure nodev option set on /dev/shm partition" - "1.1.8 | PATCH | Ensure nosuid option set on /dev/shm partition" - "1.1.9 | PATCH | Ensure noexec option set on /dev/shm partition" - ansible.posix.mount: - name: /dev/shm - src: /dev/shm - state: present - fstype: tmpfs - opts: "{{ ubtu20cis_dev_shm_fstab_options }}" - notify: remount dev_shm - when: - - ubtu20cis_rule_1_1_7 or - ubtu20cis_rule_1_1_8 or - ubtu20cis_rule_1_1_9 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_1.1.7 - - rule_1.1.8 - - rule_1.1.9 - - dev_shm - -- name: "1.1.10 | AUDIT | Ensure separate partition exists for /var" - block: - - name: "1.1.10 | AUDIT | Ensure separate partition exists for /var | Warn if partition is absent" - ansible.builtin.debug: - msg: "Warning!! {{ required_mount }} doesn't exist. This is a manual task" - - - name: "1.1.10 | AUDIT | Ensure separate partition exists for /var | Present" - ansible.builtin.import_tasks: warning_facts.yml - vars: - warn_control_id: '1.1.10' - required_mount: '/var' - when: - - "'/var' not in mount_names" - - ubtu20cis_rule_1_1_10 - tags: - - level2-server - - level2-workstation - - automated - - audit - - rule_1.1.10 - - var - -- name: "1.1.11 | AUDIT | Ensure separate partition exists for /var/tmp" - block: - - name: "1.1.11 | AUDIT | Ensure separate partition exists for /var/tmp | Warn if partition is absent" - ansible.builtin.debug: - msg: "Warning!! {{ required_mount }} doesn't exist. This is a manual task" - - - name: "1.1.11 | AUDIT | Ensure separate partition exists for /var/tmp | Present" - ansible.builtin.import_tasks: warning_facts.yml - vars: - warn_control_id: '1.1.11' - required_mount: '/var/tmp' - when: - - "'/var/tmp' not in mount_names" - - ubtu20cis_rule_1_1_11 - tags: - - level2-server - - level2-workstation - - automated - - audit - - rule_1.1.11 - - var_tmp - -- name: | - "1.1.12 | PATCH | Ensure /var/tmp partition includes the nodev option" - "1.1.13 | PATCH | Ensure /var/tmp partition includes the nosuid option" - "1.1.14 | PATCH | Ensure /var/tmp partition includes the noexec option" - ansible.posix.mount: - name: /var/tmp - src: "{{ item.device }}" - state: present - fstype: "{{ item.fstype }}" - opts: "defaults,{% if ubtu20cis_rule_1_1_12 %}nodev,{% endif %}{% if ubtu20cis_rule_1_1_13 %}nosuid,{% endif %}{% if ubtu20cis_rule_1_1_14 %}noexec{% endif %}" - loop: "{{ ansible_mounts }}" - loop_control: - label: "{{ item.device }}" - notify: remount var_tmp - when: - - ubtu20cis_rule_1_1_12 or - ubtu20cis_rule_1_1_13 or - ubtu20cis_rule_1_1_14 - - item.mount == '/var/tmp' - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_1.1.12 - - rule_1.1.13 - - rule_1.1.14 - - var_tmp - -- name: "1.1.15 | AUDIT | Ensure separate partition exists for /var/log" - block: - - name: "1.1.15 | AUDIT | Ensure separate partition exists for /var/log | Warn if partition is absent" - ansible.builtin.debug: - msg: "Warning!! {{ required_mount }} doesn't exist. 
This is a manual task" - - - name: "1.1.15 | AUDIT | Ensure separate partition exists for /var/log | Present" - ansible.builtin.import_tasks: warning_facts.yml - vars: - warn_control_id: '1.1.15' - required_mount: '/var/log' - when: - - "'/var/log' not in mount_names" - - ubtu20cis_rule_1_1_15 - tags: - - level2-server - - level2-workstation - - automated - - audit - - rule_1.1.15 - - var_log - -- name: "1.1.16 | AUDIT | Ensure separate partition exists for /var/log/audit" - block: - - name: "1.1.16 | AUDIT | Ensure separate partition exists for /var/log/audit | Warn if partition is absent" - ansible.builtin.debug: - msg: "Warning!! {{ required_mount }} doesn't exist. This is a manual task" - - - name: "1.1.16 | AUDIT | Ensure separate partition exists for /var/log/audit | Present" - ansible.builtin.import_tasks: warning_facts.yml - vars: - warn_control_id: '1.1.6.1' - required_mount: '/var/log/audit' - when: - - "'/var/log/audit' not in mount_names" - - ubtu20cis_rule_1_1_16 - tags: - - level2-server - - level2-workstation - - automated - - audit - - var_log_audit - -- name: "1.1.17 | AUDIT | Ensure separate partition exists for /home" - block: - - name: "1.1.17 | AUDIT | Ensure separate partition exists for /home | Warn if partition is absent" - ansible.builtin.debug: - msg: "Warning!! {{ required_mount }} doesn't exist. This is a manual task" - - - name: "1.1.17 | AUDIT | Ensure separate partition exists for /home | Present" - ansible.builtin.import_tasks: warning_facts.yml - vars: - warn_control_id: '1.1.17' - required_mount: '/home' - when: - - "'/home' not in mount_names" - - ubtu20cis_rule_1_1_17 - tags: - - level2-server - - level2-workstation - - automated - - audit - - home - -- name: "1.1.18 | PATCH | Ensure /home partition includes the nodev option" - ansible.posix.mount: - name: "/home" - src: "{{ item.device }}" - state: mounted - fstype: "{{ item.fstype }}" - opts: "nodev" - loop: "{{ ansible_mounts }}" - loop_control: - label: "{{ item.device }}" - when: - - ubtu20cis_rule_1_1_18 - - item.mount == "/home" - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_1.1.18 - - home - -- name: "1.1.19 | AUDIT | Ensure nodev option set on removable media partitions" - ansible.builtin.debug: - msg: "Warning!!!! Not relevant control" - when: - - ubtu20cis_rule_1_1_19 - tags: - - level1-server - - level1-workstation - - manual - - audit - - rule_1.1.19 - - removable_media - -- name: "1.1.20 | AUDIT | Ensure nosuid option set on removable media partitions" - ansible.builtin.debug: - msg: "Warning!!!! Not relevant control" - when: - - ubtu20cis_rule_1_1_20 - tags: - - level1-server - - level1-workstation - - manual - - audit - - rule_1.1.20 - - removable_media - -- name: "1.1.21 | AUDIT | Ensure noexec option set on removable media partitions" - ansible.builtin.debug: - msg: "Warning!!!! Not relevant control" - when: - - ubtu20cis_rule_1_1_21 - tags: - - level1-server - - level1-workstation - - manual - - audit - - rule_1.1.21 - - removable_media - -- name: "1.1.22 | PATCH | Ensure sticky bit is set on all world-writable directories" - block: - - name: "1.1.22 | PATCH | Ensure sticky bit is set on all world-writable directories" - ansible.builtin.shell: df --local -P | awk '{if (NR!=1) print $6}' | xargs -I '{}' find '{}' -xdev -type d \( -perm -0002 -a ! 
-perm -1000 \) 2>/dev/null - changed_when: false - register: ubtu20cis_1_1_22_sticky - - - name: "1.1.22 | PATCH | Ensure sticky bit is set on all world-writable directories" - ansible.builtin.shell: df --local -P | awk '{if (NR!=1) print $6}' | xargs -I '{}' find '{}' -xdev -type d \( -perm -0002 -a ! -perm -1000 \) 2>/dev/null | xargs -I '{}' chmod a+t '{}' - failed_when: ubtu20cis_1_1_22_status.rc > 0 - check_mode: false - register: ubtu20cis_1_1_22_status - when: ubtu20cis_1_1_22_sticky.stdout | length > 0 - when: - - ubtu20cis_rule_1_1_22 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_1.1.22 - - sticky_bit - -- name: "1.1.23 | PATCH | Disable Automounting" - ansible.builtin.service: - name: autofs - state: stopped - enabled: false - when: - - ubtu20cis_rule_1_1_23 - - ubtu20cis_autofs_service_status.stdout == "loaded" - - not ubtu20cis_allow_autofs - tags: - - level1-server - - level2-workstation - - automated - - patch - - rule_1.1.23 - - automounting - -- name: "1.1.24 | PATCH | Disable USB Storage" - block: - - name: "1.1.24 | PATCH | Disable USB Storage | Set modprobe config" - ansible.builtin.lineinfile: - path: /etc/modprobe.d/usb_storage.conf - regexp: '^install usb-storage' - line: 'install usb-storage /bin/true' - create: true - - - name: "1.1.24 | PATCH | Disable USB Storage | Remove usb-storage module" - community.general.modprobe: - name: usb-storage - state: absent - when: ansible_connection != 'docker' - when: - - ubtu20cis_rule_1_1_24 - - not ubtu20cis_allow_usb_storage - tags: - - level1-server - - level2-workstation - - automated - - patch - - rule_1.1.24 - - usb_storage diff --git a/tasks/section_1/cis_1.2.x.yml b/tasks/section_1/cis_1.2.x.yml index 53f16a35..de0ca053 100644 --- a/tasks/section_1/cis_1.2.x.yml +++ b/tasks/section_1/cis_1.2.x.yml @@ -1,62 +1,57 @@ --- -- name: "1.2.1 | AUDIT | Ensure package manager repositories are configured" + +- name: "1.2.1 | PATCH | Ensure AIDE is installed" block: - - name: "1.2.1 | AUDIT | Ensure package manager repositories are configured | Get repositories" - ansible.builtin.shell: apt-cache policy - changed_when: false - failed_when: false - check_mode: false - register: ubtu20cis_1_2_1_apt_policy + - name: "1.2.1 | PATCH | Ensure AIDE is installed" + ansible.builtin.package: + name: ['aide', 'aide-common'] + state: present + update_cache: true - - name: "1.2.1 | AUDIT | Ensure package manager repositories are configured | Message out repository configs" - ansible.builtin.debug: - msg: - - "Warning!! 
Below are the apt package repositories" - - "Please review to make sure they conform to your sites policies" - - "{{ ubtu20cis_1_2_1_apt_policy.stdout_lines }}" + - name: "1.2.1 | PATCH | Ensure AIDE is installed | Recapture packages" + ansible.builtin.package_facts: + manager: auto - - name: "1.2.1 | AUDIT | Ensure package manager repositories are configured | Warn Count" - ansible.builtin.import_tasks: warning_facts.yml - vars: - warn_control_id: '1.2.1' + - name: "1.2.1 | PATCH | Ensure AIDE is installed | Configure AIDE" + ansible.builtin.shell: aide init && mv /var/lib/aide/aide.db.new /var/lib/aide/aide.db + args: + creates: /var/lib/aide/aide.db + changed_when: false + failed_when: false + async: 45 + poll: 0 + when: not ansible_check_mode when: - ubtu20cis_rule_1_2_1 + - ubtu20cis_config_aide + - "'aide' not in ansible_facts.packages or + 'aide-common' not in ansible_facts.packages" tags: - level1-server - level1-workstation - - manual - - audit + - automated + - patch - rule_1.2.1 - - apt - -- name: "1.2.2 | AUDIT | Ensure GPG keys are configured" - block: - - name: "1.2.2 | AUDIT | Ensure GPG keys are configured | Get apt gpg keys" - ansible.builtin.shell: apt-key list - changed_when: false - failed_when: false - check_mode: false - register: ubtu20cis_1_2_2_apt_gpgkeys - - - name: "1.2.2 | AUDIT | Ensure GPG keys are configured | Message out apt gpg keys" - ansible.builtin.debug: - msg: - - "Warning!! Below are the apt gpg keys configured" - - "Please review to make sure they are configured" - - "in accordance with site policy" - - "{{ ubtu20cis_1_2_2_apt_gpgkeys.stdout_lines }}" + - aide - - name: "1.2.2 | AUDIT | Ensure GPG keys are configured | Warn Count" - ansible.builtin.import_tasks: warning_facts.yml - vars: - warn_control_id: '1.2.2' +- name: "1.2.2 | PATCH | Ensure filesystem integrity is regularly checked" + ansible.builtin.cron: + name: Run AIDE integrity check + cron_file: "{{ ubtu20cis_aide_cron['cron_file'] }}" + user: "{{ ubtu20cis_aide_cron['cron_user'] }}" + minute: "{{ ubtu20cis_aide_cron['aide_minute'] | default('0') }}" + hour: "{{ ubtu20cis_aide_cron['aide_hour'] | default('5') }}" + day: "{{ ubtu20cis_aide_cron['aide_day'] | default('*') }}" + month: "{{ ubtu20cis_aide_cron['aide_month'] | default('*') }}" + weekday: "{{ ubtu20cis_aide_cron['aide_weekday'] | default('*') }}" + job: "{{ ubtu20cis_aide_cron['aide_job'] }}" when: + - ubtu20cis_config_aide - ubtu20cis_rule_1_2_2 tags: - level1-server - level1-workstation - - manual - - audit + - automated + - patch - rule_1.2.2 - - gpg - - keys + - cron diff --git a/tasks/section_1/cis_1.3.x.yml b/tasks/section_1/cis_1.3.x.yml index 91f7957e..db90a218 100644 --- a/tasks/section_1/cis_1.3.x.yml +++ b/tasks/section_1/cis_1.3.x.yml @@ -1,51 +1,77 @@ --- -- name: "1.3.1 | PATCH | Ensure AIDE is installed" - block: - - name: "1.3.1 | PATCH | Ensure AIDE is installed" - ansible.builtin.package: - name: ['aide', 'aide-common'] - state: present - update_cache: true - - name: "1.3.1 | PATCH | Ensure AIDE is installed | Configure AIDE" - ansible.builtin.shell: aideinit && mv /var/lib/aide/aide.db.new /var/lib/aide/aide.db - args: - warn: false - creates: /var/lib/aide/aide.db - changed_when: false - failed_when: false - async: 45 - poll: 0 - when: not ansible_check_mode +- name: "1.3.1 | PATCH | Ensure updates, patches, and additional security software are installed" + ansible.builtin.package: + name: "*" + state: latest when: - - ubtu20cis_config_aide - ubtu20cis_rule_1_3_1 tags: - level1-server - level1-workstation - 
- automated - - patch + - manual + - audit - rule_1.3.1 - - aide + - apt -- name: "1.3.2 | PATCH | Ensure filesystem integrity is regularly checked" - ansible.builtin.cron: - name: Run AIDE integrity check - cron_file: "{{ ubtu20cis_aide_cron['cron_file'] }}" - user: "{{ ubtu20cis_aide_cron['cron_user'] }}" - minute: "{{ ubtu20cis_aide_cron['aide_minute'] | default('0') }}" - hour: "{{ ubtu20cis_aide_cron['aide_hour'] | default('5') }}" - day: "{{ ubtu20cis_aide_cron['aide_day'] | default('*') }}" - month: "{{ ubtu20cis_aide_cron['aide_month'] | default('*') }}" - weekday: "{{ ubtu20cis_aide_cron['aide_weekday'] | default('*') }}" - job: "{{ ubtu20cis_aide_cron['aide_job'] }}" +- name: "1.3.2 | AUDIT | Ensure package manager repositories are configured" + block: + - name: "1.3.2 | AUDIT | Ensure package manager repositories are configured | Get repositories" + ansible.builtin.shell: apt-cache policy + changed_when: false + failed_when: false + check_mode: false + register: ubtu20cis_1_3_2_apt_policy + + - name: "1.3.2 | AUDIT | Ensure package manager repositories are configured | Message out repository configs" + ansible.builtin.debug: + msg: + - "Warning!! Below are the apt package repositories" + - "Please review to make sure they conform to your sites policies" + - "{{ ubtu20cis_1_3_2_apt_policy.stdout_lines }}" + + - name: "1.3.2 | AUDIT | Ensure package manager repositories are configured | Warn Count" + ansible.builtin.import_tasks: warning_facts.yml + vars: + warn_control_id: '1.3.2' when: - - ubtu20cis_config_aide - ubtu20cis_rule_1_3_2 tags: - level1-server - level1-workstation - - automated - - patch + - manual + - audit - rule_1.3.2 - - cron + - apt + +- name: "1.3.3 | AUDIT | Ensure GPG keys are configured" + block: + - name: "1.3.3 | AUDIT | Ensure GPG keys are configured | Get apt gpg keys" + ansible.builtin.shell: apt-key list + changed_when: false + failed_when: false + check_mode: false + register: ubtu20cis_1_3_3_apt_gpgkeys + + - name: "1.3.3 | AUDIT | Ensure GPG keys are configured | Message out apt gpg keys" + ansible.builtin.debug: + msg: + - "Warning!! Below are the apt gpg keys configured" + - "Please review to make sure they are configured" + - "in accordance with site policy" + - "{{ ubtu20cis_1_3_3_apt_gpgkeys.stdout_lines }}" + + - name: "1.3.3 | AUDIT | Ensure GPG keys are configured | Warn Count" + ansible.builtin.import_tasks: warning_facts.yml + vars: + warn_control_id: '1.3.3' + when: + - ubtu20cis_rule_1_3_3 + tags: + - level1-server + - level1-workstation + - manual + - audit + - rule_1.3.3 + - gpg + - keys diff --git a/tasks/section_1/cis_1.4.x.yml b/tasks/section_1/cis_1.4.x.yml index 525f9500..41774164 100644 --- a/tasks/section_1/cis_1.4.x.yml +++ b/tasks/section_1/cis_1.4.x.yml @@ -1,100 +1,72 @@ --- -- name: "1.4.1 | PATCH | Ensure permissions on bootloader config are not overridden" - block: - - name: "1.4.1 | PATCH | Ensure permissions on bootloader config are not overridden | Change chmod setting" - ansible.builtin.replace: - path: /usr/sbin/grub-mkconfig - regexp: 'chmod\s\d\d\d\s\${grub_cfg}.new' - replace: 'chmod 400 ${grub_cfg}.new' - - - name: "1.4.1 | PATCH | Ensure permissions on bootloader config are not overridden | Remove check on password" - ansible.builtin.lineinfile: - path: /usr/sbin/grub-mkconfig - regexp: 'if \[ \"x\$\{grub_cfg\}\" != "x" \] && ! 
grep "\^password" \${grub_cfg}.new' - line: if [ "x${grub_cfg}" != "x" ]; then - when: - - ubtu20cis_rule_1_4_1 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_1.4.1 - - grub -# Variables listed in 1.4.2 will need to be set for your environment - -- name: "1.4.2 | PATCH | Ensure bootloader password is set" +- name: "1.4.1 | PATCH | Ensure bootloader password is set" block: - - name: "1.4.2 | PATCH | Ensure bootloader password is set | set boot password" + - name: "1.4.1 | PATCH | Ensure bootloader password is set" ansible.builtin.lineinfile: - path: "{{ ubtu20cis_grub_user_file}}" - regexp: "{{ item.regexp }}" - line: "{{ item.line }}" - insertafter: "{{ item.insertafter | default(omit) }}" - no_log: true - loop: - - { regexp: '^set superusers=', line: 'set superusers="{{ ubtu20cis_grub_users }}"' } - - { regexp: '^password_pbkdf2', line: 'password_pbkdf2 {{ ubtu20cis_grub_users }} {{ ubtu20cis_grub_pw }}', insertafter: '^set superusers=' } - notify: grub update + path: "{{ ubtu20cis_grub_user_file }}" + regexp: '^(password_pbkdf2 {{ ubtu20cis_grub_user }}) grub.pbkdf2.*' + line: '\1 {{ ubtu20cis_bootloader_password_hash }}' + insertafter: set superusers="{{ ubtu20cis_grub_user }}" + state: present + notify: Grub update - - name: "1.4.2 | PATCH | Ensure bootloader password is set | allow unrestricted boot" + - name: "1.4.1 | PATCH | Ensure bootloader password is set | allow unrestricted boot" ansible.builtin.lineinfile: path: "/etc/grub.d/10_linux" regexp: '(^CLASS="--class gnu-linux --class gnu --class os).*"$' line: '\g<1> --unrestricted"' backrefs: true + notify: Grub update when: not ubtu20cis_ask_passwd_to_boot - notify: grub update when: - - ubtu20cis_rule_1_4_2 - - ubtu20cis_set_grub_password + - ubtu20cis_set_boot_pass + - ubtu20cis_rule_1_4_1 tags: - level1-server - level1-workstation - automated - patch - - rule_1.4.2 + - rule_1.4.1 - grub -- name: "1.4.3 | PATCH | Ensure permissions on bootloader config are configured" +- name: "1.4.2 | PATCH | Ensure permissions on bootloader config are configured" block: - - name: "1.4.3 | AUDIT | Ensure permissions on bootloader config are configured | Check for Grub file" + - name: "1.4.2 | AUDIT | Ensure permissions on bootloader config are configured | Check for Grub file" ansible.builtin.stat: - path: /boot/grub/grub.cfg + path: "{{ ubtu20cis_grub_file }}" check_mode: false - register: ubtu20cis_1_4_3_grub_cfg_status + register: ubtu20cis_1_4_2_grub_cfg_status - - name: "1.4.3 | PATCH | Ensure permissions on bootloader config are configured | Set permissions" + - name: "1.4.2 | PATCH | Ensure permissions on bootloader config are configured | Set permissions" ansible.builtin.file: - path: /boot/grub/grub.cfg + path: "{{ ubtu20cis_grub_file }}" owner: root group: root - mode: 0400 + mode: 0600 when: - - ubtu20cis_1_4_3_grub_cfg_status.stat.exists + - ubtu20cis_1_4_2_grub_cfg_status.stat.exists when: - - ubtu20cis_rule_1_4_3 + - ubtu20cis_rule_1_4_2 tags: - level1-server - level1-workstation - automated - patch - - rule_1.4.3 + - rule_1.4.2 - grub -# The following needs to be set ensure this is in the encrypted form. 
-- name: "1.4.4 | PATCH | Ensure authentication required for single user mode" +- name: "1.4.3 | PATCH | Ensure authentication required for single user mode" ansible.builtin.user: - name: root - password: "{{ ubtu20cis_root_pw }}" + name: "{{ ubtu20cis_grub_user }}" + password: "{{ ubtu20cis_bootloader_password_hash }}" when: - - ubtu20cis_rule_1_4_4 - - ubtu20cis_set_root_password + - ubtu20cis_rule_1_4_3 + - ubtu20cis_set_boot_pass tags: - level1-server - level1-workstation - automated - patch - - rule_1.4.4 + - rule_1.4.3 - passwd diff --git a/tasks/section_1/cis_1.5.x.yml b/tasks/section_1/cis_1.5.x.yml index b124a6cb..153edf12 100644 --- a/tasks/section_1/cis_1.5.x.yml +++ b/tasks/section_1/cis_1.5.x.yml @@ -1,45 +1,30 @@ --- -- name: "1.5.1 | AUDIT | Ensure XD/NX support is enabled" + +- name: "1.5.1 | PATCH | Ensure prelink is not installed" block: - - name: "1.5.1 | AUDIT | Ensure XD/NX support is enabled | Find status of XD/NX" - ansible.builtin.shell: 'dmesg | grep -w "NX (Execute Disable) protection: active"' + - name: "1.5.1 | PATCH | Ensure prelink is not installed | Restore binaries to normal" + ansible.builtin.shell: prelink -ua changed_when: false failed_when: false - check_mode: false - register: ubtu20cis_1_5_1_xdnx_status - - - name: "1.5.1 | AUDIT | Ensure XD/NX support is enabled | Alert if XD/NX is not enabled" - ansible.builtin.debug: - msg: "Warning!! You do not have XD/NX (Execute Disable/No Execute) enabled" - when: ubtu20cis_1_5_1_xdnx_status | length == 0 - - name: "1.5.1 | AUDIT | Ensure XD/NX support is enabled | Warn Count" - ansible.builtin.import_tasks: warning_facts.yml - when: ubtu20cis_1_5_1_xdnx_status | length == 0 - vars: - warn_control_id: '1.5.1' + - name: "1.5.1 | PATCH | Ensure prelink is not installed| Remove prelink package" + ansible.builtin.package: + name: prelink + state: absent when: - ubtu20cis_rule_1_5_1 tags: - level1-server - level1-workstation - - manual - - audit + - automated + - patch - rule_1.5.1 - - xd/nx - -- name: "1.5.2 | PATCH | Ensure address space layout randomization (ASLR) is enabled" - block: - - name: "1.5.2 | PATCH | Ensure address space layout randomization (ASLR) is enabled | Set ASLR settings" - ansible.builtin.lineinfile: - path: /etc/sysctl.conf - regexp: '^kernel.randomize_va_space' - line: 'kernel.randomize_va_space = 2' + - prelink - - name: "1.5.2 | PATCH | Ensure address space layout randomization (ASLR) is enabled | Set active kernel parameter" - ansible.posix.sysctl: - name: kernel.randomize_va_space - value: '2' +- name: "1.5.2 | PATCH | Ensure address space layout randomization (ASLR) is enabled | Set active kernel parameter" + ansible.posix.sysctl: + name: kernel.randomize_va_space + value: '2' when: - ubtu20cis_rule_1_5_2 tags: @@ -50,31 +35,61 @@ - rule_1.5.2 - aslr -- name: "1.5.3 | PATCH | Ensure prelink is not installed" +- name: "1.5.3 | PATCH | Ensure ptrace_scope is restricted | sysctl.conf" + ansible.builtin.lineinfile: + path: /etc/sysctl.d/60-kernel_sysctl.conf + regexp: '^kernel.yama.ptrace_scope ' + line: kernel.yama.ptrace_scope=1 + create: true + owner: root + group: root + mode: '0644' + when: + - ubtu20cis_rule_1_5_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.5.3 + +- name: "1.5.4 | PATCH | Ensure Automatic Error Reporting is not enabled" block: - - name: "1.5.3 | PATCH | Ensure prelink is not installed | Restore binaries to normal" - ansible.builtin.shell: prelink -ua - changed_when: false - failed_when: false + - name: "1.5.4 | PATCH | Ensure 
Automatic Error Reporting is not enabled | disable" + ansible.builtin.lineinfile: + path: /etc/default/apport + regexp: ^enabled + line: enabled=0 + create: true + owner: root + group: root + mode: 0644 - - name: "1.5.3 | PATCH | Ensure prelink is not installed| Remove prelink package" + - name: "1.5.4 | PATCH | Ensure Automatic Error Reporting is not enabled | stop service" + ansible.builtin.systemd: + name: apport + state: stopped + enabled: false + + - name: "1.5.4 | PATCH | Ensure Automatic Error Reporting is not enabled | remove package" ansible.builtin.package: - name: prelink + name: apport state: absent + when: + - "'apport' in ansible_facts.packages" when: - - ubtu20cis_rule_1_5_3 + - ubtu20cis_rule_1_5_4 tags: - level1-server - level1-workstation - automated - patch - - rule_1.5.3 - - prelink - + - rule_1.5.4 + - apport -- name: "1.5.4 | PATCH | Ensure core dumps are restricted" +- name: "1.5.5 | PATCH | Ensure core dumps are restricted" block: - - name: "1.5.4 | PATCH | Ensure core dumps are restricted | kernel sysctl" + - name: "1.5.5 | PATCH | Ensure core dumps are restricted | kernel sysctl" ansible.posix.sysctl: name: fs.suid_dumpable value: '0' @@ -83,7 +98,7 @@ sysctl_set: true ignoreerrors: true - - name: "1.5.4 | PATCH | Ensure core dumps are restricted | security limits" + - name: "1.5.5 | PATCH | Ensure core dumps are restricted | security limits" ansible.builtin.lineinfile: path: /etc/security/limits.d/99_zero_core.conf regexp: '^\* hard core' @@ -93,7 +108,7 @@ group: root mode: '0644' - - name: "1.5.4 | PATCH | Ensure core dumps are restricted | sysctl.conf" + - name: "1.5.5 | PATCH | Ensure core dumps are restricted | sysctl.conf" ansible.builtin.lineinfile: path: /etc/sysctl.conf regexp: '^fs.suid_dumpable' @@ -102,7 +117,7 @@ group: root mode: '0644' - - name: "1.5.4 | PATCH | Ensure core dumps are restricted | coredump.conf" + - name: "1.5.5 | PATCH | Ensure core dumps are restricted | coredump.conf" ansible.builtin.lineinfile: path: /etc/systemd/coredump.conf regexp: "{{ item.regexp }}" @@ -116,11 +131,11 @@ - { regexp: '^ProcessSizeMax', line: 'ProcessSizeMax=0' } when: "'systemd-coredump' in ansible_facts.packages" when: - - ubtu20cis_rule_1_5_4 + - ubtu20cis_rule_1_5_5 tags: - level1-server - level1-workstation - automated - patch - - rule_1.5.4 + - rule_1.5.5 - coredump diff --git a/tasks/section_1/cis_1.6.x.yml b/tasks/section_1/cis_1.6.x.yml index e47398d8..ca897d0d 100644 --- a/tasks/section_1/cis_1.6.x.yml +++ b/tasks/section_1/cis_1.6.x.yml @@ -28,7 +28,7 @@ regexp: '^GRUB_CMDLINE_LINUX' line: 'GRUB_CMDLINE_LINUX="apparmor=1 security=apparmor {{ ubtu20cis_1_6_1_2_cmdline_settings.stdout }}"' insertafter: '^GRUB_' - notify: grub update + notify: Grub update when: - "'apparmor' not in ubtu20cis_1_6_1_2_cmdline_settings.stdout" - "'security' not in ubtu20cis_1_6_1_2_cmdline_settings.stdout" @@ -44,7 +44,7 @@ when: - "'apparmor' in ubtu20cis_1_6_1_2_cmdline_settings.stdout" - "'security' in ubtu20cis_1_6_1_2_cmdline_settings.stdout" - notify: grub update + notify: Grub update when: - ubtu20cis_rule_1_6_1_2 tags: diff --git a/tasks/section_1/cis_1.8.x.yml b/tasks/section_1/cis_1.8.x.yml index 513e7148..53b33b74 100644 --- a/tasks/section_1/cis_1.8.x.yml +++ b/tasks/section_1/cis_1.8.x.yml @@ -1,13 +1,14 @@ --- + - name: "1.8.1 | PATCH | Ensure GNOME Display Manager is removed" ansible.builtin.package: name: gdm3 state: absent - notify: change_requires_reboot when: - ubtu20cis_rule_1_8_1 - not ubtu20cis_desktop_required - ubtu20cis_disruption_high + - 
"'gdm3' in ansible_facts.packages" tags: - level2-server - manual @@ -25,7 +26,7 @@ owner: root group: root mode: 0644 - notify: reload gdm + notify: Update dconf with_items: - { regexp: '\[org\/gnome\/login-screen\]', line: '[org/gnome/login-screen]', insertafter: EOF } - { regexp: 'banner-message-enable', line: 'banner-message-enable=true', insertafter: '\[org\/gnome\/login-screen\]'} @@ -43,7 +44,7 @@ - name: "1.8.3 | PATCH | Ensure disable-user-list is enabled" ansible.builtin.lineinfile: - path: /etc/gdm3/greeter.dconf-defaul + path: /etc/gdm3/greeter.dconf-default regexp: '^disable-user-list=' line: 'disable-user-list=true' insertafter: 'banner-message-text=' @@ -51,7 +52,7 @@ owner: root group: root mode: 0644 - notify: reload gdm + notify: Update dconf when: - ubtu20cis_rule_1_8_3 - ubtu20cis_desktop_required @@ -61,19 +62,210 @@ - automated - patch - rule_1.8.3 - - gdm3 + - gnome + +- name: "1.8.4 | PATCH | Ensure GDM screen locks when the user is idle" + block: + - name: "1.8.4 | PATCH | Ensure GDM screen locks when the user is idle | session profile" + ansible.builtin.lineinfile: + path: /etc/dconf/profile/session + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + insertafter: "{{ item.after | default(omit) }}" + loop: + - "{ regexp: 'user-db: user', line: 'user' }" + - "{ regexp: 'system-db: {{ ubtu20cis_dconf_db_name }}'', after: '^user-db.*' }" + + - name: "1.8.4 | PATCH | Ensure GDM screen locks when the user is idle | make directory" + ansible.builtin.file: + path: "/etc/dconf/db/{{ ubtu20cis_dconf_db_name }}.d" + owner: root + group: root + mode: 0755 + state: directory + notify: Update dconf + + - name: "1.8.4 | PATCH | Ensure GDM screen locks when the user is idle | session script" + ansible.builtin.template: + src: etc/dconf/db/00-screensaver.j2 + dest: "/etc/dconf/db/{{ ubtu20cis_dconf_db_name }}.d/00-screensaver" + owner: root + group: root + mode: '0644' + notify: Update dconf + when: + - ubtu20cis_rule_1_8_4 + - ubtu20cis_desktop_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.8.4 + - gnome + +- name: "1.8.5 | PATCH | Ensure GDM screen locks cannot be overridden" + block: + - name: "1.8.5 | PATCH | Ensure GDM screen locks cannot be overridden | make lock directory" + ansible.builtin.file: + path: "/etc/dconf/db/{{ ubtu20cis_dconf_db_name }}.d/locks" + owner: root + group: root + mode: 0755 + state: directory + notify: Update dconf + + - name: "1.8.5 | PATCH | Ensure GDM screen locks cannot be overridden | make lockfile" + ansible.builtin.template: + src: etc/dconf/db/00-screensaver_lock.j2 + dest: "/etc/dconf/db/{{ ubtu20cis_dconf_db_name }}.d/locks/00-screensaver" + owner: root + group: root + mode: 0644 + notify: Update dconf + when: + - ubtu20cis_rule_1_8_5 + - ubtu20cis_desktop_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_1.8.5 + - gnome + +- name: "1.8.6 | PATCH | Ensure GDM automatic mounting of removable media is disabled" + block: + - name: "1.8.6 | PATCH | Ensure GDM automatic mounting of removable media is disabled | make directory" + ansible.builtin.file: + path: "/etc/dconf/db/{{ ubtu20cis_dconf_db_name }}.d" + owner: root + group: root + mode: 0755 + state: directory + notify: Update dconf + + - name: "1.8.6 | PATCH | Ensure GDM automatic mounting of removable media is disabled | session script" + ansible.builtin.template: + src: etc/dconf/db/00-media-automount.j2 + dest: "/etc/dconf/db/{{ ubtu20cis_dconf_db_name }}.d/00-media-automount" + owner: root + 
group: root + mode: '0644' + notify: Update dconf + when: + - ubtu20cis_rule_1_8_6 + - ubtu20cis_desktop_required + tags: + - level1-server + - level2-workstation + - automated + - patch + - rule_1.8.6 + - gnome + +- name: "1.8.7 | PATCH | Ensure GDM disabling automatic mounting of removable media is not overridden" + block: + - name: "1.8.7 | PATCH | Ensure GDM disabling automatic mounting of removable media is not overridden | make lock directory" + ansible.builtin.file: + path: "/etc/dconf/db/{{ ubtu20cis_dconf_db_name }}.d/locks" + owner: root + group: root + mode: 0755 + state: directory + notify: Update dconf + + - name: "1.8.7 | PATCH | Ensure GDM disabling automatic mounting of removable media is not overridden | make lockfile" + ansible.builtin.template: + src: etc/dconf/db/00-automount_lock.j2 + dest: "/etc/dconf/db/{{ ubtu20cis_dconf_db_name }}.d/locks/00-automount_lock" + owner: root + group: root + mode: 0644 + notify: Update dconf + when: + - ubtu20cis_rule_1_8_7 + - ubtu20cis_desktop_required + tags: + - level1-server + - level2-workstation + - automated + - patch + - rule_1.8.7 + - gnome + +- name: "1.8.8 | PATCH | Ensure GDM autorun-never is enabled" + block: + - name: "1.8.8 | PATCH | Ensure GDM autorun-never is enabled | make directory" + ansible.builtin.file: + path: "/etc/dconf/db/{{ ubtu20cis_dconf_db_name }}.d" + owner: root + group: root + mode: 0755 + state: directory + notify: Update dconf + + - name: "1.8.8 | PATCH | Ensure GDM autorun-never is enabled | session script" + ansible.builtin.template: + src: etc/dconf/db/00-media-autorun.j2 + dest: "/etc/dconf/db/{{ ubtu20cis_dconf_db_name }}.d/00-media-autorun" + owner: root + group: root + mode: '0644' + notify: Update dconf + when: + - ubtu20cis_rule_1_8_8 + - ubtu20cis_desktop_required + tags: + - level1-server + - level2-workstation + - automated + - patch + - rule_1.8.8 + - gnome + +- name: "1.8.9 | PATCH | Ensure GDM autorun-never is not overridden" + block: + - name: "1.8.9 | PATCH | Ensure GDM autorun-never is not overridden | make lock directory" + ansible.builtin.file: + path: "/etc/dconf/db/{{ ubtu20cis_dconf_db_name }}.d/locks" + owner: root + group: root + mode: 0755 + state: directory + notify: Update dconf + + - name: "1.8.9 | PATCH | Ensure GDM autorun-never is not overridden | make lockfile" + ansible.builtin.template: + src: etc/dconf/db/00-autorun_lock.j2 + dest: "/etc/dconf/db/{{ ubtu20cis_dconf_db_name }}.d/locks/00-autorun_lock" + owner: root + group: root + mode: 0644 + notify: Update dconf + when: + - ubtu20cis_rule_1_8_9 + - ubtu20cis_desktop_required + tags: + - level1-server + - level2-workstation + - automated + - patch + - rule_1.8.9 + - gnome -- name: "1.8.4 | PATCH | Ensure XDCMP is not enabled" +- name: "1.8.10 | PATCH | Ensure XDCMP is not enabled" ansible.builtin.lineinfile: path: /etc/gdm3/custom.conf regexp: '^Enable.*=.*true' state: absent when: - - ubtu20cis_rule_1_8_4 + - ubtu20cis_rule_1_8_10 tags: - level1-server - level1-workstation - automated - patch - - rule_1.8.4 + - rule_1.8.10 + - gnome - xdcmp diff --git a/tasks/section_1/cis_1.9.yml b/tasks/section_1/cis_1.9.yml deleted file mode 100644 index 8718eab0..00000000 --- a/tasks/section_1/cis_1.9.yml +++ /dev/null @@ -1,23 +0,0 @@ ---- -- name: "1.9 | PATCH | Ensure updates, patches, and additional security software are installed" - block: - - name: "1.9 | PATCH | Ensure updates, patches, and additional security software are installed" - ansible.builtin.package: - name: "*" - state: latest - register: server_patched - 
notify: change_requires_reboot - - - name: "1.9 | PATCH | Ensure updates, patches, and additional security software are installed | update package facts" # noqa no-handler - ansible.builtin.package_facts: - manager: auto - when: - - server_patched.changed - when: - - ubtu20cis_rule_1_9 - tags: - - level1-server - - level1-workstation - - manual - - patch - - rule_1.9 diff --git a/tasks/section_1/main.yml b/tasks/section_1/main.yml index 46babc7f..f7d9203c 100644 --- a/tasks/section_1/main.yml +++ b/tasks/section_1/main.yml @@ -2,13 +2,37 @@ - name: "SECTION | 1.1.1 | Disable Unused Filesystems" ansible.builtin.import_tasks: cis_1.1.1.x.yml -- name: "SECTION | 1.1 | Configure mounts and Filesystems" - ansible.builtin.import_tasks: cis_1.1.x.yml +- name: "SECTION | 1.1.2 | Configure /tmp" + ansible.builtin.import_tasks: cis_1.1.2.x.yml -- name: "SECTION | 1.2 | Cofnigure Software Updates" +- name: "SECTION | 1.1.3 | Configure /var" + ansible.builtin.import_tasks: cis_1.1.3.x.yml + +- name: "SECTION | 1.1.4 | Configure /var/tmp" + ansible.builtin.import_tasks: cis_1.1.4.x.yml + +- name: "SECTION | 1.1.5 | Configure /var/log" + ansible.builtin.import_tasks: cis_1.1.5.x.yml + +- name: "SECTION | 1.1.6 | Configure /var/log/audit" + ansible.builtin.import_tasks: cis_1.1.6.x.yml + +- name: "SECTION | 1.1.7 | Configure /home" + ansible.builtin.import_tasks: cis_1.1.7.x.yml + +- name: "SECTION | 1.1.8 | Configure /dev/shm" + ansible.builtin.import_tasks: cis_1.1.8.x.yml + +- name: "SECTION | 1.1.9 | Configure autofs" + ansible.builtin.import_tasks: cis_1.1.9.yml + +- name: "SECTION | 1.1.10 | Configure usb-storage" + ansible.builtin.import_tasks: cis_1.1.10.yml + +- name: "SECTION | 1.2 | Filesystem Integrity Checking" ansible.builtin.import_tasks: cis_1.2.x.yml -- name: "SECTION | 1.3. | Filesystem Integrity Checking" +- name: "SECTION | 1.3. 
| gpg and repository configuration" ansible.builtin.import_tasks: cis_1.3.x.yml - name: "SECTION | 1.4 | Secure Boot Settings" @@ -25,6 +49,3 @@ - name: "SECTION | 1.8 | GNOME Display Manager" ansible.builtin.import_tasks: cis_1.8.x.yml - -- name: "SECTION | 1.9 | Ensure updates, patches, and additional security software are installed" - ansible.builtin.import_tasks: cis_1.9.yml diff --git a/tasks/section_2/cis_2.1.1.x.yml b/tasks/section_2/cis_2.1.1.x.yml new file mode 100644 index 00000000..84e4bcbb --- /dev/null +++ b/tasks/section_2/cis_2.1.1.x.yml @@ -0,0 +1,36 @@ +--- + +- name: "2.1.1.1 | PATCH | Ensure a single time synchronization daemon is in use" + block: + - name: "2.1.1.1 | PATCH | Ensure a single time synchronization daemon is in use | Pkg installed" + ansible.builtin.package: + name: "{{ ubtu20cis_time_sync_tool }}" + state: present + + - name: "2.1.1.1 | PATCH | Ensure a single time synchronization daemon is in use | other pkgs removed" + ansible.builtin.package: + name: "{{ item }}" + state: absent + loop: + - chrony + - ntp + when: item != ubtu20cis_time_sync_tool + + - name: "2.1.1.1 | PATCH | Ensure a single time synchronization daemon is in use | mask service" + ansible.builtin.service: + name: systemd-timesyncd.service + state: stopped + enabled: false + masked: true + when: ubtu20cis_time_sync_tool != "systemd-timesyncd" + when: + - ubtu20cis_rule_2_1_1_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.1.1.1 + - chrony + - ntp + - systemd-timesyncd diff --git a/tasks/section_2/cis_2.1.2.x.yml b/tasks/section_2/cis_2.1.2.x.yml new file mode 100644 index 00000000..c13901ae --- /dev/null +++ b/tasks/section_2/cis_2.1.2.x.yml @@ -0,0 +1,71 @@ +--- + +- name: "2.1.2.1 | PATCH | Ensure chrony is configured with authorized timeserver" + block: + - name: "2.1.2.1 | PATCH | Ensure chrony is configured with authorized timeserver | ensure source dir exists" + ansible.builtin.file: + path: /etc/chrony/sources.d + state: directory + owner: root + group: root + mode: '0755' + + - name: "2.1.2.1 | PATCH | Ensure chrony is configured with authorized timeserver | sources" + ansible.builtin.template: + src: "{{ item }}.j2" + dest: "/{{ item }}" + mode: 0644 + owner: root + group: root + loop: + - 'etc/chrony/sources.d/time.sources' + notify: restart timeservice + + - name: "2.1.2.1 | PATCH | Ensure chrony is configured with authorized timeserver | remove current sources in .conf" + ansible.builtin.replace: + path: /etc/chrony/chrony.conf + regexp: '^(server.*|pool.*)' + replace: '#\1' + notify: restart timeservice + + - name: "2.1.2.1 | PATCH | Ensure chrony is configured with authorized timeserver | load sources" + ansible.builtin.lineinfile: + path: /etc/chrony/chrony.conf + regexp: '^include /etc/chrony/sources.d.*' + line: include /etc/chrony/sources.d/time.sources + notify: restart timeservice + when: + - ubtu20cis_rule_2_1_2_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.1.2.1 + - chrony + +- name: "2.1.2.2 | PATCH | Ensure chrony is running as user _chrony" + ansible.builtin.lineinfile: + path: /etc/chrony/chrony.conf + regexp: '^user _chrony' + line: 'user _chrony' + when: + - ubtu20cis_rule_2_1_2_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.1.2.2 + - chrony + +- name: "2.1.2.3 | PATCH | Ensure chrony is enabled and running" + ansible.builtin.systemd: + name: chrony + state: started + enabled: true + when: + - ubtu20cis_rule_2_1_2_3 + tags: + - level1-server + - level1-workstation + - 
rule_2.1.2.3 + - chrony diff --git a/tasks/section_2/cis_2.1.3.x.yml b/tasks/section_2/cis_2.1.3.x.yml new file mode 100644 index 00000000..e9f57d67 --- /dev/null +++ b/tasks/section_2/cis_2.1.3.x.yml @@ -0,0 +1,44 @@ +--- + +- name: "2.1.3.1 | PATCH | Ensure systemd-timesyncd configured with authorized timeserver" + block: + - name: "2.1.3.1 | PATCH | Ensure systemd-timesyncd configured with authorized timeserver | conf directory" + ansible.builtin.file: + path: /etc/systemd/timesyncd.conf.d + state: directory + owner: root + group: root + mode: 0755 + + - name: "2.1.3.1 | PATCH | Ensure systemd-timesyncd configured with authorized timeserver | sources" + ansible.builtin.template: + src: "{{ item }}.j2" + dest: "/{{ item }}" + mode: 0644 + owner: root + group: root + loop: + - "etc/systemd/timesyncd.conf.d/50-timesyncd.conf" + notify: restart timeservice + when: + - ubtu20cis_rule_2_1_3_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.1.3.1 + - timesyncd + +- name: "2.1.3.2 | PATCH | Ensure systemd-timesyncd is enabled and running" + ansible.builtin.systemd: + name: systemd-timesyncd + state: started + enabled: true + masked: false + when: + - ubtu20cis_rule_2_1_3_2 + tags: + - level1-server + - level1-workstation + - rule_2.1.3.2 + - timesyncd diff --git a/tasks/section_2/cis_2.1.4.x.yml b/tasks/section_2/cis_2.1.4.x.yml new file mode 100644 index 00000000..23a4b6df --- /dev/null +++ b/tasks/section_2/cis_2.1.4.x.yml @@ -0,0 +1,78 @@ +--- + +- name: "2.1.4.1 | PATCH | Ensure ntp access control is configured " + ansible.builtin.lineinfile: + path: /etc/ntp.conf + regexp: '^(restrict) (|{{ item }}) .*$' + line: 'restrict {{ item }} default kod nomodify notrap nopeer noquery' + loop: + - '-4' + - '-6' + notify: Restart timeservice + when: + - ubtu20cis_rule_2_1_4_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.1.4.1 + - ntp + +- name: "2.1.4.2 | PATCH | Ensure ntp is configured with authorized timeserver" + block: + - name: "2.1.4.2 | PATCH | Ensure ntp is configured with authorized timeserver | pool" + ansible.builtin.lineinfile: + path: /etc/ntp.conf + regexp: '^pool.*' + line: 'pool {{ item.name }} {{ item.options }}' + notify: Restart timeservice + loop: "{{ ubtu20cis_time_pool }}" + loop_control: + label: "{{ item.name }}" + + - name: "2.1.4.2 | PATCH | Ensure ntp is configured with authorized timeserver | servers" + ansible.builtin.lineinfile: + path: /etc/ntp.conf + insertafter: '^server' + line: 'server {{ item.name }} {{ item.options }}' + loop: "{{ ubtu20cis_time_servers }}" + loop_control: + label: "{{ item.name }}" + notify: Restart timeservice + when: + - ubtu20cis_rule_2_1_4_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.1.4.2 + - ntp + +- name: "2.1.4.3 | PATCH | Ensure ntp is running as user ntp" + ansible.builtin.lineinfile: + path: /etc/init.d/ntp + regexp: '^RUNASUSER.*' + line: 'RUNASUSER=ntp' + notify: Restart timeservice + when: + - ubtu20cis_rule_2_1_4_3 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.1.4.3 + - ntp + +- name: "2.1.4.4 | PATCH | Ensure ntp is enabled and running" + ansible.builtin.systemd: + name: ntp + state: started + enabled: true + masked: false + when: + - ubtu20cis_rule_2_1_4_4 + tags: + - level1-server + - level1-workstation + - rule_2.1.4.4 + - ntp diff --git a/tasks/section_2/cis_2.1.x.yml b/tasks/section_2/cis_2.1.x.yml deleted file mode 100644 index 465a2280..00000000 --- a/tasks/section_2/cis_2.1.x.yml +++ /dev/null @@ -1,475 +0,0 @@ ---- - -- 
name: "2.1.1.1 | PATCH | Ensure time synchronization is in use" - ansible.builtin.package: - name: "{{ ubtu20cis_time_sync_tool }}" - state: present - when: - - ubtu20cis_rule_2_1_1_1 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.1.1 - - chrony - -- name: "2.1.1.2 | PATCH | Ensure systemd-timesyncd is configured" - block: - - name: "2.1.1.2 | PATCH | Ensure systemd-timesyncd is configured | Remove ntp and chrony" - ansible.builtin.package: - name: ['ntp', 'chrony'] - state: absent - - - name: "2.1.1.2 | PATCH | Ensure systemd-timesyncd is configured | Set configuration for systemd-timesyncd" - ansible.builtin.lineinfile: - path: /etc/systemd/timesyncd.conf - regexp: "{{ item.regexp }}" - line: "{{ item.line }}" - insertafter: "{{ item.insertafter }}" - with_items: - - { regexp: '^\[Time\]', line: '[Time]', insertafter: EOF } - - { regexp: '^#NTP|^NTP', line: 'NTP={{ ubtu20cis_ntp_server_list }}', insertafter: '\[Time\]' } - - { regexp: '^#FallbackNTP|^FallbackNTP', line: 'FallbackNTP={{ ubtu20cis_ntp_fallback_server_list }}', insertafter: '\[Time\]' } - - { regexp: '^#RootDistanceMaxSec|^RootDistanceMaxSec', line: 'RootDistanceMaxSec=1', insertafter: '\[Time\]' } - - - name: "2.1.1.2 | PATCH | Ensure systemd-timesyncd is configured | Start and enable the systemd-timesyncd service" - ansible.builtin.systemd: - name: systemd-timesyncd.service - state: started - enabled: true - masked: false - - - name: "2.1.1.2 | AUDIT | Ensure systemd-timesyncd is configured | Capture NTP state" - ansible.builtin.shell: "timedatectl status | grep NTP | cut -d':' -f2 | sed 's/^[ \\t]*//;s/[ \\t]*$//'" - changed_when: false - failed_when: false - register: ubtu20cis_2_1_1_2_ntp_status - - - name: "2.1.1.2 | PATCH | Ensure systemd-timesyncd is configured | Set timedatectl to ntp" - ansible.builtin.shell: timedatectl set-ntp true - changed_when: true - when: "'inactive' in ubtu20cis_2_1_1_2_ntp_status.stdout" - when: - - ubtu20cis_rule_2_1_1_2 - - ubtu20cis_time_sync_tool == "systemd-timesyncd" - tags: - - level1-server - - level1-workstation - - automated - - manual - - patch - - rule_2.1.1.2 - - systemd-timesyncd - -- name: "2.1.1.3 | PATCH | Ensure chrony is configured" - block: - - name: "2.1.1.3 | PATCH | Ensure chrony is configured | Remove ntp" - ansible.builtin.package: - name: ntp - state: absent - - - name: "2.1.1.3 | PATCH | Ensure chrony is configured | Disable/Mask systemd-timesyncd" - ansible.builtin.systemd: - name: systemd-timesyncd - state: stopped - enabled: false - masked: true - when: "'systemd-timesyncd' in ansible_facts.packages" - - - name: "2.1.1.3 | AUDIT | Ensure chrony is configured | Check for chrony user" - ansible.builtin.shell: grep {{ ubtu20cis_chrony_user }} /etc/passwd - changed_when: false - failed_when: false - check_mode: false - register: ubtu20cis_2_1_1_3_chrony_user_status - - - name: "2.1.1.3 | PATCH | Ensure chrony is configured | Set chrony.conf file" - ansible.builtin.template: - src: chrony.conf.j2 - dest: /etc/chrony/chrony.conf - owner: root - group: root - mode: 0644 - - - name: "2.1.1.3 | PATCH | Ensure chrony is configured | Create chrony user" - ansible.builtin.user: - name: "{{ ubtu20cis_chrony_user }}" - shell: /usr/sbin/nologin - system: true - when: ubtu20cis_2_1_1_3_chrony_user_status.stdout | length > 0 - - - name: "2.2.1.3 | PATCH | Ensure chrony is configured | Set option to use chrony user" - ansible.builtin.lineinfile: - path: /etc/default/chrony - regexp: '^DAEMON_OPTS' - line: 'DAEMON_OPTS="-u _chrony"' - 
when: - - ubtu20cis_rule_2_1_1_3 - - ubtu20cis_time_sync_tool == "chrony" - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.1.3 - - chrony - -- name: "2.1.1.4 | PATCH | Ensure ntp is configured" - block: - - name: "2.1.1.4 | PATCH | Ensure ntp is configured | Remove chrony" - ansible.builtin.package: - name: chrony - state: absent - - - name: "2.1.1.4 | PATCH | Ensure ntp is configured | Disable/Mask systemd-timesyncd" - ansible.builtin.systemd: - name: systemd-timesyncd - state: stopped - enabled: false - masked: true - - - name: "2.1.1.4 | PATCH | Ensure ntp is configured | Set ntp.conf settings" - ansible.builtin.template: - src: ntp.conf.j2 - dest: /etc/ntp.conf - owner: root - group: root - mode: 0644 - - - name: "2.1.1.4 | PATCH | Ensure ntp is configured | Modify sysconfig/ntpd" - ansible.builtin.lineinfile: - path: /etc/sysconfig/ntpd - regexp: "{{ item.regexp }}" - line: "{{ item. line }}" - create: true - with_items: - - { regexp: '^OPTIONS', line: 'OPTIONS="-u ntp:ntp"'} - - { regexp: '^NTPD_OPTIONS', line: 'NTPD_OPTIONS="-u ntp:ntp"' } - - - name: "2.1.1.4 | PATCH | Ensure ntp is configured | Modify /etc/init.d/ntp" - ansible.builtin.lineinfile: - path: /etc/init.d/ntp - regexp: '^RUNAUSER' - line: 'RUNAUSER=ntp' - when: - - ubtu20cis_rule_2_1_1_4 - - ubtu20cis_time_sync_tool == "ntp" - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.1.4 - - ntp - -- name: "2.1.2 | PATCH | Ensure X Window System is not installed" - ansible.builtin.package: - name: xserver-xorg* - state: absent - when: - - ubtu20cis_rule_2_1_2 - - not ubtu20cis_desktop_required - tags: - - level1-server - - automated - - patch - - rule_2.1.2 - - xwindows - -- name: "2.1.3 | PATCH | Ensure Avahi Server is not installed" - block: - - name: "2.1.3 | PATCH | Ensure Avahi Server is not installed | Stop/Disable avahi-daemon.service" - ansible.builtin.service: - name: avahi-daemon.service - state: stopped - enabled: false - when: avahi_service_status.stdout == "loaded" - - - name: "2.1.3 | PATCH | Ensure Avahi Server is not installed | Stop/Disable avahi-daemon.socket" - ansible.builtin.service: - name: avahi-daemon.socket - state: stopped - enabled: false - when: avahi_service_status.stdout == "loaded" - - - name: "2.1.3 | PATCH | Ensure Avahi Server is not installed | Remove avahi-daemon" - ansible.builtin.package: - name: avahi-daemon - state: absent - when: - - ubtu20cis_rule_2_1_3 - - not ubtu20cis_avahi_server - - ubtu20cis_disruption_high - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.3 - - avahi - - services - -- name: "2.1.4 | PATCH | Ensure CUPS is not installed" - ansible.builtin.package: - name: cups - state: absent - when: - - ubtu20cis_rule_2_1_4 - - not ubtu20cis_cups_server - tags: - - level1-server - - level2-workstation - - automated - - patch - - rule_2.1.4 - - cups - - services - -- name: "2.1.5 | PATCH | Ensure DHCP Server is not installed" - ansible.builtin.package: - name: isc-dhcp-server - state: absent - when: - - ubtu20cis_rule_2_1_5 - - not ubtu20cis_dhcp_server - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.5 - - dhcp - - services - -- name: "2.1.6 | PATCH | Ensure LDAP server is not installed" - ansible.builtin.package: - name: slapd - state: absent - when: - - ubtu20cis_rule_2_1_6 - - not ubtu20cis_ldap_server - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.6 - - ldap - - services - -- name: "2.1.7 | 
PATCH | Ensure NFS is not installed" - ansible.builtin.package: - name: nfs-kernel-server - state: absent - when: - - ubtu20cis_rule_2_1_7 - - not ubtu20cis_nfs_server - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.7 - - nfs - - rpc - - services - -- name: "2.1.8 | PATCH | Ensure DNS Server is not installed" - ansible.builtin.package: - name: bind9 - state: absent - when: - - ubtu20cis_rule_2_1_8 - - not ubtu20cis_dns_server - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.8 - - dns - - service - -- name: "2.1.9 | PATCH | Ensure FTP Server is not installed" - ansible.builtin.package: - name: vsftpd - state: absent - when: - - ubtu20cis_rule_2_1_9 - - not ubtu20cis_vsftpd_server - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.9 - - ftp - - service - -- name: "2.1.10 | PATCH | Ensure HTTP server is not installed" - ansible.builtin.package: - name: apache2 - state: absent - when: - - ubtu20cis_rule_2_1_10 - - not ubtu20cis_httpd_server - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.10 - - httpd - - service - -- name: "2.1.11 | PATCH | Ensure IMAP and POP3 server are not installed" - ansible.builtin.package: - name: ['dovecot-imapd', 'dovecot-pop3d'] - state: absent - when: - - ubtu20cis_rule_2_1_11 - - not ubtu20cis_dovecot_server - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.11 - - dovecot - - service - -- name: "2.1.12 | PATCH | Ensure Samba is not installed" - ansible.builtin.package: - name: samba - state: absent - when: - - ubtu20cis_rule_2_1_12 - - not ubtu20cis_smb_server - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.12 - - samba - - service - -- name: "2.1.13 | PATCH | Ensure HTTP Proxy Server is not installed" - ansible.builtin.package: - name: squid - state: absent - when: - - ubtu20cis_rule_2_1_13 - - not ubtu20cis_squid_server - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.13 - - http_proxy - - service - -- name: "2.1.14 | PATCH | Ensure SNMP Server is not installed" - ansible.builtin.package: - name: snmpd - state: absent - when: - - ubtu20cis_rule_2_1_14 - - not ubtu20cis_snmp_server - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.14 - - snmp - - service - -- name: "2.1.15 | PATCH | Ensure mail transfer agent is configured for local-only mode" - block: - - name: "2.1.15 | PATCH | Ensure mail transfer agent is configured for local-only mode | Make changes if exim4 installed" - ansible.builtin.lineinfile: - path: /etc/exim4/update-exim4.conf.conf - regexp: "{{ item.regexp }}" - line: "{{ item.line }}" - with_items: - - { regexp: '^dc_eximconfig_configtype', line: "dc_eximconfig_configtype='local'" } - - { regexp: '^dc_local_interfaces', line: "dc_local_interfaces='127.0.0.1 ; ::1'" } - - { regexp: '^dc_readhost', line: "dc_readhost=''" } - - { regexp: '^dc_relay_domains', line: "dc_relay_domains=''" } - - { regexp: '^dc_minimaldns', line: "dc_minimaldns='false'" } - - { regexp: '^dc_relay_nets', line: "dc_relay_nets=''" } - - { regexp: '^dc_smarthost', line: "dc_smarthost=''" } - - { regexp: '^dc_use_split_config', line: "dc_use_split_config='false'" } - - { regexp: '^dc_hide_mailname', line: "dc_hide_mailname=''" } - - { regexp: '^dc_mailname_in_oh', line: "dc_mailname_in_oh='true'" } - - { regexp: '^dc_localdelivery', line: "dc_localdelivery='mail_spool'" } - notify: restart 
exim4 - when: "'exim4' in ansible_facts.packages" - - - name: "2.1.15 | PATCH | Ensure mail transfer agent is configured for local-only mode | Make changes if postfix is installed" - ansible.builtin.lineinfile: - path: /etc/postfix/main.cf - regexp: '^(#)?inet_interfaces' - line: 'inet_interfaces = loopback-only' - notify: restart postfix - when: "'postfix' in ansible_facts.packages" - - - name: "2.1.15 | PATCH | Ensure mail transfer agent is configured for local-only mode | Message out other main agents" - ansible.builtin.debug: - msg: - - "Warning!! You are not using either exim4 or postfix" - - "Please review your vendors documentation to configure local-only mode" - when: - - "'exim4' not in ansible_facts.packages" - - "'postfix' not in ansible_facts.packages" - - - name: "2.1.15 | AUDIT | Ensure mail transfer agent is configured for local-only mode | Warn Count" - ansible.builtin.import_tasks: warning_facts.yml - when: - - "'exim4' not in ansible_facts.packages" - - "'postfix' not in ansible_facts.packages" - vars: - warn_control_id: '2.1.15' - when: - - ubtu20cis_rule_2_1_15 - tags: - - level1-server - - level1-workstation - - automated - - scored - - patch - - rule_2.1.15 - - postfix - -- name: "2.1.16 | PATCH | Ensure rsync service is not installed" - ansible.builtin.package: - name: rsync - state: absent - when: - - ubtu20cis_rule_2_1_16 - - not ubtu20cis_rsync_server - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_2.1.16 - - rsync - -- name: "2.1.17 | PATCH | Ensure NIS Server is not installed" - ansible.builtin.package: - name: nis - state: absent - when: - - ubtu20cis_rule_2_1_17 - - not ubtu20cis_nis_server - tags: - - level1-server - - level1-workstation - - automated - - rule_2.1.17 - - nis - - service diff --git a/tasks/section_2/cis_2.2.x.yml b/tasks/section_2/cis_2.2.x.yml index 006f0e68..5b0be52a 100644 --- a/tasks/section_2/cis_2.2.x.yml +++ b/tasks/section_2/cis_2.2.x.yml @@ -1,69 +1,95 @@ --- -- name: "2.2.1 | PATCH | Ensure NIS Client is not installed" + +- name: "2.2.1 | PATCH | Ensure X Window System is not installed" ansible.builtin.package: - name: nis + name: xserver-xorg* state: absent when: - ubtu20cis_rule_2_2_1 - - not ubtu20cis_nis_required + - not ubtu20cis_desktop_required tags: - level1-server - - level1-workstation + - automated + - patch - rule_2.2.1 - - nis + - xwindows -- name: "2.2.2 | PATCH | Ensure rsh client is not installed" - ansible.builtin.package: - name: rsh-client - state: absent +- name: "2.2.2 | PATCH | Ensure Avahi Server is not installed" + block: + - name: "2.2.2 | PATCH | Ensure Avahi Server is not installed| Stop/Disable avahi-daemon.service" + ansible.builtin.service: + name: avahi-daemon.service + state: stopped + enabled: false + when: avahi_service_status.stdout == "loaded" + + - name: "2.2.2 | PATCH | Ensure Avahi Server is not installed | Stop/Disable avahi-daemon.socket" + ansible.builtin.systemd: + name: avahi-daemon.socket + state: stopped + enabled: false + when: avahi_service_status.stdout == "loaded" + + - name: "2.2.2 | PATCH | Ensure Avahi Server is not installed | Remove avahi-daemon" + ansible.builtin.package: + name: avahi-daemon + state: absent when: - ubtu20cis_rule_2_2_2 - - not ubtu20cis_rsh_required + - not ubtu20cis_avahi_server + - ubtu20cis_disruption_high + - "'avahi' in ansible_facts.packages" tags: - level1-server - level1-workstation - automated - patch - rule_2.2.2 - - rsh + - avahi + - services -- name: "2.2.3 | PATCH | Ensure talk client is not installed" +- name: 
"2.2.3 | PATCH | Ensure CUPS is not installed" ansible.builtin.package: - name: talk + name: cups state: absent when: - ubtu20cis_rule_2_2_3 - - not ubtu20cis_talk_required + - not ubtu20cis_cups_server + - "'cups' in ansible_facts.packages" tags: - level1-server - - level1-workstation + - level2-workstation - automated - patch - rule_2.2.3 - - talk + - cups + - services -- name: "2.2.4 | PATCH | Ensure telnet client is not installed" +- name: "2.2.4 | PATCH | Ensure DHCP Server is not installed" ansible.builtin.package: - name: telnet + name: isc-dhcp-server state: absent when: - ubtu20cis_rule_2_2_4 - - not ubtu20cis_telnet_required + - not ubtu20cis_dhcp_server + - "'isc-dhcp-server' in ansible_facts.packages" tags: - level1-server - level1-workstation - automated - patch - rule_2.2.4 - - telnet + - dhcp + - services -- name: "2.2.5 | PATCH | Ensure LDAP client is not installed" +- name: "2.2.5 | PATCH | Ensure LDAP server is not installed" ansible.builtin.package: - name: ldap-utils + name: slapd state: absent when: - ubtu20cis_rule_2_2_5 - - not ubtu20cis_ldap_clients_required + - not ubtu20cis_ldap_server + - "'slapd' in ansible_facts.packages" tags: - level1-server - level1-workstation @@ -71,18 +97,248 @@ - patch - rule_2.2.5 - ldap + - services -- name: "2.2.6 | PATCH | Ensure RPC is not installed" +- name: "2.2.6 | PATCH | Ensure NFS is not installed" ansible.builtin.package: - name: rpcbind + name: nfs-kernel-server state: absent when: - ubtu20cis_rule_2_2_6 - - not ubtu20cis_rpc_required + - not ubtu20cis_nfs_server + - "'nfs-kernel-server' in ansible_facts.packages" tags: - level1-server - level1-workstation - automated - patch - rule_2.2.6 - - rpbc + - nfs + - rpc + - services + +- name: "2.2.7 | PATCH | Ensure DNS Server is not installed" + ansible.builtin.package: + name: bind9 + state: absent + when: + - ubtu20cis_rule_2_2_7 + - not ubtu20cis_dns_server + - "'bind9' in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.7 + - dns + - service + +- name: "2.2.8 | PATCH | Ensure FTP Server is not installed" + ansible.builtin.package: + name: vsftpd + state: absent + when: + - ubtu20cis_rule_2_2_8 + - not ubtu20cis_vsftpd_server + - "'vsftpd' in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.8 + - ftp + - service + +- name: "2.2.9 | PATCH | Ensure HTTP server is not installed" + ansible.builtin.package: + name: apache2 + state: absent + when: + - ubtu20cis_rule_2_2_9 + - not ubtu20cis_httpd_server + - "'apache2' in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.9 + - httpd + - service + +- name: "2.2.10 | PATCH | Ensure IMAP and POP3 server are not installed" + ansible.builtin.package: + name: ['dovecot-imapd', 'dovecot-pop3d'] + state: absent + when: + - ubtu20cis_rule_2_2_10 + - not ubtu20cis_dovecot_server + - "'dovecot-imapd' in ansible_facts.packages or + 'dovecot-pop3d' in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.10 + - dovecot + - service + +- name: "2.2.11 | PATCH | Ensure Samba is not installed" + ansible.builtin.package: + name: samba + state: absent + when: + - ubtu20cis_rule_2_2_11 + - not ubtu20cis_smb_server + - "'samba' in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.11 + - samba + - service + +- name: "2.2.12 | PATCH | Ensure HTTP Proxy Server is 
not installed" + ansible.builtin.package: + name: squid + state: absent + when: + - ubtu20cis_rule_2_2_12 + - not ubtu20cis_squid_server + - "'squid' in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.12 + - http_proxy + - service + +- name: "2.2.13 | PATCH | Ensure SNMP Server is not installed" + ansible.builtin.package: + name: snmpd + state: absent + when: + - ubtu20cis_rule_2_2_13 + - not ubtu20cis_snmp_server + - "'snmpd' in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.13 + - snmp + - service + +- name: "2.2.14 | PATCH | Ensure NIS Server is not installed" + ansible.builtin.package: + name: nis + state: absent + when: + - ubtu20cis_rule_2_2_14 + - not ubtu20cis_nis_server + - "'nis' in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - automated + - rule_2.2.14 + - nis + - service + +- name: "2.2.15 | PATCH | Ensure dnsmasq service is not installed" + ansible.builtin.package: + name: dnsmasq + state: absent + when: + - ubtu20cis_rule_2_2_15 + - not ubtu20cis_dnsmasq_server + - "'dnsmasq' in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.15 + - rsync + +- name: "2.2.16 | PATCH | Ensure mail transfer agent is configured for local-only mode" + block: + - name: "2.2.16 | PATCH | Ensure mail transfer agent is configured for local-only mode | Make changes if exim4 installed" + ansible.builtin.lineinfile: + path: /etc/exim4/update-exim4.conf.conf + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + with_items: + - { regexp: '^dc_eximconfig_configtype', line: "dc_eximconfig_configtype='local'" } + - { regexp: '^dc_local_interfaces', line: "dc_local_interfaces='127.0.0.1 ; ::1'" } + - { regexp: '^dc_readhost', line: "dc_readhost=''" } + - { regexp: '^dc_relay_domains', line: "dc_relay_domains=''" } + - { regexp: '^dc_minimaldns', line: "dc_minimaldns='false'" } + - { regexp: '^dc_relay_nets', line: "dc_relay_nets=''" } + - { regexp: '^dc_smarthost', line: "dc_smarthost=''" } + - { regexp: '^dc_use_split_config', line: "dc_use_split_config='false'" } + - { regexp: '^dc_hide_mailname', line: "dc_hide_mailname=''" } + - { regexp: '^dc_mailname_in_oh', line: "dc_mailname_in_oh='true'" } + - { regexp: '^dc_localdelivery', line: "dc_localdelivery='mail_spool'" } + notify: restart exim4 + when: "'exim4' in ansible_facts.packages" + + - name: "2.2.16 | PATCH | Ensure mail transfer agent is configured for local-only mode | Make changes if postfix is installed" + ansible.builtin.lineinfile: + path: /etc/postfix/main.cf + regexp: '^(#)?inet_interfaces' + line: 'inet_interfaces = loopback-only' + notify: restart postfix + when: "'postfix' in ansible_facts.packages" + + - name: "2.2.16 | PATCH | Ensure mail transfer agent is configured for local-only mode | Message out other main agents" + ansible.builtin.debug: + msg: + - "Warning!! 
You are not using either exim4 or postfix" + - "Please review your vendors documentation to configure local-only mode" + when: + - "'exim4' not in ansible_facts.packages" + - "'postfix' not in ansible_facts.packages" + + - name: "2.2.16 | WARN | Ensure mail transfer agent is configured for local-only mode | warn_count" + ansible.builtin.import_tasks: warning_facts.yml + when: + - "'exim4' not in ansible_facts.packages" + - "'postfix' not in ansible_facts.packages" + vars: + warn_control_id: '2.2.16' + when: + - ubtu20cis_rule_2_2_16 + - not ubtu20cis_is_mail_server + tags: + - level1-server + - level1-workstation + - automated + - scored + - patch + - rule_2.2.16 + - postfix + +- name: "2.2.17 | PATCH | Ensure rsync service is not installed" + ansible.builtin.package: + name: rsync + state: absent + when: + - ubtu20cis_rule_2_2_17 + - not ubtu20cis_rsync_server + - "'rsync' in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.2.17 + - rsync diff --git a/tasks/section_2/cis_2.3.x.yml b/tasks/section_2/cis_2.3.x.yml new file mode 100644 index 00000000..051cfe02 --- /dev/null +++ b/tasks/section_2/cis_2.3.x.yml @@ -0,0 +1,89 @@ +--- + +- name: "2.3.1 | PATCH | Ensure NIS Client is not installed" + ansible.builtin.package: + name: nis + state: absent + when: + - ubtu20cis_rule_2_3_1 + - not ubtu20cis_nis_required + tags: + - level1-server + - level1-workstation + - rule_2.3.1 + - nis + +- name: "2.3.2 | PATCH | Ensure rsh client is not installed" + ansible.builtin.package: + name: rsh-client + state: absent + when: + - ubtu20cis_rule_2_3_2 + - not ubtu20cis_rsh_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.3.2 + - rsh + +- name: "2.3.3 | PATCH | Ensure talk client is not installed" + ansible.builtin.package: + name: talk + state: absent + when: + - ubtu20cis_rule_2_3_3 + - not ubtu20cis_talk_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.3.3 + - talk + +- name: "2.3.4 | PATCH | Ensure telnet client is not installed" + ansible.builtin.package: + name: telnet + state: absent + when: + - ubtu20cis_rule_2_3_4 + - not ubtu20cis_telnet_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.3.4 + - telnet + +- name: "2.3.5 | PATCH | Ensure LDAP client is not installed" + ansible.builtin.package: + name: ldap-utils + state: absent + when: + - ubtu20cis_rule_2_3_5 + - not ubtu20cis_ldap_clients_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.3.5 + - ldap + +- name: "2.3.6 | PATCH | Ensure RPC is not installed" + ansible.builtin.package: + name: rpcbind + state: absent + when: + - ubtu20cis_rule_2_3_6 + - not ubtu20cis_rpc_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_2.3.6 + - rpbc diff --git a/tasks/section_2/cis_2.3.yml b/tasks/section_2/cis_2.4.yml similarity index 59% rename from tasks/section_2/cis_2.3.yml rename to tasks/section_2/cis_2.4.yml index a9387428..42b1e6b5 100644 --- a/tasks/section_2/cis_2.3.yml +++ b/tasks/section_2/cis_2.4.yml @@ -1,29 +1,32 @@ --- -- name: "2.3 | AUDIT | Ensure nonessential services are removed or masked" + +- name: "2.4 | AUDIT | Ensure nonessential services are removed or masked" block: - - name: "2.3 | AUDIT | Ensure nonessential services are removed or masked | Check for services" + - name: "2.4 | AUDIT | Ensure nonessential services are removed or masked | Check for services" 
ansible.builtin.shell: lsof -i -P -n | grep -v "(ESTABLISHED)" changed_when: false failed_when: false check_mode: false register: ubtu20cis_2_3_services - - name: "2.3 | AUDIT | Ensure nonessential services are removed or masked | Message out running services" + - name: "2.4 | AUDIT | Ensure nonessential services are removed or masked | Message out running services" ansible.builtin.debug: msg: - "Warning!! Below are the running services. Please review and remove as well as mask un-needed services" - "{{ ubtu20cis_2_3_services.stdout_lines }}" + when: ubtu20cis_2_3_services.stdout | length > 0 - - name: "2.3 | AUDIT | Ensure nonessential services are removed or masked | Warn Count" + - name: "2.4 | AUDIT | Ensure nonessential services are removed or masked | Set warning count" ansible.builtin.import_tasks: warning_facts.yml + when: ubtu20cis_2_3_services.stdout | length > 0 vars: - warn_control_id: '2.3' + warn_control_id: '2.4' when: - - ubtu20cis_rule_2_3 + - ubtu20cis_rule_2_4 tags: - level1-server - level1-workstation - manual - audit - - rule_2.3 + - rule_2.4 - services diff --git a/tasks/section_2/main.yml b/tasks/section_2/main.yml index 1a39e2bb..95021fef 100644 --- a/tasks/section_2/main.yml +++ b/tasks/section_2/main.yml @@ -1,9 +1,24 @@ --- -- name: "SECTION | 2.1 | Special Purpose Services" - ansible.builtin.import_tasks: cis_2.1.x.yml +- name: "SECTION | 2.1.1 | Configure Time Synchronization" + ansible.builtin.import_tasks: cis_2.1.1.x.yml -- name: "SECTION | 2.2 | Service Clients" +- name: "SECTION | 2.1.2 | Configure chrony" + ansible.builtin.import_tasks: cis_2.1.2.x.yml + when: ubtu20cis_time_sync_tool == "chrony" + +- name: "SECTION | 2.1.3 | Configure systemd-timesyncd" + ansible.builtin.import_tasks: cis_2.1.3.x.yml + when: ubtu20cis_time_sync_tool == "systemd-timesyncd" + +- name: "SECTION | 2.1.4 | Configure NTP" + ansible.builtin.import_tasks: cis_2.1.4.x.yml + when: ubtu20cis_time_sync_tool == "ntp" + +- name: "SECTION | 2.2 | Special Purpose Services" ansible.builtin.import_tasks: cis_2.2.x.yml -- name: "SECTION | 2.3 | Ensure nonessential services are removed or masked" - ansible.builtin.import_tasks: cis_2.3.yml +- name: "SECTION | 2.3 | Service Clients" + ansible.builtin.import_tasks: cis_2.3.x.yml + +- name: "SECTION | 2.4 | Ensure nonessential services are removed or masked" + ansible.builtin.import_tasks: cis_2.4.yml diff --git a/tasks/section_3/cis_3.1.x.yml b/tasks/section_3/cis_3.1.x.yml index 396222c8..68fa65d8 100644 --- a/tasks/section_3/cis_3.1.x.yml +++ b/tasks/section_3/cis_3.1.x.yml @@ -1,4 +1,5 @@ --- + - name: "3.1.1 | PATCH | Disable IPv6" block: - name: "3.1.1 | AUDIT | Disable IPv6 | Get current GRUB_CMDLINE_LINUX settings" @@ -14,7 +15,7 @@ regexp: '^GRUB_CMDLINE_LINUX' line: 'GRUB_CMDLINE_LINUX="{{ ubtu20cis_3_1_1_grub_cmdline_linux_settings.stdout }} ipv6.disable=1"' when: "'ipv6.disable' not in ubtu20cis_3_1_1_grub_cmdline_linux_settings.stdout" - notify: grub update + notify: Grub update - name: "3.1.1 | PATCH | Disable IPv6 | Set ipv6.disable to 1 if exists" ansible.builtin.replace: @@ -22,7 +23,7 @@ regexp: 'ipv6\.disable=.' 
replace: 'ipv6.disable=1' when: "'ipv6.disable' in ubtu20cis_3_1_1_grub_cmdline_linux_settings.stdout" - notify: grub update + notify: Grub update - name: "3.1.1 | PATCH | Disable IPv6 | Remove net.ipv6.conf.all.disable_ipv6" ansible.builtin.lineinfile: @@ -68,6 +69,15 @@ loop: - "{{ wireless_interfaces.stdout_lines }}" + - name: "3.1.2 | PATCH | Ensure wireless interfaces are not loadable | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: 'blacklist\s*{{ item }}\s*' + line: "blacklist {{ item.line }}" + create: true + loop: + - "{{ wireless_interfaces.stdout_lines }}" + when: - ubtu20cis_rule_3_1_2 - wireless_interfaces.stdout | length > 0 @@ -78,3 +88,120 @@ - patch - rule_3.1.2 - wireless + +- name: "3.1.3 | PATCH | Ensure bluetooth is disabled" + ansible.builtin.systemd: + name: bluetooth.service + state: stopped + enabled: false + masked: true + when: + - ubtu20cis_rule_3_1_3 + - "'bluetooth' in ansible_facts.packages" + tags: + - level1-server + - automated + - patch + - rule_3.1.3 + - bluetooth + +- name: "3.1.4 | PATCH | Ensure DCCP is disabled" + block: + - name: "3.1.4 | PATCH | Ensure DCCP is disabled | modprobe" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/dccp.conf + regexp: '^(#)?install dccp(\\s|$)' + line: 'install dccp /bin/true' + create: true + + - name: "3.1.4 | PATCH | Ensure DCCP is disabled | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: '^(#)?blacklist dccp(\\s|$)' + line: 'blacklist dccp' + create: true + when: + - ubtu20cis_rule_3_1_4 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_3.1.4 + - dccp + +- name: "3.1.5 | PATCH | Ensure SCTP is disabled" + block: + - name: "3.1.5 | PATCH | Ensure SCTP is disabled | modprobe" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/sctp.conf + regexp: "^(#)?install sctp(\\s|$)" + line: 'install sctp /bin/true' + create: true + + - name: "3.1.5 | PATCH | Ensure SCTP is disabled | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist sctp(\\s|$)" + line: 'blacklist sctp' + create: true + + when: + - ubtu20cis_rule_3_1_5 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_3.1.5 + - sctp + +- name: "3.1.6 | PATCH | Ensure RDS is disabled" + block: + - name: "3.1.6 | PATCH | Ensure RDS is disabled | modprobe" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/rds.conf + regexp: '^(#)?install rds(\\s|$)' + line: 'install rds /bin/true' + create: true + + - name: "3.1.6 | PATCH | Ensure RDS is disabled | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist rds(\\s|$)" + line: 'blacklist rds' + create: true + when: + - ubtu20cis_rule_3_1_6 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_3.1.6 + - rds + +- name: "3.1.7 | PATCH | Ensure TIPC is disabled" + block: + - name: "3.1.7 | PATCH | Ensure TIPC is disabled | modprobe" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/tipc.conf + regexp: '^(#)?install tipc(\\s|$)' + line: install tipc /bin/true + create: true + + - name: "3.1.7 | PATCH | Ensure TIPC is disabled | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist tipc(\\s|$)" + line: 'blacklist tipc' + create: true + when: + - ubtu20cis_rule_3_1_7 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_3.1.7 + - tipc diff --git 
a/tasks/section_3/cis_3.5.1.x.yml b/tasks/section_3/cis_3.4.1.x.yml similarity index 70% rename from tasks/section_3/cis_3.5.1.x.yml rename to tasks/section_3/cis_3.4.1.x.yml index 028941fd..76f598f5 100644 --- a/tasks/section_3/cis_3.5.1.x.yml +++ b/tasks/section_3/cis_3.4.1.x.yml @@ -1,76 +1,76 @@ --- -- name: "3.5.1.1 | PATCH | Ensure ufw is installed" +- name: "3.4.1.1 | PATCH | Ensure ufw is installed" ansible.builtin.package: name: ufw state: present when: - - ubtu20cis_rule_3_5_1_1 + - ubtu20cis_rule_3_4_1_1 - ubtu20cis_firewall_package == "ufw" tags: - level1-server - level1-workstation - automated - patch - - rule_3.5.1.1 + - rule_3.4.1.1 - apt - ufw -- name: "3.5.1.2 | PATCH | Ensure iptables-persistent is not installed with ufw" +- name: "3.4.1.2 | PATCH | Ensure iptables-persistent is not installed with ufw" ansible.builtin.package: name: iptables-persistent state: absent when: - - ubtu20cis_rule_3_5_1_2 + - ubtu20cis_rule_3_4_1_2 - ubtu20cis_firewall_package == "ufw" tags: - level1-server - level1-workstation - automated - patch - - rule_3.5.1.2 + - rule_3.4.1.2 - ufw # Adding the allow OpenSSH rule while enabling ufw to allow ansible to run after enabling -- name: "3.5.1.3 | PATCH | Ensure ufw service is enabled" +- name: "3.4.1.3 | PATCH | Ensure ufw service is enabled" community.general.ufw: rule: allow name: OpenSSH state: enabled when: - - ubtu20cis_rule_3_5_2_1 + - ubtu20cis_rule_3_4_1_3 - ubtu20cis_firewall_package == "ufw" tags: - level1-server - level1-workstation - automated - patch - - rule_3.5.1.3 + - rule_3.4.1.3 - ufw -- name: "3.5.1.4 | PATCH | Ensure loopback traffic is configured" +- name: "3.4.1.4 | PATCH | Ensure loopback traffic is configured" block: - - name: "3.5.1.4 | PATCH | Ensure loopback traffic is configured | Set allow in ufw rules" + - name: "3.4.1.4 | PATCH | Ensure loopback traffic is configured | Set allow in ufw rules" community.general.ufw: rule: allow direction: in interface: lo notify: reload ufw - - name: "3.5.1.4 | PATCH | Ensure loopback traffic is configured | Set allow out ufw rules" + - name: "3.4.1.4 | PATCH | Ensure loopback traffic is configured | Set allow out ufw rules" community.general.ufw: rule: allow direction: out interface: lo notify: reload ufw - - name: "3.5.1.4 | PATCH | Ensure loopback traffic is configured | Set deny ufw rules IPv4" + - name: "3.4.1.4 | PATCH | Ensure loopback traffic is configured | Set deny ufw rules IPv4" community.general.ufw: rule: deny direction: in from_ip: 127.0.0.0/8 notify: reload ufw - - name: "3.5.1.4 | PATCH | Ensure loopback traffic is configured | Set deny ufw rules IPv6" + - name: "3.4.1.4 | PATCH | Ensure loopback traffic is configured | Set deny ufw rules IPv6" community.general.ufw: rule: deny direction: in @@ -78,19 +78,19 @@ notify: reload ufw when: ubtu20cis_ipv6_required when: - - ubtu20cis_rule_3_5_1_4 + - ubtu20cis_rule_3_4_1_4 - ubtu20cis_firewall_package == "ufw" tags: - level1-server - level1-workstation - automated - patch - - rule_3.5.1.4 + - rule_3.4.1.4 - ufw -- name: "3.5.1.5 | PATCH | Ensure ufw outbound connections are configured" +- name: "3.4.1.5 | PATCH | Ensure ufw outbound connections are configured" block: - - name: "3.5.1.5 | PATCH | Ensure ufw outbound connections are configured | Custom ports" + - name: "3.4.1.5 | PATCH | Ensure ufw outbound connections are configured | Custom ports" community.general.ufw: rule: allow direction: out @@ -100,63 +100,63 @@ notify: reload ufw when: ubtu20cis_ufw_allow_out_ports != "all" - - name: "3.5.1.5 | PATCH | Ensure ufw 
outbound connections are configured | Allow all" + - name: "3.4.1.5 | PATCH | Ensure ufw outbound connections are configured | Allow all" community.general.ufw: rule: allow direction: out notify: reload ufw when: "'all' in ubtu20cis_ufw_allow_out_ports" when: - - ubtu20cis_rule_3_5_1_5 + - ubtu20cis_rule_3_4_1_5 - ubtu20cis_firewall_package == "ufw" tags: - level1-server - level1-workstation - manual - patch - - rule_3.5.1.5 + - rule_3.4.1.5 - ufw -- name: "3.5.1.6 | AUDIT | Ensure ufw firewall rules exist for all open ports" +- name: "3.4.1.6 | AUDIT | Ensure ufw firewall rules exist for all open ports" block: - - name: "3.5.1.6 | AUDIT | Ensure ufw firewall rules exist for all open ports | Get list of open ports" + - name: "3.4.1.6 | AUDIT | Ensure ufw firewall rules exist for all open ports | Get list of open ports" ansible.builtin.shell: ss -4tuln changed_when: false failed_when: false check_mode: false - register: ubtu20cis_3_5_1_6_open_listen_ports + register: ubtu20cis_3_4_1_6_open_listen_ports - - name: "3.5.1.6 | AUDIT | Ensure ufw firewall rules exist for all open ports | Get list of firewall rules" + - name: "3.4.1.6 | AUDIT | Ensure ufw firewall rules exist for all open ports | Get list of firewall rules" ansible.builtin.shell: ufw status changed_when: false failed_when: false check_mode: false - register: ubtu20cis_3_5_1_6_firewall_rules + register: ubtu20cis_3_4_1_6_firewall_rules - - name: "3.5.1.6 | AUDIT | Ensure ufw firewall rules exist for all open ports | Message out settings" + - name: "3.4.1.6 | AUDIT | Ensure ufw firewall rules exist for all open ports | Message out settings" ansible.builtin.debug: msg: - "Warning!! Below are the listening ports and firewall rules" - "Please create firewall rule for any open ports if not already done" - "*****---Open Listen Ports---*****" - - "{{ ubtu20cis_3_5_1_6_open_listen_ports.stdout_lines }}" + - "{{ ubtu20cis_3_4_1_6_open_listen_ports.stdout_lines }}" - "*****---Firewall Rules---*****" - - "{{ ubtu20cis_3_5_1_6_firewall_rules.stdout_lines }}" + - "{{ ubtu20cis_3_4_1_6_firewall_rules.stdout_lines }}" - - name: "3.5.1.6 | AUDIT | Ensure ufw firewall rules exist for all open ports | Warn Count" + - name: "3.4.1.6 | AUDIT | Ensure ufw firewall rules exist for all open ports | Warn Count" ansible.builtin.import_tasks: warning_facts.yml vars: - warn_control_id: '3.5.1.6' + warn_control_id: '3.4.1.6' when: - - ubtu20cis_rule_3_5_1_6 + - ubtu20cis_rule_3_4_1_6 - ubtu20cis_firewall_package == "ufw" tags: - level1-server - level1-workstation - manual - audit - - rule_3.5.1.6 + - rule_3.4.1.6 - ufw - name: "OPTIONAL | UFW incoming ports" @@ -175,7 +175,7 @@ - ufw - ufw_in_ports -- name: "3.5.1.7 | PATCH | Ensure ufw default deny firewall policy" +- name: "3.4.1.7 | PATCH | Ensure ufw default deny firewall policy" community.general.ufw: default: deny direction: "{{ item }}" @@ -185,12 +185,12 @@ - outgoing - routed when: - - ubtu20cis_rule_3_5_1_7 + - ubtu20cis_rule_3_4_1_7 - ubtu20cis_firewall_package == "ufw" tags: - level1-server - level1-workstation - automated - patch - - rule_3.5.1.7 + - rule_3.4.1.7 - ufw diff --git a/tasks/section_3/cis_3.5.2.x.yml b/tasks/section_3/cis_3.4.2.x.yml similarity index 59% rename from tasks/section_3/cis_3.5.2.x.yml rename to tasks/section_3/cis_3.4.2.x.yml index b47dc8a2..f03538f0 100644 --- a/tasks/section_3/cis_3.5.2.x.yml +++ b/tasks/section_3/cis_3.4.2.x.yml @@ -2,204 +2,204 @@ # --------------- # --------------- -# NFTables is unsupported with this role. 
However I have the actions commented out as a guide +# NFTables is unsupported with this role. However we have the actions commented out as a guide # --------------- # --------------- -- name: "3.5.2.1 | AUDIT | Ensure nftables is installed" +- name: "3.4.2.1 | AUDIT | Ensure nftables is installed" ansible.builtin.package: name: nftables state: present when: - - ubtu20cis_rule_3_5_2_1 + - ubtu20cis_rule_3_4_2_1 - ubtu20cis_firewall_package == "nftables" tags: - level1-server - level1-workstation - automated - audit - - rule_3.5.2.1 + - rule_3.4.2.1 - nftables -- name: "3.5.2.2 | AUDIT | Ensure ufw is uninstalled or disabled with nftables" +- name: "3.4.2.2 | AUDIT | Ensure ufw is uninstalled or disabled with nftables" ansible.builtin.package: name: ufw state: absent when: - - ubtu20cis_rule_3_5_2_2 + - ubtu20cis_rule_3_4_2_2 - ubtu20cis_firewall_package == "nftables" tags: - level1-server - level1-workstation - automated - audit - - rule_3.5.2.2 + - rule_3.4.2.2 - nftables -- name: "3.5.2.3 | AUDIT | Ensure iptables are flushed with nftables" +- name: "3.4.2.3 | AUDIT | Ensure iptables are flushed with nftables" block: - - name: "3.5.2.3 | AUDIT | Ensure iptables are flushed with nftables" + - name: "3.4.2.3 | AUDIT | Ensure iptables are flushed with nftables" ansible.builtin.debug: msg: "Warning!! NFTables is not supported in this role. Please use UFW, iptables, or manually manage nftables" - - name: "3.5.2.3 | AUDIT | Ensure iptables are flushed with nftables | Warn Count" + - name: "3.4.2.3 | AUDIT | Ensure iptables are flushed with nftables | Warn Count" ansible.builtin.import_tasks: warning_facts.yml vars: - warn_control_id: '3.5.2.3 NFTables changes not supported' + warn_control_id: '3.4.2.3 NFTables changes not supported' when: - - ubtu20cis_rule_3_5_2_3 + - ubtu20cis_rule_3_4_2_3 - ubtu20cis_firewall_package == "nftables" tags: - level1-server - level1-workstation - manual - audit - - rule_3.5.2.3 + - rule_3.4.2.3 - nftables -- name: "3.5.2.4 | AUDIT | Ensure a nftables table exists" +- name: "3.4.2.4 | AUDIT | Ensure a nftables table exists" block: - - name: "3.5.2.4 | AUDIT | Ensure a nftables table exists" + - name: "3.4.2.4 | AUDIT | Ensure a nftables table exists" ansible.builtin.debug: msg: "Warning!! NFTables is not supported in this role. Please use UFW, iptables, or manually manage nftables" - - name: "3.5.2.4 | AUDIT | Ensure a nftables table exists | Warn Count" + - name: "3.4.2.4 | AUDIT | Ensure a nftables table exists | Warn Count" ansible.builtin.import_tasks: warning_facts.yml vars: - warn_control_id: '3.5.2.4 NFTables changes not supported' + warn_control_id: '3.4.2.4 NFTables changes not supported' when: - - ubtu20cis_rule_3_5_2_4 + - ubtu20cis_rule_3_4_2_4 - ubtu20cis_firewall_package == "nftables" tags: - level1-server - level1-workstation - automated - patch - - rule_3.5.2.4 + - rule_3.4.2.4 - nftables -- name: "3.5.2.5 | AUDIT | Ensure nftables base chains exist" +- name: "3.4.2.5 | AUDIT | Ensure nftables base chains exist" block: - - name: "3.5.2.5 | AUDIT | Ensure nftables base chains exist" + - name: "3.4.2.5 | AUDIT | Ensure nftables base chains exist" ansible.builtin.debug: msg: "Warning!! NFTables is not supported in this role. 
Please use UFW, iptables, or manually manage nftables" - - name: "3.5.2.5 | AUDIT | Ensure nftables base chains exist | Warn Count" + - name: "3.4.2.5 | AUDIT | Ensure nftables base chains exist | Warn Count" ansible.builtin.import_tasks: warning_facts.yml vars: - warn_control_id: '3.5.2.5 NFTables changes not supported' + warn_control_id: '3.4.2.5 NFTables changes not supported' when: - - ubtu20cis_rule_3_5_2_5 + - ubtu20cis_rule_3_4_2_5 - ubtu20cis_firewall_package == "nftables" tags: - level1-server - level1-workstation - automated - audit - - rule_3.5.2.5 + - rule_3.4.2.5 - nftables -- name: "3.5.2.6 | AUDIT | Ensure nftables loopback traffic is configured" +- name: "3.4.2.6 | AUDIT | Ensure nftables loopback traffic is configured" block: - - name: "3.5.2.6 | AUDIT | Ensure nftables loopback traffic is configured" + - name: "3.4.2.6 | AUDIT | Ensure nftables loopback traffic is configured" ansible.builtin.debug: msg: "Warning!! NFTables is not supported in this role. Please use UFW, iptables, or manually manage nftables" - - name: "3.5.2.6 | AUDIT | Ensure nftables loopback traffic is configured | Warn Count" + - name: "3.4.2.6 | AUDIT | Ensure nftables loopback traffic is configured | Warn Count" ansible.builtin.import_tasks: warning_facts.yml vars: - warn_control_id: '3.5.2.6 NFTables changes not supported' + warn_control_id: '3.4.2.6 NFTables changes not supported' when: - - ubtu20cis_rule_3_5_2_6 + - ubtu20cis_rule_3_4_2_6 - ubtu20cis_firewall_package == "nftables" tags: - level1-server - level1-workstation - automated - audit - - rule_3.5.2.6 + - rule_3.4.2.6 - nftables -- name: "3.5.2.7 | AUDIT | Ensure nftables outbound and established connections are configured" +- name: "3.4.2.7 | AUDIT | Ensure nftables outbound and established connections are configured" block: - - name: "3.5.2.7 | AUDIT | Ensure nftables outbound and established connections are configured" + - name: "3.4.2.7 | AUDIT | Ensure nftables outbound and established connections are configured" ansible.builtin.debug: msg: "Warning!! NFTables is not supported in this role. Please use UFW, iptables, or manually manage nftables" - - name: "3.5.2.7 | AUDIT | Ensure nftables outbound and established connections are configured | Warn Count" + - name: "3.4.2.7 | AUDIT | Ensure nftables outbound and established connections are configured | Warn Count" ansible.builtin.import_tasks: warning_facts.yml vars: - warn_control_id: '3.5.2.7 NFTables changes not supported' + warn_control_id: '3.4.2.7 NFTables changes not supported' when: - - ubtu20cis_rule_3_5_2_7 + - ubtu20cis_rule_3_4_2_7 - ubtu20cis_firewall_package == "nftables" tags: - level1-server - level1-workstation - manual - audit - - rule_3.5.2.7 + - rule_3.4.2.7 - nftables -- name: "3.5.2.8 | AUDIT | Ensure nftables default deny firewall policy" +- name: "3.4.2.8 | AUDIT | Ensure nftables default deny firewall policy" block: - - name: "3.5.2.8 | AUDIT | Ensure nftables default deny firewall policy" + - name: "3.4.2.8 | AUDIT | Ensure nftables default deny firewall policy" ansible.builtin.debug: msg: "Warning!! NFTables is not supported in this role. 
Please use UFW, iptables, or manually manage nftables" - - name: "3.5.2.8 | AUDIT | Ensure nftables default deny firewall policy | Warn Count" + - name: "3.4.2.8 | AUDIT | Ensure nftables default deny firewall policy | Warn Count" ansible.builtin.import_tasks: warning_facts.yml vars: - warn_control_id: '3.5.2.8 NFTables changes not supported' + warn_control_id: '3.4.2.8 NFTables changes not supported' when: - - ubtu20cis_rule_3_5_2_8 + - ubtu20cis_rule_3_4_2_8 - ubtu20cis_firewall_package == "nftables" tags: - level1-server - level1-workstation - automated - audit - - rule_3.5.2.8 + - rule_3.4.2.8 - nftables -- name: "3.5.2.9 | AUDIT | Ensure nftables service is enabled" +- name: "3.4.2.9 | AUDIT | Ensure nftables service is enabled" block: - - name: "3.5.2.9 | AUDIT | Ensure nftables service is enabled" + - name: "3.4.2.9 | AUDIT | Ensure nftables service is enabled" ansible.builtin.debug: msg: "Warning!! NFTables is not supported in this role. Please use UFW, iptables, or manually manage nftables" - - name: "3.5.2.9 | AUDIT | Ensure nftables service is enabled | Warn Count" + - name: "3.4.2.9 | AUDIT | Ensure nftables service is enabled | Warn Count" ansible.builtin.import_tasks: warning_facts.yml vars: - warn_control_id: '3.5.2.9 NFTables changes not supported' + warn_control_id: '3.4.2.9 NFTables changes not supported' when: - - ubtu20cis_rule_3_5_2_9 + - ubtu20cis_rule_3_4_2_9 - ubtu20cis_firewall_package == "nftables" tags: - level1-server - level1-workstation - automated - audit - - rule_3.5.2.9 + - rule_3.4.2.9 - nftables -- name: "3.5.2.10 | AUDIT | Ensure nftables rules are permanent" +- name: "3.4.2.10 | AUDIT | Ensure nftables rules are permanent" block: - - name: "3.5.2.10 | AUDIT | Ensure nftables rules are permanent" + - name: "3.4.2.10 | AUDIT | Ensure nftables rules are permanent" ansible.builtin.debug: msg: "Warning!! NFTables is not supported in this role. 
Please use UFW, iptables, or manually manage nftables" - - name: "3.5.2.10 | AUDIT | Ensure nftables rules are permanent | Warn Count" + - name: "3.4.2.10 | AUDIT | Ensure nftables rules are permanent | Warn Count" ansible.builtin.import_tasks: warning_facts.yml vars: - warn_control_id: '3.5.2.10 NFTables changes not supported' + warn_control_id: '3.4.2.10 NFTables changes not supported' when: - - ubtu20cis_rule_3_5_2_10 + - ubtu20cis_rule_3_4_2_10 - ubtu20cis_firewall_package == "nftables" tags: - level1-server - level1-workstation - automated - audit - - rule_3.5.2.10 + - rule_3.4.2.10 - nftables diff --git a/tasks/section_3/cis_3.5.3.x.yml b/tasks/section_3/cis_3.4.3.x.yml similarity index 73% rename from tasks/section_3/cis_3.5.3.x.yml rename to tasks/section_3/cis_3.4.3.x.yml index 162aa60a..077346fa 100644 --- a/tasks/section_3/cis_3.5.3.x.yml +++ b/tasks/section_3/cis_3.4.3.x.yml @@ -1,53 +1,104 @@ --- -- name: "3.5.3.1.1 | PATCH | Ensure iptables packages are installed" +- name: "3.4.3.1.1 | PATCH | Ensure iptables packages are installed" ansible.builtin.package: name: ['iptables', 'iptables-persistent'] state: present when: - - ubtu20cis_rule_3_5_3_1_1 + - ubtu20cis_rule_3_4_3_1_1 - ubtu20cis_firewall_package == "iptables" tags: - level1-server - level1-workstation - automated - patch - - rule_3.5.3.1.1 + - rule_3.4.3.1.1 - iptables -- name: "3.5.3.1.2 | PATCH | Ensure nftables is not installed with iptables" +- name: "3.4.3.1.2 | PATCH | Ensure nftables is not installed with iptables" ansible.builtin.package: name: nftables state: absent when: - - ubtu20cis_rule_3_5_3_1_2 + - ubtu20cis_rule_3_4_3_1_2 - ubtu20cis_firewall_package == "iptables" tags: - level1-server - level1-workstation - automated - patch - - rule_3.5.3.1.2 + - rule_3.4.3.1.2 - iptables -- name: "3.5.3.1.3 | PATCH | Ensure ufw is uninstalled or disabled with iptables" +- name: "3.4.3.1.3 | PATCH | Ensure ufw is uninstalled or disabled with iptables" ansible.builtin.package: name: ufw state: absent when: - - ubtu20cis_rule_3_5_3_1_3 + - ubtu20cis_rule_3_4_3_1_3 - ubtu20cis_firewall_package == "iptables" tags: - level1-server - level1-workstation - automated - patch - - rule_3.5.3.1.3 + - rule_3.4.3.1.3 - iptables -- name: "3.5.3.2.1 | PATCH | Ensure iptables loopback traffic is configured" +# iptables ipv4 + +- name: "3.4.3.2.1 | PATCH | Ensure iptables default deny firewall policy" + block: + - name: "3.4.3.2.1 | PATCH | Ensure iptables default deny firewall policy | Configure SSH to be allowed in" + ansible.builtin.iptables: + chain: INPUT + protocol: tcp + destination_port: 22 + jump: ACCEPT + ctstate: 'NEW,ESTABLISHED' + notify: persistent ip4tables + + - name: "3.4.3.2.1 | PATCH | Ensure iptables default deny firewall policy | Configure SSH to be allowed out" + ansible.builtin.iptables: + chain: OUTPUT + protocol: tcp + source_port: 22 + jump: ACCEPT + ctstate: 'NEW,ESTABLISHED' + notify: persistent ip4tables + + - name: "3.4.3.2.1 | PATCH | Ensure iptables default deny firewall policy | Enable apt traffic" + ansible.builtin.iptables: + chain: INPUT + ctstate: 'ESTABLISHED' + jump: ACCEPT + notify: persistent ip4tables + + - name: "3.4.3.2.1 | PATCH | Ensure iptables default deny firewall policy | Set drop items" + ansible.builtin.iptables: + policy: DROP + chain: "{{ item }}" + with_items: + - INPUT + - FORWARD + - OUTPUT + notify: persistent ip4tables + when: + - ubtu20cis_rule_3_4_3_2_1 + - ubtu20cis_firewall_package == "iptables" + - ubtu20cis_ipv4_required + - not system_is_ec2 + tags: + - level1-server + 
- level1-workstation + - automated + - patch + - rule_3.4.3.2.1 + - iptables + +- name: "3.4.3.2.2 | PATCH | Ensure iptables loopback traffic is configured" block: - - name: "3.5.3.2.1 | PATCH | Ensure iptables loopback traffic is configured | INPUT loopback ACCEPT" + - name: "3.4.3.2.2 | PATCH | Ensure iptables loopback traffic is configured | INPUT loopback ACCEPT" ansible.builtin.iptables: action: append chain: INPUT @@ -55,7 +106,7 @@ jump: ACCEPT notify: persistent ip4tables - - name: "3.5.3.2.1 | PATCH | Ensure iptables loopback traffic is configured | OUTPUT loopback ACCEPT" + - name: "3.4.3.2.2 | PATCH | Ensure iptables loopback traffic is configured | OUTPUT loopback ACCEPT" ansible.builtin.iptables: action: append chain: OUTPUT @@ -63,7 +114,7 @@ jump: ACCEPT notify: persistent ip4tables - - name: "3.5.3.2.1 | PATCH | Ensure iptables loopback traffic is configured | OUTPUT loopback ACCEPT" + - name: "3.4.3.2.2 | PATCH | Ensure iptables loopback traffic is configured | OUTPUT loopback ACCEPT" ansible.builtin.iptables: action: append chain: INPUT @@ -71,7 +122,7 @@ jump: DROP notify: persistent ip4tables when: - - ubtu20cis_rule_3_5_3_2_1 + - ubtu20cis_rule_3_4_3_2_2 - ubtu20cis_firewall_package == "iptables" - ubtu20cis_ipv4_required tags: @@ -79,10 +130,10 @@ - level1-workstation - automated - patch - - rule_3.5.3.2.1 + - rule_3.4.3.2.2 - iptables -- name: "3.5.3.2.2 | PATCH | Ensure iptables outbound and established connections are configured" +- name: "3.4.3.2.3 | PATCH | Ensure iptables outbound and established connections are configured" ansible.builtin.iptables: action: append chain: '{{ item.chain }}' @@ -99,7 +150,7 @@ - { chain: INPUT, protocol: icmp, ctstate: 'ESTABLISHED' } notify: persistent ip4tables when: - - ubtu20cis_rule_3_5_3_2_2 + - ubtu20cis_rule_3_4_3_2_3 - ubtu20cis_firewall_package == "iptables" - ubtu20cis_ipv4_required tags: @@ -107,89 +158,39 @@ - level1-workstation - manual - patch - - rule_3.5.3.2.2 + - rule_3.4.3.2.3 - iptables -- name: "3.5.3.2.3 | PATCH | Ensure iptables default deny firewall policy" +- name: "3.4.3.2.4 | AUDIT | Ensure iptables firewall rules exist for all open ports" block: - - name: "3.5.3.2.3 | PATCH | Ensure iptables default deny firewall policy | Configure SSH to be allowed in" - ansible.builtin.iptables: - chain: INPUT - protocol: tcp - destination_port: 22 - jump: ACCEPT - ctstate: 'NEW,ESTABLISHED' - notify: persistent ip4tables - - - name: "3.5.3.2.3 | PATCH | Ensure iptables default deny firewall policy | Configure SSH to be allowed out" - ansible.builtin.iptables: - chain: OUTPUT - protocol: tcp - source_port: 22 - jump: ACCEPT - ctstate: 'NEW,ESTABLISHED' - notify: persistent ip4tables - - - name: "3.5.3.2.3 | PATCH | Ensure iptables default deny firewall policy | Enable apt traffic" - ansible.builtin.iptables: - chain: INPUT - ctstate: 'ESTABLISHED' - jump: ACCEPT - notify: persistent ip4tables - - - name: "3.5.3.2.3 | PATCH | Ensure iptables default deny firewall policy | Set drop items" - ansible.builtin.iptables: - policy: DROP - chain: "{{ item }}" - with_items: - - INPUT - - FORWARD - - OUTPUT - notify: persistent ip4tables - when: - - ubtu20cis_rule_3_5_3_2_3 - - ubtu20cis_firewall_package == "iptables" - - ubtu20cis_ipv4_required - - not system_is_ec2 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_3.5.3.2.3 - - iptables - - -- name: "3.5.3.2.4 | AUDIT | Ensure iptables firewall rules exist for all open ports" - block: - - name: "3.5.3.2.4 | AUDIT | Ensure iptables firewall rules 
exist for all open ports | Get list of open ports" + - name: "3.4.3.2.4 | AUDIT | Ensure iptables firewall rules exist for all open ports | Get list of open ports" ansible.builtin.shell: ss -4tuln changed_when: false failed_when: false check_mode: false - register: ubtu20cis_3_5_3_2_4_open_ports + register: ubtu20cis_3_4_3_2_4_open_ports notify: persistent ip4tables - - name: "3.5.3.2.4 | AUDIT | Ensure iptables firewall rules exist for all open ports | Get list of rules" + - name: "3.4.3.2.4 | AUDIT | Ensure iptables firewall rules exist for all open ports | Get list of rules" ansible.builtin.shell: iptables -L INPUT -v -n changed_when: false failed_when: false check_mode: false - register: ubtu20cis_3_5_3_2_4_current_rules + register: ubtu20cis_3_4_3_2_4_current_rules notify: persistent ip4tables - - name: "3.5.3.2.4 | AUDIT | Ensure iptables firewall rules exist for all open ports | Alert about settings" + - name: "3.4.3.2.4 | AUDIT | Ensure iptables firewall rules exist for all open ports | Alert about settings" ansible.builtin.debug: msg: - "Warning!! Below is the list the open ports and current rules" - "Please create a rule for any open port that does not have a current rule" - "Open Ports:" - - "{{ ubtu20cis_3_5_3_2_4_open_ports.stdout_lines }}" + - "{{ ubtu20cis_3_4_3_2_4_open_ports.stdout_lines }}" - "Current Rules:" - - "{{ ubtu20cis_3_5_3_2_4_current_rules.stdout_lines }}" + - "{{ ubtu20cis_3_4_3_2_4_current_rules.stdout_lines }}" notify: persistent ip4tables when: - - ubtu20cis_rule_3_5_3_2_4 + - ubtu20cis_rule_3_4_3_2_4 - ubtu20cis_firewall_package == "iptables" - ubtu20cis_ipv4_required tags: @@ -197,12 +198,54 @@ - level1-workstation - automated - audit - - rule_3.5.3.2.4 + - rule_3.4.3.2.4 - iptables -- name: "3.5.3.3.1 | PATCH | Ensure ip6tables loopback traffic is configured" +# iptable ipv6 + +- name: "3.4.3.3.1 | PATCH | Ensure ip6tables default deny firewall policy" block: - - name: "3.5.3.3.1 | PATCH | Ensure ip6tables loopback traffic is configured | INPUT loopback ACCEPT" + - name: "3.4.3.3.1 | PATCH | Ensure ip6tables default deny firewall policy | Configure SSH to be allowed out" + ansible.builtin.iptables: + chain: OUTPUT + protocol: tcp + source_port: 22 + jump: ACCEPT + ctstate: 'NEW,ESTABLISHED' + ip_version: ipv6 + + - name: "3.4.3.3.1 | PATCH | Ensure ip6tables default deny firewall policy | Enable apt traffic" + ansible.builtin.iptables: + chain: INPUT + ctstate: 'ESTABLISHED' + jump: ACCEPT + ip_version: ipv6 + + - name: "3.4.3.3.1 | PATCH | Ensure ip6tables default deny firewall policy | Set drop items" + ansible.builtin.iptables: + policy: DROP + chain: "{{ item }}" + ip_version: ipv6 + with_items: + - INPUT + - FORWARD + - OUTPUT + when: + - ubtu20cis_rule_3_4_3_3_1 + - ubtu20cis_firewall_package == "iptables" + - ubtu20cis_ipv6_required + - not ubtu20cis_ipv4_required + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_3.4.3.3.1 + - ip6tables + +- name: "3.4.3.3.2 | PATCH | Ensure ip6tables loopback traffic is configured" + block: + - name: "3.4.3.3.2 | PATCH | Ensure ip6tables loopback traffic is configured | INPUT loopback ACCEPT" ansible.builtin.iptables: action: append chain: INPUT @@ -210,7 +253,7 @@ jump: ACCEPT ip_version: ipv6 - - name: "3.5.3.3.1 | PATCH | Ensure ip6tables loopback traffic is configured | OUTPUT loopback ACCEPT" + - name: "3.4.3.3.2 | PATCH | Ensure ip6tables loopback traffic is configured | OUTPUT loopback ACCEPT" ansible.builtin.iptables: action: append chain: OUTPUT @@ -218,7 +261,7 @@ jump: 
ACCEPT ip_version: ipv6 - - name: "3.5.3.3.1 | PATCH | Ensure ip6tables loopback traffic is configured | INPUT loopback drop" + - name: "3.4.3.3.2 | PATCH | Ensure ip6tables loopback traffic is configured | INPUT loopback drop" ansible.builtin.iptables: action: append chain: INPUT @@ -226,7 +269,7 @@ jump: DROP ip_version: ipv6 when: - - ubtu20cis_rule_3_5_3_3_1 + - ubtu20cis_rule_3_4_3_3_2 - ubtu20cis_firewall_package == "iptables" - ubtu20cis_ipv6_required - not ubtu20cis_ipv4_required @@ -235,10 +278,10 @@ - level1-workstation - automated - patch - - rule_3.5.3.3.1 + - rule_3.4.3.3.2 - ip6tables -- name: "3.5.3.3.2 | PATCH | Ensure ip6tables outbound and established connections are configured" +- name: "3.4.3.3.3 | PATCH | Ensure ip6tables outbound and established connections are configured" ansible.builtin.iptables: action: append chain: '{{ item.chain }}' @@ -255,7 +298,7 @@ - { chain: INPUT, protocol: udp, ctstate: 'ESTABLISHED' } - { chain: INPUT, protocol: icmp, ctstate: 'ESTABLISHED' } when: - - ubtu20cis_rule_3_5_3_3_2 + - ubtu20cis_rule_3_4_3_3_3 - ubtu20cis_firewall_package == "iptables" - ubtu20cis_ipv6_required - not ubtu20cis_ipv4_required @@ -264,81 +307,41 @@ - level1-workstation - manual - patch - - rule_3.5.3.3.2 - - ip6tables - -- name: "3.5.3.3.3 | PATCH | Ensure ip6tables default deny firewall policy" - block: - - name: "3.5.3.3.3 | PATCH | Ensure ip6tables default deny firewall policy | Configure SSH to be allowed out" - ansible.builtin.iptables: - chain: OUTPUT - protocol: tcp - source_port: 22 - jump: ACCEPT - ctstate: 'NEW,ESTABLISHED' - ip_version: ipv6 - - - name: "3.5.3.3.3 | PATCH | Ensure ip6tables default deny firewall policy | Enable apt traffic" - ansible.builtin.iptables: - chain: INPUT - ctstate: 'ESTABLISHED' - jump: ACCEPT - ip_version: ipv6 - - - name: "3.5.3.3.3 | PATCH | Ensure ip6tables default deny firewall policy | Set drop items" - ansible.builtin.iptables: - policy: DROP - chain: "{{ item }}" - ip_version: ipv6 - with_items: - - INPUT - - FORWARD - - OUTPUT - when: - - ubtu20cis_rule_3_5_3_3_3 - - ubtu20cis_firewall_package == "iptables" - - ubtu20cis_ipv6_required - - not ubtu20cis_ipv4_required - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_3.5.3.3.3 + - rule_3.4.3.3.3 - ip6tables -- name: "3.5.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports" +- name: "3.4.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports" block: - - name: "3.5.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Get list of open ports" + - name: "3.4.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Get list of open ports" ansible.builtin.shell: ss -6tuln changed_when: false failed_when: false check_mode: false - register: ubtu20cis_3_5_3_3_4_open_ports + register: ubtu20cis_3_4_3_3_4_open_ports - - name: "3.5.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Get list of rules" + - name: "3.4.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Get list of rules" ansible.builtin.shell: ip6tables -L INPUT -v -n changed_when: false failed_when: false check_mode: false - register: ubtu20cis_3_5_3_3_4_current_rules + register: ubtu20cis_3_4_3_3_4_current_rules - - name: "3.5.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Alert about settings" + - name: "3.4.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Alert about settings" ansible.builtin.debug: 
msg: - "Warning!! Below is the list the open ports and current rules" - "Please create a rule for any open port that does not have a current rule" - "Open Ports:" - - "{{ ubtu20cis_3_5_3_3_4_open_ports.stdout_lines }}" + - "{{ ubtu20cis_3_4_3_3_4_open_ports.stdout_lines }}" - "Current Rules:" - - "{{ ubtu20cis_3_5_3_3_4_current_rules.stdout_lines }}" + - "{{ ubtu20cis_3_4_3_3_4_current_rules.stdout_lines }}" - - name: "3.5.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Warn Count" + - name: "3.4.3.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Warn Count" ansible.builtin.import_tasks: warning_facts.yml vars: - warn_control_id: '3.5.3.3.4' + warn_control_id: '3.4.3.3.4' when: - - ubtu20cis_rule_3_5_3_3_4 + - ubtu20cis_rule_3_4_3_3_4 - ubtu20cis_firewall_package == "iptables" - ubtu20cis_ipv6_required - not ubtu20cis_ipv4_required @@ -347,5 +350,5 @@ - level1-workstation - automated - audit - - rule_3.5.3.3.4 + - rule_3.4.3.3.4 - ip6tables diff --git a/tasks/section_3/cis_3.4.x.yml b/tasks/section_3/cis_3.4.x.yml deleted file mode 100644 index 7b6bafed..00000000 --- a/tasks/section_3/cis_3.4.x.yml +++ /dev/null @@ -1,64 +0,0 @@ ---- -- name: "3.4.1 | PATCH | Ensure DCCP is disabled" - ansible.builtin.lineinfile: - path: /etc/modprobe.d/dccp.conf - regexp: '^(#)?install dccp(\\s|$)' - line: 'install dccp /bin/true' - create: true - when: - - ubtu20cis_rule_3_4_1 - tags: - - level2-server - - level2-workstation - - automated - - patch - - rule_3.4.1 - - dccp - -- name: "3.4.2 | PATCH | Ensure SCTP is disabled" - ansible.builtin.lineinfile: - path: /etc/modprobe.d/sctp.conf - regexp: "^(#)?install sctp(\\s|$)" - line: 'install sctp /bin/true' - create: true - when: - - ubtu20cis_rule_3_4_2 - tags: - - level2-server - - level2-workstation - - automated - - patch - - rule_3.4.2 - - sctp - -- name: "3.4.3 | PATCH | Ensure RDS is disabled" - ansible.builtin.lineinfile: - path: /etc/modprobe.d/rds.conf - regexp: '^(#)?install rds(\\s|$)' - line: 'install rds /bin/true' - create: true - when: - - ubtu20cis_rule_3_4_3 - tags: - - level2-server - - level2-workstation - - automated - - patch - - rule_3.4.3 - - rds - -- name: "3.4.4 | PATCH | Ensure TIPC is disabled" - ansible.builtin.lineinfile: - path: /etc/modprobe.d/tipc.conf - regexp: '^(#)?install tipc(\\s|$)' - line: install tipc /bin/true - create: true - when: - - ubtu20cis_rule_3_4_4 - tags: - - level2-server - - level2-workstation - - automated - - patch - - rule_3.4.4 - - tipc diff --git a/tasks/section_3/main.yml b/tasks/section_3/main.yml index 1721fae9..97a37743 100644 --- a/tasks/section_3/main.yml +++ b/tasks/section_3/main.yml @@ -8,20 +8,17 @@ - name: "SECTION | 3.3 | Network Parameters Host and Router" ansible.builtin.import_tasks: cis_3.3.x.yml -- name: "SECTION | 3.4 | Uncommon Network Protocols" - ansible.builtin.import_tasks: cis_3.4.x.yml - -- name: "SECTION | 3.5.1 | Firewall Configuration UFW" - ansible.builtin.import_tasks: cis_3.5.1.x.yml +- name: "SECTION | 3.4.1 | Firewall Configuration UFW" + ansible.builtin.import_tasks: cis_3.4.1.x.yml when: - ubtu20cis_firewall_package == "ufw" -- name: "SECTION | 3.5.2 | Firewall Configuration nftables" - ansible.builtin.import_tasks: cis_3.5.2.x.yml +- name: "SECTION | 3.4.2 | Firewall Configuration nftables" + ansible.builtin.import_tasks: cis_3.4.2.x.yml when: - ubtu20cis_firewall_package == "nftables" -- name: "SECTION | 3.5.3 | Firewall Configuration iptables" - ansible.builtin.import_tasks: cis_3.5.3.x.yml +- name: "SECTION | 
3.4.3 | Firewall Configuration iptables" + ansible.builtin.import_tasks: cis_3.4.3.x.yml when: - ubtu20cis_firewall_package == "iptables" diff --git a/tasks/section_4/cis_4.1.x.yml b/tasks/section_4/cis_4.1.x.yml index a7ede8c4..d97540be 100644 --- a/tasks/section_4/cis_4.1.x.yml +++ b/tasks/section_4/cis_4.1.x.yml @@ -1,279 +1,162 @@ --- -- name: "4.1.3 | PATCH | Ensure events that modify date and time information are collected" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_3_timechange.rules.j2 - dest: /etc/audit/rules.d/time-change.rules - owner: root - group: root - mode: 0600 - notify: restart auditd - when: - - ubtu20cis_rule_4_1_3 - tags: - - level2-server - - level2-workstation - - automated - - patch - - rule_4.1.3 - - auditd -- name: "4.1.4 | PATCH | Ensure events that modify user/group information are collected" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_4_identity.rules.j2 - dest: /etc/audit/rules.d/identity.rules - owner: root - group: root - mode: 0600 - notify: restart auditd +- name: "4.1.1 | PATCH | Ensure cron daemon is enabled and active" + ansible.builtin.service: + name: cron + state: started + enabled: true when: - - ubtu20cis_rule_4_1_4 + - ubtu20cis_rule_4_1_1 tags: - - level2-server - - level2-workstation + - level1-server + - level1-workstation - automated - patch - - rule_4.1.4 - - auditd + - rule_4.1.1 + - cron -- name: "4.1.5 | PATCH | Ensure events that modify the system's network environment are collected" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_5_systemlocale.rules.j2 - dest: /etc/audit/rules.d/system-locale.rules +- name: "4.1.2 | PATCH | Ensure permissions on /etc/crontab are configured" + ansible.builtin.file: + path: /etc/crontab owner: root group: root mode: 0600 - notify: restart auditd when: - - ubtu20cis_rule_4_1_5 + - ubtu20cis_rule_4_1_2 tags: - - level2-server - - level2-workstation + - level1-server + - level1-workstation - automated - patch - - rule_4.1.5 - - auditd + - rule_4.1.2 + - cron -- name: "4.1.6 | PATCH | Ensure events that modify the system's Mandatory Access Controls are collected" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_6_macpolicy.rules.j2 - dest: /etc/audit/rules.d/MAC-policy.rules +- name: "4.1.3 | PATCH | Ensure permissions on /etc/cron.hourly are configured" + ansible.builtin.file: + path: /etc/cron.hourly owner: root group: root - mode: 0600 - notify: restart auditd + mode: 0700 when: - - ubtu20cis_rule_4_1_6 + - ubtu20cis_rule_4_1_3 tags: - - level2-server - - level2-workstation + - level1-server + - level1-workstation - automated - patch - - rule_4.1.6 - - auditd + - rule_4.1.3 + - cron -- name: "4.1.7 | PATCH | Ensure login and logout events are collected" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_7_logins.rules.j2 - dest: /etc/audit/rules.d/logins.rules +- name: "4.1.4 | PATCH | Ensure permissions on /etc/cron.daily are configured" + ansible.builtin.file: + path: /etc/cron.daily owner: root group: root - mode: 0600 - notify: restart auditd + mode: 0700 when: - - ubtu20cis_rule_4_1_7 + - ubtu20cis_rule_4_1_4 tags: - - level2-server - - level2-workstation + - level1-server + - level1-workstation - automated - patch - - rule_4.1.7 - - auditd + - rule_4.1.4 + - cron -- name: "4.1.8 | PATCH | Ensure session initiation information is collected" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_8_session.rules.j2 - dest: /etc/audit/rules.d/session.rules +- name: "4.1.5 | PATCH | Ensure permissions on /etc/cron.weekly are configured" + ansible.builtin.file: + 
path: /etc/cron.weekly owner: root group: root - mode: 0600 - notify: restart auditd + mode: 0700 when: - - ubtu20cis_rule_4_1_8 + - ubtu20cis_rule_4_1_5 tags: - - level2-server - - level2-workstation + - level1-server + - level1-workstation - automated - patch - - rule_4.1.8 - - auditd + - rule_4.1.5 + - cron -- name: "4.1.9 | PATCH | Ensure discretionary access control permission modification events are collected" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_9_permmod.rules.j2 - dest: /etc/audit/rules.d/perm_mod.rules +- name: "4.1.6 | PATCH | Ensure permissions on /etc/cron.monthly are configured" + ansible.builtin.file: + path: /etc/cron.monthly owner: root group: root - mode: 0600 - notify: restart auditd + mode: 0700 when: - - ubtu20cis_rule_4_1_9 + - ubtu20cis_rule_4_1_6 tags: - - level2-server - - level2-workstation + - level1-server + - level1-workstation - automated - patch - - rule_4.1.9 - - auditd + - rule_4.1.6 + - cron -- name: "4.1.10 | PATCH | Ensure unsuccessful unauthorized file access attempts are collected" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_10_access.rules.j2 - dest: /etc/audit/rules.d/access.rules +- name: "4.1.7 | PATCH | Ensure permissions on /etc/cron.d are configured" + ansible.builtin.file: + path: /etc/cron.d owner: root group: root - mode: 0600 - notify: restart auditd + mode: 0700 when: - - ubtu20cis_rule_4_1_10 + - ubtu20cis_rule_4_1_7 tags: - - level2-server - - level2-workstation + - level1-server + - level1-workstation - automated - patch - - rule_4.1.10 - - auditd + - rule_4.1.7 + - cron -- name: "4.1.11 | PATCH | Ensure use of privileged commands is collected" +- name: "4.1.8 | PATCH | Ensure at/cron is restricted to authorized users" block: - - name: "4.1.11 | AUDIT | Ensure use of privileged commands is collected | Get list of privileged programs" - ansible.builtin.shell: for i in $(df | grep '^/dev' | awk '{ print $NF }'); do find $i -xdev -type f -perm -4000 -o -type f -perm -2000 2>/dev/null; done - register: priv_procs - changed_when: false - check_mode: false + - name: "4.1.8 | PATCH | Ensure at/cron is restricted to authorized users | Remove cron.deny" + ansible.builtin.file: + path: /etc/cron.deny + state: absent - - name: "4.1.11 | PATCH | Ensure use of privileged commands is collected | Set privileged rules" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_11_privileged.rules.j2 - dest: /etc/audit/rules.d/privileged.rules + - name: "4.1.8 | PATCH | Ensure at/cron is restricted to authorized users | Create cron.allow" + ansible.builtin.copy: + content: "" + dest: /etc/cron.allow + force: false owner: root - group: root - mode: 0600 - notify: restart auditd + group: crontab + mode: 0640 when: - - ubtu20cis_rule_4_1_11 - tags: - - level2-server - - level2-workstation - - automated - - patch - - rule_4.1.11 - - auditd - -- name: "4.1.12 | PATCH | Ensure successful file system mounts are collected" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_12_audit.rules.j2 - dest: /etc/audit/rules.d/audit.rules - owner: root - group: root - mode: 0600 - notify: restart auditd - when: - ubtu20cis_rule_4_1_12 - tags: - - level2-server - - level2-workstation - - automated - - patch - - rule_4.1.12 - - auditd - -- name: "4.1.13 | PATCH | Ensure file deletion events by users are collected" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_13_delete.rules.j2 - dest: /etc/audit/rules.d/delete.rules - owner: root - group: root - mode: 0600 - notify: restart auditd - when: - - ubtu20cis_rule_4_1_13 - tags: - - 
level2-server - - level2-workstation - - automated - - patch - - rule_4.1.13 - - auditd - -- name: "4.1.14 | PATCH | Ensure changes to system administration scope (sudoers) is collected" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_14_scope.rules.j2 - dest: /etc/audit/rules.d/scope.rules - owner: root - group: root - mode: 0600 - notify: restart auditd - when: - - ubtu20cis_rule_4_1_14 - tags: - - level2-server - - level2-workstation - - automated - - patch - - rule_4.1.14 - - auditd - -- name: "4.1.15 | PATCH | Ensure system administrator command executions (sudo) are collected" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_15_actions.rules.j2 - dest: /etc/audit/rules.d/actions.rules - owner: root - group: root - mode: 0600 - notify: restart auditd - when: - - ubtu20cis_rule_4_1_15 + - ubtu20cis_rule_4_1_8 tags: - - level2-server - - level2-workstation + - level1-server + - level1-workstation - automated - patch - - rule_4.1.15 - - auditd + - rule_4.1.8 + - cron -- name: "4.1.16 | PATCH | Ensure kernel module loading and unloading is collected" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_16_modules.rules.j2 - dest: /etc/audit/rules.d/modules.rules - owner: root - group: root - mode: 0600 - notify: restart auditd - when: - - ubtu20cis_rule_4_1_16 - tags: - - level2-server - - level2-workstation - - automated - - patch - - rule_4.1.16 - - auditd +- name: "4.1.9 | PATCH | Ensure at is restricted to authorized users" + block: + - name: "4.1.9 | PATCH | Ensure at is restricted to authorized users | Remove at.deny" + ansible.builtin.file: + path: /etc/at.deny + state: absent -- name: "4.1.17 | PATCH | Ensure the audit configuration is immutable" - ansible.builtin.template: - src: audit/ubtu20cis_4_1_17_99finalize.rules.j2 - dest: /etc/audit/rules.d/99-finalize.rules - owner: root - group: root - mode: 0600 - notify: restart auditd + - name: "4.1.9 | PATCH | Ensure at is restricted to authorized users | Create at.allow" + ansible.builtin.copy: + content: "" + dest: /etc/at.allow + force: false + owner: root + group: root + mode: 0640 when: - - ubtu20cis_rule_4_1_17 + - ubtu20cis_rule_4_1_9 tags: - - level2-server - - level2-workstation + - level1-server + - level1-workstation - automated - - scored - patch - - rule_4.1.17 - - auditd + - rule_4.1.9 + - cron diff --git a/tasks/section_4/cis_4.2.2.x.yml b/tasks/section_4/cis_4.2.2.x.yml deleted file mode 100644 index e0d97d46..00000000 --- a/tasks/section_4/cis_4.2.2.x.yml +++ /dev/null @@ -1,50 +0,0 @@ ---- -- name: "4.2.2.1 | PATCH | Ensure journald is configured to send logs to rsyslog" - ansible.builtin.lineinfile: - path: /etc/systemd/journald.conf - regexp: '^ForwardToSyslog|^#ForwardToSyslog' - line: 'ForwardToSyslog=yes' - insertafter: '\[Journal\]' - when: - - ubtu20cis_rule_4_2_2_1 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_4.2.2.1 - - rsyslog - - journald - -- name: "4.2.2.2 | PATCH | Ensure journald is configured to compress large log files" - ansible.builtin.lineinfile: - path: /etc/systemd/journald.conf - regexp: '^Compress|^#Compress' - line: 'Compress=yes' - insertafter: '\[Journal\]' - when: - - ubtu20cis_rule_4_2_2_2 - tags: - - level1-server - - level1-workstation - - patch - - rule_4.2.2.2 - - rsyslog - - journald - -- name: "4.2.2.3 | PATCH | Ensure journald is configured to write logfiles to persistent disk" - ansible.builtin.lineinfile: - path: /etc/systemd/journald.conf - regexp: '^Storage|^#Storage' - line: 'Storage=persistent' - insertafter: '\[Journal\]' 
- when: - - ubtu20cis_rule_4_2_2_3 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_4.2.2.3 - - rsyslog - - journald diff --git a/tasks/section_4/cis_4.2.3.yml b/tasks/section_4/cis_4.2.3.yml deleted file mode 100644 index 9e87e223..00000000 --- a/tasks/section_4/cis_4.2.3.yml +++ /dev/null @@ -1,24 +0,0 @@ ---- -- name: "4.2.3 | PATCH | Ensure permissions on all logfiles are configured" - block: - - name: "4.2.3 | AUDIT | Ensure permissions on all logfiles are configured | discover " - ansible.builtin.shell: find /var/log -type f -exec chmod g-wx,o-rwx "{}" + -o -type d - changed_when: false - check_mode: false - register: ubtu20cis_4_2_3_logfile_perms_status - - - name: "4.2.3 | PATCH | Ensure permissions on all logfiles are configured | adjust " - ansible.builtin.shell: find /var/log -type f -exec chmod g-wx,o-rwx "{}" + -o -type d -exec chmod g-w,o-rwx "{}" + - check_mode: false - when: - - ubtu20cis_4_2_3_logfile_perms_status.stdout | length > 0 - when: - - ubtu20cis_rule_4_2_3 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_4.2.3 - - logfiles - - permissions diff --git a/tasks/section_5/cis_5.3.x.yml b/tasks/section_4/cis_4.2.x.yml similarity index 72% rename from tasks/section_5/cis_5.3.x.yml rename to tasks/section_4/cis_4.2.x.yml index 0d7910eb..2afd44da 100644 --- a/tasks/section_5/cis_5.3.x.yml +++ b/tasks/section_4/cis_4.2.x.yml @@ -1,79 +1,80 @@ --- -- name: "5.3.1 | PATCH | Ensure permissions on /etc/ssh/sshd_config are configured" + +- name: "4.2.1 | PATCH | Ensure permissions on /etc/ssh/sshd_config are configured" ansible.builtin.file: path: /etc/ssh/sshd_config owner: root group: root mode: 0600 when: - - ubtu20cis_rule_5_3_1 + - ubtu20cis_rule_4_2_1 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.1 + - rule_4.2.1 - ssh -- name: "5.3.2 | PATCH | Ensure permissions on SSH private host key files are configured" +- name: "4.2.2 | PATCH | Ensure permissions on SSH private host key files are configured" block: - - name: "5.3.2 | AUDIT | Ensure permissions on SSH private host key files are configured | Find ssh_host private keys" + - name: "4.2.2 | AUDIT | Ensure permissions on SSH private host key files are configured | Find ssh_host private keys" ansible.builtin.find: paths: /etc/ssh patterns: 'ssh_host_*_key' - register: ubtu20cis_5_3_2_ssh_host_priv_keys + register: ubtu20cis_4_2_2_ssh_host_priv_keys - - name: "5.3.2 | PATCH | Ensure permissions on SSH private host key files are configured | Set permissions" + - name: "4.2.2 | PATCH | Ensure permissions on SSH private host key files are configured | Set permissions" ansible.builtin.file: path: "{{ item.path }}" owner: root group: root mode: 0600 with_items: - - "{{ ubtu20cis_5_3_2_ssh_host_priv_keys.files }}" + - "{{ ubtu20cis_4_2_2_ssh_host_priv_keys.files }}" loop_control: label: "{{ item.path }}" when: - - ubtu20cis_rule_5_3_2 + - ubtu20cis_rule_4_2_2 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.2 + - rule_4.2.2 - ssh -- name: "5.3.3 | PATCH | Ensure permissions on SSH public host key files are configured" +- name: "4.2.3 | PATCH | Ensure permissions on SSH public host key files are configured" block: - - name: "5.3.3 | AUDIT | Ensure permissions on SSH public host key files are configured | Find ssh_host public keys" + - name: "4.2.3 | AUDIT | Ensure permissions on SSH public host key files are configured | Find ssh_host public keys" ansible.builtin.find: paths: /etc/ssh patterns: 
'ssh_host_*_key.pub' - register: ubtu20cis_5_3_3_ssh_host_pub_keys + register: ubtu20cis_4_2_3_ssh_host_pub_keys - - name: "5.3.3 | PATCH | Ensure permissions on SSH public host key files are configured | Set permissions" + - name: "4.2.3 | PATCH | Ensure permissions on SSH public host key files are configured | Set permissions" ansible.builtin.file: path: "{{ item.path }}" owner: root group: root mode: 0644 with_items: - - "{{ ubtu20cis_5_3_3_ssh_host_pub_keys.files }}" + - "{{ ubtu20cis_4_2_3_ssh_host_pub_keys.files }}" loop_control: label: "{{ item.path }}" when: - - ubtu20cis_rule_5_3_3 + - ubtu20cis_rule_4_2_3 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.3 + - rule_4.2.3 - ssh -- name: "5.3.4 | PATCH | Ensure SSH access is limited" +- name: "4.2.4 | PATCH | Ensure SSH access is limited" block: - - name: "5.3.4 | PATCH | Ensure SSH access is limited | Add allowed users" + - name: "4.2.4 | PATCH | Ensure SSH access is limited | Add allowed users" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config regexp: '^AllowUsers|^#AllowUsers' @@ -81,7 +82,7 @@ notify: restart sshd when: "ubtu20cis_sshd['allow_users']|default('') != ''" - - name: "5.3.4 | PATCH | Ensure SSH access is limited | Add allowed groups" + - name: "4.2.4 | PATCH | Ensure SSH access is limited | Add allowed groups" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config regexp: '^AllowGroups|^#AllowGroups' @@ -89,7 +90,7 @@ notify: restart sshd when: "ubtu20cis_sshd['allow_groups']|default('') != ''" - - name: "5.3.4 | PATCH | Ensure SSH access is limited | Add deny users" + - name: "4.2.4 | PATCH | Ensure SSH access is limited | Add deny users" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config regexp: '^DenyUsers|^#DenyUsers' @@ -97,7 +98,7 @@ notify: restart sshd when: "ubtu20cis_sshd['deny_users']|default('') != ''" - - name: "5.3.4 | PATCH | Ensure SSH access is limited | Add deny groups" + - name: "4.2.4 | PATCH | Ensure SSH access is limited | Add deny groups" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config regexp: '^DenyGroups|^#DenyGroups' @@ -105,16 +106,16 @@ notify: restart sshd when: "ubtu20cis_sshd['deny_groups']|default('') != ''" when: - - ubtu20cis_rule_5_3_4 + - ubtu20cis_rule_4_2_4 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.4 + - rule_4.2.4 - ssh -- name: "5.3.5 | PATCH | Ensure SSH LogLevel is appropriate" +- name: "4.2.5 | PATCH | Ensure SSH LogLevel is appropriate" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config regexp: '^LogLevel|^#LogLevel' @@ -122,130 +123,131 @@ insertafter: '^# Logging' notify: restart sshd when: - - ubtu20cis_rule_5_3_5 + - ubtu20cis_rule_4_2_5 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.5 + - rule_4.2.5 - ssh -- name: "5.3.6 | PATCH | Ensure SSH X11 forwarding is disabled" +- name: "4.2.6 | PATCH | Ensure SSH PAM is enabled" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: '^X11Forwarding|^#X11Forwarding' - line: 'X11Forwarding no' + regexp: '^UsePAM|^#UsePAM' + line: 'UsePAM yes' + insertafter: '^# and ChallengeResponseAuthentication' notify: restart sshd when: - - ubtu20cis_rule_5_3_6 + - ubtu20cis_rule_4_2_6 tags: - - level2-server + - level1-server - level1-workstation - automated - patch - - rule_5.3.6 + - rule_4.2.6 - ssh + - pam -- name: "5.3.7 | PATCH | Ensure SSH MaxAuthTries is set to 4 or less" +- name: "4.2.7 | PATCH | Ensure SSH root login is disabled" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: 
'^MaxAuthTries|^#MaxAuthTries' - line: 'MaxAuthTries {{ ubtu20cis_sshd.max_auth_tries }}' - insertafter: '^# Authentication' + regexp: '^PermitRootLogin|^#PermitRootLogin' + line: 'PermitRootLogin no' notify: restart sshd when: - - ubtu20cis_rule_5_3_7 + - ubtu20cis_rule_4_2_7 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.7 + - rule_4.2.7 - ssh -- name: "5.3.8 | PATCH | Ensure SSH IgnoreRhosts is enabled" +- name: "4.2.8 | PATCH | Ensure SSH HostbasedAuthentication is disabled" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: '^IgnoreRhosts|^#IgnoreRhosts' - line: 'IgnoreRhosts yes' + regexp: '^HostbasedAuthentication|^#HostbasedAuthentication' + line: 'HostbasedAuthentication no' notify: restart sshd when: - - ubtu20cis_rule_5_3_8 + - ubtu20cis_rule_4_2_8 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.8 + - rule_4.2.8 - ssh -- name: "5.3.9 | PATCH | Ensure SSH HostbasedAuthentication is disabled" +- name: "4.2.9 | PATCH | Ensure SSH PermitEmptyPasswords is disabled" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: '^HostbasedAuthentication|^#HostbasedAuthentication' - line: 'HostbasedAuthentication no' + regexp: '^PermitEmptyPasswords|^#PermitEmptyPasswords' + line: 'PermitEmptyPasswords no' + insertafter: '# To disable tunneled clear text passwords' notify: restart sshd when: - - ubtu20cis_rule_5_3_9 + - ubtu20cis_rule_4_2_9 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.9 + - rule_4.2.9 - ssh -- name: "5.3.10 | PATCH | Ensure SSH root login is disabled" +- name: "4.2.10 | PATCH | Ensure SSH PermitUserEnvironment is disabled" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: '^PermitRootLogin|^#PermitRootLogin' - line: 'PermitRootLogin no' + regexp: '^PermitUserEnvironment|^#PermitUserEnvironment' + line: 'PermitUserEnvironment no' notify: restart sshd when: - - ubtu20cis_rule_5_3_10 + - ubtu20cis_rule_4_2_10 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.10 + - rule_4.2.10 - ssh -- name: "5.3.11 | PATCH | Ensure SSH PermitEmptyPasswords is disabled" +- name: "4.2.11 | PATCH | Ensure SSH IgnoreRhosts is enabled" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: '^PermitEmptyPasswords|^#PermitEmptyPasswords' - line: 'PermitEmptyPasswords no' - insertafter: '# To disable tunneled clear text passwords' + regexp: '^IgnoreRhosts|^#IgnoreRhosts' + line: 'IgnoreRhosts yes' notify: restart sshd when: - - ubtu20cis_rule_5_3_11 + - ubtu20cis_rule_4_2_11 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.11 + - rule_4.2.11 - ssh -- name: "5.3.12 | PATCH | Ensure SSH PermitUserEnvironment is disabled" +- name: "4.2.12 | PATCH | Ensure SSH X11 forwarding is disabled" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: '^PermitUserEnvironment|^#PermitUserEnvironment' - line: 'PermitUserEnvironment no' + regexp: '^X11Forwarding|^#X11Forwarding' + line: 'X11Forwarding no' notify: restart sshd when: - - ubtu20cis_rule_5_3_12 + - ubtu20cis_rule_4_2_12 tags: - - level1-server + - level2-server - level1-workstation - automated - patch - - rule_5.3.12 + - rule_4.2.12 - ssh -- name: "5.3.13 | PATCH | Ensure only strong Ciphers are used" +- name: "4.2.13 | PATCH | Ensure only strong Ciphers are used" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config regexp: '^Ciphers|^#Ciphers' @@ -253,16 +255,16 @@ insertafter: '^# Ciphers and keying' notify: restart sshd when: - - ubtu20cis_rule_5_3_13 + - 
ubtu20cis_rule_4_2_13 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.13 + - rule_4.2.13 - ssh -- name: "5.3.14 | PATCH | Ensure only strong MAC algorithms are used" +- name: "4.2.14 | PATCH | Ensure only strong MAC algorithms are used" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config regexp: '^MACs|^#MACs' @@ -270,16 +272,16 @@ insertafter: '^# Ciphers and keying' notify: restart sshd when: - - ubtu20cis_rule_5_3_14 + - ubtu20cis_rule_4_2_14 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.14 + - rule_4.2.14 - ssh -- name: "5.3.15 | PATCH | Ensure only strong Key Exchange algorithms are used" +- name: "4.2.15 | PATCH | Ensure only strong Key Exchange algorithms are used" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config regexp: '^KexAlgorithms|^#KexAlgorithms' @@ -287,131 +289,130 @@ insertafter: '^# Ciphers and keying' notify: restart sshd when: - - ubtu20cis_rule_5_3_15 + - ubtu20cis_rule_4_2_15 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.15 + - rule_4.2.15 - ssh -- name: "5.3.16 | PATCH | Ensure SSH Idle Timeout Interval is configured" +- name: "4.2.16 | PATCH | Ensure SSH AllowTcpForwarding is disabled" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: "{{ item.regexp }}" - line: "{{ item.line }}" - with_items: - - { regexp: '^ClientAliveInterval|^#ClientAliveInterval', line: 'ClientAliveInterval {{ ubtu20cis_sshd.client_alive_interval }}' } - - { regexp: '^ClientAliveCountMax|^#ClientAliveCountMax', line: 'ClientAliveCountMax {{ ubtu20cis_sshd.client_alive_count_max }}' } + regexp: '^AllowTcpForwarding|^#AllowTcpForwarding' + line: 'AllowTcpForwarding no' notify: restart sshd when: - - ubtu20cis_rule_5_3_16 + - ubtu20cis_rule_4_2_16 tags: - - level1-server - - level1-workstation + - level2-server + - level2-workstation - automated - patch - - rule_5.3.16 - - sshd + - rule_4.2.16 + - ssh -- name: "5.3.17 | PATCH | Ensure SSH LoginGraceTime is set to one minute or less" +- name: "4.2.17 | PATCH | Ensure SSH warning banner is configured" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: '^LoginGraceTime|^#LoginGraceTime' - line: 'LoginGraceTime {{ ubtu20cis_sshd.login_grace_time }}' - insertafter: '^# Authentication' + regexp: '^Banner|^#Banner' + line: Banner /etc/issue.net + insertafter: '^# no default banner path' notify: restart sshd when: - - ubtu20cis_rule_5_3_17 + - ubtu20cis_rule_4_2_17 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.17 + - rule_4.2.17 - ssh -- name: "5.3.18 | PATCH | Ensure SSH warning banner is configured" +- name: "4.2.18 | PATCH | Ensure SSH MaxAuthTries is set to 4 or less" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: '^Banner|^#Banner' - line: Banner /etc/issue.net - insertafter: '^# no default banner path' + regexp: '^MaxAuthTries|^#MaxAuthTries' + line: 'MaxAuthTries {{ ubtu20cis_sshd.max_auth_tries }}' + insertafter: '^# Authentication' notify: restart sshd when: - - ubtu20cis_rule_5_3_18 + - ubtu20cis_rule_4_2_18 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.18 + - rule_4.2.18 - ssh -- name: "5.3.19 | PATCH | Ensure SSH PAM is enabled" +- name: "4.2.19 | PATCH | Ensure SSH MaxStartups is configured" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: '^UsePAM|^#UsePAM' - line: 'UsePAM yes' - insertafter: '^# and ChallengeResponseAuthentication' + regexp: '^MaxStartups|^#MaxStartups' + line: 'MaxStartups 10:30:60' notify: restart sshd 
when: - - ubtu20cis_rule_5_3_19 + - ubtu20cis_rule_4_2_19 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.19 + - rule_4.2.19 - ssh - - pam -- name: "5.3.20 | PATCH | Ensure SSH AllowTcpForwarding is disabled" +- name: "4.2.20 | PATCH | Ensure SSH LoginGraceTime is set to one minute or less" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: '^AllowTcpForwarding|^#AllowTcpForwarding' - line: 'AllowTcpForwarding no' + regexp: '^LoginGraceTime|^#LoginGraceTime' + line: 'LoginGraceTime {{ ubtu20cis_sshd.login_grace_time }}' + insertafter: '^# Authentication' notify: restart sshd when: - - ubtu20cis_rule_5_3_20 + - ubtu20cis_rule_4_2_20 tags: - - level2-server - - level2-workstation + - level1-server + - level1-workstation - automated - patch - - rule_5.3.20 + - rule_4.2.20 - ssh -- name: "5.3.21 | PATCH | Ensure SSH MaxStartups is configured" +- name: "4.2.21 | PATCH | Ensure SSH MaxSessions is set to 10 or less" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: '^MaxStartups|^#MaxStartups' - line: 'MaxStartups 10:30:60' + regexp: '^MaxSessions|^#MaxSessions' + line: 'MaxSessions {{ ubtu20cis_sshd.max_sessions }}' + insertafter: '^# Authentication' notify: restart sshd when: - - ubtu20cis_rule_5_3_21 + - ubtu20cis_rule_4_2_21 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.21 + - rule_4.2.21 - ssh -- name: "5.3.22 | PATCH | Ensure SSH MaxSessions is set to 4 or less" +- name: "4.2.22 | PATCH | Ensure SSH Idle Timeout Interval is configured" ansible.builtin.lineinfile: path: /etc/ssh/sshd_config - regexp: '^MaxSessions|^#MaxSessions' - line: 'MaxSessions {{ ubtu20cis_sshd.max_sessions }}' - insertafter: '^# Authentication' + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + with_items: + - { regexp: '^ClientAliveInterval|^#ClientAliveInterval', line: 'ClientAliveInterval {{ ubtu20cis_sshd.client_alive_interval }}' } + - { regexp: '^ClientAliveCountMax|^#ClientAliveCountMax', line: 'ClientAliveCountMax {{ ubtu20cis_sshd.client_alive_count_max }}' } notify: restart sshd when: - - ubtu20cis_rule_5_3_22 + - ubtu20cis_rule_4_2_22 tags: - level1-server - level1-workstation - automated - patch - - rule_5.3.22 - - ssh + - rule_4.2.22 + - sshd diff --git a/tasks/section_4/cis_4.3.x.yml b/tasks/section_4/cis_4.3.x.yml new file mode 100644 index 00000000..dc6b1b71 --- /dev/null +++ b/tasks/section_4/cis_4.3.x.yml @@ -0,0 +1,142 @@ +--- + +- name: "4.3.1 | PATCH | Ensure sudo is installed" + ansible.builtin.package: + name: "{{ ubtu20cis_sudo_package }}" + state: present + when: + - ubtu20cis_rule_4_3_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.3.1 + - sudo + +- name: "4.3.2 | PATCH | Ensure sudo commands use pty" + ansible.builtin.lineinfile: + path: /etc/sudoers + regexp: '^Defaults use_' + line: 'Defaults use_pty' + insertafter: '^Defaults' + when: + - ubtu20cis_rule_4_3_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.3.2 + - sudo + +- name: "4.3.3 | PATCH | Ensure sudo log file exists" + ansible.builtin.lineinfile: + path: /etc/sudoers + regexp: '^Defaults logfile' + line: 'Defaults logfile="{{ ubtu20cis_sudo_logfile }}"' + insertafter: '^Defaults' + when: + - ubtu20cis_rule_4_3_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.3.3 + - sudo + +- name: "4.3.4 | PATCH | Ensure users must provide password for escalation" + ansible.builtin.replace: + path: "{{ item }}" + regexp: '^([^#|{% if 
system_is_ec2 %}ec2-user{% endif %}].*)NOPASSWD(.*)' + replace: '\1PASSWD\2' + validate: '/usr/sbin/visudo -cf %s' + loop: "{{ ubtu20cis_sudoers_files.stdout_lines }}" + when: + - ubtu20cis_rule_4_3_4 + tags: + - level2-server + - level2-workstation + - patch + - sudo + - rule_4.3.4 + +- name: "4.3.5 | PATCH | Ensure re-authentication for privilege escalation is not disabled globally" + ansible.builtin.replace: + path: "{{ item }}" + regexp: '^([^#].*)!authenticate(.*)' + replace: '\1authenticate\2' + validate: '/usr/sbin/visudo -cf %s' + loop: "{{ ubtu20cis_sudoers_files.stdout_lines }}" + when: + - ubtu20cis_rule_4_3_5 + tags: + - level1-server + - level1-workstation + - patch + - sudo + - rule_4.3.5 + +- name: "4.3.6 | PATCH | Ensure sudo authentication timeout is configured correctly" + block: + - name: "4.3.6 | AUDIT | Ensure sudo authentication timeout is configured correctly | Get files with timeout set" + ansible.builtin.shell: grep -is 'timestamp_timeout' /etc/sudoers /etc/sudoers.d/* | cut -d":" -f1 | uniq | sort + changed_when: false + failed_when: false + register: ubtu20cis_4_3_6_timeout_files + + - name: "4.3.6 | PATCH | Ensure sudo authentication timeout is configured correctly | Set value if no results" + ansible.builtin.lineinfile: + path: /etc/sudoers + regexp: 'Defaults timestamp_timeout=' + line: "Defaults timestamp_timeout={{ ubtu20cis_sudo_timestamp_timeout }}" + validate: '/usr/sbin/visudo -cf %s' + when: ubtu20cis_4_3_6_timeout_files.stdout | length == 0 + + - name: "4.3.6 | PATCH | Ensure sudo authentication timeout is configured correctly | Set value if has results" + ansible.builtin.replace: + path: "{{ item }}" + regexp: 'timestamp_timeout=(\d+)' + replace: "timestamp_timeout={{ ubtu20cis_sudo_timestamp_timeout }}" + validate: '/usr/sbin/visudo -cf %s' + loop: "{{ ubtu20cis_4_3_6_timeout_files.stdout_lines }}" + when: ubtu20cis_4_3_6_timeout_files.stdout | length > 0 + when: + - ubtu20cis_rule_4_3_6 + tags: + - level1-server + - level1-workstation + - patch + - sudo + - rule_4.3.6 + +- name: "4.3.7 | PATCH | Ensure access to the su command is restricted" + block: + + - name: "4.3.7 | PATCH | Ensure access to the su command is restricted | Ensure sugroup exists" + ansible.builtin.group: + name: "{{ ubtu20cis_sugroup }}" + state: present + register: ubtu20cis_4_3_7_sugroup + + - name: "4.3.7 | PATCH | Ensure access to the su command is restricted | remove users from group" + ansible.builtin.lineinfile: + path: /etc/group + regexp: '^{{ ubtu20cis_sugroup }}(:.:.*:).*$' + line: '{{ ubtu20cis_sugroup }}\g<1>' + backrefs: true + + - name: "4.3.7 | PATCH | Ensure access to the su command is restricted | Setting pam_wheel to use_uid" + ansible.builtin.lineinfile: + path: /etc/pam.d/su + regexp: '^(#)?auth\s+required\s+pam_wheel\.so' + line: 'auth required pam_wheel.so use_uid group={{ ubtu20cis_sugroup }}' + when: + - ubtu20cis_rule_4_3_7 + tags: + - level1-server + - level1-workstation + - patch + - sudo + - rule_4.3.7 diff --git a/tasks/section_4/cis_4.3.yml b/tasks/section_4/cis_4.3.yml deleted file mode 100644 index ae2894d8..00000000 --- a/tasks/section_4/cis_4.3.yml +++ /dev/null @@ -1,28 +0,0 @@ ---- -- name: "4.3 | PATCH | Ensure logrotate is configured" - block: - - name: "4.3 | PATCH | Ensure logrotate is configured | Get logrotate files" - ansible.builtin.find: - paths: /etc/logrotate.d/ - check_mode: false - register: ubtu20cis_4_3_logrotate_files - - - name: "4.3 | PATCH | Ensure logrotate is configured | Set rotation configurations" - 
ansible.builtin.replace: - path: "{{ item.path }}" - regexp: '^(\s*)(daily|weekly|monthly|yearly)$' - replace: "\\1{{ ubtu20cis_logrotate }}" - with_items: - - "{{ ubtu20cis_4_3_logrotate_files.files }}" - - { path: "/etc/logrotate.conf" } - loop_control: - label: "{{ item.path }}" - when: - - ubtu20cis_rule_4_3 - tags: - - level1-server - - level1-workstation - - manual - - patch - - rule_4.3 - - logrotate diff --git a/tasks/section_5/cis_5.4.x.yml b/tasks/section_4/cis_4.4.x.yml similarity index 58% rename from tasks/section_5/cis_5.4.x.yml rename to tasks/section_4/cis_4.4.x.yml index d285f41d..260587b1 100644 --- a/tasks/section_5/cis_5.4.x.yml +++ b/tasks/section_4/cis_4.4.x.yml @@ -1,31 +1,32 @@ --- -- name: "5.4.1 | PATCH | Ensure password creation requirements are configured" + +- name: "4.4.1 | PATCH | Ensure password creation requirements are configured" block: - - name: "5.4.1 | PATCH | Ensure password creation requirements are configured | Install pam_pwquality module" + - name: "4.4.1 | PATCH | Ensure password creation requirements are configured | Install pam_pwquality module" ansible.builtin.package: name: libpam-pwquality state: present - - name: "5.4.1 | PATCH | Ensure password creation requirements are configured | Add minlen" + - name: "4.4.1 | PATCH | Ensure password creation requirements are configured | Add minlen" ansible.builtin.lineinfile: path: /etc/security/pwquality.conf regexp: '^minlen|^# minlen' line: minlen = 14 - - name: "5.4.1 | PATCH | Ensure password creation requirements are configured | Add minclass" + - name: "4.4.1 | PATCH | Ensure password creation requirements are configured | Add minclass" ansible.builtin.lineinfile: path: /etc/security/pwquality.conf regexp: '^minclass|^# minclass' line: 'minclass = 4' - - name: "5.4.1 | AUDIT | Ensure password creation requirements are configured | Confirm pwquality module in common-password" + - name: "4.4.1 | AUDIT | Ensure password creation requirements are configured | Confirm pwquality module in common-password" ansible.builtin.shell: grep 'password.*requisite.*pam_pwquality.so' /etc/pam.d/common-password changed_when: false failed_when: false check_mode: false - register: ubtu20cis_5_4_1_pam_pwquality_state + register: ubtu20cis_4_4_1_pam_pwquality_state - - name: "5.4.1 | PATCH | Ensure password creation requirements are configured | Set retry to 3 if pwquality exists" + - name: "4.4.1 | PATCH | Ensure password creation requirements are configured | Set retry to 3 if pwquality exists" community.general.pamd: name: common-password type: password @@ -33,9 +34,9 @@ module_path: pam_pwquality.so module_arguments: 'retry=3' state: args_present - when: ubtu20cis_5_4_1_pam_pwquality_state.stdout | length > 0 + when: ubtu20cis_4_4_1_pam_pwquality_state.stdout | length > 0 - - name: "5.4.1 | PATCH | Ensure password creation requirements are configured | Set retry to 3 if pwquality does not exist" + - name: "4.4.1 | PATCH | Ensure password creation requirements are configured | Set retry to 3 if pwquality does not exist" community.general.pamd: name: common-password type: password @@ -46,15 +47,15 @@ new_module_path: pam_pwquality.so module_arguments: 'retry=3' state: after - when: ubtu20cis_5_4_1_pam_pwquality_state.stdout | length == 0 + when: ubtu20cis_4_4_1_pam_pwquality_state.stdout | length == 0 when: - - ubtu20cis_rule_5_4_1 + - ubtu20cis_rule_4_4_1 tags: - level1-server - level1-workstation - automated - patch - - rule_5.4.1 + - rule_4.4.1 - pam # ------------- @@ -70,10 +71,10 @@ # figure out why pam_deny 
kills vagrant user. Below is everything working but the pam_deny.so in the last task with_items # ------------- # ------------- -- name: "5.4.2 | PATCH | Ensure lockout for failed password attempts is configured" +- name: "4.4.2 | PATCH | Ensure lockout for failed password attempts is configured" block: - - name: "5.4.2 | PATCH | Ensure lockout for failed password attempts is configured | common-account load pam_tally" + - name: "4.4.2 | PATCH | Ensure lockout for failed password attempts is configured | common-account load pam_tally" ansible.builtin.lineinfile: path: /etc/pam.d/common-account regexp: "{{ item.regexp }}" @@ -83,33 +84,33 @@ - { regexp: '^account\s+requisite\s+pam_deny.so', line: 'account requisite pam_deny.so', after: '^# here.s the fallback if no module succeeds' } - { regexp: '^account\s+required\s+pam_tally2.so', line: 'account required pam_tally2.so', after: '^account requisite pam_deny.so' } - - name: "5.4.2 | PATCH | Ensure lockout for failed password attempts is configured | common-auth pam_tally2 opts" + - name: "4.4.2 | PATCH | Ensure lockout for failed password attempts is configured | common-auth pam_tally2 opts" ansible.builtin.lineinfile: path: /etc/pam.d/common-auth regexp: '^auth\s+required pam_tally2.so .*onerr=fail.*' line: 'auth required pam_tally2.so {{ ubtu20cis_pamtally2_login_opts }}' insertafter: '^# here are the per-package modules' when: - - ubtu20cis_rule_5_4_2 + - ubtu20cis_rule_4_4_2 tags: - level1-server - level1-workstation - automated - patch - - rule_5.4.2 + - rule_4.4.2 - pamd - notimplemented -- name: "5.4.3 | PATCH | Ensure password reuse is limited" +- name: "4.4.3 | PATCH | Ensure password reuse is limited" block: - - name: "5.4.3 | AUDIT | Ensure password reuse is limited | Confirm pam_pwhistory.so in common-password" + - name: "4.4.3 | AUDIT | Ensure password reuse is limited | Confirm pam_pwhistory.so in common-password" ansible.builtin.shell: grep 'password.*required.*pam_pwhistory.so' /etc/pam.d/common-password changed_when: false failed_when: false check_mode: false - register: ubtu20cis_5_4_3_pam_pwhistory_state + register: ubtu20cis_4_4_3_pam_pwhistory_state - - name: "5.4.3 | PATCH | Ensure password reuse is limited | Set remember value if pam_pwhistory exists" + - name: "4.4.3 | PATCH | Ensure password reuse is limited | Set remember value if pam_pwhistory exists" community.general.pamd: name: common-password type: password @@ -117,55 +118,85 @@ module_path: pam_pwhistory.so module_arguments: 'remember={{ ubtu20cis_pamd_pwhistory_remember }}' state: args_present - when: ubtu20cis_5_4_3_pam_pwhistory_state.stdout | length > 0 + when: ubtu20cis_4_4_3_pam_pwhistory_state.stdout | length > 0 - - name: "5.4.3 | PATCH | Ensure password reuse is limited | Set remember value if pam_pwhistory does no exist" + - name: "4.4.3 | PATCH | Ensure password reuse is limited | Set remember value if pam_pwhistory does not exist" ansible.builtin.lineinfile: path: /etc/pam.d/common-password line: 'password required pam_pwhistory.so remember={{ ubtu20cis_pamd_pwhistory_remember }}' insertafter: '^# end of pam-auth-update config' - when: ubtu20cis_5_4_3_pam_pwhistory_state.stdout | length == 0 + when: ubtu20cis_4_4_3_pam_pwhistory_state.stdout | length == 0 when: - - ubtu20cis_rule_5_4_3 + - ubtu20cis_rule_4_4_3 tags: - level1-server - level1-workstation - automated - patch - - rule_5.4.3 + - rule_4.4.3 - pamd -- name: "5.4.4 | PATCH | Ensure password hashing algorithm is SHA-512" +- name: "4.4.4 | PATCH | Ensure strong password hashing algorithm is 
configured" block: - - name: "5.4.4 | AUDIT | Ensure password hashing algorithm is SHA-512 | Confirm pam_unix.so" - ansible.builtin.shell: grep -E '^\s*password\s+(\S+\s+)+pam_unix\.so\s+(\S+\s+)*sha512\s*(\S+\s*)*(\s+#.*)?$' /etc/pam.d/common-password + - name: "4.4.4 | AUDIT | Ensure strong password hashing algorithm is configured | Confirm pam_unix.so" + ansible.builtin.shell: grep -E '^\s*password\s+(\S+\s+)+pam_unix\.so\s+(\S+\s+)*(sha512|yescrypt)\s*(\S+\s*)*(\s+#.*)?$' /etc/pam.d/common-password changed_when: false failed_when: false check_mode: false - register: ubtu20cis_5_4_4_pam_unix_state + register: ubtu20cis_4_4_4_pam_unix_state - - name: "5.4.4 | PATCH | Ensure password hashing algorithm is SHA-512 | Set hashing if pam_unix.so exists" + - name: "4.4.4 | PATCH | Ensure strong password hashing algorithm is configured | Set hashing if pam_unix.so exists" community.general.pamd: name: common-password type: password control: '[success=1 default=ignore]' module_path: pam_unix.so - module_arguments: sha512 + module_arguments: "{{ ubtu20cis_passwd_hash_algo }}" state: args_present - when: ubtu20cis_5_4_4_pam_unix_state.stdout | length > 0 + when: ubtu20cis_4_4_4_pam_unix_state.stdout | length > 0 - - name: "5.4.4 | PATCH | Ensure password hashing algorithm is SHA-512 | Set hashing if pam_unix.so does not exist" + - name: "4.4.4 | PATCH | Ensure strong password hashing algorithm is configured | Set hashing if pam_unix.so does not exist" ansible.builtin.lineinfile: path: /etc/pam.d/common-password - line: 'password [success=1 default=ignore] pam_unix.so sha512' + line: 'password [success=1 default=ignore] pam_unix.so {{ ubtu20cis_passwd_hash_algo }}' insertafter: '^# end of pam-auth-update config' - when: ubtu20cis_5_4_4_pam_unix_state.stdout | length == 0 + when: ubtu20cis_4_4_4_pam_unix_state.stdout | length == 0 when: - - ubtu20cis_rule_5_4_4 + - ubtu20cis_rule_4_4_4 tags: - level1-server - level1-workstation - automated - patch - - rule_5.4.4 + - rule_4.4.4 - pamd + +- name: "4.4.5 | PATCH | Ensure password hashing algorithm is up to date with the latest standards" + block: + - name: "4.4.5 | AUDIT | Ensure all current passwords uses the configured hashing algorithm | capture hash" + ansible.builtin.shell: cat /etc/shadow | awk -F':' '{print $1" "$2}' | grep -Ev '(!|\*)' + changed_when: false + failed_when: false + check_mode: false + register: ubtu20cis_4_4_5_passwd_hash_used + + # This is only looking for sha512 if yescrypt need to change the $6$ to $y$ + - name: "4.4.5 | AUDIT | Ensure all current passwords uses the configured hashing algorithm | check has found" + ansible.builtin.debug: + msg: "Warning!! 
Passwords found not using {{ ubtu20cis_passwd_hash_algo }} algorithm - This requires manual intervention" + when: "' $6$' not in ubtu20cis_4_4_5_passwd_hash_used.stdout" + + - name: "4.4.5 | WARN | Ensure all current passwords uses the configured hashing algorithm | warn_count" + ansible.builtin.import_tasks: warning_facts.yml + when: "' $6$' not in ubtu20cis_4_4_5_passwd_hash_used.stdout" + vars: + warn_control_id: '4.4.5' + when: + - ubtu20cis_rule_4_4_5 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.4.5 + - pam diff --git a/tasks/section_4/cis_4.4.yml b/tasks/section_4/cis_4.4.yml deleted file mode 100644 index b119f2a3..00000000 --- a/tasks/section_4/cis_4.4.yml +++ /dev/null @@ -1,31 +0,0 @@ ---- -- name: "4.4 | PATCH | Ensure logrotate assigns appropriate permissions" - block: - - name: "4.4 | AUDIT | Ensure logrotate assigns appropriate permissions | find logrotate create files" - ansible.builtin.find: - paths: /etc/logrotate.d/ - contains: '^\s*create\s\s*(?!0[6][0,4]0)(([0-9]|)[0-9]{3})' - use_regex: true - register: ubtu20cis_rule_4_4_logrotate_create - - - name: "4.4 | PATCH | Ensure logrotate assigns appropriate permissions | update logrotate.d files if exists" - ansible.builtin.lineinfile: - path: "{{ item.path }}" - regexp: '^(\s*\b)create($|\s)(\d*)(\b.*)$' - line: '\g<1>create {{ ubtu20cis_logrotate_create_settings }}\g<4>' - backrefs: true - insertbefore: '^}' - with_items: - - { path: '/etc/logrotate.conf'} - - "{{ ubtu20cis_rule_4_4_logrotate_create.files }}" - loop_control: - label: "{{ item.path }}" - when: - - ubtu20cis_rule_4_4 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_4.4 - - logrotate diff --git a/tasks/section_4/cis_4.5.1.x.yml b/tasks/section_4/cis_4.5.1.x.yml new file mode 100644 index 00000000..ab877fef --- /dev/null +++ b/tasks/section_4/cis_4.5.1.x.yml @@ -0,0 +1,206 @@ +--- + +- name: "4.5.1.1 | PATCH | Ensure minimum days between password changes is configured" + block: + - name: "4.5.1.1 | PATCH | Ensure minimum days between password changes is configured | Set /etc/login.defs PASS_MIN_DAYS" + ansible.builtin.lineinfile: + path: /etc/login.defs + regexp: '^PASS_MIN_DAYS|^#PASS_MIN_DAYS' + line: 'PASS_MIN_DAYS {{ ubtu20cis_pass.min_days }}' + + - name: "4.5.1.1 | PATCH | Ensure minimum days between password changes is configured | Set existing users PASS_MIN_DAYS" + ansible.builtin.shell: chage --mindays {{ ubtu20cis_pass.min_days }} {{ item }} + failed_when: false + with_items: + - "{{ ubtu20cis_passwd | selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" + when: + - ubtu20cis_disruption_high + - item != 'nobody' + when: + - ubtu20cis_rule_4_5_1_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.5.1.1 + - user + - login + +- name: "4.5.1.2 | PATCH | Ensure password expiration is 365 days or less" + block: + - name: "4.5.1.2 | PATCH | Ensure password expiration is 365 days or less | Set /etc/login.defs PASS_MAX_DAYS" + ansible.builtin.lineinfile: + path: /etc/login.defs + regexp: '^PASS_MAX_DAYS|^#PASS_MAX_DAYS' + line: 'PASS_MAX_DAYS {{ ubtu20cis_pass.max_days }}' + insertafter: '# Password aging controls' + + - name: "4.5.1.2 | PATCH | Ensure password expiration is 365 days or less | Set existing users PASS_MAX_DAYS" + ansible.builtin.shell: chage --maxdays {{ ubtu20cis_pass.max_days }} {{ item }} + failed_when: false + with_items: + - "{{ ubtu20cis_passwd | selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" + when: 
+ - ubtu20cis_disruption_high + - item != 'nobody' + when: + - ubtu20cis_rule_4_5_1_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.5.1.2 + - user + - login + +- name: "4.5.1.3 | PATCH | Ensure password expiration warning days is 7 or more" + block: + - name: "4.5.1.3 | PATCH | Ensure password expiration warning days is 7 or more | Set /etc/login.defs PASS_WARN_AGE" + ansible.builtin.lineinfile: + path: /etc/login.defs + regexp: '^PASS_WARN_AGE|^#PASS_WARN_AGE' + line: 'PASS_WARN_AGE {{ ubtu20cis_pass.warn_age }}' + + - name: "4.5.1.3 | PATCH | Ensure password expiration warning days is 7 or more | Set existing users PASS_WARN_AGE" + ansible.builtin.shell: chage --warndays {{ ubtu20cis_pass.warn_age }} {{ item }} + failed_when: false + with_items: + - "{{ ubtu20cis_passwd | selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" + when: + - ubtu20cis_disruption_high + - item != 'nobody' + when: + - ubtu20cis_rule_4_5_1_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.5.1.3 + - user + - login + +- name: "4.5.1.4 | PATCH | Ensure inactive password lock is 30 days or less" + block: + - name: "4.5.1.4 | AUDIT | Ensure inactive password lock is 30 days or less | Get current inactive period" + ansible.builtin.shell: useradd -D | grep INACTIVE | cut -d= -f2 + changed_when: false + failed_when: false + register: ubtu20cis_4_5_1_5_inactive_period + + - name: "4.5.1.4 | AUDIT | Ensure inactive password lock is 30 days or less | Get current users out of compliance" + ansible.builtin.shell: awk -F":" '(/^[^:]+:[^!*]/ && ($7<0 || $7>30)){print $1}' //etc/shadow + changed_when: false + failed_when: false + register: ubtu20cis_4_5_1_4_current_users + + - name: "4.5.1.4 | PATCH | Ensure inactive password lock is 30 days or less | Set inactive period for new users" + ansible.builtin.shell: useradd -D -f {{ ubtu20cis_pass.inactive }} + changed_when: true + failed_when: false + when: ubtu20cis_4_5_1_5_inactive_period.stdout != ubtu20cis_pass.inactive | string + + - name: "4.5.1.4 | PATCH | Ensure inactive password lock is 30 days or less | Set inactive period for existing users" + ansible.builtin.shell: chage --inactive {{ ubtu20cis_pass.inactive }} {{ item }} + failed_when: false + with_items: + - "{{ ubtu20cis_passwd | selectattr('uid', '>=', 1000) | map(attribute='id') | list | intersect(ubtu20cis_4_5_1_4_current_users.stdout_lines) | list }}" + when: + - ubtu20cis_disruption_high + - ubtu20cis_4_5_1_4_current_users.stdout | length > 0 + when: + - ubtu20cis_rule_4_5_1_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.5.1.4 + - user + - login + +- name: "4.5.1.5 | PATCH | Ensure all users last password change date is in the past" + block: + - name: "4.5.1.5 | AUDIT | Ensure all users last password change date is in the past | Get current date in Unix Time" + ansible.builtin.shell: echo $(($(date --utc --date "$1" +%s)/86400)) + changed_when: false + failed_when: false + check_mode: false + register: ubtu20cis_4_5_1_5_current_time + + - name: "4.5.1.5 | AUDIT | Ensure all users last password change date is in the past | Get list of users with last changed PW date in future" + ansible.builtin.shell: "cat /etc/shadow | awk -F: '{if($3>{{ ubtu20cis_4_5_1_5_current_time.stdout }})print$1}'" + changed_when: false + failed_when: false + check_mode: false + register: ubtu20cis_4_5_1_5_user_list + + - name: "4.5.1.5 | PATCH | Ensure all users last password change date is in the past | Warn about 
users" + ansible.builtin.debug: + msg: + - "Warning!! The following accounts have the last PW change date in the future" + - "{{ ubtu20cis_4_5_1_5_user_list.stdout_lines }}" + when: ubtu20cis_4_5_1_5_user_list.stdout | length > 0 + + - name: "4.5.1.5 | PATCH | Ensure all users last password change date is in the past | Warn Count" + ansible.builtin.import_tasks: warning_facts.yml + when: ubtu20cis_4_5_1_5_user_list.stdout | length > 0 + + - name: "4.5.1.5 | PATCH | Ensure all users last password change date is in the past | Lock accounts with future PW changed dates" + ansible.builtin.shell: passwd --expire {{ item }} + failed_when: false + with_items: + - "{{ ubtu20cis_4_5_1_5_user_list.stdout_lines }}" + when: + - ubtu20cis_disruption_high + - ubtu20cis_4_5_1_5_user_list.stdout | length > 0 + vars: + warn_control_id: '4.5.1.5' + when: + - ubtu20cis_rule_4_5_1_5 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.5.1.5 + - user + - login + +- name: "4.5.1.6 | PATCH | Ensure the number of changed characters in a new password is configured" + ansible.builtin.lineinfile: + path: /etc/security/pwquality.conf + regexp: '^(#\s+|)difok' + line: 'difok = {{ ubtu20cis_pass.character_changed }}' + create: true + mode: 0640 + when: + - ubtu20cis_rule_4_5_1_6 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.5.1.6 + - user + - login + +- name: "4.5.1.7 | PATCH | Ensure preventing the use of dictionary words for passwords is configured" + ansible.builtin.lineinfile: + path: /etc/security/pwquality.conf + regexp: '^(#\s+|)dictcheck' + line: 'dictcheck = 1' + create: true + mode: 0640 + when: + - ubtu20cis_rule_4_5_1_7 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.5.1.7 + - user + - login diff --git a/tasks/section_4/cis_4.5.x.yml b/tasks/section_4/cis_4.5.x.yml new file mode 100644 index 00000000..9f851614 --- /dev/null +++ b/tasks/section_4/cis_4.5.x.yml @@ -0,0 +1,158 @@ +--- + +- name: "4.5.2 | PATCH | Ensure system accounts are secured" + block: + - name: "4.5.2 | PATCH | Ensure system accounts are secured | Set system accounts to nologin" + ansible.builtin.user: + name: "{{ item }}" + shell: /sbin/nologin + with_items: + - "{{ ubtu20cis_passwd | selectattr('uid', '<', 1000) | map(attribute='id') | list }}" + when: + - item != "root" + - item != "sync" + - item != "shutdown" + - item != "halt" + - item != "irc" + + - name: "4.5.2 | PATCH | Ensure system accounts are secured | Lock non-root system accounts" + ansible.builtin.user: + name: "{{ item }}" + password_lock: true + with_items: + - "{{ ubtu20cis_passwd | selectattr('uid', '<', 1000) | map(attribute='id') | list }}" + when: + - item != "root" + when: + - ubtu20cis_rule_4_5_2 + - ubtu20cis_disruption_high + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.5.2 + - user + - system + +- name: "4.5.3 | PATCH | Ensure default group for the root account is GID 0" + block: + - name: "4.5.3 | PATCH | Ensure default group for the root account is GID 0 | Set root group to GID 0" + ansible.builtin.group: + name: root + gid: 0 + + - name: "4.5.3 | PATCH | Ensure default group for the root account is GID 0 | Set root user to root group" + ansible.builtin.user: + name: root + group: root + when: + - ubtu20cis_rule_4_5_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.5.3 + - user + - system + +- name: "4.5.4 | PATCH | Ensure default user umask is 027 or more restrictive" + block: + - 
name: "4.5.4 | AUDIT | Ensure default user umask is 027 or more restrictive" + ansible.builtin.shell: grep -E '^session\s+optional\s+pam_umask.so' /etc/pam.d/common-session + changed_when: false + failed_when: false + check_mode: false + register: ubtu20cis_4_5_4_umask_pam_status + + - name: "4.5.4 | PATCH | Ensure default user umask is 027 or more restrictive" + ansible.builtin.lineinfile: + path: /etc/pam.d/common-session + line: 'session optional pam_umask.so' + insertbefore: '^# end of pam-auth-update config' + when: ubtu20cis_4_5_4_umask_pam_status.stdout | length == 0 + + - name: "4.5.4 | PATCH | Ensure default user umask is 027 or more restrictive" + ansible.builtin.lineinfile: + path: "{{ item }}" + regexp: '(?i)(umask\s*)' + line: '\g<1>{{ ubtu20cis_bash_umask }}' + backrefs: true + with_items: + - /etc/bash.bashrc + - /etc/profile + - /etc/login.defs + + - name: "4.5.4 | PATCH | Ensure default user umask is 027 or more restrictive" + ansible.builtin.lineinfile: + path: /etc/login.defs + regexp: '^USERGROUPS_ENAB' + line: USERGROUPS_ENAB no + when: + - ubtu20cis_rule_4_5_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.5.4 + - user + +- name: "4.5.5 | PATCH | Ensure default user shell timeout is configured" + ansible.builtin.blockinfile: + create: true + mode: 0644 + dest: "{{ item.dest }}" + state: "{{ item.state }}" + marker: "# {mark} ANSIBLE MANAGED" + block: | + # Set session timeout - CIS ID 4.5.5 + # only set TMOUT if it isn't set yet to avoid a shell error + : ${TMOUT={{ ubtu20cis_shell_session_timeout.timeout }}} + readonly TMOUT + export TMOUT + with_items: + - { dest: "{{ ubtu20cis_shell_session_timeout.file }}", state: present } + - { dest: /etc/profile, state: "{{ (ubtu20cis_shell_session_timeout.file == '/etc/profile') | ternary('present', 'absent') }}" } + - { dest: /etc/bash.bashrc, state: present } + when: + - ubtu20cis_rule_4_5_5 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.5.5 + - user + +- name: "4.5.6 | PATCH | Ensure nologin is not listed in /etc/shells" + ansible.builtin.lineinfile: + path: /etc/shells + regex: nologin\b + state: absent + when: + - ubtu20cis_rule_4_5_6 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_4.5.6 + - user + +- name: "4.5.7 | PATCH | Ensure maximum number of same consecutive characters in a password is configured" + ansible.builtin.lineinfile: + path: /etc/security/pwquality.conf + regexp: '^(#\s+|)maxrepeat' + line: 'maxrepeat = {{ ubtu20cis_pass.max_repeat_letters }}' + when: + - ubtu20cis_rule_4_5_7 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_4.5.7 + - user + - login diff --git a/tasks/section_4/main.yml b/tasks/section_4/main.yml index 03e4ddd9..08afefa9 100644 --- a/tasks/section_4/main.yml +++ b/tasks/section_4/main.yml @@ -1,24 +1,18 @@ --- -- name: "SECTION | 4.1.1 | Ensure auditing is enabled" - ansible.builtin.import_tasks: cis_4.1.1.x.yml - -- name: "SECTION | 4.1.2 | Configure Data Retention" - ansible.builtin.import_tasks: cis_4.1.2.x.yml - -- name: "SECTION | 4.1.x | Login Settings" +- name: "SECTION | 4.1 | Configure job based time schedulers" ansible.builtin.import_tasks: cis_4.1.x.yml -- name: "SECTION | 4.2.1 | Configure rsyslog" - ansible.builtin.import_tasks: cis_4.2.1.x.yml +- name: "SECTION | 4.2 | Configure SSH Server" + ansible.builtin.import_tasks: cis_4.2.x.yml -- name: "SECTION | 4.2.2 | Configure journald" - ansible.builtin.import_tasks: cis_4.2.2.x.yml +- 
name: "SECTION | 4.3 | Configure Privilege escalations" + ansible.builtin.import_tasks: cis_4.3.x.yml -- name: "SECTION | 4.2.3 | Ensure permissions on all logfiles are configured" - ansible.builtin.import_tasks: cis_4.2.3.yml +- name: "SECTION | 4.4 | Configure PAM" + ansible.builtin.import_tasks: cis_4.4.x.yml -- name: "SECTION | 4.3 | Ensure logrotate is configured" - ansible.builtin.import_tasks: cis_4.3.yml +- name: "SECTION | 4.5.1.x | User Accounts and Environment | Shadow Suite" + ansible.builtin.import_tasks: cis_4.5.1.x.yml -- name: "SECTION | 4.4 | Ensure logrotate assigns appropriate permissions" - ansible.builtin.import_tasks: cis_4.4.yml +- name: "SECTION | 4.5.x | User Accounts and Environment | password params" + ansible.builtin.import_tasks: cis_4.5.x.yml diff --git a/tasks/section_5/cis_5.1.1.1.x.yml b/tasks/section_5/cis_5.1.1.1.x.yml new file mode 100644 index 00000000..e6a7dde4 --- /dev/null +++ b/tasks/section_5/cis_5.1.1.1.x.yml @@ -0,0 +1,68 @@ +--- + +- name: "5.1.1.1.1 | Ensure systemd-journal-remote is installed" + ansible.builtin.package: + name: systemd-journal-remote + state: present + when: + - ubtu20cis_rule_5_1_1_1_1 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.1.1.1 + - journald + +- name: "5.1.1.1.2 | PATCH | Ensure systemd-journal-remote is configured" + ansible.builtin.lineinfile: + path: /etc/systemd/journal-upload.conf + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + insertafter: "{{ item.after | default (omit) }}" + notify: restart journal-upload + loop: + - { regexp: 'URL=', line: 'URL={{ ubtu20cis_journald_upload.remote_url }}' } + - { regexp: 'ServerKeyFile=', line: 'ServerKeyFile={{ ubtu20cis_journal_upload.serverkeyfile }}' } + - { regexp: 'ServerCertificateFile=', line: 'ServerCertificateFile={{ ubtu20cis_journal_upload.servercertfile }}' } + - { regexp: 'TrustedCertificateFile=', line: 'TrustedCertificateFile={{ ubtu20cis_journal_upload.trustfile }}' } + when: + - ubtu20cis_rule_5_1_1_1_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.1.1.2 + - journald + +- name: "5.1.1.1.3 | PATCH | Ensure systemd-journal-remote is enabled" + ansible.builtin.systemd: + name: systemd-journal-upload.service + state: started + enabled: true + when: + - ubtu20cis_rule_5_1_1_1_3 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.1.1.3 + - journald + +- name: "5.1.1.1.4 | Ensure journald is not configured to receive logs from a remote client" + ansible.builtin.systemd: + name: systemd-journal-remote + state: stopped + enabled: false + when: + - ubtu20cis_rule_5_1_1_1_4 + - not journald_log_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.1.1.4 + - journald diff --git a/tasks/section_5/cis_5.1.1.x.yml b/tasks/section_5/cis_5.1.1.x.yml new file mode 100644 index 00000000..f39787c3 --- /dev/null +++ b/tasks/section_5/cis_5.1.1.x.yml @@ -0,0 +1,111 @@ +--- + +- name: "5.1.1.2 | Ensure journald service is enabled" + block: + - name: "5.1.1.2 | Ensure journald service is enabled | capture status" + ansible.builtin.shell: systemctl is-enabled systemd-journald.service + register: ubtu20cis_5_1_1_2_journald_enabled + changed_when: false + failed_when: ubtu20cis_5_1_1_2_journald_enabled.rc not in [ 0, 1, 2 ] + + - name: "5.1.1.2 | Ensure journald service is enabled | warn count if not as expected" + ansible.builtin.import_tasks: warning_facts.yml + when: "'static' not in ubtu20cis_5_1_1_2_journald_enabled.stdout" + vars: + 
warn_control_id: '5.1.1.2' + when: + - ubtu20cis_rule_5_1_1_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.1.2 + - journald + +- name: "5.1.1.3 | PATCH | Ensure journald is configured to compress large log files" + ansible.builtin.lineinfile: + path: /etc/systemd/journald.conf + regexp: '^Compress|^#Compress' + line: 'Compress=yes' + insertafter: '\[Journal\]' + when: + - ubtu20cis_rule_5_1_1_3 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.1.3 + - journald + +- name: "5.1.1.4 | PATCH | Ensure journald is configured to write logfiles to persistent disk" + ansible.builtin.lineinfile: + path: /etc/systemd/journald.conf + regexp: '^Storage|^#Storage' + line: 'Storage=persistent' + insertafter: '\[Journal\]' + when: + - ubtu20cis_rule_5_1_1_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.1.4 + - journald + +- name: "5.1.1.5 | PATCH | Ensure journald is not configured to send logs to rsyslog" + ansible.builtin.lineinfile: + path: /etc/systemd/journald.conf + regexp: '^ForwardToSyslog|^#ForwardToSyslog' + line: 'ForwardToSyslog=no' + insertafter: '\[Journal\]' + when: + - ubtu20cis_rule_5_1_1_5 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.1.5 + - journald + +- name: "5.1.1.6 | PATCH | Ensure journald log rotation is configured per site policy" + ansible.builtin.lineinfile: + path: /etc/systemd/journald.conf + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + loop: + - { regexp: 'SystemMaxUse=', line: "SystemMaxUse={{ ubtu20cis_journald_log_rotate.systemmaxuse }}" } + - { regexp: 'SystemKeepFree=', line: "SystemKeepFree={{ ubtu20cis_journald_log_rotate.systemkeepfree }}" } + - { regexp: 'RunTimeMaxUse=', line: "RunTimeMaxUse={{ ubtu20cis_journald_log_rotate.runtimemaxuse }}" } + - { regexp: 'RuntimeKeepFree=', line: "RuntimeKeepFree={{ ubtu20cis_journald_log_rotate.runtimekeepfree }}" } + - { regexp: 'MaxFileSec=', line: "MaxFileSec={{ ubtu20cis_journald_log_rotate.maxfilesec }}" } + when: + - ubtu20cis_rule_5_1_1_6 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.1.6 + - journald + +- name: "5.1.1.7 | PATCH | Ensure journald default file permissions configured" + block: + - name: "5.1.1.7 | AUDIT | Ensure journald default file permissions configured | check for override file" + ansible.builtin.stat: + path: /etc/tmpfiles.d/systemd.conf + register: ubtu20cis_5_1_1_7_tmpfile + + - name: "5.1.1.7 | AUDIT | Ensure journald default file permissions configured | update settings if override exists" + ansible.builtin.lineinfile: + path: /etc/tmpfiles.d/systemd.conf + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + backrefs: true + when: ubtu20cis_5_1_1_7_tmpfile.stat.exists + loop: + - { regexp: '.*system.journal.*(:r(w|-)(x|-))(,|$)', line: 'a+ /var/log/journal/%m/system.journal - - - - group:adm:r--'} + - { regexp: '.*\/var\/log\/journal\/\%m\s.*(:r(w|-)(x|-))(,|$)', line: 'a+ /var/log/journal/%m - - - - d:group:adm:r-x,group:adm:r-x'} + - { regexp: '.*\/var\/log\/journal\s.*(:r(w|-)(x|-))(,|$)', line: 'a+ /var/log/journal - - - - d:group::r-x,d:group:adm:r-x,group::r-x,group:adm:r-x'} diff --git a/tasks/section_4/cis_4.2.1.x.yml b/tasks/section_5/cis_5.1.2.x.yml similarity index 60% rename from tasks/section_4/cis_4.2.1.x.yml rename to tasks/section_5/cis_5.1.2.x.yml index 9d216197..e83ee509 100644 --- a/tasks/section_4/cis_4.2.1.x.yml +++ b/tasks/section_5/cis_5.1.2.x.yml @@ -1,59 +1,91 @@ --- -- name: 
"4.2.1.1 | PATCH | Ensure rsyslog is installed" +- name: "5.1.2.1 | PATCH | Ensure rsyslog is installed" ansible.builtin.package: name: rsyslog state: present when: - - ubtu20cis_rule_4_2_1_1 + - ubtu20cis_rule_5_1_2_1 tags: - level1-server - level1-workstation - automated - patch - - rule_4.2.1.1 + - rule_5.1.2.1 - rsyslog - apt -- name: "4.2.1.2 | PATCH | Ensure rsyslog Service is enabled" +- name: "5.1.2.2 | PATCH | Ensure rsyslog Service is enabled" ansible.builtin.service: name: rsyslog enabled: true when: - - ubtu20cis_rule_4_2_1_2 + - ubtu20cis_rule_5_1_2_2 tags: - level1-server - level1-workstation - automated - patch - - rule_4.2.1.2 + - rule_5.1.2.2 - rsyslog -- name: "4.2.1.3 | PATCH | Ensure logging is configured" +- name: "5.1.2.3 | PATCH | Ensure journald is configured to send logs to rsyslog" + ansible.builtin.lineinfile: + path: /etc/systemd/journald.conf + regexp: ^ForwardToSyslog= + line: ForwardToSyslog=yes + notify: restart syslog service + when: + - ubtu20cis_rule_5_1_2_3 + tags: + - level1-server + - level1-workstation + - manual + - patch + - journald + - rule_5.1.2.3 + +- name: "5.1.2.4 | PATCH | Ensure rsyslog default file permissions configured" + ansible.builtin.lineinfile: + path: /etc/rsyslog.conf + regexp: '^\$FileCreateMode|^#\$FileCreateMode' + line: '$FileCreateMode 0640' + notify: restart syslog service + when: + - ubtu20cis_rule_5_1_2_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_5.1.2.4 + - rsyslog + +- name: "5.1.2.5 | PATCH | Ensure logging is configured" block: - - name: "4.2.1.3 | AUDIT | Ensure logging is configured | Find configuration file" + - name: "5.1.2.5 | AUDIT | Ensure logging is configured | Find configuration file" ansible.builtin.shell: grep -r "*.emerg" /etc/* | cut -f1 -d":" changed_when: false failed_when: false check_mode: false - register: ubtu20cis_4_2_1_3_rsyslog_config_path + register: ubtu20cis_5_1_2_5_rsyslog_config_path - - name: "4.2.1.3 | AUDIT | Ensure logging is configured | Gather rsyslog current config" - ansible.builtin.shell: "cat {{ ubtu20cis_4_2_1_3_rsyslog_config_path.stdout }}" + - name: "5.1.2.5 | AUDIT | Ensure logging is configured | Gather rsyslog current config" + ansible.builtin.shell: "cat {{ ubtu20cis_5_1_2_5_rsyslog_config_path.stdout }}" changed_when: false failed_when: false check_mode: false - register: ubtu20cis_4_2_1_3_rsyslog_config + register: ubtu20cis_5_1_2_5_rsyslog_config - - name: "4.2.1.3 | AUDIT | Ensure logging is configured | Message out config" + - name: "5.1.2.5 | AUDIT | Ensure logging is configured | Message out config" ansible.builtin.debug: msg: - "Warning!! 
Below is the current logging configurations for rsyslog, please review" - - "{{ ubtu20cis_4_2_1_3_rsyslog_config.stdout_lines }}" + - "{{ ubtu20cis_5_1_2_5_rsyslog_config.stdout_lines }}" when: not ubtu20cis_rsyslog_ansible_managed - - name: "4.2.1.3 | PATCH | Ensure logging is configured | Automated rsyslog configuration" + - name: "5.1.2.5 | PATCH | Ensure logging is configured | Automated rsyslog configuration" ansible.builtin.lineinfile: - path: "{{ ubtu20cis_4_2_1_3_rsyslog_config_path.stdout }}" + path: "{{ ubtu20cis_5_1_2_5_rsyslog_config_path.stdout }}" regexp: "{{ item.regexp }}" line: "{{ item.line }}" insertafter: "{{ item.insertafter }}" @@ -64,9 +96,7 @@ - { regexp: '^mail.info|^#mail.info', line: 'mail.info -/var/log/mail.info', insertafter: '^# Logging for the mail system' } - { regexp: '^mail.warn|^#mail.warn', line: 'mail.warn -/var/log/mail.warn', insertafter: '^# Logging for the mail system.' } - { regexp: '^mail.err|^#mail.err', line: 'mail.err /var/log/mail.err', insertafter: '^# Logging for the mail system.' } - - { regexp: '^news.crit|^#news.crit', line: 'news.crit -/var/log/news/news.crit', insertafter: '^# First some standard log files'} - - { regexp: '^news.err|^#news.err', line: 'news.err -/var/log/news/news.err', insertafter: '^# First some standard log files' } - - { regexp: '^news.notice|^#news.notice', line: 'news.notice -/var/log/news/news.notice', insertafter: '^# First some standard log files' } + - { regexp: '^cron.\*|^#cron.\*', line: 'cron.* -/var/log/cron.log', insertafter: '^# First some standard log files' } - { regexp: '^\*.=warning;\*.=err|^#\*.=warning;\*.=err', line: '*.=warning;*.=err -/var/log/warn', insertafter: '^# First some standard log files' } - { regexp: '^\*.crit|^#\*.crit', line: '*.crit /var/log/warn', insertafter: '^# First some standard log files' } - { regexp: '^\*.\*;mail.none;news.none|^#\*.\*;mail.none;news.none', line: '*.*;mail.none;news.none -/var/log/messages', insertafter: '^# First some standard log files' } @@ -76,41 +106,25 @@ - { regexp: '^local6,local7.\*|^#local6,local7.\*', line: 'local6,local7.* -/var/log/localmessages', insertafter: '^# First some standard log files' } loop_control: label: "{{ item.line }}" - notify: restart rsyslog + notify: restart syslog service when: ubtu20cis_rsyslog_ansible_managed - - name: "4.2.1.3 | AUDIT | Ensure logging is configured | Warn Count" + - name: "5.1.2.5 | AUDIT | Ensure logging is configured | Warn Count" ansible.builtin.import_tasks: warning_facts.yml when: not ubtu20cis_rsyslog_ansible_managed vars: - warn_control_id: '4.2.1.3' + warn_control_id: '5.1.2.5' when: - - ubtu20cis_rule_4_2_1_3 + - ubtu20cis_rule_5_1_2_5 tags: - level1-server - level1-workstation - manual - patch - - rule_4.2.1.3 + - rule_5.1.2.5 - rsyslog -- name: "4.2.1.4 | PATCH | Ensure rsyslog default file permissions configured" - ansible.builtin.lineinfile: - path: /etc/rsyslog.conf - regexp: '^\$FileCreateMode|^#\$FileCreateMode' - line: '$FileCreateMode 0640' - notify: restart rsyslog - when: - - ubtu20cis_rule_4_2_1_4 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_4.2.1.4 - - rsyslog - -- name: "4.2.1.5 | PATCH | Ensure rsyslog is configured to send logs to a remote log host" +- name: "5.1.2.6 | PATCH | Ensure rsyslog is configured to send logs to a remote log host" ansible.builtin.blockinfile: path: /etc/rsyslog.conf block: | @@ -118,44 +132,33 @@ *.* @@{{ ubtu20cis_remote_log_server }} insertafter: EOF when: - - ubtu20cis_rule_4_2_1_5 + - 
ubtu20cis_rule_5_1_2_6 tags: - level1-server - level1-workstation - automated - patch - - rule_4.2.1.5 + - rule_5.1.2.6 - rsyslog -- name: "4.2.1.6 | PATCH | Ensure remote rsyslog messages are only accepted on designated log hosts" - block: - - name: "4.2.1.6 | PATCH | Ensure remote rsyslog messages are only accepted on designated log hosts | When not a log host" - ansible.builtin.replace: - path: /etc/rsyslog.conf - regexp: '({{ item }})' - replace: '#\1' - with_items: - - '^(\$ModLoad)' - - '^(\$InputTCPServerRun)' - notify: restart rsyslog - when: not ubtu20cis_system_is_log_server - - - name: "4.2.1.6 | PATCH | Ensure remote rsyslog messages are only accepted on designated log hosts | When a log server" - ansible.builtin.lineinfile: - path: /etc/rsyslog.conf - regexp: "{{ item.regexp }}" - line: "{{ item.line }}" - with_items: - - { regexp: '^\$ModLoad|^#\$ModLoad', line: '$ModLoad imtcp' } - - { regexp: '^\$InputTCPServerRun|^#\$InputTCPServerRun', line: '$InputTCPServerRun 514' } - notify: restart rsyslog - when: ubtu20cis_system_is_log_server +- name: "5.1.2.7 | PATCH | Ensure rsyslog is not configured to receive logs from a remote client | When not a log host" + ansible.builtin.replace: + path: /etc/rsyslog.conf + regexp: '({{ item }})' + replace: '#\1' + with_items: + - '^(\$ModLoad)' + - '^(\$InputTCPServerRun)' + - '^(module\(load="imtcp"\))' + - '^(input\(type="imtcp" port="\d{1,5}"\))' + notify: restart syslog service when: - - ubtu20cis_rule_4_2_1_6 + - not ubtu20cis_system_is_log_server + - ubtu20cis_rule_5_1_2_7 tags: - level1-server - level1-workstation - manual - patch - - rule_4.2.1.6 + - rule_5.1.2.7 - rsyslog diff --git a/tasks/section_5/cis_5.1.3.yml b/tasks/section_5/cis_5.1.3.yml new file mode 100644 index 00000000..200a077a --- /dev/null +++ b/tasks/section_5/cis_5.1.3.yml @@ -0,0 +1,30 @@ +--- + +- name: "5.1.3 | PATCH | Ensure permissions on all logfiles are configured" + block: + - name: "5.1.3 | AUDIT | Ensure permissions on all logfiles are configured | find files" + ansible.builtin.find: + paths: "/var/log" + file_type: file + recurse: true + register: logfiles + + - name: "5.1.3 | PATCH | Ensure permissions on all logfiles are configured | change permissions" + ansible.builtin.file: + path: "{{ item.path }}" + mode: 0640 + loop: "{{ logfiles.files }}" + loop_control: + label: "{{ item.path }}" + when: + - item.path != "/var/log/btmp" + - item.path != "/var/log/utmp" + - item.path != "/var/log/wtmp" + when: + - ubtu20cis_rule_5_1_3 + tags: + - level1-server + - level1-workstation + - patch + - logfiles + - rule_5.1.3 diff --git a/tasks/section_5/cis_5.1.x.yml b/tasks/section_5/cis_5.1.x.yml deleted file mode 100644 index 15a4d536..00000000 --- a/tasks/section_5/cis_5.1.x.yml +++ /dev/null @@ -1,161 +0,0 @@ ---- -- name: "5.1.1 | PATCH | Ensure cron daemon is enabled and running" - ansible.builtin.service: - name: cron - state: started - enabled: true - when: - - ubtu20cis_rule_5_1_1 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.1.1 - - cron - -- name: "5.1.2 | PATCH | Ensure permissions on /etc/crontab are configured" - ansible.builtin.file: - path: /etc/crontab - owner: root - group: root - mode: 0600 - when: - - ubtu20cis_rule_5_1_2 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.1.2 - - cron - -- name: "5.1.3 | PATCH | Ensure permissions on /etc/cron.hourly are configured" - ansible.builtin.file: - path: /etc/cron.hourly - owner: root - group: root - mode: 0700 - when: - - 
ubtu20cis_rule_5_1_3 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.1.3 - - cron - -- name: "5.1.4 | PATCH | Ensure permissions on /etc/cron.daily are configured" - ansible.builtin.file: - path: /etc/cron.daily - owner: root - group: root - mode: 0700 - when: - - ubtu20cis_rule_5_1_4 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.1.4 - - cron - -- name: "5.1.5 | PATCH | Ensure permissions on /etc/cron.weekly are configured" - ansible.builtin.file: - path: /etc/cron.weekly - owner: root - group: root - mode: 0700 - when: - - ubtu20cis_rule_5_1_5 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.1.5 - - cron - -- name: "5.1.6 | PATCH | Ensure permissions on /etc/cron.monthly are configured" - ansible.builtin.file: - path: /etc/cron.monthly - owner: root - group: root - mode: 0700 - when: - - ubtu20cis_rule_5_1_6 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.1.6 - - cron - -- name: "5.1.7 | PATCH | Ensure permissions on /etc/cron.d are configured" - ansible.builtin.file: - path: /etc/cron.d - owner: root - group: root - mode: 0700 - when: - - ubtu20cis_rule_5_1_7 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.1.7 - - cron - -- name: "5.1.8 | PATCH | Ensure at/cron is restricted to authorized users" - block: - - name: "5.1.8 | PATCH | Ensure at/cron is restricted to authorized users | Remove cron.deny" - ansible.builtin.file: - path: /etc/cron.deny - state: absent - - - name: "5.1.8 | PATCH | Ensure at/cron is restricted to authorized users | Create cron.allow" - ansible.builtin.copy: - content: "" - dest: /etc/cron.allow - force: false - owner: root - group: root - mode: 0640 - when: - - ubtu20cis_rule_5_1_8 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.1.8 - - cron - -- name: "5.1.9 | PATCH | Ensure at is restricted to authorized users" - block: - - name: "5.1.9 | PATCH | Ensure at is restricted to authorized users | Remove at.deny" - ansible.builtin.file: - path: /etc/at.deny - state: absent - - - name: "5.1.9 | PATCH | Ensure at is restricted to authorized users | Create at.allow" - ansible.builtin.copy: - content: "" - dest: /etc/at.allow - force: false - owner: root - group: root - mode: 0640 - when: - - ubtu20cis_rule_5_1_9 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.1.9 - - cron diff --git a/tasks/section_4/cis_4.1.1.x.yml b/tasks/section_5/cis_5.2.1.x.yml similarity index 61% rename from tasks/section_4/cis_4.1.1.x.yml rename to tasks/section_5/cis_5.2.1.x.yml index 616bdcfb..4c9718d3 100644 --- a/tasks/section_4/cis_4.1.1.x.yml +++ b/tasks/section_5/cis_5.2.1.x.yml @@ -1,100 +1,100 @@ --- -- name: "4.1.1.1 | PATCH | Ensure auditd is installed" +- name: "5.2.1.1 | PATCH | Ensure auditd is installed" ansible.builtin.package: name: ['auditd', 'audispd-plugins'] state: present when: - - ubtu20cis_rule_4_1_1_1 + - ubtu20cis_rule_5_2_1_1 tags: - level2-server - level2-workstation - automated - patch - - rule_4.1.1.1 + - rule_5.2.1.1 - auditd -- name: "4.1.1.2 | PATCH | Ensure auditd service is enabled" +- name: "5.2.1.2 | PATCH | Ensure auditd service is enabled and active" ansible.builtin.service: name: auditd state: started enabled: true when: - - ubtu20cis_rule_4_1_1_2 + - ubtu20cis_rule_5_2_1_2 tags: - level2-server - level2-workstation - automated - patch - - rule_4.1.1.2 + - rule_5.2.1.2 - auditd -- name: "4.1.1.3 | PATCH | 
Ensure auditing for processes that start prior to auditd is enabled" +- name: "5.2.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled" block: - - name: "4.1.1.3 | AUDIT | Ensure auditing for processes that start prior to auditd is enabled | Get GRUB_CMDLINE_LINUX" + - name: "5.2.1.3 | AUDIT | Ensure auditing for processes that start prior to auditd is enabled | Get GRUB_CMDLINE_LINUX" ansible.builtin.shell: grep "GRUB_CMDLINE_LINUX=" /etc/default/grub | cut -f2 -d'"' changed_when: false failed_when: false check_mode: false - register: ubtu20cis_4_1_1_3_cmdline_settings + register: ubtu20cis_5_2_1_3_cmdline_settings - - name: "4.1.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled | Add setting if doesn't exist" + - name: "5.2.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled | Add setting if doesn't exist" ansible.builtin.lineinfile: path: /etc/default/grub regexp: '^GRUB_CMDLINE_LINUX=' - line: 'GRUB_CMDLINE_LINUX="{{ ubtu20cis_4_1_1_3_cmdline_settings.stdout }} audit=1"' - when: "'audit=' not in ubtu20cis_4_1_1_3_cmdline_settings.stdout" - notify: grub update + line: 'GRUB_CMDLINE_LINUX="{{ ubtu20cis_5_2_1_3_cmdline_settings.stdout }} audit=1"' + when: "'audit=' not in ubtu20cis_5_2_1_3_cmdline_settings.stdout" + notify: Grub update - - name: "4.1.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled | Update setting if exists" + - name: "5.2.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled | Update setting if exists" ansible.builtin.replace: dest: /etc/default/grub regexp: 'audit=([0-9]+)' replace: 'audit=1' after: '^GRUB_CMDLINE_LINUX="' before: '"' - notify: grub update - when: "'audit=' in ubtu20cis_4_1_1_3_cmdline_settings.stdout" + notify: Grub update + when: "'audit=' in ubtu20cis_5_2_1_3_cmdline_settings.stdout" when: - - ubtu20cis_rule_4_1_1_3 + - ubtu20cis_rule_5_2_1_3 tags: - level2-server - level2-workstation - automated - patch - - rule_4_1_1_3 + - rule_5_2_1_3 - auditd -- name: "4.1.1.4 | PATCH | Ensure audit_backlog_limit is sufficient" +- name: "5.2.1.4 | PATCH | Ensure audit_backlog_limit is sufficient" block: - - name: "4.1.1.4 | PATCH | Ensure audit_backlog_limit is sufficient | Get current GRUB_CMDLINE_LINUX" + - name: "5.2.1.4 | PATCH | Ensure audit_backlog_limit is sufficient | Get current GRUB_CMDLINE_LINUX" ansible.builtin.shell: grep "GRUB_CMDLINE_LINUX=" /etc/default/grub | cut -f2 -d'"' changed_when: false failed_when: false check_mode: false - register: ubtu20cis_4_1_1_4_cmdline_settings + register: ubtu20cis_5_2_1_4_cmdline_settings - - name: "4.1.1.4 | PATCH | Ensure audit_backlog_limit is sufficient | Add setting if doesn't exist" + - name: "5.2.1.4 | PATCH | Ensure audit_backlog_limit is sufficient | Add setting if doesn't exist" ansible.builtin.lineinfile: path: /etc/default/grub regexp: '^GRUB_CMDLINE_LINUX=' - line: 'GRUB_CMDLINE_LINUX="{{ ubtu20cis_4_1_1_4_cmdline_settings.stdout }} audit_backlog_limit={{ ubtu20cis_audit_back_log_limit }}"' - notify: grub update - when: "'audit_backlog_limit=' not in ubtu20cis_4_1_1_4_cmdline_settings.stdout" + line: 'GRUB_CMDLINE_LINUX="{{ ubtu20cis_5_2_1_4_cmdline_settings.stdout }} audit_backlog_limit={{ ubtu20cis_audit_back_log_limit }}"' + notify: Grub update + when: "'audit_backlog_limit=' not in ubtu20cis_5_2_1_4_cmdline_settings.stdout" - - name: "4.1.1.4 | PATCH | Ensure audit_backlog_limit is sufficient | Update setting if exists" + - name: "5.2.1.4 | PATCH 
| Ensure audit_backlog_limit is sufficient | Update setting if exists" ansible.builtin.replace: dest: /etc/default/grub regexp: 'audit_backlog_limit=([0-9]+)' replace: 'audit_backlog_limit={{ ubtu20cis_audit_back_log_limit }}' after: '^GRUB_CMDLINE_LINUX="' before: '"' - notify: grub update + notify: Grub update when: - - ubtu20cis_rule_4_1_1_4 + - ubtu20cis_rule_5_2_1_4 tags: - level2-server - level2-workstation - automated - patch - - rule_4.1.1.4 + - rule_5.2.1.4 - auditd diff --git a/tasks/section_4/cis_4.1.2.x.yml b/tasks/section_5/cis_5.2.2.x.yml similarity index 77% rename from tasks/section_4/cis_4.1.2.x.yml rename to tasks/section_5/cis_5.2.2.x.yml index f285835e..5cde7328 100644 --- a/tasks/section_4/cis_4.1.2.x.yml +++ b/tasks/section_5/cis_5.2.2.x.yml @@ -1,5 +1,6 @@ --- -- name: "4.1.2.1 | PATCH | Ensure audit log storage size is configured" + +- name: "5.2.2.1 | PATCH | Ensure audit log storage size is configured" ansible.builtin.lineinfile: path: /etc/audit/auditd.conf regexp: "^max_log_file( |=)" @@ -7,32 +8,32 @@ state: present notify: restart auditd when: - - ubtu20cis_rule_4_1_2_1 + - ubtu20cis_rule_5_2_2_1 tags: - level2-server - level2-workstation - automated - patch - - rule_4.1.2.1 + - rule_5.2.2.1 - auditd -- name: "4.1.2.2 | PATCH | Ensure audit logs are not automatically deleted" +- name: "5.2.2.2 | PATCH | Ensure audit logs are not automatically deleted" ansible.builtin.lineinfile: path: /etc/audit/auditd.conf regexp: '^max_log_file_action' line: "max_log_file_action = {{ ubtu20cis_auditd['max_log_file_action'] }}" notify: restart auditd when: - - ubtu20cis_rule_4_1_2_2 + - ubtu20cis_rule_5_2_2_2 tags: - level2-server - level2-workstation - automated - patch - - rule_4.1.2.2 + - rule_5.2.2.2 - auditd -- name: "4.1.2.3 | PATCH | Ensure system is disabled when audit logs are full" +- name: "5.2.2.3 | PATCH | Ensure system is disabled when audit logs are full" ansible.builtin.lineinfile: path: /etc/audit/auditd.conf regexp: "{{ item.regexp }}" @@ -43,11 +44,11 @@ - { regexp: '^admin_space_left_action', line: 'admin_space_left_action = halt' } notify: restart auditd when: - - ubtu20cis_rule_4_1_2_3 + - ubtu20cis_rule_5_2_2_3 tags: - level2-server - level2-workstation - automated - patch - - rule_4.1.2.3 + - rule_5.2.2.3 - auditd diff --git a/tasks/section_5/cis_5.2.3.x.yml b/tasks/section_5/cis_5.2.3.x.yml new file mode 100644 index 00000000..39075bb3 --- /dev/null +++ b/tasks/section_5/cis_5.2.3.x.yml @@ -0,0 +1,381 @@ +--- + +- name: "5.2.3.1 | PATCH | Ensure changes to system administration scope (sudoers) is collected" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_1_scope.rules.j2 + dest: /etc/audit/rules.d/50-scope.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_1 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.1 + - auditd + +- name: "5.2.3.2 | PATCH | Ensure actions as another user are always logged" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_2_user_emulation.rules.j2 + dest: /etc/audit/rules.d/50-user_emulation.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_2 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.2 + - auditd + +- name: "5.2.3.3 | PATCH | Ensure events that modify the sudo log file are collected" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_3_sudo_log.rules.j2 + dest: /etc/audit/rules.d/50-sudo.rules + owner: root + 
group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_3 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.3 + - auditd + +- name: "5.2.3.4 | PATCH | Ensure events that modify date and time information are collected" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_4_timechange.rules.j2 + dest: /etc/audit/rules.d/50-time-change.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_4 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.4 + - auditd + +- name: "5.2.3.5 | PATCH | Ensure events that modify the system's network environment are collected" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_5_systemlocale.rules.j2 + dest: /etc/audit/rules.d/50-system-locale.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_5 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.5 + - auditd + +- name: "5.2.3.6 | PATCH | Ensure use of privileged commands is collected" + block: + - name: "5.2.3.6 | AUDIT | Ensure use of privileged commands is collected | Get list of privileged programs" + ansible.builtin.shell: for i in $(df | grep '^/dev' | awk '{ print $NF }'); do find $i -xdev -type f -perm -4000 -o -type f -perm -2000 2>/dev/null; done + register: priv_procs + changed_when: false + check_mode: false + + - name: "5.2.3.6 | PATCH | Ensure use of privileged commands is collected | Set privileged rules" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_6_privileged.rules.j2 + dest: /etc/audit/rules.d/50-privileged.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_6 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.6 + - auditd + +- name: "5.2.3.7 | PATCH | Ensure unsuccessful file access attempts are collected" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_7_access.rules.j2 + dest: /etc/audit/rules.d/50-access.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_7 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.7 + - auditd + +- name: "5.2.3.8 | PATCH | Ensure events that modify user/group information are collected" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_8_identity.rules.j2 + dest: /etc/audit/rules.d/50-identity.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_8 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.8 + - auditd + +- name: "5.2.3.9 | PATCH | Ensure discretionary access control permission modification events are collected" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_9_permmod.rules.j2 + dest: /etc/audit/rules.d/50-perm_mod.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_9 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.9 + - auditd + +- name: "5.2.3.10 | PATCH | Ensure successful file system mounts are collected" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_10_mount.rules.j2 + dest: /etc/audit/rules.d/50-mount.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_10 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.10 + -
auditd + +- name: "5.2.3.11 | PATCH | Ensure session initiation information is collected" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_11_session.rules.j2 + dest: /etc/audit/rules.d/50-session.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_11 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.11 + - auditd + +- name: "5.2.3.12 | PATCH | Ensure login and logout events are collected" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_12_logins.rules.j2 + dest: /etc/audit/rules.d/50-logins.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_12 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.12 + - auditd + +- name: "5.2.3.13 | PATCH | Ensure file deletion events by users are collected" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_13_delete.rules.j2 + dest: /etc/audit/rules.d/50-delete.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_13 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.13 + - auditd + +- name: "5.2.3.14 | PATCH | Ensure events that modify the system's Mandatory Access Controls are collected" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_14_macpolicy.rules.j2 + dest: /etc/audit/rules.d/50-MAC-policy.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_14 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.14 + - auditd + +- name: "5.2.3.15 | PATCH | Ensure successful and unsuccessful attempts to use the chcon command are recorded" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_x_perm_chng.rules.j2 + dest: /etc/audit/rules.d/50-perm_chng.rules + owner: root + group: root + mode: 0600 + when: + - ubtu20cis_rule_5_2_3_15 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.15 + - auditd + +- name: "5.2.3.16 | PATCH | Ensure successful and unsuccessful attempts to use the setfacl command are recorded" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_x_perm_chng.rules.j2 + dest: /etc/audit/rules.d/50-perm_chng.rules + owner: root + group: root + mode: 0600 + when: + - ubtu20cis_rule_5_2_3_16 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.16 + - auditd + +- name: "5.2.3.17 | PATCH | Ensure successful and unsuccessful attempts to use the chacl command are recorded" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_x_perm_chng.rules.j2 + dest: /etc/audit/rules.d/50-perm_chng.rules + owner: root + group: root + mode: 0600 + when: + - ubtu20cis_rule_5_2_3_17 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.17 + - auditd + +- name: "5.2.3.18 | PATCH | Ensure successful and unsuccessful attempts to use the usermod command are recorded" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_x_perm_chng.rules.j2 + dest: /etc/audit/rules.d/50-perm_chng.rules + owner: root + group: root + mode: 0600 + when: + - ubtu20cis_rule_5_2_3_18 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.18 + - auditd + +- name: "5.2.3.19 | PATCH | Ensure kernel module loading and unloading is collected" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_19_modules.rules.j2 + dest: /etc/audit/rules.d/50-kernel_modules.rules + 
owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_19 + tags: + - level2-server + - level2-workstation + - automated + - patch + - rule_5.2.3.19 + - auditd + +- name: "5.2.3.20 | PATCH | Ensure the audit configuration is immutable" + ansible.builtin.template: + src: audit/ubtu20cis_5_2_3_20_finalize.rules.j2 + dest: /etc/audit/rules.d/99-finalize.rules + owner: root + group: root + mode: 0600 + notify: restart auditd + when: + - ubtu20cis_rule_5_2_3_20 + tags: + - level2-server + - level2-workstation + - automated + - scored + - patch + - rule_5.2.3.20 + - auditd + +- name: "5.2.3.21 | PATCH | Ensure the running and on disk configuration is the same" + ansible.builtin.shell: augenrules --check + changed_when: false + register: ubtu20cis_rule_5_2_3_21_augen_check + when: + - ubtu20cis_rule_5_2_3_21 + tags: + - level2-server + - level2-workstation + - automated + - scored + - patch + - rule_5.2.3.21 + - auditd diff --git a/tasks/section_5/cis_5.2.4.x.yml b/tasks/section_5/cis_5.2.4.x.yml new file mode 100644 index 00000000..a1fd1d6e --- /dev/null +++ b/tasks/section_5/cis_5.2.4.x.yml @@ -0,0 +1,206 @@ +--- + +- name: | + "5.2.4.1 | PATCH | Ensure audit log files are mode 0640 or less permissive" + "5.2.4.2 | PATCH | Ensure only authorized users own audit log files" + "5.2.4.3 | PATCH | Ensure only authorized groups are assigned ownership of audit log files" + + block: + - name: "5.2.4.1 | AUDIT | Ensure audit log files are mode 0640 or less permissive | discover file" + ansible.builtin.shell: grep ^log_file /etc/audit/auditd.conf | awk '{ print $NF }' + changed_when: false + register: audit_discovered_logfile + + - name: "5.2.4.1 | AUDIT | Ensure audit log files are mode 0640 or less permissive | stat file" + ansible.builtin.stat: + path: "{{ audit_discovered_logfile.stdout }}" + changed_when: false + register: auditd_logfile + + - name: | + "5.2.4.1 | PATCH | Ensure audit log files are mode 0640 or less permissive" + "5.2.4.2 | PATCH | Ensure only authorized users own audit log files" + "5.2.4.3 | PATCH | Ensure only authorized groups are assigned ownership of audit log files" + ansible.builtin.file: + path: "{{ audit_discovered_logfile.stdout }}" + mode: "{% if auditd_logfile.stat.mode > '0640' %}0640{% endif %}" + owner: root + group: root + when: + - ubtu20cis_rule_5_2_4_1 or + ubtu20cis_rule_5_2_4_2 or + ubtu20cis_rule_5_2_4_3 + tags: + - level2-server + - level2-workstation + - patch + - auditd + - rule_5.2.4.1 + - rule_5.2.4.2 + - rule_5.2.4.3 + +- name: "5.2.4.4 | PATCH | Ensure the audit log directory is 0750 or more restrictive" + block: + - name: "5.2.4.4 | AUDIT | Ensure the audit log directory is 0750 or more restrictive | get current permissions" + ansible.builtin.stat: + path: "{{ audit_discovered_logfile.stdout | dirname }}" + register: auditlog_dir + + - name: "5.2.4.4 | PATCH | Ensure the audit log directory is 0750 or more restrictive | set" + ansible.builtin.file: + path: "{{ audit_discovered_logfile.stdout | dirname }}" + state: directory + mode: 0750 + when: not auditlog_dir.stat.mode is match('07(0|5)0') + when: + - ubtu20cis_rule_5_2_4_4 + tags: + - level2-server + - level2-workstation + - patch + - auditd + - rule_5.2.4.4 + +- name: "5.2.4.5 | PATCH | Ensure audit configuration files are 640 or more restrictive" + ansible.builtin.file: + path: "{{ item.path }}" + mode: '0640' + loop: "{{ auditd_conf_files.files }}" + loop_control: + label: "{{ item.path }}" + when: + - item.mode > '0640' + - ubtu20cis_rule_5_2_4_5 + tags:
+ - level2-server + - level2-workstation + - patch + - auditd + - rule_5.2.4.5 + +- name: "5.2.4.6 | PATCH | Ensure audit configuration files are owned by root" + ansible.builtin.file: + path: "{{ item.path }}" + owner: root + loop: "{{ auditd_conf_files.files }}" + loop_control: + label: "{{ item.path }}" + when: + - ubtu20cis_rule_5_2_4_6 + tags: + - level2-server + - level2-workstation + - patch + - auditd + - rule_5.2.4.6 + +- name: "5.2.4.7 | PATCH | Ensure audit configuration files belong to group root" + ansible.builtin.file: + path: "{{ item.path }}" + group: root + loop: "{{ auditd_conf_files.files }}" + loop_control: + label: "{{ item.path }}" + when: + - ubtu20cis_rule_5_2_4_7 + tags: + - level2-server + - level2-workstation + - patch + - auditd + - rule_5.2.4.7 + +- name: "5.2.4.8 | PATCH | Ensure audit tools are 755 or more restrictive" + block: + - name: "5.2.4.8 | AUDIT | Get audit binary file stat | get current mode" + ansible.builtin.stat: + path: "{{ item }}" + register: "audit_bins" + loop: + - /sbin/auditctl + - /sbin/aureport + - /sbin/ausearch + - /sbin/autrace + - /sbin/auditd + - /sbin/augenrules + + - name: "5.2.4.8 | PATCH | Ensure audit tools are 755 or more restrictive | set if required" + ansible.builtin.file: + path: "{{ item.item }}" + mode: 0750 + + loop: "{{ audit_bins.results }}" + loop_control: + label: "{{ item.item }}" + when: not item.stat.mode is match('07(0|5)0') + when: + - ubtu20cis_rule_5_2_4_8 + tags: + - level2-server + - level2-workstation + - patch + - auditd + - rule_5.2.4.8 + +- name: "5.2.4.9 | PATCH | Ensure audit tools are owned by root" + ansible.builtin.file: + path: "{{ item }}" + owner: root + group: root + loop: + - /sbin/auditctl + - /sbin/aureport + - /sbin/ausearch + - /sbin/autrace + - /sbin/auditd + - /sbin/augenrules + when: + - ubtu20cis_rule_5_2_4_9 + tags: + - level2-server + - level2-workstation + - patch + - auditd + - rule_5.2.4.9 + +- name: "5.2.4.10 | PATCH | Ensure audit tools belong to group root" + ansible.builtin.file: + path: "{{ item }}" + group: root + loop: + - /sbin/auditctl + - /sbin/aureport + - /sbin/ausearch + - /sbin/autrace + - /sbin/auditd + - /sbin/augenrules + when: + - ubtu20cis_rule_5_2_4_10 + tags: + - level2-server + - level2-workstation + - patch + - auditd + - rule_5.2.4.10 + +- name: "5.2.4.11 | PATCH | Ensure cryptographic mechanisms are used to protect the integrity of audit tools" + ansible.builtin.lineinfile: + path: /etc/aide/aide.conf + regexp: "{{ item }}" + line: "{{ item }}" + loop: + - '# Audit tools' + - /sbin/auditctl p+i+n+u+g+s+b+acl+xattrs+sha512 + - /sbin/aureport p+i+n+u+g+s+b+acl+xattrs+sha512 + - /sbin/ausearch p+i+n+u+g+s+b+acl+xattrs+sha512 + - /sbin/autrace p+i+n+u+g+s+b+acl+xattrs+sha512 + - /sbin/auditd p+i+n+u+g+s+b+acl+xattrs+sha512 + - /sbin/augenrules p+i+n+u+g+s+b+acl+xattrs+sha512 + when: + - ubtu20cis_rule_5_2_4_11 + tags: + - level2-server + - level2-workstation + - patch + - auditd + - rule_5.2.4.11 diff --git a/tasks/section_5/cis_5.2.x.yml b/tasks/section_5/cis_5.2.x.yml deleted file mode 100644 index 231ea26c..00000000 --- a/tasks/section_5/cis_5.2.x.yml +++ /dev/null @@ -1,47 +0,0 @@ ---- - -- name: "5.2.1 | PATCH | Ensure sudo is installed" - ansible.builtin.package: - name: "{{ ubtu20cis_sudo_package }}" - state: present - when: - - ubtu20cis_rule_5_2_1 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.2.1 - - sudo - -- name: "5.2.2 | PATCH | Ensure sudo commands use pty" - ansible.builtin.lineinfile: - path: /etc/sudoers - 
regexp: '^Defaults use_' - line: 'Defaults use_pty' - insertafter: '^Defaults' - when: - - ubtu20cis_rule_5_2_2 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.2.2 - - sudo - -- name: "5.2.3 | PATCH | Ensure sudo log file exists" - ansible.builtin.lineinfile: - path: /etc/sudoers - regexp: '^Defaults logfile' - line: 'Defaults logfile="{{ ubtu20cis_sudo_logfile }}"' - insertafter: '^Defaults' - when: - - ubtu20cis_rule_5_2_3 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.2.3 - - sudo diff --git a/tasks/section_5/cis_5.5.x.yml b/tasks/section_5/cis_5.5.x.yml deleted file mode 100644 index 1391a150..00000000 --- a/tasks/section_5/cis_5.5.x.yml +++ /dev/null @@ -1,289 +0,0 @@ ---- - -- name: "5.5.1.1 | PATCH | Ensure minimum days between password changes is configured" - block: - - name: "5.5.1.1 | PATCH | Ensure minimum days between password changes is configured | Set /etc/login.defs PASS_MIN_DAYS" - ansible.builtin.lineinfile: - path: /etc/login.defs - regexp: '^PASS_MIN_DAYS|^#PASS_MIN_DAYS' - line: 'PASS_MIN_DAYS {{ ubtu20cis_pass.min_days }}' - - - name: "5.5.1.1 | PATCH | Ensure minimum days between password changes is configured | Set existing users PASS_MIN_DAYS" - ansible.builtin.shell: chage --mindays {{ ubtu20cis_pass.min_days }} {{ item }} - failed_when: false - with_items: - - "{{ ubtu20cis_passwd | selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" - when: ubtu20cis_disruption_high - when: - - ubtu20cis_rule_5_5_1_1 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.5.1.1 - - user - - login - -- name: "5.5.1.2 | PATCH | Ensure password expiration is 365 days or less" - block: - - name: "5.5.1.2 | PATCH | Ensure password expiration is 365 days or less | Set /etc/login.defs PASS_MAX_DAYS" - ansible.builtin.lineinfile: - path: /etc/login.defs - regexp: '^PASS_MAX_DAYS|^#PASS_MAX_DAYS' - line: 'PASS_MAX_DAYS {{ ubtu20cis_pass.max_days }}' - insertafter: '# Password aging controls' - - - name: "5.5.1.2 | PATCH | Ensure password expiration is 365 days or less | Set existing users PASS_MAX_DAYS" - ansible.builtin.shell: chage --maxdays {{ ubtu20cis_pass.max_days }} {{ item }} - failed_when: false - with_items: - - "{{ ubtu20cis_passwd | selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" - when: ubtu20cis_disruption_high - when: - - ubtu20cis_rule_5_5_1_2 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.5.1.2 - - user - - login - -- name: "5.5.1.3 | PATCH | Ensure password expiration warning days is 7 or more" - block: - - name: "5.5.1.3 | PATCH | Ensure password expiration warning days is 7 or more | Set /etc/login.defs PASS_WARN_AGE" - ansible.builtin.lineinfile: - path: /etc/login.defs - regexp: '^PASS_WARN_AGE|^#PASS_WARN_AGE' - line: 'PASS_WARN_AGE {{ ubtu20cis_pass.warn_age }}' - - - name: "5.5.1.3 | PATCH | Ensure password expiration warning days is 7 or more | Set existing users PASS_WARN_AGE" - ansible.builtin.shell: chage --warndays {{ ubtu20cis_pass.warn_age }} {{ item }} - failed_when: false - with_items: - - "{{ ubtu20cis_passwd | selectattr('uid', '>=', 1000) | map(attribute='id') | list }}" - when: ubtu20cis_disruption_high - when: - - ubtu20cis_rule_5_5_1_3 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.5.1.3 - - user - - login - -- name: "5.5.1.4 | PATCH | Ensure inactive password lock is 30 days or less" - block: - - name: "5.5.1.4 | AUDIT | Ensure inactive password 
lock is 30 days or less | Get current inactive period" - ansible.builtin.shell: useradd -D | grep INACTIVE | cut -d= -f2 - changed_when: false - failed_when: false - register: ubtu20cis_5_5_1_5_inactive_period - - - name: "5.5.1.4 | AUDIT | Ensure inactive password lock is 30 days or less | Get current users out of compliance" - ansible.builtin.shell: awk -F":" '(/^[^:]+:[^!*]/ && ($7<0 || $7>30)){print $1}' //etc/shadow - changed_when: false - failed_when: false - register: ubtu20cis_5_5_1_4_current_users - - - name: "5.5.1.4 | PATCH | Ensure inactive password lock is 30 days or less | Set inactive period for new users" - ansible.builtin.shell: useradd -D -f {{ ubtu20cis_pass.inactive }} - changed_when: true - failed_when: false - when: ubtu20cis_5_5_1_5_inactive_period.stdout != ubtu20cis_pass.inactive | string - - - name: "5.5.1.4 | PATCH | Ensure inactive password lock is 30 days or less | Set inactive period for existing users" - ansible.builtin.shell: chage --inactive {{ ubtu20cis_pass.inactive }} {{ item }} - failed_when: false - with_items: - - "{{ ubtu20cis_passwd | selectattr('uid', '>=', 1000) | map(attribute='id') | list | intersect(ubtu20cis_5_5_1_4_current_users.stdout_lines) | list }}" - when: - - ubtu20cis_disruption_high - - ubtu20cis_5_5_1_4_current_users.stdout | length > 0 - when: - - ubtu20cis_rule_5_5_1_4 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.5.1.4 - - user - - login - -- name: "5.5.1.5 | PATCH | Ensure all users last password change date is in the past" - block: - - name: "5.5.1.5 | AUDIT | Ensure all users last password change date is in the past | Get current date in Unix Time" - ansible.builtin.shell: echo $(($(date --utc --date "$1" +%s)/86400)) - changed_when: false - failed_when: false - check_mode: false - register: ubtu20cis_5_5_1_5_current_time - - - name: "5.5.1.5 | AUDIT | Ensure all users last password change date is in the past | Get list of users with last changed PW date in future" - ansible.builtin.shell: "cat /etc/shadow | awk -F: '{if($3>{{ ubtu20cis_5_5_1_5_current_time.stdout }})print$1}'" - changed_when: false - failed_when: false - check_mode: false - register: ubtu20cis_5_5_1_5_user_list - - - name: "5.5.1.5 | PATCH | Ensure all users last password change date is in the past | Warn about users" - ansible.builtin.debug: - msg: - - "Warning!! 
The following accounts have the last PW change date in the future" - - "{{ ubtu20cis_5_5_1_5_user_list.stdout_lines }}" - when: ubtu20cis_5_5_1_5_user_list.stdout | length > 0 - - - name: "5.5.1.5 | PATCH | Ensure all users last password change date is in the past | Warn Count" - ansible.builtin.import_tasks: warning_facts.yml - when: ubtu20cis_5_5_1_5_user_list.stdout | length > 0 - - - name: "5.5.1.5 | PATCH | Ensure all users last password change date is in the past | Lock accounts with future PW changed dates" - ansible.builtin.shell: passwd --expire {{ item }} - failed_when: false - with_items: - - "{{ ubtu20cis_5_5_1_5_user_list.stdout_lines }}" - when: - - ubtu20cis_disruption_high - - ubtu20cis_5_5_1_5_user_list.stdout | length > 0 - vars: - warn_control_id: '5.5.1.5' - when: - - ubtu20cis_rule_5_5_1_5 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.5.1.5 - - user - - login - -- name: "5.5.2 | PATCH | Ensure system accounts are secured" - block: - - name: "5.5.2 | PATCH | Ensure system accounts are secured | Set system accounts to login" - ansible.builtin.user: - name: "{{ item }}" - shell: /sbin/nologin - with_items: - - "{{ ubtu20cis_passwd | selectattr('uid', '<', 1000) | map(attribute='id') | list }}" - when: - - item != "root" - - item != "sync" - - item != "shutdown" - - item != "halt" - - - name: "5.5.2 | PATCH | Ensure system accounts are secured | Lock non-root system accounts" - ansible.builtin.user: - name: "{{ item }}" - password_lock: true - with_items: - - "{{ ubtu20cis_passwd | selectattr('uid', '<', 1000) | map(attribute='id') | list }}" - when: - - item != "root" - when: - - ubtu20cis_rule_5_5_2 - - ubtu20cis_disruption_high - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.5.2 - - user - - system - -- name: "5.5.3 | PATCH | Ensure default group for the root account is GID 0" - block: - - name: "5.5.3 | PATCH | Ensure default group for the root account is GID 0 | Set root group to GUID 0" - ansible.builtin.group: - name: root - gid: 0 - - - name: "5.5.3 | PATCH | Ensure default group for the root account is GID 0 | Set root user to root group" - ansible.builtin.user: - name: root - group: root - when: - - ubtu20cis_rule_5_5_3 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.5.3 - - user - - system - -- name: "5.5.4 | PATCH | Ensure default user umask is 027 or more restrictive" - block: - - name: "5.5.4 | AUDIT | Ensure default user umask is 027 or more restrictive" - ansible.builtin.shell: grep -E '^session\s+optional\s+pam_umask.so' /etc/pam.d/common-session - changed_when: false - failed_when: false - check_mode: false - register: ubtu20cis_5_5_4_umask_pam_status - - - name: "5.5.4 | PATCH | Ensure default user umask is 027 or more restrictive" - ansible.builtin.lineinfile: - path: /etc/pam.d/common-session - line: 'session optional pam_umask.so' - insertbefore: '^# end of pam-auth-update config' - when: ubtu20cis_5_5_4_umask_pam_status.stdout | length == 0 - - - name: "5.5.4 | PATCH | Ensure default user umask is 027 or more restrictive" - ansible.builtin.lineinfile: - path: "{{ item }}" - regexp: '(?i)(umask\s*)' - line: '\g<1>{{ ubtu20cis_bash_umask }}' - backrefs: true - with_items: - - /etc/bash.bashrc - - /etc/profile - - /etc/login.defs - - - name: "5.5.4 | PATCH | Ensure default user umask is 027 or more restrictive" - ansible.builtin.lineinfile: - path: /etc/login.defs - regexp: '^USERGROUPS_ENAB' - line: USERGROUPS_ENAB no - when: - - 
ubtu20cis_rule_5_5_4 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.5.4 - - user - -- name: "5.5.5 | PATCH | Ensure default user shell timeout is 900 seconds or less" - ansible.builtin.blockinfile: - create: true - mode: 0644 - dest: "{{ item.dest }}" - state: "{{ item.state }}" - marker: "# {mark} ANSIBLE MANAGED" - block: | - # Set session timeout - CIS ID 5.5.5 - # only set TMOUT if it isn't set yet to avoid a shell error - : ${TMOUT={{ ubtu20cis_shell_session_timeout.timeout }}} - readonly TMOUT - export TMOUT - with_items: - - { dest: "{{ ubtu20cis_shell_session_timeout.file }}", state: present } - - { dest: /etc/profile, state: "{{ (ubtu20cis_shell_session_timeout.file == '/etc/profile') | ternary('present', 'absent') }}" } - - { dest: /etc/bash.bashrc, state: present } - when: - - ubtu20cis_rule_5_5_5 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.5.5 - - user diff --git a/tasks/section_5/cis_5.6.yml b/tasks/section_5/cis_5.6.yml deleted file mode 100644 index faa5e65f..00000000 --- a/tasks/section_5/cis_5.6.yml +++ /dev/null @@ -1,23 +0,0 @@ ---- -- name: "5.6 | AUDIT | Ensure root login is restricted to system console" - block: - - name: "5.6 | AUDIT | Ensure root login is restricted to system console | Get list of all terminals" - ansible.builtin.shell: cat /etc/securetty - changed_when: false - failed_when: false - check_mode: false - register: ubtu20cis_5_6_terminal_list - - - name: "5.6 | AUDIT | Ensure root login is restricted to system console | Warn Count" - ansible.builtin.import_tasks: warning_facts.yml - vars: - warn_control_id: '5.6' - when: - - ubtu20cis_rule_5_6 - tags: - - level1-server - - level1-workstation - - manual - - audit - - rule_5.6 - - user diff --git a/tasks/section_5/cis_5.7.yml b/tasks/section_5/cis_5.7.yml deleted file mode 100644 index 42e1036c..00000000 --- a/tasks/section_5/cis_5.7.yml +++ /dev/null @@ -1,39 +0,0 @@ ---- - -- name: "5.7 | PATCH | Ensure access to the su command is restricted" - block: - - name: "5.7 | PATCH | Ensure access to the su command is restricted | Check for pam_wheel.so module" - ansible.builtin.shell: grep '^auth.*required.*pam_wheel' /etc/pam.d/su - changed_when: false - failed_when: false - check_mode: false - register: ubtu20cis_5_7_pam_wheel_status - - - name: "5.7 | PATCH | Ensure access to the su command is restricted | Create empty sugroup" - ansible.builtin.group: - name: "{{ ubtu20cis_su_group }}" - - - name: "5.7 | PATCH | Ensure access to the su command is restricted | Set pam_wheel if exists" - community.general.pamd: - name: su - type: auth - control: required - module_path: pam_wheel.so - module_arguments: 'use_uid group={{ ubtu20cis_su_group }}' - when: ubtu20cis_5_7_pam_wheel_status.stdout | length > 0 - - - name: "5.7 | PATCH | Ensure access to the su command is restricted | Set pam_wheel if does not exist" - ansible.builtin.lineinfile: - path: /etc/pam.d/su - line: 'auth required pam_wheel.so use_uid group={{ ubtu20cis_su_group }}' - create: true - when: ubtu20cis_5_7_pam_wheel_status.stdout | length == 0 - when: - - ubtu20cis_rule_5_7 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_5.7 - - user diff --git a/tasks/section_5/main.yml b/tasks/section_5/main.yml index 355469c1..e9815ad3 100644 --- a/tasks/section_5/main.yml +++ b/tasks/section_5/main.yml @@ -1,21 +1,30 @@ --- -- name: "SECTION | 5.1 | Configure time-based job schedulers" - ansible.builtin.import_tasks: cis_5.1.x.yml +- name: "SECTION | 
5.1.1.1 | Configure journald remote" + ansible.builtin.import_tasks: cis_5.1.1.1.x.yml + when: + - ubtu20cis_syslog_service == 'journald' -- name: "SECTION | 5.2 | Configure sudo" - ansible.builtin.import_tasks: cis_5.2.x.yml +- name: "SECTION | 5.1.1 | Configure journald remote" + ansible.builtin.import_tasks: cis_5.1.1.x.yml + when: + - ubtu20cis_syslog_service == 'journald' -- name: "SECTION | 5.3 | Configure SSH Server" - ansible.builtin.import_tasks: cis_5.3.x.yml +- name: "SECTION | 5.1.2 | Configure rsyslog" + ansible.builtin.import_tasks: cis_5.1.2.x.yml + when: + - ubtu20cis_syslog_service == 'rsyslog' -- name: "SECTION | 5.4.x | User PAM" - ansible.builtin.import_tasks: cis_5.4.x.yml +- name: "SECTION | 5.1.3 | Configure logfiles" + ansible.builtin.import_tasks: cis_5.1.3.yml -- name: "SECTION | 5.5.x | User Accounts and Environment" - ansible.builtin.import_tasks: cis_5.5.x.yml +- name: "SECTION | 5.2.1.x | Configure auditd" + ansible.builtin.import_tasks: cis_5.2.1.x.yml -- name: "SECTION | 5.6 | Ensure root login is restricted to system console" - ansible.builtin.import_tasks: cis_5.6.yml +- name: "SECTION | 5.2.2.x | Configure auditd data retention" + ansible.builtin.import_tasks: cis_5.2.2.x.yml -- name: "SECTION | 5.7 | Ensure access to the su command is restricted" - ansible.builtin.import_tasks: cis_5.7.yml +- name: "SECTION | 5.2.3.x | Configure auditd rules" + ansible.builtin.import_tasks: cis_5.2.3.x.yml + +- name: "SECTION | 5.2.4.x | Configure auditd file access" + ansible.builtin.import_tasks: cis_5.2.4.x.yml diff --git a/tasks/section_6/cis_6.1.x.yml b/tasks/section_6/cis_6.1.x.yml index 4eb9cd3a..ebfee008 100644 --- a/tasks/section_6/cis_6.1.x.yml +++ b/tasks/section_6/cis_6.1.x.yml @@ -1,40 +1,27 @@ --- -- name: "6.1.1 | AUDIT | Audit system file permissions" - block: - - name: "6.1.1 | AUDIT | Audit system file permissions | Register package list" - ansible.builtin.shell: ls -a /bin/ - changed_when: false - failed_when: false - check_mode: false - register: ubtu20cis_6_1_1_packages - - name: "6.1.1 | AUDIT | Audit system file permissions | Message out packages results for review" - ansible.builtin.debug: - msg: - - "Warning!! Below are the packages that need to be reviewed." 
- - "You can run dpkg --verify and if nothing is returned the package is installed correctly" - - "{{ ubtu20cis_6_1_1_packages.stdout_lines }}" - - - name: "6.1.1 | AUDIT | Audit system file permissions | Warn Count" - ansible.builtin.import_tasks: warning_facts.yml - vars: - warn_control_id: '6.1.1' +- name: "6.1.1 | PATCH | Ensure permissions on /etc/passwd are configured" + ansible.builtin.file: + path: /etc/passwd + owner: root + group: root + mode: 0644 when: - ubtu20cis_rule_6_1_1 tags: - - level2-server - - level2-workstation - - manual - - audit + - level1-server + - level1-workstation + - automated + - patch - rule_6.1.1 - permissions -- name: "6.1.2 | PATCH | Ensure permissions on /etc/passwd are configured" +- name: "6.1.2 | PATCH | Ensure permissions on /etc/passwd- are configured" ansible.builtin.file: - path: /etc/passwd + path: /etc/passwd- owner: root group: root - mode: 0644 + mode: 0600 when: - ubtu20cis_rule_6_1_2 tags: @@ -45,12 +32,12 @@ - rule_6.1.2 - permissions -- name: "6.1.3 | PATCH | Ensure permissions on /etc/passwd- are configured" +- name: "6.1.3 | PATCH | Ensure permissions on /etc/group are configured" ansible.builtin.file: - path: /etc/passwd- + path: /etc/group owner: root group: root - mode: 0600 + mode: 0644 when: - ubtu20cis_rule_6_1_3 tags: @@ -61,9 +48,9 @@ - rule_6.1.3 - permissions -- name: "6.1.4 | PATCH | Ensure permissions on /etc/group are configured" +- name: "6.1.4 | PATCH | Ensure permissions on /etc/group- are configured" ansible.builtin.file: - path: /etc/group + path: /etc/group- owner: root group: root mode: 0644 @@ -77,12 +64,12 @@ - rule_6.1.4 - permissions -- name: "6.1.5 | PATCH | Ensure permissions on /etc/group- are configured" +- name: "6.1.5 | PATCH | Ensure permissions on /etc/shadow are configured" ansible.builtin.file: - path: /etc/group- + path: /etc/shadow owner: root - group: root - mode: 0644 + group: shadow + mode: 0640 when: - ubtu20cis_rule_6_1_5 tags: @@ -93,9 +80,9 @@ - rule_6.1.5 - permissions -- name: "6.1.6 | PATCH | Ensure permissions on /etc/shadow are configured" +- name: "6.1.6 | PATCH | Ensure permissions on /etc/shadow- are configured" ansible.builtin.file: - path: /etc/shadow + path: /etc/shadow- owner: root group: shadow mode: 0640 @@ -109,9 +96,9 @@ - rule_6.1.6 - permissions -- name: "6.1.7 | PATCH | Ensure permissions on /etc/shadow- are configured" +- name: "6.1.7 | PATCH | Ensure permissions on /etc/gshadow are configured" ansible.builtin.file: - path: /etc/shadow- + path: /etc/gshadow owner: root group: shadow mode: 0640 @@ -125,9 +112,9 @@ - rule_6.1.7 - permissions -- name: "6.1.8 | PATCH | Ensure permissions on /etc/gshadow are configured" +- name: "6.1.8 | PATCH | Ensure permissions on /etc/gshadow- are configured" ansible.builtin.file: - path: /etc/gshadow + path: /etc/gshadow- owner: root group: shadow mode: 0640 @@ -141,12 +128,12 @@ - rule_6.1.8 - permissions -- name: "6.1.9 | PATCH | Ensure permissions on /etc/gshadow- are configured" +- name: "6.1.9 | PATCH | Ensure permissions on /etc/shells are configured" ansible.builtin.file: - path: /etc/gshadow- + path: /etc/shells owner: root - group: shadow - mode: 0640 + group: root + mode: 0644 when: - ubtu20cis_rule_6_1_9 tags: @@ -157,93 +144,120 @@ - rule_6.1.9 - permissions -- name: "6.1.10 | PATCH | Ensure no world writable files exist" +- name: "6.1.10 | PATCH | Ensure permissions on /etc/opasswd are configured" + block: + - name: "6.1.10 | PATCH | Ensure permissions on /etc/opasswd are configured | stat opasswd" + ansible.builtin.stat: + 
path: /etc/opasswd + register: opasswd + + - name: "6.1.10 | PATCH | Ensure permissions on /etc/opasswd are configured | opasswd" + ansible.builtin.file: + path: /etc/opasswd + owner: root + group: root + mode: 0600 + state: touch + when: opasswd.stat.exists + + - name: "6.1.10 | PATCH | Ensure permissions on /etc/opasswd are configured | stat opasswd.old" + ansible.builtin.stat: + path: /etc/opasswd.old + register: opasswd_old + + - name: "6.1.10 | PATCH | Ensure permissions on /etc/opasswd are configured | set opasswd.old is exists" + ansible.builtin.file: + path: /etc/opasswd.old + owner: root + group: root + mode: 0600 + when: opasswd_old.stat.exists + when: + - ubtu20cis_rule_6_1_10 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.10 + - permissions + +- name: "6.1.11 | PATCH | Ensure world writable files and directories are secured" block: - - name: "6.1.10 | AUDIT | Ensure no world writable files exist | Get list of world-writable files" + - name: "6.1.11 | AUDIT | Ensure world writable files and directories are secured | Get list of world-writable files" ansible.builtin.shell: find {{ item.mount }} -xdev -type f -perm -0002 -not -fstype nfs changed_when: false failed_when: false check_mode: false - register: ubtu20cis_6_1_10_wwf + register: ubtu20cis_6_1_11_wwf with_items: - "{{ ansible_mounts }}" loop_control: label: "{{ item.mount }}" - - name: "6.1.10 | PATCH | Ensure no world writable files exist | Adjust world-writable files if they exist" + - name: "6.1.11 | PATCH | Ensure world writable files and directories are secured | Adjust world-writable files if they exist" ansible.builtin.file: path: "{{ item }}" mode: o-w with_items: - - "{{ ubtu20cis_6_1_10_wwf.results | map(attribute='stdout_lines') | flatten }}" + - "{{ ubtu20cis_6_1_11_wwf.results | map(attribute='stdout_lines') | flatten }}" when: ubtu20cis_no_world_write_adjust when: - - ubtu20cis_rule_6_1_10 + - ubtu20cis_rule_6_1_11 tags: - level1-server - level1-workstation - automated - patch - - rule_6.1.10 + - rule_6.1.11 - permissions -- name: "6.1.11 | PATCH | Ensure no unowned files or directories exist" +- name: "6.1.12 | PATCH | Ensure no unowned or ungrouped files or directories exist" block: - - name: "6.1.11 | AUDIT | Ensure no unowned files or directories exist | Get unowned files or directories" + - name: "6.1.12 | AUDIT | Ensure no unowned or ungrouped files or directories exist | Get unowned files or directories" ansible.builtin.shell: find {{ item.mount }} -xdev -nouser -not -fstype nfs changed_when: false failed_when: false check_mode: false - register: ubtu20cis_6_1_11_no_user_items + register: ubtu20cis_6_1_12_no_user_items with_items: - "{{ ansible_mounts }}" loop_control: label: "{{ item.mount }}" - - name: "6.1.11 | AUDIT | Ensure no unowned files or directories exist | Flatten no_user_items results for easier use" + - name: "6.1.12 | AUDIT | Ensure no unowned or ungrouped files or directories exist | Flatten no_user_items results for easier use" ansible.builtin.set_fact: - ubtu20cis_6_1_11_no_user_items_flatten: "{{ ubtu20cis_6_1_11_no_user_items.results | map(attribute='stdout_lines') | flatten }}" + ubtu20cis_6_1_12_no_user_items_flatten: "{{ ubtu20cis_6_1_12_no_user_items.results | map(attribute='stdout_lines') | flatten }}" - - name: "6.1.11 | AUDIT | Ensure no unowned files or directories exist | Alert on unowned files and directories" + - name: "6.1.12 | AUDIT | Ensure no unowned or ungrouped files or directories exist | Alert on unowned files and 
directories" ansible.builtin.debug: msg: - "Warning!! You have unowned files and are configured to not auto-remediate for this task" - "Please review the files/directories below and assign an owner" - - "{{ ubtu20cis_6_1_11_no_user_items_flatten }}" + - "{{ ubtu20cis_6_1_12_no_user_items_flatten }}" when: - not ubtu20cis_no_owner_adjust - - ubtu20cis_6_1_11_no_user_items_flatten | length > 0 + - ubtu20cis_6_1_12_no_user_items_flatten | length > 0 - - name: "6.1.11 | PATCH | Ensure no unowned files or directories exist | Set unowned files/directories to configured owner" + - name: "6.1.12 | PATCH | Ensure no unowned or ungrouped files or directories exist | Set unowned files/directories to configured owner" ansible.builtin.file: path: "{{ item }}" owner: "{{ ubtu20cis_unowned_owner }}" with_items: - - "{{ ubtu20cis_6_1_11_no_user_items_flatten }}" + - "{{ ubtu20cis_6_1_12_no_user_items_flatten }}" when: - ubtu20cis_no_owner_adjust - - ubtu20cis_6_1_11_no_user_items_flatten | length > 0 + - ubtu20cis_6_1_12_no_user_items_flatten | length > 0 - - name: "6.1.11 | AUDIT | Ensure no unowned files or directories exist | Warn Count" + - name: "6.1.12 | AUDIT | Ensure no unowned or ungrouped files or directories exist | Warn Count" ansible.builtin.import_tasks: warning_facts.yml when: - not ubtu20cis_no_owner_adjust - - ubtu20cis_6_1_11_no_user_items_flatten | length > 0 + - ubtu20cis_6_1_12_no_user_items_flatten | length > 0 vars: - warn_control_id: '6.1.11' - when: - - ubtu20cis_rule_6_1_11 - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_6.1.11 - - permissions + warn_control_id: '6.1.12_unowned' -- name: "6.1.12 | PATCH | Ensure no ungrouped files or directories exist" - block: - - name: "6.1.12 | AUDIT | Ensure no ungrouped files or directories exist | Get ungrouped fiels or directories" + - name: "6.1.12 | AUDIT | Ensure no unowned or ungrouped files or directories exist | Get ungrouped fiels or directories" ansible.builtin.shell: find {{ item.mount }} -xdev -nogroup -not -fstype nfs changed_when: false failed_when: false @@ -254,11 +268,11 @@ loop_control: label: "{{ item.mount }}" - - name: "6.1.12 | AUDIT | Ensure no ungrouped files or directories exist | Flatten ungrouped_items results for easier use" + - name: "6.1.12 | AUDIT | Ensure no unowned or ungrouped files or directories exist | Flatten ungrouped_items results for easier use" ansible.builtin.set_fact: ubtu20cis_6_1_12_ungrouped_items_flatten: "{{ ubtu20cis_6_1_12_ungrouped_items.results | map(attribute='stdout_lines') | flatten }}" - - name: "6.1.12 | AUDIT | Ensure no ungrouped files or directories exist | Alert on ungrouped files and directories" + - name: "6.1.12 | AUDIT | Ensure no unowned or ungrouped files or directories exist | Alert on ungrouped files and directories" ansible.builtin.debug: msg: - "Warning!! 
You have ungrouped files/directories and are configured to not auto-remediate for this task" @@ -284,7 +298,7 @@ - not ubtu20cis_no_group_adjust - ubtu20cis_6_1_12_ungrouped_items_flatten | length > 0 vars: - warn_control_id: '6.1.12' + warn_control_id: '6.1.12_ungrouped' when: - ubtu20cis_rule_6_1_12 tags: @@ -295,9 +309,9 @@ - rule_6.1.12 - permissions -- name: "6.1.13 | AUDIT | Audit SUID executables" +- name: "6.1.13 | AUDIT | Ensure SUID and SGID files are reviewed" block: - - name: "6.1.13 | AUDIT | Audit SUID executables | Find SUID executables" + - name: "6.1.13 | AUDIT | Ensure SUID and SGID files are reviewed | Find SUID executables" ansible.builtin.shell: find {{ item.mount }} -xdev -type f -perm -4000 -not -fstype nfs changed_when: false failed_when: false @@ -308,11 +322,11 @@ loop_control: label: "{{ item.mount }}" - - name: "6.1.13 | AUDIT | Audit SUID executables | Flatten suid_executables results for easier use" + - name: "6.1.13 | AUDIT | Ensure SUID and SGID files are reviewed | Flatten suid_executables results for easier use" ansible.builtin.set_fact: ubtu20cis_6_1_13_suid_executables_flatten: "{{ ubtu20cis_6_1_13_suid_executables.results | map(attribute='stdout_lines') | flatten }}" - - name: "6.1.13 | AUDIT | Audit SUID executables | Alert SUID executables exist" + - name: "6.1.13 | AUDIT | Ensure SUID and SGID files are reviewed | Alert SUID executables exist" ansible.builtin.debug: msg: - "Warning!! You have SUID executables" @@ -322,7 +336,7 @@ - ubtu20cis_6_1_13_suid_executables_flatten | length > 0 - not ubtu20cis_suid_adjust - - name: "6.1.13 | PATCH | Audit SUID executables | Remove SUID bit" + - name: "6.1.13 | PATCH | Ensure SUID and SGID files are reviewed | Remove SUID bit" ansible.builtin.file: path: "{{ item }}" mode: 'u-s' @@ -332,73 +346,62 @@ - ubtu20cis_suid_adjust - ubtu20cis_6_1_13_suid_executables_flatten | length > 0 - - name: "6.1.13 | AUDIT | Audit SUID executables | Warn Count" + - name: "6.1.13 | AUDIT | Ensure SUID and SGID files are reviewed | SUID Warn Count" ansible.builtin.import_tasks: warning_facts.yml when: - ubtu20cis_6_1_13_suid_executables_flatten | length > 0 - not ubtu20cis_suid_adjust vars: - warn_control_id: '6.1.13' - when: - - ubtu20cis_rule_6_1_13 - tags: - - level1-server - - level1-workstation - - manual - - audit - - rule_6.1.13 - - permissions + warn_control_id: '6.1.13_suid' -- name: "6.1.14 | AUDIT | Audit SGID executables" - block: - - name: "6.1.14 | AUDIT | Audit SGID executables | Find SGID executables" + - name: "6.1.13 | AUDIT | Ensure SUID and SGID files are reviewed | Find SGID executables" ansible.builtin.shell: find {{ item }} -xdev -type f -perm -2000 -not -fstype nfs changed_when: false failed_when: false check_mode: false - register: ubtu20cis_6_1_14_sgid_executables + register: ubtu20cis_6_1_13_sgid_executables with_items: - "{{ ansible_mounts }}" loop_control: label: "{{ item.mount }}" - - name: "6.1.14 | AUDIT | Audit SGID executables | Flatten sgid_executables results for easier use" + - name: "6.1.13 | AUDIT | Ensure SUID and SGID files are reviewed | Flatten sgid_executables results for easier use" ansible.builtin.set_fact: - ubtu20cis_6_1_14_sgid_executables_flatten: "{{ ubtu20cis_6_1_14_sgid_executables.results | map(attribute='stdout_lines') | flatten }}" + ubtu20cis_6_1_13_sgid_executables_flatten: "{{ ubtu20cis_6_1_13_sgid_executables.results | map(attribute='stdout_lines') | flatten }}" - - name: "6.1.14 | AUDIT | Audit SGID executables | Alert SGID executables exist" + - name: "6.1.13 | AUDIT | 
Ensure SUID and SGID files are reviewed | Alert SGID executables exist" ansible.builtin.debug: msg: - "Warning!! You have SGID executables" - "The files are listed below, please review the integrity of these binaries" - - "{{ ubtu20cis_6_1_14_sgid_executables_flatten }}" + - "{{ ubtu20cis_6_1_13_sgid_executables_flatten }}" when: - - ubtu20cis_6_1_14_sgid_executables_flatten | length > 0 + - ubtu20cis_6_1_13_sgid_executables_flatten | length > 0 - not ubtu20cis_sgid_adjust - - name: "6.1.14 | AUDIT | Audit SGID executables | Warn Count" + - name: "6.1.13 | AUDIT | Ensure SUID and SGID files are reviewed | SGID Warn Count" ansible.builtin.import_tasks: warning_facts.yml when: - - ubtu20cis_6_1_14_sgid_executables_flatten | length > 0 + - ubtu20cis_6_1_13_sgid_executables_flatten | length > 0 - not ubtu20cis_sgid_adjust + vars: + warn_control_id: '6.1.13_sgid' - - name: "6.1.14 | PATCH | Audit SGID executables | Remove SGID bit" + - name: "6.1.13 | PATCH | Ensure SUID and SGID files are reviewed | Remove SGID bit" ansible.builtin.file: path: "{{ item }}" mode: 'g-s' with_items: - - "{{ ubtu20cis_6_1_14_sgid_executables_flatten }}" + - "{{ ubtu20cis_6_1_13_sgid_executables_flatten }}" when: - ubtu20cis_sgid_adjust - - ubtu20cis_6_1_14_sgid_executables_flatten | length > 0 - vars: - warn_control_id: '6.1.14' + - ubtu20cis_6_1_13_sgid_executables_flatten | length > 0 when: - - ubtu20cis_rule_6_1_14 + - ubtu20cis_rule_6_1_13 tags: - level1-server - level1-workstation - manual - audit - - rule_6.1.14 + - rule_6.1.13 - permissions diff --git a/tasks/section_6/cis_6.2.x.yml b/tasks/section_6/cis_6.2.x.yml index 793eeb79..1a757c79 100644 --- a/tasks/section_6/cis_6.2.x.yml +++ b/tasks/section_6/cis_6.2.x.yml @@ -29,15 +29,15 @@ - rule_6.2.1 - user_accounts -- name: "6.2.2 | PATCH | Ensure password fields are not empty" +- name: "6.2.2 | PATCH | Ensure /etc/shadow password fields are not empty " block: - - name: "6.2.2 | AUDIT | Ensure password fields are not empty | Find users with no password" + - name: "6.2.2 | AUDIT | Ensure /etc/shadow password fields are not empty | Find users with no password" ansible.builtin.shell: awk -F":" '($2 == "" ) { print $1 }' /etc/shadow changed_when: false check_mode: false register: ubtu20cis_6_2_2_empty_password_acct - - name: "6.2.2 | PATCH | Ensure password fields are not empty | Lock users with empty password" + - name: "6.2.2 | PATCH | Ensure /etc/shadow password fields are not empty | Lock users with empty password" ansible.builtin.user: name: "{{ item }}" password_lock: true @@ -84,45 +84,24 @@ - rule_6.2.3 - groups -- name: "6.2.4 | PATCH | Ensure all users' home directories exist" +- name: "6.2.4 | PATCH | Ensure shadow group is empty" block: - - name: capture audit task for missing homedirs - block: &u20s_homedir_audit - - name: "6.2.4 | PATCH | Ensure all users' home directories exist | Find users missing home directories" - ansible.builtin.shell: pwck -r | grep -vF -e "'/nonexistent'" | grep -P {{ ld_regex | quote }} - check_mode: false - register: ubtu20cis_users_missing_home - changed_when: ubtu20cis_6_2_4_audit | length > 0 - # failed_when: 0: success, 1: false grep match, 2: pwck found something - failed_when: ubtu20cis_users_missing_home.rc not in [0,1,2] - - ### NOTE: due to https://github.com/ansible/ansible/issues/24862 This is a shell command, and is quite frankly less than ideal. 
- - name: "6.2.4 | PATCH | Ensure all users' home directories exist| Creates home directories" - ansible.builtin.shell: "mkhomedir_helper {{ item }}" - # check_mode: "{{ ubtu20cis_disruptive_check_mode }}" - with_items: "{{ ubtu20cis_6_2_4_audit | map(attribute='id') | list }}" - when: - - ubtu20cis_users_missing_home is changed - - ubtu20cis_disruption_high + - name: "6.2.4 | AUDIT | Ensure shadow group is empty | check users in group" + ansible.builtin.getent: + database: group + split: ':' + key: shadow - ### NOTE: falsew we need to address that SELINUX will not let mkhomedir_helper create home directories for UUID < 500, so the ftp user will still show up in a pwck. Not sure this is needed, I need to confirm if that user is removed in an earlier task. - ### ^ Likely doesn't matter as 6.2.7 defines "local interactive users" as those w/ uid 1000-4999 - - name: replay audit task - block: *u20s_homedir_audit - - # CAUTION: debug loops don't show changed since 2.4: - # Fix: https://github.com/ansible/ansible/pull/59958 - - name: "6.2.4 | PATCH | Ensure all users' home directories exist | Alert about correcting owner and group" - ansible.builtin.debug: msg="You will need to mkdir -p {{ item }} and chown properly to the correct owner and group." - with_items: "{{ ubtu20cis_6_2_4_audit | map(attribute='dir') | list }}" - changed_when: ubtu20cis_audit_complex - when: - - ubtu20cis_users_missing_home is changed + - name: "6.2.4 | AUDIT | Ensure shadow group is empty | check users in group" + ansible.builtin.debug: + msg: "Warning!! - You have users in the shadow group" + when: getent_group.shadow[2] | length > 0 + + - name: "6.2.4 | AUDIT | Ensure shadow group is empty | check users in group" + ansible.builtin.import_tasks: warning_facts.yml + when: getent_group.shadow[2] | length > 0 vars: - ld_regex: >- - ^user '(?P.*)': directory '(?P.*)' does not exist$ - ld_users: "{{ ubtu20cis_users_missing_home.stdout_lines | map('regex_replace', ld_regex, '\\g') | list }}" - ubtu20cis_6_2_4_audit: "{{ ubtu20cis_passwd | selectattr('uid', '>=', 1000) | selectattr('id', 'in', ld_users) | list }}" + warn_control_id: '6.2.4' when: - ubtu20cis_rule_6_2_4 tags: @@ -133,438 +112,323 @@ - rule_6.2.4 - user -- name: "6.2.5 | PATCH | Ensure users own their home directories" - ansible.builtin.file: - path: "{{ item.dir }}" - owner: "{{ item.id }}" - state: directory - with_items: - - "{{ ubtu20cis_passwd }}" - loop_control: - label: "{{ ubtu20cis_passwd_label }}" +- name: "6.2.5 | AUDIT | Ensure no duplicate UIDs exist" + block: + - name: "6.2.5 | AUDIT | Ensure no duplicate UIDs exist | Check for duplicate UIDs" + ansible.builtin.shell: "pwck -r | awk -F: '{if ($3 in uid) print $1 ; else uid[$3]}' /etc/passwd" + changed_when: false + failed_when: false + check_mode: false + register: ubtu20cis_6_2_5_user_uid_check + + - name: "6.2.5 | AUDIT | Ensure no duplicate UIDs exist | Warn Count" + ansible.builtin.import_tasks: warning_facts.yml + when: ubtu20cis_6_2_5_user_uid_check.stdout | length > 0 + + - name: "6.2.5 | AUDIT | Ensure no duplicate UIDs exist | Print warning about users with duplicate UIDs" + ansible.builtin.debug: + msg: "Warning!! 
The following users have UIDs that are duplicates: {{ ubtu20cis_6_2_5_user_uid_check.stdout_lines }}" + when: ubtu20cis_6_2_5_user_uid_check.stdout | length > 0 + vars: + warn_control_id: '6.2.5' when: - ubtu20cis_rule_6_2_5 - - item.uid >= 1000 - - item.dir != '/nonexistent' tags: - level1-server - level1-workstation - automated - - patch + - audit - rule_6.2.5 - user -- name: "6.2.6 | PATCH | Ensure users' home directories permissions are 750 or more restrictive" +- name: "6.2.6 | AUDIT | Ensure no duplicate GIDs exist" block: - - name: "6.2.6 | AUDIT | Ensure users' home directories permissions are 750 or more restrictive | Get home directories" - ansible.builtin.stat: - path: "{{ item }}" - with_items: "{{ ubtu20cis_passwd | selectattr('uid', '>=', 1000) | selectattr('uid', '!=', 65534) | map(attribute='dir') | list }}" - check_mode: false - register: ubtu20cis_6_2_6_audit - - - name: "6.2.6 | AUDIT | Ensure users' home directories permissions are 750 or more restrictive | Find home directories more 750" - ansible.builtin.shell: find -H {{ item.0 | quote }} -not -type l -perm /027 - register: ubtu20cis_6_2_6_patch_audit - changed_when: ubtu20cis_6_2_6_patch_audit.stdout | length > 0 + - name: "6.2.6 | AUDIT | Ensure no duplicate GIDs exist | Check for duplicate GIDs" + ansible.builtin.shell: "grpck -r | awk -F: '{if ($3 in gid) print $1 ; else gid[$3]}' /etc/group" + changed_when: false + failed_when: false check_mode: false - when: - - item.1.exists - with_together: - - "{{ ubtu20cis_6_2_6_audit.results | map(attribute='item') | list }}" - - "{{ ubtu20cis_6_2_6_audit.results | map(attribute='stat') | list }}" - loop_control: - label: "{{ item.0 }}" + register: ubtu20cis_6_2_6_group_gid_check - - name: "6.2.6 | PATCH | Ensure users' home directories permissions are 750 or more restrictive | Set home perms" - ansible.builtin.file: - path: "{{ item.0 }}" - recurse: true - mode: a-st,g-w,o-rwx - register: ubtu20cis_6_2_6_patch - when: - - ubtu20cis_disruption_high - - item.1.exists - with_together: - - "{{ ubtu20cis_6_2_6_audit.results | map(attribute='item') | list }}" - - "{{ ubtu20cis_6_2_6_audit.results | map(attribute='stat') | list }}" - loop_control: - label: "{{ item.0 }}" + - name: "6.2.6 | AUDIT | Ensure no duplicate GIDs exist | Print warning about groups with duplicate GIDs" + ansible.builtin.debug: + msg: "Warning!!
The following GIDs are duplicates: {{ ubtu20cis_6_2_6_group_gid_check.stdout_lines }}" + when: ubtu20cis_6_2_6_group_gid_check.stdout | length > 0 - # set default ACLs so the homedir has an effective umask of 0027 - - name: "6.2.6 | PATCH | Ensure users' home directories permissions are 750 or more restrictive | Set ACL's" - ansible.posix.acl: - path: "{{ item.0 }}" - default: true - state: present - recursive: true - etype: "{{ item.1.etype }}" - permissions: "{{ item.1.mode }}" - when: not ubtu20cis_system_is_container - with_nested: - - "{{ (ansible_check_mode | ternary(ubtu20cis_6_2_6_patch_audit, ubtu20cis_6_2_6_patch)).results | - rejectattr('skipped', 'defined') | map(attribute='item') | map('first') | list }}" - - - - etype: group - mode: rx - - etype: other - mode: '0' + - name: "6.2.6 | AUDIT | Ensure no duplicate GIDs exist | Warn Count" + ansible.builtin.import_tasks: warning_facts.yml + when: ubtu20cis_6_2_6_group_gid_check.stdout | length > 0 + vars: + warn_control_id: '6.2.6' when: - ubtu20cis_rule_6_2_6 - - ubtu20cis_disruption_high tags: - level1-server - level1-workstation - automated - - patch + - audit - rule_6.2.6 - user -- name: "6.2.7 | PATCH | Ensure users' dot files are not group or world writable" +- name: "6.2.7 | AUDIT | Ensure no duplicate user names exist" block: - - name: "6.2.7 | AUDIT | Ensure users' dot files are not group or world-writable | Check for files" - ansible.builtin.shell: find /home/ -name "\.*" -perm /g+w,o+w + - name: "6.2.7 | AUDIT | Ensure no duplicate user names exist | Check for duplicate User Names" + ansible.builtin.shell: "pwck -r | awk -F: '{if ($1 in users) print $1 ; else users[$1]}' /etc/passwd" changed_when: false failed_when: false check_mode: false - register: ubtu20cis_6_2_7_audit + register: ubtu20cis_6_2_7_user_username_check - - name: "6.2.7 | PATCH | Ensure users' dot files are not group or world-writable | Changes files if configured" - ansible.builtin.file: - path: '{{ item }}' - mode: go-w - with_items: "{{ ubtu20cis_6_2_7_audit.stdout_lines }}" - when: - - ubtu20cis_6_2_7_audit.stdout | length > 0 - - ubtu20cis_dotperm_ansibleManaged + - name: "6.2.7 | AUDIT | Ensure no duplicate user names exist | Print warning about users with duplicate User Names" + ansible.builtin.debug: + msg: "Warning!!
The following user names are duplicates: {{ ubtu20cis_6_2_7_user_username_check.stdout_lines }}" + when: ubtu20cis_6_2_7_user_username_check.stdout | length > 0 + + - name: "6.2.7 | AUDIT | Ensure no duplicate user names exist | Set warning count" + ansible.builtin.import_tasks: warning_facts.yml + when: ubtu20cis_6_2_7_user_username_check.stdout | length > 0 + vars: + warn_control_id: '6.2.7' when: - ubtu20cis_rule_6_2_7 - - ubtu20cis_disruption_high tags: - level1-server - level1-workstation - automated - - patch + - audit - rule_6.2.7 - user -- name: "6.2.8 | PATCH | Ensure no users have .netrc files" - ansible.builtin.file: - dest: "~{{ item }}/.netrc" - state: absent - with_items: - - "{{ ubtu20cis_users.stdout_lines }}" - when: - - ubtu20cis_rule_6_2_8 - - ubtu20cis_disruption_high - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_6.2.8 - - user - -- name: "6.2.9 | PATCH | Ensure no users have .forward files" - ansible.builtin.file: - dest: "~{{ item }}/.forward" - state: absent - with_items: - - "{{ ubtu20cis_users.stdout_lines }}" - when: - - ubtu20cis_rule_6_2_9 - - ubtu20cis_disruption_high - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_6.2.9 - - user - -- name: "6.2.10 | PATCH | Ensure no users have .rhosts files" - ansible.builtin.file: - dest: "~{{ item }}/.rhosts" - state: absent - with_items: - - "{{ ubtu20cis_users.stdout_lines }}" - when: - - ubtu20cis_rule_6_2_10 - - ubtu20cis_disruption_high - tags: - - level1-server - - level1-workstation - - automated - - patch - - rule_6.2.10 - - user - -- name: "6.2.11 | PATCH | Ensure root is the only UID 0 account" +- name: "6.2.8 | AUDIT | Ensure no duplicate group names exist" block: - - name: "6.2.11 | AUDIT | Ensure root is the only UID 0 account | Get non-root users with UID of 0" - ansible.builtin.shell: awk -F":" '($3 == 0 && $1 != \"root\") {i++;print $1 }' /etc/passwd + - name: "6.2.8 | AUDIT | Ensure no duplicate group names exist | Check for duplicate group names" + ansible.builtin.shell: 'getent group | cut -d: -f1 | sort -n | uniq -d' changed_when: false failed_when: false check_mode: false - register: ubtu20cis_6_2_11_uid_0_notroot - - - name: "6.2.11 | PATCH | Ensure root is the only UID 0 account | Lock UID 0 users" - ansible.builtin.user: - name: "{{ item }}" - password_lock: true - with_items: - - "{{ ubtu20cis_6_2_11_uid_0_notroot.stdout_lines }}" - when: - - ubtu20cis_disruption_high - - ubtu20cis_6_2_11_uid_0_notroot.stdout | length > 0 + register: ubtu20cis_6_2_8_group_group_check - - name: "6.2.11 | AUDIT | Ensure root is the only UID 0 account | Alert about accounts disruption high" + - name: "6.2.8 | AUDIT | Ensure no duplicate group names exist | Print warning about groups with duplicate group names" ansible.builtin.debug: - msg: - - "Warning!! You have non-root users with a UID of 0 and ubtu18cis_disruption_high enabled" - - "This means the following accounts were password locked and will need to have the UID's manually adjusted" - - "{{ ubtu20cis_6_2_11_uid_0_notroot.stdout_lines }}" - when: - - ubtu20cis_disruption_high - - ubtu20cis_6_2_11_uid_0_notroot.stdout | length > 0 + msg: "Warning!!
The following group names are duplicates: {{ ubtu20cis_6_2_8_group_group_check.stdout_lines }}" + when: ubtu20cis_6_2_8_group_group_check.stdout | length > 0 - - name: "6.2.11 | AUDIT | Ensure root is the only UID 0 account | Warn Count" + - name: "6.2.8 | AUDIT | Ensure no duplicate group names exist | Set warning count" ansible.builtin.import_tasks: warning_facts.yml - when: - - ubtu20cis_disruption_high - - ubtu20cis_6_2_11_uid_0_notroot.stdout | length > 0 - - - name: "6.2.11 | AUDIT | Ensure root is the only UID 0 account | Alert about accounts disruption low" - ansible.builtin.debug: - msg: - - "Warning!! You have non-root users with a UID of 0 and ubtu18cis_disruption_high disabled" - - "This means no action was taken, you will need to have the UID's of the users below manually adjusted" - - "{{ ubtu20cis_6_2_11_uid_0_notroot.stdout_lines }}" - when: - - not ubtu20cis_disruption_high - - ubtu20cis_6_2_11_uid_0_notroot.stdout | length > 0 + when: ubtu20cis_6_2_8_group_group_check.stdout | length > 0 vars: - warn_control_id: '6.2.11' + warn_control_id: '6.2.8' when: - - ubtu20cis_rule_6_2_11 + - ubtu20cis_rule_6_2_8 tags: - level1-server - level1-workstation - automated - - scored - - rule_6.2.11 - - user - - root + - audit + - rule_6.2.8 + - groups -- name: "6.2.12 | PATCH | Ensure root PATH Integrity" +- name: "6.2.9 | PATCH | Ensure root PATH Integrity" block: - - name: "6.2.12 | AUDIT | Ensure root PATH Integrity | Determine empty value" + - name: "6.2.9 | AUDIT | Ensure root PATH Integrity | Determine empty value" ansible.builtin.shell: 'echo $PATH | grep ::' changed_when: false - failed_when: ubtu20cis_rule_6_2_12_path_colon.rc == 0 + failed_when: ubtu20cis_rule_6_2_9_path_colon.rc == 0 check_mode: false - register: ubtu20cis_rule_6_2_12_path_colon + register: ubtu20cis_rule_6_2_9_path_colon - - name: "6.2.12 | AUDIT | Ensure root PATH Integrity | Determin colon end" + - name: "6.2.9 | AUDIT | Ensure root PATH Integrity | Determin colon end" ansible.builtin.shell: 'echo $PATH | grep :$' changed_when: false - failed_when: ubtu20cis_rule_6_2_12_path_colon_end.rc == 0 + failed_when: ubtu20cis_rule_6_2_9_path_colon_end.rc == 0 check_mode: false - register: ubtu20cis_rule_6_2_12_path_colon_end + register: ubtu20cis_rule_6_2_9_path_colon_end - - name: "6.2.12 | AUDIT | Ensure root PATH Integrity | Determine dot in path" + - name: "6.2.9 | AUDIT | Ensure root PATH Integrity | Determine dot in path" ansible.builtin.shell: "/bin/bash --login -c 'env | grep ^PATH=' | sed -e 's/PATH=//' -e 's/::/:/' -e 's/:$//' -e 's/:/\\n/g'" changed_when: false - failed_when: '"." in ubtu20cis_rule_6_2_12_dot_in_path.stdout_lines' + failed_when: '"." 
in ubtu20cis_rule_6_2_9_dot_in_path.stdout_lines' check_mode: false - register: ubtu20cis_rule_6_2_12_dot_in_path + register: ubtu20cis_rule_6_2_9_dot_in_path - - name: "6.2.12 | AUDIT | Ensure root PATH Integrity | Alert on empty value, colon end, and dot in path" + - name: "6.2.9 | AUDIT | Ensure root PATH Integrity | Alert on empty value, colon end, and dot in path" ansible.builtin.debug: msg: - - "The following paths have an empty value: {{ ubtu20cis_rule_6_2_12_path_colon.stdout_lines }}" - - "The following paths have colon end: {{ ubtu20cis_rule_6_2_12_path_colon_end.stdout_lines }}" - - "The following paths have a dot in the path: {{ ubtu20cis_rule_6_2_12_dot_in_path.stdout_lines }}" - - - name: "6.2.12 | PATCH | Ensure root PATH Integrity | Determine rights and owner" - ansible.builtin.file: > - path='{{ item }}' - follow=yes - state=directory - owner=root - mode='o-w,g-w' - with_items: "{{ ubtu20cis_rule_6_2_12_dot_in_path.stdout_lines }}" - - - name: "6.2.12 | AUDIT | Ensure root PATH Integrity | Warn Count" + - "The following paths have an empty value: {{ ubtu20cis_rule_6_2_9_path_colon.stdout_lines }}" + - "The following paths have colon end: {{ ubtu20cis_rule_6_2_9_path_colon_end.stdout_lines }}" + - "The following paths have a dot in the path: {{ ubtu20cis_rule_6_2_9_dot_in_path.stdout_lines }}" + + - name: "6.2.9 | PATCH | Ensure root PATH Integrity | Determine rights and owner" + ansible.builtin.file: + path: "{{ item }}" + follow: true + state: directory + owner: root + mode: 'o-w,g-w' + with_items: "{{ ubtu20cis_rule_6_2_9_dot_in_path.stdout_lines }}" + + - name: "6.2.9 | AUDIT | Ensure root PATH Integrity | Warn Count" ansible.builtin.import_tasks: warning_facts.yml - when: ubtu20cis_rule_6_2_12_dot_in_path.stdout | length > 0 + when: ubtu20cis_rule_6_2_9_dot_in_path.stdout | length > 0 vars: - warn_control_id: '6.2.12' + warn_control_id: '6.2.9' when: - - ubtu20cis_rule_6_2_12 + - ubtu20cis_rule_6_2_9 tags: - level1-server - level1-workstation - automated - patch - - rule_6.2.12 + - rule_6.2.9 - user - root - notimplemented -- name: "6.2.13 | AUDIT | Ensure no duplicate UIDs exist" +- name: "6.2.10 | PATCH | Ensure root is the only UID 0 account" block: - - name: "6.2.13 | AUDIT | Ensure no duplicate UIDs exist | Check for duplicate UIDs" - ansible.builtin.shell: "pwck -r | awk -F: '{if ($3 in uid) print $1 ; else uid[$3]}' /etc/passwd" + - name: "6.2.10 | AUDIT | Ensure root is the only UID 0 account | Get non-root users with UID of 0" + ansible.builtin.shell: awk -F":" '($3 == 0 && $1 != \"root\") {i++;print $1 }' /etc/passwd changed_when: false failed_when: false check_mode: false - register: ubtu20cis_6_2_13_user_uid_check - - - name: "6.2.13 | AUDIT | Ensure no duplicate UIDs exist | Warn Count" - ansible.builtin.import_tasks: warning_facts.yml - when: ubtu20cis_6_2_13_user_uid_check.stdout | length > 0 - - - name: "6.2.13 | AUDIT | Ensure no duplicate UIDs exist | Print warning about users with duplicate UIDs" - ansible.builtin.debug: - msg: "Warning!! 
The following users have UIDs that are duplicates: {{ ubtu20cis_6_2_13_user_uid_check.stdout_lines }}" - when: ubtu20cis_6_2_13_user_uid_check.stdout | length > 0 - vars: - warn_control_id: '6.2.13' - when: - - ubtu20cis_rule_6_2_13 - tags: - - level1-server - - level1-workstation - - automated - - audit - - rule_6.2.13 - - user + register: ubtu20cis_6_2_10_uid_0_notroot -- name: "6.2.14 | AUDIT | Ensure no duplicate GIDs exist" - block: - - name: "6.2.14 | AUDIT | Ensure no duplicate GIDs exist | Check for duplicate GIDs" - ansible.builtin.shell: "pwck -r | awk -F: '{if ($3 in users) print $1 ; else users[$3]}' /etc/group" - changed_when: false - failed_when: false - check_mode: false - register: ubtu20cis_6_2_14_user_user_check + - name: "6.2.10 | PATCH | Ensure root is the only UID 0 account | Lock UID 0 users" + ansible.builtin.user: + name: "{{ item }}" + password_lock: true + with_items: + - "{{ ubtu20cis_6_2_10_uid_0_notroot.stdout_lines }}" + when: + - ubtu20cis_disruption_high + - ubtu20cis_6_2_10_uid_0_notroot.stdout | length > 0 - - name: "6.2.14 | AUDIT | Ensure no duplicate GIDs exist | Print warning about users with duplicate GIDs" + - name: "6.2.10 | AUDIT | Ensure root is the only UID 0 account | Alert about accounts disruption high" ansible.builtin.debug: - msg: "Warning!! The following groups have duplicate GIDs: {{ ubtu20cis_6_2_14_user_user_check.stdout_lines }}" - when: ubtu20cis_6_2_14_user_user_check.stdout | length > 0 + msg: + - "Warning!! You have non-root users with a UID of 0 and ubtu20cis_disruption_high enabled" + - "This means the following accounts were password locked and will need to have the UID's manually adjusted" + - "{{ ubtu20cis_6_2_10_uid_0_notroot.stdout_lines }}" + when: + - ubtu20cis_disruption_high + - ubtu20cis_6_2_10_uid_0_notroot.stdout | length > 0 - - name: "6.2.14 | AUDIT | Ensure no duplicate GIDs exist | Warn Count" + - name: "6.2.10 | AUDIT | Ensure root is the only UID 0 account | Warn Count" ansible.builtin.import_tasks: warning_facts.yml - when: ubtu20cis_6_2_14_user_user_check.stdout | length > 0 - vars: - warn_control_id: '6.2.14' - when: - - ubtu20cis_rule_6_2_14 - tags: - - level1-server - - level1-workstation - - automated - - audit - - rule_6.2.14 - - groups - -- name: "6.2.15 | AUDIT | Ensure no duplicate user names exist" - block: - - name: "6.2.15 | AUDIT | Ensure no duplicate user names exist | Check for duplicate User Names" - ansible.builtin.shell: "pwck -r | awk -F: '{if ($1 in users) print $1 ; else users[$1]}' /etc/passwd" - changed_when: false - failed_when: false - check_mode: false - register: ubtu20cis_6_2_15_user_username_check + when: + - ubtu20cis_disruption_high + - ubtu20cis_6_2_10_uid_0_notroot.stdout | length > 0 - - name: "6.2.15 | AUDIT | Ensure no duplicate user names exist | Print warning about users with duplicate User Names" + - name: "6.2.10 | AUDIT | Ensure root is the only UID 0 account | Alert about accounts disruption low" ansible.builtin.debug: - msg: "Warning!! The following user names are duplicates: {{ ubtu20cis_6_2_15_user_username_check.stdout_lines }}" - when: ubtu20cis_6_2_15_user_username_check.stdout | length > 0 - - - name: "6.2.15 | AUDIT | Ensure no duplicate user names exist | Warn Count" - ansible.builtin.import_tasks: warning_facts.yml - when: ubtu20cis_6_2_15_user_username_check.stdout | length > 0 - vars: - warn_control_id: '6.2.15' + msg: + - "Warning!! 
You have non-root users with a UID of 0 and ubtu18cis_disruption_high disabled" + - "This means no action was taken, you will need to have the UID's of the users below manually adjusted" + - "{{ ubtu20cis_6_2_10_uid_0_notroot.stdout_lines }}" + when: + - not ubtu20cis_disruption_high + - ubtu20cis_6_2_10_uid_0_notroot.stdout | length > 0 + vars: + warn_control_id: '6.2.10' when: - - ubtu20cis_rule_6_2_15 + - ubtu20cis_rule_6_2_10 tags: - level1-server - level1-workstation - automated - - audit - - rule_6.2.15 + - scored + - rule_6.2.10 - user + - root -- name: "6.2.16 | AUDIT | Ensure no duplicate group names exist" +- name: "6.2.11 | PATCH | Ensure local interactive user home directories exist" block: - - name: "6.2.16 | AUDIT | Ensure no duplicate group names exist | Check for duplicate group names" - ansible.builtin.shell: 'getent passwd | cut -d: -f1 | sort -n | uniq -d' - changed_when: false - failed_when: false - check_mode: false - register: ubtu20cis_6_2_16_group_group_check + - name: "6.2.11 | PATCH | Ensure local interactive user home directories exist | Create dir if absent" + ansible.builtin.file: + path: "{{ item.dir }}" + state: directory + owner: "{{ item.id }}" + group: "{{ item.gid }}" + mode: "0750" + register: ubtu20cis_6_2_11_home_dir + loop: "{{ ubtu20cis_passwd | selectattr('uid', '>=', min_int_uid | int ) | selectattr('uid', '<=', max_int_uid | int ) | list }}" + loop_control: + label: "{{ item.id }}" - - name: "6.2.16 | AUDIT | Ensure no duplicate group names exist | Print warning about users with duplicate group names" - ansible.builtin.debug: - msg: "Warning!! The following group names are duplicates: {{ ubtu20cis_6_2_16_group_group_check.stdout_lines }}" - when: ubtu20cis_6_2_16_group_group_check.stdout | length > 0 + # set default ACLs so the homedir has an effective umask of 0027 + - name: "6.2.11 | PATCH | Ensure local interactive user home directories exist | Set group ACL" + ansible.posix.acl: + path: "{{ item }}" + default: true + etype: group + permissions: rx + state: present + loop: "{{ interactive_users_home.stdout_lines }}" + when: not system_is_container - - name: "6.2.16 | AUDIT | Ensure no duplicate group names exist | Warn Count" - ansible.builtin.import_tasks: warning_facts.yml - when: ubtu20cis_6_2_16_group_group_check.stdout | length > 0 - vars: - warn_control_id: '6.2.16' + - name: "6.2.11 | PATCH | Ensure local interactive user home directories exist | Set other ACL" + ansible.posix.acl: + path: "{{ item }}" + default: true + etype: other + permissions: 0 + state: present + loop: "{{ interactive_users_home.stdout_lines }}" + when: not system_is_container when: - - ubtu20cis_rule_6_2_16 + - ubtu20cis_rule_6_2_11 tags: - level1-server - level1-workstation - - automated - - audit - - rule_6.2.16 - - groups + - patch + - users + - rule_6.2.11 -- name: "6.2.17 | AUDIT | Ensure shadow group is empty" +- name: "6.2.12 | PATCH | Ensure local interactive user dot files access is configured" block: - - name: "6.2.17 | AUDIT | Ensure shadow group is empty | Get Shadow GID" - ansible.builtin.shell: grep ^shadow /etc/group | cut -f3 -d":" + - name: "6.2.12 | AUDIT | Ensure local interactive user dot files access is configured | Check for dot files" + ansible.builtin.shell: find /home/ -name "\.*" -perm /g+w,o+w changed_when: false failed_when: false check_mode: false - register: ubtu20cis_6_2_17_shadow_gid + register: ubtu20cis_6_2_12_audit - - name: "6.2.17 | AUDIT | Ensure shadow group is empty | List of users with Shadow GID" - ansible.builtin.shell: 
awk -F":" '($4 == "{{ ubtu20cis_6_2_17_shadow_gid.stdout }}") { print }' /etc/passwd | cut -f1 -d":" - changed_when: false - failed_when: false - check_mode: false - register: ubtu20cis_6_2_17_users_shadow_gid + - name: "6.2.12 | PATCH | Ensure local interactive user dot files access is configured | Changes files if dot configured" + ansible.builtin.file: + path: '{{ item }}' + mode: go-w + with_items: "{{ ubtu20cis_6_2_12_audit.stdout_lines }}" + when: + - ubtu20cis_6_2_12_audit.stdout | length > 0 + - ubtu20cis_dotperm_ansibleManaged - - name: "6.2.17 | AUDIT | Ensure shadow group is empty | Message on users with Shadow GID" - ansible.builtin.debug: - msg: - - "Warning!! There are users that are in the Shadow group" - - "To conform to CIS standards no users should be in this group" - - "Please move the users below into another group" - - "{{ ubtu20cis_6_2_17_users_shadow_gid.stdout_lines }}" - when: ubtu20cis_6_2_17_users_shadow_gid.stdout | length > 0 + - name: "6.2.12 | PATCH | Ensure local interactive user dot files access is configured | Ensure no users have .netrc files" + ansible.builtin.file: + dest: "~{{ item }}/.netrc" + state: absent + with_items: + - "{{ ubtu20cis_users.stdout_lines }}" - - name: "6.2.17 | AUDIT | Ensure shadow group is empty | Warn Count" - ansible.builtin.import_tasks: warning_facts.yml - when: ubtu20cis_6_2_17_users_shadow_gid.stdout | length > 0 - vars: - warn_control_id: '6.2.17' + - name: "6.2.12 | PATCH | Ensure local interactive user dot files access is configured | Ensure no users have .forward files" + ansible.builtin.file: + dest: "~{{ item }}/.forward" + state: absent + with_items: + - "{{ ubtu20cis_users.stdout_lines }}" + + - name: "6.2.12 | PATCH | Ensure local interactive user dot files access is configured | Ensure no users have .rhostfiles" + ansible.builtin.file: + dest: "~{{ item }}/.rhost" + state: absent + with_items: + - "{{ ubtu20cis_users.stdout_lines }}" when: - - ubtu20cis_rule_6_2_17 + - ubtu20cis_rule_6_2_12 + - ubtu20cis_disruption_high tags: - level1-server - level1-workstation - automated - - audit - - rule_6.2.17 - - groups + - patch + - rule_6.2.12 - user diff --git a/templates/ansible_vars_goss.yml.j2 b/templates/ansible_vars_goss.yml.j2 index c4bae141..5ee2bcf9 100644 --- a/templates/ansible_vars_goss.yml.j2 +++ b/templates/ansible_vars_goss.yml.j2 @@ -1,7 +1,7 @@ audit_run: ansible # This is forced to wrapper by running the run_audit wrapper script (placeholder only if run via ansible) ## metadata for Audit benchmark -benchmark_version: '1.1.0' +benchmark_version: '2.0.1' # Some audit tests may need to scan every filesystem or have an impact on a system @@ -10,298 +10,358 @@ run_heavy_tests: {{ audit_run_heavy_tests }} timeout_ms: {{ audit_cmd_timeout }} -ubuntu20cis_section1: true -ubuntu20cis_section2: true -ubuntu20cis_section3: true -ubuntu20cis_section4: true -ubuntu20cis_section5: true -ubuntu20cis_section6: true +ubtu20cis_section1: true +ubtu20cis_section2: true +ubtu20cis_section3: true +ubtu20cis_section4: true +ubtu20cis_section5: true +ubtu20cis_section6: true -ubuntu20cis_level_1: true -ubuntu20cis_level_2: true +ubtu20cis_level_1: true +ubtu20cis_level_2: true -ubuntu20cis_apparmor_disable: true +ubtu20cis_apparmor_disable: true # to enable rules that may have IO impact on a system e.g. 
full filesystem scans or CPU heavy run_heavy_tests: true # True is BIOS based system else set to false -ubuntu20_legacy_boot: true +ubtu20_legacy_boot: true -ubuntu20_set_boot_pass: true +ubtu20_set_boot_pass: {{ ubtu20cis_set_boot_pass }} # These variables correspond with the CIS rule IDs or paragraph numbers defined in # the CIS benchmark documents. # PLEASE NOTE: These work in coordination with the section # group variables and tags. # You must enable an entire section in order for the variables below to take effect. # Section 1 rules -ubuntu20cis_rule_1_1_1_1: {{ ubtu20cis_rule_1_1_1_1 }} -ubuntu20cis_rule_1_1_1_2: {{ ubtu20cis_rule_1_1_1_2 }} -ubuntu20cis_rule_1_1_1_3: {{ ubtu20cis_rule_1_1_1_3 }} -ubuntu20cis_rule_1_1_1_4: {{ ubtu20cis_rule_1_1_1_4 }} -ubuntu20cis_rule_1_1_1_5: {{ ubtu20cis_rule_1_1_1_5 }} -ubuntu20cis_rule_1_1_1_6: {{ ubtu20cis_rule_1_1_1_6 }} -ubuntu20cis_rule_1_1_1_7: {{ ubtu20cis_rule_1_1_1_7 }} -ubuntu20cis_rule_1_1_2: {{ ubtu20cis_rule_1_1_2 }} -ubuntu20cis_rule_1_1_3: {{ ubtu20cis_rule_1_1_3 }} -ubuntu20cis_rule_1_1_4: {{ ubtu20cis_rule_1_1_4 }} -ubuntu20cis_rule_1_1_5: {{ ubtu20cis_rule_1_1_5 }} -ubuntu20cis_rule_1_1_6: {{ ubtu20cis_rule_1_1_6 }} -ubuntu20cis_rule_1_1_7: {{ ubtu20cis_rule_1_1_7 }} -ubuntu20cis_rule_1_1_8: {{ ubtu20cis_rule_1_1_8 }} -ubuntu20cis_rule_1_1_9: {{ ubtu20cis_rule_1_1_9 }} -ubuntu20cis_rule_1_1_10: {{ ubtu20cis_rule_1_1_10 }} -ubuntu20cis_rule_1_1_11: {{ ubtu20cis_rule_1_1_11 }} -ubuntu20cis_rule_1_1_12: {{ ubtu20cis_rule_1_1_12 }} -ubuntu20cis_rule_1_1_13: {{ ubtu20cis_rule_1_1_13 }} -ubuntu20cis_rule_1_1_14: {{ ubtu20cis_rule_1_1_14 }} -ubuntu20cis_rule_1_1_15: {{ ubtu20cis_rule_1_1_15 }} -ubuntu20cis_rule_1_1_16: {{ ubtu20cis_rule_1_1_16 }} -ubuntu20cis_rule_1_1_17: {{ ubtu20cis_rule_1_1_17 }} -ubuntu20cis_rule_1_1_18: {{ ubtu20cis_rule_1_1_18 }} -ubuntu20cis_rule_1_1_19: {{ ubtu20cis_rule_1_1_19 }} -ubuntu20cis_rule_1_1_20: {{ ubtu20cis_rule_1_1_20 }} -ubuntu20cis_rule_1_1_21: {{ ubtu20cis_rule_1_1_21 }} -ubuntu20cis_rule_1_1_22: {{ ubtu20cis_rule_1_1_22 }} -ubuntu20cis_rule_1_1_23: {{ ubtu20cis_rule_1_1_23 }} -ubuntu20cis_rule_1_1_24: {{ ubtu20cis_rule_1_1_24 }} -ubuntu20cis_rule_1_2_1: {{ ubtu20cis_rule_1_2_1 }} -ubuntu20cis_rule_1_2_2: {{ ubtu20cis_rule_1_2_2 }} -ubuntu20cis_rule_1_3_1: {{ ubtu20cis_rule_1_3_1 }} -ubuntu20cis_rule_1_3_2: {{ ubtu20cis_rule_1_3_2 }}= -ubuntu20cis_rule_1_4_1: {{ ubtu20cis_rule_1_4_1 }} -ubuntu20cis_rule_1_4_2: {{ ubtu20cis_rule_1_4_2 }} -ubuntu20cis_rule_1_4_3: {{ ubtu20cis_rule_1_4_3 }} -ubuntu20cis_rule_1_4_4: {{ ubtu20cis_rule_1_4_4 }} -ubuntu20cis_rule_1_5_1: {{ ubtu20cis_rule_1_5_1 }} -ubuntu20cis_rule_1_5_2: {{ ubtu20cis_rule_1_5_2 }} -ubuntu20cis_rule_1_5_3: {{ ubtu20cis_rule_1_5_3 }} -ubuntu20cis_rule_1_5_4: {{ ubtu20cis_rule_1_5_4 }} -ubuntu20cis_rule_1_6_1_1: {{ ubtu20cis_rule_1_6_1_1 }} -ubuntu20cis_rule_1_6_1_2: {{ ubtu20cis_rule_1_6_1_2 }} -ubuntu20cis_rule_1_6_1_3: {{ ubtu20cis_rule_1_6_1_3 }} -ubuntu20cis_rule_1_6_1_4: {{ ubtu20cis_rule_1_6_1_4 }} -ubuntu20cis_rule_1_7_1: {{ ubtu20cis_rule_1_7_1 }} -ubuntu20cis_rule_1_7_2: {{ ubtu20cis_rule_1_7_2 }} -ubuntu20cis_rule_1_7_3: {{ ubtu20cis_rule_1_7_3 }} -ubuntu20cis_rule_1_7_4: {{ ubtu20cis_rule_1_7_4 }} -ubuntu20cis_rule_1_7_5: {{ ubtu20cis_rule_1_7_5 }} -ubuntu20cis_rule_1_7_6: {{ ubtu20cis_rule_1_7_6 }} -ubuntu20cis_rule_1_8_1: {{ ubtu20cis_rule_1_8_1 }} -ubuntu20cis_rule_1_8_2: {{ ubtu20cis_rule_1_8_2 }} -ubuntu20cis_rule_1_8_3: {{ ubtu20cis_rule_1_8_3 }} -ubuntu20cis_rule_1_8_4: {{ ubtu20cis_rule_1_8_4 }} -ubuntu20cis_rule_1_9: 
{{ ubtu20cis_rule_1_9 }} +ubtu20cis_rule_1_1_1_1: {{ ubtu20cis_rule_1_1_1_1 }} +ubtu20cis_rule_1_1_1_2: {{ ubtu20cis_rule_1_1_1_2 }} +ubtu20cis_rule_1_1_1_3: {{ ubtu20cis_rule_1_1_1_3 }} +ubtu20cis_rule_1_1_1_4: {{ ubtu20cis_rule_1_1_1_4 }} +ubtu20cis_rule_1_1_1_5: {{ ubtu20cis_rule_1_1_1_5 }} +ubtu20cis_rule_1_1_1_6: {{ ubtu20cis_rule_1_1_1_6 }} +ubtu20cis_rule_1_1_1_7: {{ ubtu20cis_rule_1_1_1_7 }} +ubtu20cis_rule_1_1_2_1: {{ ubtu20cis_rule_1_1_2_1 }} +ubtu20cis_rule_1_1_2_2: {{ ubtu20cis_rule_1_1_2_2 }} +ubtu20cis_rule_1_1_2_3: {{ ubtu20cis_rule_1_1_2_3 }} +ubtu20cis_rule_1_1_2_4: {{ ubtu20cis_rule_1_1_2_4 }} +ubtu20cis_rule_1_1_3_1: {{ ubtu20cis_rule_1_1_3_1 }} +ubtu20cis_rule_1_1_3_2: {{ ubtu20cis_rule_1_1_3_2 }} +ubtu20cis_rule_1_1_3_3: {{ ubtu20cis_rule_1_1_3_3 }} +ubtu20cis_rule_1_1_4_1: {{ ubtu20cis_rule_1_1_4_1 }} +ubtu20cis_rule_1_1_4_2: {{ ubtu20cis_rule_1_1_4_2 }} +ubtu20cis_rule_1_1_4_3: {{ ubtu20cis_rule_1_1_4_3 }} +ubtu20cis_rule_1_1_4_4: {{ ubtu20cis_rule_1_1_4_4 }} +ubtu20cis_rule_1_1_5_1: {{ ubtu20cis_rule_1_1_5_1 }} +ubtu20cis_rule_1_1_5_2: {{ ubtu20cis_rule_1_1_5_2 }} +ubtu20cis_rule_1_1_5_3: {{ ubtu20cis_rule_1_1_5_3 }} +ubtu20cis_rule_1_1_5_4: {{ ubtu20cis_rule_1_1_5_4 }} +ubtu20cis_rule_1_1_6_1: {{ ubtu20cis_rule_1_1_6_1 }} +ubtu20cis_rule_1_1_6_2: {{ ubtu20cis_rule_1_1_6_2 }} +ubtu20cis_rule_1_1_6_3: {{ ubtu20cis_rule_1_1_6_3 }} +ubtu20cis_rule_1_1_6_4: {{ ubtu20cis_rule_1_1_6_4 }} +ubtu20cis_rule_1_1_7_1: {{ ubtu20cis_rule_1_1_7_1 }} +ubtu20cis_rule_1_1_7_2: {{ ubtu20cis_rule_1_1_7_2 }} +ubtu20cis_rule_1_1_7_3: {{ ubtu20cis_rule_1_1_7_3 }} +ubtu20cis_rule_1_1_8_1: {{ ubtu20cis_rule_1_1_8_1 }} +ubtu20cis_rule_1_1_8_2: {{ ubtu20cis_rule_1_1_8_2 }} +ubtu20cis_rule_1_1_8_3: {{ ubtu20cis_rule_1_1_8_3 }} +ubtu20cis_rule_1_1_9: {{ ubtu20cis_rule_1_1_9 }} +ubtu20cis_rule_1_1_10: {{ ubtu20cis_rule_1_1_10 }} +ubtu20cis_rule_1_2_1: {{ ubtu20cis_rule_1_2_1 }} +ubtu20cis_rule_1_2_2: {{ ubtu20cis_rule_1_2_2 }} +ubtu20cis_rule_1_3_1: {{ ubtu20cis_rule_1_3_1 }} +ubtu20cis_rule_1_3_2: {{ ubtu20cis_rule_1_3_2 }} +ubtu20cis_rule_1_3_3: {{ ubtu20cis_rule_1_3_3 }} +ubtu20cis_rule_1_4_1: {{ ubtu20cis_rule_1_4_1 }} +ubtu20cis_rule_1_4_2: {{ ubtu20cis_rule_1_4_2 }} +ubtu20cis_rule_1_4_3: {{ ubtu20cis_rule_1_4_3 }} +ubtu20cis_rule_1_5_1: {{ ubtu20cis_rule_1_5_1 }} +ubtu20cis_rule_1_5_2: {{ ubtu20cis_rule_1_5_2 }} +ubtu20cis_rule_1_5_3: {{ ubtu20cis_rule_1_5_3 }} +ubtu20cis_rule_1_5_4: {{ ubtu20cis_rule_1_5_4 }} +ubtu20cis_rule_1_5_5: {{ ubtu20cis_rule_1_5_5 }} +ubtu20cis_rule_1_6_1_1: {{ ubtu20cis_rule_1_6_1_1 }} +ubtu20cis_rule_1_6_1_2: {{ ubtu20cis_rule_1_6_1_2 }} +ubtu20cis_rule_1_6_1_3: {{ ubtu20cis_rule_1_6_1_3 }} +ubtu20cis_rule_1_6_1_4: {{ ubtu20cis_rule_1_6_1_4 }} +ubtu20cis_rule_1_7_1: {{ ubtu20cis_rule_1_7_1 }} +ubtu20cis_rule_1_7_2: {{ ubtu20cis_rule_1_7_2 }} +ubtu20cis_rule_1_7_3: {{ ubtu20cis_rule_1_7_3 }} +ubtu20cis_rule_1_7_4: {{ ubtu20cis_rule_1_7_4 }} +ubtu20cis_rule_1_7_5: {{ ubtu20cis_rule_1_7_5 }} +ubtu20cis_rule_1_7_6: {{ ubtu20cis_rule_1_7_6 }} +ubtu20cis_rule_1_8_1: {{ ubtu20cis_rule_1_8_1 }} +ubtu20cis_rule_1_8_2: {{ ubtu20cis_rule_1_8_2 }} +ubtu20cis_rule_1_8_3: {{ ubtu20cis_rule_1_8_3 }} +ubtu20cis_rule_1_8_4: {{ ubtu20cis_rule_1_8_4 }} +ubtu20cis_rule_1_8_5: {{ ubtu20cis_rule_1_8_5 }} +ubtu20cis_rule_1_8_6: {{ ubtu20cis_rule_1_8_6 }} +ubtu20cis_rule_1_8_7: {{ ubtu20cis_rule_1_8_7 }} +ubtu20cis_rule_1_8_8: {{ ubtu20cis_rule_1_8_8 }} +ubtu20cis_rule_1_8_9: {{ ubtu20cis_rule_1_8_9 }} +ubtu20cis_rule_1_8_10: {{ ubtu20cis_rule_1_8_10 }} # section 2 rules 
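Note on the pattern used throughout this template: each line simply mirrors a same-named toggle from the role's variables into the goss audit vars file. A minimal sketch of that wiring for one of the section 2 toggles below; the defaults-side location is assumed here rather than shown in this patch:

    # defaults/main.yml (role side, assumed location of the toggle)
    ubtu20cis_rule_2_1_1_1: true

    # templates/ansible_vars_goss.yml.j2 (audit side, as in this template)
    ubtu20cis_rule_2_1_1_1: {{ ubtu20cis_rule_2_1_1_1 }}

Renaming the template keys from the old ubuntu20cis_ prefix to the role's own ubtu20cis_ prefix keeps both sides of this mapping identical, so the audit run picks up exactly the settings the remediation run used.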
-ubuntu20cis_rule_2_1_1_1: {{ ubtu20cis_rule_2_1_1_1 }} -ubuntu20cis_rule_2_1_1_2: {{ ubtu20cis_rule_2_1_1_2 }} -ubuntu20cis_rule_2_1_1_3: {{ ubtu20cis_rule_2_1_1_3 }} -ubuntu20cis_rule_2_1_1_4: {{ ubtu20cis_rule_2_1_1_4 }} -ubuntu20cis_rule_2_1_2: {{ ubtu20cis_rule_2_1_2 }} -ubuntu20cis_rule_2_1_3: {{ ubtu20cis_rule_2_1_3 }} -ubuntu20cis_rule_2_1_4: {{ ubtu20cis_rule_2_1_4 }} -ubuntu20cis_rule_2_1_5: {{ ubtu20cis_rule_2_1_5 }} -ubuntu20cis_rule_2_1_6: {{ ubtu20cis_rule_2_1_6 }} -ubuntu20cis_rule_2_1_7: {{ ubtu20cis_rule_2_1_7 }} -ubuntu20cis_rule_2_1_8: {{ ubtu20cis_rule_2_1_8 }} -ubuntu20cis_rule_2_1_9: {{ ubtu20cis_rule_2_1_9 }} -ubuntu20cis_rule_2_1_10: {{ ubtu20cis_rule_2_1_10 }} -ubuntu20cis_rule_2_1_11: {{ ubtu20cis_rule_2_1_11 }} -ubuntu20cis_rule_2_1_12: {{ ubtu20cis_rule_2_1_12 }} -ubuntu20cis_rule_2_1_13: {{ ubtu20cis_rule_2_1_13 }} -ubuntu20cis_rule_2_1_14: {{ ubtu20cis_rule_2_1_14 }} -ubuntu20cis_rule_2_1_15: {{ ubtu20cis_rule_2_1_15 }} -ubuntu20cis_rule_2_1_16: {{ ubtu20cis_rule_2_1_16 }} -ubuntu20cis_rule_2_1_17: {{ ubtu20cis_rule_2_1_17 }} -ubuntu20cis_rule_2_2_1: {{ ubtu20cis_rule_2_2_1 }} -ubuntu20cis_rule_2_2_2: {{ ubtu20cis_rule_2_2_2 }} -ubuntu20cis_rule_2_2_3: {{ ubtu20cis_rule_2_2_3 }} -ubuntu20cis_rule_2_2_4: {{ ubtu20cis_rule_2_2_4 }} -ubuntu20cis_rule_2_2_5: {{ ubtu20cis_rule_2_2_5 }} -ubuntu20cis_rule_2_2_6: {{ ubtu20cis_rule_2_2_6 }} -ubuntu20cis_rule_2_3: {{ ubtu20cis_rule_2_3 }} +ubtu20cis_rule_2_1_1_1: {{ ubtu20cis_rule_2_1_1_1 }} +ubtu20cis_rule_2_1_2_1: {{ ubtu20cis_rule_2_1_2_1 }} +ubtu20cis_rule_2_1_2_2: {{ ubtu20cis_rule_2_1_2_2 }} +ubtu20cis_rule_2_1_2_3: {{ ubtu20cis_rule_2_1_2_3 }} +ubtu20cis_rule_2_1_3_1: {{ ubtu20cis_rule_2_1_3_1 }} +ubtu20cis_rule_2_1_3_2: {{ ubtu20cis_rule_2_1_3_2 }} +ubtu20cis_rule_2_1_4_1: {{ ubtu20cis_rule_2_1_4_1 }} +ubtu20cis_rule_2_1_4_2: {{ ubtu20cis_rule_2_1_4_2 }} +ubtu20cis_rule_2_1_4_3: {{ ubtu20cis_rule_2_1_4_3 }} +ubtu20cis_rule_2_1_4_4: {{ ubtu20cis_rule_2_1_4_4 }} + +ubtu20cis_rule_2_2_1: {{ ubtu20cis_rule_2_2_1 }} +ubtu20cis_rule_2_2_2: {{ ubtu20cis_rule_2_2_2 }} +ubtu20cis_rule_2_2_3: {{ ubtu20cis_rule_2_2_3 }} +ubtu20cis_rule_2_2_4: {{ ubtu20cis_rule_2_2_4 }} +ubtu20cis_rule_2_2_5: {{ ubtu20cis_rule_2_2_5 }} +ubtu20cis_rule_2_2_6: {{ ubtu20cis_rule_2_2_6 }} +ubtu20cis_rule_2_2_7: {{ ubtu20cis_rule_2_2_7 }} +ubtu20cis_rule_2_2_8: {{ ubtu20cis_rule_2_2_8 }} +ubtu20cis_rule_2_2_9: {{ ubtu20cis_rule_2_2_9 }} +ubtu20cis_rule_2_2_10: {{ ubtu20cis_rule_2_2_10 }} +ubtu20cis_rule_2_2_11: {{ ubtu20cis_rule_2_2_11 }} +ubtu20cis_rule_2_2_12: {{ ubtu20cis_rule_2_2_12 }} +ubtu20cis_rule_2_2_13: {{ ubtu20cis_rule_2_2_13 }} +ubtu20cis_rule_2_2_14: {{ ubtu20cis_rule_2_2_14 }} +ubtu20cis_rule_2_2_15: {{ ubtu20cis_rule_2_2_15 }} +ubtu20cis_rule_2_2_16: {{ ubtu20cis_rule_2_2_16 }} +ubtu20cis_rule_2_2_17: {{ ubtu20cis_rule_2_2_17 }} +ubtu20cis_rule_2_3_1: {{ ubtu20cis_rule_2_3_1 }} +ubtu20cis_rule_2_3_2: {{ ubtu20cis_rule_2_3_2 }} +ubtu20cis_rule_2_3_3: {{ ubtu20cis_rule_2_3_3 }} +ubtu20cis_rule_2_3_4: {{ ubtu20cis_rule_2_3_4 }} +ubtu20cis_rule_2_3_5: {{ ubtu20cis_rule_2_3_5 }} +ubtu20cis_rule_2_3_6: {{ ubtu20cis_rule_2_3_6 }} +ubtu20cis_rule_2_4: {{ ubtu20cis_rule_2_4 }} # Section 3 rules -ubuntu20cis_rule_3_1_1: {{ ubtu20cis_rule_3_1_1 }} -ubuntu20cis_rule_3_1_2: {{ ubtu20cis_rule_3_1_2 }} -ubuntu20cis_rule_3_2_1: {{ ubtu20cis_rule_3_2_1 }} -ubuntu20cis_rule_3_2_2: {{ ubtu20cis_rule_3_2_2 }} -ubuntu20cis_rule_3_3_1: {{ ubtu20cis_rule_3_3_1 }} -ubuntu20cis_rule_3_3_2: {{ ubtu20cis_rule_3_3_2 }} -ubuntu20cis_rule_3_3_3: {{ 
ubtu20cis_rule_3_3_3 }} -ubuntu20cis_rule_3_3_4: {{ ubtu20cis_rule_3_3_4 }} -ubuntu20cis_rule_3_3_5: {{ ubtu20cis_rule_3_3_5 }} -ubuntu20cis_rule_3_3_6: {{ ubtu20cis_rule_3_3_6 }} -ubuntu20cis_rule_3_3_7: {{ ubtu20cis_rule_3_3_7 }} -ubuntu20cis_rule_3_3_8: {{ ubtu20cis_rule_3_3_8 }} -ubuntu20cis_rule_3_3_9: {{ ubtu20cis_rule_3_3_9 }} -ubuntu20cis_rule_3_4_1: {{ ubtu20cis_rule_3_4_1 }} -ubuntu20cis_rule_3_4_2: {{ ubtu20cis_rule_3_4_2 }} -ubuntu20cis_rule_3_4_3: {{ ubtu20cis_rule_3_4_3 }} -ubuntu20cis_rule_3_4_4: {{ ubtu20cis_rule_3_4_4 }} +ubtu20cis_rule_3_1_1: {{ ubtu20cis_rule_3_1_1 }} +ubtu20cis_rule_3_1_2: {{ ubtu20cis_rule_3_1_2 }} +ubtu20cis_rule_3_1_3: {{ ubtu20cis_rule_3_1_3 }} +ubtu20cis_rule_3_1_4: {{ ubtu20cis_rule_3_1_4 }} +ubtu20cis_rule_3_1_5: {{ ubtu20cis_rule_3_1_5 }} +ubtu20cis_rule_3_1_6: {{ ubtu20cis_rule_3_1_6 }} +ubtu20cis_rule_3_1_7: {{ ubtu20cis_rule_3_1_7 }} +ubtu20cis_rule_3_2_1: {{ ubtu20cis_rule_3_2_1 }} +ubtu20cis_rule_3_2_2: {{ ubtu20cis_rule_3_2_2 }} +ubtu20cis_rule_3_3_1: {{ ubtu20cis_rule_3_3_1 }} +ubtu20cis_rule_3_3_2: {{ ubtu20cis_rule_3_3_2 }} +ubtu20cis_rule_3_3_3: {{ ubtu20cis_rule_3_3_3 }} +ubtu20cis_rule_3_3_4: {{ ubtu20cis_rule_3_3_4 }} +ubtu20cis_rule_3_3_5: {{ ubtu20cis_rule_3_3_5 }} +ubtu20cis_rule_3_3_6: {{ ubtu20cis_rule_3_3_6 }} +ubtu20cis_rule_3_3_7: {{ ubtu20cis_rule_3_3_7 }} +ubtu20cis_rule_3_3_8: {{ ubtu20cis_rule_3_3_8 }} +ubtu20cis_rule_3_3_9: {{ ubtu20cis_rule_3_3_9 }} +ubtu20cis_rule_3_4_1: {{ ubtu20cis_rule_3_4_1 }} +ubtu20cis_rule_3_4_2: {{ ubtu20cis_rule_3_4_2 }} + # UFW -ubuntu20cis_rule_3_5_1_1: {{ ubtu20cis_rule_3_5_1_1 }} -ubuntu20cis_rule_3_5_1_2: {{ ubtu20cis_rule_3_5_1_2 }} -ubuntu20cis_rule_3_5_1_3: {{ ubtu20cis_rule_3_5_1_3 }} -ubuntu20cis_rule_3_5_1_4: {{ ubtu20cis_rule_3_5_1_4 }} -ubuntu20cis_rule_3_5_1_5: {{ ubtu20cis_rule_3_5_1_5 }} -ubuntu20cis_rule_3_5_1_6: {{ ubtu20cis_rule_3_5_1_6 }} -ubuntu20cis_rule_3_5_1_7: {{ ubtu20cis_rule_3_5_1_7 }} +ubtu20cis_rule_3_4_1_1: {{ ubtu20cis_rule_3_4_1_1 }} +ubtu20cis_rule_3_4_1_2: {{ ubtu20cis_rule_3_4_1_2 }} +ubtu20cis_rule_3_4_1_3: {{ ubtu20cis_rule_3_4_1_3 }} +ubtu20cis_rule_3_4_1_4: {{ ubtu20cis_rule_3_4_1_4 }} +ubtu20cis_rule_3_4_1_5: {{ ubtu20cis_rule_3_4_1_5 }} +ubtu20cis_rule_3_4_1_6: {{ ubtu20cis_rule_3_4_1_6 }} +ubtu20cis_rule_3_4_1_7: {{ ubtu20cis_rule_3_4_1_7 }} # NFTables -ubuntu20cis_rule_3_5_2_1: {{ ubtu20cis_rule_3_5_2_1 }} -ubuntu20cis_rule_3_5_2_2: {{ ubtu20cis_rule_3_5_2_2 }} -ubuntu20cis_rule_3_5_2_3: {{ ubtu20cis_rule_3_5_2_3 }} -ubuntu20cis_rule_3_5_2_4: {{ ubtu20cis_rule_3_5_2_4 }} -ubuntu20cis_rule_3_5_2_5: {{ ubtu20cis_rule_3_5_2_5 }} -ubuntu20cis_rule_3_5_2_6: {{ ubtu20cis_rule_3_5_2_6 }} -ubuntu20cis_rule_3_5_2_7: {{ ubtu20cis_rule_3_5_2_7 }} -ubuntu20cis_rule_3_5_2_8: {{ ubtu20cis_rule_3_5_2_8 }} -ubuntu20cis_rule_3_5_2_9: {{ ubtu20cis_rule_3_5_2_9 }} -ubuntu20cis_rule_3_5_2_10: {{ ubtu20cis_rule_3_5_2_10 }} +ubtu20cis_rule_3_4_2_1: {{ ubtu20cis_rule_3_4_2_1 }} +ubtu20cis_rule_3_4_2_2: {{ ubtu20cis_rule_3_4_2_2 }} +ubtu20cis_rule_3_4_2_3: {{ ubtu20cis_rule_3_4_2_3 }} +ubtu20cis_rule_3_4_2_4: {{ ubtu20cis_rule_3_4_2_4 }} +ubtu20cis_rule_3_4_2_5: {{ ubtu20cis_rule_3_4_2_5 }} +ubtu20cis_rule_3_4_2_6: {{ ubtu20cis_rule_3_4_2_6 }} +ubtu20cis_rule_3_4_2_7: {{ ubtu20cis_rule_3_4_2_7 }} +ubtu20cis_rule_3_4_2_8: {{ ubtu20cis_rule_3_4_2_8 }} +ubtu20cis_rule_3_4_2_9: {{ ubtu20cis_rule_3_4_2_9 }} +ubtu20cis_rule_3_4_2_10: {{ ubtu20cis_rule_3_4_2_10 }} # IPTables -ubuntu20cis_rule_3_5_3_1_1: {{ ubtu20cis_rule_3_5_3_1_1 }} -ubuntu20cis_rule_3_5_3_1_2: {{ 
ubtu20cis_rule_3_5_3_1_2 }} -ubuntu20cis_rule_3_5_3_1_3: {{ ubtu20cis_rule_3_5_3_1_3 }} -ubuntu20cis_rule_3_5_3_2_1: {{ ubtu20cis_rule_3_5_3_2_1 }} -ubuntu20cis_rule_3_5_3_2_2: {{ ubtu20cis_rule_3_5_3_2_2 }} -ubuntu20cis_rule_3_5_3_2_3: {{ ubtu20cis_rule_3_5_3_2_3 }} -ubuntu20cis_rule_3_5_3_2_4: {{ ubtu20cis_rule_3_5_3_2_4 }} -ubuntu20cis_rule_3_5_3_3_1: {{ ubtu20cis_rule_3_5_3_3_1 }} -ubuntu20cis_rule_3_5_3_3_2: {{ ubtu20cis_rule_3_5_3_3_2 }} -ubuntu20cis_rule_3_5_3_3_3: {{ ubtu20cis_rule_3_5_3_3_3 }} -ubuntu20cis_rule_3_5_3_3_4: {{ ubtu20cis_rule_3_5_3_3_4 }} +ubtu20cis_rule_3_4_3_1_1: {{ ubtu20cis_rule_3_4_3_1_1 }} +ubtu20cis_rule_3_4_3_1_2: {{ ubtu20cis_rule_3_4_3_1_2 }} +ubtu20cis_rule_3_4_3_1_3: {{ ubtu20cis_rule_3_4_3_1_3 }} +ubtu20cis_rule_3_4_3_2_1: {{ ubtu20cis_rule_3_4_3_2_1 }} +ubtu20cis_rule_3_4_3_2_2: {{ ubtu20cis_rule_3_4_3_2_2 }} +ubtu20cis_rule_3_4_3_2_3: {{ ubtu20cis_rule_3_4_3_2_3 }} +ubtu20cis_rule_3_4_3_2_4: {{ ubtu20cis_rule_3_4_3_2_4 }} +ubtu20cis_rule_3_4_3_3_1: {{ ubtu20cis_rule_3_4_3_3_1 }} +ubtu20cis_rule_3_4_3_3_2: {{ ubtu20cis_rule_3_4_3_3_2 }} +ubtu20cis_rule_3_4_3_3_3: {{ ubtu20cis_rule_3_4_3_3_3 }} +ubtu20cis_rule_3_4_3_3_4: {{ ubtu20cis_rule_3_4_3_3_4 }} # Section 4 rules -ubuntu20cis_rule_4_1_1_1: {{ ubtu20cis_rule_4_1_1_1 }} -ubuntu20cis_rule_4_1_1_2: {{ ubtu20cis_rule_4_1_1_2 }} -ubuntu20cis_rule_4_1_1_3: {{ ubtu20cis_rule_4_1_1_3 }} -ubuntu20cis_rule_4_1_1_4: {{ ubtu20cis_rule_4_1_1_4 }} -ubuntu20cis_rule_4_1_2_1: {{ ubtu20cis_rule_4_1_2_1 }} -ubuntu20cis_rule_4_1_2_2: {{ ubtu20cis_rule_4_1_2_2 }} -ubuntu20cis_rule_4_1_2_3: {{ ubtu20cis_rule_4_1_2_3 }} -ubuntu20cis_rule_4_1_3: {{ ubtu20cis_rule_4_1_3 }} -ubuntu20cis_rule_4_1_4: {{ ubtu20cis_rule_4_1_4 }} -ubuntu20cis_rule_4_1_5: {{ ubtu20cis_rule_4_1_5 }} -ubuntu20cis_rule_4_1_6: {{ ubtu20cis_rule_4_1_6 }} -ubuntu20cis_rule_4_1_7: {{ ubtu20cis_rule_4_1_7 }} -ubuntu20cis_rule_4_1_8: {{ ubtu20cis_rule_4_1_8 }} -ubuntu20cis_rule_4_1_9: {{ ubtu20cis_rule_4_1_9 }} -ubuntu20cis_rule_4_1_10: {{ ubtu20cis_rule_4_1_10 }} -ubuntu20cis_rule_4_1_11: {{ ubtu20cis_rule_4_1_11 }} -ubuntu20cis_rule_4_1_12: {{ ubtu20cis_rule_4_1_12 }} -ubuntu20cis_rule_4_1_13: {{ ubtu20cis_rule_4_1_13}} -ubuntu20cis_rule_4_1_14: {{ ubtu20cis_rule_4_1_14 }} -ubuntu20cis_rule_4_1_15: {{ ubtu20cis_rule_4_1_15 }} -ubuntu20cis_rule_4_1_16: {{ ubtu20cis_rule_4_1_16 }} -ubuntu20cis_rule_4_1_17: {{ ubtu20cis_rule_4_1_17 }} -ubuntu20cis_rule_4_2_1_1: {{ ubtu20cis_rule_4_2_1_1 }} -ubuntu20cis_rule_4_2_1_2: {{ ubtu20cis_rule_4_2_1_2 }} -ubuntu20cis_rule_4_2_1_3: {{ ubtu20cis_rule_4_2_1_3 }} -ubuntu20cis_rule_4_2_1_4: {{ ubtu20cis_rule_4_2_1_4 }} -ubuntu20cis_rule_4_2_1_5: {{ ubtu20cis_rule_4_2_1_5 }} -ubuntu20cis_rule_4_2_1_6: {{ ubtu20cis_rule_4_2_1_6 }} -ubuntu20cis_rule_4_2_2_1: {{ ubtu20cis_rule_4_2_2_1 }} -ubuntu20cis_rule_4_2_2_2: {{ ubtu20cis_rule_4_2_2_2 }} -ubuntu20cis_rule_4_2_2_3: {{ ubtu20cis_rule_4_2_2_3 }} -ubuntu20cis_rule_4_2_3: {{ ubtu20cis_rule_4_2_3 }} -ubuntu20cis_rule_4_3: {{ ubtu20cis_rule_4_3 }} -ubuntu20cis_rule_4_4: {{ ubtu20cis_rule_4_4 }} +ubtu20cis_rule_4_1_1: {{ ubtu20cis_rule_4_1_1 }} +ubtu20cis_rule_4_1_2: {{ ubtu20cis_rule_4_1_2 }} +ubtu20cis_rule_4_1_3: {{ ubtu20cis_rule_4_1_3 }} +ubtu20cis_rule_4_1_4: {{ ubtu20cis_rule_4_1_4 }} +ubtu20cis_rule_4_1_5: {{ ubtu20cis_rule_4_1_5 }} +ubtu20cis_rule_4_1_6: {{ ubtu20cis_rule_4_1_6 }} +ubtu20cis_rule_4_1_7: {{ ubtu20cis_rule_4_1_7 }} +ubtu20cis_rule_4_1_8: {{ ubtu20cis_rule_4_1_8 }} +ubtu20cis_rule_4_1_9: {{ ubtu20cis_rule_4_1_9 }} +ubtu20cis_rule_4_2_1: {{ 
ubtu20cis_rule_4_2_1 }} +ubtu20cis_rule_4_2_2: {{ ubtu20cis_rule_4_2_2 }} +ubtu20cis_rule_4_2_3: {{ ubtu20cis_rule_4_2_3 }} +ubtu20cis_rule_4_2_4: {{ ubtu20cis_rule_4_2_4 }} +ubtu20cis_rule_4_2_5: {{ ubtu20cis_rule_4_2_5 }} +ubtu20cis_rule_4_2_6: {{ ubtu20cis_rule_4_2_6 }} +ubtu20cis_rule_4_2_7: {{ ubtu20cis_rule_4_2_7 }} +ubtu20cis_rule_4_2_8: {{ ubtu20cis_rule_4_2_8 }} +ubtu20cis_rule_4_2_9: {{ ubtu20cis_rule_4_2_9 }} +ubtu20cis_rule_4_2_10: {{ ubtu20cis_rule_4_2_10 }} +ubtu20cis_rule_4_2_11: {{ ubtu20cis_rule_4_2_11 }} +ubtu20cis_rule_4_2_12: {{ ubtu20cis_rule_4_2_12 }} +ubtu20cis_rule_4_2_13: {{ ubtu20cis_rule_4_2_13 }} +ubtu20cis_rule_4_2_14: {{ ubtu20cis_rule_4_2_14 }} +ubtu20cis_rule_4_2_15: {{ ubtu20cis_rule_4_2_15 }} +ubtu20cis_rule_4_2_16: {{ ubtu20cis_rule_4_2_16 }} +ubtu20cis_rule_4_2_17: {{ ubtu20cis_rule_4_2_17 }} +ubtu20cis_rule_4_2_18: {{ ubtu20cis_rule_4_2_18 }} +ubtu20cis_rule_4_2_19: {{ ubtu20cis_rule_4_2_19 }} +ubtu20cis_rule_4_2_20: {{ ubtu20cis_rule_4_2_20 }} +ubtu20cis_rule_4_2_21: {{ ubtu20cis_rule_4_2_21 }} +ubtu20cis_rule_4_2_22: {{ ubtu20cis_rule_4_2_22 }} +ubtu20cis_rule_4_3_1: {{ ubtu20cis_rule_4_3_1 }} +ubtu20cis_rule_4_3_2: {{ ubtu20cis_rule_4_3_2 }} +ubtu20cis_rule_4_3_3: {{ ubtu20cis_rule_4_3_3 }} +ubtu20cis_rule_4_3_4: {{ ubtu20cis_rule_4_3_4 }} +ubtu20cis_rule_4_3_5: {{ ubtu20cis_rule_4_3_5 }} +ubtu20cis_rule_4_3_6: {{ ubtu20cis_rule_4_3_6 }} +ubtu20cis_rule_4_3_7: {{ ubtu20cis_rule_4_3_7 }} +ubtu20cis_rule_4_4_1: {{ ubtu20cis_rule_4_4_1 }} +ubtu20cis_rule_4_4_2: {{ ubtu20cis_rule_4_4_2 }} +ubtu20cis_rule_4_4_3: {{ ubtu20cis_rule_4_4_3 }} +ubtu20cis_rule_4_4_4: {{ ubtu20cis_rule_4_4_4 }} +ubtu20cis_rule_4_4_5: {{ ubtu20cis_rule_4_4_5 }} + +ubtu20cis_rule_4_5_1_1: {{ ubtu20cis_rule_4_5_1_1 }} +ubtu20cis_rule_4_5_1_2: {{ ubtu20cis_rule_4_5_1_2 }} +ubtu20cis_rule_4_5_1_3: {{ ubtu20cis_rule_4_5_1_3 }} +ubtu20cis_rule_4_5_1_4: {{ ubtu20cis_rule_4_5_1_4 }} +ubtu20cis_rule_4_5_1_5: {{ ubtu20cis_rule_4_5_1_5 }} +ubtu20cis_rule_4_5_1_6: {{ ubtu20cis_rule_4_5_1_6 }} +ubtu20cis_rule_4_5_1_7: {{ ubtu20cis_rule_4_5_1_7 }} + +ubtu20cis_rule_4_5_1: {{ ubtu20cis_rule_4_5_1 }} +ubtu20cis_rule_4_5_2: {{ ubtu20cis_rule_4_5_2 }} +ubtu20cis_rule_4_5_3: {{ ubtu20cis_rule_4_5_3 }} +ubtu20cis_rule_4_5_4: {{ ubtu20cis_rule_4_5_4 }} +ubtu20cis_rule_4_5_5: {{ ubtu20cis_rule_4_5_5 }} +ubtu20cis_rule_4_5_6: {{ ubtu20cis_rule_4_5_6 }} +ubtu20cis_rule_4_5_7: {{ ubtu20cis_rule_4_5_7 }} # Section 5 -ubuntu20cis_rule_5_1_1: {{ ubtu20cis_rule_5_1_1 }} -ubuntu20cis_rule_5_1_2: {{ ubtu20cis_rule_5_1_2 }} -ubuntu20cis_rule_5_1_3: {{ ubtu20cis_rule_5_1_3 }} -ubuntu20cis_rule_5_1_4: {{ ubtu20cis_rule_5_1_4 }} -ubuntu20cis_rule_5_1_5: {{ ubtu20cis_rule_5_1_5 }} -ubuntu20cis_rule_5_1_6: {{ ubtu20cis_rule_5_1_6 }} -ubuntu20cis_rule_5_1_7: {{ ubtu20cis_rule_5_1_7 }} -ubuntu20cis_rule_5_1_8: {{ ubtu20cis_rule_5_1_8 }} -ubuntu20cis_rule_5_1_9: {{ ubtu20cis_rule_5_1_9 }} -ubuntu20cis_rule_5_2_1: {{ ubtu20cis_rule_5_2_1 }} -ubuntu20cis_rule_5_2_2: {{ ubtu20cis_rule_5_2_2 }} -ubuntu20cis_rule_5_2_3: {{ ubtu20cis_rule_5_2_3 }} -ubuntu20cis_rule_5_3_1: {{ ubtu20cis_rule_5_3_1 }} -ubuntu20cis_rule_5_3_2: {{ ubtu20cis_rule_5_3_2 }} -ubuntu20cis_rule_5_3_3: {{ ubtu20cis_rule_5_3_3 }} -ubuntu20cis_rule_5_3_4: {{ ubtu20cis_rule_5_3_4 }} -ubuntu20cis_rule_5_3_5: {{ ubtu20cis_rule_5_3_5 }} -ubuntu20cis_rule_5_3_6: {{ ubtu20cis_rule_5_3_6 }} -ubuntu20cis_rule_5_3_7: {{ ubtu20cis_rule_5_3_7 }} -ubuntu20cis_rule_5_3_8: {{ ubtu20cis_rule_5_3_8 }} -ubuntu20cis_rule_5_3_9: {{ ubtu20cis_rule_5_3_9 }} 
-ubuntu20cis_rule_5_3_10: {{ ubtu20cis_rule_5_3_10 }} -ubuntu20cis_rule_5_3_11: {{ ubtu20cis_rule_5_3_11 }} -ubuntu20cis_rule_5_3_12: {{ ubtu20cis_rule_5_3_12 }} -ubuntu20cis_rule_5_3_13: {{ ubtu20cis_rule_5_3_13 }} -ubuntu20cis_rule_5_3_14: {{ ubtu20cis_rule_5_3_14 }} -ubuntu20cis_rule_5_3_15: {{ ubtu20cis_rule_5_3_15 }} -ubuntu20cis_rule_5_3_16: {{ ubtu20cis_rule_5_3_16 }} -ubuntu20cis_rule_5_3_17: {{ ubtu20cis_rule_5_3_17 }} -ubuntu20cis_rule_5_3_18: {{ ubtu20cis_rule_5_3_18 }} -ubuntu20cis_rule_5_3_19: {{ ubtu20cis_rule_5_3_19 }} -ubuntu20cis_rule_5_3_20: {{ ubtu20cis_rule_5_3_20 }} -ubuntu20cis_rule_5_3_21: {{ ubtu20cis_rule_5_3_21 }} -ubuntu20cis_rule_5_3_22: {{ ubtu20cis_rule_5_3_22 }} -ubuntu20cis_rule_5_4_1: {{ ubtu20cis_rule_5_4_1 }} -ubuntu20cis_rule_5_4_2: {{ ubtu20cis_rule_5_4_2 }} -ubuntu20cis_rule_5_4_3: {{ ubtu20cis_rule_5_4_3 }} -ubuntu20cis_rule_5_4_4: {{ ubtu20cis_rule_5_4_4 }} -ubuntu20cis_rule_5_5_1_1: {{ ubtu20cis_rule_5_5_1_1 }} -ubuntu20cis_rule_5_5_1_2: {{ ubtu20cis_rule_5_5_1_2 }} -ubuntu20cis_rule_5_5_1_3: {{ ubtu20cis_rule_5_5_1_3 }} -ubuntu20cis_rule_5_5_1_4: {{ ubtu20cis_rule_5_5_1_4 }} -ubuntu20cis_rule_5_5_1_5: {{ ubtu20cis_rule_5_5_1_5 }} -ubuntu20cis_rule_5_5_2: {{ ubtu20cis_rule_5_5_2 }} -ubuntu20cis_rule_5_5_3: {{ ubtu20cis_rule_5_5_3 }} -ubuntu20cis_rule_5_5_4: {{ ubtu20cis_rule_5_5_4 }} -ubuntu20cis_rule_5_5_5: {{ ubtu20cis_rule_5_5_5 }} -ubuntu20cis_rule_5_6: {{ ubtu20cis_rule_5_6 }} -ubuntu20cis_rule_5_7: {{ ubtu20cis_rule_5_7 }} + +# JournalD +ubtu20cis_rule_5_1_1_1_1: {{ ubtu20cis_rule_5_1_1_1_1 }} +ubtu20cis_rule_5_1_1_1_2: {{ ubtu20cis_rule_5_1_1_1_2 }} +ubtu20cis_rule_5_1_1_1_3: {{ ubtu20cis_rule_5_1_1_1_3 }} +ubtu20cis_rule_5_1_1_1_4: {{ ubtu20cis_rule_5_1_1_1_4 }} +ubtu20cis_rule_5_1_1_2: {{ ubtu20cis_rule_5_1_1_2 }} +ubtu20cis_rule_5_1_1_3: {{ ubtu20cis_rule_5_1_1_3 }} +ubtu20cis_rule_5_1_1_4: {{ ubtu20cis_rule_5_1_1_4 }} +ubtu20cis_rule_5_1_1_5: {{ ubtu20cis_rule_5_1_1_5 }} +ubtu20cis_rule_5_1_1_6: {{ ubtu20cis_rule_5_1_1_6 }} +ubtu20cis_rule_5_1_1_7: {{ ubtu20cis_rule_5_1_1_7 }} + +#rsyslog +ubtu20cis_rule_5_1_2_1: {{ ubtu20cis_rule_5_1_2_1 }} +ubtu20cis_rule_5_1_2_2: {{ ubtu20cis_rule_5_1_2_2 }} +ubtu20cis_rule_5_1_2_3: {{ ubtu20cis_rule_5_1_2_3 }} +ubtu20cis_rule_5_1_2_4: {{ ubtu20cis_rule_5_1_2_4 }} +ubtu20cis_rule_5_1_2_5: {{ ubtu20cis_rule_5_1_2_5 }} +ubtu20cis_rule_5_1_2_6: {{ ubtu20cis_rule_5_1_2_6 }} +ubtu20cis_rule_5_1_2_7: {{ ubtu20cis_rule_5_1_2_7 }} + +ubtu20cis_rule_5_1_3: {{ ubtu20cis_rule_5_1_3 }} + + +ubtu20cis_rule_5_2_1_1: {{ ubtu20cis_rule_5_2_1_1 }} +ubtu20cis_rule_5_2_1_2: {{ ubtu20cis_rule_5_2_1_2 }} +ubtu20cis_rule_5_2_1_3: {{ ubtu20cis_rule_5_2_1_3 }} +ubtu20cis_rule_5_2_1_4: {{ ubtu20cis_rule_5_2_1_4 }} +ubtu20cis_rule_5_2_2_1: {{ ubtu20cis_rule_5_2_2_1 }} +ubtu20cis_rule_5_2_2_2: {{ ubtu20cis_rule_5_2_2_2 }} +ubtu20cis_rule_5_2_2_3: {{ ubtu20cis_rule_5_2_2_3 }} + +ubtu20cis_rule_5_2_3_1: {{ ubtu20cis_rule_5_2_3_1 }} +ubtu20cis_rule_5_2_3_2: {{ ubtu20cis_rule_5_2_3_2 }} +ubtu20cis_rule_5_2_3_3: {{ ubtu20cis_rule_5_2_3_3 }} +ubtu20cis_rule_5_2_3_4: {{ ubtu20cis_rule_5_2_3_4 }} +ubtu20cis_rule_5_2_3_5: {{ ubtu20cis_rule_5_2_3_5 }} +ubtu20cis_rule_5_2_3_6: {{ ubtu20cis_rule_5_2_3_6 }} +ubtu20cis_rule_5_2_3_7: {{ ubtu20cis_rule_5_2_3_7 }} +ubtu20cis_rule_5_2_3_8: {{ ubtu20cis_rule_5_2_3_8 }} +ubtu20cis_rule_5_2_3_9: {{ ubtu20cis_rule_5_2_3_9 }} +ubtu20cis_rule_5_2_3_10: {{ ubtu20cis_rule_5_2_3_10 }} +ubtu20cis_rule_5_2_3_11: {{ ubtu20cis_rule_5_2_3_11 }} +ubtu20cis_rule_5_2_3_12: {{ ubtu20cis_rule_5_2_3_12 }} 
+ubtu20cis_rule_5_2_3_13: {{ ubtu20cis_rule_5_2_3_13 }} +ubtu20cis_rule_5_2_3_14: {{ ubtu20cis_rule_5_2_3_14 }} +ubtu20cis_rule_5_2_3_15: {{ ubtu20cis_rule_5_2_3_15 }} +ubtu20cis_rule_5_2_3_16: {{ ubtu20cis_rule_5_2_3_16 }} +ubtu20cis_rule_5_2_3_17: {{ ubtu20cis_rule_5_2_3_17 }} +ubtu20cis_rule_5_2_3_18: {{ ubtu20cis_rule_5_2_3_18 }} +ubtu20cis_rule_5_2_3_19: {{ ubtu20cis_rule_5_2_3_19 }} +ubtu20cis_rule_5_2_3_20: {{ ubtu20cis_rule_5_2_3_20 }} +ubtu20cis_rule_5_2_3_21: {{ ubtu20cis_rule_5_2_3_21 }} + +ubtu20cis_rule_5_2_4_1: {{ ubtu20cis_rule_5_2_4_1 }} +ubtu20cis_rule_5_2_4_2: {{ ubtu20cis_rule_5_2_4_2 }} +ubtu20cis_rule_5_2_4_3: {{ ubtu20cis_rule_5_2_4_3 }} +ubtu20cis_rule_5_2_4_4: {{ ubtu20cis_rule_5_2_4_4 }} +ubtu20cis_rule_5_2_4_5: {{ ubtu20cis_rule_5_2_4_5 }} +ubtu20cis_rule_5_2_4_6: {{ ubtu20cis_rule_5_2_4_6 }} +ubtu20cis_rule_5_2_4_7: {{ ubtu20cis_rule_5_2_4_7 }} +ubtu20cis_rule_5_2_4_8: {{ ubtu20cis_rule_5_2_4_8 }} +ubtu20cis_rule_5_2_4_9: {{ ubtu20cis_rule_5_2_4_9 }} +ubtu20cis_rule_5_2_4_10: {{ ubtu20cis_rule_5_2_4_10 }} +ubtu20cis_rule_5_2_4_11: {{ ubtu20cis_rule_5_2_4_11 }} # Section 6 -ubuntu20cis_rule_6_1_1: {{ ubtu20cis_rule_6_1_1 }} -ubuntu20cis_rule_6_1_2: {{ ubtu20cis_rule_6_1_2 }} -ubuntu20cis_rule_6_1_3: {{ ubtu20cis_rule_6_1_3 }} -ubuntu20cis_rule_6_1_4: {{ ubtu20cis_rule_6_1_4 }} -ubuntu20cis_rule_6_1_5: {{ ubtu20cis_rule_6_1_5 }} -ubuntu20cis_rule_6_1_6: {{ ubtu20cis_rule_6_1_6 }} -ubuntu20cis_rule_6_1_7: {{ ubtu20cis_rule_6_1_7 }} -ubuntu20cis_rule_6_1_8: {{ ubtu20cis_rule_6_1_8 }} -ubuntu20cis_rule_6_1_9: {{ ubtu20cis_rule_6_1_9 }} -ubuntu20cis_rule_6_1_10: {{ ubtu20cis_rule_6_1_10 }} -ubuntu20cis_rule_6_1_11: {{ ubtu20cis_rule_6_1_11 }} -ubuntu20cis_rule_6_1_12: {{ ubtu20cis_rule_6_1_12 }} -ubuntu20cis_rule_6_1_13: {{ ubtu20cis_rule_6_1_13 }} -ubuntu20cis_rule_6_1_14: {{ ubtu20cis_rule_6_1_14 }} - -ubuntu20cis_rule_6_2_1: {{ ubtu20cis_rule_6_2_1 }} -ubuntu20cis_rule_6_2_2: {{ ubtu20cis_rule_6_2_2 }} -ubuntu20cis_rule_6_2_3: {{ ubtu20cis_rule_6_2_3 }} -ubuntu20cis_rule_6_2_4: {{ ubtu20cis_rule_6_2_4 }} -ubuntu20cis_rule_6_2_5: {{ ubtu20cis_rule_6_2_5 }} -ubuntu20cis_rule_6_2_6: {{ ubtu20cis_rule_6_2_6 }} -ubuntu20cis_rule_6_2_7: {{ ubtu20cis_rule_6_2_7 }} -ubuntu20cis_rule_6_2_8: {{ ubtu20cis_rule_6_2_8 }} -ubuntu20cis_rule_6_2_9: {{ ubtu20cis_rule_6_2_9 }} -ubuntu20cis_rule_6_2_10: {{ ubtu20cis_rule_6_2_10 }} -ubuntu20cis_rule_6_2_11: {{ ubtu20cis_rule_6_2_11 }} -ubuntu20cis_rule_6_2_12: {{ ubtu20cis_rule_6_2_12 }} -ubuntu20cis_rule_6_2_13: {{ ubtu20cis_rule_6_2_13 }} -ubuntu20cis_rule_6_2_14: {{ ubtu20cis_rule_6_2_14 }} -ubuntu20cis_rule_6_2_15: {{ ubtu20cis_rule_6_2_15 }} -ubuntu20cis_rule_6_2_16: {{ ubtu20cis_rule_6_2_16 }} -ubuntu20cis_rule_6_2_17: {{ ubtu20cis_rule_6_2_17 }} +ubtu20cis_rule_6_1_1: {{ ubtu20cis_rule_6_1_1 }} +ubtu20cis_rule_6_1_2: {{ ubtu20cis_rule_6_1_2 }} +ubtu20cis_rule_6_1_3: {{ ubtu20cis_rule_6_1_3 }} +ubtu20cis_rule_6_1_4: {{ ubtu20cis_rule_6_1_4 }} +ubtu20cis_rule_6_1_5: {{ ubtu20cis_rule_6_1_5 }} +ubtu20cis_rule_6_1_6: {{ ubtu20cis_rule_6_1_6 }} +ubtu20cis_rule_6_1_7: {{ ubtu20cis_rule_6_1_7 }} +ubtu20cis_rule_6_1_8: {{ ubtu20cis_rule_6_1_8 }} +ubtu20cis_rule_6_1_9: {{ ubtu20cis_rule_6_1_9 }} +ubtu20cis_rule_6_1_10: {{ ubtu20cis_rule_6_1_10 }} +ubtu20cis_rule_6_1_11: {{ ubtu20cis_rule_6_1_11 }} +ubtu20cis_rule_6_1_12: {{ ubtu20cis_rule_6_1_12 }} +ubtu20cis_rule_6_1_13: {{ ubtu20cis_rule_6_1_13 }} + + +ubtu20cis_rule_6_2_1: {{ ubtu20cis_rule_6_2_1 }} +ubtu20cis_rule_6_2_2: {{ ubtu20cis_rule_6_2_2 }} +ubtu20cis_rule_6_2_3: {{ 
ubtu20cis_rule_6_2_3 }} +ubtu20cis_rule_6_2_4: {{ ubtu20cis_rule_6_2_4 }} +ubtu20cis_rule_6_2_5: {{ ubtu20cis_rule_6_2_5 }} +ubtu20cis_rule_6_2_6: {{ ubtu20cis_rule_6_2_6 }} +ubtu20cis_rule_6_2_7: {{ ubtu20cis_rule_6_2_7 }} +ubtu20cis_rule_6_2_8: {{ ubtu20cis_rule_6_2_8 }} +ubtu20cis_rule_6_2_9: {{ ubtu20cis_rule_6_2_9 }} +ubtu20cis_rule_6_2_10: {{ ubtu20cis_rule_6_2_10 }} +ubtu20cis_rule_6_2_11: {{ ubtu20cis_rule_6_2_11 }} +ubtu20cis_rule_6_2_12: {{ ubtu20cis_rule_6_2_12 }} + # AIDE -ubuntu20cis_config_aide: true +ubtu20cis_config_aide: true # aide setup via - cron, timer -ubuntu20cis_aide_scan: cron +ubtu20cis_aide_scan: cron # AIDE cron settings -ubuntu20_aide_cron: +ubtu20_aide_cron: cron_user: {{ ubtu20cis_aide_cron.cron_user }} cron_file: {{ ubtu20cis_aide_cron.cron_file }} aide_job: {{ ubtu20cis_aide_cron.aide_job }} @@ -312,55 +372,64 @@ ubuntu20_aide_cron: aide_weekday: '{{ ubtu20cis_aide_cron.aide_weekday }}' # 1.1 -ubuntu20cis_allow_autofs: {{ ubtu20cis_allow_autofs }} +ubtu20cis_allow_autofs: {{ ubtu20cis_allow_autofs }} # 1.4 -ubuntu20cis_grub_conf_file: /boot/grub/grub.cfg -ubuntu20cis_grub_user_file: {{ ubtu20cis_grub_user_file }} -ubuntu20cis_grub_username: {{ ubtu20cis_grub_users }} -ubuntu20cis_grub_hash: {{ ubtu20cis_grub_pw }} +ubtu20cis_grub_conf_file: /boot/grub/grub.cfg +ubtu20cis_grub_user_file: {{ ubtu20cis_grub_user_file }} +ubtu20cis_grub_username: {{ ubtu20cis_grub_user }} +ubtu20cis_grub_hash: {{ ubtu20cis_grub_pw }} -ubuntu20cis_root_pw: {{ ubtu20cis_root_pw }} +ubtu20cis_root_pw: {{ ubtu20cis_root_pw }} # 1.6 - Only have apparmor enforcing -ubuntu20cis_apparmor_enforce_only: false +ubtu20cis_apparmor_enforce_only: false # Warning Banner Content (issue, issue.net, motd) -ubuntu20_warning_banner: {{ ubtu20cis_warning_banner }} +ubtu20_warning_banner: {{ ubtu20cis_warning_banner }} # End Banner # Section 2 -# Time sync - can be timesync or chriny or ntp -ubuntu20cis_time_service: {{ ubtu20cis_time_sync_tool }} -ubuntu20cis_ntp_servers: {{ ubtu20cis_ntp_server_list }} -ubuntu20cis_ntp_fallback: {{ ubtu20cis_ntp_fallback_server_list }} -ubuntu20cis_ntp_root_distance: +# Time sync - can be timesync or chrony or ntp +ubtu20cis_time_service: {{ ubtu20cis_time_sync_tool }} +ubtu20cis_time_servers: +{% for server in ubtu20cis_time_servers %} +- name: {{ server.name }} + options: {{ server.options }} +{% endfor %} + +ubtu20cis_time_pool: +{% for pool in ubtu20cis_time_pool %} +- name: {{ pool.name }} + options: {{ pool.options }} +{% endfor %} # Whether or not to run tasks related to auditing/patching the desktop environment -ubuntu20cis_gui: {{ ubtu20cis_desktop_required }} +ubtu20cis_gui: {{ ubtu20cis_desktop_required }} # Service configuration booleans set true to keep service -ubuntu20cis_avahi_server: {{ ubtu20cis_avahi_server }} -ubuntu20cis_cups_server: {{ ubtu20cis_cups_server }} -ubuntu20cis_nfs_server: {{ ubtu20cis_nfs_server }} -ubuntu20cis_dhcp_server: {{ ubtu20cis_dhcp_server }} -ubuntu20cis_ldap_server: {{ ubtu20cis_ldap_server }} -ubuntu20cis_dns_server: {{ ubtu20cis_dns_server }} -ubuntu20cis_vsftpd_server: {{ ubtu20cis_vsftpd_server }} -ubuntu20cis_httpd_server: {{ ubtu20cis_httpd_server }} -ubuntu20cis_is_mail_server: false -ubuntu20cis_dovecot_server: {{ ubtu20cis_dovecot_server }} -ubuntu20cis_samba_server: {{ ubtu20cis_smb_server }} -ubuntu20cis_squid_server: {{ ubtu20cis_squid_server }} -ubuntu20cis_snmp_server: {{ ubtu20cis_snmp_server }} +ubtu20cis_avahi_server: {{ ubtu20cis_avahi_server }} +ubtu20cis_cups_server: {{ ubtu20cis_cups_server }} 
+ubtu20cis_nfs_server: {{ ubtu20cis_nfs_server }} +ubtu20cis_dhcp_server: {{ ubtu20cis_dhcp_server }} +ubtu20cis_dnsmasq_server: {{ ubtu20cis_dnsmasq_server }} +ubtu20cis_ldap_server: {{ ubtu20cis_ldap_server }} +ubtu20cis_dns_server: {{ ubtu20cis_dns_server }} +ubtu20cis_vsftpd_server: {{ ubtu20cis_vsftpd_server }} +ubtu20cis_httpd_server: {{ ubtu20cis_httpd_server }} +ubtu20cis_is_mail_server: false +ubtu20cis_dovecot_server: {{ ubtu20cis_dovecot_server }} +ubtu20cis_samba_server: {{ ubtu20cis_smb_server }} +ubtu20cis_squid_server: {{ ubtu20cis_squid_server }} +ubtu20cis_snmp_server: {{ ubtu20cis_snmp_server }} # Mail Server config {% if ubtu20_cis_mail_transfer_agent is defined %} -ubuntu20cis_mailserver: {{ ubtu20_cis_mail_transfer_agent }} +ubtu20cis_mailserver: {{ ubtu20_cis_mail_transfer_agent }} {% else %} -ubuntu20cis_mailserver: Not_defined +ubtu20cis_mailserver: Not_defined {% endif %} -ubuntu20_exim_conf: +ubtu20_exim_conf: - dc_eximconfig_configtype='local' - dc_local_interfaces='127.0.0.1 ; ::1' - dc_readhost='' @@ -374,59 +443,48 @@ ubuntu20_exim_conf: - dc_localdelivery='mail_spool' -ubuntu20cis_rsyncd_server: {{ ubtu20cis_rsync_server }} -ubuntu20cis_nis_server: {{ ubtu20cis_nis_server }} +ubtu20cis_rsyncd_server: {{ ubtu20cis_rsync_server }} +ubtu20cis_nis_server: {{ ubtu20cis_nis_server }} -ubuntu20cis_xwindows_required: false +ubtu20cis_xwindows_required: false # 2.2 client services -ubuntu20cis_rsh_required: {{ ubtu20cis_rsh_required }} -ubuntu20cis_talk_required: {{ ubtu20cis_talk_required }} -ubuntu20cis_telnet_required: {{ ubtu20cis_telnet_required }} -ubuntu20cis_ldap_clients_required: {{ ubtu20cis_ldap_clients_required }} -ubuntu20cis_rpc_required: {{ ubtu20cis_rpc_required }} +ubtu20cis_rsh_required: {{ ubtu20cis_rsh_required }} +ubtu20cis_talk_required: {{ ubtu20cis_talk_required }} +ubtu20cis_telnet_required: {{ ubtu20cis_telnet_required }} +ubtu20cis_ldap_clients_required: {{ ubtu20cis_ldap_clients_required }} +ubtu20cis_rpc_required: {{ ubtu20cis_rpc_required }} # Section 3 # IPv6 required -ubuntu20cis_ipv6_required: {{ ubtu20cis_ipv6_required }} +ubtu20cis_ipv6_required: {{ ubtu20cis_ipv6_required }} # System network parameters (host only OR host and router) -ubuntu20cis_is_router: false +ubtu20cis_is_router: false -ubuntu20cis_firewall: {{ ubtu20cis_firewall_package }} +ubtu20cis_firewall: {{ ubtu20cis_firewall_package }} -ubuntu20_default_firewall_zone: public -ubuntu20_firewall_interface: +ubtu20_default_firewall_zone: public +ubtu20_firewall_interface: - ['ens224'] - ['ens192'] -ubuntu20_firewall_services: +ubtu20_firewall_services: - ssh - dhcpv6-client ### Section 4 -## auditd settings -ubuntu20cis_auditd: - space_left_action: email - action_mail_acct: root - admin_space_left_action: {{ ubtu20cis_auditd.admin_space_left_action }} - max_log_file_action: {{ ubtu20cis_auditd.max_log_file_action }} - auditd_backlog_limit: {{ ubtu20cis_audit_back_log_limit }} - -## syslog -ubuntu20cis_is_syslog_server: {{ ubtu20cis_system_is_log_server }} -### Section 5 -ubuntu20cis_sshd_limited: false +ubtu20cis_sshd_limited: false # Note the following to understand precedence and layout -ubuntu20cis_sshd_access: +ubtu20cis_sshd_access: - AllowUser - AllowGroup - DenyUser - DenyGroup -ubuntu20cis_ssh_strong_ciphers: Ciphers chacha20-poly1305@openssh.com,aes256-gcm@openssh.com,aes128-gcm@openssh.com,aes256-ctr,aes192-ctr,aes128-ctr -ubuntu20cis_ssh_weak_ciphers: +ubtu20cis_ssh_strong_ciphers: Ciphers 
chacha20-poly1305@openssh.com,aes256-gcm@openssh.com,aes128-gcm@openssh.com,aes256-ctr,aes192-ctr,aes128-ctr +ubtu20cis_ssh_weak_ciphers: - 3des-cbc - aes128-cbc - aes192-cbc @@ -438,8 +496,8 @@ ubuntu20cis_ssh_weak_ciphers: - cast128-cbc - rijndael-cbc@lysator.liu.se -ubuntu20cis_ssh_strong_macs: MACs hmac-sha2-512-etm@openssh.com,hmac-sha2-256-etm@openssh.com,hmac-sha2-512,hmac-sha2-256 -ubuntu20cis_ssh_weak_macs: +ubtu20cis_ssh_strong_macs: MACs hmac-sha2-512-etm@openssh.com,hmac-sha2-256-etm@openssh.com,hmac-sha2-512,hmac-sha2-256 +ubtu20cis_ssh_weak_macs: - hmac-md5 - hmac-md5-96 - hmac-ripemd160 @@ -455,38 +513,55 @@ ubuntu20cis_ssh_weak_macs: - umac-64-etm@openssh.com - umac-128-etm@openssh.com -ubuntu20cis_ssh_strong_kex: KexAlgorithms curve25519-sha256,curve25519-sha256@libssh.org,diffie-hellman-group14-sha256,diffie-hellman-group16-sha512,diffie-hellman-group18-sha512,ecdh-sha2-nistp521,ecdh-sha2-nistp384,ecdh-sha2-nistp256,diffie-hellman-group-exchange-sha256 -ubuntu20cis_ssh_weak_kex: +ubtu20cis_ssh_strong_kex: KexAlgorithms curve25519-sha256,curve25519-sha256@libssh.org,diffie-hellman-group14-sha256,diffie-hellman-group16-sha512,diffie-hellman-group18-sha512,ecdh-sha2-nistp521,ecdh-sha2-nistp384,ecdh-sha2-nistp256,diffie-hellman-group-exchange-sha256 +ubtu20cis_ssh_weak_kex: - diffie-hellman-group1-sha1 - diffie-hellman-group14-sha1 - diffie-hellman-group-exchange-sha1 -ubuntu20cis_ssh_aliveinterval: 300 -ubuntu20cis_ssh_countmax: 3 +ubtu20cis_ssh_aliveinterval: 300 +ubtu20cis_ssh_countmax: 3 ## PAM -ubuntu20cis_pam_password: +ubtu20cis_pam_password: minlen: "14" minclass: "4" -ubuntu20cis_pam_passwd_retry: "3" +ubtu20cis_pam_passwd_retry: "3" # choose one of below -ubuntu20cis_pwhistory_so: "14" -ubuntu20cis_unix_so: false -ubuntu20cis_passwd_remember: {{ ubtu20cis_pamd_pwhistory_remember }} +ubtu20cis_pwhistory_so: "14" +ubtu20cis_unix_so: false +ubtu20cis_passwd_remember: {{ ubtu20cis_pamd_pwhistory_remember }} # logins.def password settings -ubuntu20cis_pass: +ubtu20cis_pass: max_days: {{ ubtu20cis_pass.max_days }} min_days: {{ ubtu20cis_pass.min_days }} warn_age: {{ ubtu20cis_pass.warn_age }} + character_changed: {{ ubtu20cis_pass.character_changed }} + max_repeat: {{ ubtu20cis_pass.max_repeat_letters }} # set sugroup if differs from wheel -ubuntu20cis_sugroup: {{ ubtu20cis_su_group }} +ubtu20cis_sugroup: {{ ubtu20cis_sugroup }} # sugroup users list -ubuntu20_sugroup_users: "root" +ubtu20_sugroup_users: "root" # var log location variable -ubuntu20_varlog_location: {{ ubtu20cis_sudo_logfile }} +ubtu20_varlog_location: {{ ubtu20cis_sudo_logfile }} + +### Section 5 +## auditd settings +ubtu20cis_auditd: + space_left_action: email + action_mail_acct: root + admin_space_left_action: {{ ubtu20cis_auditd.admin_space_left_action }} + max_log_file_action: {{ ubtu20cis_auditd.max_log_file_action }} + auditd_backlog_limit: {{ ubtu20cis_audit_back_log_limit }} + +## syslog +ubtu20cis_syslog_service: {{ ubtu20cis_syslog_service }} +ubtu20cis_is_syslog_server: {{ ubtu20cis_system_is_log_server }} + +ubtu20cis_min_uid: {{ min_int_uid }} diff --git a/templates/audit/ubtu20cis_4_1_10_access.rules.j2 b/templates/audit/ubtu20cis_4_1_10_access.rules.j2 deleted file mode 100644 index b28cd459..00000000 --- a/templates/audit/ubtu20cis_4_1_10_access.rules.j2 +++ /dev/null @@ -1,6 +0,0 @@ -{% if ansible_architecture == 'x86_64' -%} --a always,exit -F arch=b64 -S creat -S open -S openat -S truncate -S ftruncate -F exit=-EACCES -F auid>=1000 -F auid!=4294967295 -k access --a always,exit -F 
arch=b64 -S creat -S open -S openat -S truncate -S ftruncate -F exit=-EPERM -F auid>=1000 -F auid!=4294967295 -k access -{% endif %} --a always,exit -F arch=b32 -S creat -S open -S openat -S truncate -S ftruncate -F exit=-EACCES -F auid>=1000 -F auid!=4294967295 -k access --a always,exit -F arch=b32 -S creat -S open -S openat -S truncate -S ftruncate -F exit=-EPERM -F auid>=1000 -F auid!=4294967295 -k access diff --git a/templates/audit/ubtu20cis_4_1_12_audit.rules.j2 b/templates/audit/ubtu20cis_4_1_12_audit.rules.j2 deleted file mode 100644 index fa95efb9..00000000 --- a/templates/audit/ubtu20cis_4_1_12_audit.rules.j2 +++ /dev/null @@ -1,4 +0,0 @@ --a always,exit -F arch=b32 -S mount -F auid>=1000 -F auid!=4294967295 -k mounts -{% if ansible_architecture == 'x86_64' -%} --a always,exit -F arch=b64 -S mount -F auid>=1000 -F auid!=4294967295 -k mounts -{% endif %} diff --git a/templates/audit/ubtu20cis_4_1_13_delete.rules.j2 b/templates/audit/ubtu20cis_4_1_13_delete.rules.j2 deleted file mode 100644 index 7a97b229..00000000 --- a/templates/audit/ubtu20cis_4_1_13_delete.rules.j2 +++ /dev/null @@ -1,4 +0,0 @@ --a always,exit -F arch=b32 -S unlink -S unlinkat -S rename -S renameat -F auid>=1000 -F auid!=4294967295 -k delete -{% if ansible_architecture == 'x86_64' -%} --a always,exit -F arch=b64 -S unlink -S unlinkat -S rename -S renameat -F auid>=1000 -F auid!=4294967295 -k delete -{% endif %} diff --git a/templates/audit/ubtu20cis_4_1_16_modules.rules.j2 b/templates/audit/ubtu20cis_4_1_16_modules.rules.j2 deleted file mode 100644 index bc1813b3..00000000 --- a/templates/audit/ubtu20cis_4_1_16_modules.rules.j2 +++ /dev/null @@ -1,9 +0,0 @@ --w /sbin/insmod -p x -k modules --w /sbin/rmmod -p x -k modules --w /sbin/modprobe -p x -k modules -{% if ansible_architecture != 'x86_64' -%} --a always,exit -F arch=b32 -S init_module -S delete_module -k modules -{% endif %} -{% if ansible_architecture == 'x86_64' -%} --a always,exit -F arch=b64 -S init_module -S delete_module -k modules -{% endif %} diff --git a/templates/audit/ubtu20cis_4_1_3_timechange.rules.j2 b/templates/audit/ubtu20cis_4_1_3_timechange.rules.j2 deleted file mode 100644 index 7da9f953..00000000 --- a/templates/audit/ubtu20cis_4_1_3_timechange.rules.j2 +++ /dev/null @@ -1,7 +0,0 @@ -{% if ansible_architecture == 'x86_64' -%} --a always,exit -F arch=b64 -S adjtimex -S settimeofday -k time-change --a always,exit -F arch=b64 -S clock_settime -k time-change -{% endif %} --a always,exit -F arch=b32 -S adjtimex -S settimeofday -S stime -k time-change --a always,exit -F arch=b32 -S clock_settime -k time-change --w /etc/localtime -p wa -k time-change diff --git a/templates/audit/ubtu20cis_4_1_5_systemlocale.rules.j2 b/templates/audit/ubtu20cis_4_1_5_systemlocale.rules.j2 deleted file mode 100644 index f56b572b..00000000 --- a/templates/audit/ubtu20cis_4_1_5_systemlocale.rules.j2 +++ /dev/null @@ -1,8 +0,0 @@ -{% if ansible_architecture == 'x86_64' -%} --a always,exit -F arch=b64 -S sethostname -S setdomainname -k system-locale -{% endif %} --a always,exit -F arch=b32 -S sethostname -S setdomainname -k system-locale --w /etc/issue -p wa -k system-locale --w /etc/issue.net -p wa -k system-locale --w /etc/hosts -p wa -k system-locale --w /etc/network -p wa -k system-locale diff --git a/templates/audit/ubtu20cis_4_1_7_logins.rules.j2 b/templates/audit/ubtu20cis_4_1_7_logins.rules.j2 deleted file mode 100644 index b38f8239..00000000 --- a/templates/audit/ubtu20cis_4_1_7_logins.rules.j2 +++ /dev/null @@ -1,3 +0,0 @@ --w /var/log/faillog -p wa -k 
logins --w /var/log/lastlog -p wa -k logins --w /var/log/tallylog -p wa -k logins diff --git a/templates/audit/ubtu20cis_4_1_8_session.rules.j2 b/templates/audit/ubtu20cis_4_1_8_session.rules.j2 deleted file mode 100644 index 51d7254f..00000000 --- a/templates/audit/ubtu20cis_4_1_8_session.rules.j2 +++ /dev/null @@ -1,3 +0,0 @@ --w /var/run/utmp -p wa -k session --w /var/log/wtmp -p wa -k logins --w /var/log/btmp -p wa -k logins diff --git a/templates/audit/ubtu20cis_4_1_9_permmod.rules.j2 b/templates/audit/ubtu20cis_4_1_9_permmod.rules.j2 deleted file mode 100644 index a3974942..00000000 --- a/templates/audit/ubtu20cis_4_1_9_permmod.rules.j2 +++ /dev/null @@ -1,8 +0,0 @@ -{% if ansible_architecture == 'x86_64' -%} --a always,exit -F arch=b64 -S chmod -S fchmod -S fchmodat -F auid>=1000 -F auid!=4294967295 -k perm_mod --a always,exit -F arch=b64 -S chown -S fchown -S fchownat -S lchown -F auid>=1000 -F auid!=4294967295 -k perm_mod --a always,exit -F arch=b64 -S setxattr -S lsetxattr -S fsetxattr -S removexattr -S lremovexattr -S fremovexattr -F auid>=1000 -F auid!=4294967295 -k perm_mod -{% endif %} --a always,exit -F arch=b32 -S chmod -S fchmod -S fchmodat -F auid>=1000 -F auid!=4294967295 -k perm_mod --a always,exit -F arch=b32 -S chown -S fchown -S fchownat -S lchown -F auid>=1000 -F auid!=4294967295 -k perm_mod --a always,exit -F arch=b32 -S setxattr -S lsetxattr -S fsetxattr -S removexattr -S lremovexattr -S fremovexattr -F auid>=1000 -F auid!=4294967295 -k perm_mod diff --git a/templates/audit/ubtu20cis_5_2_3_10_mount.rules.j2 b/templates/audit/ubtu20cis_5_2_3_10_mount.rules.j2 new file mode 100644 index 00000000..ad77baf5 --- /dev/null +++ b/templates/audit/ubtu20cis_5_2_3_10_mount.rules.j2 @@ -0,0 +1,4 @@ +-a always,exit -F arch=b32 -S mount -F auid>=1000 -F auid!=unset -k mounts +{% if ansible_architecture == 'x86_64' -%} +-a always,exit -F arch=b64 -S mount -F auid>=1000 -F auid!=unset -k mounts +{% endif %} diff --git a/templates/audit/ubtu20cis_5_2_3_11_session.rules.j2 b/templates/audit/ubtu20cis_5_2_3_11_session.rules.j2 new file mode 100644 index 00000000..f82963da --- /dev/null +++ b/templates/audit/ubtu20cis_5_2_3_11_session.rules.j2 @@ -0,0 +1,3 @@ +-w /var/run/utmp -p wa -k session +-w /var/log/wtmp -p wa -k session +-w /var/log/btmp -p wa -k session diff --git a/templates/audit/ubtu20cis_5_2_3_12_logins.rules.j2 b/templates/audit/ubtu20cis_5_2_3_12_logins.rules.j2 new file mode 100644 index 00000000..c26bc611 --- /dev/null +++ b/templates/audit/ubtu20cis_5_2_3_12_logins.rules.j2 @@ -0,0 +1,2 @@ +-w /var/log/faillock -p wa -k logins +-w /var/log/lastlog -p wa -k logins diff --git a/templates/audit/ubtu20cis_5_2_3_13_delete.rules.j2 b/templates/audit/ubtu20cis_5_2_3_13_delete.rules.j2 new file mode 100644 index 00000000..21f869af --- /dev/null +++ b/templates/audit/ubtu20cis_5_2_3_13_delete.rules.j2 @@ -0,0 +1,4 @@ +-a always,exit -F arch=b32 -S rename,unlink,unlinkat,renameat -F auid>=1000 -F auid!=unset -k delete +{% if ansible_architecture == 'x86_64' -%} +-a always,exit -F arch=b64 -S rename,unlink,unlinkat,renameat -F auid>=1000 -F auid!=unset -k delete +{% endif %} diff --git a/templates/audit/ubtu20cis_4_1_6_macpolicy.rules.j2 b/templates/audit/ubtu20cis_5_2_3_14_macpolicy.rules.j2 similarity index 100% rename from templates/audit/ubtu20cis_4_1_6_macpolicy.rules.j2 rename to templates/audit/ubtu20cis_5_2_3_14_macpolicy.rules.j2 diff --git a/templates/audit/ubtu20cis_5_2_3_19_modules.rules.j2 b/templates/audit/ubtu20cis_5_2_3_19_modules.rules.j2 new file mode 100644 
index 00000000..853bb53a --- /dev/null +++ b/templates/audit/ubtu20cis_5_2_3_19_modules.rules.j2 @@ -0,0 +1,4 @@ +{% if ansible_architecture == 'x86_64' -%} +-a always,exit -F arch=b64 -S init_module,finit_module,delete_module,create_module,query_module -F auid>=1000 -F auid!=unset -k kernel_modules +{% endif %} +-a always,exit -F path=/usr/bin/kmod -F perm=x -F auid>=1000 -F auid!=unset -k kernel_modules diff --git a/templates/audit/ubtu20cis_4_1_14_scope.rules.j2 b/templates/audit/ubtu20cis_5_2_3_1_scope.rules.j2 similarity index 100% rename from templates/audit/ubtu20cis_4_1_14_scope.rules.j2 rename to templates/audit/ubtu20cis_5_2_3_1_scope.rules.j2 diff --git a/templates/audit/ubtu20cis_4_1_17_99finalize.rules.j2 b/templates/audit/ubtu20cis_5_2_3_20_finalize.rules.j2 similarity index 100% rename from templates/audit/ubtu20cis_4_1_17_99finalize.rules.j2 rename to templates/audit/ubtu20cis_5_2_3_20_finalize.rules.j2 diff --git a/templates/audit/ubtu20cis_4_1_15_actions.rules.j2 b/templates/audit/ubtu20cis_5_2_3_2_user_emulation.rules.j2 similarity index 68% rename from templates/audit/ubtu20cis_4_1_15_actions.rules.j2 rename to templates/audit/ubtu20cis_5_2_3_2_user_emulation.rules.j2 index ef134a98..5f7e35e0 100644 --- a/templates/audit/ubtu20cis_4_1_15_actions.rules.j2 +++ b/templates/audit/ubtu20cis_5_2_3_2_user_emulation.rules.j2 @@ -1,4 +1,4 @@ --a always,exit -F arch=b32 -C euid!=uid -F euid=0 -F auid>=1000 -F auid!=4294967295 -S execve -k actions +-a always,exit -F arch=b32 -C euid!=uid -F euid=0 -F auid>=1000 -F auid!=unset -S execve -k user_emulation {% if ansible_architecture == 'x86_64' -%} --a always,exit -F arch=b64 -C euid!=uid -F euid=0 -F auid>=1000 -F auid!=4294967295 -S execve -k actions +-a always,exit -F arch=b64 -C euid!=uid -F euid=0 -F auid>=1000 -F auid!=unset -S execve -k user_emulation {% endif %} diff --git a/templates/audit/ubtu20cis_5_2_3_3_sudo_log.rules.j2 b/templates/audit/ubtu20cis_5_2_3_3_sudo_log.rules.j2 new file mode 100644 index 00000000..b67c0670 --- /dev/null +++ b/templates/audit/ubtu20cis_5_2_3_3_sudo_log.rules.j2 @@ -0,0 +1 @@ +-w {{ ubtu20cis_sudo_logfile }} -p wa -k sudo_log_file diff --git a/templates/audit/ubtu20cis_5_2_3_4_timechange.rules.j2 b/templates/audit/ubtu20cis_5_2_3_4_timechange.rules.j2 new file mode 100644 index 00000000..c670cecd --- /dev/null +++ b/templates/audit/ubtu20cis_5_2_3_4_timechange.rules.j2 @@ -0,0 +1,5 @@ +{% if ansible_architecture == 'x86_64' -%} +-a always,exit -F arch=b64 -S adjtimex,settimeofday,clock_settime -k time-change +{% endif %} +-a always,exit -F arch=b32 -S adjtimex,settimeofday,clock_settime -k time-change +-w /etc/localtime -p wa -k time-change diff --git a/templates/audit/ubtu20cis_5_2_3_5_systemlocale.rules.j2 b/templates/audit/ubtu20cis_5_2_3_5_systemlocale.rules.j2 new file mode 100644 index 00000000..aa7be2ad --- /dev/null +++ b/templates/audit/ubtu20cis_5_2_3_5_systemlocale.rules.j2 @@ -0,0 +1,9 @@ +{% if ansible_architecture == 'x86_64' -%} +-a always,exit -F arch=b64 -S sethostname,setdomainname -k system-locale +{% endif %} +-a always,exit -F arch=b32 -S sethostname,setdomainname -k system-locale +-w /etc/issue -p wa -k system-locale +-w /etc/issue.net -p wa -k system-locale +-w /etc/hosts -p wa -k system-locale +-w /etc/networks -p wa -k system-locale +-w /etc/network/ -p wa -k system-locale diff --git a/templates/audit/ubtu20cis_4_1_11_privileged.rules.j2 b/templates/audit/ubtu20cis_5_2_3_6_privileged.rules.j2 similarity index 100% rename from 
templates/audit/ubtu20cis_4_1_11_privileged.rules.j2 rename to templates/audit/ubtu20cis_5_2_3_6_privileged.rules.j2 diff --git a/templates/audit/ubtu20cis_5_2_3_7_access.rules.j2 b/templates/audit/ubtu20cis_5_2_3_7_access.rules.j2 new file mode 100644 index 00000000..13e3419b --- /dev/null +++ b/templates/audit/ubtu20cis_5_2_3_7_access.rules.j2 @@ -0,0 +1,6 @@ +{% if ansible_architecture == 'x86_64' -%} +-a always,exit -F arch=b64 -S creat,open,openat,truncate,ftruncate -F exit=-EACCES -F auid>=1000 -F auid!=unset -k access +-a always,exit -F arch=b64 -S creat,open,openat,truncate,ftruncate -F exit=-EPERM -F auid>=1000 -F auid!=unset -k access +{% endif %} +-a always,exit -F arch=b32 -S creat,open,openat,truncate,ftruncate -F exit=-EACCES -F auid>=1000 -F auid!=unset -k access +-a always,exit -F arch=b32 -S creat,open,openat,truncate,ftruncate -F exit=-EPERM -F auid>=1000 -F auid!=unset -k access diff --git a/templates/audit/ubtu20cis_4_1_4_identity.rules.j2 b/templates/audit/ubtu20cis_5_2_3_8_identity.rules.j2 similarity index 100% rename from templates/audit/ubtu20cis_4_1_4_identity.rules.j2 rename to templates/audit/ubtu20cis_5_2_3_8_identity.rules.j2 diff --git a/templates/audit/ubtu20cis_5_2_3_9_permmod.rules.j2 b/templates/audit/ubtu20cis_5_2_3_9_permmod.rules.j2 new file mode 100644 index 00000000..7089819b --- /dev/null +++ b/templates/audit/ubtu20cis_5_2_3_9_permmod.rules.j2 @@ -0,0 +1,8 @@ +{% if ansible_architecture == 'x86_64' -%} +-a always,exit -F arch=b64 -S chmod,fchmod,fchmodat -F auid>=1000 -F auid!=unset -k perm_mod +-a always,exit -F arch=b64 -S chown,fchown,lchown,fchownat -F auid>=1000 -F auid!=unset -k perm_mod +-a always,exit -F arch=b64 -S setxattr,lsetxattr,fsetxattr,removexattr,lremovexattr,fremovexattr -F auid>=1000 -F auid!=unset -k perm_mod +{% endif %} +-a always,exit -F arch=b32 -S chmod,fchmod,fchmodat -F auid>=1000 -F auid!=unset -k perm_mod +-a always,exit -F arch=b32 -S chown,fchown,lchown,fchownat -F auid>=1000 -F auid!=unset -k perm_mod +-a always,exit -F arch=b32 -S setxattr,lsetxattr,fsetxattr,removexattr,lremovexattr,fremovexattr -F auid>=1000 -F auid!=unset -k perm_mod diff --git a/templates/audit/ubtu20cis_5_2_3_x_perm_chng.rules.j2 b/templates/audit/ubtu20cis_5_2_3_x_perm_chng.rules.j2 new file mode 100644 index 00000000..c7bc845f --- /dev/null +++ b/templates/audit/ubtu20cis_5_2_3_x_perm_chng.rules.j2 @@ -0,0 +1,12 @@ +{% if ubtu20cis_rule_5_2_3_15 %} +-a always,exit -F path=/usr/bin/chcon -F perm=x -F auid>=1000 -F auid!=unset -k perm_chng +{% endif %} +{% if ubtu20cis_rule_5_2_3_16 %} +-a always,exit -F path=/usr/bin/setfacl -F perm=x -F auid>=1000 -F auid!=unset -k perm_chng +{% endif %} +{% if ubtu20cis_rule_5_2_3_17 %} +-a always,exit -F path=/usr/bin/chacl -F perm=x -F auid>=1000 -F auid!=unset -k perm_chng +{% endif %} +{% if ubtu20cis_rule_5_2_3_18 %} +-a always,exit -F path=/usr/sbin/usermod -F perm=x -F auid>=1000 -F auid!=unset -k usermod +{% endif %} diff --git a/templates/etc/chrony/sources.d/time.sources.j2 b/templates/etc/chrony/sources.d/time.sources.j2 new file mode 100644 index 00000000..4c5c6b8c --- /dev/null +++ b/templates/etc/chrony/sources.d/time.sources.j2 @@ -0,0 +1,14 @@ +## Ansible controlled file +# Added as part of ansible-lockdown CIS baseline +# provided by MindPointGroup LLC +{% if ubtu20cis_chrony_timesource == 'pool' %} +# pool chosen as source for time sync +{% for pool in ubtu20cis_time_pool %} +pool {{ pool.name }} {{ pool.options }} +{% endfor %} +{% elif ubtu20cis_chrony_timesource == 'servers' %} +# 
Servers chosen as source for time sync +{% for server in ubtu20cis_time_servers %} +server {{ server.name }} {{ server.options }} +{% endfor %} +{% endif %} diff --git a/templates/etc/systemd/system/tmp.mount.j2 b/templates/etc/systemd/system/tmp.mount.j2 new file mode 100644 index 00000000..dc926f4e --- /dev/null +++ b/templates/etc/systemd/system/tmp.mount.j2 @@ -0,0 +1,13 @@ +[Unit] +Description=Temporary Directory /tmp +ConditionPathIsSymbolicLink=!/tmp +DefaultDependencies=no +Conflicts=umount.target +Before=local-fs.target umount.target +After=swap.target + +[Mount] +What=tmpfs +Where=/tmp +Type=tmpfs +Options={% if ubtu20cis_rule_1_1_2_2 %}nodev,{% endif %}{% if ubtu20cis_rule_1_1_2_3 %}noexec,{% endif %}{% if ubtu20cis_rule_1_1_2_4 %}nosuid{% endif %} diff --git a/templates/etc/systemd/timesyncd.conf.d/50-timesyncd.conf.j2 b/templates/etc/systemd/timesyncd.conf.d/50-timesyncd.conf.j2 new file mode 100644 index 00000000..7442cd42 --- /dev/null +++ b/templates/etc/systemd/timesyncd.conf.d/50-timesyncd.conf.j2 @@ -0,0 +1,11 @@ +## Ansible controlled file +# Added as part of ansible-lockdown CIS baseline +# provided by MindPointGroup LLC + +[Time] + +NTP={% for pool in ubtu20cis_time_pool %}{{ pool.name }} {% endfor %} + + +FallbackNTP={% for servers in ubtu20cis_time_servers %}{{ servers.name }} {% endfor %} +
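
Note: the new chrony and timesyncd templates above both iterate over ubtu20cis_time_pool and ubtu20cis_time_servers, and each entry is expected to carry a name and an options string, with ubtu20cis_chrony_timesource selecting the pool or servers branch. A minimal sketch of how those variables might be defined is below; the variable names and keys come from the templates themselves, while the specific hosts and option strings are illustrative assumptions, not the role's shipped defaults.

# group_vars/all.yml (illustrative values only; keys match the templates above)
ubtu20cis_chrony_timesource: pool        # 'pool' or 'servers', as tested in time.sources.j2
ubtu20cis_time_pool:
  - name: 2.pool.ntp.org                 # rendered as: pool 2.pool.ntp.org maxsources 4
    options: maxsources 4
ubtu20cis_time_servers:
  - name: 0.pool.ntp.org                 # rendered as: server 0.pool.ntp.org minpoll 8
    options: minpoll 8
  - name: 1.pool.ntp.org
    options: minpoll 8

With ubtu20cis_chrony_timesource set to 'pool', time.sources.j2 emits one "pool <name> <options>" line per pool entry; with 'servers' it emits one "server <name> <options>" line per server entry. The 50-timesyncd.conf.j2 template uses the same lists to build the space-separated NTP and FallbackNTP values.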