From 6493c2840fe54a4c6ca9b298f078e276baf9cabb Mon Sep 17 00:00:00 2001
From: AnsibleGuy
Date: Fri, 3 May 2024 20:24:40 +0200
Subject: [PATCH] method blocking

---
 README.md                                    | 18 +++++++++++++++---
 defaults/main/1_main.yml                     | 15 +++++++++++++--
 templates/etc/haproxy/conf.d/backend.cfg.j2  |  4 +++-
 templates/etc/haproxy/conf.d/frontend.cfg.j2 | 12 +++++++-----
 templates/etc/haproxy/conf.d/inc/security.j2 | 12 +++++++++---
 5 files changed, 47 insertions(+), 14 deletions(-)

diff --git a/README.md b/README.md
index 0fb1bfb..da1af87 100644
--- a/README.md
+++ b/README.md
@@ -61,9 +61,14 @@ ansible-galaxy install -r requirements.yml
 * **Default opt-ins**:
   * Frontend
-    * Redirect non SSL traffic to SSL if in HTTP mode
-    * Logging User-Agent
-    * Setting basic security-headers
+    * HTTP mode
+      * Redirect non SSL traffic to SSL
+      * Logging User-Agent
+      * Setting basic security-headers
+
+  * Backend
+    * HTTP mode
+      * Blocking TRACE & CONNECT methods

@@ -72,6 +77,7 @@ ansible-galaxy install -r requirements.yml
   * [ACME/LetsEncrypt](https://github.com/dehydrated-io/dehydrated)
   * [GeoIP Lookups](https://github.com/superstes/haproxy-geoip)
   * Blocking of well-known Script-Bots
+  * Blocking TRACE & CONNECT methods

 ----

@@ -118,6 +124,8 @@ ansible-galaxy install -r requirements.yml
 * **Info**: A very basic user-agent based Script- & Bad-Crawler-Bot blocking can be activated for frontends and backends. Check out the [defaults](https://github.com/ansibleguy/infra_haproxy/blob/latest/defaults/main/0_hardcoded.yml) for the list of bots that are blocked.
+
+* **Info**: You can easily restrict the HTTP methods allowed on a specific frontend or backend by setting `security.restrict_methods` to true and specifying `security.allow_only_methods`

 ----

@@ -198,6 +206,9 @@ haproxy:
       enable: true

       security:
+        restrict_methods: true
+        allow_only_methods: ['HEAD', 'GET', 'POST']
+
         # very basic filtering of bad bots based on user-agent matching
         block_script_bots: true
         block_bad_crawler_bots: true
@@ -306,6 +317,7 @@ ansible-playbook -K -D -i inventory/hosts.yml playbook.yml -e debug=yes
 ### Roadmap

 * Security - Basic bot flagging
+* Security - Basic rate limit (GET/HEAD and POST/PUT/DELETE separated)
 * 'Interface' for Dict to Map-File translation/creation
 * Option to easily Download & Integrate IPLists (*like Tor Exit nodes*)
 * Easy way to override the default error-files
diff --git a/defaults/main/1_main.yml b/defaults/main/1_main.yml
index fe13748..31ac9e2 100644
--- a/defaults/main/1_main.yml
+++ b/defaults/main/1_main.yml
@@ -45,9 +45,9 @@ defaults_haproxy:
     'stats timeout': '30s'
     ca-base: '/etc/ssl/certs'
     crt-base: '/etc/ssl/private'
-    ssl-default-bind-ciphers: 'ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384\
+    ssl-default-bind-ciphers: "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384\
       :ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:\
-      DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384'
+      DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384"
     ssl-default-bind-ciphersuites: 'TLS_AES_128_GCM_SHA256:TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256'
     ssl-default-bind-options: 'ssl-min-ver TLSv1.2 no-tls-tickets'

@@ -82,6 +82,12 @@ defaults_frontend:
   ssl_redirect: true
   security:
     headers: true
+
+    restrict_methods: false
+    allow_only_methods: ['HEAD', 'GET', 'POST']
+    # if 'restrict_methods' is disabled - this will still deny 'TRACE' & 'CONNECT' as they might open your server/services up to attacks
+    deny_dangerous_methods: false
+
     block_script_bots: false
     block_bad_crawler_bots: false
     block_status_code: 425
@@ -124,6 +130,11 @@ defaults_backend:
   ssl_verify: 'none'  # example: 'required ca-file /etc/ssl/certs/my_ca.crt verifyhost host01.intern'
   security:
+    restrict_methods: false
+    allow_only_methods: ['HEAD', 'GET', 'POST']
+    # if 'restrict_methods' is disabled - this will still deny 'TRACE' & 'CONNECT' as they might open your server/services up to attacks
+    deny_dangerous_methods: true
+
     block_script_bots: false
     block_bad_crawler_bots: false
     block_status_code: 425
diff --git a/templates/etc/haproxy/conf.d/backend.cfg.j2 b/templates/etc/haproxy/conf.d/backend.cfg.j2
index ac5e963..a81ab83 100644
--- a/templates/etc/haproxy/conf.d/backend.cfg.j2
+++ b/templates/etc/haproxy/conf.d/backend.cfg.j2
@@ -20,7 +20,9 @@ backend {{ name }}
 {% endif %}
 {% endif %}

-{% include "inc/security.j2" %}
+{% if cnf.mode == 'http' %}
+{% include "inc/security.j2" %}
+{% endif %}

 {% if cnf.lines | is_dict %}
 {% for section, lines in cnf.lines.items() %}
diff --git a/templates/etc/haproxy/conf.d/frontend.cfg.j2 b/templates/etc/haproxy/conf.d/frontend.cfg.j2
index 554198a..3e4f382 100644
--- a/templates/etc/haproxy/conf.d/frontend.cfg.j2
+++ b/templates/etc/haproxy/conf.d/frontend.cfg.j2
@@ -27,13 +27,15 @@ frontend {{ name }}
 {% include "inc/geoip.j2" %}
 {% endif %}

-{% if cnf.security.headers | bool %}
-{% for header, value in defaults_security_headers.items() %}
+{% if cnf.mode == 'http' %}
+{% if cnf.security.headers | bool %}
+{% for header, value in defaults_security_headers.items() %}
     http-response set-header {{ header }} "{{ value }}"
-{% endfor %}
-{% endif %}
+{% endfor %}
+{% endif %}

-{% include "inc/security.j2" %}
+{% include "inc/security.j2" %}
+{% endif %}

 {% if cnf.log.user_agent | bool %}
     http-request capture req.fhdr(User-Agent) len 200
diff --git a/templates/etc/haproxy/conf.d/inc/security.j2 b/templates/etc/haproxy/conf.d/inc/security.j2
index cb9e518..67fcbe3 100644
--- a/templates/etc/haproxy/conf.d/inc/security.j2
+++ b/templates/etc/haproxy/conf.d/inc/security.j2
@@ -1,4 +1,10 @@
+{% if cnf.security.restrict_methods | bool and cnf.security.allow_only_methods | length > 0 %}
+    http-request deny status 405 default-errorfiles if !{ method {{ cnf.security.allow_only_methods | join(' ') }} }
+{% elif cnf.security.deny_dangerous_methods | bool %}
+    http-request deny status 405 default-errorfiles if { method TRACE CONNECT }
+{% endif %}
+
 {% if cnf.security.block_script_bots | bool %}
     # block well-known script-bots
     http-request deny status {{ cnf.security.block_status_code }} default-errorfiles if !{ req.fhdr(User-Agent) -m found }
@@ -6,15 +12,15 @@
     http-request deny status {{ cnf.security.block_status_code }} default-errorfiles if { req.fhdr(User-Agent) -m str -i {{ HAPROXY_HC.user_agents.script.full | ensure_list | join(' ') }} }
 {% endif %}
 {% if HAPROXY_HC.user_agents.script.sub | length > 0 %}
-    http-request deny status {{ cnf.security.block_status_code }} default-errorfiles if { req.fhdr(User-Agent) -m sub -i {{ HAPROXY_HC.user_agents.script.sub | ensure_list | join(' ') }} }
+    http-request deny status {{ cnf.security.block_status_code }} default-errorfiles if { req.fhdr(User-Agent) -m sub -i {{ HAPROXY_HC.user_agents.script.sub | ensure_list | join(' ') }} }
 {% endif %}
 {% endif %}

 {% if cnf.security.block_bad_crawler_bots | bool %}
     # block well-known bad-crawler-bots
 {% if HAPROXY_HC.user_agents.bad_crawlers.full | length > 0 %}
-    http-request deny status {{ cnf.security.block_status_code }} default-errorfiles if { req.fhdr(User-Agent) -m str -i {{ HAPROXY_HC.user_agents.bad_crawlers.full | ensure_list | join(' ') }} }
+    http-request deny status {{ cnf.security.block_status_code }} default-errorfiles if { req.fhdr(User-Agent) -m str -i {{ HAPROXY_HC.user_agents.bad_crawlers.full | ensure_list | join(' ') }} }
 {% endif %}
 {% if HAPROXY_HC.user_agents.bad_crawlers.sub | length > 0 %}
-    http-request deny status {{ cnf.security.block_status_code }} default-errorfiles if { req.fhdr(User-Agent) -m sub -i {{ HAPROXY_HC.user_agents.bad_crawlers.sub | ensure_list | join(' ') }} }
+    http-request deny status {{ cnf.security.block_status_code }} default-errorfiles if { req.fhdr(User-Agent) -m sub -i {{ HAPROXY_HC.user_agents.bad_crawlers.sub | ensure_list | join(' ') }} }
 {% endif %}
 {% endif %}
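
For illustration, here is a rough sketch of what the new inc/security.j2 block renders into a frontend/backend section (based on the template above; exact indentation may differ). With the README example settings (restrict_methods: true, allow_only_methods: ['HEAD', 'GET', 'POST']) it should emit roughly:

    # deny any request whose method is not in the allow-list
    http-request deny status 405 default-errorfiles if !{ method HEAD GET POST }

With restrict_methods left disabled but deny_dangerous_methods enabled (the new backend default), roughly:

    # deny only the dangerous methods
    http-request deny status 405 default-errorfiles if { method TRACE CONNECT }

The { method ... } condition is an anonymous ACL on HAProxy's method sample fetch, so the first rule rejects every request whose method is outside the listed set with HTTP 405, while the second rejects only TRACE and CONNECT.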