From 5897b97065cea3b40fc432450ca89dcf7f8f7611 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Kleinekath=C3=B6fer?= Date: Fri, 6 Jan 2023 15:35:06 +0100 Subject: [PATCH 001/126] Renamed mail notification method for watchdog to be more general --- data/Dockerfiles/watchdog/watchdog.sh | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/data/Dockerfiles/watchdog/watchdog.sh b/data/Dockerfiles/watchdog/watchdog.sh index 231d0ecd..517d160e 100755 --- a/data/Dockerfiles/watchdog/watchdog.sh +++ b/data/Dockerfiles/watchdog/watchdog.sh @@ -97,7 +97,7 @@ log_msg() { echo $(date) $(printf '%s\n' "${1}") } -function mail_error() { +function notify_error() { THROTTLE= [[ -z ${1} ]] && return 1 # If exists, body will be the content of "/tmp/${1}", even if ${2} is set @@ -197,7 +197,7 @@ get_container_ip() { # One-time check if grep -qi "$(echo ${IPV6_NETWORK} | cut -d: -f1-3)" <<< "$(ip a s)"; then if [[ -z "$(get_ipv6)" ]]; then - mail_error "ipv6-config" "enable_ipv6 is true in docker-compose.yml, but an IPv6 link could not be established. Please verify your IPv6 connection." + notify_error "ipv6-config" "enable_ipv6 is true in docker-compose.yml, but an IPv6 link could not be established. Please verify your IPv6 connection." fi fi @@ -747,7 +747,7 @@ olefy_checks() { # Notify about start if [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]]; then - mail_error "watchdog-mailcow" "Watchdog started monitoring mailcow." + notify_error "watchdog-mailcow" "Watchdog started monitoring mailcow." fi # Create watchdog agents @@ -1029,33 +1029,33 @@ while true; do fi if [[ ${com_pipe_answer} == "ratelimit" ]]; then log_msg "At least one ratelimit was applied" - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && mail_error "${com_pipe_answer}" + [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" elif [[ ${com_pipe_answer} == "mail_queue_status" ]]; then log_msg "Mail queue status is critical" - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && mail_error "${com_pipe_answer}" + [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" elif [[ ${com_pipe_answer} == "external_checks" ]]; then log_msg "Your mailcow is an open relay!" # Define $2 to override message text, else print service was restarted at ... - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && mail_error "${com_pipe_answer}" "Please stop mailcow now and check your network configuration!" + [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" "Please stop mailcow now and check your network configuration!" elif [[ ${com_pipe_answer} == "mysql_repl_checks" ]]; then log_msg "MySQL replication is not working properly" # Define $2 to override message text, else print service was restarted at ... # Once mail per 10 minutes - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && mail_error "${com_pipe_answer}" "Please check the SQL replication status" 600 + [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" "Please check the SQL replication status" 600 elif [[ ${com_pipe_answer} == "dovecot_repl_checks" ]]; then log_msg "Dovecot replication is not working properly" # Define $2 to override message text, else print service was restarted at ... # Once mail per 10 minutes - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && mail_error "${com_pipe_answer}" "Please check the Dovecot replicator status" 600 + [[ ! 
-z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" "Please check the Dovecot replicator status" 600 elif [[ ${com_pipe_answer} == "certcheck" ]]; then log_msg "Certificates are about to expire" # Define $2 to override message text, else print service was restarted at ... # Only mail once a day - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && mail_error "${com_pipe_answer}" "Please renew your certificate" 86400 + [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" "Please renew your certificate" 86400 elif [[ ${com_pipe_answer} == "acme-mailcow" ]]; then log_msg "acme-mailcow did not complete successfully" # Define $2 to override message text, else print service was restarted at ... - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && mail_error "${com_pipe_answer}" "Please check acme-mailcow for further information." + [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" "Please check acme-mailcow for further information." elif [[ ${com_pipe_answer} == "fail2ban" ]]; then F2B_RES=($(timeout 4s ${REDIS_CMDLINE} --raw GET F2B_RES 2> /dev/null)) if [[ ! -z "${F2B_RES}" ]]; then @@ -1065,7 +1065,7 @@ while true; do log_msg "Banned ${host}" rm /tmp/fail2ban 2> /dev/null timeout 2s whois "${host}" > /tmp/fail2ban - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && [[ ${WATCHDOG_NOTIFY_BAN} =~ ^([yY][eE][sS]|[yY])+$ ]] && mail_error "${com_pipe_answer}" "IP ban: ${host}" + [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && [[ ${WATCHDOG_NOTIFY_BAN} =~ ^([yY][eE][sS]|[yY])+$ ]] && notify_error "${com_pipe_answer}" "IP ban: ${host}" done fi elif [[ ${com_pipe_answer} =~ .+-mailcow ]]; then @@ -1085,7 +1085,7 @@ while true; do else log_msg "Sending restart command to ${CONTAINER_ID}..." curl --silent --insecure -XPOST https://dockerapi/containers/${CONTAINER_ID}/restart - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && mail_error "${com_pipe_answer}" + [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" log_msg "Wait for restarted container to settle and continue watching..." sleep 35 fi From 9ef14a20d17ebdeaa49249e66068699827040fb7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Kleinekath=C3=B6fer?= Date: Fri, 6 Jan 2023 15:43:43 +0100 Subject: [PATCH 002/126] Centralized checking of enabled notifications --- data/Dockerfiles/watchdog/watchdog.sh | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/data/Dockerfiles/watchdog/watchdog.sh b/data/Dockerfiles/watchdog/watchdog.sh index 517d160e..e6e70ba7 100755 --- a/data/Dockerfiles/watchdog/watchdog.sh +++ b/data/Dockerfiles/watchdog/watchdog.sh @@ -98,6 +98,8 @@ log_msg() { } function notify_error() { + # Check if one of the notification options is enabled + [[ -z ${WATCHDOG_NOTIFY_EMAIL} ]] && [[ -z ${WATCHDOG_NOTIFY_WEBHOOK} ]] && return 0 THROTTLE= [[ -z ${1} ]] && return 1 # If exists, body will be the content of "/tmp/${1}", even if ${2} is set @@ -746,9 +748,7 @@ olefy_checks() { } # Notify about start -if [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]]; then - notify_error "watchdog-mailcow" "Watchdog started monitoring mailcow." -fi +notify_error "watchdog-mailcow" "Watchdog started monitoring mailcow." # Create watchdog agents @@ -1029,33 +1029,33 @@ while true; do fi if [[ ${com_pipe_answer} == "ratelimit" ]]; then log_msg "At least one ratelimit was applied" - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" + notify_error "${com_pipe_answer}" elif [[ ${com_pipe_answer} == "mail_queue_status" ]]; then log_msg "Mail queue status is critical" - [[ ! 
-z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" + notify_error "${com_pipe_answer}" elif [[ ${com_pipe_answer} == "external_checks" ]]; then log_msg "Your mailcow is an open relay!" # Define $2 to override message text, else print service was restarted at ... - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" "Please stop mailcow now and check your network configuration!" + notify_error "${com_pipe_answer}" "Please stop mailcow now and check your network configuration!" elif [[ ${com_pipe_answer} == "mysql_repl_checks" ]]; then log_msg "MySQL replication is not working properly" # Define $2 to override message text, else print service was restarted at ... # Once mail per 10 minutes - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" "Please check the SQL replication status" 600 + notify_error "${com_pipe_answer}" "Please check the SQL replication status" 600 elif [[ ${com_pipe_answer} == "dovecot_repl_checks" ]]; then log_msg "Dovecot replication is not working properly" # Define $2 to override message text, else print service was restarted at ... # Once mail per 10 minutes - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" "Please check the Dovecot replicator status" 600 + notify_error "${com_pipe_answer}" "Please check the Dovecot replicator status" 600 elif [[ ${com_pipe_answer} == "certcheck" ]]; then log_msg "Certificates are about to expire" # Define $2 to override message text, else print service was restarted at ... # Only mail once a day - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" "Please renew your certificate" 86400 + notify_error "${com_pipe_answer}" "Please renew your certificate" 86400 elif [[ ${com_pipe_answer} == "acme-mailcow" ]]; then log_msg "acme-mailcow did not complete successfully" # Define $2 to override message text, else print service was restarted at ... - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" "Please check acme-mailcow for further information." + notify_error "${com_pipe_answer}" "Please check acme-mailcow for further information." elif [[ ${com_pipe_answer} == "fail2ban" ]]; then F2B_RES=($(timeout 4s ${REDIS_CMDLINE} --raw GET F2B_RES 2> /dev/null)) if [[ ! -z "${F2B_RES}" ]]; then @@ -1065,7 +1065,7 @@ while true; do log_msg "Banned ${host}" rm /tmp/fail2ban 2> /dev/null timeout 2s whois "${host}" > /tmp/fail2ban - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && [[ ${WATCHDOG_NOTIFY_BAN} =~ ^([yY][eE][sS]|[yY])+$ ]] && notify_error "${com_pipe_answer}" "IP ban: ${host}" + [[ ${WATCHDOG_NOTIFY_BAN} =~ ^([yY][eE][sS]|[yY])+$ ]] && notify_error "${com_pipe_answer}" "IP ban: ${host}" done fi elif [[ ${com_pipe_answer} =~ .+-mailcow ]]; then @@ -1085,7 +1085,7 @@ while true; do else log_msg "Sending restart command to ${CONTAINER_ID}..." curl --silent --insecure -XPOST https://dockerapi/containers/${CONTAINER_ID}/restart - [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && notify_error "${com_pipe_answer}" + notify_error "${com_pipe_answer}" log_msg "Wait for restarted container to settle and continue watching..." 
sleep 35 fi @@ -1095,3 +1095,4 @@ while true; do kill -USR1 ${BACKGROUND_TASKS[*]} fi done + From fe8131f7430ec3d2b97793587bcbf88bd058a67a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Kleinekath=C3=B6fer?= Date: Fri, 6 Jan 2023 15:52:36 +0100 Subject: [PATCH 003/126] Only sent mail if enabled --- data/Dockerfiles/watchdog/watchdog.sh | 63 ++++++++++++++------------- 1 file changed, 33 insertions(+), 30 deletions(-) diff --git a/data/Dockerfiles/watchdog/watchdog.sh b/data/Dockerfiles/watchdog/watchdog.sh index e6e70ba7..5d962309 100755 --- a/data/Dockerfiles/watchdog/watchdog.sh +++ b/data/Dockerfiles/watchdog/watchdog.sh @@ -124,37 +124,40 @@ function notify_error() { else SUBJECT="${WATCHDOG_SUBJECT}: ${1}" fi - IFS=',' read -r -a MAIL_RCPTS <<< "${WATCHDOG_NOTIFY_EMAIL}" - for rcpt in "${MAIL_RCPTS[@]}"; do - RCPT_DOMAIN= - RCPT_MX= - RCPT_DOMAIN=$(echo ${rcpt} | awk -F @ {'print $NF'}) - CHECK_FOR_VALID_MX=$(dig +short ${RCPT_DOMAIN} mx) - if [[ -z ${CHECK_FOR_VALID_MX} ]]; then - log_msg "Cannot determine MX for ${rcpt}, skipping email notification..." - return 1 - fi - [ -f "/tmp/${1}" ] && BODY="/tmp/${1}" - timeout 10s ./smtp-cli --missing-modules-ok \ - "${SMTP_VERBOSE}" \ - --charset=UTF-8 \ - --subject="${SUBJECT}" \ - --body-plain="${BODY}" \ - --add-header="X-Priority: 1" \ - --to=${rcpt} \ - --from="watchdog@${MAILCOW_HOSTNAME}" \ - --hello-host=${MAILCOW_HOSTNAME} \ - --ipv4 - if [[ $? -eq 1 ]]; then # exit code 1 is fine - log_msg "Sent notification email to ${rcpt}" - else - if [[ "${SMTP_VERBOSE}" == "" ]]; then - log_msg "Error while sending notification email to ${rcpt}. You can enable verbose logging by setting 'WATCHDOG_VERBOSE=y' in mailcow.conf." - else - log_msg "Error while sending notification email to ${rcpt}." + + if [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]]; then + IFS=',' read -r -a MAIL_RCPTS <<< "${WATCHDOG_NOTIFY_EMAIL}" + for rcpt in "${MAIL_RCPTS[@]}"; do + RCPT_DOMAIN= + RCPT_MX= + RCPT_DOMAIN=$(echo ${rcpt} | awk -F @ {'print $NF'}) + CHECK_FOR_VALID_MX=$(dig +short ${RCPT_DOMAIN} mx) + if [[ -z ${CHECK_FOR_VALID_MX} ]]; then + log_msg "Cannot determine MX for ${rcpt}, skipping email notification..." + return 1 fi - fi - done + [ -f "/tmp/${1}" ] && BODY="/tmp/${1}" + timeout 10s ./smtp-cli --missing-modules-ok \ + "${SMTP_VERBOSE}" \ + --charset=UTF-8 \ + --subject="${SUBJECT}" \ + --body-plain="${BODY}" \ + --add-header="X-Priority: 1" \ + --to=${rcpt} \ + --from="watchdog@${MAILCOW_HOSTNAME}" \ + --hello-host=${MAILCOW_HOSTNAME} \ + --ipv4 + if [[ $? -eq 1 ]]; then # exit code 1 is fine + log_msg "Sent notification email to ${rcpt}" + else + if [[ "${SMTP_VERBOSE}" == "" ]]; then + log_msg "Error while sending notification email to ${rcpt}. You can enable verbose logging by setting 'WATCHDOG_VERBOSE=y' in mailcow.conf." + else + log_msg "Error while sending notification email to ${rcpt}." 
+ fi + fi + done + fi } get_container_ip() { From 3e69304f0f9c22abd8b777d66c8a1099a3d2ef12 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Kleinekath=C3=B6fer?= Date: Fri, 6 Jan 2023 16:25:18 +0100 Subject: [PATCH 004/126] Send webhook --- data/Dockerfiles/watchdog/watchdog.sh | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/data/Dockerfiles/watchdog/watchdog.sh b/data/Dockerfiles/watchdog/watchdog.sh index 5d962309..741417c5 100755 --- a/data/Dockerfiles/watchdog/watchdog.sh +++ b/data/Dockerfiles/watchdog/watchdog.sh @@ -125,6 +125,7 @@ function notify_error() { SUBJECT="${WATCHDOG_SUBJECT}: ${1}" fi + # Send mail notification if enabled if [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]]; then IFS=',' read -r -a MAIL_RCPTS <<< "${WATCHDOG_NOTIFY_EMAIL}" for rcpt in "${MAIL_RCPTS[@]}"; do @@ -158,6 +159,23 @@ function notify_error() { fi done fi + + # Send webhook notification if enabled + if [[ ! -z ${WATCHDOG_NOTIFY_WEBHOOK} ]]; then + if [[ -z ${WATCHDOG_NOTIFY_WEBHOOK_BODY} ]]; then + log_msg "No webhook body set, skipping webhook notification..." + return 1 + fi + + WEBHOOK_BODY=$(echo "${WATCHDOG_NOTIFY_WEBHOOK_BODY}" | envsubst '$SUBJECT,$BODY') + + curl -X POST \ + -H "Content-Type: application/json" \ + -d ${WEBHOOK_BODY} \ + ${WATCHDOG_NOTIFY_WEBHOOK} + + log_msg "Posted notification to webhook" + fi } get_container_ip() { From b83841d2533a62e44300ee5b36c3b1a4121e7b02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Kleinekath=C3=B6fer?= Date: Sat, 7 Jan 2023 15:44:29 +0100 Subject: [PATCH 005/126] Replace placeholders with sed --- data/Dockerfiles/watchdog/watchdog.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/Dockerfiles/watchdog/watchdog.sh b/data/Dockerfiles/watchdog/watchdog.sh index 741417c5..ada402ba 100755 --- a/data/Dockerfiles/watchdog/watchdog.sh +++ b/data/Dockerfiles/watchdog/watchdog.sh @@ -167,7 +167,7 @@ function notify_error() { return 1 fi - WEBHOOK_BODY=$(echo "${WATCHDOG_NOTIFY_WEBHOOK_BODY}" | envsubst '$SUBJECT,$BODY') + WEBHOOK_BODY=$(echo ${WATCHDOG_NOTIFY_WEBHOOK_BODY} | sed "s/\$SUBJECT\|\${SUBJECT}/$SUBJECT/g" | sed "s/\$BODY\|\${BODY}/$BODY/" | sed "s/\"/\\\\\"/g") curl -X POST \ -H "Content-Type: application/json" \ From b6b399a590dc3da14ac51d976ed59d54be530ad1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Kleinekath=C3=B6fer?= Date: Sat, 7 Jan 2023 16:00:17 +0100 Subject: [PATCH 006/126] Fixed POST to webhook --- data/Dockerfiles/watchdog/watchdog.sh | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/data/Dockerfiles/watchdog/watchdog.sh b/data/Dockerfiles/watchdog/watchdog.sh index ada402ba..2b8ff78f 100755 --- a/data/Dockerfiles/watchdog/watchdog.sh +++ b/data/Dockerfiles/watchdog/watchdog.sh @@ -167,14 +167,13 @@ function notify_error() { return 1 fi - WEBHOOK_BODY=$(echo ${WATCHDOG_NOTIFY_WEBHOOK_BODY} | sed "s/\$SUBJECT\|\${SUBJECT}/$SUBJECT/g" | sed "s/\$BODY\|\${BODY}/$BODY/" | sed "s/\"/\\\\\"/g") + # Replace subject and body placeholders + WEBHOOK_BODY=$(echo ${WATCHDOG_NOTIFY_WEBHOOK_BODY} | sed "s/\$SUBJECT\|\${SUBJECT}/$SUBJECT/g" | sed "s/\$BODY\|\${BODY}/$BODY/") + + # POST to webhook + curl -X POST -H "Content-Type: application/json" -d "${WEBHOOK_BODY}" ${WATCHDOG_NOTIFY_WEBHOOK} - curl -X POST \ - -H "Content-Type: application/json" \ - -d ${WEBHOOK_BODY} \ - ${WATCHDOG_NOTIFY_WEBHOOK} - - log_msg "Posted notification to webhook" + log_msg "Sent notification using webhook" fi } From 38f5e293b0770db1dcd86564a89357a46fb708d7 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Felix=20Kleinekath=C3=B6fer?= Date: Sat, 7 Jan 2023 16:21:11 +0100 Subject: [PATCH 007/126] Webhook variables in config generation --- generate_config.sh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/generate_config.sh b/generate_config.sh index 70dc5887..da11f60a 100755 --- a/generate_config.sh +++ b/generate_config.sh @@ -338,6 +338,13 @@ USE_WATCHDOG=y #WATCHDOG_NOTIFY_EMAIL=a@example.com,b@example.com,c@example.com #WATCHDOG_NOTIFY_EMAIL= +# Send notifications to a webhook URL that receives a POST request with the content type "application/json". +# You can use this to send notifications to services like Discord, Slack and others. +#WATCHDOG_NOTIFY_WEBHOOK=https://discord.com/api/webhooks/XXXXXXXXXXXXXXXXXXX/XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX +# JSON body included in the webhook POST request. Needs to be in single quotes. +# Following variables are available: SUBJECT, BODY +#WATCHDOG_NOTIFY_WEBHOOK_BODY='{"username": "Mailcow Watchdog", "content": "**${SUBJECT}**\n${BODY}"}' + # Notify about banned IP (includes whois lookup) WATCHDOG_NOTIFY_BAN=n From 50fde60899c42a4a3e5bc2dcf8c5a6f00a00a2e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Kleinekath=C3=B6fer?= Date: Sat, 7 Jan 2023 16:29:43 +0100 Subject: [PATCH 008/126] Added webhook variables to update script --- update.sh | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/update.sh b/update.sh index 34d17354..4ada78d8 100755 --- a/update.sh +++ b/update.sh @@ -367,6 +367,8 @@ CONFIG_ARRAY=( "SKIP_SOGO" "USE_WATCHDOG" "WATCHDOG_NOTIFY_EMAIL" + "WATCHDOG_NOTIFY_WEBHOOK" + "WATCHDOG_NOTIFY_WEBHOOK_BODY" "WATCHDOG_NOTIFY_BAN" "WATCHDOG_EXTERNAL_CHECKS" "WATCHDOG_SUBJECT" @@ -546,6 +548,20 @@ for option in ${CONFIG_ARRAY[@]}; do echo "#MAILDIR_SUB=Maildir" >> mailcow.conf echo "MAILDIR_SUB=" >> mailcow.conf fi + elif [[ ${option} == "WATCHDOG_NOTIFY_WEBHOOK" ]]; then + if ! grep -q ${option} mailcow.conf; then + echo "Adding new option \"${option}\" to mailcow.conf" + echo '# Send notifications to a webhook URL that receives a POST request with the content type "application/json".' >> mailcow.conf + echo '# You can use this to send notifications to services like Discord, Slack and others.' >> mailcow.conf + echo '#WATCHDOG_NOTIFY_WEBHOOK=https://discord.com/api/webhooks/XXXXXXXXXXXXXXXXXXX/XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' >> mailcow.conf + fi + elif [[ ${option} == "WATCHDOG_NOTIFY_WEBHOOK_BODY" ]]; then + if ! grep -q ${option} mailcow.conf; then + echo "Adding new option \"${option}\" to mailcow.conf" + echo '# JSON body included in the webhook POST request. Needs to be in single quotes.' >> mailcow.conf + echo '# Following variables are available: SUBJECT, BODY' >> mailcow.conf + echo '#WATCHDOG_NOTIFY_WEBHOOK_BODY=\'{"username": "Mailcow Watchdog", "content": "**${SUBJECT}**\n${BODY}"}\'' >> mailcow.conf + fi elif [[ ${option} == "WATCHDOG_NOTIFY_BAN" ]]; then if ! grep -q ${option} mailcow.conf; then echo "Adding new option \"${option}\" to mailcow.conf" @@ -925,4 +941,4 @@ fi # echo # git reflog --color=always | grep "Before update on " # echo -# echo "Use \"git reset --hard hash-on-the-left\" and run $COMPOSE_COMMAND up -d afterwards." \ No newline at end of file +# echo "Use \"git reset --hard hash-on-the-left\" and run $COMPOSE_COMMAND up -d afterwards." 
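Patches 001 through 008 add the webhook path end to end: notify_error() now posts a JSON body to WATCHDOG_NOTIFY_WEBHOOK whenever it fires, and generate_config.sh / update.sh document the two new variables. For illustration only, a mailcow.conf snippet following the commented defaults above (the Discord URL and IDs are placeholders; ${SUBJECT} and ${BODY} are substituted by watchdog.sh before the POST):

  WATCHDOG_NOTIFY_WEBHOOK=https://discord.com/api/webhooks/<id>/<token>
  WATCHDOG_NOTIFY_WEBHOOK_BODY='{"username": "mailcow Watchdog", "content": "**${SUBJECT}**\n${BODY}"}'

As the comments note, the body has to stay in single quotes so the placeholders reach the watchdog unexpanded.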
From e4347792b87e6537b02e885c223f4e5b95957400 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Kleinekath=C3=B6fer?= Date: Sun, 8 Jan 2023 20:02:18 +0100 Subject: [PATCH 009/126] mailcow should be llow --- generate_config.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/generate_config.sh b/generate_config.sh index da11f60a..3181eac7 100755 --- a/generate_config.sh +++ b/generate_config.sh @@ -343,7 +343,7 @@ USE_WATCHDOG=y #WATCHDOG_NOTIFY_WEBHOOK=https://discord.com/api/webhooks/XXXXXXXXXXXXXXXXXXX/XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX # JSON body included in the webhook POST request. Needs to be in single quotes. # Following variables are available: SUBJECT, BODY -#WATCHDOG_NOTIFY_WEBHOOK_BODY='{"username": "Mailcow Watchdog", "content": "**${SUBJECT}**\n${BODY}"}' +#WATCHDOG_NOTIFY_WEBHOOK_BODY='{"username": "mailcow Watchdog", "content": "**${SUBJECT}**\n${BODY}"}' # Notify about banned IP (includes whois lookup) WATCHDOG_NOTIFY_BAN=n From 7877215d597e42f1f761a4ea4695e6888eac4238 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Kleinekath=C3=B6fer?= Date: Sun, 8 Jan 2023 20:02:46 +0100 Subject: [PATCH 010/126] mailcow should be lowercase --- update.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/update.sh b/update.sh index 4ada78d8..8e23f414 100755 --- a/update.sh +++ b/update.sh @@ -560,7 +560,7 @@ for option in ${CONFIG_ARRAY[@]}; do echo "Adding new option \"${option}\" to mailcow.conf" echo '# JSON body included in the webhook POST request. Needs to be in single quotes.' >> mailcow.conf echo '# Following variables are available: SUBJECT, BODY' >> mailcow.conf - echo '#WATCHDOG_NOTIFY_WEBHOOK_BODY=\'{"username": "Mailcow Watchdog", "content": "**${SUBJECT}**\n${BODY}"}\'' >> mailcow.conf + echo '#WATCHDOG_NOTIFY_WEBHOOK_BODY=\'{"username": "mailcow Watchdog", "content": "**${SUBJECT}**\n${BODY}"}\'' >> mailcow.conf fi elif [[ ${option} == "WATCHDOG_NOTIFY_BAN" ]]; then if ! 
grep -q ${option} mailcow.conf; then From a3c5f785e9dd8412cb2807de71246ce12f91ebf6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Kleinekath=C3=B6fer?= Date: Mon, 20 Feb 2023 22:34:53 +0100 Subject: [PATCH 011/126] Added new env vars to docker compose --- docker-compose.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index b940b336..61e7a78e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -471,6 +471,8 @@ services: - WATCHDOG_NOTIFY_EMAIL=${WATCHDOG_NOTIFY_EMAIL:-} - WATCHDOG_NOTIFY_BAN=${WATCHDOG_NOTIFY_BAN:-y} - WATCHDOG_SUBJECT=${WATCHDOG_SUBJECT:-Watchdog ALERT} + - WATCHDOG_NOTIFY_WEBHOOK=${WATCHDOG_NOTIFY_WEBHOOK} + - WATCHDOG_NOTIFY_WEBHOOK_BODY=${WATCHDOG_NOTIFY_WEBHOOK_BODY} - WATCHDOG_EXTERNAL_CHECKS=${WATCHDOG_EXTERNAL_CHECKS:-n} - WATCHDOG_MYSQL_REPLICATION_CHECKS=${WATCHDOG_MYSQL_REPLICATION_CHECKS:-n} - WATCHDOG_VERBOSE=${WATCHDOG_VERBOSE:-n} From e2e8fbe3131327eb65f22e31fb200d55c59512dd Mon Sep 17 00:00:00 2001 From: FreddleSpl0it Date: Mon, 10 Jul 2023 13:54:23 +0200 Subject: [PATCH 012/126] [Web] add f2b_banlist endpoint --- data/Dockerfiles/netfilter/server.py | 2 + data/web/admin.php | 6 +- data/web/inc/functions.fail2ban.inc.php | 65 +++++++++++++++++++- data/web/inc/functions.inc.php | 15 +++++ data/web/js/build/013-mailcow.js | 8 +++ data/web/json_api.php | 21 ++++++- data/web/lang/lang.de-de.json | 2 + data/web/lang/lang.en-gb.json | 2 + data/web/templates/admin/tab-config-f2b.twig | 9 +++ 9 files changed, 127 insertions(+), 3 deletions(-) diff --git a/data/Dockerfiles/netfilter/server.py b/data/Dockerfiles/netfilter/server.py index 698137bf..9f3cacb3 100644 --- a/data/Dockerfiles/netfilter/server.py +++ b/data/Dockerfiles/netfilter/server.py @@ -16,6 +16,7 @@ import json import iptc import dns.resolver import dns.exception +import uuid while True: try: @@ -94,6 +95,7 @@ def verifyF2boptions(f2boptions): verifyF2boption(f2boptions,'retry_window', 600) verifyF2boption(f2boptions,'netban_ipv4', 32) verifyF2boption(f2boptions,'netban_ipv6', 128) + verifyF2boption(f2boptions,'banlist_id', str(uuid.uuid4())) def verifyF2boption(f2boptions, f2boption, f2bdefault): f2boptions[f2boption] = f2boptions[f2boption] if f2boption in f2boptions and f2boptions[f2boption] is not None else f2bdefault diff --git a/data/web/admin.php b/data/web/admin.php index 14cb89f5..8a96ee51 100644 --- a/data/web/admin.php +++ b/data/web/admin.php @@ -85,6 +85,8 @@ $cors_settings = cors('get'); $cors_settings['allowed_origins'] = str_replace(", ", "\n", $cors_settings['allowed_origins']); $cors_settings['allowed_methods'] = explode(", ", $cors_settings['allowed_methods']); +$f2b_data = fail2ban('get'); + $template = 'admin.twig'; $template_data = [ 'tfa_data' => $tfa_data, @@ -101,7 +103,8 @@ $template_data = [ 'domains' => $domains, 'all_domains' => $all_domains, 'mailboxes' => $mailboxes, - 'f2b_data' => fail2ban('get'), + 'f2b_data' => $f2b_data, + 'f2b_banlist_url' => getBaseUrl() . "/api/v1/get/fail2ban/banlist/" . 
$f2b_data['banlist_id'], 'q_data' => quarantine('settings'), 'qn_data' => quota_notification('get'), 'rsettings_map' => file_get_contents('http://nginx:8081/settings.php'), @@ -112,6 +115,7 @@ $template_data = [ 'password_complexity' => password_complexity('get'), 'show_rspamd_global_filters' => @$_SESSION['show_rspamd_global_filters'], 'cors_settings' => $cors_settings, + 'is_https' => isset($_SERVER['HTTPS']) && $_SERVER['HTTPS'] === 'on', 'lang_admin' => json_encode($lang['admin']), 'lang_datatables' => json_encode($lang['datatables']) ]; diff --git a/data/web/inc/functions.fail2ban.inc.php b/data/web/inc/functions.fail2ban.inc.php index 2c4aa41d..3e0c75c4 100644 --- a/data/web/inc/functions.fail2ban.inc.php +++ b/data/web/inc/functions.fail2ban.inc.php @@ -1,5 +1,5 @@ 'f2b_modified' ); break; + case 'banlist': + try { + $f2b_options = json_decode($redis->Get('F2B_OPTIONS'), true); + } + catch (RedisException $e) { + $_SESSION['return'][] = array( + 'type' => 'danger', + 'log' => array(__FUNCTION__, $_action, $_data_log, $_extra), + 'msg' => array('redis_error', $e) + ); + return false; + } + if (is_array($_extra)) { + $_extra = $_extra[0]; + } + if ($_extra != $f2b_options['banlist_id']){ + return false; + } + + switch ($_data) { + case 'get': + try { + $bl = $redis->hGetAll('F2B_BLACKLIST'); + $active_bans = $redis->hGetAll('F2B_ACTIVE_BANS'); + } + catch (RedisException $e) { + $_SESSION['return'][] = array( + 'type' => 'danger', + 'log' => array(__FUNCTION__, $_action, $_data_log, $_extra), + 'msg' => array('redis_error', $e) + ); + return false; + } + $banlist = implode("\n", array_merge(array_keys($bl), array_keys($active_bans))); + return $banlist; + break; + case 'refresh': + if ($_SESSION['mailcow_cc_role'] != "admin") { + return false; + } + + $f2b_options['banlist_id'] = uuid4(); + try { + $redis->Set('F2B_OPTIONS', json_encode($f2b_options)); + } + catch (RedisException $e) { + $_SESSION['return'][] = array( + 'type' => 'danger', + 'log' => array(__FUNCTION__, $_action, $_data_log, $_extra), + 'msg' => array('redis_error', $e) + ); + return false; + } + + $_SESSION['return'][] = array( + 'type' => 'success', + 'log' => array(__FUNCTION__, $_action, $_data_log, $_extra), + 'msg' => 'f2b_banlist_refreshed' + ); + return true; + break; + } + break; } } diff --git a/data/web/inc/functions.inc.php b/data/web/inc/functions.inc.php index 6418945c..3cff09b9 100644 --- a/data/web/inc/functions.inc.php +++ b/data/web/inc/functions.inc.php @@ -2246,6 +2246,21 @@ function cors($action, $data = null) { break; } } +function getBaseURL() { + $protocol = isset($_SERVER['HTTPS']) && $_SERVER['HTTPS'] === 'on' ? 'https' : 'http'; + $host = $_SERVER['HTTP_HOST']; + $base_url = $protocol . '://' . 
$host; + + return $base_url; +} +function uuid4() { + $data = openssl_random_pseudo_bytes(16); + + $data[6] = chr(ord($data[6]) & 0x0f | 0x40); + $data[8] = chr(ord($data[8]) & 0x3f | 0x80); + + return vsprintf('%s%s-%s-%s-%s-%s%s%s', str_split(bin2hex($data), 4)); +} function get_logs($application, $lines = false) { if ($lines === false) { diff --git a/data/web/js/build/013-mailcow.js b/data/web/js/build/013-mailcow.js index e659915b..cc54fafb 100644 --- a/data/web/js/build/013-mailcow.js +++ b/data/web/js/build/013-mailcow.js @@ -371,3 +371,11 @@ function addTag(tagAddElem, tag = null){ $(tagValuesElem).val(JSON.stringify(value_tags)); $(tagInputElem).val(''); } +function copyToClipboard(id) { + var copyText = document.getElementById(id); + copyText.select(); + copyText.setSelectionRange(0, 99999); + // only works with https connections + navigator.clipboard.writeText(copyText.value); + mailcow_alert_box(lang.copy_to_clipboard, "success"); +} \ No newline at end of file diff --git a/data/web/json_api.php b/data/web/json_api.php index 16c78baf..50a45b56 100644 --- a/data/web/json_api.php +++ b/data/web/json_api.php @@ -503,6 +503,15 @@ if (isset($_GET['query'])) { print(json_encode($getArgs)); $_SESSION['challenge'] = $WebAuthn->getChallenge(); return; + break; + case "fail2ban": + if (!isset($_SESSION['mailcow_cc_role'])){ + switch ($object) { + case 'banlist': + echo fail2ban('banlist', 'get', $extra); + break; + } + } break; } if (isset($_SESSION['mailcow_cc_role'])) { @@ -1324,6 +1333,9 @@ if (isset($_GET['query'])) { break; case "fail2ban": switch ($object) { + case 'banlist': + echo fail2ban('banlist', 'get', $extra); + break; default: $data = fail2ban('get'); process_get_return($data); @@ -1930,7 +1942,14 @@ if (isset($_GET['query'])) { process_edit_return(fwdhost('edit', array_merge(array('fwdhost' => $items), $attr))); break; case "fail2ban": - process_edit_return(fail2ban('edit', array_merge(array('network' => $items), $attr))); + switch ($object) { + case 'banlist': + process_edit_return(fail2ban('banlist', 'refresh', $items)); + break; + default: + process_edit_return(fail2ban('edit', array_merge(array('network' => $items), $attr))); + break; + } break; case "ui_texts": process_edit_return(customize('edit', 'ui_texts', $attr)); diff --git a/data/web/lang/lang.de-de.json b/data/web/lang/lang.de-de.json index d6f79dc5..2091e670 100644 --- a/data/web/lang/lang.de-de.json +++ b/data/web/lang/lang.de-de.json @@ -147,6 +147,7 @@ "change_logo": "Logo ändern", "configuration": "Konfiguration", "convert_html_to_text": "Konvertiere HTML zu reinem Text", + "copy_to_clipboard": "Text wurde in die Zwischenablage kopiert!", "cors_settings": "CORS Einstellungen", "credentials_transport_warning": "Warnung: Das Hinzufügen einer neuen Regel bewirkt die Aktualisierung der Authentifizierungsdaten aller vorhandenen Einträge mit identischem Next Hop.", "customer_id": "Kunde", @@ -1019,6 +1020,7 @@ "domain_removed": "Domain %s wurde entfernt", "dovecot_restart_success": "Dovecot wurde erfolgreich neu gestartet", "eas_reset": "ActiveSync Gerät des Benutzers %s wurde zurückgesetzt", + "f2b_banlist_refreshed": "Banlist ID wurde erfolgreich erneuert.", "f2b_modified": "Änderungen an Fail2ban-Parametern wurden gespeichert", "forwarding_host_added": "Weiterleitungs-Host %s wurde hinzugefügt", "forwarding_host_removed": "Weiterleitungs-Host %s wurde entfernt", diff --git a/data/web/lang/lang.en-gb.json b/data/web/lang/lang.en-gb.json index 28ff19b8..b176bc28 100644 --- a/data/web/lang/lang.en-gb.json +++ 
b/data/web/lang/lang.en-gb.json @@ -151,6 +151,7 @@ "change_logo": "Change logo", "configuration": "Configuration", "convert_html_to_text": "Convert HTML to plain text", + "copy_to_clipboard": "Text copied to clipboard!", "cors_settings": "CORS Settings", "credentials_transport_warning": "Warning: Adding a new transport map entry will update the credentials for all entries with a matching next hop column.", "customer_id": "Customer ID", @@ -1028,6 +1029,7 @@ "domain_removed": "Domain %s has been removed", "dovecot_restart_success": "Dovecot was restarted successfully", "eas_reset": "ActiveSync devices for user %s were reset", + "f2b_banlist_refreshed": "Banlist ID has been successfully refreshed.", "f2b_modified": "Changes to Fail2ban parameters have been saved", "forwarding_host_added": "Forwarding host %s has been added", "forwarding_host_removed": "Forwarding host %s has been removed", diff --git a/data/web/templates/admin/tab-config-f2b.twig b/data/web/templates/admin/tab-config-f2b.twig index c15fb72f..68aa57a4 100644 --- a/data/web/templates/admin/tab-config-f2b.twig +++ b/data/web/templates/admin/tab-config-f2b.twig @@ -90,6 +90,15 @@ {% if not f2b_data.active_bans and not f2b_data.perm_bans %} {{ lang.admin.no_active_bans }} {% endif %} +
+
+ + {% if is_https %} + + {% endif %} + +
+
{% for active_ban in f2b_data.active_bans %}

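Since the banlist route added above is keyed only by the random banlist_id and is reachable without a session, an external system can poll it with a single GET. A sketch with a placeholder hostname and ID; the response is one banned host or network per line (blacklist entries plus active bans):

  curl -s https://mail.example.org/api/v1/get/fail2ban/banlist/<banlist_id>

The edit endpoint's new banlist refresh action regenerates the ID, which invalidates any previously shared URL.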
From 65cbc478b8ac644c826bbb5153bd557f29cda10f Mon Sep 17 00:00:00 2001 From: FreddleSpl0it Date: Tue, 11 Jul 2023 10:13:00 +0200 Subject: [PATCH 013/126] [Web] add manage f2b external option --- data/Dockerfiles/netfilter/server.py | 79 ++++++++++++-------- data/web/inc/functions.fail2ban.inc.php | 9 ++- data/web/lang/lang.de-de.json | 2 + data/web/lang/lang.en-gb.json | 2 + data/web/templates/admin/tab-config-f2b.twig | 7 ++ 5 files changed, 65 insertions(+), 34 deletions(-) diff --git a/data/Dockerfiles/netfilter/server.py b/data/Dockerfiles/netfilter/server.py index 9f3cacb3..428ddb96 100644 --- a/data/Dockerfiles/netfilter/server.py +++ b/data/Dockerfiles/netfilter/server.py @@ -96,6 +96,7 @@ def verifyF2boptions(f2boptions): verifyF2boption(f2boptions,'netban_ipv4', 32) verifyF2boption(f2boptions,'netban_ipv6', 128) verifyF2boption(f2boptions,'banlist_id', str(uuid.uuid4())) + verifyF2boption(f2boptions,'manage_external', 0) def verifyF2boption(f2boptions, f2boption, f2bdefault): f2boptions[f2boption] = f2boptions[f2boption] if f2boption in f2boptions and f2boptions[f2boption] is not None else f2bdefault @@ -158,6 +159,7 @@ def mailcowChainOrder(): exit_code = 2 def ban(address): + global f2boptions global lock refreshF2boptions() BAN_TIME = int(f2boptions['ban_time']) @@ -199,7 +201,7 @@ def ban(address): cur_time = int(round(time.time())) NET_BAN_TIME = BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** bans[net]['ban_counter'] logCrit('Banning %s for %d minutes' % (net, NET_BAN_TIME / 60 )) - if type(ip) is ipaddress.IPv4Address: + if type(ip) is ipaddress.IPv4Address and int(f2boptions['manage_external']) != 1: with lock: chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW') rule = iptc.Rule() @@ -208,7 +210,7 @@ def ban(address): rule.target = target if rule not in chain.rules: chain.insert_rule(rule) - else: + elif int(f2boptions['manage_external']) != 1: with lock: chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), 'MAILCOW') rule = iptc.Rule6() @@ -253,37 +255,52 @@ def unban(net): bans[net]['ban_counter'] += 1 def permBan(net, unban=False): + global f2boptions global lock if type(ipaddress.ip_network(net, strict=False)) is ipaddress.IPv4Network: - with lock: - chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW') - rule = iptc.Rule() - rule.src = net - target = iptc.Target(rule, "REJECT") - rule.target = target - if rule not in chain.rules and not unban: - logCrit('Add host/network %s to blacklist' % net) - chain.insert_rule(rule) - r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) - elif rule in chain.rules and unban: - logCrit('Remove host/network %s from blacklist' % net) - chain.delete_rule(rule) - r.hdel('F2B_PERM_BANS', '%s' % net) + if int(f2boptions['manage_external']) != 1: + with lock: + chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW') + rule = iptc.Rule() + rule.src = net + target = iptc.Target(rule, "REJECT") + rule.target = target + if rule not in chain.rules and not unban: + logCrit('Add host/network %s to blacklist' % net) + chain.insert_rule(rule) + r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) + elif rule in chain.rules and unban: + logCrit('Remove host/network %s from blacklist' % net) + chain.delete_rule(rule) + r.hdel('F2B_PERM_BANS', '%s' % net) + elif not unban: + logCrit('Add host/network %s to blacklist' % net) + r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) + elif unban: + logCrit('Remove host/network %s from blacklist' % net) + r.hdel('F2B_PERM_BANS', '%s' % net) else: - with 
lock: - chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), 'MAILCOW') - rule = iptc.Rule6() - rule.src = net - target = iptc.Target(rule, "REJECT") - rule.target = target - if rule not in chain.rules and not unban: - logCrit('Add host/network %s to blacklist' % net) - chain.insert_rule(rule) - r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) - elif rule in chain.rules and unban: - logCrit('Remove host/network %s from blacklist' % net) - chain.delete_rule(rule) - r.hdel('F2B_PERM_BANS', '%s' % net) + if int(f2boptions['manage_external']) != 1: + with lock: + chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), 'MAILCOW') + rule = iptc.Rule6() + rule.src = net + target = iptc.Target(rule, "REJECT") + rule.target = target + if rule not in chain.rules and not unban: + logCrit('Add host/network %s to blacklist' % net) + chain.insert_rule(rule) + r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) + elif rule in chain.rules and unban: + logCrit('Remove host/network %s from blacklist' % net) + chain.delete_rule(rule) + r.hdel('F2B_PERM_BANS', '%s' % net) + elif not unban: + logCrit('Add host/network %s to blacklist' % net) + r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) + elif unban: + logCrit('Remove host/network %s from blacklist' % net) + r.hdel('F2B_PERM_BANS', '%s' % net) def quit(signum, frame): global quit_now @@ -555,7 +572,7 @@ def initChain(): chain.insert_rule(rule) if __name__ == '__main__': - + refreshF2boptions() # In case a previous session was killed without cleanup clear() # Reinit MAILCOW chain diff --git a/data/web/inc/functions.fail2ban.inc.php b/data/web/inc/functions.fail2ban.inc.php index 3e0c75c4..abc12cc3 100644 --- a/data/web/inc/functions.fail2ban.inc.php +++ b/data/web/inc/functions.fail2ban.inc.php @@ -247,6 +247,7 @@ function fail2ban($_action, $_data = null, $_extra = null) { $netban_ipv6 = intval((isset($_data['netban_ipv6'])) ? $_data['netban_ipv6'] : $is_now['netban_ipv6']); $wl = (isset($_data['whitelist'])) ? $_data['whitelist'] : $is_now['whitelist']; $bl = (isset($_data['blacklist'])) ? $_data['blacklist'] : $is_now['blacklist']; + $manage_external = (isset($_data['manage_external'])) ? intval($_data['manage_external']) : 0; } else { $_SESSION['return'][] = array( @@ -266,6 +267,8 @@ function fail2ban($_action, $_data = null, $_extra = null) { $f2b_options['netban_ipv6'] = ($netban_ipv6 > 128) ? 128 : $netban_ipv6; $f2b_options['max_attempts'] = ($max_attempts < 1) ? 1 : $max_attempts; $f2b_options['retry_window'] = ($retry_window < 1) ? 1 : $retry_window; + $f2b_options['banlist_id'] = $is_now['banlist_id']; + $f2b_options['manage_external'] = ($manage_external > 0) ? 
1 : 0; try { $redis->Set('F2B_OPTIONS', json_encode($f2b_options)); $redis->Del('F2B_WHITELIST'); @@ -351,8 +354,8 @@ function fail2ban($_action, $_data = null, $_extra = null) { switch ($_data) { case 'get': try { - $bl = $redis->hGetAll('F2B_BLACKLIST'); - $active_bans = $redis->hGetAll('F2B_ACTIVE_BANS'); + $bl = $redis->hKeys('F2B_BLACKLIST'); + $active_bans = $redis->hKeys('F2B_ACTIVE_BANS'); } catch (RedisException $e) { $_SESSION['return'][] = array( @@ -362,7 +365,7 @@ function fail2ban($_action, $_data = null, $_extra = null) { ); return false; } - $banlist = implode("\n", array_merge(array_keys($bl), array_keys($active_bans))); + $banlist = implode("\n", array_merge($bl, $active_bans)); return $banlist; break; case 'refresh': diff --git a/data/web/lang/lang.de-de.json b/data/web/lang/lang.de-de.json index 2091e670..7c2171aa 100644 --- a/data/web/lang/lang.de-de.json +++ b/data/web/lang/lang.de-de.json @@ -181,6 +181,8 @@ "f2b_blacklist": "Blacklist für Netzwerke und Hosts", "f2b_filter": "Regex-Filter", "f2b_list_info": "Ein Host oder Netzwerk auf der Blacklist wird immer eine Whitelist-Einheit überwiegen. Die Aktualisierung der Liste dauert einige Sekunden.", + "f2b_manage_external": "Fail2Ban extern verwalten", + "f2b_manage_external_info": "Fail2ban wird die Banlist weiterhin pflegen, jedoch werden keine aktiven Regeln zum blockieren gesetzt. Die unten generierte Banlist, kann verwendet werden, um den Datenverkehr extern zu blockieren.", "f2b_max_attempts": "Max. Versuche", "f2b_max_ban_time": "Maximale Bannzeit in Sekunden", "f2b_netban_ipv4": "Netzbereich für IPv4-Banns (8-32)", diff --git a/data/web/lang/lang.en-gb.json b/data/web/lang/lang.en-gb.json index b176bc28..e7c82cda 100644 --- a/data/web/lang/lang.en-gb.json +++ b/data/web/lang/lang.en-gb.json @@ -185,6 +185,8 @@ "f2b_blacklist": "Blacklisted networks/hosts", "f2b_filter": "Regex filters", "f2b_list_info": "A blacklisted host or network will always outweigh a whitelist entity. List updates will take a few seconds to be applied.", + "f2b_manage_external": "Manage Fail2Ban externally", + "f2b_manage_external_info": "Fail2ban will still maintain the banlist, but it will not actively set rules to block traffic. Use the generated banlist below to externally block the traffic.", "f2b_max_attempts": "Max. attempts", "f2b_max_ban_time": "Max. ban time (s)", "f2b_netban_ipv4": "IPv4 subnet size to apply ban on (8-32)", diff --git a/data/web/templates/admin/tab-config-f2b.twig b/data/web/templates/admin/tab-config-f2b.twig index 68aa57a4..dac69516 100644 --- a/data/web/templates/admin/tab-config-f2b.twig +++ b/data/web/templates/admin/tab-config-f2b.twig @@ -42,6 +42,13 @@ +

+
+ + +
+

{{ lang.admin.f2b_manage_external_info }}

+

{{ lang.admin.f2b_list_info|raw }}

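With manage_external enabled, netfilter keeps recording bans but no longer installs REJECT rules itself, so the blocking has to happen on an upstream device. One possible consumer is sketched below; it assumes the edge host has curl, ipset and iptables available, uses a placeholder URL/ID, and handles only the IPv4 entries for brevity:

  #!/bin/bash
  # Mirror the exported mailcow banlist into an ipset referenced by a single DROP rule.
  BANLIST_URL="https://mail.example.org/api/v1/get/fail2ban/banlist/<banlist_id>"
  ipset create -exist mailcow_f2b hash:net
  ipset flush mailcow_f2b
  curl -sf "$BANLIST_URL" | grep -v ':' | while read -r net; do
    [ -n "$net" ] && ipset add -exist mailcow_f2b "$net"
  done
  iptables -C INPUT -m set --match-set mailcow_f2b src -j DROP 2>/dev/null || \
    iptables -I INPUT -m set --match-set mailcow_f2b src -j DROP

Run periodically (for example from cron), this keeps the external filter in step with whatever netfilter-mailcow records while the mailcow host itself stays passive.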
From 1537fb39c0c8c996a05ae677b5fa7e20775b4851 Mon Sep 17 00:00:00 2001 From: FreddleSpl0it Date: Tue, 11 Jul 2023 10:19:32 +0200 Subject: [PATCH 014/126] [Web] add manage f2b external option --- data/Dockerfiles/netfilter/server.py | 70 +++++++++++----------------- 1 file changed, 28 insertions(+), 42 deletions(-) diff --git a/data/Dockerfiles/netfilter/server.py b/data/Dockerfiles/netfilter/server.py index 428ddb96..982fa97c 100644 --- a/data/Dockerfiles/netfilter/server.py +++ b/data/Dockerfiles/netfilter/server.py @@ -258,49 +258,35 @@ def permBan(net, unban=False): global f2boptions global lock if type(ipaddress.ip_network(net, strict=False)) is ipaddress.IPv4Network: - if int(f2boptions['manage_external']) != 1: - with lock: - chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW') - rule = iptc.Rule() - rule.src = net - target = iptc.Target(rule, "REJECT") - rule.target = target - if rule not in chain.rules and not unban: - logCrit('Add host/network %s to blacklist' % net) - chain.insert_rule(rule) - r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) - elif rule in chain.rules and unban: - logCrit('Remove host/network %s from blacklist' % net) - chain.delete_rule(rule) - r.hdel('F2B_PERM_BANS', '%s' % net) - elif not unban: - logCrit('Add host/network %s to blacklist' % net) - r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) - elif unban: - logCrit('Remove host/network %s from blacklist' % net) - r.hdel('F2B_PERM_BANS', '%s' % net) + with lock: + chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW') + rule = iptc.Rule() + rule.src = net + target = iptc.Target(rule, "REJECT") + rule.target = target + if rule not in chain.rules and not unban and int(f2boptions['manage_external']) != 1: + logCrit('Add host/network %s to blacklist' % net) + chain.insert_rule(rule) + r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) + elif rule in chain.rules and unban: + logCrit('Remove host/network %s from blacklist' % net) + chain.delete_rule(rule) + r.hdel('F2B_PERM_BANS', '%s' % net) else: - if int(f2boptions['manage_external']) != 1: - with lock: - chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), 'MAILCOW') - rule = iptc.Rule6() - rule.src = net - target = iptc.Target(rule, "REJECT") - rule.target = target - if rule not in chain.rules and not unban: - logCrit('Add host/network %s to blacklist' % net) - chain.insert_rule(rule) - r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) - elif rule in chain.rules and unban: - logCrit('Remove host/network %s from blacklist' % net) - chain.delete_rule(rule) - r.hdel('F2B_PERM_BANS', '%s' % net) - elif not unban: - logCrit('Add host/network %s to blacklist' % net) - r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) - elif unban: - logCrit('Remove host/network %s from blacklist' % net) - r.hdel('F2B_PERM_BANS', '%s' % net) + with lock: + chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), 'MAILCOW') + rule = iptc.Rule6() + rule.src = net + target = iptc.Target(rule, "REJECT") + rule.target = target + if rule not in chain.rules and not unban and int(f2boptions['manage_external']) != 1: + logCrit('Add host/network %s to blacklist' % net) + chain.insert_rule(rule) + r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) + elif rule in chain.rules and unban: + logCrit('Remove host/network %s from blacklist' % net) + chain.delete_rule(rule) + r.hdel('F2B_PERM_BANS', '%s' % net) def quit(signum, frame): global quit_now From 987cfd5dae4014b35c183ce4be0e1f8856950116 Mon Sep 17 00:00:00 2001 From: 
FreddleSpl0it Date: Tue, 11 Jul 2023 10:31:25 +0200 Subject: [PATCH 015/126] [Web] f2b banlist - add http status codes --- data/web/inc/functions.fail2ban.inc.php | 3 +++ data/web/inc/prerequisites.inc.php | 1 + 2 files changed, 4 insertions(+) diff --git a/data/web/inc/functions.fail2ban.inc.php b/data/web/inc/functions.fail2ban.inc.php index abc12cc3..5962237f 100644 --- a/data/web/inc/functions.fail2ban.inc.php +++ b/data/web/inc/functions.fail2ban.inc.php @@ -342,12 +342,14 @@ function fail2ban($_action, $_data = null, $_extra = null) { 'log' => array(__FUNCTION__, $_action, $_data_log, $_extra), 'msg' => array('redis_error', $e) ); + http_response_code(500); return false; } if (is_array($_extra)) { $_extra = $_extra[0]; } if ($_extra != $f2b_options['banlist_id']){ + http_response_code(404); return false; } @@ -363,6 +365,7 @@ function fail2ban($_action, $_data = null, $_extra = null) { 'log' => array(__FUNCTION__, $_action, $_data_log, $_extra), 'msg' => array('redis_error', $e) ); + http_response_code(500); return false; } $banlist = implode("\n", array_merge($bl, $active_bans)); diff --git a/data/web/inc/prerequisites.inc.php b/data/web/inc/prerequisites.inc.php index b3b1cc13..f7fd80b4 100644 --- a/data/web/inc/prerequisites.inc.php +++ b/data/web/inc/prerequisites.inc.php @@ -70,6 +70,7 @@ try { } } catch (Exception $e) { +http_response_code(500); ?>
Connection to Redis failed.

The following error was reported:
getMessage();?>
Date: Wed, 12 Jul 2023 09:42:17 +0200 Subject: [PATCH 016/126] [Netfilter] Update Compose File to 1.53 --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 4c854aeb..b68a97fa 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -426,7 +426,7 @@ services: - acme netfilter-mailcow: - image: mailcow/netfilter:1.52 + image: mailcow/netfilter:1.53 stop_grace_period: 30s depends_on: - dovecot-mailcow From db2759b7d184e68713bf0441f84df9f624ce3c6d Mon Sep 17 00:00:00 2001 From: FreddleSpl0it Date: Wed, 12 Jul 2023 16:46:32 +0200 Subject: [PATCH 017/126] [Web] fix wrong content type + add more http 500 responses --- data/web/inc/prerequisites.inc.php | 3 +++ data/web/json_api.php | 2 ++ 2 files changed, 5 insertions(+) diff --git a/data/web/inc/prerequisites.inc.php b/data/web/inc/prerequisites.inc.php index f7fd80b4..9c5203e7 100644 --- a/data/web/inc/prerequisites.inc.php +++ b/data/web/inc/prerequisites.inc.php @@ -70,6 +70,7 @@ try { } } catch (Exception $e) { +// Stop when redis is not available http_response_code(500); ?>
Connection to Redis failed.

The following error was reported:
getMessage();?>
@@ -99,6 +100,7 @@ try { } catch (PDOException $e) { // Stop when SQL connection fails +http_response_code(500); ?>
Connection to database failed.

The following error was reported:
getMessage();?>
Connection to dockerapi container failed.

The following error was reported:
-
Date: Thu, 12 Oct 2023 12:46:02 +0200 Subject: [PATCH 018/126] Allow suppressing watchdog start notification. The default behavior is still the old one (send a notifcation when the watchdog is started), but this notification can now be suppressed by setting WATCHDOG_NOTIFY_START=n. --- data/Dockerfiles/watchdog/watchdog.sh | 2 +- docker-compose.yml | 1 + generate_config.sh | 3 +++ update.sh | 7 +++++++ 4 files changed, 12 insertions(+), 1 deletion(-) diff --git a/data/Dockerfiles/watchdog/watchdog.sh b/data/Dockerfiles/watchdog/watchdog.sh index 231d0ecd..77281b71 100755 --- a/data/Dockerfiles/watchdog/watchdog.sh +++ b/data/Dockerfiles/watchdog/watchdog.sh @@ -746,7 +746,7 @@ olefy_checks() { } # Notify about start -if [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]]; then +if [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && [[ ${WATCHDOG_NOTIFY_START} =~ ^([yY][eE][sS]|[yY])+$ ]]; then mail_error "watchdog-mailcow" "Watchdog started monitoring mailcow." fi diff --git a/docker-compose.yml b/docker-compose.yml index c8cfd589..f208de0c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -488,6 +488,7 @@ services: - USE_WATCHDOG=${USE_WATCHDOG:-n} - WATCHDOG_NOTIFY_EMAIL=${WATCHDOG_NOTIFY_EMAIL:-} - WATCHDOG_NOTIFY_BAN=${WATCHDOG_NOTIFY_BAN:-y} + - WATCHDOG_NOTIFY_START=${WATCHDOG_NOTIFY_START:-y} - WATCHDOG_SUBJECT=${WATCHDOG_SUBJECT:-Watchdog ALERT} - WATCHDOG_EXTERNAL_CHECKS=${WATCHDOG_EXTERNAL_CHECKS:-n} - WATCHDOG_MYSQL_REPLICATION_CHECKS=${WATCHDOG_MYSQL_REPLICATION_CHECKS:-n} diff --git a/generate_config.sh b/generate_config.sh index f25309ea..30af54e3 100755 --- a/generate_config.sh +++ b/generate_config.sh @@ -401,6 +401,9 @@ USE_WATCHDOG=y # Notify about banned IP (includes whois lookup) WATCHDOG_NOTIFY_BAN=n +# Send a notification when the watchdog is started. +WATCHDOG_NOTIFY_START=y + # Subject for watchdog mails. Defaults to "Watchdog ALERT" followed by the error message. #WATCHDOG_SUBJECT= diff --git a/update.sh b/update.sh index 5204659e..533612ed 100755 --- a/update.sh +++ b/update.sh @@ -449,6 +449,7 @@ CONFIG_ARRAY=( "USE_WATCHDOG" "WATCHDOG_NOTIFY_EMAIL" "WATCHDOG_NOTIFY_BAN" + "WATCHDOG_NOTIFY_START" "WATCHDOG_EXTERNAL_CHECKS" "WATCHDOG_SUBJECT" "SKIP_CLAMD" @@ -636,6 +637,12 @@ for option in ${CONFIG_ARRAY[@]}; do echo '# Notify about banned IP. Includes whois lookup.' >> mailcow.conf echo "WATCHDOG_NOTIFY_BAN=y" >> mailcow.conf fi + elif [[ ${option} == "WATCHDOG_NOTIFY_START" ]]; then + if ! grep -q ${option} mailcow.conf; then + echo "Adding new option \"${option}\" to mailcow.conf" + echo '# Send a notification when the watchdog is started.' >> mailcow.conf + echo "WATCHDOG_NOTIFY_START=y" >> mailcow.conf + fi elif [[ ${option} == "WATCHDOG_SUBJECT" ]]; then if ! 
grep -q ${option} mailcow.conf; then echo "Adding new option \"${option}\" to mailcow.conf" From 3b183933e3a83cca8d1605c7a24294a8d332c596 Mon Sep 17 00:00:00 2001 From: FreddleSpl0it Date: Fri, 20 Oct 2023 10:48:04 +0200 Subject: [PATCH 019/126] [Web] add api get spam-score endpoint --- data/web/api/openapi.yaml | 34 ++++++++++++++++++++++++++++++++++ data/web/json_api.php | 6 ++++++ 2 files changed, 40 insertions(+) diff --git a/data/web/api/openapi.yaml b/data/web/api/openapi.yaml index 1d262168..e8e963e6 100644 --- a/data/web/api/openapi.yaml +++ b/data/web/api/openapi.yaml @@ -5646,6 +5646,40 @@ paths: items: type: string summary: Edit Cross-Origin Resource Sharing (CORS) settings + "/api/v1/get/spam-score/{mailbox}": + get: + parameters: + - description: name of mailbox or empty for current user - admin user will retrieve the global spam filter score + in: path + name: mailbox + required: true + schema: + type: string + - description: e.g. api-key-string + example: api-key-string + in: header + name: X-API-Key + required: false + schema: + type: string + responses: + "401": + $ref: "#/components/responses/Unauthorized" + "200": + content: + application/json: + examples: + response: + value: + spam_score: "8,15" + description: OK + headers: {} + tags: + - Mailboxes + description: >- + Using this endpoint you can get the global spam filter score or the spam filter score of a certain mailbox. + operationId: Get mailbox or global spam filter score + summary: Get mailbox or global spam filter score tags: - name: Domains diff --git a/data/web/json_api.php b/data/web/json_api.php index b375bc8e..b8c465fb 100644 --- a/data/web/json_api.php +++ b/data/web/json_api.php @@ -1591,6 +1591,12 @@ if (isset($_GET['query'])) { } } break; + case "spam-score": + $score = mailbox('get', 'spam_score', $object); + if ($score) + $score = array("score" => preg_replace("/\s+/", "", $score)); + process_get_return($score); + break; break; // return no route found if no case is matched default: From f39005b72ddd41662855e9f83d2021e6a23bffa6 Mon Sep 17 00:00:00 2001 From: FreddleSpl0it Date: Mon, 30 Oct 2023 11:54:14 +0100 Subject: [PATCH 020/126] [Netfilter] add nftables support --- data/Dockerfiles/netfilter/Dockerfile | 14 +- .../netfilter/docker-entrypoint.sh | 29 + data/Dockerfiles/netfilter/main.py | 459 ++++++++++++++++ .../Dockerfiles/netfilter/modules/IPTables.py | 213 ++++++++ data/Dockerfiles/netfilter/modules/Logger.py | 23 + .../Dockerfiles/netfilter/modules/NFTables.py | 495 ++++++++++++++++++ .../Dockerfiles/netfilter/modules/__init__.py | 0 docker-compose.yml | 2 +- 8 files changed, 1232 insertions(+), 3 deletions(-) create mode 100755 data/Dockerfiles/netfilter/docker-entrypoint.sh create mode 100644 data/Dockerfiles/netfilter/main.py create mode 100644 data/Dockerfiles/netfilter/modules/IPTables.py create mode 100644 data/Dockerfiles/netfilter/modules/Logger.py create mode 100644 data/Dockerfiles/netfilter/modules/NFTables.py create mode 100644 data/Dockerfiles/netfilter/modules/__init__.py diff --git a/data/Dockerfiles/netfilter/Dockerfile b/data/Dockerfiles/netfilter/Dockerfile index 4fcb5eef..8f76ec63 100644 --- a/data/Dockerfiles/netfilter/Dockerfile +++ b/data/Dockerfiles/netfilter/Dockerfile @@ -1,6 +1,8 @@ FROM alpine:3.17 LABEL maintainer "The Infrastructure Company GmbH " +WORKDIR /app + ENV XTABLES_LIBDIR /usr/lib/xtables ENV PYTHON_IPTABLES_XTABLES_VERSION 12 ENV IPTABLES_LIBDIR /usr/lib @@ -14,10 +16,13 @@ RUN apk add --virtual .build-deps \ iptables \ ip6tables \ xtables-addons \ + 
nftables \ tzdata \ py3-pip \ + py3-nftables \ musl-dev \ && pip3 install --ignore-installed --upgrade pip \ + jsonschema \ python-iptables \ redis \ ipaddress \ @@ -26,5 +31,10 @@ RUN apk add --virtual .build-deps \ # && pip3 install --upgrade pip python-iptables==0.13.0 redis ipaddress dnspython \ -COPY server.py / -CMD ["python3", "-u", "/server.py"] +COPY modules /app/modules +COPY main.py /app/ +COPY ./docker-entrypoint.sh /app/ + +RUN chmod +x /app/docker-entrypoint.sh + +CMD ["/bin/sh", "-c", "/app/docker-entrypoint.sh"] \ No newline at end of file diff --git a/data/Dockerfiles/netfilter/docker-entrypoint.sh b/data/Dockerfiles/netfilter/docker-entrypoint.sh new file mode 100755 index 00000000..47370a1f --- /dev/null +++ b/data/Dockerfiles/netfilter/docker-entrypoint.sh @@ -0,0 +1,29 @@ +#!/bin/sh + +backend=iptables + +nft list table ip filter &>/dev/null +nftables_found=$? + +iptables -L -n &>/dev/null +iptables_found=$? + +if [ $nftables_found -lt $iptables_found ]; then + backend=nftables +fi + +if [ $nftables_found -gt $iptables_found ]; then + backend=iptables +fi + +if [ $nftables_found -eq 0 ] && [ $nftables_found -eq $iptables_found ]; then + nftables_lines=$(nft list ruleset | wc -l) + iptables_lines=$(iptables-save | wc -l) + if [ $nftables_lines -gt $iptables_lines ]; then + backend=nftables + else + backend=iptables + fi +fi + +exec python -u /app/main.py $backend diff --git a/data/Dockerfiles/netfilter/main.py b/data/Dockerfiles/netfilter/main.py new file mode 100644 index 00000000..a6859c95 --- /dev/null +++ b/data/Dockerfiles/netfilter/main.py @@ -0,0 +1,459 @@ +#!/usr/bin/env python3 + +import re +import os +import sys +import time +import atexit +import signal +import ipaddress +from collections import Counter +from random import randint +from threading import Thread +from threading import Lock +import redis +import json +import dns.resolver +import dns.exception +from modules.Logger import Logger +from modules.IPTables import IPTables +from modules.NFTables import NFTables + + +# connect to redis +while True: + try: + redis_slaveof_ip = os.getenv('REDIS_SLAVEOF_IP', '') + redis_slaveof_port = os.getenv('REDIS_SLAVEOF_PORT', '') + if "".__eq__(redis_slaveof_ip): + r = redis.StrictRedis(host=os.getenv('IPV4_NETWORK', '172.22.1') + '.249', decode_responses=True, port=6379, db=0) + else: + r = redis.StrictRedis(host=redis_slaveof_ip, decode_responses=True, port=redis_slaveof_port, db=0) + r.ping() + except Exception as ex: + print('%s - trying again in 3 seconds' % (ex)) + time.sleep(3) + else: + break +pubsub = r.pubsub() + +# rename fail2ban to netfilter +if r.exists('F2B_LOG'): + r.rename('F2B_LOG', 'NETFILTER_LOG') + + +# globals +WHITELIST = [] +BLACKLIST= [] +bans = {} +quit_now = False +exit_code = 0 +lock = Lock() + + +# init Logger +logger = Logger(r) +# init backend +backend = sys.argv[1] +if backend == "nftables": + logger.logInfo('Using NFTables backend') + tables = NFTables("MAILCOW", logger) +else: + logger.logInfo('Using IPTables backend') + tables = IPTables("MAILCOW", logger) + + +def refreshF2boptions(): + global f2boptions + global quit_now + global exit_code + + f2boptions = {} + + if not r.get('F2B_OPTIONS'): + f2boptions['ban_time'] = r.get('F2B_BAN_TIME') + f2boptions['max_ban_time'] = r.get('F2B_MAX_BAN_TIME') + f2boptions['ban_time_increment'] = r.get('F2B_BAN_TIME_INCREMENT') + f2boptions['max_attempts'] = r.get('F2B_MAX_ATTEMPTS') + f2boptions['retry_window'] = r.get('F2B_RETRY_WINDOW') + f2boptions['netban_ipv4'] = r.get('F2B_NETBAN_IPV4') + 
f2boptions['netban_ipv6'] = r.get('F2B_NETBAN_IPV6') + else: + try: + f2boptions = json.loads(r.get('F2B_OPTIONS')) + except ValueError: + logger.logCrit('Error loading F2B options: F2B_OPTIONS is not json') + quit_now = True + exit_code = 2 + + verifyF2boptions(f2boptions) + r.set('F2B_OPTIONS', json.dumps(f2boptions, ensure_ascii=False)) + +def verifyF2boptions(f2boptions): + verifyF2boption(f2boptions,'ban_time', 1800) + verifyF2boption(f2boptions,'max_ban_time', 10000) + verifyF2boption(f2boptions,'ban_time_increment', True) + verifyF2boption(f2boptions,'max_attempts', 10) + verifyF2boption(f2boptions,'retry_window', 600) + verifyF2boption(f2boptions,'netban_ipv4', 32) + verifyF2boption(f2boptions,'netban_ipv6', 128) + +def verifyF2boption(f2boptions, f2boption, f2bdefault): + f2boptions[f2boption] = f2boptions[f2boption] if f2boption in f2boptions and f2boptions[f2boption] is not None else f2bdefault + +def refreshF2bregex(): + global f2bregex + global quit_now + global exit_code + if not r.get('F2B_REGEX'): + f2bregex = {} + f2bregex[1] = 'mailcow UI: Invalid password for .+ by ([0-9a-f\.:]+)' + f2bregex[2] = 'Rspamd UI: Invalid password by ([0-9a-f\.:]+)' + f2bregex[3] = 'warning: .*\[([0-9a-f\.:]+)\]: SASL .+ authentication failed: (?!.*Connection lost to authentication server).+' + f2bregex[4] = 'warning: non-SMTP command from .*\[([0-9a-f\.:]+)]:.+' + f2bregex[5] = 'NOQUEUE: reject: RCPT from \[([0-9a-f\.:]+)].+Protocol error.+' + f2bregex[6] = '-login: Disconnected.+ \(auth failed, .+\): user=.*, method=.+, rip=([0-9a-f\.:]+),' + f2bregex[7] = '-login: Aborted login.+ \(auth failed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+' + f2bregex[8] = '-login: Aborted login.+ \(tried to use disallowed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+' + f2bregex[9] = 'SOGo.+ Login from \'([0-9a-f\.:]+)\' for user .+ might not have worked' + f2bregex[10] = '([0-9a-f\.:]+) \"GET \/SOGo\/.* HTTP.+\" 403 .+' + r.set('F2B_REGEX', json.dumps(f2bregex, ensure_ascii=False)) + else: + try: + f2bregex = {} + f2bregex = json.loads(r.get('F2B_REGEX')) + except ValueError: + logger.logCrit('Error loading F2B options: F2B_REGEX is not json') + quit_now = True + exit_code = 2 + +def get_ip(address): + ip = ipaddress.ip_address(address) + if type(ip) is ipaddress.IPv6Address and ip.ipv4_mapped: + ip = ip.ipv4_mapped + if ip.is_private or ip.is_loopback: + return False + + return ip + +def ban(address): + global lock + + refreshF2boptions() + BAN_TIME = int(f2boptions['ban_time']) + BAN_TIME_INCREMENT = bool(f2boptions['ban_time_increment']) + MAX_ATTEMPTS = int(f2boptions['max_attempts']) + RETRY_WINDOW = int(f2boptions['retry_window']) + NETBAN_IPV4 = '/' + str(f2boptions['netban_ipv4']) + NETBAN_IPV6 = '/' + str(f2boptions['netban_ipv6']) + + ip = get_ip(address) + if not ip: return + address = str(ip) + self_network = ipaddress.ip_network(address) + + with lock: + temp_whitelist = set(WHITELIST) + if temp_whitelist: + for wl_key in temp_whitelist: + wl_net = ipaddress.ip_network(wl_key, False) + if wl_net.overlaps(self_network): + logger.logInfo('Address %s is whitelisted by rule %s' % (self_network, wl_net)) + return + + net = ipaddress.ip_network((address + (NETBAN_IPV4 if type(ip) is ipaddress.IPv4Address else NETBAN_IPV6)), strict=False) + net = str(net) + + if not net in bans: + bans[net] = {'attempts': 0, 'last_attempt': 0, 'ban_counter': 0} + + bans[net]['attempts'] += 1 + bans[net]['last_attempt'] = time.time() + + if bans[net]['attempts'] >= MAX_ATTEMPTS: + cur_time = int(round(time.time())) + 
NET_BAN_TIME = BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** bans[net]['ban_counter'] + logger.logCrit('Banning %s for %d minutes' % (net, NET_BAN_TIME / 60 )) + if type(ip) is ipaddress.IPv4Address: + with lock: + tables.banIPv4(net) + else: + with lock: + tables.banIPv6(net) + + r.hset('F2B_ACTIVE_BANS', '%s' % net, cur_time + NET_BAN_TIME) + else: + logger.logWarn('%d more attempts in the next %d seconds until %s is banned' % (MAX_ATTEMPTS - bans[net]['attempts'], RETRY_WINDOW, net)) + +def unban(net): + global lock + + if not net in bans: + logger.logInfo('%s is not banned, skipping unban and deleting from queue (if any)' % net) + r.hdel('F2B_QUEUE_UNBAN', '%s' % net) + return + + logger.logInfo('Unbanning %s' % net) + if type(ipaddress.ip_network(net)) is ipaddress.IPv4Network: + with lock: + tables.unbanIPv4(net) + else: + with lock: + tables.unbanIPv6(net) + + r.hdel('F2B_ACTIVE_BANS', '%s' % net) + r.hdel('F2B_QUEUE_UNBAN', '%s' % net) + if net in bans: + bans[net]['attempts'] = 0 + bans[net]['ban_counter'] += 1 + +def permBan(net, unban=False): + global lock + + is_unbanned = False + is_banned = False + if type(ipaddress.ip_network(net, strict=False)) is ipaddress.IPv4Network: + with lock: + if unban: + is_unbanned = tables.unbanIPv4(net) + else: + is_banned = tables.banIPv4(net) + else: + with lock: + if unban: + is_unbanned = tables.unbanIPv6(net) + else: + is_banned = tables.banIPv6(net) + + + if is_unbanned: + r.hdel('F2B_PERM_BANS', '%s' % net) + logger.logCrit('Removed host/network %s from blacklist' % net) + elif is_banned: + r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) + logger.logCrit('Added host/network %s to blacklist' % net) + +def clear(): + global lock + logger.logInfo('Clearing all bans') + for net in bans.copy(): + unban(net) + with lock: + tables.clearIPv4Table() + tables.clearIPv6Table() + r.delete('F2B_ACTIVE_BANS') + r.delete('F2B_PERM_BANS') + pubsub.unsubscribe() + +def watch(): + logger.logInfo('Watching Redis channel F2B_CHANNEL') + pubsub.subscribe('F2B_CHANNEL') + + global quit_now + global exit_code + + while not quit_now: + try: + for item in pubsub.listen(): + refreshF2bregex() + for rule_id, rule_regex in f2bregex.items(): + if item['data'] and item['type'] == 'message': + try: + result = re.search(rule_regex, item['data']) + except re.error: + result = False + if result: + addr = result.group(1) + ip = ipaddress.ip_address(addr) + if ip.is_private or ip.is_loopback: + continue + logger.logWarn('%s matched rule id %s (%s)' % (addr, rule_id, item['data'])) + ban(addr) + except Exception as ex: + logger.logWarn('Error reading log line from pubsub: %s' % ex) + quit_now = True + exit_code = 2 + +def snat4(snat_target): + global lock + global quit_now + + while not quit_now: + time.sleep(10) + with lock: + tables.snat4(snat_target, os.getenv('IPV4_NETWORK', '172.22.1') + '.0/24') + +def snat6(snat_target): + global lock + global quit_now + + while not quit_now: + time.sleep(10) + with lock: + tables.snat6(snat_target, os.getenv('IPV6_NETWORK', 'fd4d:6169:6c63:6f77::/64')) + +def autopurge(): + while not quit_now: + time.sleep(10) + refreshF2boptions() + BAN_TIME = int(f2boptions['ban_time']) + MAX_BAN_TIME = int(f2boptions['max_ban_time']) + BAN_TIME_INCREMENT = bool(f2boptions['ban_time_increment']) + MAX_ATTEMPTS = int(f2boptions['max_attempts']) + QUEUE_UNBAN = r.hgetall('F2B_QUEUE_UNBAN') + if QUEUE_UNBAN: + for net in QUEUE_UNBAN: + unban(str(net)) + for net in bans.copy(): + if bans[net]['attempts'] >= MAX_ATTEMPTS: + 
NET_BAN_TIME = BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** bans[net]['ban_counter'] + TIME_SINCE_LAST_ATTEMPT = time.time() - bans[net]['last_attempt'] + if TIME_SINCE_LAST_ATTEMPT > NET_BAN_TIME or TIME_SINCE_LAST_ATTEMPT > MAX_BAN_TIME: + unban(net) + +def mailcowChainOrder(): + global lock + global quit_now + global exit_code + while not quit_now: + time.sleep(10) + with lock: + quit_now, exit_code = tables.checkIPv4ChainOrder() + if quit_now: return + quit_now, exit_code = tables.checkIPv6ChainOrder() + +def isIpNetwork(address): + try: + ipaddress.ip_network(address, False) + except ValueError: + return False + return True + +def genNetworkList(list): + resolver = dns.resolver.Resolver() + hostnames = [] + networks = [] + for key in list: + if isIpNetwork(key): + networks.append(key) + else: + hostnames.append(key) + for hostname in hostnames: + hostname_ips = [] + for rdtype in ['A', 'AAAA']: + try: + answer = resolver.resolve(qname=hostname, rdtype=rdtype, lifetime=3) + except dns.exception.Timeout: + logger.logInfo('Hostname %s timedout on resolve' % hostname) + break + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + continue + except dns.exception.DNSException as dnsexception: + logger.logInfo('%s' % dnsexception) + continue + for rdata in answer: + hostname_ips.append(rdata.to_text()) + networks.extend(hostname_ips) + return set(networks) + +def whitelistUpdate(): + global lock + global quit_now + global WHITELIST + while not quit_now: + start_time = time.time() + list = r.hgetall('F2B_WHITELIST') + new_whitelist = [] + if list: + new_whitelist = genNetworkList(list) + with lock: + if Counter(new_whitelist) != Counter(WHITELIST): + WHITELIST = new_whitelist + logger.logInfo('Whitelist was changed, it has %s entries' % len(WHITELIST)) + time.sleep(60.0 - ((time.time() - start_time) % 60.0)) + +def blacklistUpdate(): + global quit_now + global BLACKLIST + while not quit_now: + start_time = time.time() + list = r.hgetall('F2B_BLACKLIST') + new_blacklist = [] + if list: + new_blacklist = genNetworkList(list) + if Counter(new_blacklist) != Counter(BLACKLIST): + addban = set(new_blacklist).difference(BLACKLIST) + delban = set(BLACKLIST).difference(new_blacklist) + BLACKLIST = new_blacklist + logger.logInfo('Blacklist was changed, it has %s entries' % len(BLACKLIST)) + if addban: + for net in addban: + permBan(net=net) + if delban: + for net in delban: + permBan(net=net, unban=True) + time.sleep(60.0 - ((time.time() - start_time) % 60.0)) + +def quit(signum, frame): + global quit_now + quit_now = True + + +if __name__ == '__main__': + # In case a previous session was killed without cleanup + clear() + # Reinit MAILCOW chain + # Is called before threads start, no locking + logger.logInfo("Initializing mailcow netfilter chain") + tables.initChainIPv4() + tables.initChainIPv6() + + watch_thread = Thread(target=watch) + watch_thread.daemon = True + watch_thread.start() + + if os.getenv('SNAT_TO_SOURCE') and os.getenv('SNAT_TO_SOURCE') != 'n': + try: + snat_ip = os.getenv('SNAT_TO_SOURCE') + snat_ipo = ipaddress.ip_address(snat_ip) + if type(snat_ipo) is ipaddress.IPv4Address: + snat4_thread = Thread(target=snat4,args=(snat_ip,)) + snat4_thread.daemon = True + snat4_thread.start() + except ValueError: + print(os.getenv('SNAT_TO_SOURCE') + ' is not a valid IPv4 address') + + if os.getenv('SNAT6_TO_SOURCE') and os.getenv('SNAT6_TO_SOURCE') != 'n': + try: + snat_ip = os.getenv('SNAT6_TO_SOURCE') + snat_ipo = ipaddress.ip_address(snat_ip) + if type(snat_ipo) is 
ipaddress.IPv6Address: + snat6_thread = Thread(target=snat6,args=(snat_ip,)) + snat6_thread.daemon = True + snat6_thread.start() + except ValueError: + print(os.getenv('SNAT6_TO_SOURCE') + ' is not a valid IPv6 address') + + autopurge_thread = Thread(target=autopurge) + autopurge_thread.daemon = True + autopurge_thread.start() + + mailcowchainwatch_thread = Thread(target=mailcowChainOrder) + mailcowchainwatch_thread.daemon = True + mailcowchainwatch_thread.start() + + blacklistupdate_thread = Thread(target=blacklistUpdate) + blacklistupdate_thread.daemon = True + blacklistupdate_thread.start() + + whitelistupdate_thread = Thread(target=whitelistUpdate) + whitelistupdate_thread.daemon = True + whitelistupdate_thread.start() + + signal.signal(signal.SIGTERM, quit) + atexit.register(clear) + + while not quit_now: + time.sleep(0.5) + + sys.exit(exit_code) diff --git a/data/Dockerfiles/netfilter/modules/IPTables.py b/data/Dockerfiles/netfilter/modules/IPTables.py new file mode 100644 index 00000000..c60ecc61 --- /dev/null +++ b/data/Dockerfiles/netfilter/modules/IPTables.py @@ -0,0 +1,213 @@ +import iptc +import time + +class IPTables: + def __init__(self, chain_name, logger): + self.chain_name = chain_name + self.logger = logger + + def initChainIPv4(self): + if not iptc.Chain(iptc.Table(iptc.Table.FILTER), self.chain_name) in iptc.Table(iptc.Table.FILTER).chains: + iptc.Table(iptc.Table.FILTER).create_chain(self.chain_name) + for c in ['FORWARD', 'INPUT']: + chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), c) + rule = iptc.Rule() + rule.src = '0.0.0.0/0' + rule.dst = '0.0.0.0/0' + target = iptc.Target(rule, self.chain_name) + rule.target = target + if rule not in chain.rules: + chain.insert_rule(rule) + + def initChainIPv6(self): + if not iptc.Chain(iptc.Table6(iptc.Table6.FILTER), self.chain_name) in iptc.Table6(iptc.Table6.FILTER).chains: + iptc.Table6(iptc.Table6.FILTER).create_chain(self.chain_name) + for c in ['FORWARD', 'INPUT']: + chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), c) + rule = iptc.Rule6() + rule.src = '::/0' + rule.dst = '::/0' + target = iptc.Target(rule, self.chain_name) + rule.target = target + if rule not in chain.rules: + chain.insert_rule(rule) + + def checkIPv4ChainOrder(self): + filter_table = iptc.Table(iptc.Table.FILTER) + filter_table.refresh() + return self.checkChainOrder(filter_table) + + def checkIPv6ChainOrder(self): + filter_table = iptc.Table6(iptc.Table6.FILTER) + filter_table.refresh() + return self.checkChainOrder(filter_table) + + def checkChainOrder(self, filter_table): + err = False + exit_code = None + + forward_chain = iptc.Chain(filter_table, 'FORWARD') + input_chain = iptc.Chain(filter_table, 'INPUT') + for chain in [forward_chain, input_chain]: + target_found = False + for position, item in enumerate(chain.rules): + if item.target.name == self.chain_name: + target_found = True + if position > 2: + self.logger.logCrit('Error in %s chain: %s target not found, restarting container' % (chain.name, self.chain_name)) + err = True + exit_code = 2 + if not target_found: + self.logger.logCrit('Error in %s chain: %s target not found, restarting container' % (chain.name, self.chain_name)) + err = True + exit_code = 2 + + return err, exit_code + + def clearIPv4Table(self): + self.clearTable(iptc.Table(iptc.Table.FILTER)) + + def clearIPv6Table(self): + self.clearTable(iptc.Table6(iptc.Table6.FILTER)) + + def clearTable(self, filter_table): + filter_table.autocommit = False + forward_chain = iptc.Chain(filter_table, "FORWARD") + input_chain = 
iptc.Chain(filter_table, "INPUT") + mailcow_chain = iptc.Chain(filter_table, self.chain_name) + if mailcow_chain in filter_table.chains: + for rule in mailcow_chain.rules: + mailcow_chain.delete_rule(rule) + for rule in forward_chain.rules: + if rule.target.name == self.chain_name: + forward_chain.delete_rule(rule) + for rule in input_chain.rules: + if rule.target.name == self.chain_name: + input_chain.delete_rule(rule) + filter_table.delete_chain(self.chain_name) + filter_table.commit() + filter_table.refresh() + filter_table.autocommit = True + + def banIPv4(self, source): + chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), self.chain_name) + rule = iptc.Rule() + rule.src = source + target = iptc.Target(rule, "REJECT") + rule.target = target + if rule in chain.rules: + return False + chain.insert_rule(rule) + return True + + def banIPv6(self, source): + chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), self.chain_name) + rule = iptc.Rule6() + rule.src = source + target = iptc.Target(rule, "REJECT") + rule.target = target + if rule in chain.rules: + return False + chain.insert_rule(rule) + return True + + def unbanIPv4(self, source): + chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), self.chain_name) + rule = iptc.Rule() + rule.src = source + target = iptc.Target(rule, "REJECT") + rule.target = target + if rule not in chain.rules: + return False + chain.delete_rule(rule) + return True + + def unbanIPv6(self, source): + chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), self.chain_name) + rule = iptc.Rule6() + rule.src = source + target = iptc.Target(rule, "REJECT") + rule.target = target + if rule not in chain.rules: + return False + chain.delete_rule(rule) + return True + + def snat4(self, snat_target, source): + try: + table = iptc.Table('nat') + table.refresh() + chain = iptc.Chain(table, 'POSTROUTING') + table.autocommit = False + new_rule = self.getSnat4Rule(snat_target, source) + + if not chain.rules: + # if there are no rules in the chain, insert the new rule directly + self.logger.logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}') + chain.insert_rule(new_rule) + else: + for position, rule in enumerate(chain.rules): + if not hasattr(rule.target, 'parameter'): + continue + match = all(( + new_rule.get_src() == rule.get_src(), + new_rule.get_dst() == rule.get_dst(), + new_rule.target.parameters == rule.target.parameters, + new_rule.target.name == rule.target.name + )) + if position == 0: + if not match: + self.logger.logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}') + chain.insert_rule(new_rule) + else: + if match: + self.logger.logInfo(f'Remove rule for source network {new_rule.src} to SNAT target {snat_target} from POSTROUTING chain at position {position}') + chain.delete_rule(rule) + + table.commit() + table.autocommit = True + return True + except: + self.logger.logCrit('Error running SNAT4, retrying...') + return False + + def snat6(self, snat_target, source): + try: + table = iptc.Table6('nat') + table.refresh() + chain = iptc.Chain(table, 'POSTROUTING') + table.autocommit = False + new_rule = self.getSnat6Rule(snat_target, source) + + if new_rule not in chain.rules: + self.logger.logInfo('Added POSTROUTING rule for source network %s to SNAT target %s' % (new_rule.src, snat_target)) + chain.insert_rule(new_rule) + else: + for position, item in enumerate(chain.rules): + if item == new_rule: + if position != 0: + chain.delete_rule(new_rule) + + table.commit() + table.autocommit = 
True + except: + self.logger.logCrit('Error running SNAT6, retrying...') + + + def getSnat4Rule(self, snat_target, source): + rule = iptc.Rule() + rule.src = source + rule.dst = '!' + rule.src + target = rule.create_target("SNAT") + target.to_source = snat_target + match = rule.create_match("comment") + match.comment = f'{int(round(time.time()))}' + return rule + + def getSnat6Rule(self, snat_target, source): + rule = iptc.Rule6() + rule.src = source + rule.dst = '!' + rule.src + target = rule.create_target("SNAT") + target.to_source = snat_target + return rule diff --git a/data/Dockerfiles/netfilter/modules/Logger.py b/data/Dockerfiles/netfilter/modules/Logger.py new file mode 100644 index 00000000..d60d52fa --- /dev/null +++ b/data/Dockerfiles/netfilter/modules/Logger.py @@ -0,0 +1,23 @@ +import time +import json + +class Logger: + def __init__(self, redis): + self.r = redis + + def log(self, priority, message): + tolog = {} + tolog['time'] = int(round(time.time())) + tolog['priority'] = priority + tolog['message'] = message + self.r.lpush('NETFILTER_LOG', json.dumps(tolog, ensure_ascii=False)) + print(message) + + def logWarn(self, message): + self.log('warn', message) + + def logCrit(self, message): + self.log('crit', message) + + def logInfo(self, message): + self.log('info', message) diff --git a/data/Dockerfiles/netfilter/modules/NFTables.py b/data/Dockerfiles/netfilter/modules/NFTables.py new file mode 100644 index 00000000..d341dc36 --- /dev/null +++ b/data/Dockerfiles/netfilter/modules/NFTables.py @@ -0,0 +1,495 @@ +import nftables +import ipaddress + +class NFTables: + def __init__(self, chain_name, logger): + self.chain_name = chain_name + self.logger = logger + + self.nft = nftables.Nftables() + self.nft.set_json_output(True) + self.nft.set_handle_output(True) + self.nft_chain_names = {'ip': {'filter': {'input': '', 'forward': ''}, 'nat': {'postrouting': ''} }, + 'ip6': {'filter': {'input': '', 'forward': ''}, 'nat': {'postrouting': ''} } } + + self.search_current_chains() + + def initChainIPv4(self): + self.insert_mailcow_chains("ip") + + def initChainIPv6(self): + self.insert_mailcow_chains("ip6") + + def checkIPv4ChainOrder(self): + return self.checkChainOrder("ip") + + def checkIPv6ChainOrder(self): + return self.checkChainOrder("ip6") + + def checkChainOrder(self, filter_table): + err = False + exit_code = None + + for chain in ['input', 'forward']: + chain_position = self.check_mailcow_chains(filter_table, chain) + if chain_position is None: continue + + if chain_position is False: + self.logger.logCrit(f'MAILCOW target not found in {filter_table} {chain} table, restarting container to fix it...') + err = True + exit_code = 2 + + if chain_position > 0: + self.logger.logCrit(f'MAILCOW target is in position {chain_position} in the {filter_table} {chain} table, restarting container to fix it...') + err = True + exit_code = 2 + + return err, exit_code + + def clearIPv4Table(self): + self.clearTable("ip") + + def clearIPv6Table(self): + self.clearTable("ip6") + + def clearTable(self, _family): + is_empty_dict = True + json_command = self.get_base_dict() + chain_handle = self.get_chain_handle(_family, "filter", self.chain_name) + # if no handle, the chain doesn't exists + if chain_handle is not None: + is_empty_dict = False + # flush chain + mailcow_chain = {'family': _family, 'table': 'filter', 'name': self.chain_name} + flush_chain = {'flush': {'chain': mailcow_chain}} + json_command["nftables"].append(flush_chain) + + # remove rule in forward chain + # remove rule in input 
chain + chains_family = [self.nft_chain_names[_family]['filter']['input'], + self.nft_chain_names[_family]['filter']['forward'] ] + + for chain_base in chains_family: + if not chain_base: continue + + rules_handle = self.get_rules_handle(_family, "filter", chain_base) + if rules_handle is not None: + for r_handle in rules_handle: + is_empty_dict = False + mailcow_rule = {'family':_family, + 'table': 'filter', + 'chain': chain_base, + 'handle': r_handle } + delete_rules = {'delete': {'rule': mailcow_rule} } + json_command["nftables"].append(delete_rules) + + # remove chain + # after delete all rules referencing this chain + if chain_handle is not None: + mc_chain_handle = {'family':_family, + 'table': 'filter', + 'name': self.chain_name, + 'handle': chain_handle } + delete_chain = {'delete': {'chain': mc_chain_handle} } + json_command["nftables"].append(delete_chain) + + if is_empty_dict == False: + if self.nft_exec_dict(json_command): + self.logger.logInfo(f"Clear completed: {_family}") + + def banIPv4(self, source): + ban_dict = self.get_ban_ip_dict(source, "ip") + return self.nft_exec_dict(ban_dict) + + def banIPv6(self, source): + ban_dict = self.get_ban_ip_dict(source, "ip6") + return self.nft_exec_dict(ban_dict) + + def unbanIPv4(self, source): + unban_dict = self.get_unban_ip_dict(source, "ip") + if not unban_dict: + return False + return self.nft_exec_dict(unban_dict) + + def unbanIPv6(self, source): + unban_dict = self.get_unban_ip_dict(source, "ip6") + if not unban_dict: + return False + return self.nft_exec_dict(unban_dict) + + def snat4(self, snat_target, source): + self.snat_rule("ip", snat_target, source) + + def snat6(self, snat_target, source): + self.snat_rule("ip6", snat_target, source) + + + def nft_exec_dict(self, query: dict): + if not query: return False + + rc, output, error = self.nft.json_cmd(query) + if rc != 0: + #self.logger.logCrit(f"Nftables Error: {error}") + return False + + # Prevent returning False or empty string on commands that do not produce output + if rc == 0 and len(output) == 0: + return True + + return output + + def get_base_dict(self): + return {'nftables': [{ 'metainfo': { 'json_schema_version': 1} } ] } + + def search_current_chains(self): + nft_chain_priority = {'ip': {'filter': {'input': None, 'forward': None}, 'nat': {'postrouting': None} }, + 'ip6': {'filter': {'input': None, 'forward': None}, 'nat': {'postrouting': None} } } + + # Command: 'nft list chains' + _list = {'list' : {'chains': 'null'} } + command = self.get_base_dict() + command['nftables'].append(_list) + kernel_ruleset = self.nft_exec_dict(command) + if kernel_ruleset: + for _object in kernel_ruleset['nftables']: + chain = _object.get("chain") + if not chain: continue + + _family = chain['family'] + _table = chain['table'] + _hook = chain.get("hook") + _priority = chain.get("prio") + _name = chain['name'] + + if _family not in self.nft_chain_names: continue + if _table not in self.nft_chain_names[_family]: continue + if _hook not in self.nft_chain_names[_family][_table]: continue + if _priority is None: continue + + _saved_priority = nft_chain_priority[_family][_table][_hook] + if _saved_priority is None or _priority < _saved_priority: + # at this point, we know the chain has: + # hook and priority set + # and it has the lowest priority + nft_chain_priority[_family][_table][_hook] = _priority + self.nft_chain_names[_family][_table][_hook] = _name + + def search_for_chain(self, kernel_ruleset: dict, chain_name: str): + found = False + for _object in kernel_ruleset["nftables"]: 
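+      # each element of the libnftables JSON ruleset is an object keyed by its type,
+      # e.g. {"metainfo": {...}}, {"chain": {...}} or {"rule": {...}}; only "chain"
+      # entries are of interest here, everything else is skipped below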
+ chain = _object.get("chain") + if not chain: + continue + ch_name = chain.get("name") + if ch_name == chain_name: + found = True + break + return found + + def get_chain_dict(self, _family: str, _name: str): + # nft (add | create) chain [] + _chain_opts = {'family': _family, 'table': 'filter', 'name': _name } + _add = {'add': {'chain': _chain_opts} } + final_chain = self.get_base_dict() + final_chain["nftables"].append(_add) + return final_chain + + def get_mailcow_jump_rule_dict(self, _family: str, _chain: str): + _jump_rule = self.get_base_dict() + _expr_opt=[] + _expr_counter = {'family': _family, 'table': 'filter', 'packets': 0, 'bytes': 0} + _counter_dict = {'counter': _expr_counter} + _expr_opt.append(_counter_dict) + + _jump_opts = {'jump': {'target': self.chain_name} } + + _expr_opt.append(_jump_opts) + + _rule_params = {'family': _family, + 'table': 'filter', + 'chain': _chain, + 'expr': _expr_opt, + 'comment': "mailcow" } + + _add_rule = {'insert': {'rule': _rule_params} } + + _jump_rule["nftables"].append(_add_rule) + + return _jump_rule + + def insert_mailcow_chains(self, _family: str): + nft_input_chain = self.nft_chain_names[_family]['filter']['input'] + nft_forward_chain = self.nft_chain_names[_family]['filter']['forward'] + # Command: 'nft list table filter' + _table_opts = {'family': _family, 'name': 'filter'} + _list = {'list': {'table': _table_opts} } + command = self.get_base_dict() + command['nftables'].append(_list) + kernel_ruleset = self.nft_exec_dict(command) + if kernel_ruleset: + # chain + if not self.search_for_chain(kernel_ruleset, self.chain_name): + cadena = self.get_chain_dict(_family, self.chain_name) + if self.nft_exec_dict(cadena): + self.logger.logInfo(f"MAILCOW {_family} chain created successfully.") + + input_jump_found, forward_jump_found = False, False + + for _object in kernel_ruleset["nftables"]: + if not _object.get("rule"): + continue + + rule = _object["rule"] + if nft_input_chain and rule["chain"] == nft_input_chain: + if rule.get("comment") and rule["comment"] == "mailcow": + input_jump_found = True + if nft_forward_chain and rule["chain"] == nft_forward_chain: + if rule.get("comment") and rule["comment"] == "mailcow": + forward_jump_found = True + + if not input_jump_found: + command = self.get_mailcow_jump_rule_dict(_family, nft_input_chain) + self.nft_exec_dict(command) + + if not forward_jump_found: + command = self.get_mailcow_jump_rule_dict(_family, nft_forward_chain) + self.nft_exec_dict(command) + + def delete_nat_rule(self, _family:str, _chain: str, _handle:str): + delete_command = self.get_base_dict() + _rule_opts = {'family': _family, + 'table': 'nat', + 'chain': _chain, + 'handle': _handle } + _delete = {'delete': {'rule': _rule_opts} } + delete_command["nftables"].append(_delete) + + return self.nft_exec_dict(delete_command) + + def snat_rule(self, _family: str, snat_target: str, source_address: str): + chain_name = self.nft_chain_names[_family]['nat']['postrouting'] + + # no postrouting chain, may occur if docker has ipv6 disabled. 
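+    # an empty chain name means search_current_chains() found no nat postrouting
+    # hook for this address family at startup, so there is nothing to do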
+ if not chain_name: return + + # Command: nft list chain nat + _chain_opts = {'family': _family, 'table': 'nat', 'name': chain_name} + _list = {'list':{'chain': _chain_opts} } + command = self.get_base_dict() + command['nftables'].append(_list) + kernel_ruleset = self.nft_exec_dict(command) + if not kernel_ruleset: + return + + rule_position = 0 + rule_handle = None + rule_found = False + for _object in kernel_ruleset["nftables"]: + if not _object.get("rule"): + continue + + rule = _object["rule"] + if not rule.get("comment") or not rule["comment"] == "mailcow": + rule_position +=1 + continue + + rule_found = True + rule_handle = rule["handle"] + break + + dest_net = ipaddress.ip_network(source_address) + target_net = ipaddress.ip_network(snat_target) + + if rule_found: + saddr_ip = rule["expr"][0]["match"]["right"]["prefix"]["addr"] + saddr_len = int(rule["expr"][0]["match"]["right"]["prefix"]["len"]) + + daddr_ip = rule["expr"][1]["match"]["right"]["prefix"]["addr"] + daddr_len = int(rule["expr"][1]["match"]["right"]["prefix"]["len"]) + + target_ip = rule["expr"][3]["snat"]["addr"] + + saddr_net = ipaddress.ip_network(saddr_ip + '/' + str(saddr_len)) + daddr_net = ipaddress.ip_network(daddr_ip + '/' + str(daddr_len)) + current_target_net = ipaddress.ip_network(target_ip) + + match = all(( + dest_net == saddr_net, + dest_net == daddr_net, + target_net == current_target_net + )) + try: + if rule_position == 0: + if not match: + # Position 0 , it is a mailcow rule , but it does not have the same parameters + if self.delete_nat_rule(_family, chain_name, rule_handle): + self.logger.logInfo(f'Remove rule for source network {saddr_net} to SNAT target {target_net} from {_family} nat {chain_name} chain, rule does not match configured parameters') + else: + # Position > 0 and is mailcow rule + if self.delete_nat_rule(_family, chain_name, rule_handle): + self.logger.logInfo(f'Remove rule for source network {saddr_net} to SNAT target {target_net} from {_family} nat {chain_name} chain, rule is at position {rule_position}') + except: + self.logger.logCrit(f"Error running SNAT on {_family}, retrying..." 
) + else: + # rule not found + json_command = self.get_base_dict() + try: + snat_dict = {'snat': {'addr': str(target_net.network_address)} } + + expr_counter = {'family': _family, 'table': 'nat', 'packets': 0, 'bytes': 0} + counter_dict = {'counter': expr_counter} + + prefix_dict = {'prefix': {'addr': str(dest_net.network_address), 'len': int(dest_net.prefixlen)} } + payload_dict = {'payload': {'protocol': _family, 'field': "saddr"} } + match_dict1 = {'match': {'op': '==', 'left': payload_dict, 'right': prefix_dict} } + + payload_dict2 = {'payload': {'protocol': _family, 'field': "daddr"} } + match_dict2 = {'match': {'op': '!=', 'left': payload_dict2, 'right': prefix_dict } } + expr_list = [ + match_dict1, + match_dict2, + counter_dict, + snat_dict + ] + rule_fields = {'family': _family, + 'table': 'nat', + 'chain': chain_name, + 'comment': "mailcow", + 'expr': expr_list } + + insert_dict = {'insert': {'rule': rule_fields} } + json_command["nftables"].append(insert_dict) + if self.nft_exec_dict(json_command): + self.logger.logInfo(f'Added {_family} nat {chain_name} rule for source network {dest_net} to {target_net}') + except: + self.logger.logCrit(f"Error running SNAT on {_family}, retrying...") + + def get_chain_handle(self, _family: str, _table: str, chain_name: str): + chain_handle = None + # Command: 'nft list chains {family}' + _list = {'list': {'chains': {'family': _family} } } + command = self.get_base_dict() + command['nftables'].append(_list) + kernel_ruleset = self.nft_exec_dict(command) + if kernel_ruleset: + for _object in kernel_ruleset["nftables"]: + if not _object.get("chain"): + continue + chain = _object["chain"] + if chain["family"] == _family and chain["table"] == _table and chain["name"] == chain_name: + chain_handle = chain["handle"] + break + return chain_handle + + def get_rules_handle(self, _family: str, _table: str, chain_name: str): + rule_handle = [] + # Command: 'nft list chain {family} {table} {chain_name}' + _chain_opts = {'family': _family, 'table': _table, 'name': chain_name} + _list = {'list': {'chain': _chain_opts} } + command = self.get_base_dict() + command['nftables'].append(_list) + + kernel_ruleset = self.nft_exec_dict(command) + if kernel_ruleset: + for _object in kernel_ruleset["nftables"]: + if not _object.get("rule"): + continue + + rule = _object["rule"] + if rule["family"] == _family and rule["table"] == _table and rule["chain"] == chain_name: + if rule.get("comment") and rule["comment"] == "mailcow": + rule_handle.append(rule["handle"]) + return rule_handle + + def get_ban_ip_dict(self, ipaddr: str, _family: str): + json_command = self.get_base_dict() + + expr_opt = [] + ipaddr_net = ipaddress.ip_network(ipaddr) + right_dict = {'prefix': {'addr': str(ipaddr_net.network_address), 'len': int(ipaddr_net.prefixlen) } } + + left_dict = {'payload': {'protocol': _family, 'field': 'saddr'} } + match_dict = {'op': '==', 'left': left_dict, 'right': right_dict } + expr_opt.append({'match': match_dict}) + + counter_dict = {'counter': {'family': _family, 'table': "filter", 'packets': 0, 'bytes': 0} } + expr_opt.append(counter_dict) + + expr_opt.append({'drop': "null"}) + + rule_dict = {'family': _family, 'table': "filter", 'chain': self.chain_name, 'expr': expr_opt} + + base_dict = {'insert': {'rule': rule_dict} } + json_command["nftables"].append(base_dict) + + return json_command + + def get_unban_ip_dict(self, ipaddr:str, _family: str): + json_command = self.get_base_dict() + # Command: 'nft list chain {s_family} filter MAILCOW' + _chain_opts = 
{'family': _family, 'table': 'filter', 'name': self.chain_name} + _list = {'list': {'chain': _chain_opts} } + command = self.get_base_dict() + command['nftables'].append(_list) + kernel_ruleset = self.nft_exec_dict(command) + rule_handle = None + if kernel_ruleset: + for _object in kernel_ruleset["nftables"]: + if not _object.get("rule"): + continue + + rule = _object["rule"]["expr"][0]["match"] + left_opt = rule["left"]["payload"] + if not left_opt["protocol"] == _family: + continue + if not left_opt["field"] =="saddr": + continue + + # ip currently banned + rule_right = rule["right"] + if isinstance(rule_right, dict): + current_rule_ip = rule_right["prefix"]["addr"] + '/' + str(rule_right["prefix"]["len"]) + else: + current_rule_ip = rule_right + current_rule_net = ipaddress.ip_network(current_rule_ip) + + # ip to ban + candidate_net = ipaddress.ip_network(ipaddr) + + if current_rule_net == candidate_net: + rule_handle = _object["rule"]["handle"] + break + + if rule_handle is not None: + mailcow_rule = {'family': _family, 'table': 'filter', 'chain': self.chain_name, 'handle': rule_handle} + delete_rule = {'delete': {'rule': mailcow_rule} } + json_command["nftables"].append(delete_rule) + else: + return False + + return json_command + + def check_mailcow_chains(self, family: str, chain: str): + position = 0 + rule_found = False + chain_name = self.nft_chain_names[family]['filter'][chain] + + if not chain_name: return None + + _chain_opts = {'family': family, 'table': 'filter', 'name': chain_name} + _list = {'list': {'chain': _chain_opts}} + command = self.get_base_dict() + command['nftables'].append(_list) + kernel_ruleset = self.nft_exec_dict(command) + if kernel_ruleset: + for _object in kernel_ruleset["nftables"]: + if not _object.get("rule"): + continue + rule = _object["rule"] + if rule.get("comment") and rule["comment"] == "mailcow": + rule_found = True + break + + position+=1 + + return position if rule_found else False diff --git a/data/Dockerfiles/netfilter/modules/__init__.py b/data/Dockerfiles/netfilter/modules/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/docker-compose.yml b/docker-compose.yml index 8d84e3a7..bba0610f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -434,7 +434,7 @@ services: - acme netfilter-mailcow: - image: mailcow/netfilter:1.52 + image: mailcow/netfilter:1.53 stop_grace_period: 30s depends_on: - dovecot-mailcow From 3e194c79060b84d04ce0b2c577ea4b68c7d8d732 Mon Sep 17 00:00:00 2001 From: Kristian Feldsam Date: Sat, 11 Nov 2023 17:12:00 +0100 Subject: [PATCH 021/126] Domains datatable - server side processing Signed-off-by: Kristian Feldsam --- data/web/inc/lib/ssp.class.php | 587 +++++++++++++++++++++++++++++++++ data/web/js/site/mailbox.js | 33 +- data/web/json_api.php | 65 +++- 3 files changed, 666 insertions(+), 19 deletions(-) create mode 100644 data/web/inc/lib/ssp.class.php diff --git a/data/web/inc/lib/ssp.class.php b/data/web/inc/lib/ssp.class.php new file mode 100644 index 00000000..c36c627c --- /dev/null +++ b/data/web/inc/lib/ssp.class.php @@ -0,0 +1,587 @@ + 'utf8'` - you might need this depending on your PHP / MySQL config + * @return resource PDO connection + */ + static function db ( $conn ) + { + if ( is_array( $conn ) ) { + return self::sql_connect( $conn ); + } + + return $conn; + } + + + /** + * Paging + * + * Construct the LIMIT clause for server-side processing SQL query + * + * @param array $request Data sent to server by DataTables + * @param array $columns Column information array + * @return 
string SQL limit clause + */ + static function limit ( $request, $columns ) + { + $limit = ''; + + if ( isset($request['start']) && $request['length'] != -1 ) { + $limit = "LIMIT ".intval($request['start']).", ".intval($request['length']); + } + + return $limit; + } + + + /** + * Ordering + * + * Construct the ORDER BY clause for server-side processing SQL query + * + * @param array $request Data sent to server by DataTables + * @param array $columns Column information array + * @return string SQL order by clause + */ + static function order ( $tableAS, $request, $columns ) + { + $order = ''; + + if ( isset($request['order']) && count($request['order']) ) { + $orderBy = array(); + $dtColumns = self::pluck( $columns, 'dt' ); + + for ( $i=0, $ien=count($request['order']) ; $i<$ien ; $i++ ) { + // Convert the column index into the column data property + $columnIdx = intval($request['order'][$i]['column']); + $requestColumn = $request['columns'][$columnIdx]; + + $columnIdx = array_search( $columnIdx, $dtColumns ); + $column = $columns[ $columnIdx ]; + + if ( $requestColumn['orderable'] == 'true' ) { + $dir = $request['order'][$i]['dir'] === 'asc' ? + 'ASC' : + 'DESC'; + + $orderBy[] = '`'.$tableAS.'`.`'.$column['db'].'` '.$dir; + } + } + + if ( count( $orderBy ) ) { + $order = 'ORDER BY '.implode(', ', $orderBy); + } + } + + return $order; + } + + + /** + * Searching / Filtering + * + * Construct the WHERE clause for server-side processing SQL query. + * + * NOTE this does not match the built-in DataTables filtering which does it + * word by word on any field. It's possible to do here performance on large + * databases would be very poor + * + * @param array $request Data sent to server by DataTables + * @param array $columns Column information array + * @param array $bindings Array of values for PDO bindings, used in the + * sql_exec() function + * @return string SQL where clause + */ + static function filter ( $tablesAS, $request, $columns, &$bindings ) + { + $globalSearch = array(); + $columnSearch = array(); + $dtColumns = self::pluck( $columns, 'dt' ); + + if ( isset($request['search']) && $request['search']['value'] != '' ) { + $str = $request['search']['value']; + + for ( $i=0, $ien=count($request['columns']) ; $i<$ien ; $i++ ) { + $requestColumn = $request['columns'][$i]; + $columnIdx = array_search( $requestColumn['data'], $dtColumns ); + $column = $columns[ $columnIdx ]; + + if ( $requestColumn['searchable'] == 'true' ) { + if(!empty($column['db'])){ + $binding = self::bind( $bindings, '%'.$str.'%', PDO::PARAM_STR ); + $globalSearch[] = "`".$tablesAS."`.`".$column['db']."` LIKE ".$binding; + } + } + } + } + + // Individual column filtering + if ( isset( $request['columns'] ) ) { + for ( $i=0, $ien=count($request['columns']) ; $i<$ien ; $i++ ) { + $requestColumn = $request['columns'][$i]; + $columnIdx = array_search( $requestColumn['data'], $dtColumns ); + $column = $columns[ $columnIdx ]; + + $str = $requestColumn['search']['value']; + + if ( $requestColumn['searchable'] == 'true' && + $str != '' ) { + if(!empty($column['db'])){ + $binding = self::bind( $bindings, '%'.$str.'%', PDO::PARAM_STR ); + $columnSearch[] = "`".$tablesAS."`.`".$column['db']."` LIKE ".$binding; + } + } + } + } + + // Combine the filters into a single string + $where = ''; + + if ( count( $globalSearch ) ) { + $where = '('.implode(' OR ', $globalSearch).')'; + } + + if ( count( $columnSearch ) ) { + $where = $where === '' ? + implode(' AND ', $columnSearch) : + $where .' AND '. 
implode(' AND ', $columnSearch); + } + + if ( $where !== '' ) { + $where = 'WHERE '.$where; + } + + return $where; + } + + + /** + * Perform the SQL queries needed for an server-side processing requested, + * utilising the helper functions of this class, limit(), order() and + * filter() among others. The returned array is ready to be encoded as JSON + * in response to an SSP request, or can be modified if needed before + * sending back to the client. + * + * @param array $request Data sent to server by DataTables + * @param array|PDO $conn PDO connection resource or connection parameters array + * @param string $table SQL table to query + * @param string $primaryKey Primary key of the table + * @param array $columns Column information array + * @return array Server-side processing response array + */ + static function simple ( $request, $conn, $table, $primaryKey, $columns ) + { + $bindings = array(); + $db = self::db( $conn ); + + // Allow for a JSON string to be passed in + if (isset($request['json'])) { + $request = json_decode($request['json'], true); + } + + // table AS + $tablesAS = null; + if(is_array($table)) { + $tablesAS = $table[1]; + $table = $table[0]; + } + + // Build the SQL query string from the request + $limit = self::limit( $request, $columns ); + $order = self::order( $tablesAS, $request, $columns ); + $where = self::filter( $tablesAS, $request, $columns, $bindings ); + + // Main query to actually get the data + $data = self::sql_exec( $db, $bindings, + "SELECT `$tablesAS`.`".implode("`, `$tablesAS`.`", self::pluck($columns, 'db'))."` + FROM `$table` AS `$tablesAS` + $where + $order + $limit" + ); + + // Data set length after filtering + $resFilterLength = self::sql_exec( $db, $bindings, + "SELECT COUNT(`{$primaryKey}`) + FROM `$table` AS `$tablesAS` + $where" + ); + $recordsFiltered = $resFilterLength[0][0]; + + // Total data set length + $resTotalLength = self::sql_exec( $db, + "SELECT COUNT(`{$primaryKey}`) + FROM `$table` AS `$tablesAS`" + ); + $recordsTotal = $resTotalLength[0][0]; + + /* + * Output + */ + return array( + "draw" => isset ( $request['draw'] ) ? + intval( $request['draw'] ) : + 0, + "recordsTotal" => intval( $recordsTotal ), + "recordsFiltered" => intval( $recordsFiltered ), + "data" => self::data_output( $columns, $data ) + ); + } + + + /** + * The difference between this method and the `simple` one, is that you can + * apply additional `where` conditions to the SQL queries. These can be in + * one of two forms: + * + * * 'Result condition' - This is applied to the result set, but not the + * overall paging information query - i.e. it will not effect the number + * of records that a user sees they can have access to. This should be + * used when you want apply a filtering condition that the user has sent. + * * 'All condition' - This is applied to all queries that are made and + * reduces the number of records that the user can access. This should be + * used in conditions where you don't want the user to ever have access to + * particular records (for example, restricting by a login id). + * + * In both cases the extra condition can be added as a simple string, or if + * you are using external values, as an assoc. array with `condition` and + * `bindings` parameters. The `condition` is a string with the SQL WHERE + * condition and `bindings` is an assoc. array of the binding names and + * values. 
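+	 * Illustrative call, mirroring the domain datatables endpoint added to
+	 * json_api.php in this change set:
+	 *
+	 *   SSP::complex( $_GET, $pdo, ['domain', 'd'], 'domain', $columns,
+	 *     'INNER JOIN domain_admins as da ON da.domain = d.domain',
+	 *     [ 'condition' => 'da.active = 1 and da.username = :username',
+	 *       'bindings' => ['username' => $_SESSION['mailcow_cc_username']] ] );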
+ * + * @param array $request Data sent to server by DataTables + * @param array|PDO $conn PDO connection resource or connection parameters array + * @param string|array $table SQL table to query, if array second key is AS + * @param string $primaryKey Primary key of the table + * @param array $columns Column information array + * @param string $join JOIN sql string + * @param string|array $whereResult WHERE condition to apply to the result set + * @return array Server-side processing response array + */ + static function complex ( + $request, + $conn, + $table, + $primaryKey, + $columns, + $join=null, + $whereResult=null + ) { + $bindings = array(); + $db = self::db( $conn ); + + // table AS + $tablesAS = null; + if(is_array($table)) { + $tablesAS = $table[1]; + $table = $table[0]; + } + + // Build the SQL query string from the request + $limit = self::limit( $request, $columns ); + $order = self::order( $tablesAS, $request, $columns ); + $where = self::filter( $tablesAS, $request, $columns, $bindings ); + + // whereResult can be a simple string, or an assoc. array with a + // condition and bindings + if ( $whereResult ) { + $str = $whereResult; + + if ( is_array($whereResult) ) { + $str = $whereResult['condition']; + + if ( isset($whereResult['bindings']) ) { + self::add_bindings($bindings, $whereResult); + } + } + + $where = $where ? + $where .' AND '.$str : + 'WHERE '.$str; + } + + // Main query to actually get the data + $data = self::sql_exec( $db, $bindings, + "SELECT `$tablesAS`.`".implode("`, `$tablesAS`.`", self::pluck($columns, 'db'))."` + FROM `$table` AS `$tablesAS` + $join + $where + $order + $limit" + ); + + // Data set length after filtering + $resFilterLength = self::sql_exec( $db, $bindings, + "SELECT COUNT(`{$tablesAS}`.`{$primaryKey}`) + FROM `$table` AS `$tablesAS` + $join + $where" + ); + $recordsFiltered = $resFilterLength[0][0]; + + // Total data set length + $resTotalLength = self::sql_exec( $db, $bindings, + "SELECT COUNT(`{$tablesAS}`.`{$primaryKey}`) + FROM `$table` AS `$tablesAS` + $join + $where" + ); + $recordsTotal = $resTotalLength[0][0]; + + /* + * Output + */ + return array( + "draw" => isset ( $request['draw'] ) ? + intval( $request['draw'] ) : + 0, + "recordsTotal" => intval( $recordsTotal ), + "recordsFiltered" => intval( $recordsFiltered ), + "data" => self::data_output( $columns, $data ) + ); + } + + + /** + * Connect to the database + * + * @param array $sql_details SQL server connection details array, with the + * properties: + * * host - host name + * * db - database name + * * user - user name + * * pass - user password + * @return resource Database connection handle + */ + static function sql_connect ( $sql_details ) + { + try { + $db = @new PDO( + "mysql:host={$sql_details['host']};dbname={$sql_details['db']}", + $sql_details['user'], + $sql_details['pass'], + array( PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION ) + ); + } + catch (PDOException $e) { + self::fatal( + "An error occurred while connecting to the database. ". + "The error reported by the server was: ".$e->getMessage() + ); + } + + return $db; + } + + + /** + * Execute an SQL query on the database + * + * @param resource $db Database handler + * @param array $bindings Array of PDO binding values from bind() to be + * used for safely escaping strings. Note that this can be given as the + * SQL query string if no bindings are required. + * @param string $sql SQL query to execute. 
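+	 *   Both call forms are used by this class, e.g.
+	 *     sql_exec( $db, $bindings, "SELECT ... $where $order $limit" );
+	 *     sql_exec( $db, "SELECT COUNT(`{$primaryKey}`) FROM `$table`" );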
+ * @return array Result from the query (all rows) + */ + static function sql_exec ( $db, $bindings, $sql=null ) + { + // Argument shifting + if ( $sql === null ) { + $sql = $bindings; + } + + $stmt = $db->prepare( $sql ); + + // Bind parameters + if ( is_array( $bindings ) ) { + for ( $i=0, $ien=count($bindings) ; $i<$ien ; $i++ ) { + $binding = $bindings[$i]; + $stmt->bindValue( $binding['key'], $binding['val'], $binding['type'] ); + } + } + + // Execute + try { + $stmt->execute(); + } + catch (PDOException $e) { + self::fatal( "An SQL error occurred: ".$e->getMessage() ); + } + + // Return all + return $stmt->fetchAll( PDO::FETCH_BOTH ); + } + + + /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Internal methods + */ + + /** + * Throw a fatal error. + * + * This writes out an error message in a JSON string which DataTables will + * see and show to the user in the browser. + * + * @param string $msg Message to send to the client + */ + static function fatal ( $msg ) + { + echo json_encode( array( + "error" => $msg + ) ); + + exit(0); + } + + /** + * Create a PDO binding key which can be used for escaping variables safely + * when executing a query with sql_exec() + * + * @param array &$a Array of bindings + * @param * $val Value to bind + * @param int $type PDO field type + * @return string Bound key to be used in the SQL where this parameter + * would be used. + */ + static function bind ( &$a, $val, $type ) + { + $key = ':binding_'.count( $a ); + + $a[] = array( + 'key' => $key, + 'val' => $val, + 'type' => $type + ); + + return $key; + } + + static function add_bindings(&$bindings, $vals) + { + foreach($vals['bindings'] as $key => $value) { + $bindings[] = array( + 'key' => $key, + 'val' => $value, + 'type' => PDO::PARAM_STR + ); + } + } + + + /** + * Pull a particular property from each assoc. array in a numeric array, + * returning and array of the property values from each item. + * + * @param array $a Array to get data from + * @param string $prop Property to read + * @return array Array of property values + */ + static function pluck ( $a, $prop ) + { + $out = array(); + + for ( $i=0, $len=count($a) ; $i<$len ; $i++ ) { + if ( empty($a[$i][$prop]) && $a[$i][$prop] !== 0 ) { + continue; + } + + //removing the $out array index confuses the filter method in doing proper binding, + //adding it ensures that the array data are mapped correctly + $out[$i] = $a[$i][$prop]; + } + + return $out; + } + + + /** + * Return a string from an array or a string + * + * @param array|string $a Array to join + * @param string $join Glue for the concatenation + * @return string Joined string + */ + static function _flatten ( $a, $join = ' AND ' ) + { + if ( ! 
$a ) { + return ''; + } + else if ( $a && is_array($a) ) { + return implode( $join, $a ); + } + return $a; + } +} + diff --git a/data/web/js/site/mailbox.js b/data/web/js/site/mailbox.js index c2b1761d..5e1ba315 100644 --- a/data/web/js/site/mailbox.js +++ b/data/web/js/site/mailbox.js @@ -435,7 +435,7 @@ jQuery(function($){ var table = $('#domain_table').DataTable({ responsive: true, processing: true, - serverSide: false, + serverSide: true, stateSave: true, pageLength: pagination_size, dom: "<'row'<'col-sm-12 col-md-6'f><'col-sm-12 col-md-6'l>>" + @@ -447,9 +447,9 @@ jQuery(function($){ }, ajax: { type: "GET", - url: "/api/v1/get/domain/all", + url: "/api/v1/get/domain/datatables", dataSrc: function(json){ - $.each(json, function(i, item) { + $.each(json.data, function(i, item) { item.domain_name = escapeHtml(item.domain_name); item.aliases = item.aliases_in_domain + " / " + item.max_num_aliases_for_domain; @@ -498,7 +498,7 @@ jQuery(function($){ } }); - return json; + return json.data; } }, columns: [ @@ -528,17 +528,20 @@ jQuery(function($){ { title: lang.aliases, data: 'aliases', + searchable: false, defaultContent: '' }, { title: lang.mailboxes, data: 'mailboxes', + searchable: false, responsivePriority: 4, defaultContent: '' }, { title: lang.domain_quota, data: 'quota', + searchable: false, defaultContent: '', render: function (data, type) { data = data.split("/"); @@ -548,6 +551,8 @@ jQuery(function($){ { title: lang.stats, data: 'stats', + searchable: false, + orderable: false, defaultContent: '', render: function (data, type) { data = data.split("/"); @@ -557,21 +562,29 @@ jQuery(function($){ { title: lang.mailbox_defquota, data: 'def_quota_for_mbox', + searchable: false, + orderable: false, defaultContent: '' }, { title: lang.mailbox_quota, data: 'max_quota_for_mbox', + searchable: false, + orderable: false, defaultContent: '' }, { title: 'RL', data: 'rl', + searchable: false, + orderable: false, defaultContent: '' }, { title: lang.backup_mx, data: 'backupmx', + searchable: false, + orderable: false, defaultContent: '', redner: function (data, type){ return 1==value ? 
'' : 0==value && ''; @@ -580,30 +593,40 @@ jQuery(function($){ { title: lang.domain_admins, data: 'domain_admins', + searchable: false, + orderable: false, defaultContent: '', className: 'none' }, { title: lang.created_on, data: 'created', + searchable: false, + orderable: false, defaultContent: '', className: 'none' }, { title: lang.last_modified, data: 'modified', + searchable: false, + orderable: false, defaultContent: '', className: 'none' }, { title: 'Tags', data: 'tags', + searchable: false, + orderable: false, defaultContent: '', className: 'none' }, { title: lang.active, data: 'active', + searchable: false, + orderable: false, defaultContent: '', responsivePriority: 6, render: function (data, type) { @@ -613,6 +636,8 @@ jQuery(function($){ { title: lang.action, data: 'action', + searchable: false, + orderable: false, className: 'dt-sm-head-hidden dt-data-w100 dtr-col-md dt-text-right', responsivePriority: 5, defaultContent: '' diff --git a/data/web/json_api.php b/data/web/json_api.php index b375bc8e..6c008a60 100644 --- a/data/web/json_api.php +++ b/data/web/json_api.php @@ -15,7 +15,7 @@ function api_log($_data) { continue; } - $value = json_decode($value, true); + $value = json_decode($value, true); if ($value) { if (is_array($value)) unset($value["csrf_token"]); foreach ($value as $key => &$val) { @@ -23,7 +23,7 @@ function api_log($_data) { $val = '*'; } } - $value = json_encode($value); + $value = json_encode($value); } $data_var[] = $data . "='" . $value . "'"; } @@ -44,7 +44,7 @@ function api_log($_data) { 'msg' => 'Redis: '.$e ); return false; - } + } } if (isset($_GET['query'])) { @@ -178,12 +178,12 @@ if (isset($_GET['query'])) { // parse post data $post = trim(file_get_contents('php://input')); if ($post) $post = json_decode($post); - + // process registration data from authenticator try { // decode base64 strings $clientDataJSON = base64_decode($post->clientDataJSON); - $attestationObject = base64_decode($post->attestationObject); + $attestationObject = base64_decode($post->attestationObject); // processCreate($clientDataJSON, $attestationObject, $challenge, $requireUserVerification=false, $requireUserPresent=true, $failIfRootMismatch=true) $data = $WebAuthn->processCreate($clientDataJSON, $attestationObject, $_SESSION['challenge'], false, true); @@ -250,7 +250,7 @@ if (isset($_GET['query'])) { default: process_add_return(mailbox('add', 'domain', $attr)); break; - } + } break; case "resource": process_add_return(mailbox('add', 'resource', $attr)); @@ -470,7 +470,7 @@ if (isset($_GET['query'])) { // false, if only internal is allowed // null, if internal and cross-platform is allowed $createArgs = $WebAuthn->getCreateArgs($_SESSION["mailcow_cc_username"], $_SESSION["mailcow_cc_username"], $_SESSION["mailcow_cc_username"], 30, false, $GLOBALS['WEBAUTHN_UV_FLAG_REGISTER'], null, $excludeCredentialIds); - + print(json_encode($createArgs)); $_SESSION['challenge'] = $WebAuthn->getChallenge(); return; @@ -523,9 +523,44 @@ if (isset($_GET['query'])) { case "domain": switch ($object) { + case "datatables": + $table = ['domain', 'd']; + $primaryKey = 'domain'; + $columns = [ + ['db' => 'domain', 'dt' => 2], + ['db' => 'aliases', 'dt' => 3], + ['db' => 'mailboxes', 'dt' => 4], + ['db' => 'quota', 'dt' => 5], + ]; + + require_once $_SERVER['DOCUMENT_ROOT'] . 
'/inc/lib/ssp.class.php'; + global $pdo; + if($_SESSION['mailcow_cc_role'] === 'admin') { + $data = SSP::simple($_GET, $pdo, $table, $primaryKey, $columns); + } elseif ($_SESSION['mailcow_cc_role'] === 'domainadmin') { + $data = SSP::complex($_GET, $pdo, $table, $primaryKey, $columns, + 'INNER JOIN domain_admins as da ON da.domain = d.domain', + [ + 'condition' => 'da.active = 1 and da.username = :username', + 'bindings' => ['username' => $_SESSION['mailcow_cc_username']] + ]); + } + + if (!empty($data['data'])) { + $domainsData = []; + foreach ($data['data'] as $domain) { + if ($details = mailbox('get', 'domain_details', $domain[2])) { + $domainsData[] = $details; + } + } + $data['data'] = $domainsData; + } + + process_get_return($data); + break; case "all": $tags = null; - if (isset($_GET['tags']) && $_GET['tags'] != '') + if (isset($_GET['tags']) && $_GET['tags'] != '') $tags = explode(',', $_GET['tags']); $domains = mailbox('get', 'domains', null, $tags); @@ -1014,7 +1049,7 @@ if (isset($_GET['query'])) { case "all": case "reduced": $tags = null; - if (isset($_GET['tags']) && $_GET['tags'] != '') + if (isset($_GET['tags']) && $_GET['tags'] != '') $tags = explode(',', $_GET['tags']); if (empty($extra)) $domains = mailbox('get', 'domains'); @@ -1048,7 +1083,7 @@ if (isset($_GET['query'])) { break; default: $tags = null; - if (isset($_GET['tags']) && $_GET['tags'] != '') + if (isset($_GET['tags']) && $_GET['tags'] != '') $tags = explode(',', $_GET['tags']); if ($tags === null) { @@ -1058,7 +1093,7 @@ if (isset($_GET['query'])) { $mailboxes = mailbox('get', 'mailboxes', $object, $tags); if (is_array($mailboxes)) { foreach ($mailboxes as $mailbox) { - if ($details = mailbox('get', 'mailbox_details', $mailbox)) + if ($details = mailbox('get', 'mailbox_details', $mailbox)) $data[] = $details; } } @@ -1557,15 +1592,15 @@ if (isset($_GET['query'])) { 'solr_size' => $solr_size, 'solr_documents' => $solr_documents )); - break; + break; case "host": if (!$extra){ $stats = docker("host_stats"); echo json_encode($stats); - } + } else if ($extra == "ip") { // get public ips - + $curl = curl_init(); curl_setopt($curl, CURLOPT_RETURNTRANSFER, 1); curl_setopt($curl, CURLOPT_POST, 0); @@ -1972,7 +2007,7 @@ if (isset($_GET['query'])) { exit(); } } -if ($_SESSION['mailcow_cc_api'] === true) { +if (array_key_exists('mailcow_cc_api', $_SESSION) && $_SESSION['mailcow_cc_api'] === true) { if (isset($_SESSION['mailcow_cc_api']) && $_SESSION['mailcow_cc_api'] === true) { unset($_SESSION['return']); } From 28cec9969931de92b128c086b1f113c35e600210 Mon Sep 17 00:00:00 2001 From: Kristian Feldsam Date: Sun, 12 Nov 2023 10:26:38 +0100 Subject: [PATCH 022/126] Mailboxes datatable - server side processing Signed-off-by: Kristian Feldsam --- data/web/js/site/mailbox.js | 39 ++++++++++++++++++------------------- data/web/json_api.php | 33 +++++++++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 20 deletions(-) diff --git a/data/web/js/site/mailbox.js b/data/web/js/site/mailbox.js index 5e1ba315..fe6113a8 100644 --- a/data/web/js/site/mailbox.js +++ b/data/web/js/site/mailbox.js @@ -869,7 +869,7 @@ jQuery(function($){ var table = $('#mailbox_table').DataTable({ responsive: true, processing: true, - serverSide: false, + serverSide: true, stateSave: true, pageLength: pagination_size, dom: "<'row'<'col-sm-12 col-md-6'f><'col-sm-12 col-md-6'l>>" + @@ -878,13 +878,12 @@ jQuery(function($){ language: lang_datatables, initComplete: function(settings, json){ hideTableExpandCollapseBtn('#tab-mailboxes', 
'#mailbox_table'); - filterByDomain(json, 8, table); }, ajax: { type: "GET", - url: "/api/v1/get/mailbox/reduced", + url: "/api/v1/get/mailbox/datatables", dataSrc: function(json){ - $.each(json, function (i, item) { + $.each(json.data, function (i, item) { item.quota = { sortBy: item.quota_used, value: item.quota @@ -970,7 +969,7 @@ jQuery(function($){ } }); - return json; + return json.data; } }, columns: [ @@ -1000,13 +999,15 @@ jQuery(function($){ { title: lang.domain_quota, data: 'quota.value', + searchable: false, responsivePriority: 8, - defaultContent: '', - orderData: 23 + defaultContent: '' }, { title: lang.last_mail_login, data: 'last_mail_login', + searchable: false, + orderable: false, defaultContent: '', responsivePriority: 7, render: function (data, type) { @@ -1019,11 +1020,15 @@ jQuery(function($){ { title: lang.last_pw_change, data: 'last_pw_change', + searchable: false, + orderable: false, defaultContent: '' }, { title: lang.in_use, data: 'in_use.value', + searchable: false, + orderable: false, defaultContent: '', responsivePriority: 9, className: 'dt-data-w100', @@ -1092,6 +1097,8 @@ jQuery(function($){ { title: lang.msg_num, data: 'messages', + searchable: false, + orderable: false, defaultContent: '', responsivePriority: 5 }, @@ -1116,6 +1123,8 @@ jQuery(function($){ { title: lang.active, data: 'active', + searchable: false, + orderable: false, defaultContent: '', responsivePriority: 4, render: function (data, type) { @@ -1125,22 +1134,12 @@ jQuery(function($){ { title: lang.action, data: 'action', + searchable: false, + orderable: false, className: 'dt-sm-head-hidden dt-data-w100 dtr-col-md dt-text-right', responsivePriority: 6, defaultContent: '' - }, - { - title: "", - data: 'quota.sortBy', - defaultContent: '', - className: "d-none" - }, - { - title: "", - data: 'in_use.sortBy', - defaultContent: '', - className: "d-none" - }, + } ] }); diff --git a/data/web/json_api.php b/data/web/json_api.php index 6c008a60..3077bf7b 100644 --- a/data/web/json_api.php +++ b/data/web/json_api.php @@ -1046,6 +1046,39 @@ if (isset($_GET['query'])) { break; case "mailbox": switch ($object) { + case "datatables": + $table = ['mailbox', 'm']; + $primaryKey = 'username'; + $columns = [ + ['db' => 'username', 'dt' => 2], + ['db' => 'quota', 'dt' => 3], + ]; + + require_once $_SERVER['DOCUMENT_ROOT'] . 
'/inc/lib/ssp.class.php';
+        global $pdo;
+        if($_SESSION['mailcow_cc_role'] === 'admin') {
+          $data = SSP::complex($_GET, $pdo, $table, $primaryKey, $columns, null, "(`m`.`kind` = '' OR `m`.`kind` = NULL)");
+        } elseif ($_SESSION['mailcow_cc_role'] === 'domainadmin') {
+          $data = SSP::complex($_GET, $pdo, $table, $primaryKey, $columns,
+            'INNER JOIN domain_admins as da ON da.domain = m.domain',
+            [
+              'condition' => "(`m`.`kind` = '' OR `m`.`kind` = NULL) AND `da`.`active` = 1 AND `da`.`username` = :username",
+              'bindings' => ['username' => $_SESSION['mailcow_cc_username']]
+            ]);
+        }
+
+        if (!empty($data['data'])) {
+          $mailboxData = [];
+          foreach ($data['data'] as $mailbox) {
+            if ($details = mailbox('get', 'mailbox_details', $mailbox[2])) {
+              $mailboxData[] = $details;
+            }
+          }
+          $data['data'] = $mailboxData;
+        }
+
+        process_get_return($data);
+        break;
       case "all":
       case "reduced":
         $tags = null;
From 4f109c1a9401582264353dc7ac9aa59fb0243ef0 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Sun, 12 Nov 2023 17:28:57 +0000
Subject: [PATCH 023/126] Update dependency krakjoe/apcu to v5.1.23

Signed-off-by: milkmaker
---
 data/Dockerfiles/phpfpm/Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/data/Dockerfiles/phpfpm/Dockerfile b/data/Dockerfiles/phpfpm/Dockerfile
index 49031033..123061a5 100644
--- a/data/Dockerfiles/phpfpm/Dockerfile
+++ b/data/Dockerfiles/phpfpm/Dockerfile
@@ -2,7 +2,7 @@ FROM php:8.2-fpm-alpine3.17
 LABEL maintainer "The Infrastructure Company GmbH "
 # renovate: datasource=github-tags depName=krakjoe/apcu versioning=semver-coerced extractVersion=^v(?.*)$
-ARG APCU_PECL_VERSION=5.1.22
+ARG APCU_PECL_VERSION=5.1.23
 # renovate: datasource=github-tags depName=Imagick/imagick versioning=semver-coerced extractVersion=(?.*)$
 ARG IMAGICK_PECL_VERSION=3.7.0
 # renovate: datasource=github-tags depName=php/pecl-mail-mailparse versioning=semver-coerced extractVersion=^v(?.*)$
From a8dfa951268169bc9017165e2f6b0b46a30aaf6e Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Tue, 21 Nov 2023 09:03:02 +0000
Subject: [PATCH 024/126] Update dependency phpredis/phpredis to v6.0.2

Signed-off-by: milkmaker
---
 data/Dockerfiles/phpfpm/Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/data/Dockerfiles/phpfpm/Dockerfile b/data/Dockerfiles/phpfpm/Dockerfile
index 49031033..88e5c990 100644
--- a/data/Dockerfiles/phpfpm/Dockerfile
+++ b/data/Dockerfiles/phpfpm/Dockerfile
@@ -10,7 +10,7 @@ ARG MAILPARSE_PECL_VERSION=3.1.6
 # renovate: datasource=github-tags depName=php-memcached-dev/php-memcached versioning=semver-coerced extractVersion=^v(?.*)$
 ARG MEMCACHED_PECL_VERSION=3.2.0
 # renovate: datasource=github-tags depName=phpredis/phpredis versioning=semver-coerced extractVersion=(?.*)$
-ARG REDIS_PECL_VERSION=6.0.1
+ARG REDIS_PECL_VERSION=6.0.2
 # renovate: datasource=github-tags depName=composer/composer versioning=semver-coerced extractVersion=(?.*)$
 ARG COMPOSER_VERSION=2.6.5
From d4dd1e37ce02f264639434e9104a3bf742195c2c Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Tue, 21 Nov 2023 09:03:09 +0000
Subject: [PATCH 025/126] Update dependency tianon/gosu to v1.17

Signed-off-by: milkmaker
---
 data/Dockerfiles/dovecot/Dockerfile | 2 +-
 data/Dockerfiles/sogo/Dockerfile | 2 +-
 data/Dockerfiles/solr/Dockerfile | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git 
a/data/Dockerfiles/dovecot/Dockerfile b/data/Dockerfiles/dovecot/Dockerfile index 2ace9029..d1413b2a 100644 --- a/data/Dockerfiles/dovecot/Dockerfile +++ b/data/Dockerfiles/dovecot/Dockerfile @@ -5,7 +5,7 @@ ARG DEBIAN_FRONTEND=noninteractive # renovate: datasource=github-tags depName=dovecot/core versioning=semver-coerced extractVersion=(?.*)$ ARG DOVECOT=2.3.21 # renovate: datasource=github-releases depName=tianon/gosu versioning=semver-coerced extractVersion=(?.*)$ -ARG GOSU_VERSION=1.16 +ARG GOSU_VERSION=1.17 ENV LC_ALL C diff --git a/data/Dockerfiles/sogo/Dockerfile b/data/Dockerfiles/sogo/Dockerfile index e8a7410f..cbc5c93a 100644 --- a/data/Dockerfiles/sogo/Dockerfile +++ b/data/Dockerfiles/sogo/Dockerfile @@ -4,7 +4,7 @@ LABEL maintainer "The Infrastructure Company GmbH " ARG DEBIAN_FRONTEND=noninteractive ARG SOGO_DEBIAN_REPOSITORY=http://packages.sogo.nu/nightly/5/debian/ # renovate: datasource=github-releases depName=tianon/gosu versioning=semver-coerced extractVersion=^(?.*)$ -ARG GOSU_VERSION=1.16 +ARG GOSU_VERSION=1.17 ENV LC_ALL C # Prerequisites diff --git a/data/Dockerfiles/solr/Dockerfile b/data/Dockerfiles/solr/Dockerfile index a6359876..42913351 100644 --- a/data/Dockerfiles/solr/Dockerfile +++ b/data/Dockerfiles/solr/Dockerfile @@ -3,7 +3,7 @@ FROM solr:7.7-slim USER root # renovate: datasource=github-releases depName=tianon/gosu versioning=semver-coerced extractVersion=(?.*)$ -ARG GOSU_VERSION=1.16 +ARG GOSU_VERSION=1.17 COPY solr.sh / COPY solr-config-7.7.0.xml / From d2e38678936158637fae1f9518248cc10099bf7f Mon Sep 17 00:00:00 2001 From: FreddleSpl0it Date: Thu, 23 Nov 2023 16:12:43 +0100 Subject: [PATCH 026/126] [Web][Rspamd] implement custom mailbox attributes and improve domain wide footer --- data/Dockerfiles/rspamd/docker-entrypoint.sh | 3 + data/conf/rspamd/dynmaps/footer.php | 89 ++++++++++ data/conf/rspamd/lua/rspamd.local.lua | 53 +++--- data/web/api/openapi.yaml | 161 +++++++++++++++++ data/web/edit.php | 3 + data/web/inc/functions.mailbox.inc.php | 178 +++++++++++++++---- data/web/inc/init_db.inc.php | 17 +- data/web/js/site/edit.js | 17 ++ data/web/json_api.php | 8 +- data/web/lang/lang.de-de.json | 4 + data/web/lang/lang.en-gb.json | 7 +- data/web/templates/edit.twig | 1 + data/web/templates/edit/domain.twig | 26 ++- data/web/templates/edit/mailbox.twig | 32 ++++ 14 files changed, 530 insertions(+), 69 deletions(-) create mode 100644 data/conf/rspamd/dynmaps/footer.php diff --git a/data/Dockerfiles/rspamd/docker-entrypoint.sh b/data/Dockerfiles/rspamd/docker-entrypoint.sh index e6d329b5..8af7619c 100755 --- a/data/Dockerfiles/rspamd/docker-entrypoint.sh +++ b/data/Dockerfiles/rspamd/docker-entrypoint.sh @@ -79,6 +79,9 @@ EOF redis-cli -h redis-mailcow SLAVEOF NO ONE fi +# Provide additional lua modules +ln -s /usr/lib/$(uname -m)-linux-gnu/liblua5.1-cjson.so.0.0.0 /usr/lib/rspamd/cjson.so + chown -R _rspamd:_rspamd /var/lib/rspamd \ /etc/rspamd/local.d \ /etc/rspamd/override.d \ diff --git a/data/conf/rspamd/dynmaps/footer.php b/data/conf/rspamd/dynmaps/footer.php new file mode 100644 index 00000000..ca18697a --- /dev/null +++ b/data/conf/rspamd/dynmaps/footer.php @@ -0,0 +1,89 @@ + PDO::ERRMODE_EXCEPTION, + PDO::ATTR_DEFAULT_FETCH_MODE => PDO::FETCH_ASSOC, + PDO::ATTR_EMULATE_PREPARES => false, +]; +try { + $pdo = new PDO($dsn, $database_user, $database_pass, $opt); +} +catch (PDOException $e) { + error_log("FOOTER: " . $e . 
PHP_EOL); + http_response_code(501); + exit; +} + +if (!function_exists('getallheaders')) { + function getallheaders() { + if (!is_array($_SERVER)) { + return array(); + } + $headers = array(); + foreach ($_SERVER as $name => $value) { + if (substr($name, 0, 5) == 'HTTP_') { + $headers[str_replace(' ', '-', ucwords(strtolower(str_replace('_', ' ', substr($name, 5)))))] = $value; + } + } + return $headers; + } +} + +// Read headers +$headers = getallheaders(); +// Get Domain +$domain = $headers['Domain']; +// Get Username +$username = $headers['Username']; +// define empty footer +$empty_footer = json_encode(array( + 'html' => '', + 'plain' => '', + 'vars' => array() +)); + +error_log("FOOTER: checking for domain " . $domain . " and user " . $username . PHP_EOL); + +try { + $stmt = $pdo->prepare("SELECT `plain`, `html`, `mbox_exclude` FROM `domain_wide_footer` + WHERE `domain` = :domain"); + $stmt->execute(array( + ':domain' => $domain + )); + $footer = $stmt->fetch(PDO::FETCH_ASSOC); + if (in_array($username, json_decode($footer['mbox_exclude']))){ + $footer = false; + } + if (empty($footer)){ + echo $empty_footer; + exit; + } + error_log("FOOTER: " . json_encode($footer) . PHP_EOL); + + $stmt = $pdo->prepare("SELECT `custom_attributes` FROM `mailbox` WHERE `username` = :username"); + $stmt->execute(array( + ':username' => $username + )); + $custom_attributes = $stmt->fetch(PDO::FETCH_ASSOC)['custom_attributes']; + if (empty($custom_attributes)){ + $custom_attributes = (object)array(); + } +} +catch (Exception $e) { + error_log("FOOTER: " . $e->getMessage() . PHP_EOL); + http_response_code(502); + exit; +} + + +// return footer +$footer["vars"] = $custom_attributes; +echo json_encode($footer); diff --git a/data/conf/rspamd/lua/rspamd.local.lua b/data/conf/rspamd/lua/rspamd.local.lua index 459b2339..44be2e34 100644 --- a/data/conf/rspamd/lua/rspamd.local.lua +++ b/data/conf/rspamd/lua/rspamd.local.lua @@ -527,12 +527,11 @@ rspamd_config:register_symbol({ name = 'MOO_FOOTER', type = 'prefilter', callback = function(task) + local cjson = require "cjson" local lua_mime = require "lua_mime" local lua_util = require "lua_util" local rspamd_logger = require "rspamd_logger" - local rspamd_redis = require "rspamd_redis" - local ucl = require "ucl" - local redis_params = rspamd_parse_redis_server('footer') + local rspamd_http = require "rspamd_http" local envfrom = task:get_from(1) local uname = task:get_user() if not envfrom or not uname then @@ -541,6 +540,7 @@ rspamd_config:register_symbol({ local uname = uname:lower() local env_from_domain = envfrom[1].domain:lower() -- get smtp from domain in lower case + -- determine newline type local function newline(task) local t = task:get_newlines_type() @@ -552,20 +552,19 @@ rspamd_config:register_symbol({ return '\r\n' end - local function redis_cb_footer(err, data) + -- retrieve footer + local function footer_cb(err_message, code, data, headers) if err or type(data) ~= 'string' then rspamd_logger.infox(rspamd_config, "domain wide footer request for user %s returned invalid or empty data (\"%s\") or error (\"%s\")", uname, data, err) else + -- parse json string - local parser = ucl.parser() - local res,err = parser:parse_string(data) - if not res then + local footer = cjson.decode(data) + if not footer then rspamd_logger.infox(rspamd_config, "parsing domain wide footer for user %s returned invalid or empty data (\"%s\") or error (\"%s\")", uname, data, err) else - local footer = parser:get_object() - if footer and type(footer) == "table" and 
(footer.html and footer.html ~= "" or footer.plain and footer.plain ~= "") then - rspamd_logger.infox(rspamd_config, "found domain wide footer for user %s: html=%s, plain=%s", uname, footer.html, footer.plain) + rspamd_logger.infox(rspamd_config, "found domain wide footer for user %s: html=%s, plain=%s, vars=%s", uname, footer.html, footer.plain, footer.vars) local envfrom_mime = task:get_from(2) local from_name = "" @@ -575,6 +574,7 @@ rspamd_config:register_symbol({ from_name = envfrom[1].name end + -- default replacements local replacements = { auth_user = uname, from_user = envfrom[1].user, @@ -582,10 +582,20 @@ rspamd_config:register_symbol({ from_addr = envfrom[1].addr, from_domain = envfrom[1].domain:lower() } - if footer.html then + -- add custom mailbox attributes + if footer.vars and type(footer.vars) == "string" then + local footer_vars = cjson.decode(footer.vars) + + if type(footer_vars) == "table" then + for key, value in pairs(footer_vars) do + replacements[key] = value + end + end + end + if footer.html and footer.html ~= "" then footer.html = lua_util.jinja_template(footer.html, replacements, true) end - if footer.plain then + if footer.plain and footer.plain ~= "" then footer.plain = lua_util.jinja_template(footer.plain, replacements, true) end @@ -653,17 +663,14 @@ rspamd_config:register_symbol({ end end - local redis_ret_footer = rspamd_redis_make_request(task, - redis_params, -- connect params - env_from_domain, -- hash key - false, -- is write - redis_cb_footer, --callback - 'HGET', -- command - {"DOMAIN_WIDE_FOOTER", env_from_domain} -- arguments - ) - if not redis_ret_footer then - rspamd_logger.infox(rspamd_config, "cannot make request to load footer for domain") - end + -- fetch footer + rspamd_http.request({ + task=task, + url='http://nginx:8081/footer.php', + body='', + callback=footer_cb, + headers={Domain=env_from_domain,Username=uname}, + }) return true end, diff --git a/data/web/api/openapi.yaml b/data/web/api/openapi.yaml index 1d262168..a363dcbb 100644 --- a/data/web/api/openapi.yaml +++ b/data/web/api/openapi.yaml @@ -3137,6 +3137,86 @@ paths: type: string type: object summary: Update domain + /api/v1/edit/domain/footer: + post: + responses: + "401": + $ref: "#/components/responses/Unauthorized" + "200": + content: + application/json: + examples: + response: + value: + - log: + - mailbox + - edit + - domain_wide_footer + - domains: + - mailcow.tld + html: "
foo {= foo =}" + plain: "- + You can update the footer of one or more domains per request. + operationId: Update domain wide footer + requestBody: + content: + application/json: + schema: + example: + attr: + html: "
foo {= foo =}" + plain: "foo {= foo =}" + mbox_exclude: + - moo@mailcow.tld + items: mailcow.tld + properties: + attr: + properties: + html: + description: Footer text in HTML format + type: string + plain: + description: Footer text in PLAIN text format + type: string + mbox_exclude: + description: Array of mailboxes to exclude from domain wide footer + type: object + type: object + items: + description: contains a list of domain names where you want to update the footer + type: array + items: + type: string + type: object + summary: Update domain wide footer /api/v1/edit/fail2ban: post: responses: @@ -3336,6 +3416,86 @@ paths: type: object type: object summary: Update mailbox + /api/v1/edit/mailbox/custom-attribute: + post: + responses: + "401": + $ref: "#/components/responses/Unauthorized" + "200": + content: + application/json: + examples: + response: + value: + - log: + - mailbox + - edit + - mailbox_custom_attribute + - mailboxes: + - moo@mailcow.tld + attribute: + - role + - foo + value: + - cow + - bar + - null + msg: + - mailbox_modified + - moo@mailcow.tld + type: success + schema: + properties: + log: + description: contains request object + items: {} + type: array + msg: + items: {} + type: array + type: + enum: + - success + - danger + - error + type: string + type: object + description: OK + headers: {} + tags: + - Mailboxes + description: >- + You can update custom attributes of one or more mailboxes per request. + operationId: Update mailbox custom attributes + requestBody: + content: + application/json: + schema: + example: + attr: + attribute: + - role + - foo + value: + - cow + - bar + items: + - moo@mailcow.tld + properties: + attr: + properties: + attribute: + description: Array of attribute keys + type: object + value: + description: Array of attribute values + type: object + type: object + items: + description: contains list of mailboxes you want update + type: object + type: object + summary: Update mailbox custom attributes /api/v1/edit/mailq: post: responses: @@ -5581,6 +5741,7 @@ paths: sogo_access: "1" tls_enforce_in: "0" tls_enforce_out: "0" + custom_attributes: {} domain: domain3.tld is_relayed: 0 local_part: info diff --git a/data/web/edit.php b/data/web/edit.php index 8061441b..83ae1467 100644 --- a/data/web/edit.php +++ b/data/web/edit.php @@ -58,6 +58,8 @@ if (isset($_SESSION['mailcow_cc_role'])) { 'dkim' => dkim('details', $domain), 'domain_details' => $result, 'domain_footer' => $domain_footer, + 'mailboxes' => mailbox('get', 'mailboxes', $_GET["domain"]), + 'aliases' => mailbox('get', 'aliases', $_GET["domain"], 'address') ]; } } @@ -218,6 +220,7 @@ $js_minifier->add('/web/js/site/pwgen.js'); $template_data['result'] = $result; $template_data['return_to'] = $_SESSION['return_to']; $template_data['lang_user'] = json_encode($lang['user']); +$template_data['lang_admin'] = json_encode($lang['admin']); $template_data['lang_datatables'] = json_encode($lang['datatables']); require_once $_SERVER['DOCUMENT_ROOT'] . '/inc/footer.inc.php'; diff --git a/data/web/inc/functions.mailbox.inc.php b/data/web/inc/functions.mailbox.inc.php index 298f607d..7c2955f6 100644 --- a/data/web/inc/functions.mailbox.inc.php +++ b/data/web/inc/functions.mailbox.inc.php @@ -3264,6 +3264,62 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) { ); return true; break; + case 'mailbox_custom_attribute': + $_data['attribute'] = isset($_data['attribute']) ? $_data['attribute'] : array(); + $_data['attribute'] = is_array($_data['attribute']) ? 
$_data['attribute'] : array($_data['attribute']); + $_data['attribute'] = array_map(function($value) { return str_replace(' ', '', $value); }, $_data['attribute']); + $_data['value'] = isset($_data['value']) ? $_data['value'] : array(); + $_data['value'] = is_array($_data['value']) ? $_data['value'] : array($_data['value']); + $attributes = (object)array_combine($_data['attribute'], $_data['value']); + $mailboxes = is_array($_data['mailboxes']) ? $_data['mailboxes'] : array($_data['mailboxes']); + + foreach ($mailboxes as $mailbox) { + if (!filter_var($mailbox, FILTER_VALIDATE_EMAIL)) { + $_SESSION['return'][] = array( + 'type' => 'danger', + 'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr), + 'msg' => array('username_invalid', $mailbox) + ); + continue; + } + $is_now = mailbox('get', 'mailbox_details', $mailbox); + if(!empty($is_now)){ + if (!hasDomainAccess($_SESSION['mailcow_cc_username'], $_SESSION['mailcow_cc_role'], $is_now['domain'])) { + $_SESSION['return'][] = array( + 'type' => 'danger', + 'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr), + 'msg' => 'access_denied' + ); + continue; + } + } + else { + $_SESSION['return'][] = array( + 'type' => 'danger', + 'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr), + 'msg' => 'access_denied' + ); + continue; + } + + + $stmt = $pdo->prepare("UPDATE `mailbox` + SET `custom_attributes` = :custom_attributes + WHERE username = :username"); + $stmt->execute(array( + ":username" => $mailbox, + ":custom_attributes" => json_encode($attributes) + )); + + $_SESSION['return'][] = array( + 'type' => 'success', + 'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr), + 'msg' => array('mailbox_modified', $mailbox) + ); + } + + return true; + break; case 'resource': if (!is_array($_data['name'])) { $names = array(); @@ -3343,44 +3399,79 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) { ); } break; - case 'domain_wide_footer': - $domain = idn_to_ascii(strtolower(trim($_data['domain'])), 0, INTL_IDNA_VARIANT_UTS46); - if (!is_valid_domain_name($domain)) { - $_SESSION['return'][] = array( - 'type' => 'danger', - 'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr), - 'msg' => 'domain_invalid' - ); - return false; + case 'domain_wide_footer': + if (!is_array($_data['domains'])) { + $domains = array(); + $domains[] = $_data['domains']; } - if (!hasDomainAccess($_SESSION['mailcow_cc_username'], $_SESSION['mailcow_cc_role'], $domain)) { - $_SESSION['return'][] = array( - 'type' => 'danger', - 'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr), - 'msg' => 'access_denied' - ); - return false; + else { + $domains = $_data['domains']; } - $footers = array(); - $footers['html'] = isset($_data['footer_html']) ? $_data['footer_html'] : ''; - $footers['plain'] = isset($_data['footer_plain']) ? 
$_data['footer_plain'] : ''; - try { - $redis->hSet('DOMAIN_WIDE_FOOTER', $domain, json_encode($footers)); - } - catch (RedisException $e) { + foreach ($domains as $domain) { + $domain = idn_to_ascii(strtolower(trim($domain)), 0, INTL_IDNA_VARIANT_UTS46); + if (!is_valid_domain_name($domain)) { + $_SESSION['return'][] = array( + 'type' => 'danger', + 'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr), + 'msg' => 'domain_invalid' + ); + return false; + } + if (!hasDomainAccess($_SESSION['mailcow_cc_username'], $_SESSION['mailcow_cc_role'], $domain)) { + $_SESSION['return'][] = array( + 'type' => 'danger', + 'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr), + 'msg' => 'access_denied' + ); + return false; + } + + $footers = array(); + $footers['html'] = isset($_data['html']) ? $_data['html'] : ''; + $footers['plain'] = isset($_data['plain']) ? $_data['plain'] : ''; + $footers['mbox_exclude'] = array(); + if (isset($_data["mbox_exclude"])){ + if (!is_array($_data["mbox_exclude"])) { + $_data["mbox_exclude"] = array($_data["mbox_exclude"]); + } + foreach ($_data["mbox_exclude"] as $mailbox) { + if (!filter_var($mailbox, FILTER_VALIDATE_EMAIL)) { + $_SESSION['return'][] = array( + 'type' => 'danger', + 'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr), + 'msg' => array('username_invalid', $mailbox) + ); + } else { + array_push($footers['mbox_exclude'], $mailbox); + } + } + } + try { + $stmt = $pdo->prepare("DELETE FROM `domain_wide_footer` WHERE `domain`= :domain"); + $stmt->execute(array(':domain' => $domain)); + $stmt = $pdo->prepare("INSERT INTO `domain_wide_footer` (`domain`, `html`, `plain`, `mbox_exclude`) VALUES (:domain, :html, :plain, :mbox_exclude)"); + $stmt->execute(array( + ':domain' => $domain, + ':html' => $footers['html'], + ':plain' => $footers['plain'], + ':mbox_exclude' => json_encode($footers['mbox_exclude']), + )); + } + catch (PDOException $e) { + $_SESSION['return'][] = array( + 'type' => 'danger', + 'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr), + 'msg' => $e->getMessage() + ); + return false; + } $_SESSION['return'][] = array( - 'type' => 'danger', + 'type' => 'success', 'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr), - 'msg' => array('redis_error', $e) + 'msg' => array('domain_footer_modified', htmlspecialchars($domain)) ); - return false; } - $_SESSION['return'][] = array( - 'type' => 'success', - 'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr), - 'msg' => array('domain_footer_modified', htmlspecialchars($domain)) - ); break; } break; @@ -3934,13 +4025,17 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) { if (!hasDomainAccess($_SESSION['mailcow_cc_username'], $_SESSION['mailcow_cc_role'], $_data)) { return false; } - $stmt = $pdo->prepare("SELECT `id` FROM `alias` WHERE `address` != `goto` AND `domain` = :domain"); + $stmt = $pdo->prepare("SELECT `id`, `address` FROM `alias` WHERE `address` != `goto` AND `domain` = :domain"); $stmt->execute(array( ':domain' => $_data, )); $rows = $stmt->fetchAll(PDO::FETCH_ASSOC); while($row = array_shift($rows)) { - $aliases[] = $row['id']; + if ($_extra == "address"){ + $aliases[] = $row['address']; + } else { + $aliases[] = $row['id']; + } } return $aliases; break; @@ -4292,6 +4387,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) { `mailbox`.`modified`, `quota2`.`bytes`, `attributes`, + `custom_attributes`, `quota2`.`messages` FROM `mailbox`, `quota2`, `domain` WHERE 
(`mailbox`.`kind` = '' OR `mailbox`.`kind` = NULL) @@ -4312,6 +4408,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) { `mailbox`.`modified`, `quota2replica`.`bytes`, `attributes`, + `custom_attributes`, `quota2replica`.`messages` FROM `mailbox`, `quota2replica`, `domain` WHERE (`mailbox`.`kind` = '' OR `mailbox`.`kind` = NULL) @@ -4334,6 +4431,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) { $mailboxdata['quota'] = $row['quota']; $mailboxdata['messages'] = $row['messages']; $mailboxdata['attributes'] = json_decode($row['attributes'], true); + $mailboxdata['custom_attributes'] = json_decode($row['custom_attributes'], true); $mailboxdata['quota_used'] = intval($row['bytes']); $mailboxdata['percent_in_use'] = ($row['quota'] == 0) ? '- ' : round((intval($row['bytes']) / intval($row['quota'])) * 100); $mailboxdata['created'] = $row['created']; @@ -4514,19 +4612,23 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) { } try { - $footers = $redis->hGet('DOMAIN_WIDE_FOOTER', $domain); - $footers = json_decode($footers, true); + $stmt = $pdo->prepare("SELECT `html`, `plain`, `mbox_exclude` FROM `domain_wide_footer` + WHERE `domain` = :domain"); + $stmt->execute(array( + ':domain' => $domain + )); + $footer = $stmt->fetch(PDO::FETCH_ASSOC); } - catch (RedisException $e) { + catch (PDOException $e) { $_SESSION['return'][] = array( 'type' => 'danger', 'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr), - 'msg' => array('redis_error', $e) + 'msg' => $e->getMessage() ); return false; } - return $footers; + return $footer; break; } break; diff --git a/data/web/inc/init_db.inc.php b/data/web/inc/init_db.inc.php index 2ce6a28c..4ea79d9b 100644 --- a/data/web/inc/init_db.inc.php +++ b/data/web/inc/init_db.inc.php @@ -3,7 +3,7 @@ function init_db_schema() { try { global $pdo; - $db_version = "15112023_1536"; + $db_version = "21112023_1644"; $stmt = $pdo->query("SHOW TABLES LIKE 'versions'"); $num_results = count($stmt->fetchAll(PDO::FETCH_ASSOC)); @@ -267,6 +267,20 @@ function init_db_schema() { ), "attr" => "ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=DYNAMIC" ), + "domain_wide_footer" => array( + "cols" => array( + "domain" => "VARCHAR(255) NOT NULL", + "html" => "LONGTEXT", + "plain" => "LONGTEXT", + "mbox_exclude" => "JSON NOT NULL DEFAULT ('[]')", + ), + "keys" => array( + "primary" => array( + "" => array("domain") + ) + ), + "attr" => "ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=DYNAMIC" + ), "tags_domain" => array( "cols" => array( "tag_name" => "VARCHAR(255) NOT NULL", @@ -344,6 +358,7 @@ function init_db_schema() { "local_part" => "VARCHAR(255) NOT NULL", "domain" => "VARCHAR(255) NOT NULL", "attributes" => "JSON", + "custom_attributes" => "JSON NOT NULL DEFAULT ('{}')", "kind" => "VARCHAR(100) NOT NULL DEFAULT ''", "multiple_bookings" => "INT NOT NULL DEFAULT -1", "created" => "DATETIME(0) NOT NULL DEFAULT NOW(0)", diff --git a/data/web/js/site/edit.js b/data/web/js/site/edit.js index cd938cd8..d6895498 100644 --- a/data/web/js/site/edit.js +++ b/data/web/js/site/edit.js @@ -199,6 +199,23 @@ jQuery(function($){ }); } + function add_table_row(table_id, type) { + var row = $('
'); + if (type == "mbox_attr") { + cols = ''; + cols += ''; + cols += ''; + } + row.append(cols); + table_id.append(row); + } + $('#mbox_attr_table').on('click', 'tr a', function (e) { + e.preventDefault(); + $(this).parents('tr').remove(); + }); + $('#add_mbox_attr_row').click(function() { + add_table_row($('#mbox_attr_table'), "mbox_attr"); + }); // detect element visibility changes function onVisible(element, callback) { diff --git a/data/web/json_api.php b/data/web/json_api.php index b375bc8e..668be0d1 100644 --- a/data/web/json_api.php +++ b/data/web/json_api.php @@ -1867,8 +1867,6 @@ if (isset($_GET['query'])) { case "quota_notification_bcc": process_edit_return(quota_notification_bcc('edit', $attr)); break; - case "domain-wide-footer": - process_edit_return(mailbox('edit', 'domain_wide_footer', $attr)); break; case "mailq": process_edit_return(mailq('edit', array_merge(array('qid' => $items), $attr))); @@ -1881,6 +1879,9 @@ if (isset($_GET['query'])) { case "template": process_edit_return(mailbox('edit', 'mailbox_templates', array_merge(array('ids' => $items), $attr))); break; + case "custom-attribute": + process_edit_return(mailbox('edit', 'mailbox_custom_attribute', array_merge(array('mailboxes' => $items), $attr))); + break; default: process_edit_return(mailbox('edit', 'mailbox', array_merge(array('username' => $items), $attr))); break; @@ -1900,6 +1901,9 @@ if (isset($_GET['query'])) { case "template": process_edit_return(mailbox('edit', 'domain_templates', array_merge(array('ids' => $items), $attr))); break; + case "footer": + process_edit_return(mailbox('edit', 'domain_wide_footer', array_merge(array('domains' => $items), $attr))); + break; default: process_edit_return(mailbox('edit', 'domain', array_merge(array('domain' => $items), $attr))); break; diff --git a/data/web/lang/lang.de-de.json b/data/web/lang/lang.de-de.json index 75c66af6..5737a664 100644 --- a/data/web/lang/lang.de-de.json +++ b/data/web/lang/lang.de-de.json @@ -574,6 +574,7 @@ "client_secret": "Client-Secret", "comment_info": "Ein privater Kommentar ist für den Benutzer nicht einsehbar. Ein öffentlicher Kommentar wird als Tooltip im Interface des Benutzers angezeigt.", "created_on": "Erstellt am", + "custom_attributes": "benutzerdefinierte Attribute", "delete1": "Lösche Nachricht nach Übertragung vom Quell-Server", "delete2": "Lösche Nachrichten von Ziel-Server, die nicht auf Quell-Server vorhanden sind", "delete2duplicates": "Lösche Duplikate im Ziel", @@ -614,6 +615,7 @@ "max_quota": "Max. Größe per Mailbox (MiB)", "maxage": "Maximales Alter in Tagen einer Nachricht, die kopiert werden soll
(0 = alle Nachrichten kopieren)", "maxbytespersecond": "Max. Übertragungsrate in Bytes/s (0 für unlimitiert)", + "mbox_exclude": "Mailboxen ausschließen", "mbox_rl_info": "Dieses Limit wird auf den SASL Loginnamen angewendet und betrifft daher alle Absenderadressen, die der eingeloggte Benutzer verwendet. Bei Mailbox Ratelimit überwiegt ein Domain-weites Ratelimit.", "mins_interval": "Intervall (min)", "multiple_bookings": "Mehrfaches Buchen", @@ -1125,6 +1127,7 @@ "apple_connection_profile_complete": "Dieses Verbindungsprofil beinhaltet neben IMAP- und SMTP-Konfigurationen auch Pfade für die Konfiguration von CalDAV (Kalender) und CardDAV (Adressbücher) für ein Apple-Gerät.", "apple_connection_profile_mailonly": "Dieses Verbindungsprofil beinhaltet IMAP- und SMTP-Konfigurationen für ein Apple-Gerät.", "apple_connection_profile_with_app_password": "Es wird ein neues App-Passwort erzeugt und in das Profil eingefügt, damit bei der Einrichtung kein Passwort eingegeben werden muss. Geben Sie das Profil nicht weiter, da es einen vollständigen Zugriff auf Ihr Postfach ermöglicht.", + "attribute": "Attribut", "change_password": "Passwort ändern", "change_password_hint_app_passwords": "Ihre Mailbox hat %d App-Passwörter, die nicht geändert werden. Um diese zu verwalten, gehen Sie bitte zum App-Passwörter-Tab.", "clear_recent_successful_connections": "Alle erfolgreichen Verbindungen bereinigen", @@ -1244,6 +1247,7 @@ "tls_policy_warning": "Vorsicht: Entscheiden Sie sich unverschlüsselte Verbindungen abzulehnen, kann dies dazu führen, dass Kontakte Sie nicht mehr erreichen.
Nachrichten, die die Richtlinie nicht erfüllen, werden durch einen Hard-Fail im Mailsystem abgewiesen.
Diese Einstellung ist aktiv für die primäre Mailbox, für alle Alias-Adressen, die dieser Mailbox direkt zugeordnet sind (lediglich eine einzige Ziel-Adresse) und der Adressen, die sich aus Alias-Domains ergeben. Ausgeschlossen sind temporäre Aliasse (\"Spam-Alias-Adressen\"), Catch-All Alias-Adressen sowie Alias-Adressen mit mehreren Zielen.", "user_settings": "Benutzereinstellungen", "username": "Benutzername", + "value": "Wert", "verify": "Verifizieren", "waiting": "Warte auf Ausführung", "week": "Woche", diff --git a/data/web/lang/lang.en-gb.json b/data/web/lang/lang.en-gb.json index da97bebd..723ed62e 100644 --- a/data/web/lang/lang.en-gb.json +++ b/data/web/lang/lang.en-gb.json @@ -576,6 +576,7 @@ "client_secret": "Client secret", "comment_info": "A private comment is not visible to the user, while a public comment is shown as tooltip when hovering it in a user's overview", "created_on": "Created on", + "custom_attributes": "Custom attributes", "delete1": "Delete from source when completed", "delete2": "Delete messages on destination that are not on source", "delete2duplicates": "Delete duplicates on destination", @@ -592,7 +593,8 @@ "from_user": "{= from_user =} - From user part of envelope, e.g for \"moo@mailcow.tld\" it returns \"moo\"", "from_name": "{= from_name =} - From name of envelope, e.g for \"Mailcow <moo@mailcow.tld>\" it returns \"Mailcow\"", "from_addr": "{= from_addr =} - From address part of envelope", - "from_domain": "{= from_domain =} - From domain part of envelope" + "from_domain": "{= from_domain =} - From domain part of envelope", + "custom": "{= foo =} - If mailbox has the custom attribute \"foo\" with value \"bar\" it returns \"bar\"" }, "domain_footer_plain": "PLAIN footer", "domain_quota": "Domain quota", @@ -623,6 +625,7 @@ "max_quota": "Max. quota per mailbox (MiB)", "maxage": "Maximum age of messages in days that will be polled from remote
(0 = ignore age)", "maxbytespersecond": "Max. bytes per second
(0 = unlimited)", + "mbox_exclude": "Exclude mailboxes", "mbox_rl_info": "This rate limit is applied on the SASL login name, it matches any \"from\" address used by the logged-in user. A mailbox rate limit overrides a domain-wide rate limit.", "mins_interval": "Interval (min)", "multiple_bookings": "Multiple bookings", @@ -1141,6 +1144,7 @@ "apple_connection_profile_complete": "This connection profile includes IMAP and SMTP parameters as well as CalDAV (calendars) and CardDAV (contacts) paths for an Apple device.", "apple_connection_profile_mailonly": "This connection profile includes IMAP and SMTP configuration parameters for an Apple device.", "apple_connection_profile_with_app_password": "A new app password is generated and added to the profile so that no password needs to be entered when setting up your device. Please do not share the file as it grants full access to your mailbox.", + "attribute": "Attribute", "change_password": "Change password", "change_password_hint_app_passwords": "Your account has %d app passwords that will not be changed. To manage these, go to the App passwords tab.", "clear_recent_successful_connections": "Clear seen successful connections", @@ -1271,6 +1275,7 @@ "tls_policy_warning": "Warning: If you decide to enforce encrypted mail transfer, you may lose emails.
Messages to not satisfy the policy will be bounced with a hard fail by the mail system.
This option applies to your primary email address (login name), all addresses derived from alias domains as well as alias addresses with only this single mailbox as target.",
    "user_settings": "User settings",
    "username": "Username",
+   "value": "Value",
    "verify": "Verify",
    "waiting": "Waiting",
    "week": "week",
diff --git a/data/web/templates/edit.twig b/data/web/templates/edit.twig
index af83a31d..375018c6 100644
--- a/data/web/templates/edit.twig
+++ b/data/web/templates/edit.twig
@@ -24,6 +24,7 @@
' + lang_admin.remove_row + '