Merge branch 'staging' into staging

Patrick Schult 2023-12-11 16:28:05 +01:00 committed by GitHub
commit 7f4e9c1ad4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
231 changed files with 15439 additions and 8892 deletions


@@ -1,7 +1,7 @@
blank_issues_enabled: false
contact_links:
- name: ❓ Community-driven support
url: https://mailcow.github.io/mailcow-dockerized-docs/#get-support
url: https://docs.mailcow.email/#get-support
about: Please use the community forum for questions or assistance
- name: 🚨 Report a security vulnerability
url: https://www.servercow.de/anfrage?lang=en

.github/renovate.json

@@ -1,21 +1,31 @@
{
"enabled": true,
"timezone": "Europe/Berlin",
"dependencyDashboard": false,
"dependencyDashboard": true,
"dependencyDashboardTitle": "Renovate Dashboard",
"commitBody": "Signed-off-by: milkmaker <milkmaker@mailcow.de>",
"rebaseWhen": "auto",
"labels": ["renovate"],
"assignees": [
"@magiccc"
],
"baseBranches": ["staging"],
"enabledManagers": ["github-actions", "regex"],
"enabledManagers": ["github-actions", "regex", "docker-compose"],
"ignorePaths": [
"data\/web\/inc\/lib\/vendor\/**"
],
"regexManagers": [
{
"fileMatch": ["^helper-scripts\/nextcloud.sh$"],
"matchStrings": [
"#\\srenovate:\\sdatasource=(?<datasource>.*?) depName=(?<depName>.*?)( versioning=(?<versioning>.*?))?( extractVersion=(?<extractVersion>.*?))?\\s.*?_VERSION=(?<currentValue>.*)"
]
},
{
"fileMatch": ["(^|/)Dockerfile[^/]*$"],
"matchStrings": [
"#\\srenovate:\\sdatasource=(?<datasource>.*?) depName=(?<depName>.*?)( versioning=(?<versioning>.*?))?( extractVersion=(?<extractVersion>.*?))?\\s(ENV|ARG) .*?_VERSION=(?<currentValue>.*)\\s"
]
}
]
}
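
To sanity-check the Dockerfile regexManager above, here is a minimal sketch in Python. It assumes only the standard library; the `(?<name>...)` groups are translated to Python's `(?P<name>...)` syntax, and the sample Dockerfile lines are hypothetical:

import re

# The Dockerfile regexManager pattern from renovate.json above, rewritten
# with Python named groups so it can be exercised outside Renovate.
PATTERN = (
    r"#\srenovate:\sdatasource=(?P<datasource>.*?) depName=(?P<depName>.*?)"
    r"( versioning=(?P<versioning>.*?))?( extractVersion=(?P<extractVersion>.*?))?"
    r"\s(ENV|ARG) .*?_VERSION=(?P<currentValue>.*)\s"
)

# Hypothetical Dockerfile snippet in the annotated format Renovate scans for.
sample = (
    "# renovate: datasource=github-releases depName=nextcloud/server\n"
    "ENV NEXTCLOUD_VERSION=27.1.0\n"
)

m = re.search(PATTERN, sample)
if m:
    # Prints: github-releases nextcloud/server 27.1.0
    print(m.group("datasource"), m.group("depName"), m.group("currentValue"))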


@@ -10,7 +10,7 @@ jobs:
if: github.event.pull_request.base.ref != 'staging' #check if the target branch is not staging
steps:
- name: Send message
uses: thollander/actions-comment-pull-request@v2.3.0
uses: thollander/actions-comment-pull-request@v2.4.3
with:
GITHUB_TOKEN: ${{ secrets.CHECKIFPRISSTAGING_ACTION_PAT }}
message: |


@@ -14,7 +14,7 @@ jobs:
pull-requests: write
steps:
- name: Mark/Close Stale Issues and Pull Requests 🗑️
uses: actions/stale@v7.0.0
uses: actions/stale@v9.0.0
with:
repo-token: ${{ secrets.STALE_ACTION_PAT }}
days-before-stale: 60


@@ -28,7 +28,7 @@ jobs:
- "watchdog-mailcow"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Setup Docker
run: |
curl -sSL https://get.docker.com/ | CHANNEL=stable sudo sh


@@ -1,63 +0,0 @@
name: mailcow Integration Tests
on:
push:
branches: [ "master", "staging" ]
workflow_dispatch:
permissions:
contents: read
jobs:
integration_tests:
runs-on: ubuntu-latest
steps:
- name: Setup Ansible
run: |
export DEBIAN_FRONTEND=noninteractive
sudo apt-get update
sudo apt-get install python3 python3-pip git
sudo pip3 install ansible
- name: Prepare Test Environment
run: |
git clone https://github.com/mailcow/mailcow-integration-tests.git --branch $(curl -sL https://api.github.com/repos/mailcow/mailcow-integration-tests/releases/latest | jq -r '.tag_name') --single-branch .
./fork_check.sh
./ci.sh
./ci-pip-requirements.sh
env:
VAULT_PW: ${{ secrets.MAILCOW_TESTS_VAULT_PW }}
VAULT_FILE: ${{ secrets.MAILCOW_TESTS_VAULT_FILE }}
- name: Start Integration Test Server
run: |
./fork_check.sh
ansible-playbook mailcow-start-server.yml --diff
env:
PY_COLORS: '1'
ANSIBLE_FORCE_COLOR: '1'
ANSIBLE_HOST_KEY_CHECKING: 'false'
- name: Setup Integration Test Server
run: |
./fork_check.sh
sleep 30
ansible-playbook mailcow-setup-server.yml --private-key id_ssh_rsa --diff
env:
PY_COLORS: '1'
ANSIBLE_FORCE_COLOR: '1'
ANSIBLE_HOST_KEY_CHECKING: 'false'
- name: Run Integration Tests
run: |
./fork_check.sh
ansible-playbook mailcow-integration-tests.yml --private-key id_ssh_rsa --diff
env:
PY_COLORS: '1'
ANSIBLE_FORCE_COLOR: '1'
ANSIBLE_HOST_KEY_CHECKING: 'false'
- name: Delete Integration Test Server
if: always()
run: |
./fork_check.sh
ansible-playbook mailcow-delete-server.yml --diff
env:
PY_COLORS: '1'
ANSIBLE_FORCE_COLOR: '1'
ANSIBLE_HOST_KEY_CHECKING: 'false'


@@ -8,11 +8,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Run the Action
uses: devops-infra/action-pull-request@v0.5.3
uses: devops-infra/action-pull-request@v0.5.5
with:
github_token: ${{ secrets.PRTONIGHTLY_ACTION_PAT }}
title: Automatic PR to nightly from ${{ github.event.repository.updated_at}}


@@ -11,24 +11,25 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
username: ${{ secrets.BACKUPIMAGEBUILD_ACTION_DOCKERHUB_USERNAME }}
password: ${{ secrets.BACKUPIMAGEBUILD_ACTION_DOCKERHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v3
uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm64
file: data/Dockerfiles/backup/Dockerfile
push: true
tags: mailcow/backup:latest


@@ -1,20 +0,0 @@
name: "Tweet trigger release"
on:
release:
types: [published]
jobs:
tweet:
runs-on: ubuntu-latest
steps:
- name: "Get Release Tag"
run: |
RELEASE_TAG=$(curl https://api.github.com/repos/mailcow/mailcow-dockerized/releases/latest | jq -r '.tag_name')
- name: Tweet-trigger-publish-release
uses: mugi111/tweet-trigger-release@v1.2
with:
consumer_key: ${{ secrets.CONSUMER_KEY }}
consumer_secret: ${{ secrets.CONSUMER_SECRET }}
access_token_key: ${{ secrets.ACCESS_TOKEN_KEY }}
access_token_secret: ${{ secrets.ACCESS_TOKEN_SECRET }}
tweet_body: 'A new mailcow update has just been released! Check out the GitHub page for the changelog and more information: https://github.com/mailcow/mailcow-dockerized/releases/latest'


@@ -0,0 +1,39 @@
name: Update postscreen_access.cidr
on:
schedule:
# Monthly
- cron: "0 0 1 * *"
workflow_dispatch: # Allow to run workflow manually
permissions:
contents: read # to fetch code (actions/checkout)
jobs:
Update-postscreen_access_cidr:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Generate postscreen_access.cidr
run: |
bash helper-scripts/update_postscreen_whitelist.sh
- name: Create Pull Request
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.mailcow_action_Update_postscreen_access_cidr_pat }}
commit-message: update postscreen_access.cidr
committer: milkmaker <milkmaker@mailcow.de>
author: milkmaker <milkmaker@mailcow.de>
signoff: false
branch: update/postscreen_access.cidr
base: staging
delete-branch: true
add-paths: |
data/conf/postfix/postscreen_access.cidr
title: '[Postfix] update postscreen_access.cidr'
body: |
This PR updates the postscreen_access.cidr using GitHub Actions and [helper-scripts/update_postscreen_whitelist.sh](https://github.com/mailcow/mailcow-dockerized/blob/master/helper-scripts/update_postscreen_whitelist.sh)

.gitignore

@@ -36,6 +36,8 @@ data/conf/postfix/extra.cf
data/conf/postfix/sni.map
data/conf/postfix/sni.map.db
data/conf/postfix/sql
data/conf/postfix/dns_blocklists.cf
data/conf/postfix/dnsbl_reply.map
data/conf/rspamd/custom/*
data/conf/rspamd/local.d/*
data/conf/rspamd/override.d/*


@@ -3,7 +3,7 @@ When a problem occurs, it is always for a reason! What you want to do in such a c
1. Read your logs; follow them to see what the reason for your problem is.
2. Follow the leads given to you in your logfiles and start investigating.
3. Restart the troubled service or the whole stack to see if the problem persists.
4. Read the [documentation](https://mailcow.github.io/mailcow-dockerized-docs/) of the troubled service and search its bugtracker for your problem.
4. Read the [documentation](https://docs.mailcow.email/) of the troubled service and search its bugtracker for your problem.
5. Search our [issues](https://github.com/mailcow/mailcow-dockerized/issues) for your problem.
6. [Create an issue](https://github.com/mailcow/mailcow-dockerized/issues/new/choose) over at our GitHub repository if you think your problem might be a bug or a missing feature you badly need. But please make sure that you include **all the logs** and a full description of your problem.
7. Ask your questions in our community-driven [support channels](https://mailcow.github.io/mailcow-dockerized-docs/#community-support-and-chat).
7. Ask your questions in our community-driven [support channels](https://docs.mailcow.email/#community-support-and-chat).


@@ -1,8 +1,9 @@
# mailcow: dockerized - 🐮 + 🐋 = 💕
[![Mailcow Integration Tests](https://github.com/mailcow/mailcow-dockerized/actions/workflows/integration_tests.yml/badge.svg?branch=master)](https://github.com/mailcow/mailcow-dockerized/actions/workflows/integration_tests.yml)
[![Translation status](https://translate.mailcow.email/widgets/mailcow-dockerized/-/translation/svg-badge.svg)](https://translate.mailcow.email/engage/mailcow-dockerized/)
[![Twitter URL](https://img.shields.io/twitter/url/https/twitter.com/mailcow_email.svg?style=social&label=Follow%20%40mailcow_email)](https://twitter.com/mailcow_email)
![Mastodon Follow](https://img.shields.io/mastodon/follow/109388212176073348?domain=https%3A%2F%2Fmailcow.social&label=Follow%20%40doncow%40mailcow.social&link=https%3A%2F%2Fmailcow.social%2F%40doncow)
## Want to support mailcow?
@@ -14,7 +15,7 @@ Or just spread the word: moo.
## Info, documentation and support
Please see [the official documentation](https://mailcow.github.io/mailcow-dockerized-docs/) for installation and support instructions. 🐄
Please see [the official documentation](https://docs.mailcow.email/) for installation and support instructions. 🐄
🐛 **If you found a critical security issue, please mail us to [info at servercow.de](mailto:info@servercow.de).**
@@ -26,7 +27,7 @@ Please see [the official documentation](https://mailcow.github.io/mailcow-docker
[Telegram mailcow Off-Topic channel](https://t.me/mailcowOfftopic)
[Official Twitter Account](https://twitter.com/mailcow_email)
[Official 𝕏 (Twitter) Account](https://twitter.com/mailcow_email)
[Official Mastodon Account](https://mailcow.social/@doncow)
Telegram desktop clients are available for [multiple platforms](https://desktop.telegram.org). You can search the group's history for keywords.


@@ -1,6 +1,6 @@
FROM alpine:3.17
LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
RUN apk upgrade --no-cache \
&& apk add --update --no-cache \


@@ -213,11 +213,13 @@ while true; do
done
ADDITIONAL_WC_ARR+=('autodiscover' 'autoconfig')
if [[ ${SKIP_IP_CHECK} != "y" ]]; then
# Start IP detection
log_f "Detecting IP addresses..."
IPV4=$(get_ipv4)
IPV6=$(get_ipv6)
log_f "OK: ${IPV4}, ${IPV6:-"0000:0000:0000:0000:0000:0000:0000:0000"}"
fi
#########################################
# IP and webroot challenge verification #


@@ -1,6 +1,6 @@
FROM clamav/clamav:1.0_base
FROM clamav/clamav:1.0.3_base
LABEL maintainer "André Peters <andre.peters@servercow.de>"
LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
RUN apk upgrade --no-cache \
&& apk add --update --no-cache \


@@ -1,6 +1,6 @@
FROM alpine:3.17
LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
WORKDIR /app
@@ -13,9 +13,13 @@ RUN apk add --update --no-cache python3 \
fastapi \
uvicorn \
aiodocker \
redis
docker \
aioredis
RUN mkdir /app/modules
COPY docker-entrypoint.sh /app/
COPY dockerapi.py /app/
COPY main.py /app/main.py
COPY modules/ /app/modules/
ENTRYPOINT ["/bin/sh", "/app/docker-entrypoint.sh"]
CMD exec python main.py


@@ -6,4 +6,4 @@
-subj /CN=dockerapi/O=mailcow \
-addext subjectAltName=DNS:dockerapi`
`uvicorn --host 0.0.0.0 --port 443 --ssl-certfile=/app/dockerapi_cert.pem --ssl-keyfile=/app/dockerapi_key.pem dockerapi:app`
exec "$@"


@@ -1,623 +0,0 @@
from fastapi import FastAPI, Response, Request
import aiodocker
import psutil
import sys
import re
import time
import os
import json
import asyncio
import redis
from datetime import datetime
containerIds_to_update = []
host_stats_isUpdating = False
app = FastAPI()
@app.get("/host/stats")
async def get_host_update_stats():
global host_stats_isUpdating
if host_stats_isUpdating == False:
print("start host stats task")
asyncio.create_task(get_host_stats())
host_stats_isUpdating = True
while True:
if redis_client.exists('host_stats'):
break
print("wait for host_stats results")
await asyncio.sleep(1.5)
print("host stats pulled")
stats = json.loads(redis_client.get('host_stats'))
return Response(content=json.dumps(stats, indent=4), media_type="application/json")
@app.get("/containers/{container_id}/json")
async def get_container(container_id : str):
if container_id and container_id.isalnum():
try:
for container in (await async_docker_client.containers.list()):
if container._id == container_id:
container_info = await container.show()
return Response(content=json.dumps(container_info, indent=4), media_type="application/json")
res = {
"type": "danger",
"msg": "no container found"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
except Exception as e:
res = {
"type": "danger",
"msg": str(e)
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
"type": "danger",
"msg": "no or invalid id defined"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
@app.get("/containers/json")
async def get_containers():
containers = {}
try:
for container in (await async_docker_client.containers.list()):
container_info = await container.show()
containers.update({container_info['Id']: container_info})
return Response(content=json.dumps(containers, indent=4), media_type="application/json")
except Exception as e:
res = {
"type": "danger",
"msg": str(e)
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
@app.post("/containers/{container_id}/{post_action}")
async def post_containers(container_id : str, post_action : str, request: Request):
try :
request_json = await request.json()
except Exception as err:
request_json = {}
if container_id and container_id.isalnum() and post_action:
try:
"""Dispatch container_post api call"""
if post_action == 'exec':
if not request_json or not 'cmd' in request_json:
res = {
"type": "danger",
"msg": "cmd is missing"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
if not request_json or not 'task' in request_json:
res = {
"type": "danger",
"msg": "task is missing"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
api_call_method_name = '__'.join(['container_post', str(post_action), str(request_json['cmd']), str(request_json['task']) ])
else:
api_call_method_name = '__'.join(['container_post', str(post_action) ])
docker_utils = DockerUtils(async_docker_client)
api_call_method = getattr(docker_utils, api_call_method_name, lambda container_id: Response(content=json.dumps({'type': 'danger', 'msg':'container_post - unknown api call' }, indent=4), media_type="application/json"))
print("api call: %s, container_id: %s" % (api_call_method_name, container_id))
return await api_call_method(container_id, request_json)
except Exception as e:
print("error - container_post: %s" % str(e))
res = {
"type": "danger",
"msg": str(e)
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
"type": "danger",
"msg": "invalid container id or missing action"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
@app.post("/container/{container_id}/stats/update")
async def post_container_update_stats(container_id : str):
global containerIds_to_update
# start update task for container if no task is running
if container_id not in containerIds_to_update:
asyncio.create_task(get_container_stats(container_id))
containerIds_to_update.append(container_id)
while True:
if redis_client.exists(container_id + '_stats'):
break
await asyncio.sleep(1.5)
stats = json.loads(redis_client.get(container_id + '_stats'))
return Response(content=json.dumps(stats, indent=4), media_type="application/json")
class DockerUtils:
def __init__(self, docker_client):
self.docker_client = docker_client
# api call: container_post - post_action: stop
async def container_post__stop(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
await container.stop()
res = {
'type': 'success',
'msg': 'command completed successfully'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: start
async def container_post__start(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
await container.start()
res = {
'type': 'success',
'msg': 'command completed successfully'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: restart
async def container_post__restart(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
await container.restart()
res = {
'type': 'success',
'msg': 'command completed successfully'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: top
async def container_post__top(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
ps_exec = await container.exec("ps")
async with ps_exec.start(detach=False) as stream:
ps_return = await stream.read_out()
exec_details = await ps_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'success',
'msg': ps_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'danger',
'msg': ''
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: mailq - task: delete
async def container_post__exec__mailq__delete(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-d %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids))
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: hold
async def container_post__exec__mailq__hold(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-h %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids))
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: cat
async def container_post__exec__mailq__cat(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
sanitized_string = str(' '.join(filtered_qids))
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postcat_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
return await exec_run_handler('utf8_text_only', postcat_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: unhold
async def container_post__exec__mailq__unhold(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-H %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids))
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: deliver
async def container_post__exec__mailq__deliver(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-i %s' % i for i in filtered_qids]
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
for i in flagged_qids:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
async with postsuper_r_exec.start(detach=False) as stream:
postsuper_r_return = await stream.read_out()
# todo: check each exit code
res = {
'type': 'success',
'msg': 'Scheduled immediate delivery'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: mailq - task: list
async def container_post__exec__mailq__list(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
mailq_exec = await container.exec(["/usr/sbin/postqueue", "-j"], user='postfix')
return await exec_run_handler('utf8_text_only', mailq_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: flush
async def container_post__exec__mailq__flush(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/usr/sbin/postqueue", "-f"], user='postfix')
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: super_delete
async def container_post__exec__mailq__super_delete(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/usr/sbin/postsuper", "-d", "ALL"])
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: system - task: fts_rescan
async def container_post__exec__system__fts_rescan(self, container_id, request_json):
if 'username' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail')
async with rescan_exec.start(detach=False) as stream:
rescan_return = await stream.read_out()
exec_details = await rescan_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'success',
'msg': 'fts_rescan: rescan triggered'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'warning',
'msg': 'fts_rescan error'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
if 'all' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')
async with rescan_exec.start(detach=False) as stream:
rescan_return = await stream.read_out()
exec_details = await rescan_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'success',
'msg': 'fts_rescan: rescan triggered'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'warning',
'msg': 'fts_rescan error'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: system - task: df
async def container_post__exec__system__df(self, container_id, request_json):
if 'dir' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
df_exec = await container.exec(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
async with df_exec.start(detach=False) as stream:
df_return = await stream.read_out()
print(df_return)
print(await df_exec.inspect())
exec_details = await df_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
return df_return.data.decode('utf-8').rstrip()
else:
return "0,0,0,0,0,0"
# api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade
async def container_post__exec__system__mysql_upgrade(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
sql_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
async with sql_exec.start(detach=False) as stream:
sql_return = await stream.read_out()
exec_details = await sql_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
matched = False
for line in sql_return.data.decode('utf-8').split("\n"):
if 'is already upgraded to' in line:
matched = True
if matched:
res = {
'type': 'success',
'msg': 'mysql_upgrade: already upgraded',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
await container.restart()
res = {
'type': 'warning',
'msg': 'mysql_upgrade: upgrade was applied',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'error',
'msg': 'mysql_upgrade: error running command',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: system - task: mysql_tzinfo_to_sql
async def container_post__exec__system__mysql_tzinfo_to_sql(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
sql_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
async with sql_exec.start(detach=False) as stream:
sql_return = await stream.read_out()
exec_details = await sql_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'info',
'msg': 'mysql_tzinfo_to_sql: command completed successfully',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'error',
'msg': 'mysql_tzinfo_to_sql: error running command',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: reload - task: dovecot
async def container_post__exec__reload__dovecot(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
return await exec_run_handler('generic', reload_exec)
# api call: container_post - post_action: exec - cmd: reload - task: postfix
async def container_post__exec__reload__postfix(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
return await exec_run_handler('generic', reload_exec)
# api call: container_post - post_action: exec - cmd: reload - task: nginx
async def container_post__exec__reload__nginx(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
reload_exec = await container.exec(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
return await exec_run_handler('generic', reload_exec)
# api call: container_post - post_action: exec - cmd: sieve - task: list
async def container_post__exec__sieve__list(self, container_id, request_json):
if 'username' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
sieve_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"])
return await exec_run_handler('utf8_text_only', sieve_exec)
# api call: container_post - post_action: exec - cmd: sieve - task: print
async def container_post__exec__sieve__print(self, container_id, request_json):
if 'username' in request_json and 'script_name' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]
sieve_exec = await container.exec(cmd)
return await exec_run_handler('utf8_text_only', sieve_exec)
# api call: container_post - post_action: exec - cmd: maildir - task: cleanup
async def container_post__exec__maildir__cleanup(self, container_id, request_json):
if 'maildir' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
sane_name = re.sub(r'\W+', '', request_json['maildir'])
cmd = ["/bin/bash", "-c", "if [[ -d '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' ]]; then /bin/mv '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"]
maildir_cleanup_exec = await container.exec(cmd, user='vmail')
return await exec_run_handler('generic', maildir_cleanup_exec)
# api call: container_post - post_action: exec - cmd: rspamd - task: worker_password
async def container_post__exec__rspamd__worker_password(self, container_id, request_json):
if 'raw' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
cmd = "./set_worker_password.sh '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
rspamd_password_exec = await container.exec(cmd, user='_rspamd')
async with rspamd_password_exec.start(detach=False) as stream:
rspamd_password_return = await stream.read_out()
matched = False
if "OK" in rspamd_password_return.data.decode('utf-8'):
matched = True
await container.restart()
if matched:
res = {
'type': 'success',
'msg': 'command completed successfully'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'danger',
'msg': 'command did not complete'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
async def exec_run_handler(type, exec_obj):
async with exec_obj.start(detach=False) as stream:
exec_return = await stream.read_out()
if exec_return == None:
exec_return = ""
else:
exec_return = exec_return.data.decode('utf-8')
if type == 'generic':
exec_details = await exec_obj.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
"type": "success",
"msg": "command completed successfully"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
"type": "success",
"msg": "'command failed: " + exec_return
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
if type == 'utf8_text_only':
return Response(content=exec_return, media_type="text/plain")
async def get_host_stats(wait=5):
global host_stats_isUpdating
try:
system_time = datetime.now()
host_stats = {
"cpu": {
"cores": psutil.cpu_count(),
"usage": psutil.cpu_percent()
},
"memory": {
"total": psutil.virtual_memory().total,
"usage": psutil.virtual_memory().percent,
"swap": psutil.swap_memory()
},
"uptime": time.time() - psutil.boot_time(),
"system_time": system_time.strftime("%d.%m.%Y %H:%M:%S")
}
redis_client.set('host_stats', json.dumps(host_stats), ex=10)
except Exception as e:
res = {
"type": "danger",
"msg": str(e)
}
print(json.dumps(res, indent=4))
await asyncio.sleep(wait)
host_stats_isUpdating = False
async def get_container_stats(container_id, wait=5, stop=False):
global containerIds_to_update
if container_id and container_id.isalnum():
try:
for container in (await async_docker_client.containers.list()):
if container._id == container_id:
res = await container.stats(stream=False)
if redis_client.exists(container_id + '_stats'):
stats = json.loads(redis_client.get(container_id + '_stats'))
else:
stats = []
stats.append(res[0])
if len(stats) > 3:
del stats[0]
redis_client.set(container_id + '_stats', json.dumps(stats), ex=60)
except Exception as e:
res = {
"type": "danger",
"msg": str(e)
}
print(json.dumps(res, indent=4))
else:
res = {
"type": "danger",
"msg": "no or invalid id defined"
}
print(json.dumps(res, indent=4))
await asyncio.sleep(wait)
if stop == True:
# update task was called second time, stop
containerIds_to_update.remove(container_id)
else:
# call update task a second time
await get_container_stats(container_id, wait=0, stop=True)
if os.environ['REDIS_SLAVEOF_IP'] != "":
redis_client = redis.Redis(host=os.environ['REDIS_SLAVEOF_IP'], port=os.environ['REDIS_SLAVEOF_PORT'], db=0)
else:
redis_client = redis.Redis(host='redis-mailcow', port=6379, db=0)
async_docker_client = aiodocker.Docker(url='unix:///var/run/docker.sock')


@@ -0,0 +1,260 @@
import os
import sys
import uvicorn
import json
import uuid
import async_timeout
import asyncio
import aioredis
import aiodocker
import docker
import logging
from logging.config import dictConfig
from fastapi import FastAPI, Response, Request
from modules.DockerApi import DockerApi
dockerapi = None
app = FastAPI()
# Define Routes
@app.get("/host/stats")
async def get_host_update_stats():
global dockerapi
if dockerapi.host_stats_isUpdating == False:
asyncio.create_task(dockerapi.get_host_stats())
dockerapi.host_stats_isUpdating = True
while True:
if await dockerapi.redis_client.exists('host_stats'):
break
await asyncio.sleep(1.5)
stats = json.loads(await dockerapi.redis_client.get('host_stats'))
return Response(content=json.dumps(stats, indent=4), media_type="application/json")
@app.get("/containers/{container_id}/json")
async def get_container(container_id : str):
global dockerapi
if container_id and container_id.isalnum():
try:
for container in (await dockerapi.async_docker_client.containers.list()):
if container._id == container_id:
container_info = await container.show()
return Response(content=json.dumps(container_info, indent=4), media_type="application/json")
res = {
"type": "danger",
"msg": "no container found"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
except Exception as e:
res = {
"type": "danger",
"msg": str(e)
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
"type": "danger",
"msg": "no or invalid id defined"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
@app.get("/containers/json")
async def get_containers():
global dockerapi
containers = {}
try:
for container in (await dockerapi.async_docker_client.containers.list()):
container_info = await container.show()
containers.update({container_info['Id']: container_info})
return Response(content=json.dumps(containers, indent=4), media_type="application/json")
except Exception as e:
res = {
"type": "danger",
"msg": str(e)
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
@app.post("/containers/{container_id}/{post_action}")
async def post_containers(container_id : str, post_action : str, request: Request):
global dockerapi
try :
request_json = await request.json()
except Exception as err:
request_json = {}
if container_id and container_id.isalnum() and post_action:
try:
"""Dispatch container_post api call"""
if post_action == 'exec':
if not request_json or not 'cmd' in request_json:
res = {
"type": "danger",
"msg": "cmd is missing"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
if not request_json or not 'task' in request_json:
res = {
"type": "danger",
"msg": "task is missing"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
api_call_method_name = '__'.join(['container_post', str(post_action), str(request_json['cmd']), str(request_json['task']) ])
else:
api_call_method_name = '__'.join(['container_post', str(post_action) ])
api_call_method = getattr(dockerapi, api_call_method_name, lambda container_id: Response(content=json.dumps({'type': 'danger', 'msg':'container_post - unknown api call' }, indent=4), media_type="application/json"))
dockerapi.logger.info("api call: %s, container_id: %s" % (api_call_method_name, container_id))
return api_call_method(request_json, container_id=container_id)
except Exception as e:
dockerapi.logger.error("error - container_post: %s" % str(e))
res = {
"type": "danger",
"msg": str(e)
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
"type": "danger",
"msg": "invalid container id or missing action"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
@app.post("/container/{container_id}/stats/update")
async def post_container_update_stats(container_id : str):
global dockerapi
# start update task for container if no task is running
if container_id not in dockerapi.containerIds_to_update:
asyncio.create_task(dockerapi.get_container_stats(container_id))
dockerapi.containerIds_to_update.append(container_id)
while True:
if await dockerapi.redis_client.exists(container_id + '_stats'):
break
await asyncio.sleep(1.5)
stats = json.loads(await dockerapi.redis_client.get(container_id + '_stats'))
return Response(content=json.dumps(stats, indent=4), media_type="application/json")
# Events
@app.on_event("startup")
async def startup_event():
global dockerapi
# Initialize a custom logger
logger = logging.getLogger("dockerapi")
logger.setLevel(logging.INFO)
# Configure the logger to output logs to the terminal
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
formatter = logging.Formatter("%(levelname)s: %(message)s")
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.info("Init APP")
# Init redis client
if os.environ['REDIS_SLAVEOF_IP'] != "":
redis_client = redis = await aioredis.from_url(f"redis://{os.environ['REDIS_SLAVEOF_IP']}:{os.environ['REDIS_SLAVEOF_PORT']}/0")
else:
redis_client = redis = await aioredis.from_url("redis://redis-mailcow:6379/0")
# Init docker clients
sync_docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock', version='auto')
async_docker_client = aiodocker.Docker(url='unix:///var/run/docker.sock')
dockerapi = DockerApi(redis_client, sync_docker_client, async_docker_client, logger)
logger.info("Subscribe to redis channel")
# Subscribe to redis channel
dockerapi.pubsub = redis.pubsub()
await dockerapi.pubsub.subscribe("MC_CHANNEL")
asyncio.create_task(handle_pubsub_messages(dockerapi.pubsub))
@app.on_event("shutdown")
async def shutdown_event():
global dockerapi
# Close docker connections
dockerapi.sync_docker_client.close()
await dockerapi.async_docker_client.close()
# Close redis
await dockerapi.pubsub.unsubscribe("MC_CHANNEL")
await dockerapi.redis_client.close()
# PubSub Handler
async def handle_pubsub_messages(channel: aioredis.client.PubSub):
global dockerapi
while True:
try:
async with async_timeout.timeout(60):
message = await channel.get_message(ignore_subscribe_messages=True, timeout=30)
if message is not None:
# Parse message
data_json = json.loads(message['data'].decode('utf-8'))
dockerapi.logger.info(f"PubSub Received - {json.dumps(data_json)}")
# Handle api_call
if 'api_call' in data_json:
# api_call: container_post
if data_json['api_call'] == "container_post":
if 'post_action' in data_json and 'container_name' in data_json:
try:
"""Dispatch container_post api call"""
request_json = {}
if data_json['post_action'] == 'exec':
if 'request' in data_json:
request_json = data_json['request']
if 'cmd' in request_json:
if 'task' in request_json:
api_call_method_name = '__'.join(['container_post', str(data_json['post_action']), str(request_json['cmd']), str(request_json['task']) ])
else:
dockerapi.logger.error("api call: task missing")
else:
dockerapi.logger.error("api call: cmd missing")
else:
dockerapi.logger.error("api call: request missing")
else:
api_call_method_name = '__'.join(['container_post', str(data_json['post_action'])])
if api_call_method_name:
api_call_method = getattr(dockerapi, api_call_method_name)
if api_call_method:
dockerapi.logger.info("api call: %s, container_name: %s" % (api_call_method_name, data_json['container_name']))
api_call_method(request_json, container_name=data_json['container_name'])
else:
dockerapi.logger.error("api call not found: %s, container_name: %s" % (api_call_method_name, data_json['container_name']))
except Exception as e:
dockerapi.logger.error("container_post: %s" % str(e))
else:
dockerapi.logger.error("api call: missing container_name, post_action or request")
else:
dockerapi.logger.error("Unknwon PubSub recieved - %s" % json.dumps(data_json))
else:
dockerapi.logger.error("Unknwon PubSub recieved - %s" % json.dumps(data_json))
await asyncio.sleep(0.0)
except asyncio.TimeoutError:
pass
if __name__ == '__main__':
uvicorn.run(
app,
host="0.0.0.0",
port=443,
ssl_certfile="/app/dockerapi_cert.pem",
ssl_keyfile="/app/dockerapi_key.pem",
log_level="info",
loop="none"
)
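
Besides its HTTP routes, main.py also accepts commands over the Redis channel it subscribes to on startup. A minimal sketch of a publisher, assuming the same Redis host main.py uses when REDIS_SLAVEOF_IP is unset and the synchronous redis-py client; "postfix-mailcow" is only a hypothetical container name:

import json
import redis

# Connect the way main.py does when REDIS_SLAVEOF_IP is unset.
r = redis.Redis(host="redis-mailcow", port=6379, db=0)

# handle_pubsub_messages() parses this JSON and dispatches it to
# DockerApi.container_post__restart(request_json, container_name=...).
r.publish("MC_CHANNEL", json.dumps({
    "api_call": "container_post",
    "post_action": "restart",
    "container_name": "postfix-mailcow",
}))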


@@ -0,0 +1,487 @@
import psutil
import sys
import os
import re
import time
import json
import asyncio
import platform
from datetime import datetime
from fastapi import FastAPI, Response, Request
class DockerApi:
def __init__(self, redis_client, sync_docker_client, async_docker_client, logger):
self.redis_client = redis_client
self.sync_docker_client = sync_docker_client
self.async_docker_client = async_docker_client
self.logger = logger
self.host_stats_isUpdating = False
self.containerIds_to_update = []
# api call: container_post - post_action: stop
def container_post__stop(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
for container in self.sync_docker_client.containers.list(all=True, filters=filters):
container.stop()
res = { 'type': 'success', 'msg': 'command completed successfully'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: start
def container_post__start(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
for container in self.sync_docker_client.containers.list(all=True, filters=filters):
container.start()
res = { 'type': 'success', 'msg': 'command completed successfully'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: restart
def container_post__restart(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
for container in self.sync_docker_client.containers.list(all=True, filters=filters):
container.restart()
res = { 'type': 'success', 'msg': 'command completed successfully'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: top
def container_post__top(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
for container in self.sync_docker_client.containers.list(all=True, filters=filters):
res = { 'type': 'success', 'msg': container.top()}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: stats
def container_post__stats(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
for container in self.sync_docker_client.containers.list(all=True, filters=filters):
for stat in container.stats(decode=True, stream=True):
res = { 'type': 'success', 'msg': stat}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: mailq - task: delete
def container_post__exec__mailq__delete(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-d %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids))
for container in self.sync_docker_client.containers.list(filters=filters):
postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return self.exec_run_handler('generic', postsuper_r)
# api call: container_post - post_action: exec - cmd: mailq - task: hold
def container_post__exec__mailq__hold(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-h %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids))
for container in self.sync_docker_client.containers.list(filters=filters):
postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return self.exec_run_handler('generic', postsuper_r)
# api call: container_post - post_action: exec - cmd: mailq - task: cat
def container_post__exec__mailq__cat(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
sanitized_string = str(' '.join(filtered_qids))
for container in self.sync_docker_client.containers.list(filters=filters):
postcat_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
if not postcat_return:
postcat_return = 'err: invalid'
return self.exec_run_handler('utf8_text_only', postcat_return)
# api call: container_post - post_action: exec - cmd: mailq - task: unhold
def container_post__exec__mailq__unhold(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-H %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids))
for container in self.sync_docker_client.containers.list(filters=filters):
postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return self.exec_run_handler('generic', postsuper_r)
# api call: container_post - post_action: exec - cmd: mailq - task: deliver
def container_post__exec__mailq__deliver(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-i %s' % i for i in filtered_qids]
for container in self.sync_docker_client.containers.list(filters=filters):
for i in flagged_qids:
postqueue_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
# todo: check each exit code
res = { 'type': 'success', 'msg': 'Scheduled immediate delivery'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: mailq - task: list
def container_post__exec__mailq__list(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
for container in self.sync_docker_client.containers.list(filters=filters):
mailq_return = container.exec_run(["/usr/sbin/postqueue", "-j"], user='postfix')
return self.exec_run_handler('utf8_text_only', mailq_return)
# api call: container_post - post_action: exec - cmd: mailq - task: flush
def container_post__exec__mailq__flush(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
for container in self.sync_docker_client.containers.list(filters=filters):
postqueue_r = container.exec_run(["/usr/sbin/postqueue", "-f"], user='postfix')
return self.exec_run_handler('generic', postqueue_r)
# api call: container_post - post_action: exec - cmd: mailq - task: super_delete
def container_post__exec__mailq__super_delete(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
for container in self.sync_docker_client.containers.list(filters=filters):
postsuper_r = container.exec_run(["/usr/sbin/postsuper", "-d", "ALL"])
return self.exec_run_handler('generic', postsuper_r)
# api call: container_post - post_action: exec - cmd: system - task: fts_rescan
def container_post__exec__system__fts_rescan(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
if 'username' in request_json:
for container in self.sync_docker_client.containers.list(filters=filters):
rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail')
if rescan_return.exit_code == 0:
res = { 'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = { 'type': 'warning', 'msg': 'fts_rescan error'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
if 'all' in request_json:
for container in self.sync_docker_client.containers.list(filters=filters):
rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')
if rescan_return.exit_code == 0:
res = { 'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = { 'type': 'warning', 'msg': 'fts_rescan error'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: system - task: df
def container_post__exec__system__df(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
if 'dir' in request_json:
for container in self.sync_docker_client.containers.list(filters=filters):
df_return = container.exec_run(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
if df_return.exit_code == 0:
return df_return.output.decode('utf-8').rstrip()
else:
return "0,0,0,0,0,0"
# api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade
def container_post__exec__system__mysql_upgrade(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
for container in self.sync_docker_client.containers.list(filters=filters):
sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
if sql_return.exit_code == 0:
matched = False
for line in sql_return.output.decode('utf-8').split("\n"):
if 'is already upgraded to' in line:
matched = True
if matched:
res = { 'type': 'success', 'msg':'mysql_upgrade: already upgraded', 'text': sql_return.output.decode('utf-8')}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
container.restart()
res = { 'type': 'warning', 'msg':'mysql_upgrade: upgrade was applied', 'text': sql_return.output.decode('utf-8')}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = { 'type': 'error', 'msg': 'mysql_upgrade: error running command', 'text': sql_return.output.decode('utf-8')}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: system - task: mysql_tzinfo_to_sql
def container_post__exec__system__mysql_tzinfo_to_sql(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
for container in self.sync_docker_client.containers.list(filters=filters):
sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
if sql_return.exit_code == 0:
res = { 'type': 'info', 'msg': 'mysql_tzinfo_to_sql: command completed successfully', 'text': sql_return.output.decode('utf-8')}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = { 'type': 'error', 'msg': 'mysql_tzinfo_to_sql: error running command', 'text': sql_return.output.decode('utf-8')}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: reload - task: dovecot
def container_post__exec__reload__dovecot(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
for container in self.sync_docker_client.containers.list(filters=filters):
reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
return self.exec_run_handler('generic', reload_return)
# api call: container_post - post_action: exec - cmd: reload - task: postfix
def container_post__exec__reload__postfix(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
for container in self.sync_docker_client.containers.list(filters=filters):
reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
return self.exec_run_handler('generic', reload_return)
# api call: container_post - post_action: exec - cmd: reload - task: nginx
def container_post__exec__reload__nginx(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
for container in self.sync_docker_client.containers.list(filters=filters):
reload_return = container.exec_run(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
return self.exec_run_handler('generic', reload_return)
# api call: container_post - post_action: exec - cmd: sieve - task: list
def container_post__exec__sieve__list(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
if 'username' in request_json:
for container in self.sync_docker_client.containers.list(filters=filters):
sieve_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"])
return self.exec_run_handler('utf8_text_only', sieve_return)
# api call: container_post - post_action: exec - cmd: sieve - task: print
def container_post__exec__sieve__print(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
if 'username' in request_json and 'script_name' in request_json:
for container in self.sync_docker_client.containers.list(filters=filters):
cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]
sieve_return = container.exec_run(cmd)
return self.exec_run_handler('utf8_text_only', sieve_return)
# api call: container_post - post_action: exec - cmd: maildir - task: cleanup
def container_post__exec__maildir__cleanup(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
if 'maildir' in request_json:
for container in self.sync_docker_client.containers.list(filters=filters):
sane_name = re.sub(r'\W+', '', request_json['maildir'])
vmail_name = request_json['maildir'].replace("'", "'\\''")
cmd_vmail = "if [[ -d '/var/vmail/" + vmail_name + "' ]]; then /bin/mv '/var/vmail/" + vmail_name + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"
index_name = request_json['maildir'].split("/")
if len(index_name) > 1:
index_name = index_name[1].replace("'", "'\\''") + "@" + index_name[0].replace("'", "'\\''")
cmd_vmail_index = "if [[ -d '/var/vmail_index/" + index_name + "' ]]; then /bin/mv '/var/vmail_index/" + index_name + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "_index'; fi"
cmd = ["/bin/bash", "-c", cmd_vmail + " && " + cmd_vmail_index]
else:
cmd = ["/bin/bash", "-c", cmd_vmail]
maildir_cleanup = container.exec_run(cmd, user='vmail')
return self.exec_run_handler('generic', maildir_cleanup)
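# Quoting note: the replace("'", "'\\''") pattern used throughout these
# handlers closes the single-quoted string, emits a literal escaped quote,
# and reopens it, so arbitrary user input stays one shell word. A minimal
# standalone sketch of the same idea (hypothetical helper, not part of this file):
#   def shell_quote(s):
#       return "'" + s.replace("'", "'\\''") + "'"
#   shell_quote("it's")  # -> 'it'\''s'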
# api call: container_post - post_action: exec - cmd: rspamd - task: worker_password
def container_post__exec__rspamd__worker_password(self, request_json, **kwargs):
if 'container_id' in kwargs:
filters = {"id": kwargs['container_id']}
elif 'container_name' in kwargs:
filters = {"name": kwargs['container_name']}
if 'raw' in request_json:
for container in self.sync_docker_client.containers.list(filters=filters):
cmd = "/usr/bin/rspamadm pw -e -p '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
cmd_response = self.exec_cmd_container(container, cmd, user="_rspamd")
matched = False
for line in cmd_response.split("\n"):
if '$2$' in line:
hash = line.strip()
hash_out = re.search(r'\$2\$.+$', hash).group(0)
rspamd_passphrase_hash = re.sub(r'[^0-9a-zA-Z\$]+', '', hash_out.rstrip())
rspamd_password_filename = "/etc/rspamd/override.d/worker-controller-password.inc"
cmd = '''/bin/echo 'enable_password = "%s";' > %s && cat %s''' % (rspamd_passphrase_hash, rspamd_password_filename, rspamd_password_filename)
cmd_response = self.exec_cmd_container(container, cmd, user="_rspamd")
if rspamd_passphrase_hash.startswith("$2$") and rspamd_passphrase_hash in cmd_response:
container.restart()
matched = True
if matched:
res = { 'type': 'success', 'msg': 'command completed successfully' }
self.logger.info('success changing Rspamd password')
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
self.logger.error('failed changing Rspamd password')
res = { 'type': 'danger', 'msg': 'command did not complete' }
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# Collect host stats
async def get_host_stats(self, wait=5):
try:
system_time = datetime.now()
host_stats = {
"cpu": {
"cores": psutil.cpu_count(),
"usage": psutil.cpu_percent()
},
"memory": {
"total": psutil.virtual_memory().total,
"usage": psutil.virtual_memory().percent,
"swap": psutil.swap_memory()
},
"uptime": time.time() - psutil.boot_time(),
"system_time": system_time.strftime("%d.%m.%Y %H:%M:%S"),
"architecture": platform.machine()
}
await self.redis_client.set('host_stats', json.dumps(host_stats), ex=10)
except Exception as e:
res = {
"type": "danger",
"msg": str(e)
}
await asyncio.sleep(wait)
self.host_stats_isUpdating = False
# Collect container stats
async def get_container_stats(self, container_id, wait=5, stop=False):
if container_id and container_id.isalnum():
try:
for container in (await self.async_docker_client.containers.list()):
if container._id == container_id:
res = await container.stats(stream=False)
if await self.redis_client.exists(container_id + '_stats'):
stats = json.loads(await self.redis_client.get(container_id + '_stats'))
else:
stats = []
stats.append(res[0])
if len(stats) > 3:
del stats[0]
await self.redis_client.set(container_id + '_stats', json.dumps(stats), ex=60)
except Exception as e:
res = {
"type": "danger",
"msg": str(e)
}
else:
res = {
"type": "danger",
"msg": "no or invalid id defined"
}
await asyncio.sleep(wait)
if stop:
# update task was called second time, stop
self.containerIds_to_update.remove(container_id)
else:
# call update task a second time
await self.get_container_stats(container_id, wait=0, stop=True)
def exec_cmd_container(self, container, cmd, user, timeout=2, shell_cmd="/bin/bash"):
def recv_socket_data(c_socket, timeout):
c_socket.setblocking(0)
total_data=[]
data=''
begin=time.time()
while True:
if total_data and time.time()-begin > timeout:
break
elif time.time()-begin > timeout*2:
break
try:
data = c_socket.recv(8192)
if data:
total_data.append(data.decode('utf-8'))
# reset the beginning time for the timeout measurement
begin=time.time()
else:
# sleep for some time to indicate a gap
time.sleep(0.1)
break
except:
pass
return ''.join(total_data)
try:
socket = container.exec_run([shell_cmd], stdin=True, socket=True, user=user).output._sock
if not cmd.endswith("\n"):
cmd = cmd + "\n"
socket.send(cmd.encode('utf-8'))
data = recv_socket_data(socket, timeout)
socket.close()
return data
except Exception as e:
self.logger.error("error - exec_cmd_container: %s" % str(e))
traceback.print_exc(file=sys.stdout)
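# Usage sketch (illustrative only, mirroring the rspamd handler above): send a
# single shell line to an interactive shell inside the container and read
# stdout until the socket stays quiet for roughly `timeout` seconds:
#   output = self.exec_cmd_container(container, "id", user="_rspamd")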
def exec_run_handler(self, type, output):
if type == 'generic':
if output.exit_code == 0:
res = { 'type': 'success', 'msg': 'command completed successfully' }
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = { 'type': 'danger', 'msg': 'command failed: ' + output.output.decode('utf-8') }
return Response(content=json.dumps(res, indent=4), media_type="application/json")
if type == 'utf8_text_only':
return Response(content=output.output.decode('utf-8'), media_type="text/plain")
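# Dispatch note (inferred from the method names and their header comments):
# requests of the form container_post(post_action, cmd, task) map onto these
# handlers by composing the method name, e.g. post_action=exec, cmd=reload,
# task=dovecot resolves to container_post__exec__reload__dovecot.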

View File

@@ -1,10 +1,13 @@
FROM debian:bullseye-slim
LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
ARG DEBIAN_FRONTEND=noninteractive
ARG DOVECOT=2.3.19.1
# renovate: datasource=github-tags depName=dovecot/core versioning=semver-coerced extractVersion=(?<version>.*)$
ARG DOVECOT=2.3.21
# renovate: datasource=github-releases depName=tianon/gosu versioning=semver-coerced extractVersion=(?<version>.*)$
ARG GOSU_VERSION=1.16
ENV LC_ALL C
ENV GOSU_VERSION 1.14
# Add groups and users before installing Dovecot to not break compatibility
RUN groupadd -g 5000 vmail \
@@ -18,6 +21,7 @@ RUN groupadd -g 5000 vmail \
&& touch /etc/default/locale \
&& apt-get update \
&& apt-get -y --no-install-recommends install \
build-essential \
apt-transport-https \
ca-certificates \
cpanminus \
@@ -58,6 +62,7 @@ RUN groupadd -g 5000 vmail \
libproc-processtable-perl \
libreadonly-perl \
libregexp-common-perl \
libssl-dev \
libsys-meminfo-perl \
libterm-readkey-perl \
libtest-deep-perl \
@@ -107,6 +112,8 @@ RUN groupadd -g 5000 vmail \
&& apt-get autoclean \
&& rm -rf /var/lib/apt/lists/* \
&& rm -rf /tmp/* /var/tmp/* /root/.cache/
# imapsync dependencies
RUN cpan Crypt::OpenSSL::PKCS12
COPY trim_logs.sh /usr/local/bin/trim_logs.sh
COPY clean_q_aged.sh /usr/local/bin/clean_q_aged.sh

View File

@@ -159,7 +159,7 @@ function auth_password_verify(req, pass)
VALUES ("%s", 0, "%s", "%s")]], con:escape(req.service), con:escape(req.user), con:escape(req.real_rip)))
cur:close()
con:close()
return dovecot.auth.PASSDB_RESULT_OK, "password=" .. pass
return dovecot.auth.PASSDB_RESULT_OK, ""
end
row = cur:fetch (row, "a")
end
@@ -180,13 +180,13 @@ function auth_password_verify(req, pass)
if tostring(req.real_rip) == "__IPV4_SOGO__" then
cur:close()
con:close()
return dovecot.auth.PASSDB_RESULT_OK, "password=" .. pass
return dovecot.auth.PASSDB_RESULT_OK, ""
elseif row.has_prot_access == "1" then
con:execute(string.format([[REPLACE INTO sasl_log (service, app_password, username, real_rip)
VALUES ("%s", %d, "%s", "%s")]], con:escape(req.service), row.id, con:escape(req.user), con:escape(req.real_rip)))
cur:close()
con:close()
return dovecot.auth.PASSDB_RESULT_OK, "password=" .. pass
return dovecot.auth.PASSDB_RESULT_OK, ""
end
end
row = cur:fetch (row, "a")

View File

@@ -8492,6 +8492,7 @@ sub xoauth2
require HTML::Entities ;
require JSON ;
require JSON::WebToken::Crypt::RSA ;
require Crypt::OpenSSL::PKCS12;
require Crypt::OpenSSL::RSA ;
require Encode::Byte ;
require IO::Socket::SSL ;
@@ -8532,8 +8533,9 @@ sub xoauth2
$sync->{ debug } and myprint( "Service account: $iss\nKey file: $keyfile\nKey password: $keypass\n");
# Get private key from p12 file (would be better in perl...)
$key = `openssl pkcs12 -in "$keyfile" -nodes -nocerts -passin pass:$keypass -nomacver`;
# Get private key from p12 file
my $pkcs12 = Crypt::OpenSSL::PKCS12->new_from_file($keyfile);
$key = $pkcs12->private_key($keypass);
$sync->{ debug } and myprint( "Private key:\n$key\n");
}

View File

@@ -75,7 +75,8 @@ my $sth = $dbh->prepare("SELECT id,
custom_params,
subscribeall,
timeout1,
timeout2
timeout2,
dry
FROM imapsync
WHERE active = 1
AND is_running = 0
@@ -111,6 +112,7 @@ while ($row = $sth->fetchrow_arrayref()) {
$subscribeall = @$row[18];
$timeout1 = @$row[19];
$timeout2 = @$row[20];
$dry = @$row[21];
if ($enc1 eq "TLS") { $enc1 = "--tls1"; } elsif ($enc1 eq "SSL") { $enc1 = "--ssl1"; } else { undef $enc1; }
@@ -118,6 +120,8 @@ while ($row = $sth->fetchrow_arrayref()) {
my $passfile1 = File::Temp->new(TEMPLATE => $template);
my $passfile2 = File::Temp->new(TEMPLATE => $template);
binmode( $passfile1, ":utf8" );
print $passfile1 "$password1\n";
print $passfile2 trim($master_pass) . "\n";
@@ -148,6 +152,7 @@ while ($row = $sth->fetchrow_arrayref()) {
"--host2", "localhost",
"--user2", $user2 . '*' . trim($master_user),
"--passfile2", $passfile2->filename,
($dry eq "1" ? ('--dry') : ()),
'--no-modulesversion',
'--noreleasecheck'];

View File

@@ -1,5 +1,7 @@
FROM alpine:3.17
LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
WORKDIR /app
ENV XTABLES_LIBDIR /usr/lib/xtables
ENV PYTHON_IPTABLES_XTABLES_VERSION 12
@@ -14,10 +16,13 @@ RUN apk add --virtual .build-deps \
iptables \
ip6tables \
xtables-addons \
nftables \
tzdata \
py3-pip \
py3-nftables \
musl-dev \
&& pip3 install --ignore-installed --upgrade pip \
jsonschema \
python-iptables \
redis \
ipaddress \
@@ -26,5 +31,10 @@ RUN apk add --virtual .build-deps \
# && pip3 install --upgrade pip python-iptables==0.13.0 redis ipaddress dnspython \
COPY server.py /
CMD ["python3", "-u", "/server.py"]
COPY modules /app/modules
COPY main.py /app/
COPY ./docker-entrypoint.sh /app/
RUN chmod +x /app/docker-entrypoint.sh
CMD ["/bin/sh", "-c", "/app/docker-entrypoint.sh"]

View File

@@ -0,0 +1,29 @@
#!/bin/sh
backend=iptables
nft list table ip filter >/dev/null 2>&1
nftables_found=$?
iptables -L -n >/dev/null 2>&1
iptables_found=$?
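# Both probes ran; $? is 0 for the backend whose tooling works. The ladder
# below prefers the probe that succeeded, and on a tie (both usable) keeps
# whichever backend already holds the larger ruleset.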
if [ $nftables_found -lt $iptables_found ]; then
backend=nftables
fi
if [ $nftables_found -gt $iptables_found ]; then
backend=iptables
fi
if [ $nftables_found -eq 0 ] && [ $nftables_found -eq $iptables_found ]; then
nftables_lines=$(nft list ruleset | wc -l)
iptables_lines=$(iptables-save | wc -l)
if [ $nftables_lines -gt $iptables_lines ]; then
backend=nftables
else
backend=iptables
fi
fi
exec python -u /app/main.py $backend

View File

@@ -0,0 +1,469 @@
#!/usr/bin/env python3
import re
import os
import sys
import time
import atexit
import signal
import ipaddress
from collections import Counter
from random import randint
from threading import Thread
from threading import Lock
import redis
import json
import dns.resolver
import dns.exception
import uuid
from modules.Logger import Logger
from modules.IPTables import IPTables
from modules.NFTables import NFTables
# connect to redis
while True:
try:
redis_slaveof_ip = os.getenv('REDIS_SLAVEOF_IP', '')
redis_slaveof_port = os.getenv('REDIS_SLAVEOF_PORT', '')
if "".__eq__(redis_slaveof_ip):
r = redis.StrictRedis(host=os.getenv('IPV4_NETWORK', '172.22.1') + '.249', decode_responses=True, port=6379, db=0)
else:
r = redis.StrictRedis(host=redis_slaveof_ip, decode_responses=True, port=redis_slaveof_port, db=0)
r.ping()
except Exception as ex:
print('%s - trying again in 3 seconds' % (ex))
time.sleep(3)
else:
break
pubsub = r.pubsub()
# rename fail2ban to netfilter
if r.exists('F2B_LOG'):
r.rename('F2B_LOG', 'NETFILTER_LOG')
# globals
WHITELIST = []
BLACKLIST = []
bans = {}
quit_now = False
exit_code = 0
lock = Lock()
# init Logger
logger = Logger(r)
# init backend
backend = sys.argv[1]
if backend == "nftables":
logger.logInfo('Using NFTables backend')
tables = NFTables("MAILCOW", logger)
else:
logger.logInfo('Using IPTables backend')
tables = IPTables("MAILCOW", logger)
def refreshF2boptions():
global f2boptions
global quit_now
global exit_code
f2boptions = {}
if not r.get('F2B_OPTIONS'):
f2boptions['ban_time'] = r.get('F2B_BAN_TIME')
f2boptions['max_ban_time'] = r.get('F2B_MAX_BAN_TIME')
f2boptions['ban_time_increment'] = r.get('F2B_BAN_TIME_INCREMENT')
f2boptions['max_attempts'] = r.get('F2B_MAX_ATTEMPTS')
f2boptions['retry_window'] = r.get('F2B_RETRY_WINDOW')
f2boptions['netban_ipv4'] = r.get('F2B_NETBAN_IPV4')
f2boptions['netban_ipv6'] = r.get('F2B_NETBAN_IPV6')
else:
try:
f2boptions = json.loads(r.get('F2B_OPTIONS'))
except ValueError:
logger.logCrit('Error loading F2B options: F2B_OPTIONS is not json')
quit_now = True
exit_code = 2
verifyF2boptions(f2boptions)
r.set('F2B_OPTIONS', json.dumps(f2boptions, ensure_ascii=False))
def verifyF2boptions(f2boptions):
verifyF2boption(f2boptions,'ban_time', 1800)
verifyF2boption(f2boptions,'max_ban_time', 10000)
verifyF2boption(f2boptions,'ban_time_increment', True)
verifyF2boption(f2boptions,'max_attempts', 10)
verifyF2boption(f2boptions,'retry_window', 600)
verifyF2boption(f2boptions,'netban_ipv4', 32)
verifyF2boption(f2boptions,'netban_ipv6', 128)
verifyF2boption(f2boptions,'banlist_id', str(uuid.uuid4()))
verifyF2boption(f2boptions,'manage_external', 0)
def verifyF2boption(f2boptions, f2boption, f2bdefault):
f2boptions[f2boption] = f2boptions[f2boption] if f2boption in f2boptions and f2boptions[f2boption] is not None else f2bdefault
def refreshF2bregex():
global f2bregex
global quit_now
global exit_code
if not r.get('F2B_REGEX'):
f2bregex = {}
f2bregex[1] = r'mailcow UI: Invalid password for .+ by ([0-9a-f\.:]+)'
f2bregex[2] = r'Rspamd UI: Invalid password by ([0-9a-f\.:]+)'
f2bregex[3] = r'warning: .*\[([0-9a-f\.:]+)\]: SASL .+ authentication failed: (?!.*Connection lost to authentication server).+'
f2bregex[4] = r'warning: non-SMTP command from .*\[([0-9a-f\.:]+)]:.+'
f2bregex[5] = r'NOQUEUE: reject: RCPT from \[([0-9a-f\.:]+)].+Protocol error.+'
f2bregex[6] = r'-login: Disconnected.+ \(auth failed, .+\): user=.*, method=.+, rip=([0-9a-f\.:]+),'
f2bregex[7] = r'-login: Aborted login.+ \(auth failed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+'
f2bregex[8] = r'-login: Aborted login.+ \(tried to use disallowed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+'
f2bregex[9] = r'SOGo.+ Login from \'([0-9a-f\.:]+)\' for user .+ might not have worked'
f2bregex[10] = r'([0-9a-f\.:]+) \"GET \/SOGo\/.* HTTP.+\" 403 .+'
r.set('F2B_REGEX', json.dumps(f2bregex, ensure_ascii=False))
else:
try:
f2bregex = {}
f2bregex = json.loads(r.get('F2B_REGEX'))
except ValueError:
logger.logCrit('Error loading F2B options: F2B_REGEX is not json')
quit_now = True
exit_code = 2
def get_ip(address):
ip = ipaddress.ip_address(address)
if type(ip) is ipaddress.IPv6Address and ip.ipv4_mapped:
ip = ip.ipv4_mapped
if ip.is_private or ip.is_loopback:
return False
return ip
def ban(address):
global f2boptions
global lock
refreshF2boptions()
BAN_TIME = int(f2boptions['ban_time'])
BAN_TIME_INCREMENT = bool(f2boptions['ban_time_increment'])
MAX_ATTEMPTS = int(f2boptions['max_attempts'])
RETRY_WINDOW = int(f2boptions['retry_window'])
NETBAN_IPV4 = '/' + str(f2boptions['netban_ipv4'])
NETBAN_IPV6 = '/' + str(f2boptions['netban_ipv6'])
ip = get_ip(address)
if not ip: return
address = str(ip)
self_network = ipaddress.ip_network(address)
with lock:
temp_whitelist = set(WHITELIST)
if temp_whitelist:
for wl_key in temp_whitelist:
wl_net = ipaddress.ip_network(wl_key, False)
if wl_net.overlaps(self_network):
logger.logInfo('Address %s is whitelisted by rule %s' % (self_network, wl_net))
return
net = ipaddress.ip_network((address + (NETBAN_IPV4 if type(ip) is ipaddress.IPv4Address else NETBAN_IPV6)), strict=False)
net = str(net)
if not net in bans:
bans[net] = {'attempts': 0, 'last_attempt': 0, 'ban_counter': 0}
current_attempt = time.time()
if current_attempt - bans[net]['last_attempt'] > RETRY_WINDOW:
bans[net]['attempts'] = 0
bans[net]['attempts'] += 1
bans[net]['last_attempt'] = current_attempt
if bans[net]['attempts'] >= MAX_ATTEMPTS:
cur_time = int(round(time.time()))
NET_BAN_TIME = BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** bans[net]['ban_counter']
logger.logCrit('Banning %s for %d minutes' % (net, NET_BAN_TIME / 60 ))
if type(ip) is ipaddress.IPv4Address and int(f2boptions['manage_external']) != 1:
with lock:
tables.banIPv4(net)
elif int(f2boptions['manage_external']) != 1:
with lock:
tables.banIPv6(net)
r.hset('F2B_ACTIVE_BANS', '%s' % net, cur_time + NET_BAN_TIME)
else:
logger.logWarn('%d more attempts in the next %d seconds until %s is banned' % (MAX_ATTEMPTS - bans[net]['attempts'], RETRY_WINDOW, net))
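# Worked example of the increment above: with ban_time=1800 and
# ban_time_increment enabled, NET_BAN_TIME doubles with each ban_counter:
#   [1800 * 2 ** c for c in range(4)]  # -> [1800, 3600, 7200, 14400] seconds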
def unban(net):
global lock
if not net in bans:
logger.logInfo('%s is not banned, skipping unban and deleting from queue (if any)' % net)
r.hdel('F2B_QUEUE_UNBAN', '%s' % net)
return
logger.logInfo('Unbanning %s' % net)
if type(ipaddress.ip_network(net)) is ipaddress.IPv4Network:
with lock:
tables.unbanIPv4(net)
else:
with lock:
tables.unbanIPv6(net)
r.hdel('F2B_ACTIVE_BANS', '%s' % net)
r.hdel('F2B_QUEUE_UNBAN', '%s' % net)
if net in bans:
bans[net]['attempts'] = 0
bans[net]['ban_counter'] += 1
def permBan(net, unban=False):
global f2boptions
global lock
is_unbanned = False
is_banned = False
if type(ipaddress.ip_network(net, strict=False)) is ipaddress.IPv4Network:
with lock:
if unban:
is_unbanned = tables.unbanIPv4(net)
elif int(f2boptions['manage_external']) != 1:
is_banned = tables.banIPv4(net)
else:
with lock:
if unban:
is_unbanned = tables.unbanIPv6(net)
elif int(f2boptions['manage_external']) != 1:
is_banned = tables.banIPv6(net)
if is_unbanned:
r.hdel('F2B_PERM_BANS', '%s' % net)
logger.logCrit('Removed host/network %s from blacklist' % net)
elif is_banned:
r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time())))
logger.logCrit('Added host/network %s to blacklist' % net)
def clear():
global lock
logger.logInfo('Clearing all bans')
for net in bans.copy():
unban(net)
with lock:
tables.clearIPv4Table()
tables.clearIPv6Table()
r.delete('F2B_ACTIVE_BANS')
r.delete('F2B_PERM_BANS')
pubsub.unsubscribe()
def watch():
logger.logInfo('Watching Redis channel F2B_CHANNEL')
pubsub.subscribe('F2B_CHANNEL')
global quit_now
global exit_code
while not quit_now:
try:
for item in pubsub.listen():
refreshF2bregex()
for rule_id, rule_regex in f2bregex.items():
if item['data'] and item['type'] == 'message':
try:
result = re.search(rule_regex, item['data'])
except re.error:
result = False
if result:
addr = result.group(1)
ip = ipaddress.ip_address(addr)
if ip.is_private or ip.is_loopback:
continue
logger.logWarn('%s matched rule id %s (%s)' % (addr, rule_id, item['data']))
ban(addr)
except Exception as ex:
logger.logWarn('Error reading log line from pubsub: %s' % ex)
quit_now = True
exit_code = 2
def snat4(snat_target):
global lock
global quit_now
while not quit_now:
time.sleep(10)
with lock:
tables.snat4(snat_target, os.getenv('IPV4_NETWORK', '172.22.1') + '.0/24')
def snat6(snat_target):
global lock
global quit_now
while not quit_now:
time.sleep(10)
with lock:
tables.snat6(snat_target, os.getenv('IPV6_NETWORK', 'fd4d:6169:6c63:6f77::/64'))
def autopurge():
while not quit_now:
time.sleep(10)
refreshF2boptions()
BAN_TIME = int(f2boptions['ban_time'])
MAX_BAN_TIME = int(f2boptions['max_ban_time'])
BAN_TIME_INCREMENT = bool(f2boptions['ban_time_increment'])
MAX_ATTEMPTS = int(f2boptions['max_attempts'])
QUEUE_UNBAN = r.hgetall('F2B_QUEUE_UNBAN')
if QUEUE_UNBAN:
for net in QUEUE_UNBAN:
unban(str(net))
for net in bans.copy():
if bans[net]['attempts'] >= MAX_ATTEMPTS:
NET_BAN_TIME = BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** bans[net]['ban_counter']
TIME_SINCE_LAST_ATTEMPT = time.time() - bans[net]['last_attempt']
if TIME_SINCE_LAST_ATTEMPT > NET_BAN_TIME or TIME_SINCE_LAST_ATTEMPT > MAX_BAN_TIME:
unban(net)
def mailcowChainOrder():
global lock
global quit_now
global exit_code
while not quit_now:
time.sleep(10)
with lock:
quit_now, exit_code = tables.checkIPv4ChainOrder()
if quit_now: return
quit_now, exit_code = tables.checkIPv6ChainOrder()
def isIpNetwork(address):
try:
ipaddress.ip_network(address, False)
except ValueError:
return False
return True
def genNetworkList(list):
resolver = dns.resolver.Resolver()
hostnames = []
networks = []
for key in list:
if isIpNetwork(key):
networks.append(key)
else:
hostnames.append(key)
for hostname in hostnames:
hostname_ips = []
for rdtype in ['A', 'AAAA']:
try:
answer = resolver.resolve(qname=hostname, rdtype=rdtype, lifetime=3)
except dns.exception.Timeout:
logger.logInfo('Hostname %s timed out on resolve' % hostname)
break
except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer):
continue
except dns.exception.DNSException as dnsexception:
logger.logInfo('%s' % dnsexception)
continue
for rdata in answer:
hostname_ips.append(rdata.to_text())
networks.extend(hostname_ips)
return set(networks)
def whitelistUpdate():
global lock
global quit_now
global WHITELIST
while not quit_now:
start_time = time.time()
list = r.hgetall('F2B_WHITELIST')
new_whitelist = []
if list:
new_whitelist = genNetworkList(list)
with lock:
if Counter(new_whitelist) != Counter(WHITELIST):
WHITELIST = new_whitelist
logger.logInfo('Whitelist was changed, it has %s entries' % len(WHITELIST))
time.sleep(60.0 - ((time.time() - start_time) % 60.0))
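# The sleep above re-aligns each pass to a 60 s grid: however long the refresh
# took, ((time.time() - start_time) % 60.0) is subtracted from 60 so
# iterations start at roughly fixed one-minute intervals instead of drifting
# by the work time. The same pattern in isolation:
#   start_time = time.time()
#   do_work()  # hypothetical workload
#   time.sleep(60.0 - ((time.time() - start_time) % 60.0))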
def blacklistUpdate():
global quit_now
global BLACKLIST
while not quit_now:
start_time = time.time()
list = r.hgetall('F2B_BLACKLIST')
new_blacklist = []
if list:
new_blacklist = genNetworkList(list)
if Counter(new_blacklist) != Counter(BLACKLIST):
addban = set(new_blacklist).difference(BLACKLIST)
delban = set(BLACKLIST).difference(new_blacklist)
BLACKLIST = new_blacklist
logger.logInfo('Blacklist was changed, it has %s entries' % len(BLACKLIST))
if addban:
for net in addban:
permBan(net=net)
if delban:
for net in delban:
permBan(net=net, unban=True)
time.sleep(60.0 - ((time.time() - start_time) % 60.0))
def quit(signum, frame):
global quit_now
quit_now = True
if __name__ == '__main__':
refreshF2boptions()
# In case a previous session was killed without cleanup
clear()
# Reinit MAILCOW chain
# Is called before threads start, no locking
logger.logInfo("Initializing mailcow netfilter chain")
tables.initChainIPv4()
tables.initChainIPv6()
watch_thread = Thread(target=watch)
watch_thread.daemon = True
watch_thread.start()
if os.getenv('SNAT_TO_SOURCE') and os.getenv('SNAT_TO_SOURCE') != 'n':
try:
snat_ip = os.getenv('SNAT_TO_SOURCE')
snat_ipo = ipaddress.ip_address(snat_ip)
if type(snat_ipo) is ipaddress.IPv4Address:
snat4_thread = Thread(target=snat4,args=(snat_ip,))
snat4_thread.daemon = True
snat4_thread.start()
except ValueError:
print(os.getenv('SNAT_TO_SOURCE') + ' is not a valid IPv4 address')
if os.getenv('SNAT6_TO_SOURCE') and os.getenv('SNAT6_TO_SOURCE') != 'n':
try:
snat_ip = os.getenv('SNAT6_TO_SOURCE')
snat_ipo = ipaddress.ip_address(snat_ip)
if type(snat_ipo) is ipaddress.IPv6Address:
snat6_thread = Thread(target=snat6,args=(snat_ip,))
snat6_thread.daemon = True
snat6_thread.start()
except ValueError:
print(os.getenv('SNAT6_TO_SOURCE') + ' is not a valid IPv6 address')
autopurge_thread = Thread(target=autopurge)
autopurge_thread.daemon = True
autopurge_thread.start()
mailcowchainwatch_thread = Thread(target=mailcowChainOrder)
mailcowchainwatch_thread.daemon = True
mailcowchainwatch_thread.start()
blacklistupdate_thread = Thread(target=blacklistUpdate)
blacklistupdate_thread.daemon = True
blacklistupdate_thread.start()
whitelistupdate_thread = Thread(target=whitelistUpdate)
whitelistupdate_thread.daemon = True
whitelistupdate_thread.start()
signal.signal(signal.SIGTERM, quit)
atexit.register(clear)
while not quit_now:
time.sleep(0.5)
sys.exit(exit_code)

View File

@@ -0,0 +1,213 @@
import iptc
import time
class IPTables:
def __init__(self, chain_name, logger):
self.chain_name = chain_name
self.logger = logger
def initChainIPv4(self):
if not iptc.Chain(iptc.Table(iptc.Table.FILTER), self.chain_name) in iptc.Table(iptc.Table.FILTER).chains:
iptc.Table(iptc.Table.FILTER).create_chain(self.chain_name)
for c in ['FORWARD', 'INPUT']:
chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), c)
rule = iptc.Rule()
rule.src = '0.0.0.0/0'
rule.dst = '0.0.0.0/0'
target = iptc.Target(rule, self.chain_name)
rule.target = target
if rule not in chain.rules:
chain.insert_rule(rule)
def initChainIPv6(self):
if not iptc.Chain(iptc.Table6(iptc.Table6.FILTER), self.chain_name) in iptc.Table6(iptc.Table6.FILTER).chains:
iptc.Table6(iptc.Table6.FILTER).create_chain(self.chain_name)
for c in ['FORWARD', 'INPUT']:
chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), c)
rule = iptc.Rule6()
rule.src = '::/0'
rule.dst = '::/0'
target = iptc.Target(rule, self.chain_name)
rule.target = target
if rule not in chain.rules:
chain.insert_rule(rule)
def checkIPv4ChainOrder(self):
filter_table = iptc.Table(iptc.Table.FILTER)
filter_table.refresh()
return self.checkChainOrder(filter_table)
def checkIPv6ChainOrder(self):
filter_table = iptc.Table6(iptc.Table6.FILTER)
filter_table.refresh()
return self.checkChainOrder(filter_table)
def checkChainOrder(self, filter_table):
err = False
exit_code = None
forward_chain = iptc.Chain(filter_table, 'FORWARD')
input_chain = iptc.Chain(filter_table, 'INPUT')
for chain in [forward_chain, input_chain]:
target_found = False
for position, item in enumerate(chain.rules):
if item.target.name == self.chain_name:
target_found = True
if position > 2:
self.logger.logCrit('Error in %s chain: %s target not found, restarting container' % (chain.name, self.chain_name))
err = True
exit_code = 2
if not target_found:
self.logger.logCrit('Error in %s chain: %s target not found, restarting container' % (chain.name, self.chain_name))
err = True
exit_code = 2
return err, exit_code
def clearIPv4Table(self):
self.clearTable(iptc.Table(iptc.Table.FILTER))
def clearIPv6Table(self):
self.clearTable(iptc.Table6(iptc.Table6.FILTER))
def clearTable(self, filter_table):
filter_table.autocommit = False
forward_chain = iptc.Chain(filter_table, "FORWARD")
input_chain = iptc.Chain(filter_table, "INPUT")
mailcow_chain = iptc.Chain(filter_table, self.chain_name)
if mailcow_chain in filter_table.chains:
for rule in mailcow_chain.rules:
mailcow_chain.delete_rule(rule)
for rule in forward_chain.rules:
if rule.target.name == self.chain_name:
forward_chain.delete_rule(rule)
for rule in input_chain.rules:
if rule.target.name == self.chain_name:
input_chain.delete_rule(rule)
filter_table.delete_chain(self.chain_name)
filter_table.commit()
filter_table.refresh()
filter_table.autocommit = True
def banIPv4(self, source):
chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), self.chain_name)
rule = iptc.Rule()
rule.src = source
target = iptc.Target(rule, "REJECT")
rule.target = target
if rule in chain.rules:
return False
chain.insert_rule(rule)
return True
def banIPv6(self, source):
chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), self.chain_name)
rule = iptc.Rule6()
rule.src = source
target = iptc.Target(rule, "REJECT")
rule.target = target
if rule in chain.rules:
return False
chain.insert_rule(rule)
return True
def unbanIPv4(self, source):
chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), self.chain_name)
rule = iptc.Rule()
rule.src = source
target = iptc.Target(rule, "REJECT")
rule.target = target
if rule not in chain.rules:
return False
chain.delete_rule(rule)
return True
def unbanIPv6(self, source):
chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), self.chain_name)
rule = iptc.Rule6()
rule.src = source
target = iptc.Target(rule, "REJECT")
rule.target = target
if rule not in chain.rules:
return False
chain.delete_rule(rule)
return True
def snat4(self, snat_target, source):
try:
table = iptc.Table('nat')
table.refresh()
chain = iptc.Chain(table, 'POSTROUTING')
table.autocommit = False
new_rule = self.getSnat4Rule(snat_target, source)
if not chain.rules:
# if there are no rules in the chain, insert the new rule directly
self.logger.logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
chain.insert_rule(new_rule)
else:
for position, rule in enumerate(chain.rules):
if not hasattr(rule.target, 'parameter'):
continue
match = all((
new_rule.get_src() == rule.get_src(),
new_rule.get_dst() == rule.get_dst(),
new_rule.target.parameters == rule.target.parameters,
new_rule.target.name == rule.target.name
))
if position == 0:
if not match:
self.logger.logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
chain.insert_rule(new_rule)
else:
if match:
self.logger.logInfo(f'Remove rule for source network {new_rule.src} to SNAT target {snat_target} from POSTROUTING chain at position {position}')
chain.delete_rule(rule)
table.commit()
table.autocommit = True
return True
except:
self.logger.logCrit('Error running SNAT4, retrying...')
return False
def snat6(self, snat_target, source):
try:
table = iptc.Table6('nat')
table.refresh()
chain = iptc.Chain(table, 'POSTROUTING')
table.autocommit = False
new_rule = self.getSnat6Rule(snat_target, source)
if new_rule not in chain.rules:
self.logger.logInfo('Added POSTROUTING rule for source network %s to SNAT target %s' % (new_rule.src, snat_target))
chain.insert_rule(new_rule)
else:
for position, item in enumerate(chain.rules):
if item == new_rule:
if position != 0:
chain.delete_rule(new_rule)
table.commit()
table.autocommit = True
except:
self.logger.logCrit('Error running SNAT6, retrying...')
def getSnat4Rule(self, snat_target, source):
rule = iptc.Rule()
rule.src = source
rule.dst = '!' + rule.src
target = rule.create_target("SNAT")
target.to_source = snat_target
match = rule.create_match("comment")
match.comment = f'{int(round(time.time()))}'
return rule
def getSnat6Rule(self, snat_target, source):
rule = iptc.Rule6()
rule.src = source
rule.dst = '!' + rule.src
target = rule.create_target("SNAT")
target.to_source = snat_target
return rule
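# Usage sketch (assumes python-iptables/iptc is installed and the process has
# CAP_NET_ADMIN; chain name as wired up in main.py):
#   tables = IPTables("MAILCOW", logger)
#   tables.initChainIPv4()
#   tables.banIPv4('203.0.113.5/32')    # False if an identical rule exists
#   tables.unbanIPv4('203.0.113.5/32')  # False if no matching rule exists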

View File

@@ -0,0 +1,23 @@
import time
import json
class Logger:
def __init__(self, redis):
self.r = redis
def log(self, priority, message):
tolog = {}
tolog['time'] = int(round(time.time()))
tolog['priority'] = priority
tolog['message'] = message
self.r.lpush('NETFILTER_LOG', json.dumps(tolog, ensure_ascii=False))
print(message)
def logWarn(self, message):
self.log('warn', message)
def logCrit(self, message):
self.log('crit', message)
def logInfo(self, message):
self.log('info', message)
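# Usage sketch (assumes a connected redis client, as created in main.py):
#   logger = Logger(r)
#   logger.logInfo('hello')  # prints and LPUSHes a JSON entry to NETFILTER_LOG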

View File

@@ -0,0 +1,495 @@
import nftables
import ipaddress
class NFTables:
def __init__(self, chain_name, logger):
self.chain_name = chain_name
self.logger = logger
self.nft = nftables.Nftables()
self.nft.set_json_output(True)
self.nft.set_handle_output(True)
self.nft_chain_names = {'ip': {'filter': {'input': '', 'forward': ''}, 'nat': {'postrouting': ''} },
'ip6': {'filter': {'input': '', 'forward': ''}, 'nat': {'postrouting': ''} } }
self.search_current_chains()
def initChainIPv4(self):
self.insert_mailcow_chains("ip")
def initChainIPv6(self):
self.insert_mailcow_chains("ip6")
def checkIPv4ChainOrder(self):
return self.checkChainOrder("ip")
def checkIPv6ChainOrder(self):
return self.checkChainOrder("ip6")
def checkChainOrder(self, filter_table):
err = False
exit_code = None
for chain in ['input', 'forward']:
chain_position = self.check_mailcow_chains(filter_table, chain)
if chain_position is None: continue
if chain_position is False:
self.logger.logCrit(f'MAILCOW target not found in {filter_table} {chain} table, restarting container to fix it...')
err = True
exit_code = 2
if chain_position > 0:
self.logger.logCrit(f'MAILCOW target is in position {chain_position} in the {filter_table} {chain} table, restarting container to fix it...')
err = True
exit_code = 2
return err, exit_code
def clearIPv4Table(self):
self.clearTable("ip")
def clearIPv6Table(self):
self.clearTable("ip6")
def clearTable(self, _family):
is_empty_dict = True
json_command = self.get_base_dict()
chain_handle = self.get_chain_handle(_family, "filter", self.chain_name)
# if no handle, the chain doesn't exist
if chain_handle is not None:
is_empty_dict = False
# flush chain
mailcow_chain = {'family': _family, 'table': 'filter', 'name': self.chain_name}
flush_chain = {'flush': {'chain': mailcow_chain}}
json_command["nftables"].append(flush_chain)
# remove rule in forward chain
# remove rule in input chain
chains_family = [self.nft_chain_names[_family]['filter']['input'],
self.nft_chain_names[_family]['filter']['forward'] ]
for chain_base in chains_family:
if not chain_base: continue
rules_handle = self.get_rules_handle(_family, "filter", chain_base)
if rules_handle is not None:
for r_handle in rules_handle:
is_empty_dict = False
mailcow_rule = {'family':_family,
'table': 'filter',
'chain': chain_base,
'handle': r_handle }
delete_rules = {'delete': {'rule': mailcow_rule} }
json_command["nftables"].append(delete_rules)
# remove chain
# after delete all rules referencing this chain
if chain_handle is not None:
mc_chain_handle = {'family':_family,
'table': 'filter',
'name': self.chain_name,
'handle': chain_handle }
delete_chain = {'delete': {'chain': mc_chain_handle} }
json_command["nftables"].append(delete_chain)
if not is_empty_dict:
if self.nft_exec_dict(json_command):
self.logger.logInfo(f"Clear completed: {_family}")
def banIPv4(self, source):
ban_dict = self.get_ban_ip_dict(source, "ip")
return self.nft_exec_dict(ban_dict)
def banIPv6(self, source):
ban_dict = self.get_ban_ip_dict(source, "ip6")
return self.nft_exec_dict(ban_dict)
def unbanIPv4(self, source):
unban_dict = self.get_unban_ip_dict(source, "ip")
if not unban_dict:
return False
return self.nft_exec_dict(unban_dict)
def unbanIPv6(self, source):
unban_dict = self.get_unban_ip_dict(source, "ip6")
if not unban_dict:
return False
return self.nft_exec_dict(unban_dict)
def snat4(self, snat_target, source):
self.snat_rule("ip", snat_target, source)
def snat6(self, snat_target, source):
self.snat_rule("ip6", snat_target, source)
def nft_exec_dict(self, query: dict):
if not query: return False
rc, output, error = self.nft.json_cmd(query)
if rc != 0:
#self.logger.logCrit(f"Nftables Error: {error}")
return False
# Prevent returning False or empty string on commands that do not produce output
if rc == 0 and len(output) == 0:
return True
return output
def get_base_dict(self):
return {'nftables': [{ 'metainfo': { 'json_schema_version': 1} } ] }
def search_current_chains(self):
nft_chain_priority = {'ip': {'filter': {'input': None, 'forward': None}, 'nat': {'postrouting': None} },
'ip6': {'filter': {'input': None, 'forward': None}, 'nat': {'postrouting': None} } }
# Command: 'nft list chains'
_list = {'list' : {'chains': 'null'} }
command = self.get_base_dict()
command['nftables'].append(_list)
kernel_ruleset = self.nft_exec_dict(command)
if kernel_ruleset:
for _object in kernel_ruleset['nftables']:
chain = _object.get("chain")
if not chain: continue
_family = chain['family']
_table = chain['table']
_hook = chain.get("hook")
_priority = chain.get("prio")
_name = chain['name']
if _family not in self.nft_chain_names: continue
if _table not in self.nft_chain_names[_family]: continue
if _hook not in self.nft_chain_names[_family][_table]: continue
if _priority is None: continue
_saved_priority = nft_chain_priority[_family][_table][_hook]
if _saved_priority is None or _priority < _saved_priority:
# at this point, we know the chain has:
# hook and priority set
# and it has the lowest priority
nft_chain_priority[_family][_table][_hook] = _priority
self.nft_chain_names[_family][_table][_hook] = _name
def search_for_chain(self, kernel_ruleset: dict, chain_name: str):
found = False
for _object in kernel_ruleset["nftables"]:
chain = _object.get("chain")
if not chain:
continue
ch_name = chain.get("name")
if ch_name == chain_name:
found = True
break
return found
def get_chain_dict(self, _family: str, _name: str):
# nft (add | create) chain [<family>] <table> <name>
_chain_opts = {'family': _family, 'table': 'filter', 'name': _name }
_add = {'add': {'chain': _chain_opts} }
final_chain = self.get_base_dict()
final_chain["nftables"].append(_add)
return final_chain
def get_mailcow_jump_rule_dict(self, _family: str, _chain: str):
_jump_rule = self.get_base_dict()
_expr_opt=[]
_expr_counter = {'family': _family, 'table': 'filter', 'packets': 0, 'bytes': 0}
_counter_dict = {'counter': _expr_counter}
_expr_opt.append(_counter_dict)
_jump_opts = {'jump': {'target': self.chain_name} }
_expr_opt.append(_jump_opts)
_rule_params = {'family': _family,
'table': 'filter',
'chain': _chain,
'expr': _expr_opt,
'comment': "mailcow" }
_add_rule = {'insert': {'rule': _rule_params} }
_jump_rule["nftables"].append(_add_rule)
return _jump_rule
def insert_mailcow_chains(self, _family: str):
nft_input_chain = self.nft_chain_names[_family]['filter']['input']
nft_forward_chain = self.nft_chain_names[_family]['filter']['forward']
# Command: 'nft list table <family> filter'
_table_opts = {'family': _family, 'name': 'filter'}
_list = {'list': {'table': _table_opts} }
command = self.get_base_dict()
command['nftables'].append(_list)
kernel_ruleset = self.nft_exec_dict(command)
if kernel_ruleset:
# chain
if not self.search_for_chain(kernel_ruleset, self.chain_name):
cadena = self.get_chain_dict(_family, self.chain_name)
if self.nft_exec_dict(cadena):
self.logger.logInfo(f"MAILCOW {_family} chain created successfully.")
input_jump_found, forward_jump_found = False, False
for _object in kernel_ruleset["nftables"]:
if not _object.get("rule"):
continue
rule = _object["rule"]
if nft_input_chain and rule["chain"] == nft_input_chain:
if rule.get("comment") and rule["comment"] == "mailcow":
input_jump_found = True
if nft_forward_chain and rule["chain"] == nft_forward_chain:
if rule.get("comment") and rule["comment"] == "mailcow":
forward_jump_found = True
if not input_jump_found:
command = self.get_mailcow_jump_rule_dict(_family, nft_input_chain)
self.nft_exec_dict(command)
if not forward_jump_found:
command = self.get_mailcow_jump_rule_dict(_family, nft_forward_chain)
self.nft_exec_dict(command)
def delete_nat_rule(self, _family:str, _chain: str, _handle:str):
delete_command = self.get_base_dict()
_rule_opts = {'family': _family,
'table': 'nat',
'chain': _chain,
'handle': _handle }
_delete = {'delete': {'rule': _rule_opts} }
delete_command["nftables"].append(_delete)
return self.nft_exec_dict(delete_command)
def snat_rule(self, _family: str, snat_target: str, source_address: str):
chain_name = self.nft_chain_names[_family]['nat']['postrouting']
# no postrouting chain, may occur if docker has ipv6 disabled.
if not chain_name: return
# Command: nft list chain <family> nat <chain_name>
_chain_opts = {'family': _family, 'table': 'nat', 'name': chain_name}
_list = {'list':{'chain': _chain_opts} }
command = self.get_base_dict()
command['nftables'].append(_list)
kernel_ruleset = self.nft_exec_dict(command)
if not kernel_ruleset:
return
rule_position = 0
rule_handle = None
rule_found = False
for _object in kernel_ruleset["nftables"]:
if not _object.get("rule"):
continue
rule = _object["rule"]
if not rule.get("comment") or not rule["comment"] == "mailcow":
rule_position += 1
continue
rule_found = True
rule_handle = rule["handle"]
break
dest_net = ipaddress.ip_network(source_address)
target_net = ipaddress.ip_network(snat_target)
if rule_found:
saddr_ip = rule["expr"][0]["match"]["right"]["prefix"]["addr"]
saddr_len = int(rule["expr"][0]["match"]["right"]["prefix"]["len"])
daddr_ip = rule["expr"][1]["match"]["right"]["prefix"]["addr"]
daddr_len = int(rule["expr"][1]["match"]["right"]["prefix"]["len"])
target_ip = rule["expr"][3]["snat"]["addr"]
saddr_net = ipaddress.ip_network(saddr_ip + '/' + str(saddr_len))
daddr_net = ipaddress.ip_network(daddr_ip + '/' + str(daddr_len))
current_target_net = ipaddress.ip_network(target_ip)
match = all((
dest_net == saddr_net,
dest_net == daddr_net,
target_net == current_target_net
))
try:
if rule_position == 0:
if not match:
# Position 0 , it is a mailcow rule , but it does not have the same parameters
if self.delete_nat_rule(_family, chain_name, rule_handle):
self.logger.logInfo(f'Remove rule for source network {saddr_net} to SNAT target {target_net} from {_family} nat {chain_name} chain, rule does not match configured parameters')
else:
# Position > 0 and is mailcow rule
if self.delete_nat_rule(_family, chain_name, rule_handle):
self.logger.logInfo(f'Remove rule for source network {saddr_net} to SNAT target {target_net} from {_family} nat {chain_name} chain, rule is at position {rule_position}')
except:
self.logger.logCrit(f"Error running SNAT on {_family}, retrying..." )
else:
# rule not found
json_command = self.get_base_dict()
try:
snat_dict = {'snat': {'addr': str(target_net.network_address)} }
expr_counter = {'family': _family, 'table': 'nat', 'packets': 0, 'bytes': 0}
counter_dict = {'counter': expr_counter}
prefix_dict = {'prefix': {'addr': str(dest_net.network_address), 'len': int(dest_net.prefixlen)} }
payload_dict = {'payload': {'protocol': _family, 'field': "saddr"} }
match_dict1 = {'match': {'op': '==', 'left': payload_dict, 'right': prefix_dict} }
payload_dict2 = {'payload': {'protocol': _family, 'field': "daddr"} }
match_dict2 = {'match': {'op': '!=', 'left': payload_dict2, 'right': prefix_dict } }
expr_list = [
match_dict1,
match_dict2,
counter_dict,
snat_dict
]
rule_fields = {'family': _family,
'table': 'nat',
'chain': chain_name,
'comment': "mailcow",
'expr': expr_list }
insert_dict = {'insert': {'rule': rule_fields} }
json_command["nftables"].append(insert_dict)
if self.nft_exec_dict(json_command):
self.logger.logInfo(f'Added {_family} nat {chain_name} rule for source network {dest_net} to {target_net}')
except:
self.logger.logCrit(f"Error running SNAT on {_family}, retrying...")
def get_chain_handle(self, _family: str, _table: str, chain_name: str):
chain_handle = None
# Command: 'nft list chains {family}'
_list = {'list': {'chains': {'family': _family} } }
command = self.get_base_dict()
command['nftables'].append(_list)
kernel_ruleset = self.nft_exec_dict(command)
if kernel_ruleset:
for _object in kernel_ruleset["nftables"]:
if not _object.get("chain"):
continue
chain = _object["chain"]
if chain["family"] == _family and chain["table"] == _table and chain["name"] == chain_name:
chain_handle = chain["handle"]
break
return chain_handle
def get_rules_handle(self, _family: str, _table: str, chain_name: str):
rule_handle = []
# Command: 'nft list chain {family} {table} {chain_name}'
_chain_opts = {'family': _family, 'table': _table, 'name': chain_name}
_list = {'list': {'chain': _chain_opts} }
command = self.get_base_dict()
command['nftables'].append(_list)
kernel_ruleset = self.nft_exec_dict(command)
if kernel_ruleset:
for _object in kernel_ruleset["nftables"]:
if not _object.get("rule"):
continue
rule = _object["rule"]
if rule["family"] == _family and rule["table"] == _table and rule["chain"] == chain_name:
if rule.get("comment") and rule["comment"] == "mailcow":
rule_handle.append(rule["handle"])
return rule_handle
def get_ban_ip_dict(self, ipaddr: str, _family: str):
json_command = self.get_base_dict()
expr_opt = []
ipaddr_net = ipaddress.ip_network(ipaddr)
right_dict = {'prefix': {'addr': str(ipaddr_net.network_address), 'len': int(ipaddr_net.prefixlen) } }
left_dict = {'payload': {'protocol': _family, 'field': 'saddr'} }
match_dict = {'op': '==', 'left': left_dict, 'right': right_dict }
expr_opt.append({'match': match_dict})
counter_dict = {'counter': {'family': _family, 'table': "filter", 'packets': 0, 'bytes': 0} }
expr_opt.append(counter_dict)
expr_opt.append({'drop': "null"})
rule_dict = {'family': _family, 'table': "filter", 'chain': self.chain_name, 'expr': expr_opt}
base_dict = {'insert': {'rule': rule_dict} }
json_command["nftables"].append(base_dict)
return json_command
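# For get_ban_ip_dict('203.0.113.5/32', 'ip') the composed command is,
# schematically (derived from the code above, metainfo and counter trimmed):
#   {'nftables': [{'metainfo': {...}},
#     {'insert': {'rule': {'family': 'ip', 'table': 'filter',
#        'chain': self.chain_name,
#        'expr': [{'match': {'op': '==',
#                   'left': {'payload': {'protocol': 'ip', 'field': 'saddr'}},
#                   'right': {'prefix': {'addr': '203.0.113.5', 'len': 32}}}},
#                 {'counter': {...}},
#                 {'drop': 'null'}]}}}]}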
def get_unban_ip_dict(self, ipaddr:str, _family: str):
json_command = self.get_base_dict()
# Command: 'nft list chain {s_family} filter MAILCOW'
_chain_opts = {'family': _family, 'table': 'filter', 'name': self.chain_name}
_list = {'list': {'chain': _chain_opts} }
command = self.get_base_dict()
command['nftables'].append(_list)
kernel_ruleset = self.nft_exec_dict(command)
rule_handle = None
if kernel_ruleset:
for _object in kernel_ruleset["nftables"]:
if not _object.get("rule"):
continue
rule = _object["rule"]["expr"][0]["match"]
left_opt = rule["left"]["payload"]
if not left_opt["protocol"] == _family:
continue
if not left_opt["field"] =="saddr":
continue
# ip currently banned
rule_right = rule["right"]
if isinstance(rule_right, dict):
current_rule_ip = rule_right["prefix"]["addr"] + '/' + str(rule_right["prefix"]["len"])
else:
current_rule_ip = rule_right
current_rule_net = ipaddress.ip_network(current_rule_ip)
# ip to ban
candidate_net = ipaddress.ip_network(ipaddr)
if current_rule_net == candidate_net:
rule_handle = _object["rule"]["handle"]
break
if rule_handle is not None:
mailcow_rule = {'family': _family, 'table': 'filter', 'chain': self.chain_name, 'handle': rule_handle}
delete_rule = {'delete': {'rule': mailcow_rule} }
json_command["nftables"].append(delete_rule)
else:
return False
return json_command
def check_mailcow_chains(self, family: str, chain: str):
position = 0
rule_found = False
chain_name = self.nft_chain_names[family]['filter'][chain]
if not chain_name: return None
_chain_opts = {'family': family, 'table': 'filter', 'name': chain_name}
_list = {'list': {'chain': _chain_opts}}
command = self.get_base_dict()
command['nftables'].append(_list)
kernel_ruleset = self.nft_exec_dict(command)
if kernel_ruleset:
for _object in kernel_ruleset["nftables"]:
if not _object.get("rule"):
continue
rule = _object["rule"]
if rule.get("comment") and rule["comment"] == "mailcow":
rule_found = True
break
position += 1
return position if rule_found else False
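# Usage sketch (assumes the py3-nftables bindings and a kernel with nf_tables;
# chain name as wired up in main.py):
#   tables = NFTables("MAILCOW", logger)
#   tables.initChainIPv4()
#   tables.banIPv4('203.0.113.5/32')
#   tables.unbanIPv4('203.0.113.5/32')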

View File

@@ -1,587 +0,0 @@
#!/usr/bin/env python3
import re
import os
import sys
import time
import atexit
import signal
import ipaddress
from collections import Counter
from random import randint
from threading import Thread
from threading import Lock
import redis
import json
import iptc
import dns.resolver
import dns.exception
while True:
try:
redis_slaveof_ip = os.getenv('REDIS_SLAVEOF_IP', '')
redis_slaveof_port = os.getenv('REDIS_SLAVEOF_PORT', '')
if "".__eq__(redis_slaveof_ip):
r = redis.StrictRedis(host=os.getenv('IPV4_NETWORK', '172.22.1') + '.249', decode_responses=True, port=6379, db=0)
else:
r = redis.StrictRedis(host=redis_slaveof_ip, decode_responses=True, port=redis_slaveof_port, db=0)
r.ping()
except Exception as ex:
print('%s - trying again in 3 seconds' % (ex))
time.sleep(3)
else:
break
pubsub = r.pubsub()
WHITELIST = []
BLACKLIST= []
bans = {}
quit_now = False
exit_code = 0
lock = Lock()
def log(priority, message):
tolog = {}
tolog['time'] = int(round(time.time()))
tolog['priority'] = priority
tolog['message'] = message
r.lpush('NETFILTER_LOG', json.dumps(tolog, ensure_ascii=False))
print(message)
def logWarn(message):
log('warn', message)
def logCrit(message):
log('crit', message)
def logInfo(message):
log('info', message)
def refreshF2boptions():
global f2boptions
global quit_now
global exit_code
if not r.get('F2B_OPTIONS'):
f2boptions = {}
f2boptions['ban_time'] = int
f2boptions['max_attempts'] = int
f2boptions['retry_window'] = int
f2boptions['netban_ipv4'] = int
f2boptions['netban_ipv6'] = int
f2boptions['ban_time'] = r.get('F2B_BAN_TIME') or 1800
f2boptions['max_attempts'] = r.get('F2B_MAX_ATTEMPTS') or 10
f2boptions['retry_window'] = r.get('F2B_RETRY_WINDOW') or 600
f2boptions['netban_ipv4'] = r.get('F2B_NETBAN_IPV4') or 32
f2boptions['netban_ipv6'] = r.get('F2B_NETBAN_IPV6') or 128
r.set('F2B_OPTIONS', json.dumps(f2boptions, ensure_ascii=False))
else:
try:
f2boptions = {}
f2boptions = json.loads(r.get('F2B_OPTIONS'))
except ValueError:
print('Error loading F2B options: F2B_OPTIONS is not json')
quit_now = True
exit_code = 2
def refreshF2bregex():
global f2bregex
global quit_now
global exit_code
if not r.get('F2B_REGEX'):
f2bregex = {}
f2bregex[1] = 'mailcow UI: Invalid password for .+ by ([0-9a-f\.:]+)'
f2bregex[2] = 'Rspamd UI: Invalid password by ([0-9a-f\.:]+)'
f2bregex[3] = 'warning: .*\[([0-9a-f\.:]+)\]: SASL .+ authentication failed: (?!.*Connection lost to authentication server).+'
f2bregex[4] = 'warning: non-SMTP command from .*\[([0-9a-f\.:]+)]:.+'
f2bregex[5] = 'NOQUEUE: reject: RCPT from \[([0-9a-f\.:]+)].+Protocol error.+'
f2bregex[6] = '-login: Disconnected.+ \(auth failed, .+\): user=.*, method=.+, rip=([0-9a-f\.:]+),'
f2bregex[7] = '-login: Aborted login.+ \(auth failed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+'
f2bregex[8] = '-login: Aborted login.+ \(tried to use disallowed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+'
f2bregex[9] = 'SOGo.+ Login from \'([0-9a-f\.:]+)\' for user .+ might not have worked'
f2bregex[10] = '([0-9a-f\.:]+) \"GET \/SOGo\/.* HTTP.+\" 403 .+'
r.set('F2B_REGEX', json.dumps(f2bregex, ensure_ascii=False))
else:
try:
f2bregex = {}
f2bregex = json.loads(r.get('F2B_REGEX'))
except ValueError:
print('Error loading F2B options: F2B_REGEX is not json')
quit_now = True
exit_code = 2
if r.exists('F2B_LOG'):
r.rename('F2B_LOG', 'NETFILTER_LOG')
def mailcowChainOrder():
global lock
global quit_now
global exit_code
while not quit_now:
time.sleep(10)
with lock:
filter4_table = iptc.Table(iptc.Table.FILTER)
filter6_table = iptc.Table6(iptc.Table6.FILTER)
filter4_table.refresh()
filter6_table.refresh()
for f in [filter4_table, filter6_table]:
forward_chain = iptc.Chain(f, 'FORWARD')
input_chain = iptc.Chain(f, 'INPUT')
for chain in [forward_chain, input_chain]:
target_found = False
for position, item in enumerate(chain.rules):
if item.target.name == 'MAILCOW':
target_found = True
if position > 2:
logCrit('Error in %s chain order: MAILCOW on position %d, restarting container' % (chain.name, position))
quit_now = True
exit_code = 2
if not target_found:
logCrit('Error in %s chain: MAILCOW target not found, restarting container' % (chain.name))
quit_now = True
exit_code = 2
def ban(address):
global lock
refreshF2boptions()
BAN_TIME = int(f2boptions['ban_time'])
MAX_ATTEMPTS = int(f2boptions['max_attempts'])
RETRY_WINDOW = int(f2boptions['retry_window'])
NETBAN_IPV4 = '/' + str(f2boptions['netban_ipv4'])
NETBAN_IPV6 = '/' + str(f2boptions['netban_ipv6'])
ip = ipaddress.ip_address(address)
if type(ip) is ipaddress.IPv6Address and ip.ipv4_mapped:
ip = ip.ipv4_mapped
address = str(ip)
if ip.is_private or ip.is_loopback:
return
self_network = ipaddress.ip_network(address)
with lock:
temp_whitelist = set(WHITELIST)
if temp_whitelist:
for wl_key in temp_whitelist:
wl_net = ipaddress.ip_network(wl_key, False)
if wl_net.overlaps(self_network):
logInfo('Address %s is whitelisted by rule %s' % (self_network, wl_net))
return
net = ipaddress.ip_network((address + (NETBAN_IPV4 if type(ip) is ipaddress.IPv4Address else NETBAN_IPV6)), strict=False)
net = str(net)
if not net in bans or time.time() - bans[net]['last_attempt'] > RETRY_WINDOW:
bans[net] = { 'attempts': 0 }
active_window = RETRY_WINDOW
else:
active_window = time.time() - bans[net]['last_attempt']
bans[net]['attempts'] += 1
bans[net]['last_attempt'] = time.time()
active_window = time.time() - bans[net]['last_attempt']
if bans[net]['attempts'] >= MAX_ATTEMPTS:
cur_time = int(round(time.time()))
logCrit('Banning %s for %d minutes' % (net, BAN_TIME / 60))
if type(ip) is ipaddress.IPv4Address:
with lock:
chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW')
rule = iptc.Rule()
rule.src = net
target = iptc.Target(rule, "REJECT")
rule.target = target
if rule not in chain.rules:
chain.insert_rule(rule)
else:
with lock:
chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), 'MAILCOW')
rule = iptc.Rule6()
rule.src = net
target = iptc.Target(rule, "REJECT")
rule.target = target
if rule not in chain.rules:
chain.insert_rule(rule)
r.hset('F2B_ACTIVE_BANS', '%s' % net, cur_time + BAN_TIME)
else:
logWarn('%d more attempts in the next %d seconds until %s is banned' % (MAX_ATTEMPTS - bans[net]['attempts'], RETRY_WINDOW, net))
def unban(net):
global lock
if not net in bans:
logInfo('%s is not banned, skipping unban and deleting from queue (if any)' % net)
r.hdel('F2B_QUEUE_UNBAN', '%s' % net)
return
logInfo('Unbanning %s' % net)
if type(ipaddress.ip_network(net)) is ipaddress.IPv4Network:
with lock:
chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW')
rule = iptc.Rule()
rule.src = net
target = iptc.Target(rule, "REJECT")
rule.target = target
if rule in chain.rules:
chain.delete_rule(rule)
else:
with lock:
chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), 'MAILCOW')
rule = iptc.Rule6()
rule.src = net
target = iptc.Target(rule, "REJECT")
rule.target = target
if rule in chain.rules:
chain.delete_rule(rule)
r.hdel('F2B_ACTIVE_BANS', '%s' % net)
r.hdel('F2B_QUEUE_UNBAN', '%s' % net)
if net in bans:
del bans[net]
def permBan(net, unban=False):
global lock
if type(ipaddress.ip_network(net, strict=False)) is ipaddress.IPv4Network:
with lock:
chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW')
rule = iptc.Rule()
rule.src = net
target = iptc.Target(rule, "REJECT")
rule.target = target
if rule not in chain.rules and not unban:
logCrit('Add host/network %s to blacklist' % net)
chain.insert_rule(rule)
r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time())))
elif rule in chain.rules and unban:
logCrit('Remove host/network %s from blacklist' % net)
chain.delete_rule(rule)
r.hdel('F2B_PERM_BANS', '%s' % net)
else:
with lock:
chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), 'MAILCOW')
rule = iptc.Rule6()
rule.src = net
target = iptc.Target(rule, "REJECT")
rule.target = target
if rule not in chain.rules and not unban:
logCrit('Add host/network %s to blacklist' % net)
chain.insert_rule(rule)
r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time())))
elif rule in chain.rules and unban:
logCrit('Remove host/network %s from blacklist' % net)
chain.delete_rule(rule)
r.hdel('F2B_PERM_BANS', '%s' % net)
def quit(signum, frame):
global quit_now
quit_now = True
def clear():
global lock
logInfo('Clearing all bans')
for net in bans.copy():
unban(net)
with lock:
filter4_table = iptc.Table(iptc.Table.FILTER)
filter6_table = iptc.Table6(iptc.Table6.FILTER)
for filter_table in [filter4_table, filter6_table]:
filter_table.autocommit = False
forward_chain = iptc.Chain(filter_table, "FORWARD")
input_chain = iptc.Chain(filter_table, "INPUT")
mailcow_chain = iptc.Chain(filter_table, "MAILCOW")
if mailcow_chain in filter_table.chains:
for rule in mailcow_chain.rules:
mailcow_chain.delete_rule(rule)
for rule in forward_chain.rules:
if rule.target.name == 'MAILCOW':
forward_chain.delete_rule(rule)
for rule in input_chain.rules:
if rule.target.name == 'MAILCOW':
input_chain.delete_rule(rule)
filter_table.delete_chain("MAILCOW")
filter_table.commit()
filter_table.refresh()
filter_table.autocommit = True
r.delete('F2B_ACTIVE_BANS')
r.delete('F2B_PERM_BANS')
pubsub.unsubscribe()
def watch():
logInfo('Watching Redis channel F2B_CHANNEL')
pubsub.subscribe('F2B_CHANNEL')
global quit_now
global exit_code
while not quit_now:
try:
for item in pubsub.listen():
refreshF2bregex()
for rule_id, rule_regex in f2bregex.items():
if item['data'] and item['type'] == 'message':
try:
result = re.search(rule_regex, item['data'])
except re.error:
result = False
if result:
addr = result.group(1)
ip = ipaddress.ip_address(addr)
if ip.is_private or ip.is_loopback:
continue
logWarn('%s matched rule id %s (%s)' % (addr, rule_id, item['data']))
ban(addr)
    except Exception as ex:
      logWarn('Error reading log line from pubsub: %s' % ex)
quit_now = True
exit_code = 2
def snat4(snat_target):
global lock
global quit_now
def get_snat4_rule():
rule = iptc.Rule()
rule.src = os.getenv('IPV4_NETWORK', '172.22.1') + '.0/24'
rule.dst = '!' + rule.src
target = rule.create_target("SNAT")
target.to_source = snat_target
match = rule.create_match("comment")
match.comment = f'{int(round(time.time()))}'
return rule
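  # Maintenance sketch: the loop below keeps exactly one copy of this rule at the
  # top of POSTROUTING. If position 0 does not hold it, it is (re)inserted there;
  # a matching rule found further down is treated as a stray duplicate and removed.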
while not quit_now:
time.sleep(10)
with lock:
try:
table = iptc.Table('nat')
table.refresh()
chain = iptc.Chain(table, 'POSTROUTING')
table.autocommit = False
new_rule = get_snat4_rule()
for position, rule in enumerate(chain.rules):
match = all((
new_rule.get_src() == rule.get_src(),
new_rule.get_dst() == rule.get_dst(),
new_rule.target.parameters == rule.target.parameters,
new_rule.target.name == rule.target.name
))
if position == 0:
if not match:
logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
chain.insert_rule(new_rule)
else:
if match:
logInfo(f'Remove rule for source network {new_rule.src} to SNAT target {snat_target} from POSTROUTING chain at position {position}')
chain.delete_rule(rule)
table.commit()
table.autocommit = True
      except Exception as ex:
        print('Error running SNAT4, retrying... (%s)' % ex)
def snat6(snat_target):
global lock
global quit_now
def get_snat6_rule():
rule = iptc.Rule6()
rule.src = os.getenv('IPV6_NETWORK', 'fd4d:6169:6c63:6f77::/64')
rule.dst = '!' + rule.src
target = rule.create_target("SNAT")
target.to_source = snat_target
return rule
while not quit_now:
time.sleep(10)
with lock:
try:
table = iptc.Table6('nat')
table.refresh()
chain = iptc.Chain(table, 'POSTROUTING')
table.autocommit = False
if get_snat6_rule() not in chain.rules:
logInfo('Added POSTROUTING rule for source network %s to SNAT target %s' % (get_snat6_rule().src, snat_target))
chain.insert_rule(get_snat6_rule())
table.commit()
else:
for position, item in enumerate(chain.rules):
if item == get_snat6_rule():
if position != 0:
chain.delete_rule(get_snat6_rule())
table.commit()
table.autocommit = True
      except Exception as ex:
        print('Error running SNAT6, retrying... (%s)' % ex)
def autopurge():
while not quit_now:
time.sleep(10)
refreshF2boptions()
BAN_TIME = int(f2boptions['ban_time'])
MAX_ATTEMPTS = int(f2boptions['max_attempts'])
QUEUE_UNBAN = r.hgetall('F2B_QUEUE_UNBAN')
if QUEUE_UNBAN:
for net in QUEUE_UNBAN:
unban(str(net))
for net in bans.copy():
if bans[net]['attempts'] >= MAX_ATTEMPTS:
if time.time() - bans[net]['last_attempt'] > BAN_TIME:
unban(net)
def isIpNetwork(address):
try:
ipaddress.ip_network(address, False)
except ValueError:
return False
return True
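# Examples (sketch): isIpNetwork('203.0.113.0/24') and isIpNetwork('2001:db8::1')
# return True, while a hostname such as 'mail.example.org' raises ValueError
# inside ip_network() and therefore returns False.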
def genNetworkList(items):
  resolver = dns.resolver.Resolver()
  hostnames = []
  networks = []
  for key in items:
if isIpNetwork(key):
networks.append(key)
else:
hostnames.append(key)
for hostname in hostnames:
hostname_ips = []
for rdtype in ['A', 'AAAA']:
try:
answer = resolver.resolve(qname=hostname, rdtype=rdtype, lifetime=3)
except dns.exception.Timeout:
        logInfo('Hostname %s timed out on resolve' % hostname)
break
except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer):
continue
except dns.exception.DNSException as dnsexception:
logInfo('%s' % dnsexception)
continue
for rdata in answer:
hostname_ips.append(rdata.to_text())
networks.extend(hostname_ips)
return set(networks)
def whitelistUpdate():
global lock
global quit_now
global WHITELIST
while not quit_now:
start_time = time.time()
    entries = r.hgetall('F2B_WHITELIST')
    new_whitelist = []
    if entries:
      new_whitelist = genNetworkList(entries)
with lock:
if Counter(new_whitelist) != Counter(WHITELIST):
WHITELIST = new_whitelist
logInfo('Whitelist was changed, it has %s entries' % len(WHITELIST))
time.sleep(60.0 - ((time.time() - start_time) % 60.0))
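# Timing sketch: the modulo keeps the loop on a steady 60-second grid, e.g. if a
# pass took 2.3 s the thread sleeps 57.7 s, so list refreshes do not drift.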
def blacklistUpdate():
global quit_now
global BLACKLIST
while not quit_now:
start_time = time.time()
    entries = r.hgetall('F2B_BLACKLIST')
    new_blacklist = []
    if entries:
      new_blacklist = genNetworkList(entries)
if Counter(new_blacklist) != Counter(BLACKLIST):
addban = set(new_blacklist).difference(BLACKLIST)
delban = set(BLACKLIST).difference(new_blacklist)
BLACKLIST = new_blacklist
logInfo('Blacklist was changed, it has %s entries' % len(BLACKLIST))
if addban:
for net in addban:
permBan(net=net)
if delban:
for net in delban:
permBan(net=net, unban=True)
time.sleep(60.0 - ((time.time() - start_time) % 60.0))
def initChain():
# Is called before threads start, no locking
print("Initializing mailcow netfilter chain")
# IPv4
if not iptc.Chain(iptc.Table(iptc.Table.FILTER), "MAILCOW") in iptc.Table(iptc.Table.FILTER).chains:
iptc.Table(iptc.Table.FILTER).create_chain("MAILCOW")
for c in ['FORWARD', 'INPUT']:
chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), c)
rule = iptc.Rule()
rule.src = '0.0.0.0/0'
rule.dst = '0.0.0.0/0'
target = iptc.Target(rule, "MAILCOW")
rule.target = target
if rule not in chain.rules:
chain.insert_rule(rule)
# IPv6
if not iptc.Chain(iptc.Table6(iptc.Table6.FILTER), "MAILCOW") in iptc.Table6(iptc.Table6.FILTER).chains:
iptc.Table6(iptc.Table6.FILTER).create_chain("MAILCOW")
for c in ['FORWARD', 'INPUT']:
chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), c)
rule = iptc.Rule6()
rule.src = '::/0'
rule.dst = '::/0'
target = iptc.Target(rule, "MAILCOW")
rule.target = target
if rule not in chain.rules:
chain.insert_rule(rule)
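# Resulting layout (sketch of `iptables -L`/`ip6tables -L` after init): INPUT and
# FORWARD each jump to the MAILCOW chain, which in turn collects the per-network
# REJECT rules maintained by ban(), unban() and permBan() above.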
if __name__ == '__main__':
# In case a previous session was killed without cleanup
clear()
# Reinit MAILCOW chain
initChain()
watch_thread = Thread(target=watch)
watch_thread.daemon = True
watch_thread.start()
if os.getenv('SNAT_TO_SOURCE') and os.getenv('SNAT_TO_SOURCE') != 'n':
try:
snat_ip = os.getenv('SNAT_TO_SOURCE')
snat_ipo = ipaddress.ip_address(snat_ip)
if type(snat_ipo) is ipaddress.IPv4Address:
snat4_thread = Thread(target=snat4,args=(snat_ip,))
snat4_thread.daemon = True
snat4_thread.start()
except ValueError:
print(os.getenv('SNAT_TO_SOURCE') + ' is not a valid IPv4 address')
if os.getenv('SNAT6_TO_SOURCE') and os.getenv('SNAT6_TO_SOURCE') != 'n':
try:
snat_ip = os.getenv('SNAT6_TO_SOURCE')
snat_ipo = ipaddress.ip_address(snat_ip)
if type(snat_ipo) is ipaddress.IPv6Address:
snat6_thread = Thread(target=snat6,args=(snat_ip,))
snat6_thread.daemon = True
snat6_thread.start()
except ValueError:
print(os.getenv('SNAT6_TO_SOURCE') + ' is not a valid IPv6 address')
autopurge_thread = Thread(target=autopurge)
autopurge_thread.daemon = True
autopurge_thread.start()
mailcowchainwatch_thread = Thread(target=mailcowChainOrder)
mailcowchainwatch_thread.daemon = True
mailcowchainwatch_thread.start()
blacklistupdate_thread = Thread(target=blacklistUpdate)
blacklistupdate_thread.daemon = True
blacklistupdate_thread.start()
whitelistupdate_thread = Thread(target=whitelistUpdate)
whitelistupdate_thread.daemon = True
whitelistupdate_thread.start()
signal.signal(signal.SIGTERM, quit)
atexit.register(clear)
while not quit_now:
time.sleep(0.5)
sys.exit(exit_code)
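# Usage sketch (hypothetical log line and rule): watch() applies the regexes
# loaded by refreshF2bregex() to everything published on the Redis channel, so a
# ban can be exercised from another container with e.g.:
#   redis-cli -h redis-mailcow PUBLISH F2B_CHANNEL \
#     'mail UI: Invalid password for user@example.org by 198.51.100.23'
# provided one of the configured rule regexes captures the offending IP in group 1.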

View File

@ -1,5 +1,5 @@
FROM alpine:3.17
LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
WORKDIR /app

View File

@ -1,12 +1,18 @@
FROM php:8.1-fpm-alpine3.17
LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
FROM php:8.2-fpm-alpine3.17
LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
ENV APCU_PECL 5.1.22
ENV IMAGICK_PECL 3.7.0
ENV MAILPARSE_PECL 3.1.4
ENV MEMCACHED_PECL 3.2.0
ENV REDIS_PECL 5.3.7
ENV COMPOSER 2.4.4
# renovate: datasource=github-tags depName=krakjoe/apcu versioning=semver-coerced extractVersion=^v(?<version>.*)$
ARG APCU_PECL_VERSION=5.1.22
# renovate: datasource=github-tags depName=Imagick/imagick versioning=semver-coerced extractVersion=(?<version>.*)$
ARG IMAGICK_PECL_VERSION=3.7.0
# renovate: datasource=github-tags depName=php/pecl-mail-mailparse versioning=semver-coerced extractVersion=^v(?<version>.*)$
ARG MAILPARSE_PECL_VERSION=3.1.6
# renovate: datasource=github-tags depName=php-memcached-dev/php-memcached versioning=semver-coerced extractVersion=^v(?<version>.*)$
ARG MEMCACHED_PECL_VERSION=3.2.0
# renovate: datasource=github-tags depName=phpredis/phpredis versioning=semver-coerced extractVersion=(?<version>.*)$
ARG REDIS_PECL_VERSION=6.0.1
# renovate: datasource=github-tags depName=composer/composer versioning=semver-coerced extractVersion=(?<version>.*)$
ARG COMPOSER_VERSION=2.6.5
RUN apk add -U --no-cache autoconf \
aspell-dev \
@ -46,6 +52,7 @@ RUN apk add -U --no-cache autoconf \
libxpm-dev \
libzip \
libzip-dev \
linux-headers \
make \
mysql-client \
openldap-dev \
@ -55,11 +62,11 @@ RUN apk add -U --no-cache autoconf \
samba-client \
zlib-dev \
tzdata \
&& pecl install mailparse-${MAILPARSE_PECL} \
&& pecl install redis-${REDIS_PECL} \
&& pecl install memcached-${MEMCACHED_PECL} \
&& pecl install APCu-${APCU_PECL} \
&& pecl install imagick-${IMAGICK_PECL} \
&& pecl install APCu-${APCU_PECL_VERSION} \
&& pecl install imagick-${IMAGICK_PECL_VERSION} \
&& pecl install mailparse-${MAILPARSE_PECL_VERSION} \
&& pecl install memcached-${MEMCACHED_PECL_VERSION} \
&& pecl install redis-${REDIS_PECL_VERSION} \
&& docker-php-ext-enable apcu imagick memcached mailparse redis \
&& pecl clear-cache \
&& docker-php-ext-configure intl \
@ -69,10 +76,10 @@ RUN apk add -U --no-cache autoconf \
--with-webp \
--with-xpm \
--with-avif \
&& docker-php-ext-install -j 4 exif gd gettext intl ldap opcache pcntl pdo pdo_mysql pspell soap sockets zip bcmath gmp \
&& docker-php-ext-install -j 4 exif gd gettext intl ldap opcache pcntl pdo pdo_mysql pspell soap sockets sysvsem zip bcmath gmp \
&& docker-php-ext-configure imap --with-imap --with-imap-ssl \
&& docker-php-ext-install -j 4 imap \
&& curl --silent --show-error https://getcomposer.org/installer | php -- --version=${COMPOSER} \
&& curl --silent --show-error https://getcomposer.org/installer | php -- --version=${COMPOSER_VERSION} \
&& mv composer.phar /usr/local/bin/composer \
&& chmod +x /usr/local/bin/composer \
&& apk del --purge autoconf \
@ -93,6 +100,7 @@ RUN apk add -U --no-cache autoconf \
libxml2-dev \
libxpm-dev \
libzip-dev \
linux-headers \
make \
openldap-dev \
pcre-dev \

View File

@ -172,6 +172,24 @@ BEGIN
END;
//
DELIMITER ;
DROP EVENT IF EXISTS clean_sasl_log;
DELIMITER //
CREATE EVENT clean_sasl_log
ON SCHEDULE EVERY 1 DAY DO
BEGIN
DELETE sasl_log.* FROM sasl_log
LEFT JOIN (
SELECT username, service, MAX(datetime) AS lastdate
FROM sasl_log
GROUP BY username, service
) AS last ON sasl_log.username = last.username AND sasl_log.service = last.service
WHERE datetime < DATE_SUB(NOW(), INTERVAL 31 DAY) AND datetime < lastdate;
DELETE FROM sasl_log
WHERE username NOT IN (SELECT username FROM mailbox) AND
datetime < DATE_SUB(NOW(), INTERVAL 31 DAY);
END;
//
DELIMITER ;
EOF
fi

View File

@ -1,5 +1,5 @@
FROM debian:bullseye-slim
LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
ARG DEBIAN_FRONTEND=noninteractive
ENV LC_ALL C
@ -17,10 +17,10 @@ RUN groupadd -g 102 postfix \
ca-certificates \
curl \
dirmngr \
dnsutils \
dnsutils \
gnupg \
libsasl2-modules \
mariadb-client \
mariadb-client \
perl \
postfix \
postfix-mysql \
@ -32,7 +32,7 @@ RUN groupadd -g 102 postfix \
syslog-ng \
syslog-ng-core \
syslog-ng-mod-redis \
tzdata \
tzdata \
&& rm -rf /var/lib/apt/lists/* \
&& touch /etc/default/locale \
&& printf '#!/bin/bash\n/usr/sbin/postconf -c /opt/postfix/conf "$@"' > /usr/local/sbin/postconf \

View File

@ -393,12 +393,101 @@ query = SELECT goto FROM spamalias
AND validity >= UNIX_TIMESTAMP()
EOF
sed -i '/User overrides/q' /opt/postfix/conf/main.cf
echo >> /opt/postfix/conf/main.cf
touch /opt/postfix/conf/extra.cf
sed -i '/myhostname/d' /opt/postfix/conf/extra.cf
echo -e "myhostname = ${MAILCOW_HOSTNAME}\n$(cat /opt/postfix/conf/extra.cf)" > /opt/postfix/conf/extra.cf
if [ ! -f /opt/postfix/conf/dns_blocklists.cf ]; then
cat <<EOF > /opt/postfix/conf/dns_blocklists.cf
# This file can be edited.
# Delete this file and restart postfix container to revert any changes.
postscreen_dnsbl_sites = wl.mailspike.net=127.0.0.[18;19;20]*-2
hostkarma.junkemailfilter.com=127.0.0.1*-2
list.dnswl.org=127.0.[0..255].0*-2
list.dnswl.org=127.0.[0..255].1*-4
list.dnswl.org=127.0.[0..255].2*-6
list.dnswl.org=127.0.[0..255].3*-8
ix.dnsbl.manitu.net*2
bl.spamcop.net*2
bl.suomispam.net*2
hostkarma.junkemailfilter.com=127.0.0.2*3
hostkarma.junkemailfilter.com=127.0.0.4*2
hostkarma.junkemailfilter.com=127.0.1.2*1
backscatter.spameatingmonkey.net*2
bl.ipv6.spameatingmonkey.net*2
bl.spameatingmonkey.net*2
b.barracudacentral.org=127.0.0.2*7
bl.mailspike.net=127.0.0.2*5
bl.mailspike.net=127.0.0.[10;11;12]*4
dnsbl.sorbs.net=127.0.0.10*8
dnsbl.sorbs.net=127.0.0.5*6
dnsbl.sorbs.net=127.0.0.7*3
dnsbl.sorbs.net=127.0.0.8*2
dnsbl.sorbs.net=127.0.0.6*2
dnsbl.sorbs.net=127.0.0.9*2
EOF
fi
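# Note on the syntax above (sketch): each postscreen entry uses Postfix's
# "domain[=d.d.d.d]*weight" form. The weights of all matching lists are summed
# and compared against postscreen_dnsbl_threshold (6 in this setup's main.cf),
# so negative weights from allowlists such as list.dnswl.org offset blocklist hits.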
DNSBL_CONFIG=$(grep -v '^#' /opt/postfix/conf/dns_blocklists.cf | grep '\S')
if [ ! -z "$DNSBL_CONFIG" ]; then
echo -e "\e[33mChecking if ASN for your IP is listed for Spamhaus Bad ASN List...\e[0m"
if [ -n "$SPAMHAUS_DQS_KEY" ]; then
echo -e "\e[32mDetected SPAMHAUS_DQS_KEY variable from mailcow.conf...\e[0m"
echo -e "\e[33mUsing DQS Blocklists from Spamhaus!\e[0m"
SPAMHAUS_DNSBL_CONFIG=$(cat <<EOF
${SPAMHAUS_DQS_KEY}.zen.dq.spamhaus.net=127.0.0.[4..7]*6
${SPAMHAUS_DQS_KEY}.zen.dq.spamhaus.net=127.0.0.[10;11]*8
${SPAMHAUS_DQS_KEY}.zen.dq.spamhaus.net=127.0.0.3*4
${SPAMHAUS_DQS_KEY}.zen.dq.spamhaus.net=127.0.0.2*3
postscreen_dnsbl_reply_map = texthash:/opt/postfix/conf/dnsbl_reply.map
EOF
cat <<EOF > /opt/postfix/conf/dnsbl_reply.map
# Autogenerated by mailcow, using Spamhaus DQS reply domains
${SPAMHAUS_DQS_KEY}.sbl.dq.spamhaus.net sbl.spamhaus.org
${SPAMHAUS_DQS_KEY}.xbl.dq.spamhaus.net xbl.spamhaus.org
${SPAMHAUS_DQS_KEY}.pbl.dq.spamhaus.net pbl.spamhaus.org
${SPAMHAUS_DQS_KEY}.zen.dq.spamhaus.net zen.spamhaus.org
${SPAMHAUS_DQS_KEY}.dbl.dq.spamhaus.net dbl.spamhaus.org
${SPAMHAUS_DQS_KEY}.zrd.dq.spamhaus.net zrd.spamhaus.org
EOF
)
else
if [ -f "/opt/postfix/conf/dnsbl_reply.map" ]; then
rm /opt/postfix/conf/dnsbl_reply.map
fi
response=$(curl --connect-timeout 15 --max-time 30 -s -o /dev/null -w "%{http_code}" "https://asn-check.mailcow.email")
if [ "$response" -eq 503 ]; then
echo -e "\e[31mThe AS of your IP is listed as a banned AS from Spamhaus!\e[0m"
echo -e "\e[33mNo SPAMHAUS_DQS_KEY found... Skipping Spamhaus blocklists entirely!\e[0m"
SPAMHAUS_DNSBL_CONFIG=""
elif [ "$response" -eq 200 ]; then
echo -e "\e[32mThe AS of your IP is NOT listed as a banned AS from Spamhaus!\e[0m"
echo -e "\e[33mUsing the open Spamhaus blocklists.\e[0m"
SPAMHAUS_DNSBL_CONFIG=$(cat <<EOF
zen.spamhaus.org=127.0.0.[10;11]*8
zen.spamhaus.org=127.0.0.[4..7]*6
zen.spamhaus.org=127.0.0.3*4
zen.spamhaus.org=127.0.0.2*3
EOF
)
else
echo -e "\e[31mWe couldn't determine your AS... (maybe DNS/Network issue?) Response Code: $response\e[0m"
echo -e "\e[33mDeactivating Spamhaus DNS Blocklists to be on the safe site!\e[0m"
SPAMHAUS_DNSBL_CONFIG=""
fi
fi
fi
# Reset main.cf
sed -i '/Overrides/q' /opt/postfix/conf/main.cf
echo >> /opt/postfix/conf/main.cf
# Append postscreen dnsbl sites to main.cf
if [ ! -z "$DNSBL_CONFIG" ]; then
echo -e "${DNSBL_CONFIG}\n${SPAMHAUS_DNSBL_CONFIG}" >> /opt/postfix/conf/main.cf
fi
# Append user overrides
echo -e "\n# User Overrides" >> /opt/postfix/conf/main.cf
touch /opt/postfix/conf/extra.cf
sed -i '/\$myhostname/! { /myhostname/d }' /opt/postfix/conf/extra.cf
echo -e "myhostname = ${MAILCOW_HOSTNAME}\n$(cat /opt/postfix/conf/extra.cf)" > /opt/postfix/conf/extra.cf
cat /opt/postfix/conf/extra.cf >> /opt/postfix/conf/main.cf
if [ ! -f /opt/postfix/conf/custom_transport.pcre ]; then

View File

@ -1,5 +1,5 @@
FROM debian:bullseye-slim
LABEL maintainer "Andre Peters <andre.peters@tinc.gmbh>"
LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
ARG DEBIAN_FRONTEND=noninteractive
ARG CODENAME=bullseye

View File

@ -79,6 +79,9 @@ EOF
redis-cli -h redis-mailcow SLAVEOF NO ONE
fi
# Provide additional lua modules
ln -s /usr/lib/$(uname -m)-linux-gnu/liblua5.1-cjson.so.0.0.0 /usr/lib/rspamd/cjson.so
chown -R _rspamd:_rspamd /var/lib/rspamd \
/etc/rspamd/local.d \
/etc/rspamd/override.d \

View File

@ -1,10 +1,11 @@
FROM debian:bullseye-slim
LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
ARG DEBIAN_FRONTEND=noninteractive
ARG SOGO_DEBIAN_REPOSITORY=http://packages.sogo.nu/nightly/5/debian/
# renovate: datasource=github-releases depName=tianon/gosu versioning=semver-coerced extractVersion=^(?<version>.*)$
ARG GOSU_VERSION=1.16
ENV LC_ALL C
ENV GOSU_VERSION 1.14
# Prerequisites
RUN echo "Building from repository $SOGO_DEBIAN_REPOSITORY" \

View File

@ -2,7 +2,8 @@ FROM solr:7.7-slim
USER root
ENV GOSU_VERSION 1.11
# renovate: datasource=github-releases depName=tianon/gosu versioning=semver-coerced extractVersion=(?<version>.*)$
ARG GOSU_VERSION=1.16
COPY solr.sh /
COPY solr-config-7.7.0.xml /

View File

@ -1,6 +1,6 @@
FROM alpine:3.17
LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
RUN apk add --update --no-cache \
curl \
@ -18,6 +18,11 @@ EXPOSE 53/udp 53/tcp
COPY docker-entrypoint.sh /docker-entrypoint.sh
# healthcheck (nslookup)
COPY healthcheck.sh /healthcheck.sh
RUN chmod +x /healthcheck.sh
HEALTHCHECK --interval=30s --timeout=10s CMD [ "/healthcheck.sh" ]
ENTRYPOINT ["/docker-entrypoint.sh"]
CMD ["/usr/sbin/unbound"]

View File

@ -0,0 +1,12 @@
#!/bin/bash
nslookup mailcow.email 127.0.0.1 1> /dev/null
if [ $? -eq 0 ]; then
echo "DNS resolution is working!"
exit 0
else
echo "DNS resolution is not working correctly..."
echo "Maybe check your outbound firewall, as it needs to resolve DNS over TCP AND UDP!"
exit 1
fi

View File

@ -24,7 +24,7 @@ server {
add_header X-Download-Options "noopen" always;
add_header X-Frame-Options "SAMEORIGIN" always;
add_header X-Permitted-Cross-Domain-Policies "none" always;
add_header X-Robots-Tag "none" always;
add_header X-Robots-Tag "noindex, nofollow" always;
add_header X-XSS-Protection "1; mode=block" always;
fastcgi_hide_header X-Powered-By;
@ -86,7 +86,7 @@ server {
deny all;
}
location ~ ^\/(?:index|remote|public|cron|core\/ajax\/update|status|ocs\/v[12]|updater\/.+|oc[ms]-provider\/.+)\.php(?:$|\/) {
location ~ ^\/(?:index|remote|public|cron|core\/ajax\/update|status|ocs\/v[12]|updater\/.+|ocs-provider\/.+)\.php(?:$|\/) {
fastcgi_split_path_info ^(.+?\.php)(\/.*|)$;
set $path_info $fastcgi_path_info;
try_files $fastcgi_script_name =404;
@ -105,7 +105,7 @@ server {
fastcgi_read_timeout 1200;
}
location ~ ^\/(?:updater|oc[ms]-provider)(?:$|\/) {
location ~ ^\/(?:updater|ocs-provider)(?:$|\/) {
try_files $uri/ =404;
index index.php;
}

View File

@ -24,6 +24,11 @@ mail_plugins = </etc/dovecot/mail_plugins
mail_attachment_fs = crypt:set_prefix=mail_crypt_global:posix:
mail_attachment_dir = /var/attachments
mail_attachment_min_size = 128k
# Significantly speeds up very large mailboxes, but is only safe to enable if
# you do not manually modify the files in the `cur` directories in
# mailcowdockerized_vmail-vol-1.
# https://docs.mailcow.email/manual-guides/Dovecot/u_e-dovecot-performance/
maildir_very_dirty_syncs = yes
# Dovecot 2.2
#ssl_protocols = !SSLv3

View File

@ -114,7 +114,7 @@
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Real-IP $remote_addr;
proxy_redirect off;
error_page 403 /_rspamderror.php;
error_page 401 /_rspamderror.php;
}
proxy_pass http://rspamd:11334/;
proxy_set_header Host $http_host;

View File

@ -1,5 +1,6 @@
proxy_cache_path /tmp levels=1:2 keys_zone=sogo:10m inactive=24h max_size=1g;
server_names_hash_bucket_size 64;
server_names_hash_max_size 512;
server_names_hash_bucket_size 128;
map $http_x_forwarded_proto $client_req_scheme {
default $scheme;

View File

@ -1,2 +1,3 @@
listen ${HTTPS_PORT} ssl http2;
listen [::]:${HTTPS_PORT} ssl http2;
listen ${HTTPS_PORT} ssl;
listen [::]:${HTTPS_PORT} ssl;
http2 on;

View File

@ -40,34 +40,6 @@ postscreen_blacklist_action = drop
postscreen_cache_cleanup_interval = 24h
postscreen_cache_map = proxy:btree:$data_directory/postscreen_cache
postscreen_dnsbl_action = enforce
postscreen_dnsbl_sites = wl.mailspike.net=127.0.0.[18;19;20]*-2
hostkarma.junkemailfilter.com=127.0.0.1*-2
list.dnswl.org=127.0.[0..255].0*-2
list.dnswl.org=127.0.[0..255].1*-4
list.dnswl.org=127.0.[0..255].2*-6
list.dnswl.org=127.0.[0..255].3*-8
ix.dnsbl.manitu.net*2
bl.spamcop.net*2
bl.suomispam.net*2
hostkarma.junkemailfilter.com=127.0.0.2*3
hostkarma.junkemailfilter.com=127.0.0.4*2
hostkarma.junkemailfilter.com=127.0.1.2*1
backscatter.spameatingmonkey.net*2
bl.ipv6.spameatingmonkey.net*2
bl.spameatingmonkey.net*2
b.barracudacentral.org=127.0.0.2*7
bl.mailspike.net=127.0.0.2*5
bl.mailspike.net=127.0.0.[10;11;12]*4
dnsbl.sorbs.net=127.0.0.10*8
dnsbl.sorbs.net=127.0.0.5*6
dnsbl.sorbs.net=127.0.0.7*3
dnsbl.sorbs.net=127.0.0.8*2
dnsbl.sorbs.net=127.0.0.6*2
dnsbl.sorbs.net=127.0.0.9*2
zen.spamhaus.org=127.0.0.[10;11]*8
zen.spamhaus.org=127.0.0.[4..7]*6
zen.spamhaus.org=127.0.0.3*4
zen.spamhaus.org=127.0.0.2*3
postscreen_dnsbl_threshold = 6
postscreen_dnsbl_ttl = 5m
postscreen_greet_action = enforce
@ -197,4 +169,4 @@ smtps_smtpd_tls_mandatory_protocols = !SSLv2, !SSLv3, !TLSv1, !TLSv1.1
parent_domain_matches_subdomains = debug_peer_list,fast_flush_domains,mynetworks,qmqpd_authorized_clients
# DO NOT EDIT ANYTHING BELOW #
# User overrides #
# Overrides #

View File

@ -1,15 +1,20 @@
# Whitelist generated by Postwhite v3.4 on Mon 21 Mar 2022 06:50:26 PM CET
# Whitelist generated by Postwhite v3.4 on Fri Dec 1 00:15:18 UTC 2023
# https://github.com/stevejenkins/postwhite/
# 1898 total rules
# 2038 total rules
2a00:1450:4000::/36 permit
2a01:111:f400::/48 permit
2a01:111:f403::/48 permit
2a01:4180:4050:0400::/64 permit
2a01:4180:4050:0800::/64 permit
2a01:4180:4051:0400::/64 permit
2a01:4180:4051:0800::/64 permit
2a01:111:f403:8000::/50 permit
2a01:111:f403::/49 permit
2a01:111:f403:c000::/51 permit
2a01:111:f403:f000::/52 permit
2a02:a60:0:5::/64 permit
2c0f:fb50:4000::/36 permit
2.207.151.53 permit
3.70.123.177 permit
3.93.157.0/24 permit
3.129.120.190 permit
3.137.78.75 permit
3.210.190.0/24 permit
8.20.114.31 permit
8.25.194.0/23 permit
8.25.196.0/23 permit
@ -19,41 +24,53 @@
13.70.32.43 permit
13.72.50.45 permit
13.74.143.28 permit
13.77.161.179 permit
13.78.233.182 permit
13.92.31.129 permit
13.110.208.0/21 permit
13.110.209.0/24 permit
13.110.216.0/22 permit
13.110.224.0/20 permit
13.111.0.0/16 permit
17.41.0.0/16 permit
15.200.21.50 permit
15.200.44.248 permit
15.200.201.185 permit
17.57.155.0/24 permit
17.57.156.0/24 permit
17.58.0.0/16 permit
17.110.0.0/15 permit
17.142.0.0/15 permit
17.162.0.0/15 permit
17.164.0.0/16 permit
17.171.37.0/24 permit
17.172.0.0/16 permit
17.179.168.0/23 permit
18.156.89.250 permit
18.157.243.190 permit
18.194.95.56 permit
18.198.96.88 permit
20.47.149.138 permit
20.48.0.0/12 permit
18.208.124.128/25 permit
18.216.232.154 permit
18.234.1.244 permit
18.236.40.242 permit
20.51.6.32/30 permit
20.52.52.2 permit
20.52.128.133 permit
20.59.80.4/30 permit
20.63.210.192/28 permit
20.64.0.0/10 permit
20.69.8.108/30 permit
20.70.246.20 permit
20.76.201.171 permit
20.83.222.104/30 permit
20.88.157.184/30 permit
20.94.180.64/28 permit
20.97.34.220/30 permit
20.98.148.156/30 permit
20.98.194.68/30 permit
20.105.209.76/30 permit
20.107.239.64/30 permit
20.112.250.133 permit
20.118.139.208/30 permit
20.185.213.160/27 permit
20.185.213.224/27 permit
20.185.214.0/27 permit
20.185.214.2 permit
20.185.214.32/27 permit
20.185.214.64/27 permit
20.192.0.0/10 permit
23.100.85.1 permit
20.231.239.246 permit
20.236.44.162 permit
23.103.224.0/19 permit
23.249.208.0/20 permit
23.251.224.0/19 permit
@ -78,46 +95,34 @@
27.123.206.56/29 permit
27.123.206.76/30 permit
27.123.206.80/28 permit
34.194.25.167 permit
34.194.144.120 permit
31.25.48.222 permit
34.195.217.107 permit
34.202.239.6 permit
34.212.163.75 permit
34.215.104.144 permit
34.225.212.172 permit
34.247.168.44 permit
35.161.32.253 permit
35.167.93.243 permit
35.176.132.251 permit
35.190.247.0/24 permit
35.191.0.0/16 permit
37.188.97.188 permit
37.218.248.47 permit
37.218.249.47 permit
37.218.251.62 permit
39.156.163.64/29 permit
40.71.187.0/24 permit
40.76.4.15 permit
40.77.102.222 permit
40.92.0.0/15 permit
40.97.116.82 permit
40.97.128.194 permit
40.97.148.226 permit
40.97.153.146 permit
40.97.156.114 permit
40.97.160.2 permit
40.97.161.50 permit
40.97.164.146 permit
40.92.0.0/16 permit
40.107.0.0/16 permit
40.112.65.63 permit
40.112.72.205 permit
40.113.200.201 permit
40.117.80.0/24 permit
40.121.71.46 permit
41.74.192.0/22 permit
41.74.196.0/22 permit
41.74.200.0/23 permit
41.74.204.0/23 permit
41.74.206.0/24 permit
42.159.163.81 permit
42.159.163.82 permit
42.159.163.83 permit
43.228.184.0/22 permit
44.206.138.57 permit
44.209.42.157 permit
44.236.56.93 permit
44.238.220.251 permit
46.19.168.0/23 permit
46.226.48.0/21 permit
46.228.36.37 permit
46.228.36.38/31 permit
@ -167,6 +172,8 @@
46.243.88.175 permit
46.243.88.176 permit
46.243.88.177 permit
46.243.95.179 permit
46.243.95.180 permit
50.18.45.249 permit
50.18.121.236 permit
50.18.121.248 permit
@ -176,33 +183,36 @@
50.18.125.237 permit
50.18.126.162 permit
50.31.32.0/19 permit
50.31.156.96/27 permit
50.31.205.0/24 permit
51.4.71.62 permit
51.4.72.0/24 permit
51.4.80.0/27 permit
51.5.72.0/24 permit
51.5.80.0/27 permit
51.137.58.21 permit
51.140.75.55 permit
51.144.100.179 permit
51.163.158.0/24 permit
51.163.159.21 permit
52.5.230.59 permit
52.27.5.72 permit
52.27.28.47 permit
52.33.191.91 permit
52.28.63.81 permit
52.36.138.31 permit
52.37.142.146 permit
52.38.191.253 permit
52.41.64.145 permit
52.58.216.183 permit
52.59.143.3 permit
52.60.41.5 permit
52.60.115.116 permit
52.61.91.9 permit
52.71.0.205 permit
52.82.172.0/22 permit
52.94.124.0/28 permit
52.95.48.152/29 permit
52.95.49.88/29 permit
52.96.91.34 permit
52.96.111.82 permit
52.96.172.98 permit
52.96.214.50 permit
52.96.222.194 permit
52.96.222.226 permit
52.96.223.2 permit
52.96.228.130 permit
52.96.229.242 permit
52.100.0.0/14 permit
52.103.0.0/17 permit
52.119.213.144/28 permit
52.160.39.140 permit
52.165.175.144 permit
@ -214,23 +224,29 @@
52.222.73.83 permit
52.222.73.120 permit
52.222.75.85 permit
52.222.89.228 permit
52.234.172.96/28 permit
52.236.28.240/28 permit
52.237.141.173 permit
52.244.206.214 permit
52.247.53.144 permit
52.250.107.196 permit
52.250.126.174 permit
52.251.55.143 permit
54.90.148.255 permit
54.156.255.69 permit
54.172.97.247 permit
54.174.52.0/24 permit
54.174.53.128/30 permit
54.174.57.0/24 permit
54.174.59.0/24 permit
54.174.60.0/23 permit
54.174.63.0/24 permit
54.186.193.102 permit
54.191.223.5 permit
54.191.223.56 permit
54.194.61.95 permit
54.195.113.45 permit
54.213.20.246 permit
54.214.39.184 permit
54.216.77.168 permit
54.221.227.204 permit
54.240.0.0/18 permit
54.240.64.0/19 permit
54.240.96.0/19 permit
@ -238,7 +254,9 @@
54.244.54.130 permit
54.244.242.0/24 permit
54.246.232.180 permit
54.255.61.23 permit
62.13.128.0/24 permit
62.13.128.150 permit
62.13.129.128/25 permit
62.13.136.0/22 permit
62.13.140.0/22 permit
@ -247,29 +265,32 @@
62.13.150.0/23 permit
62.13.152.0/23 permit
62.17.146.128/26 permit
62.140.7.0/24 permit
62.140.10.21 permit
62.179.121.0/24 permit
62.201.172.0/27 permit
62.201.172.32/27 permit
62.253.227.114 permit
63.32.13.159 permit
63.80.14.0/23 permit
63.111.28.137 permit
63.128.21.0/24 permit
63.143.57.128/25 permit
63.143.59.128/25 permit
64.18.0.0/20 permit
64.20.241.45 permit
64.34.47.128/27 permit
64.34.57.192/26 permit
64.69.212.0/24 permit
64.71.149.160/28 permit
64.79.155.0/24 permit
64.79.155.192 permit
64.79.155.193 permit
64.79.155.205 permit
64.79.155.206 permit
64.89.44.85 permit
64.89.45.80 permit
64.89.45.194 permit
64.89.45.196 permit
64.95.144.196 permit
64.127.115.252 permit
64.132.88.0/23 permit
64.132.92.0/24 permit
64.135.77.0/24 permit
64.135.83.0/24 permit
64.147.123.17 permit
64.147.123.18 permit
64.147.123.19 permit
@ -281,28 +302,35 @@
64.147.123.27 permit
64.147.123.28 permit
64.147.123.29 permit
64.147.123.128/27 permit
64.207.219.7 permit
64.207.219.8 permit
64.207.219.9 permit
64.207.219.10 permit
64.207.219.11 permit
64.207.219.12 permit
64.207.219.13 permit
64.207.219.14 permit
64.207.219.15 permit
64.207.219.71 permit
64.207.219.72 permit
64.207.219.73 permit
64.207.219.74 permit
64.207.219.75 permit
64.207.219.76 permit
64.207.219.77 permit
64.207.219.78 permit
64.207.219.79 permit
64.207.219.135 permit
64.207.219.136 permit
64.207.219.137 permit
64.207.219.138 permit
64.207.219.139 permit
64.207.219.140 permit
64.207.219.141 permit
64.207.219.142 permit
64.207.219.143 permit
64.233.160.0/19 permit
65.38.115.76 permit
65.38.115.84 permit
65.39.215.0/24 permit
65.52.80.137 permit
65.54.51.64/26 permit
65.54.61.64/26 permit
@ -342,6 +370,10 @@
66.111.4.225 permit
66.111.4.229 permit
66.111.4.230 permit
66.119.150.192/26 permit
66.135.202.0/27 permit
66.135.215.0/24 permit
66.135.222.1 permit
66.162.193.226/31 permit
66.163.184.0/21 permit
66.163.184.0/24 permit
@ -372,8 +404,8 @@
66.196.81.232/31 permit
66.196.81.234 permit
66.211.168.230/31 permit
66.211.170.86/31 permit
66.211.170.88/30 permit
66.211.170.88/29 permit
66.211.184.0/23 permit
66.218.74.64/30 permit
66.218.74.68/31 permit
66.218.75.112/30 permit
@ -445,6 +477,8 @@
68.142.230.72/30 permit
68.142.230.76/31 permit
68.142.230.78 permit
68.232.140.138 permit
68.232.157.143 permit
68.232.192.0/20 permit
69.63.178.128/25 permit
69.63.181.0/24 permit
@ -452,6 +486,10 @@
69.65.42.195 permit
69.65.49.192/29 permit
69.72.32.0/20 permit
69.72.40.93 permit
69.72.40.94/31 permit
69.72.40.96/30 permit
69.72.47.205 permit
69.147.84.227 permit
69.162.98.0/24 permit
69.169.224.0/20 permit
@ -460,7 +498,7 @@
70.37.151.128/25 permit
70.42.149.0/24 permit
70.42.149.35 permit
72.3.185.0/24 permit
72.3.237.64/28 permit
72.14.192.0/18 permit
72.21.192.0/19 permit
72.21.217.142 permit
@ -522,15 +560,11 @@
72.30.239.228/31 permit
72.30.239.244/30 permit
72.30.239.248/31 permit
72.32.154.0/24 permit
72.32.217.0/24 permit
72.32.243.0/24 permit
72.34.168.76 permit
72.34.168.80 permit
72.34.168.85 permit
72.34.168.86 permit
72.52.72.32/28 permit
72.52.72.36 permit
74.6.128.0/21 permit
74.6.128.0/24 permit
74.6.129.0/24 permit
@ -558,8 +592,11 @@
74.112.67.243 permit
74.125.0.0/16 permit
74.202.227.40 permit
74.208.4.192/26 permit
74.208.5.64/26 permit
74.208.122.0/26 permit
74.209.250.0/24 permit
74.209.250.12 permit
76.223.128.0/19 permit
76.223.176.0/20 permit
77.238.176.0/22 permit
77.238.176.0/24 permit
@ -582,8 +619,17 @@
77.238.189.142 permit
77.238.189.146/31 permit
77.238.189.148/30 permit
81.7.169.128/25 permit
81.223.46.0/27 permit
84.16.77.1 permit
82.165.159.0/24 permit
82.165.159.0/26 permit
82.165.229.31 permit
82.165.229.130 permit
82.165.230.21 permit
82.165.230.22 permit
84.116.6.0/23 permit
84.116.36.0/24 permit
84.116.50.0/23 permit
85.158.136.0/21 permit
86.61.88.25 permit
87.198.219.130 permit
@ -624,11 +670,9 @@
87.248.117.201 permit
87.248.117.202 permit
87.248.117.205 permit
87.252.219.254 permit
87.253.232.0/21 permit
89.22.108.0/24 permit
91.220.42.0/24 permit
94.236.119.0/26 permit
91.211.240.0/22 permit
94.245.112.0/27 permit
94.245.112.10/31 permit
95.131.104.0/21 permit
@ -638,6 +682,7 @@
96.43.148.64/28 permit
96.43.148.64/31 permit
96.43.151.64/28 permit
98.97.248.0/21 permit
98.136.44.181 permit
98.136.44.182/31 permit
98.136.44.184 permit
@ -1142,24 +1187,20 @@
98.139.245.212/31 permit
99.78.197.208/28 permit
103.2.140.0/22 permit
103.9.8.121 permit
103.9.8.122 permit
103.9.8.123 permit
103.9.96.0/22 permit
103.13.69.0/24 permit
103.28.42.0/24 permit
103.47.204.0/22 permit
103.96.21.0/24 permit
103.96.23.0/24 permit
103.151.192.0/23 permit
103.237.104.0/22 permit
103.168.172.128/27 permit
104.43.243.237 permit
104.44.112.128/25 permit
104.47.0.0/17 permit
104.47.20.0/23 permit
104.47.75.0/24 permit
104.47.108.0/23 permit
104.130.96.0/28 permit
104.130.122.0/23 permit
104.214.25.77 permit
104.215.148.63 permit
104.215.186.3 permit
104.245.209.192/26 permit
106.10.144.64/27 permit
106.10.144.100/31 permit
106.10.144.103 permit
@ -1320,9 +1361,9 @@
117.120.16.0/21 permit
119.42.242.52/31 permit
119.42.242.156 permit
121.244.91.48 permit
122.15.156.182 permit
123.126.78.64/29 permit
124.47.150.0/24 permit
124.47.189.0/24 permit
124.108.96.0/24 permit
124.108.96.24/31 permit
124.108.96.28/31 permit
@ -1335,20 +1376,40 @@
128.127.70.0/26 permit
128.245.0.0/20 permit
128.245.64.0/20 permit
128.245.176.0/20 permit
128.245.240.0/24 permit
128.245.241.0/24 permit
128.245.242.0/24 permit
128.245.242.16 permit
128.245.242.17 permit
128.245.242.18 permit
128.245.243.0/24 permit
128.245.244.0/24 permit
128.245.245.0/24 permit
128.245.246.0/24 permit
128.245.247.0/24 permit
128.245.248.0/21 permit
129.41.77.70 permit
129.41.169.249 permit
129.80.5.164 permit
129.80.67.121 permit
129.145.74.12 permit
129.146.88.28 permit
129.146.147.105 permit
129.146.236.58 permit
129.151.67.221 permit
129.153.62.216 permit
129.153.104.71 permit
129.153.168.146 permit
129.153.190.200 permit
129.153.194.228 permit
129.159.87.137 permit
129.213.195.191 permit
130.61.9.72 permit
130.162.39.83 permit
130.211.0.0/22 permit
130.248.172.0/24 permit
130.248.173.0/24 permit
131.107.0.0/16 permit
131.253.30.0/24 permit
131.253.121.0/26 permit
131.253.121.20 permit
131.253.121.52 permit
132.145.13.209 permit
132.226.26.225 permit
132.226.49.32 permit
@ -1358,45 +1419,67 @@
134.170.141.64/26 permit
134.170.143.0/24 permit
134.170.174.0/24 permit
135.84.80.192/26 permit
135.84.80.0/24 permit
135.84.81.0/24 permit
135.84.82.0/24 permit
135.84.83.0/24 permit
135.84.216.0/22 permit
136.143.160.0/24 permit
136.143.161.0/24 permit
136.143.182.0/23 permit
136.143.184.0/24 permit
136.143.188.0/24 permit
136.143.190.0/23 permit
136.147.128.0/20 permit
136.147.135.0/24 permit
136.147.176.0/20 permit
136.147.176.0/24 permit
136.147.182.0/24 permit
136.179.50.206 permit
138.91.172.26 permit
139.60.152.0/22 permit
139.178.64.159 permit
139.178.64.195 permit
139.138.35.44 permit
139.138.46.121 permit
139.138.46.176 permit
139.138.46.219 permit
139.138.57.55 permit
139.138.58.119 permit
139.180.17.0/24 permit
141.148.159.229 permit
141.193.32.0/23 permit
143.55.224.0/21 permit
143.55.232.0/22 permit
143.55.236.0/22 permit
143.244.80.0/20 permit
144.24.6.140 permit
144.34.8.247 permit
144.34.9.247 permit
144.34.32.247 permit
144.34.33.247 permit
144.178.36.0/24 permit
144.178.38.0/24 permit
145.253.228.160/29 permit
145.253.239.128/29 permit
146.20.112.0/26 permit
146.20.113.0/24 permit
146.20.191.0/24 permit
146.20.215.0/24 permit
146.101.78.0/24 permit
147.75.65.173 permit
147.75.65.174 permit
147.75.98.190 permit
147.160.158.0/24 permit
146.20.215.182 permit
146.88.28.0/24 permit
147.243.1.47 permit
147.243.1.48 permit
147.243.1.153 permit
147.243.128.24 permit
147.243.128.26 permit
148.105.0.14 permit
148.105.0.0/16 permit
148.105.8.0/21 permit
149.72.0.0/16 permit
149.72.248.236 permit
149.97.173.180 permit
150.230.98.160 permit
152.67.105.195 permit
152.69.200.236 permit
155.248.208.51 permit
157.55.0.192/26 permit
157.55.1.128/26 permit
157.55.2.0/25 permit
@ -1412,37 +1495,54 @@
157.56.232.0/21 permit
157.56.240.0/20 permit
157.56.248.0/21 permit
157.58.30.128/25 permit
157.58.196.96/29 permit
157.58.249.3 permit
157.151.208.65 permit
157.255.1.64/29 permit
158.101.211.207 permit
158.120.80.0/21 permit
158.247.16.0/20 permit
159.92.154.0/24 permit
159.92.155.0/24 permit
159.92.157.0/24 permit
159.92.157.16 permit
159.92.157.17 permit
159.92.157.18 permit
159.92.158.0/24 permit
159.92.159.0/24 permit
159.92.160.0/24 permit
159.92.161.0/24 permit
159.92.162.0/24 permit
159.92.163.0/24 permit
159.92.164.0/22 permit
159.92.168.0/21 permit
159.112.240.0/20 permit
159.112.242.162 permit
159.135.132.128/25 permit
159.135.140.80/29 permit
159.135.224.0/20 permit
159.135.228.10 permit
159.183.0.0/16 permit
160.1.62.192 permit
161.38.192.0/20 permit
161.38.204.0/22 permit
161.71.32.0/19 permit
161.71.64.0/20 permit
162.208.119.181 permit
162.247.216.0/22 permit
163.47.180.0/22 permit
163.47.180.0/23 permit
163.114.130.16 permit
163.114.132.120 permit
165.173.128.0/24 permit
166.78.68.0/22 permit
166.78.68.221 permit
166.78.69.146 permit
166.78.69.169 permit
166.78.69.170 permit
166.78.71.131 permit
167.89.0.0/17 permit
167.89.46.159 permit
167.89.54.103 permit
167.89.64.9 permit
167.89.65.0 permit
167.89.65.53 permit
@ -1457,10 +1557,18 @@
167.216.129.210 permit
167.216.131.180 permit
167.220.67.232/29 permit
167.220.67.238 permit
168.138.5.36 permit
168.138.73.51 permit
168.245.0.0/17 permit
168.245.12.252 permit
168.245.46.9 permit
168.245.127.231 permit
169.148.129.0/24 permit
169.148.131.0/24 permit
169.148.142.10 permit
169.148.144.0/25 permit
170.10.68.0/22 permit
170.10.128.0/24 permit
170.10.129.0/24 permit
170.10.133.0/24 permit
172.217.0.0/19 permit
@ -1475,10 +1583,8 @@
173.194.0.0/16 permit
173.203.79.182 permit
173.203.81.39 permit
173.224.160.128/25 permit
173.224.160.188 permit
173.224.161.128/25 permit
173.228.155.0/24 permit
173.224.165.0/26 permit
174.36.84.8/29 permit
174.36.84.16/29 permit
174.36.84.32/29 permit
@ -1491,27 +1597,27 @@
174.36.114.152/29 permit
174.37.67.28/30 permit
174.129.203.189 permit
175.41.215.51 permit
176.32.105.0/24 permit
176.32.127.0/24 permit
178.236.10.128/26 permit
180.189.28.0/24 permit
182.50.76.0/22 permit
182.50.78.64/28 permit
183.240.219.64/29 permit
185.4.120.0/23 permit
185.4.122.0/24 permit
185.12.80.0/22 permit
185.28.196.0/22 permit
185.58.84.93 permit
185.58.85.0/24 permit
185.58.86.0/24 permit
185.72.128.75 permit
185.72.128.76 permit
185.72.128.80 permit
185.80.93.204 permit
185.80.93.227 permit
185.80.95.31 permit
185.90.20.0/22 permit
185.189.236.0/22 permit
185.211.120.0/22 permit
185.250.236.0/22 permit
185.250.239.148 permit
185.250.239.168 permit
185.250.239.190 permit
188.125.68.132 permit
188.125.68.152/31 permit
188.125.68.156 permit
@ -1563,7 +1669,7 @@
188.125.85.238 permit
188.172.128.0/20 permit
192.0.64.0/18 permit
192.28.128.0/18 permit
192.18.139.154 permit
192.30.252.0/22 permit
192.64.236.0/24 permit
192.64.237.0/24 permit
@ -1579,17 +1685,17 @@
192.254.113.10 permit
192.254.113.101 permit
192.254.114.176 permit
192.254.118.63 permit
193.7.206.0/25 permit
193.7.207.0/25 permit
193.109.254.0/23 permit
193.122.128.100 permit
193.123.56.63 permit
194.19.134.0/25 permit
194.64.234.128/27 permit
194.64.234.129 permit
194.104.109.0/24 permit
194.104.111.0/24 permit
194.106.220.0/23 permit
194.113.24.0/22 permit
194.154.193.192/27 permit
195.130.217.0/24 permit
195.4.92.0/23 permit
195.54.172.0/23 permit
195.234.109.226 permit
195.245.230.0/23 permit
198.2.128.0/18 permit
@ -1605,19 +1711,24 @@
198.37.144.0/20 permit
198.37.152.186 permit
198.61.254.0/23 permit
198.61.254.21 permit
198.61.254.231 permit
198.74.56.28 permit
198.178.234.57 permit
198.244.48.0/20 permit
198.244.60.0/22 permit
198.245.80.0/20 permit
198.245.81.0/24 permit
199.15.176.173 permit
199.15.212.0/22 permit
199.15.213.187 permit
199.15.226.37 permit
199.16.156.0/22 permit
199.33.145.1 permit
199.33.145.32 permit
199.34.22.36 permit
199.59.148.0/22 permit
199.67.80.2 permit
199.67.84.0/24 permit
199.67.86.0/24 permit
199.67.88.0/24 permit
199.101.161.130 permit
199.101.162.0/25 permit
199.122.120.0/21 permit
@ -1630,8 +1741,10 @@
202.177.148.110 permit
203.31.36.0/22 permit
203.32.4.25 permit
203.55.21.0/24 permit
203.81.17.0/24 permit
203.122.32.250 permit
203.145.57.160/27 permit
203.188.194.32 permit
203.188.194.151 permit
203.188.194.203 permit
@ -1666,28 +1779,32 @@
203.209.230.76/31 permit
204.11.168.0/21 permit
204.13.11.48/29 permit
204.13.11.48/30 permit
204.14.232.0/21 permit
204.14.232.64/28 permit
204.14.234.64/28 permit
204.29.186.0/23 permit
204.75.142.0/24 permit
204.79.197.212 permit
204.92.114.187 permit
204.92.114.203 permit
204.92.114.204/31 permit
204.141.32.0/23 permit
204.141.42.0/23 permit
204.153.121.0/24 permit
204.220.160.0/20 permit
204.232.168.0/24 permit
205.139.110.0/24 permit
205.201.128.0/20 permit
205.201.131.128/25 permit
205.201.134.128/25 permit
205.201.136.0/23 permit
205.201.137.229 permit
205.201.139.0/24 permit
205.207.104.0/22 permit
205.207.104.108 permit
205.220.167.17 permit
205.220.167.98 permit
205.220.179.17 permit
205.220.179.98 permit
205.251.233.32 permit
205.251.233.36 permit
206.25.247.143 permit
@ -1715,7 +1832,6 @@
207.67.98.192/27 permit
207.68.176.0/26 permit
207.68.176.96/27 permit
207.82.80.0/24 permit
207.126.144.0/20 permit
207.171.160.0/19 permit
207.211.30.64/26 permit
@ -1723,6 +1839,7 @@
207.211.31.0/25 permit
207.211.41.113 permit
207.218.90.0/24 permit
207.218.90.122 permit
207.250.68.0/24 permit
208.40.232.70 permit
208.43.21.28/30 permit
@ -1758,8 +1875,10 @@
208.71.42.212/31 permit
208.71.42.214 permit
208.72.249.240/29 permit
208.74.204.0/22 permit
208.74.204.9 permit
208.75.120.0/22 permit
208.75.121.246 permit
208.75.122.246 permit
208.82.237.96/29 permit
208.82.237.104/31 permit
@ -1773,14 +1892,12 @@
209.46.117.168 permit
209.46.117.179 permit
209.61.151.0/24 permit
209.61.151.236 permit
209.61.151.249 permit
209.61.151.251 permit
209.67.98.46 permit
209.67.98.59 permit
209.85.128.0/17 permit
212.4.136.0/26 permit
212.25.240.80 permit
212.25.240.83 permit
212.25.240.84/31 permit
212.25.240.88 permit
212.82.96.0/24 permit
212.82.96.32/27 permit
212.82.96.64/29 permit
@ -1821,8 +1938,12 @@
212.82.111.228/31 permit
212.82.111.230 permit
212.123.28.40 permit
213.167.75.0/25 permit
213.167.81.0/25 permit
212.227.15.0/24 permit
212.227.15.0/25 permit
212.227.17.0/27 permit
212.227.126.128/25 permit
213.46.255.0/24 permit
213.165.64.0/23 permit
213.199.128.139 permit
213.199.128.145 permit
213.199.138.181 permit
@ -1861,6 +1982,10 @@
216.46.168.0/24 permit
216.58.192.0/19 permit
216.66.217.240/29 permit
216.71.138.33 permit
216.71.152.207 permit
216.71.154.29 permit
216.71.155.89 permit
216.74.162.13 permit
216.74.162.14 permit
216.82.240.0/20 permit
@ -1870,33 +1995,48 @@
216.109.114.0/24 permit
216.109.114.32/27 permit
216.109.114.64/29 permit
216.113.160.0/24 permit
216.113.172.0/25 permit
216.113.175.0/24 permit
216.128.126.97 permit
216.136.162.65 permit
216.136.162.120/29 permit
216.136.168.80/28 permit
216.145.221.0/24 permit
216.198.0.0/18 permit
216.203.30.55 permit
216.203.33.178/31 permit
216.205.24.0/24 permit
216.239.32.0/19 permit
217.72.192.64/26 permit
217.72.192.248/29 permit
217.72.207.0/27 permit
217.77.141.52 permit
217.77.141.59 permit
217.175.194.0/24 permit
222.73.195.64/29 permit
223.165.113.0/24 permit
223.165.115.0/24 permit
223.165.118.0/23 permit
223.165.120.0/23 permit
2001:0868:0100:0600::/64 permit
2001:4860:4000::/36 permit
2001:748:100:40::2:0/112 permit
2404:6800:4000::/36 permit
2603:1010:3:3::5b permit
2603:1020:201:10::10f permit
2603:1030:20e:3::23c permit
2603:1030:b:3::152 permit
2603:1030:c02:8::14 permit
2607:f8b0:4000::/36 permit
2620:109:c003:104::215 permit
2620:109:c003:104::/64 permit
2620:109:c006:104::215 permit
2620:109:c003:104::215 permit
2620:109:c006:104::/64 permit
2620:109:c006:104::215 permit
2620:109:c00d:104::/64 permit
2620:10d:c090:450::120 permit
2620:10d:c091:450::16 permit
2620:119:50c0:207::215 permit
2620:10d:c090:400::8:1 permit
2620:10d:c091:400::8:1 permit
2620:119:50c0:207::/64 permit
2620:119:50c0:207::215 permit
2800:3f0:4000::/36 permit
194.25.134.0/24 permit # t-online.de

View File

@ -28,3 +28,4 @@
#197695 2 #Domain names registrar REG.RU Ltd, Russia
#198068 2 #P.A.G.M. OU, Estonia
#201942 5 #Soltia Consulting SL, Spain
#213373 4 #IP Connect Inc

View File

@ -0,0 +1,91 @@
<?php
// File size is limited by the Nginx site config to 10M
// To speed things up, we do not include prerequisites
header('Content-Type: text/plain');
require_once "vars.inc.php";
// Do not show errors; we log using error_log
ini_set('error_reporting', 0);
// Init database
//$dsn = $database_type . ':host=' . $database_host . ';dbname=' . $database_name;
$dsn = $database_type . ":unix_socket=" . $database_sock . ";dbname=" . $database_name;
$opt = [
PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION,
PDO::ATTR_DEFAULT_FETCH_MODE => PDO::FETCH_ASSOC,
PDO::ATTR_EMULATE_PREPARES => false,
];
try {
$pdo = new PDO($dsn, $database_user, $database_pass, $opt);
}
catch (PDOException $e) {
error_log("FOOTER: " . $e . PHP_EOL);
http_response_code(501);
exit;
}
if (!function_exists('getallheaders')) {
function getallheaders() {
if (!is_array($_SERVER)) {
return array();
}
$headers = array();
foreach ($_SERVER as $name => $value) {
if (substr($name, 0, 5) == 'HTTP_') {
$headers[str_replace(' ', '-', ucwords(strtolower(str_replace('_', ' ', substr($name, 5)))))] = $value;
}
}
return $headers;
}
}
// Read headers
$headers = getallheaders();
// Get Domain
$domain = $headers['Domain'];
// Get Username
$username = $headers['Username'];
// Get From
$from = $headers['From'];
// define empty footer
$empty_footer = json_encode(array(
'html' => '',
'plain' => '',
'vars' => array()
));
error_log("FOOTER: checking for domain " . $domain . ", user " . $username . " and address " . $from . PHP_EOL);
try {
$stmt = $pdo->prepare("SELECT `plain`, `html`, `mbox_exclude` FROM `domain_wide_footer`
WHERE `domain` = :domain");
$stmt->execute(array(
':domain' => $domain
));
$footer = $stmt->fetch(PDO::FETCH_ASSOC);
  if ($footer && in_array($from, (array)json_decode($footer['mbox_exclude']))){
$footer = false;
}
if (empty($footer)){
echo $empty_footer;
exit;
}
error_log("FOOTER: " . json_encode($footer) . PHP_EOL);
$stmt = $pdo->prepare("SELECT `custom_attributes` FROM `mailbox` WHERE `username` = :username");
$stmt->execute(array(
':username' => $username
));
$custom_attributes = $stmt->fetch(PDO::FETCH_ASSOC)['custom_attributes'];
if (empty($custom_attributes)){
$custom_attributes = (object)array();
}
}
catch (Exception $e) {
error_log("FOOTER: " . $e->getMessage() . PHP_EOL);
http_response_code(502);
exit;
}
// return footer
$footer["vars"] = $custom_attributes;
echo json_encode($footer);
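// Example response (sketch; field values are illustrative): the Rspamd
// MOO_FOOTER prefilter elsewhere in this commit expects html/plain strings plus
// the mailbox's raw custom_attributes JSON in vars, e.g.
// {"plain":"--\nKind regards","html":"<p>Kind regards</p>","vars":"{\"phone\":\"+49 123\"}"}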

View File

@ -8,7 +8,7 @@ VIRUS_FOUND {
}
# Bad policy from free mail providers
FREEMAIL_POLICY_FAILURE {
expression = "-g+:policies & !DMARC_POLICY_ALLOW & !MAILLIST & ( FREEMAIL_ENVFROM | FREEMAIL_FROM ) & !WHITELISTED_FWD_HOST";
expression = "FREEMAIL_FROM & !DMARC_POLICY_ALLOW & !MAILLIST& !WHITELISTED_FWD_HOST & -g+:policies";
score = 16.0;
}
# Applies to freemail with undisclosed recipients
@ -68,3 +68,39 @@ WL_FWD_HOST {
ENCRYPTED_CHAT {
expression = "CHAT_VERSION_HEADER & ENCRYPTED_PGP";
}
CLAMD_SPAM_FOUND {
expression = "CLAM_SECI_SPAM & !MAILCOW_WHITE";
description = "Probably Spam, Securite Spam Flag set through ClamAV";
score = 5;
}
CLAMD_BAD_PDF {
expression = "CLAM_SECI_PDF & !MAILCOW_WHITE";
description = "Bad PDF Found, Securite bad PDF Flag set through ClamAV";
score = 8;
}
CLAMD_BAD_JPG {
expression = "CLAM_SECI_JPG & !MAILCOW_WHITE";
description = "Bad JPG Found, Securite bad JPG Flag set through ClamAV";
score = 8;
}
CLAMD_ASCII_MALWARE {
expression = "CLAM_SECI_ASCII & !MAILCOW_WHITE";
description = "ASCII malware found, Securite ASCII malware Flag set through ClamAV";
score = 8;
}
CLAMD_HTML_MALWARE {
expression = "CLAM_SECI_HTML & !MAILCOW_WHITE";
description = "HTML malware found, Securite HTML malware Flag set through ClamAV";
score = 8;
}
CLAMD_JS_MALWARE {
expression = "CLAM_SECI_JS & !MAILCOW_WHITE";
description = "JS malware found, Securite JS malware Flag set through ClamAV";
score = 8;
}

View File

@ -159,8 +159,8 @@ BAZAAR_ABUSE_CH {
}
URLHAUS_ABUSE_CH {
type = "url";
filter = "full";
type = "selector";
selector = "urls";
map = "https://urlhaus.abuse.ch/downloads/text_online/";
score = 10.0;
}
@ -175,7 +175,7 @@ BAD_SUBJECT_00 {
type = "header";
header = "subject";
regexp = true;
map = "http://nullnull.org/bad-subject-regex.txt";
map = "http://fuzzy.mailcow.email/bad-subject-regex.txt";
score = 6.0;
symbols_set = ["BAD_SUBJECT_00"];
}

View File

@ -0,0 +1,9 @@
# Uncomment below to apply the ratelimits globally. Use Ratelimits inside the mailcow UI to override them for a specific domain/mailbox.
# rates {
# # Format: "1 / 1h" or "20 / 1m" etc.
# to = "100 / 1s";
# to_ip = "100 / 1s";
# to_ip_from = "100 / 1s";
# bounce_to = "100 / 1h";
# bounce_to_ip = "7 / 1m";
# }

View File

@ -221,6 +221,16 @@ rspamd_config:register_symbol({
local tagged_rcpt = task:get_symbol("TAGGED_RCPT")
local mailcow_domain = task:get_symbol("RCPT_MAILCOW_DOMAIN")
local function remove_moo_tag()
local moo_tag_header = task:get_header('X-Moo-Tag', false)
if moo_tag_header then
task:set_milter_reply({
remove_headers = {['X-Moo-Tag'] = 0},
})
end
return true
end
if tagged_rcpt and tagged_rcpt[1].options and mailcow_domain then
local tag = tagged_rcpt[1].options[1]
rspamd_logger.infox("found tag: %s", tag)
@ -229,6 +239,7 @@ rspamd_config:register_symbol({
if action ~= 'no action' and action ~= 'greylist' then
rspamd_logger.infox("skipping tag handler for action: %s", action)
remove_moo_tag()
return true
end
@ -243,6 +254,7 @@ rspamd_config:register_symbol({
local function tag_callback_subfolder(err, data)
if err or type(data) ~= 'string' then
rspamd_logger.infox(rspamd_config, "subfolder tag handler for rcpt %s returned invalid or empty data (\"%s\") or error (\"%s\")", body, data, err)
remove_moo_tag()
else
rspamd_logger.infox("Add X-Moo-Tag header")
task:set_milter_reply({
@ -261,6 +273,7 @@ rspamd_config:register_symbol({
)
if not redis_ret_subfolder then
rspamd_logger.infox(rspamd_config, "cannot make request to load tag handler for rcpt")
remove_moo_tag()
end
else
@ -268,7 +281,10 @@ rspamd_config:register_symbol({
local sbj = task:get_header('Subject')
new_sbj = '=?UTF-8?B?' .. tostring(util.encode_base64('[' .. tag .. '] ' .. sbj)) .. '?='
task:set_milter_reply({
remove_headers = {['Subject'] = 1},
remove_headers = {
['Subject'] = 1,
['X-Moo-Tag'] = 0
},
add_headers = {['Subject'] = new_sbj}
})
end
@ -284,6 +300,7 @@ rspamd_config:register_symbol({
)
if not redis_ret_subject then
rspamd_logger.infox(rspamd_config, "cannot make request to load tag handler for rcpt")
remove_moo_tag()
end
end
@ -295,6 +312,7 @@ rspamd_config:register_symbol({
if #rcpt_split == 2 then
if rcpt_split[1] == 'postmaster' then
rspamd_logger.infox(rspamd_config, "not expanding postmaster alias")
remove_moo_tag()
else
rspamd_http.request({
task=task,
@ -307,7 +325,8 @@ rspamd_config:register_symbol({
end
end
end
else
remove_moo_tag()
end
end,
priority = 19
@ -340,6 +359,10 @@ rspamd_config:register_symbol({
if not bcc_dest then
return -- stop
end
-- dot stuff content before sending
local email_content = tostring(task:get_content())
email_content = string.gsub(email_content, "\r\n%.", "\r\n..")
-- send mail
lua_smtp.sendmail({
task = task,
host = os.getenv("IPV4_NETWORK") .. '.253',
@ -347,8 +370,8 @@ rspamd_config:register_symbol({
from = task:get_from(stp)[1].addr,
recipients = bcc_dest,
helo = 'bcc',
timeout = 10,
}, task:get_content(), sendmail_cb)
timeout = 20,
}, email_content, sendmail_cb)
end
-- determine from
@ -499,3 +522,158 @@ rspamd_config:register_symbol({
end
end
})
rspamd_config:register_symbol({
name = 'MOO_FOOTER',
type = 'prefilter',
callback = function(task)
local cjson = require "cjson"
local lua_mime = require "lua_mime"
local lua_util = require "lua_util"
local rspamd_logger = require "rspamd_logger"
local rspamd_http = require "rspamd_http"
local envfrom = task:get_from(1)
local uname = task:get_user()
if not envfrom or not uname then
return false
end
local uname = uname:lower()
local env_from_domain = envfrom[1].domain:lower()
local env_from_addr = envfrom[1].addr:lower()
-- determine newline type
local function newline(task)
local t = task:get_newlines_type()
if t == 'cr' then
return '\r'
elseif t == 'lf' then
return '\n'
end
return '\r\n'
end
-- retrieve footer
    local function footer_cb(err_message, code, data, headers)
      if err_message or type(data) ~= 'string' then
        rspamd_logger.infox(rspamd_config, "domain wide footer request for user %s returned invalid or empty data (\"%s\") or error (\"%s\")", uname, data, err_message)
else
-- parse json string
local footer = cjson.decode(data)
if not footer then
rspamd_logger.infox(rspamd_config, "parsing domain wide footer for user %s returned invalid or empty data (\"%s\") or error (\"%s\")", uname, data, err)
else
if footer and type(footer) == "table" and (footer.html and footer.html ~= "" or footer.plain and footer.plain ~= "") then
rspamd_logger.infox(rspamd_config, "found domain wide footer for user %s: html=%s, plain=%s, vars=%s", uname, footer.html, footer.plain, footer.vars)
local envfrom_mime = task:get_from(2)
local from_name = ""
if envfrom_mime and envfrom_mime[1].name then
from_name = envfrom_mime[1].name
elseif envfrom and envfrom[1].name then
from_name = envfrom[1].name
end
-- default replacements
local replacements = {
auth_user = uname,
from_user = envfrom[1].user,
from_name = from_name,
from_addr = envfrom[1].addr,
from_domain = envfrom[1].domain:lower()
}
-- add custom mailbox attributes
if footer.vars and type(footer.vars) == "string" then
local footer_vars = cjson.decode(footer.vars)
if type(footer_vars) == "table" then
for key, value in pairs(footer_vars) do
replacements[key] = value
end
end
end
if footer.html and footer.html ~= "" then
footer.html = lua_util.jinja_template(footer.html, replacements, true)
end
if footer.plain and footer.plain ~= "" then
footer.plain = lua_util.jinja_template(footer.plain, replacements, true)
end
-- add footer
local out = {}
local rewrite = lua_mime.add_text_footer(task, footer.html, footer.plain) or {}
local seen_cte
local newline_s = newline(task)
local function rewrite_ct_cb(name, hdr)
if rewrite.need_rewrite_ct then
if name:lower() == 'content-type' then
local nct = string.format('%s: %s/%s; charset=utf-8',
'Content-Type', rewrite.new_ct.type, rewrite.new_ct.subtype)
out[#out + 1] = nct
return
elseif name:lower() == 'content-transfer-encoding' then
out[#out + 1] = string.format('%s: %s',
'Content-Transfer-Encoding', 'quoted-printable')
seen_cte = true
return
end
end
out[#out + 1] = hdr.raw:gsub('\r?\n?$', '')
end
task:headers_foreach(rewrite_ct_cb, {full = true})
if not seen_cte and rewrite.need_rewrite_ct then
out[#out + 1] = string.format('%s: %s', 'Content-Transfer-Encoding', 'quoted-printable')
end
-- End of headers
out[#out + 1] = newline_s
if rewrite.out then
for _,o in ipairs(rewrite.out) do
out[#out + 1] = o
end
else
out[#out + 1] = task:get_rawbody()
end
local out_parts = {}
for _,o in ipairs(out) do
if type(o) ~= 'table' then
out_parts[#out_parts + 1] = o
out_parts[#out_parts + 1] = newline_s
else
local removePrefix = "--\x0D\x0AContent-Type"
if string.lower(string.sub(tostring(o[1]), 1, string.len(removePrefix))) == string.lower(removePrefix) then
o[1] = string.sub(tostring(o[1]), string.len("--\x0D\x0A") + 1)
end
out_parts[#out_parts + 1] = o[1]
if o[2] then
out_parts[#out_parts + 1] = newline_s
end
end
end
task:set_message(out_parts)
else
rspamd_logger.infox(rspamd_config, "domain wide footer request for user %s returned invalid or empty data (\"%s\")", uname, data)
end
end
end
end
-- fetch footer
rspamd_http.request({
task=task,
url='http://nginx:8081/footer.php',
body='',
callback=footer_cb,
headers={Domain=env_from_domain,Username=uname,From=env_from_addr},
})
return true
end,
priority = 1
})

View File

@ -1,11 +1,3 @@
rates {
# Format: "1 / 1h" or "20 / 1m" etc. - global ratelimits are disabled by default
to = "100 / 1s";
to_ip = "100 / 1s";
to_ip_from = "100 / 1s";
bounce_to = "100 / 1h";
bounce_to_ip = "7 / 1m";
}
whitelisted_rcpts = "postmaster,mailer-daemon";
max_rcpt = 25;
custom_keywords = "/etc/rspamd/lua/ratelimit.lua";

View File

@ -62,7 +62,7 @@
SOGoFirstDayOfWeek = "1";
SOGoSieveFolderEncoding = "UTF-8";
SOGoPasswordChangeEnabled = YES;
SOGoPasswordChangeEnabled = NO;
SOGoSentFolderName = "Sent";
SOGoMailShowSubscribedFoldersOnly = NO;
NGImap4ConnectionStringSeparator = "/";
@ -83,6 +83,7 @@
//SoDebugBaseURL = YES;
//ImapDebugEnabled = YES;
//SOGoEASDebugEnabled = YES;
SOGoEASSearchInBody = YES; // Experimental. Enabled since 2023-10
//LDAPDebugEnabled = YES;
//PGDebugEnabled = YES;
//MySQL4DebugEnabled = YES;

View File

@ -20,6 +20,6 @@
<pre>BACKUP_LOCATION=/tmp/ ./helper-scripts/backup_and_restore.sh backup all</pre>
<pre>docker compose down --volumes ; docker compose up -d</pre>
<p>Make sure your timezone is correct. Use "America/New_York" for example, do not use spaces. Check <a href="https://en.wikipedia.org/wiki/List_of_tz_database_time_zones">here</a> for a list.</p>
<br>Click to learn more about <a style="color:red;text-decoration:none;" href="https://mailcow.github.io/mailcow-dockerized-docs/#get-support" target="_blank">getting support.</a>
<br>Click to learn more about <a style="color:red;text-decoration:none;" href="https://docs.mailcow.email/#get-support" target="_blank">getting support.</a>
</body>
</html>

View File

@ -80,6 +80,13 @@ foreach ($RSPAMD_MAPS['regex'] as $rspamd_regex_desc => $rspamd_regex_map) {
];
}
// cors settings
$cors_settings = cors('get');
$cors_settings['allowed_origins'] = str_replace(", ", "\n", $cors_settings['allowed_origins']);
$cors_settings['allowed_methods'] = explode(", ", $cors_settings['allowed_methods']);
$f2b_data = fail2ban('get');
$template = 'admin.twig';
$template_data = [
'tfa_data' => $tfa_data,
@ -96,15 +103,20 @@ $template_data = [
'domains' => $domains,
'all_domains' => $all_domains,
'mailboxes' => $mailboxes,
'f2b_data' => fail2ban('get'),
'f2b_data' => $f2b_data,
'f2b_banlist_url' => getBaseUrl() . "/api/v1/get/fail2ban/banlist/" . $f2b_data['banlist_id'],
'q_data' => quarantine('settings'),
'qn_data' => quota_notification('get'),
'rsettings_map' => file_get_contents('http://nginx:8081/settings.php'),
'rsettings' => $rsettings,
'rspamd_regex_maps' => $rspamd_regex_maps,
'logo_specs' => customize('get', 'main_logo_specs'),
'logo_dark_specs' => customize('get', 'main_logo_dark_specs'),
'ip_check' => customize('get', 'ip_check'),
'password_complexity' => password_complexity('get'),
'show_rspamd_global_filters' => @$_SESSION['show_rspamd_global_filters'],
'cors_settings' => $cors_settings,
'is_https' => isset($_SERVER['HTTPS']) && $_SERVER['HTTPS'] === 'on',
'lang_admin' => json_encode($lang['admin']),
'lang_datatables' => json_encode($lang['datatables'])
];

View File

@ -1,4 +1,4 @@
openapi: 3.0.0
openapi: 3.1.0
info:
description: >-
mailcow is a complete e-mailing solution with advanced antispam, antivirus,
@ -699,6 +699,38 @@ paths:
type: string
type: object
summary: Create Domain Admin user
/api/v1/add/sso/domain-admin:
post:
responses:
"401":
$ref: "#/components/responses/Unauthorized"
"200":
content:
application/json:
examples:
response:
value:
token: "591F6D-5C3DD2-7455CD-DAF1C1-AA4FCC"
description: OK
headers: { }
tags:
- Single Sign-On
description: >-
Using this endpoint you can issue a token for a Domain Admin user. The token can be used to
auto-login that Domain Admin by passing the query string variable sso_token={token}. The token expires after 30s.
operationId: Issue Domain Admin SSO token
requestBody:
content:
application/json:
schema:
example:
username: testadmin
properties:
username:
description: the username for the admin user
type: object
type: object
summary: Issue Domain Admin SSO token
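
A hedged usage sketch for the new SSO endpoint; the host, API key and login URL are placeholders, only the endpoint path, request body and token response come from the spec above:

import requests

MAILCOW = "https://mail.example.org"  # hypothetical host

resp = requests.post(
    MAILCOW + "/api/v1/add/sso/domain-admin",
    json={"username": "testadmin"},
    headers={"X-API-Key": "api-key-string"},  # placeholder admin API key
    timeout=10,
)
token = resp.json()["token"]
# The token expires after 30 seconds, so it should be used immediately:
print(MAILCOW + "/?sso_token=" + token)
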
/api/v1/edit/da-acl:
post:
responses:
@ -3105,6 +3137,86 @@ paths:
type: string
type: object
summary: Update domain
/api/v1/edit/domain/footer:
post:
responses:
"401":
$ref: "#/components/responses/Unauthorized"
"200":
content:
application/json:
examples:
response:
value:
- log:
- mailbox
- edit
- domain_wide_footer
- domains:
- mailcow.tld
html: "<br>foo {= foo =}"
plain: "<foo {= foo =}"
mbox_exclude:
- moo@mailcow.tld
- null
msg:
- domain_footer_modified
- mailcow.tld
type: success
schema:
properties:
log:
description: contains request object
items: {}
type: array
msg:
items: {}
type: array
type:
enum:
- success
- danger
- error
type: string
type: object
description: OK
headers: {}
tags:
- Domains
description: >-
You can update the footer of one or more domains per request.
operationId: Update domain wide footer
requestBody:
content:
application/json:
schema:
example:
attr:
html: "<br>foo {= foo =}"
plain: "foo {= foo =}"
mbox_exclude:
- moo@mailcow.tld
items: mailcow.tld
properties:
attr:
properties:
html:
description: Footer text in HTML format
type: string
plain:
description: Footer text in PLAIN text format
type: string
mbox_exclude:
description: Array of mailboxes to exclude from domain wide footer
type: object
type: object
items:
description: contains a list of domain names where you want to update the footer
type: array
items:
type: string
type: object
summary: Update domain wide footer
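
For illustration, a request against the new footer endpoint using the example payload from the schema above; host and API key are assumptions:

import requests

payload = {
    "attr": {
        "html": "<br>foo {= foo =}",
        "plain": "foo {= foo =}",
        "mbox_exclude": ["moo@mailcow.tld"],  # mailboxes skipped by the footer
    },
    "items": ["mailcow.tld"],  # domains whose footer is updated
}
resp = requests.post(
    "https://mail.example.org/api/v1/edit/domain/footer",  # hypothetical host
    json=payload,
    headers={"X-API-Key": "api-key-string"},  # placeholder key
    timeout=10,
)
print(resp.json())  # expects msg "domain_footer_modified" on success
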
/api/v1/edit/fail2ban:
post:
responses:
@ -3144,8 +3256,10 @@ paths:
example:
attr:
ban_time: "86400"
ban_time_increment: "1"
blacklist: "10.100.6.5/32,10.100.8.4/32"
max_attempts: "5"
max_ban_time: "86400"
netban_ipv4: "24"
netban_ipv6: "64"
retry_window: "600"
@ -3159,11 +3273,17 @@ paths:
description: the blacklisted ips or hostnames, separated by commas
type: string
ban_time:
description: the time a ip should be banned
description: the time an ip should be banned
type: number
ban_time_increment:
description: whether the ban time should increase with each repeated offense
type: boolean
max_attempts:
description: the maximum number of failed logins before an ip is banned
type: number
max_ban_time:
description: the maximum time an ip should be banned
type: number
netban_ipv4:
description: the network mask to ban for ipv4
type: number
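
The two new fail2ban attributes, ban_time_increment and max_ban_time, are set alongside the existing ones; a hedged sketch based on the example values above (host and API key are assumptions):

import requests

payload = {
    "attr": {
        "ban_time": "86400",
        "ban_time_increment": "1",  # new: grow the ban time for repeat offenders
        "max_ban_time": "86400",    # new: cap for the grown ban time
        "max_attempts": "5",
        "netban_ipv4": "24",
        "netban_ipv6": "64",
        "retry_window": "600",
        "blacklist": "10.100.6.5/32,10.100.8.4/32",
    }
}
resp = requests.post(
    "https://mail.example.org/api/v1/edit/fail2ban",  # hypothetical host
    json=payload,
    headers={"X-API-Key": "api-key-string"},  # placeholder key
    timeout=10,
)
print(resp.json())
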
@ -3296,6 +3416,86 @@ paths:
type: object
type: object
summary: Update mailbox
/api/v1/edit/mailbox/custom-attribute:
post:
responses:
"401":
$ref: "#/components/responses/Unauthorized"
"200":
content:
application/json:
examples:
response:
value:
- log:
- mailbox
- edit
- mailbox_custom_attribute
- mailboxes:
- moo@mailcow.tld
attribute:
- role
- foo
value:
- cow
- bar
- null
msg:
- mailbox_modified
- moo@mailcow.tld
type: success
schema:
properties:
log:
description: contains request object
items: {}
type: array
msg:
items: {}
type: array
type:
enum:
- success
- danger
- error
type: string
type: object
description: OK
headers: {}
tags:
- Mailboxes
description: >-
You can update custom attributes of one or more mailboxes per request.
operationId: Update mailbox custom attributes
requestBody:
content:
application/json:
schema:
example:
attr:
attribute:
- role
- foo
value:
- cow
- bar
items:
- moo@mailcow.tld
properties:
attr:
properties:
attribute:
description: Array of attribute keys
type: object
value:
description: Array of attribute values
type: object
type: object
items:
description: contains a list of mailboxes you want to update
type: object
type: object
summary: Update mailbox custom attributes
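
A hedged sketch for the custom-attribute endpoint using the schema example above; attribute keys and values are paired by index, host and API key are assumptions:

import requests

payload = {
    "attr": {
        "attribute": ["role", "foo"],  # attribute keys...
        "value": ["cow", "bar"],       # ...paired by index with their values
    },
    "items": ["moo@mailcow.tld"],      # mailboxes to update
}
resp = requests.post(
    "https://mail.example.org/api/v1/edit/mailbox/custom-attribute",  # hypothetical host
    json=payload,
    headers={"X-API-Key": "api-key-string"},  # placeholder key
    timeout=10,
)
print(resp.json())  # expects msg "mailbox_modified" on success
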
/api/v1/edit/mailq:
post:
responses:
@ -4081,10 +4281,12 @@ paths:
response:
value:
ban_time: 604800
ban_time_increment: 1
blacklist: |-
45.82.153.37/32
92.118.38.52/32
max_attempts: 1
max_ban_time: 604800
netban_ipv4: 32
netban_ipv6: 128
perm_bans:
@ -5539,6 +5741,7 @@ paths:
sogo_access: "1"
tls_enforce_in: "0"
tls_enforce_out: "0"
custom_attributes: {}
domain: domain3.tld
is_relayed: 0
local_part: info
@ -5560,6 +5763,84 @@ paths:
description: You can list all mailboxes existing in the system for a specific domain.
operationId: Get mailboxes of a domain
summary: Get mailboxes of a domain
/api/v1/edit/cors:
post:
responses:
"401":
$ref: "#/components/responses/Unauthorized"
"200":
content:
application/json:
examples:
response:
value:
- type: "success"
log: ["cors", "edit", {"allowed_origins": ["*", "mail.mailcow.tld"], "allowed_methods": ["POST", "GET", "DELETE", "PUT"]}]
msg: "cors_headers_edited"
description: OK
headers: { }
tags:
- Cross-Origin Resource Sharing (CORS)
description: >-
This endpoint allows you to manage Cross-Origin Resource Sharing (CORS) settings for the API.
CORS is a security feature implemented by web browsers to prevent unauthorized cross-origin requests.
By editing the CORS settings, you can specify which domains and which methods are permitted to access the API resources from outside the mailcow domain.
operationId: Edit Cross-Origin Resource Sharing (CORS) settings
requestBody:
content:
application/json:
schema:
example:
attr:
allowed_origins: ["*", "mail.mailcow.tld"]
allowed_methods: ["POST", "GET", "DELETE", "PUT"]
properties:
attr:
type: object
properties:
allowed_origins:
type: array
items:
type: string
allowed_methods:
type: array
items:
type: string
summary: Edit Cross-Origin Resource Sharing (CORS) settings
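
For illustration, editing the CORS settings with the example payload from the schema above; host and API key are assumptions:

import requests

payload = {
    "attr": {
        "allowed_origins": ["*", "mail.mailcow.tld"],
        "allowed_methods": ["POST", "GET", "DELETE", "PUT"],
    }
}
resp = requests.post(
    "https://mail.example.org/api/v1/edit/cors",  # hypothetical host
    json=payload,
    headers={"X-API-Key": "api-key-string"},  # placeholder key
    timeout=10,
)
print(resp.json())  # expects msg "cors_headers_edited" on success
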
"/api/v1/get/spam-score/{mailbox}":
get:
parameters:
- description: name of the mailbox, or empty for the current user; an admin user will retrieve the global spam filter score
in: path
name: mailbox
required: true
schema:
type: string
- description: e.g. api-key-string
example: api-key-string
in: header
name: X-API-Key
required: false
schema:
type: string
responses:
"401":
$ref: "#/components/responses/Unauthorized"
"200":
content:
application/json:
examples:
response:
value:
spam_score: "8,15"
description: OK
headers: {}
tags:
- Mailboxes
description: >-
Using this endpoint you can get the global spam filter score or the spam filter score of a certain mailbox.
operationId: Get mailbox or global spam filter score
summary: Get mailbox or global spam filter score
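
And a matching sketch for the spam-score endpoint; the mailbox name, host and API key are placeholders:

import requests

resp = requests.get(
    "https://mail.example.org/api/v1/get/spam-score/moo@mailcow.tld",  # hypothetical host/mailbox
    headers={"X-API-Key": "api-key-string"},  # placeholder key
    timeout=10,
)
print(resp.json())  # e.g. {"spam_score": "8,15"}
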
tags:
- name: Domains
@ -5586,6 +5867,8 @@ tags:
description: Manage DKIM keys
- name: Domain admin
description: Create or update domain admin users
- name: Single Sign-On
description: Issue tokens for users
- name: Address Rewriting
description: Create BCC maps or recipient maps
- name: Outgoing TLS Policy Map Overrides
@ -5602,3 +5885,5 @@ tags:
description: Get the status of your cow
- name: Ratelimits
description: Edit domain ratelimits
- name: Cross-Origin Resource Sharing (CORS)
description: Manage Cross-Origin Resource Sharing (CORS) settings

View File

@ -1,6 +1,6 @@
window.onload = function() {
// Begin Swagger UI call region
const ui = SwaggerUIBundle({
window.ui = SwaggerUIBundle({
urls: [{url: "/api/openapi.yaml", name: "mailcow API"}],
dom_id: '#swagger-ui',
deepLinking: true,
@ -15,5 +15,4 @@ window.onload = function() {
});
// End Swagger UI call region
window.ui = ui;
};

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -342,6 +342,10 @@ div.dataTables_wrapper div.dt-row {
position: relative;
}
div.dataTables_wrapper span.sorting-value {
display: none;
}
div.dataTables_scrollHead table.dataTable {
margin-bottom: 0 !important;
}

View File

@ -42,11 +42,6 @@ table.dataTable.dtr-inline.collapsed>tbody>tr.parent>th.dtr-control:before,
table.dataTable td.dt-control:before {
background-color: #979797 !important;
}
table.dataTable.dtr-inline.collapsed>tbody>tr>td.child,
table.dataTable.dtr-inline.collapsed>tbody>tr>th.child,
table.dataTable.dtr-inline.collapsed>tbody>tr>td.dataTables_empty {
background-color: #fbfbfb;
}
table.dataTable.table-striped>tbody>tr>td {
vertical-align: middle;
}
@ -78,3 +73,21 @@ table.dataTable>tbody>tr.child span.dtr-title {
width: 30%;
max-width: 250px;
}
div.dataTables_wrapper div.dataTables_filter {
text-align: left;
}
div.dataTables_wrapper div.dataTables_length {
text-align: right;
}
.dataTables_paginate, .dataTables_length, .dataTables_filter {
margin: 10px 0!important;
}
td.dt-text-right {
text-align: end !important;
}
th.dt-text-right {
text-align: end !important;
}

View File

@ -357,6 +357,7 @@ button[aria-expanded='true'] > .caret {
}
.progress {
height: 16px;
background-color: #d5d5d5;
}
@ -370,3 +371,22 @@ button[aria-expanded='true'] > .caret {
.btn-check:checked+.btn-outline-secondary, .btn-check:active+.btn-outline-secondary, .btn-outline-secondary:active, .btn-outline-secondary.active, .btn-outline-secondary.dropdown-toggle.show {
background-color: #f0f0f0 !important;
}
.btn-check:checked+.btn-light, .btn-check:active+.btn-light, .btn-light:active, .btn-light.active, .show>.btn-light.dropdown-toggle {
color: #fff;
background-color: #555;
background-image: none;
border-color: #4d4d4d;
}
.btn-check:checked+.btn-light:focus, .btn-check:active+.btn-light:focus, .btn-light:active:focus, .btn-light.active:focus, .show>.btn-light.dropdown-toggle:focus,
.btn-check:focus+.btn-light, .btn-light:focus {
box-shadow: none;
}
.btn-group>.btn:not(:last-of-type) {
border-top-right-radius: 0;
border-bottom-right-radius: 0;
}
.badge.bg-info > a,
.badge.bg-danger > a {
color: #fff !important;
text-decoration: none;
}

View File

@ -38,7 +38,7 @@
@media (max-width: 767px) {
.responsive-tabs .tab-pane {
.responsive-tabs .tab-pane:not(.rsettings) {
display: block !important;
opacity: 1;
}
@ -199,6 +199,26 @@
display: none !important;
}
div.dataTables_wrapper div.dataTables_length {
text-align: left;
}
.senders-mw220 {
max-width: 100% !important;
}
table.dataTable.dtr-inline.collapsed>tbody>tr>td.dtr-control:before,
table.dataTable.dtr-inline.collapsed>tbody>tr>th.dtr-control:before,
table.dataTable td.dt-control:before {
height: 2rem;
width: 2rem;
line-height: 2rem;
margin-top: -15px;
}
li .dtr-data {
padding: 0;
}
}
@media (max-width: 350px) {

View File

@ -66,4 +66,6 @@ table tbody tr td input[type="checkbox"] {
padding: .2em .4em .3em !important;
background-color: #ececec!important;
}
.badge.bg-info .bi {
font-size: inherit;
}

View File

@ -99,4 +99,6 @@ table tbody tr td input[type="checkbox"] {
font-size:110%;
margin:20px;
}
.senders-mw220 {
max-width: 220px;
}

View File

@ -11,7 +11,86 @@
* Copyright 2011-2021 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
*/
@import url("https://fonts.googleapis.com/css2?family=Source+Sans+Pro:ital,wght@0,300;0,400;0,700;1,400&display=swap");
/* source-sans-pro-300 - latin */
@font-face {
font-family: 'Source Sans Pro';
font-style: normal;
font-weight: 300;
src: url('/fonts/source-sans-pro-v21-latin-300.eot'); /* IE9 Compat Modes */
src: local(''),
url('/fonts/source-sans-pro-v21-latin-300.eot?#iefix') format('embedded-opentype'), /* IE6-IE8 */
url('/fonts/source-sans-pro-v21-latin-300.woff2') format('woff2'), /* Super Modern Browsers */
url('/fonts/source-sans-pro-v21-latin-300.woff') format('woff'), /* Modern Browsers */
url('/fonts/source-sans-pro-v21-latin-300.ttf') format('truetype'), /* Safari, Android, iOS */
url('/fonts/source-sans-pro-v21-latin-300.svg#SourceSansPro') format('svg'); /* Legacy iOS */
}
/* source-sans-pro-300italic - latin */
@font-face {
font-family: 'Source Sans Pro';
font-style: italic;
font-weight: 300;
src: url('/fonts/source-sans-pro-v21-latin-300italic.eot'); /* IE9 Compat Modes */
src: local(''),
url('/fonts/source-sans-pro-v21-latin-300italic.eot?#iefix') format('embedded-opentype'), /* IE6-IE8 */
url('/fonts/source-sans-pro-v21-latin-300italic.woff2') format('woff2'), /* Super Modern Browsers */
url('/fonts/source-sans-pro-v21-latin-300italic.woff') format('woff'), /* Modern Browsers */
url('/fonts/source-sans-pro-v21-latin-300italic.ttf') format('truetype'), /* Safari, Android, iOS */
url('/fonts/source-sans-pro-v21-latin-300italic.svg#SourceSansPro') format('svg'); /* Legacy iOS */
}
/* source-sans-pro-regular - latin */
@font-face {
font-family: 'Source Sans Pro';
font-style: normal;
font-weight: 400;
src: url('/fonts/source-sans-pro-v21-latin-regular.eot'); /* IE9 Compat Modes */
src: local(''),
url('/fonts/source-sans-pro-v21-latin-regular.eot?#iefix') format('embedded-opentype'), /* IE6-IE8 */
url('/fonts/source-sans-pro-v21-latin-regular.woff2') format('woff2'), /* Super Modern Browsers */
url('/fonts/source-sans-pro-v21-latin-regular.woff') format('woff'), /* Modern Browsers */
url('/fonts/source-sans-pro-v21-latin-regular.ttf') format('truetype'), /* Safari, Android, iOS */
url('/fonts/source-sans-pro-v21-latin-regular.svg#SourceSansPro') format('svg'); /* Legacy iOS */
}
/* source-sans-pro-italic - latin */
@font-face {
font-family: 'Source Sans Pro';
font-style: italic;
font-weight: 400;
src: url('/fonts/source-sans-pro-v21-latin-italic.eot'); /* IE9 Compat Modes */
src: local(''),
url('/fonts/source-sans-pro-v21-latin-italic.eot?#iefix') format('embedded-opentype'), /* IE6-IE8 */
url('/fonts/source-sans-pro-v21-latin-italic.woff2') format('woff2'), /* Super Modern Browsers */
url('/fonts/source-sans-pro-v21-latin-italic.woff') format('woff'), /* Modern Browsers */
url('/fonts/source-sans-pro-v21-latin-italic.ttf') format('truetype'), /* Safari, Android, iOS */
url('/fonts/source-sans-pro-v21-latin-italic.svg#SourceSansPro') format('svg'); /* Legacy iOS */
}
/* source-sans-pro-700 - latin */
@font-face {
font-family: 'Source Sans Pro';
font-style: normal;
font-weight: 700;
src: url('/fonts/source-sans-pro-v21-latin-700.eot'); /* IE9 Compat Modes */
src: local(''),
url('/fonts/source-sans-pro-v21-latin-700.eot?#iefix') format('embedded-opentype'), /* IE6-IE8 */
url('/fonts/source-sans-pro-v21-latin-700.woff2') format('woff2'), /* Super Modern Browsers */
url('/fonts/source-sans-pro-v21-latin-700.woff') format('woff'), /* Modern Browsers */
url('/fonts/source-sans-pro-v21-latin-700.ttf') format('truetype'), /* Safari, Android, iOS */
url('/fonts/source-sans-pro-v21-latin-700.svg#SourceSansPro') format('svg'); /* Legacy iOS */
}
/* source-sans-pro-700italic - latin */
@font-face {
font-family: 'Source Sans Pro';
font-style: italic;
font-weight: 700;
src: url('/fonts/source-sans-pro-v21-latin-700italic.eot'); /* IE9 Compat Modes */
src: local(''),
url('/fonts/source-sans-pro-v21-latin-700italic.eot?#iefix') format('embedded-opentype'), /* IE6-IE8 */
url('/fonts/source-sans-pro-v21-latin-700italic.woff2') format('woff2'), /* Super Modern Browsers */
url('/fonts/source-sans-pro-v21-latin-700italic.woff') format('woff'), /* Modern Browsers */
url('/fonts/source-sans-pro-v21-latin-700italic.ttf') format('truetype'), /* Safari, Android, iOS */
url('/fonts/source-sans-pro-v21-latin-700italic.svg#SourceSansPro') format('svg'); /* Legacy iOS */
}
:root {
--bs-blue: #158cba;
--bs-indigo: #6610f2;

View File

@ -1,85 +1,128 @@
body {
background-color: #414141;
color: #e0e0e0;
background-color: #1c1c1e;
color: #f2f2f7;
}
.card {
border: 1px solid #1c1c1c;
background-color: #3a3a3a;
border: 1px solid #2c2c2e;
background-color: #2c2c2e;
}
legend {
color: #f5f5f5;
color: #f2f2f7;
}
.card-header {
color: #bbb;
background-color: #2c2c2c;
color: #8e8e93;
background-color: #1c1c1e;
border-color: transparent;
}
.card-body {
--bs-card-color: #bbb;
}
.btn-secondary, .paginate_button, .page-link, .btn-light {
color: #fff !important;
background-color: #7a7a7a !important;
border-color: #5c5c5c !important;
color: #f2f2f7 !important;
background-color: #5e5e5e !important;
border-color: #4c4c4e !important;
}
.btn-check:checked+.btn-secondary, .btn-check:active+.btn-secondary, .btn-secondary:active, .btn-secondary.active, .show>.btn-secondary.dropdown-toggle {
border-color: #7a7a7a !important;
}
.alert-secondary {
color: #fff !important;
background-color: #7a7a7a !important;
border-color: #5c5c5c !important;
}
.bg-secondary {
color: #fff !important;
background-color: #7a7a7a !important;
}
.alert-secondary, .alert-secondary a, .alert-secondary .alert-link {
color: #fff;
}
.page-item.active .page-link {
background-color: #158cba !important;
border-color: #127ba3 !important;
.btn-dark {
color: #f2f2f7 !important;
background-color: #242424 !important;
border-color: #1c1c1e !important;
}
.btn-secondary:focus, .btn-secondary:hover, .btn-group.open .dropdown-toggle.btn-secondary {
background-color: #7a7a7a;
border-color: #5c5c5c !important;
color: #fff;
background-color: #444444;
border-color: #4c4c4e !important;
color: #f2f2f7;
}
.btn-check:checked+.btn-secondary, .btn-check:active+.btn-secondary, .btn-secondary:active, .btn-secondary.active, .show>.btn-secondary.dropdown-toggle {
border-color: #5e5e5e !important;
}
.alert-secondary {
color: #f2f2f7 !important;
background-color: #5e5e5e !important;
border-color: #4c4c4e !important;
}
.bg-secondary {
color: #f2f2f7 !important;
background-color: #5e5e5e !important;
}
.alert-secondary, .alert-secondary a, .alert-secondary .alert-link {
color: #f2f2f7;
}
.page-item.active .page-link {
background-color: #3e3e3e !important;
border-color: #3e3e3e !important;
}
.btn-secondary:focus, .btn-secondary:hover, .btn-group.open .dropdown-toggle.btn-secondary {
background-color: #5e5e5e;
border-color: #4c4c4e !important;
color: #f2f2f7;
}
.btn-secondary:disabled, .btn-secondary.disabled {
border-color: #7a7a7a !important;
border-color: #5e5e5e !important;
}
.modal-content {
background-color: #414141;
--bs-modal-color: #bbb;
background-color: #2c2c2e;
}
.modal-header {
border-bottom: 1px solid #161616;
border-bottom: 1px solid #999;
}
.modal-title {
color: white;
color: #bbb;
}
.modal .btn-close {
filter: invert(1) grayscale(100%) brightness(200%);
}
.navbar.bg-light {
background-color: #222222 !important;
border-color: #181818;
background-color: #1c1c1e !important;
border-color: #2c2c2e;
}
.nav-link {
color: #ccc !important;
color: #8e8e93 !important;
}
.nav-tabs .nav-link.active, .nav-tabs .nav-item.show .nav-link {
background: none;
}
.nav-tabs, .nav-tabs .nav-link {
border-color: #444444 !important;
}
.nav-tabs .nav-link:not(.disabled):hover, .nav-tabs .nav-link:not(.disabled):focus, .nav-tabs .nav-link.active {
border-bottom-color: #414141;
border-bottom-color: #1c1c1e !important;
}
.card .nav-tabs .nav-link:not(.disabled):hover, .card .nav-tabs .nav-link:not(.disabled):focus, .card .nav-tabs .nav-link.active {
border-bottom-color: #2c2c2e !important;
}
.table, .table-striped>tbody>tr:nth-of-type(odd)>*, tbody tr {
color: #ccc !important;
color: #f2f2f7 !important;
}
.dropdown-menu {
background-color: #585858;
border: 1px solid #333;
background-color: #424242;
border: 1px solid #282828;
}
.dropdown-menu>li>a:focus, .dropdown-menu>li>a:hover {
color: #fafafa;
@ -92,7 +135,7 @@ legend {
color: #d4d4d4 !important;
}
tbody tr {
color: #555;
color: #ccc;
}
.navbar-default .navbar-nav>.open>a, .navbar-default .navbar-nav>.open>a:focus, .navbar-default .navbar-nav>.open>a:hover {
color: #ccc;
@ -101,18 +144,15 @@ tbody tr {
color: #ccc;
}
.list-group-item {
background-color: #333;
background-color: #282828;
border: 1px solid #555;
}
.table-striped>tbody>tr:nth-of-type(odd) {
background-color: #333;
background-color: #424242;
}
table.dataTable>tbody>tr.child ul.dtr-details>li {
border-bottom: 1px solid rgba(255, 255, 255, 0.13);
}
tbody tr {
color: #ccc;
}
.label.label-last-login {
color: #ccc !important;
background-color: #555 !important;
@ -128,20 +168,20 @@ div.numberedtextarea-number {
}
.well {
border: 1px solid #555;
background-color: #333;
background-color: #282828;
}
pre {
color: #ccc;
background-color: #333;
background-color: #282828;
border: 1px solid #555;
}
input.form-control, textarea.form-control {
color: #e2e2e2 !important;
background-color: #555 !important;
background-color: #424242 !important;
border: 1px solid #999;
}
input.form-control:focus, textarea.form-control {
background-color: #555 !important;
background-color: #424242 !important;
}
input.form-control:disabled, textarea.form-disabled {
color: #a8a8a8 !important;
@ -149,16 +189,14 @@ input.form-control:disabled, textarea.form-disabled {
}
.input-group-addon {
color: #ccc;
background-color: #555 !important;
background-color: #424242 !important;
border: 1px solid #999;
}
.input-group-text {
color: #ccc;
background-color: #242424;
background-color: #1c1c1c;
}
.list-group-item {
color: #ccc;
}
@ -170,11 +208,11 @@ input.form-control:disabled, textarea.form-disabled {
}
.dropdown-item.active:hover {
color: #fff !important;
background-color: #31b1e4;
background-color: #007aff;
}
.form-select {
color: #e2e2e2!important;
background-color: #555!important;
background-color: #424242!important;
border: 1px solid #999;
}
@ -186,31 +224,6 @@ input.form-control:disabled, textarea.form-disabled {
color: #fff !important;
}
.table-secondary {
--bs-table-bg: #7a7a7a;
--bs-table-striped-bg: #e4e4e4;
--bs-table-striped-color: #000;
--bs-table-active-bg: #d8d8d8;
--bs-table-active-color: #000;
--bs-table-hover-bg: #dedede;
--bs-table-hover-color: #000;
color: #000;
border-color: #d8d8d8;
}
.table-light {
--bs-table-bg: #f6f6f6;
--bs-table-striped-bg: #eaeaea;
--bs-table-striped-color: #000;
--bs-table-active-bg: #dddddd;
--bs-table-active-color: #000;
--bs-table-hover-bg: #e4e4e4;
--bs-table-hover-color: #000;
color: #000;
border-color: #dddddd;
}
.form-control-plaintext {
color: #e0e0e0;
}
@ -284,12 +297,12 @@ a:hover {
}
.tag-box {
background-color: #555;
border: 1px solid #999;
background-color: #282828;
border: 1px solid #555;
}
.tag-input {
color: #fff;
background-color: #555;
background-color: #282828;
}
.tag-add {
color: #ccc;
@ -298,43 +311,24 @@ a:hover {
color: #d1d1d1;
}
table.dataTable.dtr-inline.collapsed>tbody>tr>td.dtr-control:before:hover,
table.dataTable.dtr-inline.collapsed>tbody>tr>th.dtr-control:before:hover {
background-color: #7a7a7a !important;
}
table.dataTable.dtr-inline.collapsed>tbody>tr>td.dtr-control:before,
table.dataTable.dtr-inline.collapsed>tbody>tr>th.dtr-control:before {
background-color: #7a7a7a !important;
border: 1.5px solid #5c5c5c !important;
color: #fff !important;
}
table.dataTable.dtr-inline.collapsed>tbody>tr.parent>td.dtr-control:before,
table.dataTable.dtr-inline.collapsed>tbody>tr.parent>th.dtr-control:before {
background-color: #949494;
}
table.dataTable.dtr-inline.collapsed>tbody>tr>td.child,
table.dataTable.dtr-inline.collapsed>tbody>tr>th.child,
table.dataTable.dtr-inline.collapsed>tbody>tr>td.dataTables_empty {
background-color: #444444;
}
.btn-check-label {
color: #fff;
}
.btn-outline-secondary:hover {
background-color: #c3c3c3;
background-color: #5c5c5c;
}
.btn.btn-outline-secondary {
color: #fff !important;
color: #e0e0e0 !important;
border-color: #7a7a7a !important;
}
.btn-check:checked+.btn-outline-secondary, .btn-check:active+.btn-outline-secondary, .btn-outline-secondary:active, .btn-outline-secondary.active, .btn-outline-secondary.dropdown-toggle.show {
background-color: #9b9b9b !important;
background-color: #7a7a7a !important;
}
.btn-check:checked+.btn-light, .btn-check:active+.btn-light, .btn-light:active, .btn-light.active, .show>.btn-light.dropdown-toggle {
color: #f2f2f7 !important;
background-color: #242424 !important;
border-color: #1c1c1e !important;
}
.btn-input-missing,
.btn-input-missing:hover,
.btn-input-missing:active,
@ -342,19 +336,119 @@ table.dataTable.dtr-inline.collapsed>tbody>tr>td.dataTables_empty {
.btn-input-missing:active:hover,
.btn-input-missing:active:focus {
color: #fff !important;
background-color: #ff2f24 !important;
border-color: #e21207 !important;
background-color: #ff3b30 !important;
border-color: #ff3b30 !important;
}
.inputMissingAttr {
border-color: #FF4136 !important;
border-color: #ff4136 !important;
}
.list-group-details {
background: #444444;
background: #555;
}
.list-group-header {
background: #333;
background: #444;
}
span.mail-address-item {
background-color: #444;
border-radius: 4px;
border: 1px solid #555;
padding: 2px 7px;
display: inline-block;
margin: 2px 6px 2px 0;
}
table.dataTable.dtr-inline.collapsed>tbody>tr>td.dtr-control:before:hover,
table.dataTable.dtr-inline.collapsed>tbody>tr>th.dtr-control:before:hover {
background-color: #7a7a7a !important;
}
table.dataTable.dtr-inline.collapsed>tbody>tr>td.dtr-control:before,
table.dataTable.dtr-inline.collapsed>tbody>tr>th.dtr-control:before {
background-color: #7a7a7a !important;
border: 1.5px solid #5c5c5c !important;
color: #e0e0e0 !important;
}
table.dataTable.dtr-inline.collapsed>tbody>tr.parent>td.dtr-control:before,
table.dataTable.dtr-inline.collapsed>tbody>tr.parent>th.dtr-control:before {
background-color: #949494;
}
table.dataTable.dtr-inline.collapsed>tbody>tr>td.child,
table.dataTable.dtr-inline.collapsed>tbody>tr>th.child,
table.dataTable.dtr-inline.collapsed>tbody>tr>td.dataTables_empty {
background-color: #414141;
}
table.table, .table-striped>tbody>tr:nth-of-type(odd)>*, tbody tr {
color: #ccc !important;
}
.table-secondary {
--bs-table-bg: #282828;
--bs-table-striped-bg: #343434;
--bs-table-striped-color: #f2f2f7;
--bs-table-active-bg: #4c4c4c;
--bs-table-active-color: #f2f2f7;
--bs-table-hover-bg: #3a3a3a;
--bs-table-hover-color: #f2f2f7;
color: #ccc;
border-color: #3a3a3a;
}
.table-light {
--bs-table-bg: #3a3a3a;
--bs-table-striped-bg: #444444;
--bs-table-striped-color: #f2f2f7;
--bs-table-active-bg: #5c5c5c;
--bs-table-active-color: #f2f2f7;
--bs-table-hover-bg: #4c4c4c;
--bs-table-hover-color: #f2f2f7;
color: #ccc;
border-color: #4c4c4c;
}
.table-bordered {
border-color: #3a3a3a;
}
.table-bordered th,
.table-bordered td {
border-color: #3a3a3a !important;
}
.table-bordered thead th,
.table-bordered thead td {
border-bottom-width: 2px;
}
.table-striped>tbody>tr:nth-of-type(odd)>td,
.table-striped>tbody>tr:nth-of-type(odd)>th {
background-color: #282828;
}
.table-hover>tbody>tr:hover {
background-color: #343434;
}
.table>:not(caption)>*>* {
border-color: #5c5c5c;
--bs-table-color-state:#bbb;
--bs-table-bg: #3a3a3a;
}
.text-muted {
--bs-secondary-color: #8e8e93;
}
input::placeholder {
color: #8e8e93 !important;
}
.form-select {
background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%238e8e93' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='m2 5 6 6 6-6'/%3e%3c/svg%3e");
}
.btn-light, .btn-light:hover {
background-image: none;
}

View File

@ -65,6 +65,7 @@ $template_data = [
'solr_uptime' => round($solr_status['status']['dovecot-fts']['uptime'] / 1000 / 60 / 60),
'clamd_status' => $clamd_status,
'containers' => $containers,
'ip_check' => customize('get', 'ip_check'),
'lang_admin' => json_encode($lang['admin']),
'lang_debug' => json_encode($lang['debug']),
'lang_datatables' => json_encode($lang['datatables']),

View File

@ -47,6 +47,7 @@ if (isset($_SESSION['mailcow_cc_role'])) {
$quota_notification_bcc = quota_notification_bcc('get', $domain);
$rl = ratelimit('get', 'domain', $domain);
$rlyhosts = relayhost('get');
$domain_footer = mailbox('get', 'domain_wide_footer', $domain);
$template = 'edit/domain.twig';
$template_data = [
'acl' => $_SESSION['acl'],
@ -56,23 +57,28 @@ if (isset($_SESSION['mailcow_cc_role'])) {
'rlyhosts' => $rlyhosts,
'dkim' => dkim('details', $domain),
'domain_details' => $result,
'domain_footer' => $domain_footer,
'mailboxes' => mailbox('get', 'mailboxes', $_GET["domain"]),
'aliases' => mailbox('get', 'aliases', $_GET["domain"], 'address')
];
}
}
elseif (isset($_GET["template"])){
$domain_template = mailbox('get', 'domain_templates', $_GET["template"]);
elseif (isset($_GET['template'])){
$domain_template = mailbox('get', 'domain_templates', $_GET['template']);
if ($domain_template){
$template_data = [
'template' => $domain_template
'template' => $domain_template,
'rl' => ['frame' => $domain_template['attributes']['rl_frame']],
];
$template = 'edit/domain-templates.twig';
$result = true;
}
else {
$mailbox_template = mailbox('get', 'mailbox_templates', $_GET["template"]);
$mailbox_template = mailbox('get', 'mailbox_templates', $_GET['template']);
if ($mailbox_template){
$template_data = [
'template' => $mailbox_template
'template' => $mailbox_template,
'rl' => ['frame' => $mailbox_template['attributes']['rl_frame']],
];
$template = 'edit/mailbox-templates.twig';
$result = true;
@ -214,6 +220,7 @@ $js_minifier->add('/web/js/site/pwgen.js');
$template_data['result'] = $result;
$template_data['return_to'] = $_SESSION['return_to'];
$template_data['lang_user'] = json_encode($lang['user']);
$template_data['lang_admin'] = json_encode($lang['admin']);
$template_data['lang_datatables'] = json_encode($lang['datatables']);
require_once $_SERVER['DOCUMENT_ROOT'] . '/inc/footer.inc.php';

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -49,7 +49,9 @@ function bcc($_action, $_data = null, $_attr = null) {
}
elseif (filter_var($local_dest, FILTER_VALIDATE_EMAIL)) {
$mailbox = mailbox('get', 'mailbox_details', $local_dest);
if ($mailbox === false && array_key_exists($local_dest, array_merge($direct_aliases, $shared_aliases)) === false) {
$shared_aliases = mailbox('get', 'shared_aliases');
$direct_aliases = mailbox('get', 'direct_aliases');
if ($mailbox === false && in_array($local_dest, array_merge($direct_aliases, $shared_aliases)) === false) {
$_SESSION['return'][] = array(
'type' => 'danger',
'log' => array(__FUNCTION__, $_action, $_data, $_attr),

View File

@ -24,9 +24,10 @@ function customize($_action, $_item, $_data = null) {
}
switch ($_item) {
case 'main_logo':
if (in_array($_data['main_logo']['type'], array('image/gif', 'image/jpeg', 'image/pjpeg', 'image/x-png', 'image/png', 'image/svg+xml'))) {
case 'main_logo_dark':
if (in_array($_data[$_item]['type'], array('image/gif', 'image/jpeg', 'image/pjpeg', 'image/x-png', 'image/png', 'image/svg+xml'))) {
try {
if (file_exists($_data['main_logo']['tmp_name']) !== true) {
if (file_exists($_data[$_item]['tmp_name']) !== true) {
$_SESSION['return'][] = array(
'type' => 'danger',
'log' => array(__FUNCTION__, $_action, $_item, $_data),
@ -34,7 +35,7 @@ function customize($_action, $_item, $_data = null) {
);
return false;
}
$image = new Imagick($_data['main_logo']['tmp_name']);
$image = new Imagick($_data[$_item]['tmp_name']);
if ($image->valid() !== true) {
$_SESSION['return'][] = array(
'type' => 'danger',
@ -63,7 +64,7 @@ function customize($_action, $_item, $_data = null) {
return false;
}
try {
$redis->Set('MAIN_LOGO', 'data:' . $_data['main_logo']['type'] . ';base64,' . base64_encode(file_get_contents($_data['main_logo']['tmp_name'])));
$redis->Set(strtoupper($_item), 'data:' . $_data[$_item]['type'] . ';base64,' . base64_encode(file_get_contents($_data[$_item]['tmp_name'])));
}
catch (RedisException $e) {
$_SESSION['return'][] = array(
@ -160,6 +161,25 @@ function customize($_action, $_item, $_data = null) {
'msg' => 'ui_texts'
);
break;
case 'ip_check':
$ip_check = ($_data['ip_check_opt_in'] == "1") ? 1 : 0;
try {
$redis->set('IP_CHECK', $ip_check);
}
catch (RedisException $e) {
$_SESSION['return'][] = array(
'type' => 'danger',
'log' => array(__FUNCTION__, $_action, $_item, $_data),
'msg' => array('redis_error', $e)
);
return false;
}
$_SESSION['return'][] = array(
'type' => 'success',
'log' => array(__FUNCTION__, $_action, $_item, $_data),
'msg' => 'ip_check_opt_in_modified'
);
break;
}
break;
case 'delete':
@ -182,8 +202,9 @@ function customize($_action, $_item, $_data = null) {
}
switch ($_item) {
case 'main_logo':
case 'main_logo_dark':
try {
if ($redis->del('MAIN_LOGO')) {
if ($redis->del(strtoupper($_item))) {
$_SESSION['return'][] = array(
'type' => 'success',
'log' => array(__FUNCTION__, $_action, $_item, $_data),
@ -220,8 +241,9 @@ function customize($_action, $_item, $_data = null) {
return ($app_links) ? $app_links : false;
break;
case 'main_logo':
case 'main_logo_dark':
try {
return $redis->get('MAIN_LOGO');
return $redis->get(strtoupper($_item));
}
catch (RedisException $e) {
$_SESSION['return'][] = array(
@ -258,9 +280,14 @@ function customize($_action, $_item, $_data = null) {
}
break;
case 'main_logo_specs':
case 'main_logo_dark_specs':
try {
$image = new Imagick();
$img_data = explode('base64,', customize('get', 'main_logo'));
if($_item == 'main_logo_specs') {
$img_data = explode('base64,', customize('get', 'main_logo'));
} else {
$img_data = explode('base64,', customize('get', 'main_logo_dark'));
}
if ($img_data[1]) {
$image->readImageBlob(base64_decode($img_data[1]));
return $image->identifyImage();
@ -276,6 +303,20 @@ function customize($_action, $_item, $_data = null) {
return false;
}
break;
case 'ip_check':
try {
$ip_check = ($ip_check = $redis->get('IP_CHECK')) ? $ip_check : 0;
return $ip_check;
}
catch (RedisException $e) {
$_SESSION['return'][] = array(
'type' => 'danger',
'log' => array(__FUNCTION__, $_action, $_item, $_data),
'msg' => array('redis_error', $e)
);
return false;
}
break;
}
break;
}
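
As the customize() changes above show, the IP_CHECK opt-in is stored as a plain Redis key that defaults to 0 when unset. A minimal Python sketch of the same get/set logic (redis host and database are assumptions, not mailcow code):

import redis

r = redis.Redis(host="redis", port=6379, decode_responses=True)  # hypothetical connection
r.set("IP_CHECK", 1)               # mirrors the 'edit' branch: opt in
ip_check = r.get("IP_CHECK") or 0  # mirrors the 'get' branch: default to 0
print(ip_check)
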

Some files were not shown because too many files have changed in this diff