diff --git a/.github/ISSUE_TEMPLATE/Bug_report.yml b/.github/ISSUE_TEMPLATE/Bug_report.yml
index 2fe7082c..3cfbbe0d 100644
--- a/.github/ISSUE_TEMPLATE/Bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/Bug_report.yml
@@ -26,21 +26,21 @@ body:
attributes:
label: Description
description: Please provide a brief description of the bug in 1-2 sentences. If applicable, add screenshots to help explain your problem. Very useful for bugs in mailcow UI.
- render: text
+ render: plain text
validations:
required: true
- type: textarea
attributes:
label: "Logs:"
description: "Please take a look at the [official documentation](https://docs.mailcow.email/troubleshooting/debug-logs/) and post the last few lines of logs, when the error occurs. For example, docker container logs of affected containers. This will be automatically formatted into code, so no need for backticks."
- render: text
+ render: plain text
validations:
required: true
- type: textarea
attributes:
label: "Steps to reproduce:"
description: "Please describe the steps to reproduce the bug. Screenshots can be added, if helpful."
- render: text
+ render: plain text
placeholder: |-
1. ...
2. ...
@@ -117,41 +117,41 @@ body:
attributes:
label: "Logs of git diff:"
description: "#### Output of `git diff origin/master`, any other changes to the code? If so, **please post them**:"
- render: text
+ render: plain text
validations:
required: true
- type: textarea
attributes:
label: "Logs of iptables -L -vn:"
description: "#### Output of `iptables -L -vn`"
- render: text
+ render: plain text
validations:
required: true
- type: textarea
attributes:
label: "Logs of ip6tables -L -vn:"
description: "#### Output of `ip6tables -L -vn`"
- render: text
+ render: plain text
validations:
required: true
- type: textarea
attributes:
label: "Logs of iptables -L -vn -t nat:"
description: "#### Output of `iptables -L -vn -t nat`"
- render: text
+ render: plain text
validations:
required: true
- type: textarea
attributes:
label: "Logs of ip6tables -L -vn -t nat:"
description: "#### Output of `ip6tables -L -vn -t nat`"
- render: text
+ render: plain text
validations:
required: true
- type: textarea
attributes:
label: "DNS check:"
description: "#### Output of `docker exec -it $(docker ps -qf name=acme-mailcow) dig +short stackoverflow.com @172.22.1.254` (set the IP accordingly, if you changed the internal mailcow network)"
- render: text
+ render: plain text
validations:
required: true
diff --git a/.github/renovate.json b/.github/renovate.json
new file mode 100644
index 00000000..37962b2a
--- /dev/null
+++ b/.github/renovate.json
@@ -0,0 +1,13 @@
+{
+ "enabled": true,
+ "timezone": "Europe/Berlin",
+ "dependencyDashboard": false,
+ "dependencyDashboardTitle": "Renovate Dashboard",
+ "commitBody": "Signed-off-by: milkmaker <milkmaker@mailcow.de>",
+ "rebaseWhen": "auto",
+ "assignees": [
+ "@magiccc"
+ ],
+ "baseBranches": ["staging"],
+ "enabledManagers": ["github-actions"]
+}
diff --git a/.github/workflows/image_builds.yml b/.github/workflows/image_builds.yml
index fe660754..65678dff 100644
--- a/.github/workflows/image_builds.yml
+++ b/.github/workflows/image_builds.yml
@@ -33,13 +33,11 @@ jobs:
run: |
curl -sSL https://get.docker.com/ | CHANNEL=stable sudo sh
sudo service docker start
- sudo curl -L https://github.com/docker/compose/releases/download/v$(curl -Ls https://www.servercow.de/docker-compose/latest.php)/docker-compose-$(uname -s)-$(uname -m) > /usr/local/bin/docker-compose
- sudo chmod +x /usr/local/bin/docker-compose
- name: Prepair Image Builds
run: |
cp helper-scripts/docker-compose.override.yml.d/BUILD_FLAGS/docker-compose.override.yml docker-compose.override.yml
- name: Build Docker Images
run: |
- docker-compose build ${image}
+ docker compose build ${image}
env:
image: ${{ matrix.images }}
diff --git a/data/Dockerfiles/acme/Dockerfile b/data/Dockerfiles/acme/Dockerfile
index f5b7b56c..571c3d08 100644
--- a/data/Dockerfiles/acme/Dockerfile
+++ b/data/Dockerfiles/acme/Dockerfile
@@ -1,4 +1,4 @@
-FROM alpine:3.16
+FROM alpine:3.17
LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
diff --git a/data/Dockerfiles/clamd/Dockerfile b/data/Dockerfiles/clamd/Dockerfile
index efbc6a4d..91716b84 100644
--- a/data/Dockerfiles/clamd/Dockerfile
+++ b/data/Dockerfiles/clamd/Dockerfile
@@ -1,4 +1,4 @@
-FROM clamav/clamav:0.105.1_base
+FROM clamav/clamav:1.0_base
LABEL maintainer "André Peters <andre.peters@servercow.de>"
diff --git a/data/Dockerfiles/dockerapi/Dockerfile b/data/Dockerfiles/dockerapi/Dockerfile
index 41d4a78f..97c3808c 100644
--- a/data/Dockerfiles/dockerapi/Dockerfile
+++ b/data/Dockerfiles/dockerapi/Dockerfile
@@ -1,4 +1,4 @@
-FROM alpine:3.16
+FROM alpine:3.17
LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
@@ -8,11 +8,14 @@ RUN apk add --update --no-cache python3 \
py3-pip \
openssl \
tzdata \
+ py3-psutil \
&& pip3 install --upgrade pip \
- docker \
- flask \
- flask-restful
+ fastapi \
+ uvicorn \
+ aiodocker \
+ redis
+COPY docker-entrypoint.sh /app/
COPY dockerapi.py /app/
-CMD ["python3", "-u", "/app/dockerapi.py"]
+ENTRYPOINT ["/bin/sh", "/app/docker-entrypoint.sh"]
diff --git a/data/Dockerfiles/dockerapi/docker-entrypoint.sh b/data/Dockerfiles/dockerapi/docker-entrypoint.sh
new file mode 100755
index 00000000..aab6cd51
--- /dev/null
+++ b/data/Dockerfiles/dockerapi/docker-entrypoint.sh
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+openssl req -x509 -newkey rsa:4096 -sha256 -days 3650 -nodes \
+  -keyout /app/dockerapi_key.pem \
+  -out /app/dockerapi_cert.pem \
+  -subj /CN=dockerapi/O=mailcow \
+  -addext subjectAltName=DNS:dockerapi
+
+exec uvicorn --host 0.0.0.0 --port 443 --ssl-certfile=/app/dockerapi_cert.pem --ssl-keyfile=/app/dockerapi_key.pem dockerapi:app
diff --git a/data/Dockerfiles/dockerapi/dockerapi.py b/data/Dockerfiles/dockerapi/dockerapi.py
index 20e9d0e0..304c1781 100644
--- a/data/Dockerfiles/dockerapi/dockerapi.py
+++ b/data/Dockerfiles/dockerapi/dockerapi.py
@@ -1,419 +1,623 @@
-#!/usr/bin/env python3
-
-from flask import Flask
-from flask_restful import Resource, Api
-from flask import jsonify
-from flask import Response
-from flask import request
-from threading import Thread
-import docker
-import uuid
-import signal
+from fastapi import FastAPI, Response, Request
+import aiodocker
+import psutil
+import sys
+import re
import time
import os
-import re
-import sys
-import ssl
-import socket
-import subprocess
-import traceback
+import json
+import asyncio
+import redis
+from datetime import datetime
-docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock', version='auto')
-app = Flask(__name__)
-api = Api(app)
-class containers_get(Resource):
- def get(self):
- containers = {}
+containerIds_to_update = []
+host_stats_isUpdating = False
+app = FastAPI()
+
+
+@app.get("/host/stats")
+async def get_host_update_stats():
+ global host_stats_isUpdating
+
+ if host_stats_isUpdating == False:
+ print("start host stats task")
+ asyncio.create_task(get_host_stats())
+ host_stats_isUpdating = True
+
+ while True:
+ if redis_client.exists('host_stats'):
+ break
+ print("wait for host_stats results")
+ await asyncio.sleep(1.5)
+
+
+ print("host stats pulled")
+ stats = json.loads(redis_client.get('host_stats'))
+ return Response(content=json.dumps(stats, indent=4), media_type="application/json")
+
+@app.get("/containers/{container_id}/json")
+async def get_container(container_id : str):
+ if container_id and container_id.isalnum():
try:
- for container in docker_client.containers.list(all=True):
- containers.update({container.attrs['Id']: container.attrs})
- return containers
+ for container in (await async_docker_client.containers.list()):
+ if container._id == container_id:
+ container_info = await container.show()
+ return Response(content=json.dumps(container_info, indent=4), media_type="application/json")
+
+ res = {
+ "type": "danger",
+ "msg": "no container found"
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
except Exception as e:
- return jsonify(type='danger', msg=str(e))
+ res = {
+ "type": "danger",
+ "msg": str(e)
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
+ else:
+ res = {
+ "type": "danger",
+ "msg": "no or invalid id defined"
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
-class container_get(Resource):
- def get(self, container_id):
- if container_id and container_id.isalnum():
- try:
- for container in docker_client.containers.list(all=True, filters={"id": container_id}):
- return container.attrs
- except Exception as e:
- return jsonify(type='danger', msg=str(e))
- else:
- return jsonify(type='danger', msg='no or invalid id defined')
+@app.get("/containers/json")
+async def get_containers():
+ containers = {}
+ try:
+ for container in (await async_docker_client.containers.list()):
+ container_info = await container.show()
+ containers.update({container_info['Id']: container_info})
+ return Response(content=json.dumps(containers, indent=4), media_type="application/json")
+ except Exception as e:
+ res = {
+ "type": "danger",
+ "msg": str(e)
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
-class container_post(Resource):
- def post(self, container_id, post_action):
- if container_id and container_id.isalnum() and post_action:
- try:
- """Dispatch container_post api call"""
- if post_action == 'exec':
- if not request.json or not 'cmd' in request.json:
- return jsonify(type='danger', msg='cmd is missing')
- if not request.json or not 'task' in request.json:
- return jsonify(type='danger', msg='task is missing')
+@app.post("/containers/{container_id}/{post_action}")
+async def post_containers(container_id : str, post_action : str, request: Request):
+ try :
+ request_json = await request.json()
+ except Exception as err:
+ request_json = {}
- api_call_method_name = '__'.join(['container_post', str(post_action), str(request.json['cmd']), str(request.json['task']) ])
- else:
- api_call_method_name = '__'.join(['container_post', str(post_action) ])
+ if container_id and container_id.isalnum() and post_action:
+ try:
+ """Dispatch container_post api call"""
+ if post_action == 'exec':
+ if not request_json or not 'cmd' in request_json:
+ res = {
+ "type": "danger",
+ "msg": "cmd is missing"
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
+ if not request_json or not 'task' in request_json:
+ res = {
+ "type": "danger",
+ "msg": "task is missing"
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
- api_call_method = getattr(self, api_call_method_name, lambda container_id: jsonify(type='danger', msg='container_post - unknown api call'))
+ api_call_method_name = '__'.join(['container_post', str(post_action), str(request_json['cmd']), str(request_json['task']) ])
+ else:
+ api_call_method_name = '__'.join(['container_post', str(post_action) ])
+
+ docker_utils = DockerUtils(async_docker_client)
+ api_call_method = getattr(docker_utils, api_call_method_name, lambda container_id: Response(content=json.dumps({'type': 'danger', 'msg':'container_post - unknown api call' }, indent=4), media_type="application/json"))
- print("api call: %s, container_id: %s" % (api_call_method_name, container_id))
- return api_call_method(container_id)
- except Exception as e:
- print("error - container_post: %s" % str(e))
- return jsonify(type='danger', msg=str(e))
+ print("api call: %s, container_id: %s" % (api_call_method_name, container_id))
+ return await api_call_method(container_id, request_json)
+ except Exception as e:
+ print("error - container_post: %s" % str(e))
+ res = {
+ "type": "danger",
+ "msg": str(e)
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
- else:
- return jsonify(type='danger', msg='invalid container id or missing action')
+ else:
+ res = {
+ "type": "danger",
+ "msg": "invalid container id or missing action"
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
+@app.post("/container/{container_id}/stats/update")
+async def post_container_update_stats(container_id : str):
+ global containerIds_to_update
+
+ # start update task for container if no task is running
+ if container_id not in containerIds_to_update:
+ asyncio.create_task(get_container_stats(container_id))
+ containerIds_to_update.append(container_id)
+
+ while True:
+ if redis_client.exists(container_id + '_stats'):
+ break
+ await asyncio.sleep(1.5)
+
+ stats = json.loads(redis_client.get(container_id + '_stats'))
+ return Response(content=json.dumps(stats, indent=4), media_type="application/json")
+
+
+
+
+class DockerUtils:
+ def __init__(self, docker_client):
+ self.docker_client = docker_client
# api call: container_post - post_action: stop
- def container_post__stop(self, container_id):
- for container in docker_client.containers.list(all=True, filters={"id": container_id}):
- container.stop()
- return jsonify(type='success', msg='command completed successfully')
-
+ async def container_post__stop(self, container_id, request_json):
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ await container.stop()
+ res = {
+ 'type': 'success',
+ 'msg': 'command completed successfully'
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: start
- def container_post__start(self, container_id):
- for container in docker_client.containers.list(all=True, filters={"id": container_id}):
- container.start()
- return jsonify(type='success', msg='command completed successfully')
+ async def container_post__start(self, container_id, request_json):
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ await container.start()
+ res = {
+ 'type': 'success',
+ 'msg': 'command completed successfully'
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: restart
- def container_post__restart(self, container_id):
- for container in docker_client.containers.list(all=True, filters={"id": container_id}):
- container.restart()
- return jsonify(type='success', msg='command completed successfully')
+ async def container_post__restart(self, container_id, request_json):
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ await container.restart()
+ res = {
+ 'type': 'success',
+ 'msg': 'command completed successfully'
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: top
- def container_post__top(self, container_id):
- for container in docker_client.containers.list(all=True, filters={"id": container_id}):
- return jsonify(type='success', msg=container.top())
+ async def container_post__top(self, container_id, request_json):
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ ps_exec = await container.exec("ps")
+ async with ps_exec.start(detach=False) as stream:
+ ps_return = await stream.read_out()
-
- # api call: container_post - post_action: stats
- def container_post__stats(self, container_id):
- for container in docker_client.containers.list(all=True, filters={"id": container_id}):
- for stat in container.stats(decode=True, stream=True):
- return jsonify(type='success', msg=stat )
+ exec_details = await ps_exec.inspect()
+ if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
+ res = {
+ 'type': 'success',
+ 'msg': ps_return.data.decode('utf-8')
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
+ else:
+ res = {
+ 'type': 'danger',
+ 'msg': ''
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: mailq - task: delete
- def container_post__exec__mailq__delete(self, container_id):
- if 'items' in request.json:
+ async def container_post__exec__mailq__delete(self, container_id, request_json):
+ if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
- filtered_qids = filter(r.match, request.json['items'])
+ filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-d %s' % i for i in filtered_qids]
- sanitized_string = str(' '.join(flagged_qids));
+ sanitized_string = str(' '.join(flagged_qids))
- for container in docker_client.containers.list(filters={"id": container_id}):
- postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
- return exec_run_handler('generic', postsuper_r)
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
+ return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: hold
- def container_post__exec__mailq__hold(self, container_id):
- if 'items' in request.json:
+ async def container_post__exec__mailq__hold(self, container_id, request_json):
+ if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
- filtered_qids = filter(r.match, request.json['items'])
+ filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-h %s' % i for i in filtered_qids]
- sanitized_string = str(' '.join(flagged_qids));
+ sanitized_string = str(' '.join(flagged_qids))
- for container in docker_client.containers.list(filters={"id": container_id}):
- postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
- return exec_run_handler('generic', postsuper_r)
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
+ return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: cat
- def container_post__exec__mailq__cat(self, container_id):
- if 'items' in request.json:
+ async def container_post__exec__mailq__cat(self, container_id, request_json):
+ if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
- filtered_qids = filter(r.match, request.json['items'])
+ filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
- sanitized_string = str(' '.join(filtered_qids));
+ sanitized_string = str(' '.join(filtered_qids))
- for container in docker_client.containers.list(filters={"id": container_id}):
- postcat_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
- if not postcat_return:
- postcat_return = 'err: invalid'
- return exec_run_handler('utf8_text_only', postcat_return)
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ postcat_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
+ return await exec_run_handler('utf8_text_only', postcat_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: unhold
- def container_post__exec__mailq__unhold(self, container_id):
- if 'items' in request.json:
+ async def container_post__exec__mailq__unhold(self, container_id, request_json):
+ if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
- filtered_qids = filter(r.match, request.json['items'])
+ filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-H %s' % i for i in filtered_qids]
- sanitized_string = str(' '.join(flagged_qids));
+ sanitized_string = str(' '.join(flagged_qids))
- for container in docker_client.containers.list(filters={"id": container_id}):
- postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
- return exec_run_handler('generic', postsuper_r)
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
+ return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: deliver
- def container_post__exec__mailq__deliver(self, container_id):
- if 'items' in request.json:
+ async def container_post__exec__mailq__deliver(self, container_id, request_json):
+ if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
- filtered_qids = filter(r.match, request.json['items'])
+ filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-i %s' % i for i in filtered_qids]
- for container in docker_client.containers.list(filters={"id": container_id}):
- for i in flagged_qids:
- postqueue_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
- # todo: check each exit code
- return jsonify(type='success', msg=str("Scheduled immediate delivery"))
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ for i in flagged_qids:
+ postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
+ async with postsuper_r_exec.start(detach=False) as stream:
+ postsuper_r_return = await stream.read_out()
+ # todo: check each exit code
+ res = {
+ 'type': 'success',
+ 'msg': 'Scheduled immediate delivery'
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: mailq - task: list
- def container_post__exec__mailq__list(self, container_id):
- for container in docker_client.containers.list(filters={"id": container_id}):
- mailq_return = container.exec_run(["/usr/sbin/postqueue", "-j"], user='postfix')
- return exec_run_handler('utf8_text_only', mailq_return)
+ async def container_post__exec__mailq__list(self, container_id, request_json):
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ mailq_exec = await container.exec(["/usr/sbin/postqueue", "-j"], user='postfix')
+ return await exec_run_handler('utf8_text_only', mailq_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: flush
- def container_post__exec__mailq__flush(self, container_id):
- for container in docker_client.containers.list(filters={"id": container_id}):
- postqueue_r = container.exec_run(["/usr/sbin/postqueue", "-f"], user='postfix')
- return exec_run_handler('generic', postqueue_r)
+ async def container_post__exec__mailq__flush(self, container_id, request_json):
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ postsuper_r_exec = await container.exec(["/usr/sbin/postqueue", "-f"], user='postfix')
+ return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: super_delete
- def container_post__exec__mailq__super_delete(self, container_id):
- for container in docker_client.containers.list(filters={"id": container_id}):
- postsuper_r = container.exec_run(["/usr/sbin/postsuper", "-d", "ALL"])
- return exec_run_handler('generic', postsuper_r)
+ async def container_post__exec__mailq__super_delete(self, container_id, request_json):
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ postsuper_r_exec = await container.exec(["/usr/sbin/postsuper", "-d", "ALL"])
+ return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: system - task: fts_rescan
- def container_post__exec__system__fts_rescan(self, container_id):
- if 'username' in request.json:
- for container in docker_client.containers.list(filters={"id": container_id}):
- rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request.json['username'].replace("'", "'\\''") + "'"], user='vmail')
- if rescan_return.exit_code == 0:
- return jsonify(type='success', msg='fts_rescan: rescan triggered')
- else:
- return jsonify(type='warning', msg='fts_rescan error')
+ async def container_post__exec__system__fts_rescan(self, container_id, request_json):
+ if 'username' in request_json:
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail')
+ async with rescan_exec.start(detach=False) as stream:
+ rescan_return = await stream.read_out()
- if 'all' in request.json:
- for container in docker_client.containers.list(filters={"id": container_id}):
- rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')
- if rescan_return.exit_code == 0:
- return jsonify(type='success', msg='fts_rescan: rescan triggered')
- else:
- return jsonify(type='warning', msg='fts_rescan error')
+ exec_details = await rescan_exec.inspect()
+ if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
+ res = {
+ 'type': 'success',
+ 'msg': 'fts_rescan: rescan triggered'
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
+ else:
+ res = {
+ 'type': 'warning',
+ 'msg': 'fts_rescan error'
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
+
+ if 'all' in request_json:
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')
+ async with rescan_exec.start(detach=False) as stream:
+ rescan_return = await stream.read_out()
+
+ exec_details = await rescan_exec.inspect()
+ if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
+ res = {
+ 'type': 'success',
+ 'msg': 'fts_rescan: rescan triggered'
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
+ else:
+ res = {
+ 'type': 'warning',
+ 'msg': 'fts_rescan error'
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: system - task: df
- def container_post__exec__system__df(self, container_id):
- if 'dir' in request.json:
- for container in docker_client.containers.list(filters={"id": container_id}):
- df_return = container.exec_run(["/bin/bash", "-c", "/bin/df -H '" + request.json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
- if df_return.exit_code == 0:
- return df_return.output.decode('utf-8').rstrip()
- else:
- return "0,0,0,0,0,0"
+ async def container_post__exec__system__df(self, container_id, request_json):
+ if 'dir' in request_json:
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ df_exec = await container.exec(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
+ async with df_exec.start(detach=False) as stream:
+ df_return = await stream.read_out()
+
+ print(df_return)
+ print(await df_exec.inspect())
+ exec_details = await df_exec.inspect()
+ if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
+ return df_return.data.decode('utf-8').rstrip()
+ else:
+ return "0,0,0,0,0,0"
# api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade
- def container_post__exec__system__mysql_upgrade(self, container_id):
- for container in docker_client.containers.list(filters={"id": container_id}):
- sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
- if sql_return.exit_code == 0:
- matched = False
- for line in sql_return.output.decode('utf-8').split("\n"):
- if 'is already upgraded to' in line:
- matched = True
- if matched:
- return jsonify(type='success', msg='mysql_upgrade: already upgraded', text=sql_return.output.decode('utf-8'))
+ async def container_post__exec__system__mysql_upgrade(self, container_id, request_json):
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ sql_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
+ async with sql_exec.start(detach=False) as stream:
+ sql_return = await stream.read_out()
+
+ exec_details = await sql_exec.inspect()
+ if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
+ matched = False
+ for line in sql_return.data.decode('utf-8').split("\n"):
+ if 'is already upgraded to' in line:
+ matched = True
+ if matched:
+ res = {
+ 'type': 'success',
+ 'msg': 'mysql_upgrade: already upgraded',
+ 'text': sql_return.data.decode('utf-8')
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
+ else:
+ await container.restart()
+ res = {
+ 'type': 'warning',
+ 'msg': 'mysql_upgrade: upgrade was applied',
+ 'text': sql_return.data.decode('utf-8')
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
- container.restart()
- return jsonify(type='warning', msg='mysql_upgrade: upgrade was applied', text=sql_return.output.decode('utf-8'))
- else:
- return jsonify(type='error', msg='mysql_upgrade: error running command', text=sql_return.output.decode('utf-8'))
+ res = {
+ 'type': 'error',
+ 'msg': 'mysql_upgrade: error running command',
+ 'text': sql_return.data.decode('utf-8')
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: system - task: mysql_tzinfo_to_sql
- def container_post__exec__system__mysql_tzinfo_to_sql(self, container_id):
- for container in docker_client.containers.list(filters={"id": container_id}):
- sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
- if sql_return.exit_code == 0:
- return jsonify(type='info', msg='mysql_tzinfo_to_sql: command completed successfully', text=sql_return.output.decode('utf-8'))
- else:
- return jsonify(type='error', msg='mysql_tzinfo_to_sql: error running command', text=sql_return.output.decode('utf-8'))
+ async def container_post__exec__system__mysql_tzinfo_to_sql(self, container_id, request_json):
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ sql_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
+ async with sql_exec.start(detach=False) as stream:
+ sql_return = await stream.read_out()
+
+ exec_details = await sql_exec.inspect()
+ if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
+ res = {
+ 'type': 'info',
+ 'msg': 'mysql_tzinfo_to_sql: command completed successfully',
+ 'text': sql_return.data.decode('utf-8')
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
+ else:
+ res = {
+ 'type': 'error',
+ 'msg': 'mysql_tzinfo_to_sql: error running command',
+ 'text': sql_return.data.decode('utf-8')
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: reload - task: dovecot
- def container_post__exec__reload__dovecot(self, container_id):
- for container in docker_client.containers.list(filters={"id": container_id}):
- reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
- return exec_run_handler('generic', reload_return)
+ async def container_post__exec__reload__dovecot(self, container_id, request_json):
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
+ return await exec_run_handler('generic', reload_exec)
# api call: container_post - post_action: exec - cmd: reload - task: postfix
- def container_post__exec__reload__postfix(self, container_id):
- for container in docker_client.containers.list(filters={"id": container_id}):
- reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
- return exec_run_handler('generic', reload_return)
+ async def container_post__exec__reload__postfix(self, container_id, request_json):
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
+ return await exec_run_handler('generic', reload_exec)
# api call: container_post - post_action: exec - cmd: reload - task: nginx
- def container_post__exec__reload__nginx(self, container_id):
- for container in docker_client.containers.list(filters={"id": container_id}):
- reload_return = container.exec_run(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
- return exec_run_handler('generic', reload_return)
+ async def container_post__exec__reload__nginx(self, container_id, request_json):
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ reload_exec = await container.exec(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
+ return await exec_run_handler('generic', reload_exec)
# api call: container_post - post_action: exec - cmd: sieve - task: list
- def container_post__exec__sieve__list(self, container_id):
- if 'username' in request.json:
- for container in docker_client.containers.list(filters={"id": container_id}):
- sieve_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request.json['username'].replace("'", "'\\''") + "'"])
- return exec_run_handler('utf8_text_only', sieve_return)
+ async def container_post__exec__sieve__list(self, container_id, request_json):
+ if 'username' in request_json:
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ sieve_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"])
+ return await exec_run_handler('utf8_text_only', sieve_exec)
# api call: container_post - post_action: exec - cmd: sieve - task: print
- def container_post__exec__sieve__print(self, container_id):
- if 'username' in request.json and 'script_name' in request.json:
- for container in docker_client.containers.list(filters={"id": container_id}):
- cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request.json['username'].replace("'", "'\\''") + "' '" + request.json['script_name'].replace("'", "'\\''") + "'"]
- sieve_return = container.exec_run(cmd)
- return exec_run_handler('utf8_text_only', sieve_return)
+ async def container_post__exec__sieve__print(self, container_id, request_json):
+ if 'username' in request_json and 'script_name' in request_json:
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]
+ sieve_exec = await container.exec(cmd)
+ return await exec_run_handler('utf8_text_only', sieve_exec)
# api call: container_post - post_action: exec - cmd: maildir - task: cleanup
- def container_post__exec__maildir__cleanup(self, container_id):
- if 'maildir' in request.json:
- for container in docker_client.containers.list(filters={"id": container_id}):
- sane_name = re.sub(r'\W+', '', request.json['maildir'])
- cmd = ["/bin/bash", "-c", "if [[ -d '/var/vmail/" + request.json['maildir'].replace("'", "'\\''") + "' ]]; then /bin/mv '/var/vmail/" + request.json['maildir'].replace("'", "'\\''") + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"]
- maildir_cleanup = container.exec_run(cmd, user='vmail')
- return exec_run_handler('generic', maildir_cleanup)
-
-
+ async def container_post__exec__maildir__cleanup(self, container_id, request_json):
+ if 'maildir' in request_json:
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+ sane_name = re.sub(r'\W+', '', request_json['maildir'])
+ cmd = ["/bin/bash", "-c", "if [[ -d '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' ]]; then /bin/mv '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"]
+ maildir_cleanup_exec = await container.exec(cmd, user='vmail')
+ return await exec_run_handler('generic', maildir_cleanup_exec)
# api call: container_post - post_action: exec - cmd: rspamd - task: worker_password
- def container_post__exec__rspamd__worker_password(self, container_id):
- if 'raw' in request.json:
- for container in docker_client.containers.list(filters={"id": container_id}):
- cmd = "/usr/bin/rspamadm pw -e -p '" + request.json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
- cmd_response = exec_cmd_container(container, cmd, user="_rspamd")
- matched = False
- for line in cmd_response.split("\n"):
- if '$2$' in line:
- hash = line.strip()
- hash_out = re.search('\$2\$.+$', hash).group(0)
- rspamd_passphrase_hash = re.sub('[^0-9a-zA-Z\$]+', '', hash_out.rstrip())
+ async def container_post__exec__rspamd__worker_password(self, container_id, request_json):
+ if 'raw' in request_json:
+ for container in (await self.docker_client.containers.list()):
+ if container._id == container_id:
+
+ cmd = "./set_worker_password.sh '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
+ rspamd_password_exec = await container.exec(cmd, user='_rspamd')
+ async with rspamd_password_exec.start(detach=False) as stream:
+ rspamd_password_return = await stream.read_out()
- rspamd_password_filename = "/etc/rspamd/override.d/worker-controller-password.inc"
- cmd = '''/bin/echo 'enable_password = "%s";' > %s && cat %s''' % (rspamd_passphrase_hash, rspamd_password_filename, rspamd_password_filename)
- cmd_response = exec_cmd_container(container, cmd, user="_rspamd")
+ matched = False
+ if "OK" in rspamd_password_return.data.decode('utf-8'):
+ matched = True
+ await container.restart()
- if rspamd_passphrase_hash.startswith("$2$") and rspamd_passphrase_hash in cmd_response:
- container.restart()
- matched = True
+ if matched:
+ res = {
+ 'type': 'success',
+ 'msg': 'command completed successfully'
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
+ else:
+ res = {
+ 'type': 'danger',
+ 'msg': 'command did not complete'
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
- if matched:
- return jsonify(type='success', msg='command completed successfully')
- else:
- return jsonify(type='danger', msg='command did not complete')
-def exec_cmd_container(container, cmd, user, timeout=2, shell_cmd="/bin/bash"):
- def recv_socket_data(c_socket, timeout):
- c_socket.setblocking(0)
- total_data=[];
- data='';
- begin=time.time()
- while True:
- if total_data and time.time()-begin > timeout:
- break
- elif time.time()-begin > timeout*2:
- break
- try:
- data = c_socket.recv(8192)
- if data:
- total_data.append(data.decode('utf-8'))
- #change the beginning time for measurement
- begin=time.time()
- else:
- #sleep for sometime to indicate a gap
- time.sleep(0.1)
- break
- except:
- pass
- return ''.join(total_data)
+async def exec_run_handler(type, exec_obj):
+ async with exec_obj.start(detach=False) as stream:
+ exec_return = await stream.read_out()
- try :
- socket = container.exec_run([shell_cmd], stdin=True, socket=True, user=user).output._sock
- if not cmd.endswith("\n"):
- cmd = cmd + "\n"
- socket.send(cmd.encode('utf-8'))
- data = recv_socket_data(socket, timeout)
- socket.close()
- return data
+ if exec_return is None:
+ exec_return = ""
+ else:
+ exec_return = exec_return.data.decode('utf-8')
- except Exception as e:
- print("error - exec_cmd_container: %s" % str(e))
- traceback.print_exc(file=sys.stdout)
-
-def exec_run_handler(type, output):
- if type == 'generic':
- if output.exit_code == 0:
- return jsonify(type='success', msg='command completed successfully')
+ if type == 'generic':
+ exec_details = await exec_obj.inspect()
+ if exec_details["ExitCode"] is None or exec_details["ExitCode"] == 0:
+ res = {
+ "type": "success",
+ "msg": "command completed successfully"
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
- return jsonify(type='danger', msg='command failed: ' + output.output.decode('utf-8'))
+ res = {
+ "type": "danger",
+ "msg": "command failed: " + exec_return
+ }
+ return Response(content=json.dumps(res, indent=4), media_type="application/json")
if type == 'utf8_text_only':
- r = Response(response=output.output.decode('utf-8'), status=200, mimetype="text/plain")
- r.headers["Content-Type"] = "text/plain; charset=utf-8"
- return r
+ return Response(content=exec_return, media_type="text/plain")
-class GracefulKiller:
- kill_now = False
- def __init__(self):
- signal.signal(signal.SIGINT, self.exit_gracefully)
- signal.signal(signal.SIGTERM, self.exit_gracefully)
+async def get_host_stats(wait=5):
+ global host_stats_isUpdating
- def exit_gracefully(self, signum, frame):
- self.kill_now = True
-
-def create_self_signed_cert():
- process = subprocess.Popen(
- "openssl req -x509 -newkey rsa:4096 -sha256 -days 3650 -nodes -keyout /app/dockerapi_key.pem -out /app/dockerapi_cert.pem -subj /CN=dockerapi/O=mailcow -addext subjectAltName=DNS:dockerapi".split(),
- stdout = subprocess.PIPE, stderr = subprocess.PIPE, shell=False
- )
- process.wait()
-
-def startFlaskAPI():
- create_self_signed_cert()
try:
- ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
- ctx.check_hostname = False
- ctx.load_cert_chain(certfile='/app/dockerapi_cert.pem', keyfile='/app/dockerapi_key.pem')
- except:
- print ("Cannot initialize TLS, retrying in 5s...")
- time.sleep(5)
- app.run(debug=False, host='0.0.0.0', port=443, threaded=True, ssl_context=ctx)
+ system_time = datetime.now()
+ host_stats = {
+ "cpu": {
+ "cores": psutil.cpu_count(),
+ "usage": psutil.cpu_percent()
+ },
+ "memory": {
+ "total": psutil.virtual_memory().total,
+ "usage": psutil.virtual_memory().percent,
+ "swap": psutil.swap_memory()
+ },
+ "uptime": time.time() - psutil.boot_time(),
+ "system_time": system_time.strftime("%d.%m.%Y %H:%M:%S")
+ }
-api.add_resource(containers_get, '/containers/json')
-api.add_resource(container_get, '/containers//json')
-api.add_resource(container_post, '/containers//')
+ redis_client.set('host_stats', json.dumps(host_stats), ex=10)
+ except Exception as e:
+ res = {
+ "type": "danger",
+ "msg": str(e)
+ }
+ print(json.dumps(res, indent=4))
-if __name__ == '__main__':
- api_thread = Thread(target=startFlaskAPI)
- api_thread.daemon = True
- api_thread.start()
- killer = GracefulKiller()
- while True:
- time.sleep(1)
- if killer.kill_now:
- break
- print ("Stopping dockerapi-mailcow")
+ await asyncio.sleep(wait)
+ host_stats_isUpdating = False
+
+
+async def get_container_stats(container_id, wait=5, stop=False):
+ global containerIds_to_update
+
+ if container_id and container_id.isalnum():
+ try:
+ for container in (await async_docker_client.containers.list()):
+ if container._id == container_id:
+ res = await container.stats(stream=False)
+
+ if redis_client.exists(container_id + '_stats'):
+ stats = json.loads(redis_client.get(container_id + '_stats'))
+ else:
+ stats = []
+ stats.append(res[0])
+ if len(stats) > 3:
+ del stats[0]
+ redis_client.set(container_id + '_stats', json.dumps(stats), ex=60)
+ except Exception as e:
+ res = {
+ "type": "danger",
+ "msg": str(e)
+ }
+ print(json.dumps(res, indent=4))
+ else:
+ res = {
+ "type": "danger",
+ "msg": "no or invalid id defined"
+ }
+ print(json.dumps(res, indent=4))
+
+ await asyncio.sleep(wait)
+ if stop == True:
+ # update task was called second time, stop
+ containerIds_to_update.remove(container_id)
+ else:
+ # call update task a second time
+ await get_container_stats(container_id, wait=0, stop=True)
+
+
+if os.environ['REDIS_SLAVEOF_IP'] != "":
+ redis_client = redis.Redis(host=os.environ['REDIS_SLAVEOF_IP'], port=os.environ['REDIS_SLAVEOF_PORT'], db=0)
+else:
+ redis_client = redis.Redis(host='redis-mailcow', port=6379, db=0)
+
+async_docker_client = aiodocker.Docker(url='unix:///var/run/docker.sock')
diff --git a/data/Dockerfiles/netfilter/Dockerfile b/data/Dockerfiles/netfilter/Dockerfile
index 621da149..bc707391 100644
--- a/data/Dockerfiles/netfilter/Dockerfile
+++ b/data/Dockerfiles/netfilter/Dockerfile
@@ -1,4 +1,4 @@
-FROM alpine:3.16
+FROM alpine:3.17
LABEL maintainer "Andre Peters "
ENV XTABLES_LIBDIR /usr/lib/xtables
diff --git a/data/Dockerfiles/netfilter/server.py b/data/Dockerfiles/netfilter/server.py
index 382a3f78..1ccc150e 100644
--- a/data/Dockerfiles/netfilter/server.py
+++ b/data/Dockerfiles/netfilter/server.py
@@ -97,9 +97,9 @@ def refreshF2bregex():
f2bregex[3] = 'warning: .*\[([0-9a-f\.:]+)\]: SASL .+ authentication failed: (?!.*Connection lost to authentication server).+'
f2bregex[4] = 'warning: non-SMTP command from .*\[([0-9a-f\.:]+)]:.+'
f2bregex[5] = 'NOQUEUE: reject: RCPT from \[([0-9a-f\.:]+)].+Protocol error.+'
- f2bregex[6] = '-login: Disconnected \(auth failed, .+\): user=.*, method=.+, rip=([0-9a-f\.:]+),'
- f2bregex[7] = '-login: Aborted login \(auth failed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+'
- f2bregex[8] = '-login: Aborted login \(tried to use disallowed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+'
+ f2bregex[6] = '-login: Disconnected.+ \(auth failed, .+\): user=.*, method=.+, rip=([0-9a-f\.:]+),'
+ f2bregex[7] = '-login: Aborted login.+ \(auth failed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+'
+ f2bregex[8] = '-login: Aborted login.+ \(tried to use disallowed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+'
f2bregex[9] = 'SOGo.+ Login from \'([0-9a-f\.:]+)\' for user .+ might not have worked'
f2bregex[10] = '([0-9a-f\.:]+) \"GET \/SOGo\/.* HTTP.+\" 403 .+'
r.set('F2B_REGEX', json.dumps(f2bregex, ensure_ascii=False))
diff --git a/data/Dockerfiles/olefy/Dockerfile b/data/Dockerfiles/olefy/Dockerfile
index 889f84b4..10d63d02 100644
--- a/data/Dockerfiles/olefy/Dockerfile
+++ b/data/Dockerfiles/olefy/Dockerfile
@@ -1,4 +1,4 @@
-FROM alpine:3.16
+FROM alpine:3.17
LABEL maintainer "Andre Peters "
WORKDIR /app
diff --git a/data/Dockerfiles/phpfpm/Dockerfile b/data/Dockerfiles/phpfpm/Dockerfile
index 38c68f70..93acb33f 100644
--- a/data/Dockerfiles/phpfpm/Dockerfile
+++ b/data/Dockerfiles/phpfpm/Dockerfile
@@ -1,4 +1,4 @@
-FROM php:8.1-fpm-alpine3.16
+FROM php:8.1-fpm-alpine3.17
LABEL maintainer "Andre Peters "
ENV APCU_PECL 5.1.22
diff --git a/data/Dockerfiles/rspamd/Dockerfile b/data/Dockerfiles/rspamd/Dockerfile
index 23fcbb3f..2520ddcc 100644
--- a/data/Dockerfiles/rspamd/Dockerfile
+++ b/data/Dockerfiles/rspamd/Dockerfile
@@ -26,6 +26,7 @@ RUN apt-get update && apt-get install -y \
COPY settings.conf /etc/rspamd/settings.conf
COPY metadata_exporter.lua /usr/share/rspamd/plugins/metadata_exporter.lua
+COPY set_worker_password.sh /set_worker_password.sh
COPY docker-entrypoint.sh /docker-entrypoint.sh
ENTRYPOINT ["/docker-entrypoint.sh"]
diff --git a/data/Dockerfiles/rspamd/set_worker_password.sh b/data/Dockerfiles/rspamd/set_worker_password.sh
new file mode 100755
index 00000000..7205e888
--- /dev/null
+++ b/data/Dockerfiles/rspamd/set_worker_password.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+password_file='/etc/rspamd/override.d/worker-controller-password.inc'
+password_hash=$(/usr/bin/rspamadm pw -e -p "$1")
+
+echo 'enable_password = "'$password_hash'";' > $password_file
+
+if grep -q "$password_hash" "$password_file"; then
+ echo "OK"
+else
+ echo "ERROR"
+fi
\ No newline at end of file
diff --git a/data/Dockerfiles/unbound/Dockerfile b/data/Dockerfiles/unbound/Dockerfile
index 0b1cefe9..d9756d04 100644
--- a/data/Dockerfiles/unbound/Dockerfile
+++ b/data/Dockerfiles/unbound/Dockerfile
@@ -1,4 +1,4 @@
-FROM alpine:3.16
+FROM alpine:3.17
LABEL maintainer "Andre Peters "
diff --git a/data/Dockerfiles/watchdog/Dockerfile b/data/Dockerfiles/watchdog/Dockerfile
index 637c4680..654dea08 100644
--- a/data/Dockerfiles/watchdog/Dockerfile
+++ b/data/Dockerfiles/watchdog/Dockerfile
@@ -1,4 +1,4 @@
-FROM alpine:3.16
+FROM alpine:3.17
LABEL maintainer "André Peters "
# Installation
diff --git a/data/conf/rspamd/custom/bulk_header.map b/data/conf/rspamd/custom/bulk_header.map
index 39aa7fea..69a20af8 100644
--- a/data/conf/rspamd/custom/bulk_header.map
+++ b/data/conf/rspamd/custom/bulk_header.map
@@ -3,7 +3,6 @@
/.*episerver.*/i
/.*supergewinne.*/i
/List-Unsubscribe.*nbps\.eu/i
-/X-Mailer: AWeber.*/i
/.*regiofinder.*/i
/.*EmailSocket.*/i
/List-Unsubscribe:.*respread.*/i
diff --git a/data/conf/rspamd/meta_exporter/pushover.php b/data/conf/rspamd/meta_exporter/pushover.php
index 4c8092d1..10265d15 100644
--- a/data/conf/rspamd/meta_exporter/pushover.php
+++ b/data/conf/rspamd/meta_exporter/pushover.php
@@ -54,6 +54,7 @@ $rcpts = $headers['X-Rspamd-Rcpt'];
$sender = $headers['X-Rspamd-From'];
$ip = $headers['X-Rspamd-Ip'];
$subject = $headers['X-Rspamd-Subject'];
+$messageid= $json_body->message_id;
$priority = 0;
$symbols_array = json_decode($headers['X-Rspamd-Symbols'], true);
@@ -245,13 +246,13 @@ foreach ($rcpt_final_mailboxes as $rcpt_final) {
"token" => $api_data['token'],
"user" => $api_data['key'],
"title" => sprintf("%s", str_replace(
- array('{SUBJECT}', '{SENDER}', '{SENDER_NAME}', '{SENDER_ADDRESS}', '{TO_NAME}', '{TO_ADDRESS}'),
- array($subject, $sender, $sender_name, $sender_address, $to_name, $to_address), $title)
+ array('{SUBJECT}', '{SENDER}', '{SENDER_NAME}', '{SENDER_ADDRESS}', '{TO_NAME}', '{TO_ADDRESS}', '{MSG_ID}'),
+ array($subject, $sender, $sender_name, $sender_address, $to_name, $to_address, $messageid), $title)
),
"priority" => $priority,
"message" => sprintf("%s", str_replace(
- array('{SUBJECT}', '{SENDER}', '{SENDER_NAME}', '{SENDER_ADDRESS}', '{TO_NAME}', '{TO_ADDRESS}', '\n'),
- array($subject, $sender, $sender_name, $sender_address, $to_name, $to_address, PHP_EOL), $text)
+ array('{SUBJECT}', '{SENDER}', '{SENDER_NAME}', '{SENDER_ADDRESS}', '{TO_NAME}', '{TO_ADDRESS}', '{MSG_ID}', '\n'),
+ array($subject, $sender, $sender_name, $sender_address, $to_name, $to_address, $messageid, PHP_EOL), $text)
),
"sound" => $attributes['sound'] ?? "pushover"
);
diff --git a/data/web/_status.502.html b/data/web/_status.502.html
index efbc0e8b..35a66ba9 100644
--- a/data/web/_status.502.html
+++ b/data/web/_status.502.html
@@ -13,12 +13,12 @@
Please check the logs or contact support if the error persists.
Quick debugging
Check Nginx and PHP logs:
- docker-compose logs --tail=200 php-fpm-mailcow nginx-mailcow
+ docker compose logs --tail=200 php-fpm-mailcow nginx-mailcow
Make sure your SQL credentials in mailcow.conf (a link to .env) do fit your initialized SQL volume. If you see an access denied, you might have the wrong mailcow.conf:
- source mailcow.conf ; docker-compose exec mysql-mailcow mysql -u${DBUSER} -p${DBPASS} ${DBNAME}
+ source mailcow.conf ; docker compose exec mysql-mailcow mysql -u${DBUSER} -p${DBPASS} ${DBNAME}
In case of a previous failed installation, create a backup of your existing data, followed by removing all volumes and starting over (NEVER do this with a production system, it will remove ALL data):
BACKUP_LOCATION=/tmp/ ./helper-scripts/backup_and_restore.sh backup all
- docker-compose down --volumes ; docker-compose up -d
+ docker compose down --volumes ; docker compose up -d
Make sure your timezone is correct. Use "America/New_York" for example, do not use spaces. Check here for a list.
Click to learn more about getting support.