Merge pull request #5019 from mailcow/staging

2023-01a
This commit is contained in:
Niklas Meyer 2023-01-25 16:29:22 +01:00 committed by GitHub
commit 640f535e99
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 297 additions and 442 deletions

View File

@ -1,63 +0,0 @@
name: mailcow Integration Tests
on:
  push:
    branches: [ "master", "staging" ]
  workflow_dispatch:
permissions:
  contents: read
jobs:
  integration_tests:
    runs-on: ubuntu-latest
    steps:
      - name: Setup Ansible
        run: |
          export DEBIAN_FRONTEND=noninteractive
          sudo apt-get update
          sudo apt-get install python3 python3-pip git
          sudo pip3 install ansible
      - name: Prepair Test Environment
        run: |
          git clone https://github.com/mailcow/mailcow-integration-tests.git --branch $(curl -sL https://api.github.com/repos/mailcow/mailcow-integration-tests/releases/latest | jq -r '.tag_name') --single-branch .
          ./fork_check.sh
          ./ci.sh
          ./ci-pip-requirements.sh
        env:
          VAULT_PW: ${{ secrets.MAILCOW_TESTS_VAULT_PW }}
          VAULT_FILE: ${{ secrets.MAILCOW_TESTS_VAULT_FILE }}
      - name: Start Integration Test Server
        run: |
          ./fork_check.sh
          ansible-playbook mailcow-start-server.yml --diff
        env:
          PY_COLORS: '1'
          ANSIBLE_FORCE_COLOR: '1'
          ANSIBLE_HOST_KEY_CHECKING: 'false'
      - name: Setup Integration Test Server
        run: |
          ./fork_check.sh
          sleep 30
          ansible-playbook mailcow-setup-server.yml --private-key id_ssh_rsa --diff
        env:
          PY_COLORS: '1'
          ANSIBLE_FORCE_COLOR: '1'
          ANSIBLE_HOST_KEY_CHECKING: 'false'
      - name: Run Integration Tests
        run: |
          ./fork_check.sh
          ansible-playbook mailcow-integration-tests.yml --private-key id_ssh_rsa --diff
        env:
          PY_COLORS: '1'
          ANSIBLE_FORCE_COLOR: '1'
          ANSIBLE_HOST_KEY_CHECKING: 'false'
      - name: Delete Integration Test Server
        if: always()
        run: |
          ./fork_check.sh
          ansible-playbook mailcow-delete-server.yml --diff
        env:
          PY_COLORS: '1'
          ANSIBLE_FORCE_COLOR: '1'
          ANSIBLE_HOST_KEY_CHECKING: 'false'

View File

@ -1,6 +1,5 @@
# mailcow: dockerized - 🐮 + 🐋 = 💕
- [![Mailcow Integration Tests](https://github.com/mailcow/mailcow-dockerized/actions/workflows/integration_tests.yml/badge.svg?branch=master)](https://github.com/mailcow/mailcow-dockerized/actions/workflows/integration_tests.yml)
[![Translation status](https://translate.mailcow.email/widgets/mailcow-dockerized/-/translation/svg-badge.svg)](https://translate.mailcow.email/engage/mailcow-dockerized/)
[![Twitter URL](https://img.shields.io/twitter/url/https/twitter.com/mailcow_email.svg?style=social&label=Follow%20%40mailcow_email)](https://twitter.com/mailcow_email)

View File

@ -213,11 +213,13 @@ while true; do
done
ADDITIONAL_WC_ARR+=('autodiscover' 'autoconfig')
+ if [[ ${SKIP_IP_CHECK} != "y" ]]; then
# Start IP detection
log_f "Detecting IP addresses..."
IPV4=$(get_ipv4)
IPV6=$(get_ipv6)
log_f "OK: ${IPV4}, ${IPV6:-"0000:0000:0000:0000:0000:0000:0000:0000"}"
+ fi
#########################################
# IP and webroot challenge verification #

View File

@ -13,6 +13,7 @@ RUN apk add --update --no-cache python3 \
  fastapi \
  uvicorn \
  aiodocker \
+ docker \
  redis
COPY docker-entrypoint.sh /app/
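
The added docker package is the synchronous Docker SDK for Python that the rewritten dockerapi code in the following section uses alongside aiodocker. A rough, illustrative sketch of the calls it enables, under the assumption that the container keeps mounting /var/run/docker.sock as before (the container id is a placeholder):

import docker

# Connect to the local Docker socket; base_url and version mirror the values used later in main.py.
sync_docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock', version='auto')
# Look a container up by id and run a command inside it; exec_run returns exit_code and output.
for container in sync_docker_client.containers.list(all=True, filters={"id": "<container_id>"}):
    result = container.exec_run(["/bin/sh", "-c", "echo hello"])
    print(result.exit_code, result.output.decode('utf-8'))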

View File

@ -1,5 +1,6 @@
from fastapi import FastAPI, Response, Request
import aiodocker
+ import docker
import psutil
import sys
import re
@ -9,11 +10,38 @@ import json
import asyncio
import redis
from datetime import datetime
+ import logging
+ from logging.config import dictConfig
+ log_config = {
+   "version": 1,
+   "disable_existing_loggers": False,
+   "formatters": {
+     "default": {
+       "()": "uvicorn.logging.DefaultFormatter",
+       "fmt": "%(levelprefix)s %(asctime)s %(message)s",
+       "datefmt": "%Y-%m-%d %H:%M:%S",
+     },
+   },
+   "handlers": {
+     "default": {
+       "formatter": "default",
+       "class": "logging.StreamHandler",
+       "stream": "ext://sys.stderr",
+     },
+   },
+   "loggers": {
+     "api-logger": {"handlers": ["default"], "level": "INFO"},
+   },
+ }
+ dictConfig(log_config)
containerIds_to_update = []
host_stats_isUpdating = False
app = FastAPI()
+ logger = logging.getLogger('api-logger')
@app.get("/host/stats")
@ -21,18 +49,15 @@ async def get_host_update_stats():
  global host_stats_isUpdating
  if host_stats_isUpdating == False:
-   print("start host stats task")
    asyncio.create_task(get_host_stats())
    host_stats_isUpdating = True
  while True:
    if redis_client.exists('host_stats'):
      break
-   print("wait for host_stats results")
    await asyncio.sleep(1.5)
- print("host stats pulled")
  stats = json.loads(redis_client.get('host_stats'))
  return Response(content=json.dumps(stats, indent=4), media_type="application/json")
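
The /host/stats handler above follows a cache-and-poll pattern: a background task periodically writes the host statistics as JSON into Redis under the key host_stats, and the request handler only waits until that key exists. A minimal illustrative sketch of the same pattern (names follow the diff; the helper itself is not mailcow code):

import asyncio
import json
import redis

redis_client = redis.Redis(host='redis-mailcow', port=6379, db=0)

async def wait_for_host_stats(poll_interval=1.5):
    # Poll until the background task has written the cache key, then return the parsed JSON.
    while not redis_client.exists('host_stats'):
        await asyncio.sleep(poll_interval)
    return json.loads(redis_client.get('host_stats'))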
@ -106,14 +131,14 @@ async def post_containers(container_id : str, post_action : str, request: Reques
    else:
      api_call_method_name = '__'.join(['container_post', str(post_action) ])
-     docker_utils = DockerUtils(async_docker_client)
+     docker_utils = DockerUtils(sync_docker_client)
      api_call_method = getattr(docker_utils, api_call_method_name, lambda container_id: Response(content=json.dumps({'type': 'danger', 'msg':'container_post - unknown api call' }, indent=4), media_type="application/json"))
-     print("api call: %s, container_id: %s" % (api_call_method_name, container_id))
+     logger.info("api call: %s, container_id: %s" % (api_call_method_name, container_id))
-     return await api_call_method(container_id, request_json)
+     return api_call_method(container_id, request_json)
  except Exception as e:
-   print("error - container_post: %s" % str(e))
+   logger.error("error - container_post: %s" % str(e))
    res = {
      "type": "danger",
      "msg": str(e)
@ -152,398 +177,289 @@ class DockerUtils:
    self.docker_client = docker_client

  # api call: container_post - post_action: stop
-   async def container_post__stop(self, container_id, request_json):
-     for container in (await self.docker_client.containers.list()):
-       if container._id == container_id:
-         await container.stop()
-     res = {
-       'type': 'success',
-       'msg': 'command completed successfully'
-     }
-     return Response(content=json.dumps(res, indent=4), media_type="application/json")
+   def container_post__stop(self, container_id, request_json):
+     for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
+       container.stop()
+     res = { 'type': 'success', 'msg': 'command completed successfully'}
+     return Response(content=json.dumps(res, indent=4), media_type="application/json")

  # api call: container_post - post_action: start
-   async def container_post__start(self, container_id, request_json):
-     for container in (await self.docker_client.containers.list()):
-       if container._id == container_id:
-         await container.start()
-     res = {
-       'type': 'success',
-       'msg': 'command completed successfully'
-     }
-     return Response(content=json.dumps(res, indent=4), media_type="application/json")
+   def container_post__start(self, container_id, request_json):
+     for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
+       container.start()
+     res = { 'type': 'success', 'msg': 'command completed successfully'}
+     return Response(content=json.dumps(res, indent=4), media_type="application/json")

  # api call: container_post - post_action: restart
-   async def container_post__restart(self, container_id, request_json):
-     for container in (await self.docker_client.containers.list()):
-       if container._id == container_id:
-         await container.restart()
-     res = {
-       'type': 'success',
-       'msg': 'command completed successfully'
-     }
-     return Response(content=json.dumps(res, indent=4), media_type="application/json")
+   def container_post__restart(self, container_id, request_json):
+     for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
+       container.restart()
+     res = { 'type': 'success', 'msg': 'command completed successfully'}
+     return Response(content=json.dumps(res, indent=4), media_type="application/json")

  # api call: container_post - post_action: top
-   async def container_post__top(self, container_id, request_json):
-     for container in (await self.docker_client.containers.list()):
-       if container._id == container_id:
-         ps_exec = await container.exec("ps")
-         async with ps_exec.start(detach=False) as stream:
-           ps_return = await stream.read_out()
-         exec_details = await ps_exec.inspect()
-         if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
-           res = {
-             'type': 'success',
-             'msg': ps_return.data.decode('utf-8')
-           }
-           return Response(content=json.dumps(res, indent=4), media_type="application/json")
-         else:
-           res = {
-             'type': 'danger',
-             'msg': ''
-           }
-           return Response(content=json.dumps(res, indent=4), media_type="application/json")
+   def container_post__top(self, container_id, request_json):
+     for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
+       res = { 'type': 'success', 'msg': container.top()}
+       return Response(content=json.dumps(res, indent=4), media_type="application/json")
+   # api call: container_post - post_action: stats
+   def container_post__stats(self, container_id, request_json):
+     for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
+       for stat in container.stats(decode=True, stream=True):
+         res = { 'type': 'success', 'msg': stat}
+         return Response(content=json.dumps(res, indent=4), media_type="application/json")

  # api call: container_post - post_action: exec - cmd: mailq - task: delete
-   async def container_post__exec__mailq__delete(self, container_id, request_json):
-     if 'items' in request_json:
-       r = re.compile("^[0-9a-fA-F]+$")
-       filtered_qids = filter(r.match, request_json['items'])
-       if filtered_qids:
-         flagged_qids = ['-d %s' % i for i in filtered_qids]
-         sanitized_string = str(' '.join(flagged_qids))
-         for container in (await self.docker_client.containers.list()):
-           if container._id == container_id:
-             postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
-             return await exec_run_handler('generic', postsuper_r_exec)
+   def container_post__exec__mailq__delete(self, container_id, request_json):
+     if 'items' in request_json:
+       r = re.compile("^[0-9a-fA-F]+$")
+       filtered_qids = filter(r.match, request_json['items'])
+       if filtered_qids:
+         flagged_qids = ['-d %s' % i for i in filtered_qids]
+         sanitized_string = str(' '.join(flagged_qids));
+         for container in self.docker_client.containers.list(filters={"id": container_id}):
+           postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
+           return exec_run_handler('generic', postsuper_r)

  # api call: container_post - post_action: exec - cmd: mailq - task: hold
-   async def container_post__exec__mailq__hold(self, container_id, request_json):
-     if 'items' in request_json:
-       r = re.compile("^[0-9a-fA-F]+$")
-       filtered_qids = filter(r.match, request_json['items'])
-       if filtered_qids:
-         flagged_qids = ['-h %s' % i for i in filtered_qids]
-         sanitized_string = str(' '.join(flagged_qids))
-         for container in (await self.docker_client.containers.list()):
-           if container._id == container_id:
-             postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
-             return await exec_run_handler('generic', postsuper_r_exec)
+   def container_post__exec__mailq__hold(self, container_id, request_json):
+     if 'items' in request_json:
+       r = re.compile("^[0-9a-fA-F]+$")
+       filtered_qids = filter(r.match, request_json['items'])
+       if filtered_qids:
+         flagged_qids = ['-h %s' % i for i in filtered_qids]
+         sanitized_string = str(' '.join(flagged_qids));
+         for container in self.docker_client.containers.list(filters={"id": container_id}):
+           postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
+           return exec_run_handler('generic', postsuper_r)

  # api call: container_post - post_action: exec - cmd: mailq - task: cat
-   async def container_post__exec__mailq__cat(self, container_id, request_json):
-     if 'items' in request_json:
-       r = re.compile("^[0-9a-fA-F]+$")
-       filtered_qids = filter(r.match, request_json['items'])
-       if filtered_qids:
-         sanitized_string = str(' '.join(filtered_qids))
-         for container in (await self.docker_client.containers.list()):
-           if container._id == container_id:
-             postcat_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
-             return await exec_run_handler('utf8_text_only', postcat_exec)
+   def container_post__exec__mailq__cat(self, container_id, request_json):
+     if 'items' in request_json:
+       r = re.compile("^[0-9a-fA-F]+$")
+       filtered_qids = filter(r.match, request_json['items'])
+       if filtered_qids:
+         sanitized_string = str(' '.join(filtered_qids));
+         for container in self.docker_client.containers.list(filters={"id": container_id}):
+           postcat_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
+           if not postcat_return:
+             postcat_return = 'err: invalid'
+           return exec_run_handler('utf8_text_only', postcat_return)

  # api call: container_post - post_action: exec - cmd: mailq - task: unhold
-   async def container_post__exec__mailq__unhold(self, container_id, request_json):
-     if 'items' in request_json:
-       r = re.compile("^[0-9a-fA-F]+$")
-       filtered_qids = filter(r.match, request_json['items'])
-       if filtered_qids:
-         flagged_qids = ['-H %s' % i for i in filtered_qids]
-         sanitized_string = str(' '.join(flagged_qids))
-         for container in (await self.docker_client.containers.list()):
-           if container._id == container_id:
-             postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
-             return await exec_run_handler('generic', postsuper_r_exec)
+   def container_post__exec__mailq__unhold(self, container_id, request_json):
+     if 'items' in request_json:
+       r = re.compile("^[0-9a-fA-F]+$")
+       filtered_qids = filter(r.match, request_json['items'])
+       if filtered_qids:
+         flagged_qids = ['-H %s' % i for i in filtered_qids]
+         sanitized_string = str(' '.join(flagged_qids));
+         for container in self.docker_client.containers.list(filters={"id": container_id}):
+           postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
+           return exec_run_handler('generic', postsuper_r)

  # api call: container_post - post_action: exec - cmd: mailq - task: deliver
-   async def container_post__exec__mailq__deliver(self, container_id, request_json):
-     if 'items' in request_json:
-       r = re.compile("^[0-9a-fA-F]+$")
-       filtered_qids = filter(r.match, request_json['items'])
-       if filtered_qids:
-         flagged_qids = ['-i %s' % i for i in filtered_qids]
-         for container in (await self.docker_client.containers.list()):
-           if container._id == container_id:
-             for i in flagged_qids:
-               postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
-               async with postsuper_r_exec.start(detach=False) as stream:
-                 postsuper_r_return = await stream.read_out()
-             # todo: check each exit code
-             res = {
-               'type': 'success',
-               'msg': 'Scheduled immediate delivery'
-             }
-             return Response(content=json.dumps(res, indent=4), media_type="application/json")
+   def container_post__exec__mailq__deliver(self, container_id, request_json):
+     if 'items' in request_json:
+       r = re.compile("^[0-9a-fA-F]+$")
+       filtered_qids = filter(r.match, request_json['items'])
+       if filtered_qids:
+         flagged_qids = ['-i %s' % i for i in filtered_qids]
+         for container in self.docker_client.containers.list(filters={"id": container_id}):
+           for i in flagged_qids:
+             postqueue_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
+             # todo: check each exit code
+           res = { 'type': 'success', 'msg': 'Scheduled immediate delivery'}
+           return Response(content=json.dumps(res, indent=4), media_type="application/json")

  # api call: container_post - post_action: exec - cmd: mailq - task: list
-   async def container_post__exec__mailq__list(self, container_id, request_json):
-     for container in (await self.docker_client.containers.list()):
-       if container._id == container_id:
-         mailq_exec = await container.exec(["/usr/sbin/postqueue", "-j"], user='postfix')
-         return await exec_run_handler('utf8_text_only', mailq_exec)
+   def container_post__exec__mailq__list(self, container_id, request_json):
+     for container in self.docker_client.containers.list(filters={"id": container_id}):
+       mailq_return = container.exec_run(["/usr/sbin/postqueue", "-j"], user='postfix')
+       return exec_run_handler('utf8_text_only', mailq_return)

  # api call: container_post - post_action: exec - cmd: mailq - task: flush
-   async def container_post__exec__mailq__flush(self, container_id, request_json):
-     for container in (await self.docker_client.containers.list()):
-       if container._id == container_id:
-         postsuper_r_exec = await container.exec(["/usr/sbin/postqueue", "-f"], user='postfix')
-         return await exec_run_handler('generic', postsuper_r_exec)
+   def container_post__exec__mailq__flush(self, container_id, request_json):
+     for container in self.docker_client.containers.list(filters={"id": container_id}):
+       postqueue_r = container.exec_run(["/usr/sbin/postqueue", "-f"], user='postfix')
+       return exec_run_handler('generic', postqueue_r)

  # api call: container_post - post_action: exec - cmd: mailq - task: super_delete
-   async def container_post__exec__mailq__super_delete(self, container_id, request_json):
-     for container in (await self.docker_client.containers.list()):
-       if container._id == container_id:
-         postsuper_r_exec = await container.exec(["/usr/sbin/postsuper", "-d", "ALL"])
-         return await exec_run_handler('generic', postsuper_r_exec)
+   def container_post__exec__mailq__super_delete(self, container_id, request_json):
+     for container in self.docker_client.containers.list(filters={"id": container_id}):
+       postsuper_r = container.exec_run(["/usr/sbin/postsuper", "-d", "ALL"])
+       return exec_run_handler('generic', postsuper_r)

  # api call: container_post - post_action: exec - cmd: system - task: fts_rescan
-   async def container_post__exec__system__fts_rescan(self, container_id, request_json):
-     if 'username' in request_json:
-       for container in (await self.docker_client.containers.list()):
-         if container._id == container_id:
-           rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail')
-           async with rescan_exec.start(detach=False) as stream:
-             rescan_return = await stream.read_out()
-           exec_details = await rescan_exec.inspect()
-           if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
-             res = {
-               'type': 'success',
-               'msg': 'fts_rescan: rescan triggered'
-             }
-             return Response(content=json.dumps(res, indent=4), media_type="application/json")
-           else:
-             res = {
-               'type': 'warning',
-               'msg': 'fts_rescan error'
-             }
-             return Response(content=json.dumps(res, indent=4), media_type="application/json")
-     if 'all' in request_json:
-       for container in (await self.docker_client.containers.list()):
-         if container._id == container_id:
-           rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')
-           async with rescan_exec.start(detach=False) as stream:
-             rescan_return = await stream.read_out()
-           exec_details = await rescan_exec.inspect()
-           if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
-             res = {
-               'type': 'success',
-               'msg': 'fts_rescan: rescan triggered'
-             }
-             return Response(content=json.dumps(res, indent=4), media_type="application/json")
-           else:
-             res = {
-               'type': 'warning',
-               'msg': 'fts_rescan error'
-             }
-             return Response(content=json.dumps(res, indent=4), media_type="application/json")
+   def container_post__exec__system__fts_rescan(self, container_id, request_json):
+     if 'username' in request_json:
+       for container in self.docker_client.containers.list(filters={"id": container_id}):
+         rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail')
+         if rescan_return.exit_code == 0:
+           res = { 'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
+           return Response(content=json.dumps(res, indent=4), media_type="application/json")
+         else:
+           res = { 'type': 'warning', 'msg': 'fts_rescan error'}
+           return Response(content=json.dumps(res, indent=4), media_type="application/json")
+     if 'all' in request_json:
+       for container in self.docker_client.containers.list(filters={"id": container_id}):
+         rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')
+         if rescan_return.exit_code == 0:
+           res = { 'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
+           return Response(content=json.dumps(res, indent=4), media_type="application/json")
+         else:
+           res = { 'type': 'warning', 'msg': 'fts_rescan error'}
+           return Response(content=json.dumps(res, indent=4), media_type="application/json")

  # api call: container_post - post_action: exec - cmd: system - task: df
-   async def container_post__exec__system__df(self, container_id, request_json):
-     if 'dir' in request_json:
-       for container in (await self.docker_client.containers.list()):
-         if container._id == container_id:
-           df_exec = await container.exec(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
-           async with df_exec.start(detach=False) as stream:
-             df_return = await stream.read_out()
-           print(df_return)
-           print(await df_exec.inspect())
-           exec_details = await df_exec.inspect()
-           if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
-             return df_return.data.decode('utf-8').rstrip()
-           else:
-             return "0,0,0,0,0,0"
+   def container_post__exec__system__df(self, container_id, request_json):
+     if 'dir' in request_json:
+       for container in self.docker_client.containers.list(filters={"id": container_id}):
+         df_return = container.exec_run(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
+         if df_return.exit_code == 0:
+           return df_return.output.decode('utf-8').rstrip()
+         else:
+           return "0,0,0,0,0,0"

  # api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade
-   async def container_post__exec__system__mysql_upgrade(self, container_id, request_json):
-     for container in (await self.docker_client.containers.list()):
-       if container._id == container_id:
-         sql_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
-         async with sql_exec.start(detach=False) as stream:
-           sql_return = await stream.read_out()
-         exec_details = await sql_exec.inspect()
-         if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
-           matched = False
-           for line in sql_return.data.decode('utf-8').split("\n"):
-             if 'is already upgraded to' in line:
-               matched = True
-           if matched:
-             res = {
-               'type': 'success',
-               'msg': 'mysql_upgrade: already upgraded',
-               'text': sql_return.data.decode('utf-8')
-             }
-             return Response(content=json.dumps(res, indent=4), media_type="application/json")
-           else:
-             await container.restart()
-             res = {
-               'type': 'warning',
-               'msg': 'mysql_upgrade: upgrade was applied',
-               'text': sql_return.data.decode('utf-8')
-             }
-             return Response(content=json.dumps(res, indent=4), media_type="application/json")
-         else:
-           res = {
-             'type': 'error',
-             'msg': 'mysql_upgrade: error running command',
-             'text': sql_return.data.decode('utf-8')
-           }
-           return Response(content=json.dumps(res, indent=4), media_type="application/json")
+   def container_post__exec__system__mysql_upgrade(self, container_id, request_json):
+     for container in self.docker_client.containers.list(filters={"id": container_id}):
+       sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
+       if sql_return.exit_code == 0:
+         matched = False
+         for line in sql_return.output.decode('utf-8').split("\n"):
+           if 'is already upgraded to' in line:
+             matched = True
+         if matched:
+           res = { 'type': 'success', 'msg':'mysql_upgrade: already upgraded', 'text': sql_return.output.decode('utf-8')}
+           return Response(content=json.dumps(res, indent=4), media_type="application/json")
+         else:
+           container.restart()
+           res = { 'type': 'warning', 'msg':'mysql_upgrade: upgrade was applied', 'text': sql_return.output.decode('utf-8')}
+           return Response(content=json.dumps(res, indent=4), media_type="application/json")
+       else:
+         res = { 'type': 'error', 'msg': 'mysql_upgrade: error running command', 'text': sql_return.output.decode('utf-8')}
+         return Response(content=json.dumps(res, indent=4), media_type="application/json")

  # api call: container_post - post_action: exec - cmd: system - task: mysql_tzinfo_to_sql
-   async def container_post__exec__system__mysql_tzinfo_to_sql(self, container_id, request_json):
-     for container in (await self.docker_client.containers.list()):
-       if container._id == container_id:
-         sql_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
-         async with sql_exec.start(detach=False) as stream:
-           sql_return = await stream.read_out()
-         exec_details = await sql_exec.inspect()
-         if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
-           res = {
-             'type': 'info',
-             'msg': 'mysql_tzinfo_to_sql: command completed successfully',
-             'text': sql_return.data.decode('utf-8')
-           }
-           return Response(content=json.dumps(res, indent=4), media_type="application/json")
-         else:
-           res = {
-             'type': 'error',
-             'msg': 'mysql_tzinfo_to_sql: error running command',
-             'text': sql_return.data.decode('utf-8')
-           }
-           return Response(content=json.dumps(res, indent=4), media_type="application/json")
+   def container_post__exec__system__mysql_tzinfo_to_sql(self, container_id, request_json):
+     for container in self.docker_client.containers.list(filters={"id": container_id}):
+       sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
+       if sql_return.exit_code == 0:
+         res = { 'type': 'info', 'msg': 'mysql_tzinfo_to_sql: command completed successfully', 'text': sql_return.output.decode('utf-8')}
+         return Response(content=json.dumps(res, indent=4), media_type="application/json")
+       else:
+         res = { 'type': 'error', 'msg': 'mysql_tzinfo_to_sql: error running command', 'text': sql_return.output.decode('utf-8')}
+         return Response(content=json.dumps(res, indent=4), media_type="application/json")

  # api call: container_post - post_action: exec - cmd: reload - task: dovecot
-   async def container_post__exec__reload__dovecot(self, container_id, request_json):
-     for container in (await self.docker_client.containers.list()):
-       if container._id == container_id:
-         reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
-         return await exec_run_handler('generic', reload_exec)
+   def container_post__exec__reload__dovecot(self, container_id, request_json):
+     for container in self.docker_client.containers.list(filters={"id": container_id}):
+       reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
+       return exec_run_handler('generic', reload_return)

  # api call: container_post - post_action: exec - cmd: reload - task: postfix
-   async def container_post__exec__reload__postfix(self, container_id, request_json):
-     for container in (await self.docker_client.containers.list()):
-       if container._id == container_id:
-         reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
-         return await exec_run_handler('generic', reload_exec)
+   def container_post__exec__reload__postfix(self, container_id, request_json):
+     for container in self.docker_client.containers.list(filters={"id": container_id}):
+       reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
+       return exec_run_handler('generic', reload_return)

  # api call: container_post - post_action: exec - cmd: reload - task: nginx
-   async def container_post__exec__reload__nginx(self, container_id, request_json):
-     for container in (await self.docker_client.containers.list()):
-       if container._id == container_id:
-         reload_exec = await container.exec(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
-         return await exec_run_handler('generic', reload_exec)
+   def container_post__exec__reload__nginx(self, container_id, request_json):
+     for container in self.docker_client.containers.list(filters={"id": container_id}):
+       reload_return = container.exec_run(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
+       return exec_run_handler('generic', reload_return)

  # api call: container_post - post_action: exec - cmd: sieve - task: list
-   async def container_post__exec__sieve__list(self, container_id, request_json):
-     if 'username' in request_json:
-       for container in (await self.docker_client.containers.list()):
-         if container._id == container_id:
-           sieve_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"])
-           return await exec_run_handler('utf8_text_only', sieve_exec)
+   def container_post__exec__sieve__list(self, container_id, request_json):
+     if 'username' in request_json:
+       for container in self.docker_client.containers.list(filters={"id": container_id}):
+         sieve_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"])
+         return exec_run_handler('utf8_text_only', sieve_return)

  # api call: container_post - post_action: exec - cmd: sieve - task: print
-   async def container_post__exec__sieve__print(self, container_id, request_json):
-     if 'username' in request_json and 'script_name' in request_json:
-       for container in (await self.docker_client.containers.list()):
-         if container._id == container_id:
-           cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]
-           sieve_exec = await container.exec(cmd)
-           return await exec_run_handler('utf8_text_only', sieve_exec)
+   def container_post__exec__sieve__print(self, container_id, request_json):
+     if 'username' in request.json and 'script_name' in request_json:
+       for container in self.docker_client.containers.list(filters={"id": container_id}):
+         cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]
+         sieve_return = container.exec_run(cmd)
+         return exec_run_handler('utf8_text_only', sieve_return)

  # api call: container_post - post_action: exec - cmd: maildir - task: cleanup
-   async def container_post__exec__maildir__cleanup(self, container_id, request_json):
-     if 'maildir' in request_json:
-       for container in (await self.docker_client.containers.list()):
-         if container._id == container_id:
-           sane_name = re.sub(r'\W+', '', request_json['maildir'])
-           cmd = ["/bin/bash", "-c", "if [[ -d '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' ]]; then /bin/mv '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"]
-           maildir_cleanup_exec = await container.exec(cmd, user='vmail')
-           return await exec_run_handler('generic', maildir_cleanup_exec)
+   def container_post__exec__maildir__cleanup(self, container_id, request_json):
+     if 'maildir' in request_json:
+       for container in self.docker_client.containers.list(filters={"id": container_id}):
+         sane_name = re.sub(r'\W+', '', request_json['maildir'])
+         cmd = ["/bin/bash", "-c", "if [[ -d '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' ]]; then /bin/mv '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"]
+         maildir_cleanup = container.exec_run(cmd, user='vmail')
+         return exec_run_handler('generic', maildir_cleanup)

  # api call: container_post - post_action: exec - cmd: rspamd - task: worker_password
-   async def container_post__exec__rspamd__worker_password(self, container_id, request_json):
-     if 'raw' in request_json:
-       for container in (await self.docker_client.containers.list()):
-         if container._id == container_id:
-           cmd = "./set_worker_password.sh '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
-           rspamd_password_exec = await container.exec(cmd, user='_rspamd')
-           async with rspamd_password_exec.start(detach=False) as stream:
-             rspamd_password_return = await stream.read_out()
-           matched = False
-           if "OK" in rspamd_password_return.data.decode('utf-8'):
-             matched = True
-             await container.restart()
-           if matched:
-             res = {
-               'type': 'success',
-               'msg': 'command completed successfully'
-             }
-             return Response(content=json.dumps(res, indent=4), media_type="application/json")
-           else:
-             res = {
-               'type': 'danger',
-               'msg': 'command did not complete'
-             }
-             return Response(content=json.dumps(res, indent=4), media_type="application/json")
+   def container_post__exec__rspamd__worker_password(self, container_id, request_json):
+     if 'raw' in request_json:
+       for container in self.docker_client.containers.list(filters={"id": container_id}):
+         cmd = "/usr/bin/rspamadm pw -e -p '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
+         cmd_response = exec_cmd_container(container, cmd, user="_rspamd")
+         matched = False
+         for line in cmd_response.split("\n"):
+           if '$2$' in line:
+             hash = line.strip()
+             hash_out = re.search('\$2\$.+$', hash).group(0)
+             rspamd_passphrase_hash = re.sub('[^0-9a-zA-Z\$]+', '', hash_out.rstrip())
+             rspamd_password_filename = "/etc/rspamd/override.d/worker-controller-password.inc"
+             cmd = '''/bin/echo 'enable_password = "%s";' > %s && cat %s''' % (rspamd_passphrase_hash, rspamd_password_filename, rspamd_password_filename)
+             cmd_response = exec_cmd_container(container, cmd, user="_rspamd")
+             if rspamd_passphrase_hash.startswith("$2$") and rspamd_passphrase_hash in cmd_response:
+               container.restart()
+               matched = True
+         if matched:
+           res = { 'type': 'success', 'msg': 'command completed successfully' }
+           logger.info('success changing Rspamd password')
+           return Response(content=json.dumps(res, indent=4), media_type="application/json")
+         else:
+           logger.error('failed changing Rspamd password')
+           res = { 'type': 'danger', 'msg': 'command did not complete' }
+           return Response(content=json.dumps(res, indent=4), media_type="application/json")

- async def exec_run_handler(type, exec_obj):
-   async with exec_obj.start(detach=False) as stream:
-     exec_return = await stream.read_out()
-   if exec_return == None:
-     exec_return = ""
-   else:
-     exec_return = exec_return.data.decode('utf-8')
-   if type == 'generic':
-     exec_details = await exec_obj.inspect()
-     if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
-       res = {
-         "type": "success",
-         "msg": "command completed successfully"
-       }
-       return Response(content=json.dumps(res, indent=4), media_type="application/json")
-     else:
-       res = {
-         "type": "success",
-         "msg": "'command failed: " + exec_return
-       }
-       return Response(content=json.dumps(res, indent=4), media_type="application/json")
-   if type == 'utf8_text_only':
-     return Response(content=exec_return, media_type="text/plain")
+ def exec_cmd_container(container, cmd, user, timeout=2, shell_cmd="/bin/bash"):
+   def recv_socket_data(c_socket, timeout):
+     c_socket.setblocking(0)
+     total_data=[]
+     data=''
+     begin=time.time()
+     while True:
+       if total_data and time.time()-begin > timeout:
+         break
+       elif time.time()-begin > timeout*2:
+         break
+       try:
+         data = c_socket.recv(8192)
+         if data:
+           total_data.append(data.decode('utf-8'))
+           #change the beginning time for measurement
+           begin=time.time()
+         else:
+           #sleep for sometime to indicate a gap
+           time.sleep(0.1)
+           break
+       except:
+         pass
+     return ''.join(total_data)
+   try :
+     socket = container.exec_run([shell_cmd], stdin=True, socket=True, user=user).output._sock
+     if not cmd.endswith("\n"):
+       cmd = cmd + "\n"
+     socket.send(cmd.encode('utf-8'))
+     data = recv_socket_data(socket, timeout)
+     socket.close()
+     return data
+   except Exception as e:
+     logger.error("error - exec_cmd_container: %s" % str(e))
+     traceback.print_exc(file=sys.stdout)
+ def exec_run_handler(type, output):
+   if type == 'generic':
+     if output.exit_code == 0:
+       res = { 'type': 'success', 'msg': 'command completed successfully' }
+       return Response(content=json.dumps(res, indent=4), media_type="application/json")
+     else:
+       res = { 'type': 'danger', 'msg': 'command failed: ' + output.output.decode('utf-8') }
+       return Response(content=json.dumps(res, indent=4), media_type="application/json")
+   if type == 'utf8_text_only':
+     return Response(content=output.output.decode('utf-8'), media_type="text/plain")

async def get_host_stats(wait=5):
  global host_stats_isUpdating
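
All of the rewritten container_post__* helpers above share one shape: select the container by id with containers.list(filters={"id": ...}), call exec_run(), and hand the resulting ExecResult (exit code plus raw output bytes) to exec_run_handler(). A hedged usage sketch outside FastAPI, flushing the Postfix queue the same way container_post__exec__mailq__flush does (the container name filter is only a placeholder):

import docker

client = docker.DockerClient(base_url='unix://var/run/docker.sock', version='auto')
for container in client.containers.list(filters={"name": "postfix-mailcow"}):
    result = container.exec_run(["/usr/sbin/postqueue", "-f"], user='postfix')
    # ExecResult behaves like an (exit_code, output) pair, which exec_run_handler() inspects
    print("exit code:", result.exit_code)
    print(result.output.decode('utf-8'))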
@ -570,12 +486,10 @@ async def get_host_stats(wait=5):
      "type": "danger",
      "msg": str(e)
    }
-   print(json.dumps(res, indent=4))
  await asyncio.sleep(wait)
  host_stats_isUpdating = False

async def get_container_stats(container_id, wait=5, stop=False):
  global containerIds_to_update
@ -598,13 +512,11 @@ async def get_container_stats(container_id, wait=5, stop=False):
      "type": "danger",
      "msg": str(e)
    }
-   print(json.dumps(res, indent=4))
  else:
    res = {
      "type": "danger",
      "msg": "no or invalid id defined"
    }
-   print(json.dumps(res, indent=4))
  await asyncio.sleep(wait)
  if stop == True:
@ -615,9 +527,13 @@ async def get_container_stats(container_id, wait=5, stop=False):
  await get_container_stats(container_id, wait=0, stop=True)

if os.environ['REDIS_SLAVEOF_IP'] != "":
  redis_client = redis.Redis(host=os.environ['REDIS_SLAVEOF_IP'], port=os.environ['REDIS_SLAVEOF_PORT'], db=0)
else:
  redis_client = redis.Redis(host='redis-mailcow', port=6379, db=0)

+ sync_docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock', version='auto')
async_docker_client = aiodocker.Docker(url='unix:///var/run/docker.sock')
+ logger.info('DockerApi started')
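
The module now ends by constructing both clients side by side: the synchronous docker SDK client used by the container_post__* helpers for blocking exec-style calls, and the aiodocker client kept for the async endpoints. A small sketch of how the two can coexist against the same /var/run/docker.sock (illustrative only, not mailcow code):

import asyncio
import aiodocker
import docker

sync_docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock', version='auto')
async_docker_client = aiodocker.Docker(url='unix:///var/run/docker.sock')

async def main():
    # non-blocking listing via aiodocker, as the stats endpoints do
    print(len(await async_docker_client.containers.list()), "containers (aiodocker)")
    # blocking call via the docker SDK, as the container_post__* helpers do
    print(len(sync_docker_client.containers.list(all=True)), "containers (docker SDK)")
    await async_docker_client.close()

asyncio.run(main())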

View File

@ -175,7 +175,7 @@ BAD_SUBJECT_00 {
  type = "header";
  header = "subject";
  regexp = true;
- map = "http://nullnull.org/bad-subject-regex.txt";
+ map = "http://fuzzy.mailcow.email/bad-subject-regex.txt";
  score = 6.0;
  symbols_set = ["BAD_SUBJECT_00"];
}

View File

@ -21,7 +21,6 @@ jQuery(function($){
      url: '/api/v1/get/postcat/' + button.data('queue-id'),
      dataType: 'text',
      complete: function (data) {
-       console.log(data);
        $('#queue_msg_content').text(data.responseText);
      }
    });
@ -54,7 +53,7 @@ jQuery(function($){
    });
    item.recipients = rcpts.join('<hr style="margin:1px!important">');
    item.action = '<div class="btn-group">' +
-     '<a href="#" data-bs-toggle="modal" data-bs-target="#showQueuedMsg" data-queue-id="' + encodeURI(item.queue_id) + '" class="btn btn-xs btn-secondary">' + lang.queue_show_message + '</a>' +
+     '<a href="#" data-bs-toggle="modal" data-bs-target="#showQueuedMsg" data-queue-id="' + encodeURI(item.queue_id) + '" class="btn btn-xs btn-secondary">' + lang.show_message + '</a>' +
      '</div>';
  });
  return data;

View File

@ -1,6 +1,6 @@
{
  "acl": {
-   "alias_domains": "Tilføj kældenavn domæner",
+   "alias_domains": "Tilføj domænealias",
    "app_passwds": "Administrer app-adgangskoder",
    "bcc_maps": "BCC kort",
    "delimiter_action": "Afgrænsning handling",
@ -22,9 +22,9 @@
    "spam_alias": "Midlertidige aliasser",
    "spam_policy": "Sortliste / hvidliste",
    "spam_score": "Spam-score",
-   "syncjobs": "Synkroniser job",
+   "syncjobs": "Synkroniserings job",
    "tls_policy": "TLS politik",
-   "unlimited_quota": "Ubegrænset quote for mailbokse",
+   "unlimited_quota": "Ubegrænset plads for mailbokse",
    "domain_desc": "Skift domæne beskrivelse"
  },
  "add": {
@ -33,7 +33,7 @@
    "add": "Tilføj",
    "add_domain_only": "Tilføj kun domæne",
    "add_domain_restart": "Tilføj domæne og genstart SOGo",
-   "alias_address": "Alias adresse (r)",
+   "alias_address": "Alias adresse(r)",
    "alias_address_info": "<small>Fuld e-mail-adresse eller @ eksempel.com for at fange alle beskeder til et domæne (kommasepareret). <b> kun mailcow-domæner</b>.</small>",
    "alias_domain": "Alias-domæne",
    "alias_domain_info": "<small>Kun gyldige domænenavne (kommasepareret).</small>",

View File

@ -20,6 +20,7 @@ if (isset($_SESSION['mailcow_cc_role']) && $_SESSION['mailcow_cc_role'] == 'doma
    'tfa_data' => $tfa_data,
    'fido2_data' => $fido2_data,
    'lang_user' => json_encode($lang['user']),
+   'lang_datatables' => json_encode($lang['datatables']),
  ];
}
elseif (isset($_SESSION['mailcow_cc_role']) && $_SESSION['mailcow_cc_role'] == 'user') {

View File

@ -389,7 +389,7 @@ services:
  acme-mailcow:
    depends_on:
      - nginx-mailcow
-   image: mailcow/acme:1.83
+   image: mailcow/acme:1.84
    dns:
      - ${IPV4_NETWORK:-172.22.1}.254
    environment:
@ -510,7 +510,7 @@ services:
      - watchdog
  dockerapi-mailcow:
-   image: mailcow/dockerapi:2.0
+   image: mailcow/dockerapi:2.01
    security_opt:
      - label=disable
    restart: always

View File

@ -26,6 +26,6 @@ services:
      - /var/run/mysqld/mysqld.sock:/var/run/mysqld/mysqld.sock
  mysql-mailcow:
-   image: alpine:3.10
+   image: alpine:3.17
    command: /bin/true
    restart: "no"

View File

@ -1,6 +1,6 @@
#!/usr/bin/env bash
# renovate: datasource=github-releases depName=nextcloud/server versioning=semver extractVersion=^v(?<version>.*)$
- NEXTCLOUD_VERSION=25.0.2
+ NEXTCLOUD_VERSION=25.0.3
for bin in curl dirmngr; do
  if [[ -z $(which ${bin}) ]]; then echo "Cannot find ${bin}, exiting..."; exit 1; fi
@ -46,22 +46,22 @@ if [[ ${NC_PURGE} == "y" ]]; then
  echo -e "\033[33mDetecting Database information...\033[0m"
  if [[ $(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "Show databases" | grep "nextcloud") ]]; then
-   echo -e "\033[32mFound seperate nextcloud Database (newer scheme)!\033[0m"
+   echo -e "\033[32mFound seperate Nextcloud database (newer scheme)!\033[0m"
    echo -e "\033[31mPurging...\033[0m"
    docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "DROP DATABASE nextcloud;" > /dev/null
    docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "DROP USER 'nextcloud'@'%';" > /dev/null
  elif [[ $(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} mailcow -e "SHOW TABLES LIKE 'oc_%'") && $? -eq 0 ]]; then
-   echo -e "\033[32mFound nextcloud (oc) tables inside of mailcow Database (old scheme)!\033[0m"
+   echo -e "\033[32mFound Nextcloud (oc) tables inside of mailcow database (old scheme)!\033[0m"
    echo -e "\033[31mPurging...\033[0m"
    docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e \
      "$(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "SELECT IFNULL(GROUP_CONCAT('DROP TABLE ', TABLE_SCHEMA, '.', TABLE_NAME SEPARATOR ';'),'SELECT NULL;') FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME LIKE 'oc_%' AND TABLE_SCHEMA = '${DBNAME}';" -BN)" > /dev/null
  elif [[ $(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} mailcow -e "SHOW TABLES LIKE 'nc_%'") && $? -eq 0 ]]; then
-   echo -e "\033[32mFound nextcloud (nc) tables inside of mailcow Database (old scheme)!\033[0m"
+   echo -e "\033[32mFound Nextcloud (nc) tables inside of mailcow database (old scheme)!\033[0m"
    echo -e "\033[31mPurging...\033[0m"
    docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e \
      "$(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "SELECT IFNULL(GROUP_CONCAT('DROP TABLE ', TABLE_SCHEMA, '.', TABLE_NAME SEPARATOR ';'),'SELECT NULL;') FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME LIKE 'nc_%' AND TABLE_SCHEMA = '${DBNAME}';" -BN)" > /dev/null
  else
-   echo -e "\033[31mError: No Nextcloud Databases/Tables found!"
+   echo -e "\033[31mError: No Nextcloud databases/tables found!"
    echo -e "\033[33mNot purging anything...\033[0m"
    exit 1
  fi
@ -80,10 +80,10 @@ EOF
  docker restart $(docker ps -aqf name=nginx-mailcow)
- echo -e "\033[32mNextcloud has been sucessfully uninstalled!\033[0m"
+ echo -e "\033[32mNextcloud has been uninstalled sucessfully!\033[0m"
elif [[ ${NC_UPDATE} == "y" ]]; then
- read -r -p "Are you sure you want to update Nextcloud (with nextclouds own updater)? [y/N] " response
+ read -r -p "Are you sure you want to update Nextcloud (with Nextclouds own updater)? [y/N] " response
  response=${response,,}
  if [[ ! "$response" =~ ^(yes|y)$ ]]; then
    echo "OK, aborting."
@ -118,18 +118,18 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
    && mkdir -p ./data/web/nextcloud/data \
    && chmod +x ./data/web/nextcloud/occ
- echo -e "\033[33mCreating Nextcloud Database...\033[0m"
+ echo -e "\033[33mCreating 'nextcloud' database...\033[0m"
  NC_DBPASS=$(</dev/urandom tr -dc A-Za-z0-9 | head -c 28)
  NC_DBUSER=nextcloud
  NC_DBNAME=nextcloud
- echo -ne "[1/3] Creating nextcloud Database"
+ echo -ne "[1/3] Creating 'nextcloud' database"
  docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "CREATE DATABASE ${NC_DBNAME};"
  sleep 2
- echo -ne "\r[2/3] Creating nextcloud Database user"
+ echo -ne "\r[2/3] Creating 'nextcloud' database user"
  docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "CREATE USER '${NC_DBUSER}'@'%' IDENTIFIED BY '${NC_DBPASS}';"
  sleep 2
- echo -ne "\r[3/3] Granting nextcloud user all permissions on database nextcloud"
+ echo -ne "\r[3/3] Granting 'nextcloud' user all permissions on database 'nextcloud'"
  docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "GRANT ALL PRIVILEGES ON ${NC_DBNAME}.* TO '${NC_DBUSER}'@'%';"
  sleep 2
@ -140,7 +140,7 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
  echo -ne "[1/4] Setting correct permissions for www-data"
  docker exec -it $(docker ps -f name=php-fpm-mailcow -q) /bin/bash -c "chown -R www-data:www-data /web/nextcloud"
  sleep 2
- echo -ne "\r[2/4] Running occ maintenance:install to install nextcloud"
+ echo -ne "\r[2/4] Running occ maintenance:install to install Nextcloud"
  docker exec -it -u www-data $(docker ps -f name=php-fpm-mailcow -q) /web/nextcloud/occ --no-warnings maintenance:install \
    --database mysql \
    --database-host mysql \
@ -149,9 +149,9 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
    --database-pass ${NC_DBPASS} \
    --admin-user admin \
    --admin-pass ${ADMIN_NC_PASS} \
-   --data-dir /web/nextcloud/data 2>&1 /dev/null
+   --data-dir /web/nextcloud/data > /dev/null 2>&1
- echo -ne "\r[3/4] Setting custom parameters inside the nextcloud config file"
+ echo -ne "\r[3/4] Setting custom parameters inside the Nextcloud config file"
  echo ""
  docker exec -it -u www-data $(docker ps -f name=php-fpm-mailcow -q) bash -c "/web/nextcloud/occ --no-warnings config:system:set redis host --value=redis --type=string; \
    /web/nextcloud/occ --no-warnings config:system:set redis port --value=6379 --type=integer; \
@ -178,7 +178,7 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
    #/web/nextcloud/occ --no-warnings config:system:set user_backends 0 arguments 0 --value={dovecot:143/imap/tls/novalidate-cert}; \
    #/web/nextcloud/occ --no-warnings config:system:set user_backends 0 class --value=OC_User_IMAP; \
- echo -e "\r[4/4] Enabling NGINX Configuration"
+ echo -e "\r[4/4] Enabling Nginx Configuration"
  cp ./data/assets/nextcloud/nextcloud.conf ./data/conf/nginx/
  sed -i "s/NC_SUBD/${NC_SUBD}/g" ./data/conf/nginx/nextcloud.conf
  sleep 2
@ -193,11 +193,11 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
  echo "* INSTALL DATE: $(date +%Y-%m-%d_%H-%M-%S) *"
  echo "******************************************"
  echo ""
- echo -e "\033[36mDatabase Name: ${NC_DBNAME}\033[0m"
+ echo -e "\033[36mDatabase name: ${NC_DBNAME}\033[0m"
- echo -e "\033[36mDatabase User: ${NC_DBUSER}\033[0m"
+ echo -e "\033[36mDatabase user: ${NC_DBUSER}\033[0m"
- echo -e "\033[36mDatabase Password: ${NC_DBPASS}\033[0m"
+ echo -e "\033[36mDatabase password: ${NC_DBPASS}\033[0m"
  echo ""
- echo -e "\033[31mUI Admin Password: ${ADMIN_NC_PASS}\033[0m"
+ echo -e "\033[31mUI admin password: ${ADMIN_NC_PASS}\033[0m"
  echo ""