diff --git a/CHANGELOG.md b/CHANGELOG.md
index f27aff7c..45943e95 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,42 @@
 CHANGELOG
 =========
 
+Version 71 (January 4, 2025)
+----------------------------
+
+(Version 71a was posted on January 6, 2025 and fixes a setup regression.)
+
+Upgrades
+
+* Roundcube upgraded to version 1.6.9.
+* Z-Push upgraded to version 2.7.5.
+
+Automated Maintenance
+
+* Daily automated tasks are now run at 1am in the box's timezone and full backups are now restricted to running only on Saturdays and Sundays at that time.
+* Backups now exclude the owncloud-backup folder so that we're not backing up backups.
+* Old TLS certificates are now automatically deleted to improve control panel performance.
+
+Setup
+
+* Fixed broken setup if SSH was configured to listen on multiple ports.
+* Ubuntu MOTD advertisements are now disabled.
+* Fixed missing Roundcube dependency package if NextCloud isn't installed.
+
+Control Panel
+
+* Improved status checks for secondary nameservers.
+* Spamhaus is now queried for the box's IPv6 address also.
+* DSA and EC private keys are now accepted for TLS certificates.
+* Timeouts for loading slow control panel pages are reduced.
+
+And other minor fixes.
+
+Version 70 (August 15, 2024)
+----------------------------
+
+* Roundcube is updated to version 1.6.8 fixing security vulnerabilities.
+
 Version 69 (July 20, 2024)
 --------------------------
 
@@ -68,7 +104,7 @@ Version 64 (September 2, 2023)
 * Fixed backups to work with the latest duplicity package which was not backwards compatible.
 * Fixed setting B2 as a backup target with a slash in the application key.
 * Turned off OpenDMARC diagnostic reports sent in response to incoming mail.
-* Fixed some crashes when using an unrelased version of Mail-in-a-Box.
+* Fixed some crashes when using an unreleased version of Mail-in-a-Box.
 * Added z-push administration scripts.
 
 Version 63 (July 27, 2023)
 --------------------------
@@ -1124,7 +1160,7 @@ Control panel:
 System:
 
 * The munin system monitoring tool is now installed and accessible at /admin/munin.
-* ownCloud updated to version 8.0.4. The ownCloud installation step now is reslient to download problems. The ownCloud configuration file is now stored in STORAGE_ROOT to fix loss of data when moving STORAGE_ROOT to a new machine.
+* ownCloud updated to version 8.0.4. The ownCloud installation step now is resilient to download problems. The ownCloud configuration file is now stored in STORAGE_ROOT to fix loss of data when moving STORAGE_ROOT to a new machine.
 * The setup scripts now run `apt-get update` prior to installing anything to ensure the apt database is in sync with the packages actually available.
 
@@ -1162,7 +1198,7 @@ DNS:
 * Internationalized Domain Names (IDNs) should now work in email. If you had custom DNS or custom web settings for internationalized domains, check that they are still working.
 * It is now possible to set multiple TXT and other types of records on the same domain in the control panel.
 * The custom DNS API was completely rewritten to support setting multiple records of the same type on a domain. Any existing client code using the DNS API will have to be rewritten. (Existing code will just get 404s back.)
-* On some systems the `nsd` service failed to start if network inferfaces were not ready.
+* On some systems the `nsd` service failed to start if network interfaces were not ready.
 
 System / Control Panel:
diff --git a/conf/nginx-primaryonly.conf b/conf/nginx-primaryonly.conf
index b361a7b2..36f62aab 100644
--- a/conf/nginx-primaryonly.conf
+++ b/conf/nginx-primaryonly.conf
@@ -8,7 +8,6 @@ rewrite ^/admin/munin$ /admin/munin/ redirect;
 location /admin/ {
 	proxy_pass http://127.0.0.1:10222/;
-	proxy_read_timeout 600s;
 	proxy_set_header X-Forwarded-For $remote_addr;
 	add_header X-Frame-Options "DENY";
 	add_header X-Content-Type-Options nosniff;
diff --git a/management/backup.py b/management/backup.py
index 990813cb..ea37b1a6 100755
--- a/management/backup.py
+++ b/management/backup.py
@@ -9,6 +9,7 @@ import os, os.path, re, datetime, sys
 
 import dateutil.parser, dateutil.relativedelta, dateutil.tz
+from datetime import date
 import rtyaml
 from exclusiveprocess import Lock
 
@@ -157,6 +158,8 @@ def should_force_full(config, env):
 	# since the last full backup is greater than half the size
 	# of that full backup.
 	inc_size = 0
+	# Check if day of week is a weekend day
+	weekend = date.today().weekday()>=5
 	for bak in backup_status(env)["backups"]:
 		if not bak["full"]:
 			# Scan through the incremental backups cumulating
@@ -165,12 +168,14 @@ def should_force_full(config, env):
 		else:
 			# ...until we reach the most recent full backup.
 			# Return if we should to a full backup, which is based
-			# on the size of the increments relative to the full
-			# backup, as well as the age of the full backup.
-			if inc_size > .5*bak["size"]:
-				return True
-			if dateutil.parser.parse(bak["date"]) + datetime.timedelta(days=config["min_age_in_days"]*10+1) < datetime.datetime.now(dateutil.tz.tzlocal()):
-				return True
+			# on whether it is a weekend day, the size of the
+			# increments relative to the full backup, as well as
+			# the age of the full backup.
+			if weekend:
+				if inc_size > .5*bak["size"]:
+					return True
+				if dateutil.parser.parse(bak["date"]) + datetime.timedelta(days=config["min_age_in_days"]*10+1) < datetime.datetime.now(dateutil.tz.tzlocal()):
+					return True
 			return False
 	else:
 		# If we got here there are no (full) backups, so make one.
@@ -321,6 +326,7 @@ def perform_backup(full_backup):
 			"--verbosity", "warning", "--no-print-statistics",
 			"--archive-dir", backup_cache_dir,
 			"--exclude", backup_root,
+			"--exclude", os.path.join(env["STORAGE_ROOT"], "owncloud-backup"),
 			"--volsize", "250",
 			"--gpg-options", "'--cipher-algo=AES256'",
 			"--allow-source-mismatch",
@@ -400,6 +406,7 @@ def run_duplicity_verification():
 		"--compare-data",
 		"--archive-dir", backup_cache_dir,
 		"--exclude", backup_root,
+		"--exclude", os.path.join(env["STORAGE_ROOT"], "owncloud-backup"),
 		*get_duplicity_additional_args(env),
 		get_duplicity_target_url(config),
 		env["STORAGE_ROOT"],
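
The weekend gate in the backup.py hunk above simply wraps the two existing triggers (incrementals larger than half the last full backup, or a full backup older than roughly ten times the minimum retention age), so a full backup can now only start on a Saturday or Sunday. A minimal standalone sketch of that decision, with hypothetical arguments rather than the module's real config/env plumbing:

    from datetime import date, datetime, timedelta

    def should_force_full(last_full_date, last_full_size, incremental_size, min_age_in_days):
        # Full backups may only be started on Saturday (weekday 5) or Sunday (6).
        if date.today().weekday() < 5:
            return False
        # Trigger 1: the incrementals have grown past half the size of the last full backup.
        if incremental_size > 0.5 * last_full_size:
            return True
        # Trigger 2: the last full backup is older than ~10x the minimum retention age.
        return last_full_date + timedelta(days=min_age_in_days * 10 + 1) < datetime.now()

    # Example: 30 GB of incrementals on top of a 50 GB full backup taken 90 days ago.
    print(should_force_full(datetime.now() - timedelta(days=90), 50e9, 30e9, 3))
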
diff --git a/management/ssl_certificates.py b/management/ssl_certificates.py
index c9f1126c..8c1b841e 100755
--- a/management/ssl_certificates.py
+++ b/management/ssl_certificates.py
@@ -14,7 +14,7 @@ def get_ssl_certificates(env):
 	# that the certificates are good for to the best certificate for
 	# the domain.
 
-	from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
+	from cryptography.hazmat.primitives.asymmetric import dsa, rsa, ec
 	from cryptography.x509 import Certificate
 
 	# The certificates are all stored here:
@@ -59,13 +59,15 @@ def get_ssl_certificates(env):
 			# Not a valid PEM format for a PEM type we care about.
 			continue
 
-		# Is it a private key?
-		if isinstance(pem, RSAPrivateKey):
-			private_keys[pem.public_key().public_numbers()] = { "filename": fn, "key": pem }
-
 		# Is it a certificate?
 		if isinstance(pem, Certificate):
 			certificates.append({ "filename": fn, "cert": pem })
 
+		# It is a private key
+		elif (isinstance(pem, rsa.RSAPrivateKey)
+			or isinstance(pem, dsa.DSAPrivateKey)
+			or isinstance(pem, ec.EllipticCurvePrivateKey)):
+			private_keys[pem.public_key().public_numbers()] = { "filename": fn, "key": pem }
+
 	# Process the certificates.
 	domains = { }
@@ -505,7 +507,7 @@ def check_certificate(domain, ssl_certificate, ssl_private_key, warn_if_expiring
 	# Check that the ssl_certificate & ssl_private_key files are good
 	# for the provided domain.
 
-	from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
+	from cryptography.hazmat.primitives.asymmetric import rsa, dsa, ec
 	from cryptography.x509 import Certificate
 
 	# The ssl_certificate file may contain a chain of certificates. We'll
@@ -539,7 +541,9 @@ def check_certificate(domain, ssl_certificate, ssl_private_key, warn_if_expiring
 	except ValueError as e:
 		return (f"The private key file {ssl_private_key} is not a private key file: {e!s}", None)
 
-	if not isinstance(priv_key, RSAPrivateKey):
+	if (not isinstance(priv_key, rsa.RSAPrivateKey)
+		and not isinstance(priv_key, dsa.DSAPrivateKey)
+		and not isinstance(priv_key, ec.EllipticCurvePrivateKey)):
 		return ("The private key file %s is not a private key file." % ssl_private_key, None)
 
 	if priv_key.public_key().public_numbers() != cert.public_key().public_numbers():
@@ -639,7 +643,7 @@ def load_pem(pem):
 		msg = "File is not a valid PEM-formatted file."
 		raise ValueError(msg)
 	pem_type = pem_type.group(1)
-	if pem_type in {b"RSA PRIVATE KEY", b"PRIVATE KEY"}:
+	if pem_type.endswith(b"PRIVATE KEY"):
 		return serialization.load_pem_private_key(pem, password=None, backend=default_backend())
 	if pem_type == b"CERTIFICATE":
 		return load_pem_x509_certificate(pem, default_backend())
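
The certificate change above boils down to accepting any of the three key types that the cryptography package can compare by public numbers, and the relaxed load_pem check (endswith(b"PRIVATE KEY")) is what lets "EC PRIVATE KEY" and "DSA PRIVATE KEY" PEM blocks reach that point at all. A compact sketch of the same idea outside the module; the function name and byte-string arguments are illustrative, not part of Mail-in-a-Box's API:

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
    from cryptography.x509 import load_pem_x509_certificate

    def key_matches_certificate(key_pem: bytes, cert_pem: bytes) -> bool:
        key = serialization.load_pem_private_key(key_pem, password=None)
        # RSA, DSA and EC private keys are accepted; anything else is rejected.
        if not isinstance(key, (rsa.RSAPrivateKey, dsa.DSAPrivateKey, ec.EllipticCurvePrivateKey)):
            raise ValueError("Unsupported private key type")
        cert = load_pem_x509_certificate(cert_pem)
        # The key belongs to the certificate exactly when their public numbers match.
        return key.public_key().public_numbers() == cert.public_key().public_numbers()
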
diff --git a/management/status_checks.py b/management/status_checks.py
index 51f8e631..68755cb7 100755
--- a/management/status_checks.py
+++ b/management/status_checks.py
@@ -282,26 +282,45 @@ def run_network_checks(env, output):
 	# The user might have ended up on an IP address that was previously in use
 	# by a spammer, or the user may be deploying on a residential network. We
 	# will not be able to reliably send mail in these cases.
-
-	# See https://www.spamhaus.org/news/article/807/using-our-public-mirrors-check-your-return-codes-now. for
-	# information on spamhaus return codes
 	rev_ip4 = ".".join(reversed(env['PUBLIC_IP'].split('.')))
 	zen = query_dns(rev_ip4+'.zen.spamhaus.org', 'A', nxdomain=None)
+	evaluate_spamhaus_lookup(env['PUBLIC_IP'], 'IPv4', rev_ip4, output, zen)
+
+	if not env['PUBLIC_IPV6']:
+		return
+
+	from ipaddress import IPv6Address
+
+	rev_ip6 = ".".join(reversed(IPv6Address(env['PUBLIC_IPV6']).exploded.split(':')))
+	zen = query_dns(rev_ip6+'.zen.spamhaus.org', 'A', nxdomain=None)
+	evaluate_spamhaus_lookup(env['PUBLIC_IPV6'], 'IPv6', rev_ip6, output, zen)
+
+
+def evaluate_spamhaus_lookup(lookupaddress, lookuptype, lookupdomain, output, zen):
+	# See https://www.spamhaus.org/news/article/807/using-our-public-mirrors-check-your-return-codes-now. for
+	# information on spamhaus return codes
 	if zen is None:
-		output.print_ok("IP address is not blacklisted by zen.spamhaus.org.")
+		output.print_ok(f"{lookuptype} address is not blacklisted by zen.spamhaus.org.")
 	elif zen == "[timeout]":
-		output.print_warning("Connection to zen.spamhaus.org timed out. Could not determine whether this box's IP address is blacklisted. Please try again later.")
+		output.print_warning(f"""Connection to zen.spamhaus.org timed out. Could not determine whether this box's
+			{lookuptype} address is blacklisted. Please try again later.""")
 	elif zen == "[Not Set]":
-		output.print_warning("Could not connect to zen.spamhaus.org. Could not determine whether this box's IP address is blacklisted. Please try again later.")
+		output.print_warning(f"""Could not connect to zen.spamhaus.org. Could not determine whether this box's
+			{lookuptype} address is blacklisted. Please try again later.""")
 	elif zen == "127.255.255.252":
-		output.print_warning("Incorrect spamhaus query: %s. Could not determine whether this box's IP address is blacklisted." % (rev_ip4+'.zen.spamhaus.org'))
+		output.print_warning(f"""Incorrect spamhaus query: {lookupdomain + '.zen.spamhaus.org'}. Could not determine whether
+			this box's {lookuptype} address is blacklisted.""")
 	elif zen == "127.255.255.254":
-		output.print_warning("Mail-in-a-Box is configured to use a public DNS server. This is not supported by spamhaus. Could not determine whether this box's IP address is blacklisted.")
+		output.print_warning(f"""Mail-in-a-Box is configured to use a public DNS server. This is not supported by
+			spamhaus. Could not determine whether this box's {lookuptype} address is blacklisted.""")
 	elif zen == "127.255.255.255":
-		output.print_warning("Too many queries have been performed on the spamhaus server. Could not determine whether this box's IP address is blacklisted.")
+		output.print_warning(f"""Too many queries have been performed on the spamhaus server. Could not determine
+			whether this box's {lookuptype} address is blacklisted.""")
 	else:
-		output.print_error("""The IP address of this machine {} is listed in the Spamhaus Block List (code {}),
-			which may prevent recipients from receiving your email. See http://www.spamhaus.org/query/ip/{}.""".format(env['PUBLIC_IP'], zen, env['PUBLIC_IP']))
+		output.print_error(f"""The {lookuptype} address of this machine {lookupaddress} is listed in the Spamhaus Block
+			List (code {zen}), which may prevent recipients from receiving your email. See
+			http://www.spamhaus.org/query/ip/{lookupaddress}.""")
+
 
 def run_domain_checks(rounded_time, env, output, pool, domains_to_check=None):
 	# Get the list of domains we handle mail for.
@@ -521,6 +540,8 @@ def check_dns_zone(domain, env, output, dns_zonefiles):
 	# Check that each custom secondary nameserver resolves the IP address.
 
 	if custom_secondary_ns and not probably_external_dns:
+		SOARecord = query_dns(domain, "SOA", at=env['PUBLIC_IP'])# Explicitly ask the local dns server.
+
 		for ns in custom_secondary_ns:
 			# We must first resolve the nameserver to an IP address so we can query it.
 			ns_ips = query_dns(ns, "A")
@@ -530,15 +551,36 @@ def check_dns_zone(domain, env, output, dns_zonefiles):
 			# Choose the first IP if nameserver returns multiple
 			ns_ip = ns_ips.split('; ')[0]
 
+			checkSOA = True
+
 			# Now query it to see what it says about this domain.
 			ip = query_dns(domain, "A", at=ns_ip, nxdomain=None)
 			if ip == correct_ip:
-				output.print_ok("Secondary nameserver %s resolved the domain correctly." % ns)
+				output.print_ok(f"Secondary nameserver {ns} resolved the domain correctly.")
 			elif ip is None:
-				output.print_error("Secondary nameserver %s is not configured to resolve this domain." % ns)
+				output.print_error(f"Secondary nameserver {ns} is not configured to resolve this domain.")
+				# No need to check SOA record if not configured as nameserver
+				checkSOA = False
+			elif ip == '[timeout]':
+				output.print_error(f"Secondary nameserver {ns} did not resolve this domain, result: {ip}")
+				checkSOA = False
 			else:
 				output.print_error(f"Secondary nameserver {ns} is not configured correctly. (It resolved this domain as {ip}. It should be {correct_ip}.)")
 
+			if checkSOA:
+				# Check that secondary DNS server is synchronized with our primary DNS server. Simplified by checking the SOA record which has a version number
+				SOASecondary = query_dns(domain, "SOA", at=ns_ip)
+
+				if SOARecord == SOASecondary:
+					output.print_ok(f"Secondary nameserver {ns} has consistent SOA record.")
+				elif SOARecord == '[Not Set]':
+					output.print_error(f"Secondary nameserver {ns} has no SOA record configured.")
+				elif SOARecord == '[timeout]':
+					output.print_error(f"Secondary nameserver {ns} timed out on checking SOA record.")
+				else:
+					output.print_error(f"""Secondary nameserver {ns} has inconsistent SOA record (primary: {SOARecord} versus secondary: {SOASecondary}).
+						Check that synchronization between secondary and primary DNS servers is properly set-up.""")
+
 def check_dns_zone_suggestions(domain, env, output, dns_zonefiles, domains_with_a_records):
 	# Warn if a custom DNS record is preventing this or the automatic www redirect from
 	# being served.
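
For reference, the reversed names that the reworked check sends to zen.spamhaus.org look like this. The snippet mirrors the reversal the patch performs (whole octets for IPv4, whole exploded 16-bit groups for IPv6); the sample addresses are documentation ranges, not real hosts:

    from ipaddress import IPv6Address

    def spamhaus_query_name(ip):
        if ':' in ip:
            # IPv6: reverse the eight exploded 16-bit groups, as the patch does.
            rev = '.'.join(reversed(IPv6Address(ip).exploded.split(':')))
        else:
            # IPv4: reverse the four octets.
            rev = '.'.join(reversed(ip.split('.')))
        return rev + '.zen.spamhaus.org'

    print(spamhaus_query_name('192.0.2.1'))
    # -> 1.2.0.192.zen.spamhaus.org
    print(spamhaus_query_name('2001:db8::1'))
    # -> 0001.0000.0000.0000.0000.0000.0db8.2001.zen.spamhaus.org
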
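The new secondary-nameserver check reduces to comparing SOA records, whose serial number changes whenever the zone does, so a mismatch means the secondary has not picked up the latest zone transfer yet. A standalone sketch of the same comparison using dnspython 2.x (already a dependency of the management code); the helper name and the nameserver IPs are hypothetical, not the project's query_dns wrapper:

    import dns.resolver

    def soa_record(domain, nameserver_ip):
        resolver = dns.resolver.Resolver(configure=False)
        resolver.nameservers = [nameserver_ip]
        answer = resolver.resolve(domain, 'SOA')
        return next(iter(answer)).to_text()

    # The secondary is in sync when its SOA record (and therefore its serial)
    # matches the one served by the primary.
    primary = soa_record('example.com', '198.51.100.1')
    secondary = soa_record('example.com', '203.0.113.53')
    print("in sync" if primary == secondary else "out of sync")
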
diff --git a/management/utils.py b/management/utils.py
index a2f4bc67..73d834a0 100644
--- a/management/utils.py
+++ b/management/utils.py
@@ -193,6 +193,7 @@ def get_ssh_port():
 
 def get_ssh_config_value(parameter_name):
 	# Returns ssh configuration value for the provided parameter
+	import subprocess
 	try:
 		output = shell('check_output', ['sshd', '-T'])
 	except FileNotFoundError:
diff --git a/setup/bootstrap.sh b/setup/bootstrap.sh
index 18c53559..fc191c21 100644
--- a/setup/bootstrap.sh
+++ b/setup/bootstrap.sh
@@ -23,7 +23,7 @@ if [ -z "$TAG" ]; then
 	if [ "$UBUNTU_VERSION" == "Ubuntu 22.04 LTS" ]; then
 		# This machine is running Ubuntu 22.04, which is supported by
 		# Mail-in-a-Box versions 60 and later.
-		TAG=v69b
+		TAG=v71a
 	elif [ "$UBUNTU_VERSION" == "Ubuntu 18.04 LTS" ]; then
 		# This machine is running Ubuntu 18.04, which is supported by
 		# Mail-in-a-Box versions 0.40 through 5x.
diff --git a/setup/management.sh b/setup/management.sh
index fb359cd3..d8032312 100755
--- a/setup/management.sh
+++ b/setup/management.sh
@@ -116,7 +116,7 @@ minute=$((RANDOM % 60))  # avoid overloading mailinabox.email
 cat > /etc/cron.d/mailinabox-nightly << EOF;
 # Mail-in-a-Box --- Do not edit / will be overwritten on update.
 # Run nightly tasks: backup, status checks.
-$minute 3 * * * root (cd $PWD && management/daily_tasks.sh)
+$minute 1 * * * root (cd $PWD && management/daily_tasks.sh)
 EOF
 
 # Start the management server.
diff --git a/setup/ssl.sh b/setup/ssl.sh
index 19a0c048..0aa9b136 100755
--- a/setup/ssl.sh
+++ b/setup/ssl.sh
@@ -96,3 +96,12 @@ fi
 if [ ! -f "$STORAGE_ROOT/ssl/dh2048.pem" ]; then
 	openssl dhparam -out "$STORAGE_ROOT/ssl/dh2048.pem" 2048
 fi
+
+# Cleanup expired SSL certificates from $STORAGE_ROOT/ssl daily
+cat > /etc/cron.daily/mailinabox-ssl-cleanup << EOF;
+#!/bin/bash
+# Mail-in-a-Box
+# Cleanup expired SSL certificates
+$(pwd)/tools/ssl_cleanup
+EOF
+chmod +x /etc/cron.daily/mailinabox-ssl-cleanup
diff --git a/setup/system.sh b/setup/system.sh
index 96bb9a87..8c38c5be 100755
--- a/setup/system.sh
+++ b/setup/system.sh
@@ -83,6 +83,15 @@ fi
 # (See https://discourse.mailinabox.email/t/journalctl-reclaim-space-on-small-mailinabox/6728/11.)
 tools/editconf.py /etc/systemd/journald.conf MaxRetentionSec=10day
 
+# ### Improve server privacy
+
+# Disable MOTD adverts to prevent revealing server information in MOTD request headers
+# See https://ma.ttias.be/what-exactly-being-sent-ubuntu-motd/
+if [ -f /etc/default/motd-news ]; then
+tools/editconf.py /etc/default/motd-news ENABLED=0
+rm -f /var/cache/motd-news
+fi
+
 # ### Add PPAs.
 
 # We install some non-standard Ubuntu packages maintained by other
@@ -266,14 +275,14 @@ if [ -z "${DISABLE_FIREWALL:-}" ]; then
 	# ssh might be running on an alternate port. Use sshd -T to dump sshd's #NODOC
 	# settings, find the port it is supposedly running on, and open that port #NODOC
 	# too. #NODOC
-	SSH_PORT=$(sshd -T 2>/dev/null | grep "^port " | sed "s/port //") #NODOC
+	SSH_PORT=$(sshd -T 2>/dev/null | grep "^port " | sed "s/port //" | tr '\n' ' ') #NODOC
 	if [ -n "$SSH_PORT" ]; then
-		if [ "$SSH_PORT" != "22" ]; then
-
-			echo "Opening alternate SSH port $SSH_PORT." #NODOC
-			ufw_limit "$SSH_PORT" #NODOC
-
-		fi
+		for port in $SSH_PORT; do
+			if [ "$port" != "22" ]; then
+				echo "Opening alternate SSH port $port." #NODOC
+				ufw_limit "$port" #NODOC
+			fi
+		done
 	fi
 
 	ufw --force enable;
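
The firewall fix above works because sshd -T prints the effective configuration one "key value" pair per line, so a multi-port setup yields several "port" lines that all need to be opened. A small illustration of the same parsing in Python (this sketch only prints what it would do rather than calling ufw, and sshd -T generally needs root):

    import subprocess

    def ssh_ports():
        # `sshd -T` dumps the effective sshd configuration; one "port NNN"
        # line is printed per configured listening port.
        output = subprocess.check_output(['sshd', '-T'], text=True)
        return [line.split()[1] for line in output.splitlines() if line.startswith('port ')]

    for port in ssh_ports():
        if port != '22':
            print(f"would open alternate SSH port {port} in the firewall")
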
diff --git a/setup/webmail.sh b/setup/webmail.sh
index 5f6822f5..3d5ebc0b 100644
--- a/setup/webmail.sh
+++ b/setup/webmail.sh
@@ -25,7 +25,7 @@ echo "Installing Roundcube (webmail)..."
 apt_install \
 	dbconfig-common \
 	php"${PHP_VER}"-cli php"${PHP_VER}"-sqlite3 php"${PHP_VER}"-intl php"${PHP_VER}"-common php"${PHP_VER}"-curl php"${PHP_VER}"-imap \
-	php"${PHP_VER}"-gd php"${PHP_VER}"-pspell php"${PHP_VER}"-mbstring libjs-jquery libjs-jquery-mousewheel libmagic1 \
+	php"${PHP_VER}"-gd php"${PHP_VER}"-pspell php"${PHP_VER}"-mbstring php"${PHP_VER}"-xml libjs-jquery libjs-jquery-mousewheel libmagic1 \
 	sqlite3
 
 # Install Roundcube from source if it is not already present or if it is out of date.
@@ -38,8 +38,8 @@ apt_install \
 # https://github.com/mstilkerich/rcmcarddav/releases
 # The easiest way to get the package hashes is to run this script and get the hash from
 # the error message.
-VERSION=1.6.6
-HASH=7705d2736890c49e7ae3ac75e3ae00ba56187056
+VERSION=1.6.9
+HASH=b63f74209cf287402f6f44b85877388899261f3c
 PERSISTENT_LOGIN_VERSION=bde7b6840c7d91de627ea14e81cf4133cbb3c07a # version 5.3
 HTML5_NOTIFIER_VERSION=68d9ca194212e15b3c7225eb6085dbcf02fd13d7 # version 0.6.4+
 CARDDAV_VERSION=4.4.3
diff --git a/setup/zpush.sh b/setup/zpush.sh
index 01041ae4..8cfa86d5 100755
--- a/setup/zpush.sh
+++ b/setup/zpush.sh
@@ -24,8 +24,8 @@ apt_install \
 phpenmod -v "$PHP_VER" imap
 
 # Copy Z-Push into place.
-VERSION=2.7.3
-TARGETHASH=9d4bec41935e9a4e07880c5ff915bcddbda4443b
+VERSION=2.7.5
+TARGETHASH=f0b0b06e255f3496173ab9d28a4f2d985184720e
 needs_update=0 #NODOC
 if [ ! -f /usr/local/lib/z-push/version ]; then
 	needs_update=1 #NODOC
@@ -59,8 +59,6 @@ fi
 sed -i "s^define('TIMEZONE', .*^define('TIMEZONE', '$(cat /etc/timezone)');^" /usr/local/lib/z-push/config.php
 sed -i "s/define('BACKEND_PROVIDER', .*/define('BACKEND_PROVIDER', 'BackendCombined');/" /usr/local/lib/z-push/config.php
 sed -i "s/define('USE_FULLEMAIL_FOR_LOGIN', .*/define('USE_FULLEMAIL_FOR_LOGIN', true);/" /usr/local/lib/z-push/config.php
-sed -i "s/define('LOG_MEMORY_PROFILER', .*/define('LOG_MEMORY_PROFILER', false);/" /usr/local/lib/z-push/config.php
-sed -i "s/define('BUG68532FIXED', .*/define('BUG68532FIXED', false);/" /usr/local/lib/z-push/config.php
 sed -i "s/define('LOGLEVEL', .*/define('LOGLEVEL', LOGLEVEL_ERROR);/" /usr/local/lib/z-push/config.php
 
 # Configure BACKEND
@@ -114,4 +112,6 @@ restart_service php"$PHP_VER"-fpm
 
 # Fix states after upgrade
 
-hide_output php"$PHP_VER" /usr/local/lib/z-push/z-push-admin.php -a fixstates
+if [ $needs_update == 1 ]; then
+	hide_output php"$PHP_VER" /usr/local/lib/z-push/z-push-admin.php -a fixstates
+fi
diff --git a/tools/ssl_cleanup b/tools/ssl_cleanup
new file mode 100755
index 00000000..5adfa1be
--- /dev/null
+++ b/tools/ssl_cleanup
@@ -0,0 +1,17 @@
+#!/bin/bash
+# Cleanup SSL certificates which expired more than 7 days ago from $STORAGE_ROOT/ssl and move them to $STORAGE_ROOT/ssl.expired
+
+source /etc/mailinabox.conf
+shopt -s extglob
+
+retain_after="$(date --date="7 days ago" +%Y%m%d)"
+
+mkdir -p $STORAGE_ROOT/ssl.expired
+for file in $STORAGE_ROOT/ssl/*-+([0-9])-+([0-9a-f]).pem; do
+	pem="$(basename "$file")"
+	not_valid_after="$(cut -d- -f1 <<< "${pem: -21}")"
+
+	if [ "$not_valid_after" -lt "$retain_after" ]; then
+		mv "$file" "$STORAGE_ROOT/ssl.expired/${pem}"
+	fi
+done
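
The new tools/ssl_cleanup script leans on the control panel's certificate filename convention: the glob only matches names of the form something-<digits>-<hex>.pem, and the numeric comparison implies the digit field is the certificate's not-valid-after date as YYYYMMDD. A rough Python rendering of the same cleanup, offered only as an illustration under those assumptions (the regex, the hard-coded STORAGE_ROOT default, and the 8-digit date are all inferred, not confirmed by the shell script itself):

    import os, re, shutil
    from datetime import date, timedelta

    STORAGE_ROOT = "/home/user-data"  # assumption: the usual Mail-in-a-Box storage root
    cutoff = (date.today() - timedelta(days=7)).strftime("%Y%m%d")

    ssl_dir = os.path.join(STORAGE_ROOT, "ssl")
    expired_dir = os.path.join(STORAGE_ROOT, "ssl.expired")
    os.makedirs(expired_dir, exist_ok=True)

    for fn in os.listdir(ssl_dir):
        # Intended to match the same names as the shell glob *-+([0-9])-+([0-9a-f]).pem,
        # assuming the digit field is an 8-digit YYYYMMDD expiration date.
        m = re.match(r".+-(\d{8})-[0-9a-f]+\.pem$", fn)
        if m and m.group(1) < cutoff:
            shutil.move(os.path.join(ssl_dir, fn), os.path.join(expired_dir, fn))

Only files whose embedded date is more than seven days in the past are moved, so private keys and certificates that are still valid are left untouched.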