Mirror of https://github.com/mail-in-a-box/mailinabox.git (synced 2025-04-03 00:07:05 +00:00)

commit 0aa7050221
Merge remote-tracking branch 'upstream/main' into merge-upstream

# Conflicts:
#	management/status_checks.py
#	setup/webmail.sh

@@ -73,7 +73,7 @@ Version 64 (September 2, 2023)
 * Fixed backups to work with the latest duplicity package which was not backwards compatible.
 * Fixed setting B2 as a backup target with a slash in the application key.
 * Turned off OpenDMARC diagnostic reports sent in response to incoming mail.
-* Fixed some crashes when using an unrelased version of Mail-in-a-Box.
+* Fixed some crashes when using an unreleased version of Mail-in-a-Box.
 * Added z-push administration scripts.
 
 Version 63 (July 27, 2023)

@@ -1129,7 +1129,7 @@ Control panel:
 
 System:
 * The munin system monitoring tool is now installed and accessible at /admin/munin.
-* ownCloud updated to version 8.0.4. The ownCloud installation step now is reslient to download problems. The ownCloud configuration file is now stored in STORAGE_ROOT to fix loss of data when moving STORAGE_ROOT to a new machine.
+* ownCloud updated to version 8.0.4. The ownCloud installation step now is resilient to download problems. The ownCloud configuration file is now stored in STORAGE_ROOT to fix loss of data when moving STORAGE_ROOT to a new machine.
 * The setup scripts now run `apt-get update` prior to installing anything to ensure the apt database is in sync with the packages actually available.
 
 

@@ -1167,7 +1167,7 @@ DNS:
 * Internationalized Domain Names (IDNs) should now work in email. If you had custom DNS or custom web settings for internationalized domains, check that they are still working.
 * It is now possible to set multiple TXT and other types of records on the same domain in the control panel.
 * The custom DNS API was completely rewritten to support setting multiple records of the same type on a domain. Any existing client code using the DNS API will have to be rewritten. (Existing code will just get 404s back.)
-* On some systems the `nsd` service failed to start if network inferfaces were not ready.
+* On some systems the `nsd` service failed to start if network interfaces were not ready.
 
 System / Control Panel:
 

@@ -8,7 +8,6 @@
 rewrite ^/admin/munin$ /admin/munin/ redirect;
 location /admin/ {
 	proxy_pass http://127.0.0.1:10222/;
-	proxy_read_timeout 600s;
 	proxy_set_header X-Forwarded-For $remote_addr;
 	add_header X-Frame-Options "DENY";
 	add_header X-Content-Type-Options nosniff;

@@ -19,6 +19,7 @@
 
 import os, os.path, re, datetime, sys
 import dateutil.parser, dateutil.relativedelta, dateutil.tz
+from datetime import date
 import rtyaml
 from exclusiveprocess import Lock
 

@@ -167,6 +168,8 @@ def should_force_full(config, env):
 	# since the last full backup is greater than half the size
 	# of that full backup.
 	inc_size = 0
+	# Check if day of week is a weekend day
+	weekend = date.today().weekday()>=5
 	for bak in backup_status(env)["backups"]:
 		if not bak["full"]:
 			# Scan through the incremental backups cumulating

@@ -175,12 +178,14 @@ def should_force_full(config, env):
 		else:
 			# ...until we reach the most recent full backup.
 			# Return if we should to a full backup, which is based
-			# on the size of the increments relative to the full
-			# backup, as well as the age of the full backup.
-			if inc_size > .5*bak["size"]:
-				return True
-			if dateutil.parser.parse(bak["date"]) + datetime.timedelta(days=config["min_age_in_days"]*10+1) < datetime.datetime.now(dateutil.tz.tzlocal()):
-				return True
+			# on whether it is a weekend day, the size of the
+			# increments relative to the full backup, as well as
+			# the age of the full backup.
+			if weekend:
+				if inc_size > .5*bak["size"]:
+					return True
+				if dateutil.parser.parse(bak["date"]) + datetime.timedelta(days=config["min_age_in_days"]*10+1) < datetime.datetime.now(dateutil.tz.tzlocal()):
+					return True
 			return False
 	else:
 		# If we got here there are no (full) backups, so make one.

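Condensed, for readers skimming the two hunks above: the box now only considers forcing a full backup on weekend days, and then applies the same two triggers as before (increments larger than half the last full backup, or a full backup older than min_age_in_days*10+1 days). A rough standalone sketch of that rule, not the project's should_force_full itself; the date string is assumed to be timezone-aware, as duplicity's backup dates are:

import datetime
from datetime import date
import dateutil.parser, dateutil.tz

def want_full_backup(last_full_date, last_full_size, inc_size, min_age_in_days):
    # Illustration only: mirrors the weekend + size/age rule from the diff above.
    weekend = date.today().weekday() >= 5  # Saturday or Sunday
    if not weekend:
        return False
    if inc_size > .5 * last_full_size:
        # The increments have outgrown half of the last full backup.
        return True
    # Otherwise force a full backup once the last one is old enough.
    age_limit = datetime.timedelta(days=min_age_in_days * 10 + 1)
    return dateutil.parser.parse(last_full_date) + age_limit < datetime.datetime.now(dateutil.tz.tzlocal())
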
@@ -348,6 +353,7 @@ def perform_backup(full_backup):
 		"--verbosity", "warning", "--no-print-statistics",
 		"--archive-dir", backup_cache_dir,
 		"--exclude", backup_root,
+		"--exclude", os.path.join(env["STORAGE_ROOT"], "owncloud-backup"),
 		"--volsize", "250",
 		"--gpg-options", "'--cipher-algo=AES256'",
 		"--allow-source-mismatch",

@@ -429,6 +435,7 @@ def run_duplicity_verification():
 		"--compare-data",
 		"--archive-dir", backup_cache_dir,
 		"--exclude", backup_root,
+		"--exclude", os.path.join(env["STORAGE_ROOT"], "owncloud-backup"),
 		*get_duplicity_additional_args(env),
 		get_duplicity_target_url(config),
 		env["STORAGE_ROOT"],

@@ -23,7 +23,7 @@ def get_ssl_certificates(env):
 	# that the certificates are good for to the best certificate for
 	# the domain.
 
-	from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
+	from cryptography.hazmat.primitives.asymmetric import dsa, rsa, ec
 	from cryptography.x509 import Certificate
 
 	# The certificates are all stored here:

@@ -44,6 +44,12 @@ def get_ssl_certificates(env):
 			# the cert that it should be a
 			# symlink to.
 			continue
+		if fn in ['ca','ca_certificate.pem','ca_private_key.pem']:
+			# Ignore as these are for generating a temporary
+			# "self-signed" certificate in a virgin setup (before
+			# Let's Encrypt gives us a certificate).
+			#
+			continue
 		fn = os.path.join(ssl_root, fn)
 		if os.path.isfile(fn):
 			yield fn

@@ -68,13 +74,15 @@ def get_ssl_certificates(env):
 			# Not a valid PEM format for a PEM type we care about.
 			continue
 
-		# Is it a private key?
-		if isinstance(pem, RSAPrivateKey):
-			private_keys[pem.public_key().public_numbers()] = { "filename": fn, "key": pem }
-
 		# Is it a certificate?
 		if isinstance(pem, Certificate):
 			certificates.append({ "filename": fn, "cert": pem })
+		# It is a private key
+		elif (isinstance(pem, rsa.RSAPrivateKey)
+			or isinstance(pem, dsa.DSAPrivateKey)
+			or isinstance(pem, ec.EllipticCurvePrivateKey)):
+			private_keys[pem.public_key().public_numbers()] = { "filename": fn, "key": pem }
+
 
 	# Process the certificates.
 	domains = { }

@@ -518,7 +526,7 @@ def check_certificate(domain, ssl_certificate, ssl_private_key, warn_if_expiring
 	# Check that the ssl_certificate & ssl_private_key files are good
 	# for the provided domain.
 
-	from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
+	from cryptography.hazmat.primitives.asymmetric import rsa, dsa, ec
 	from cryptography.x509 import Certificate
 
 	# The ssl_certificate file may contain a chain of certificates. We'll

|
|||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
return (f"The private key file {ssl_private_key} is not a private key file: {e!s}", None)
|
return (f"The private key file {ssl_private_key} is not a private key file: {e!s}", None)
|
||||||
|
|
||||||
if not isinstance(priv_key, RSAPrivateKey):
|
if (not isinstance(priv_key, rsa.RSAPrivateKey)
|
||||||
|
and not isinstance(priv_key, dsa.DSAPrivateKey)
|
||||||
|
and not isinstance(priv_key, ec.EllipticCurvePrivateKey)):
|
||||||
return ("The private key file %s is not a private key file." % ssl_private_key, None)
|
return ("The private key file %s is not a private key file." % ssl_private_key, None)
|
||||||
|
|
||||||
if priv_key.public_key().public_numbers() != cert.public_key().public_numbers():
|
if priv_key.public_key().public_numbers() != cert.public_key().public_numbers():
|
||||||
@@ -655,7 +665,7 @@ def load_pem(pem):
 		msg = "File is not a valid PEM-formatted file."
 		raise ValueError(msg)
 	pem_type = pem_type.group(1)
-	if pem_type in {b"RSA PRIVATE KEY", b"PRIVATE KEY"}:
+	if pem_type.endswith(b"PRIVATE KEY"):
 		return serialization.load_pem_private_key(pem, password=None, backend=default_backend())
 	if pem_type == b"CERTIFICATE":
 		return load_pem_x509_certificate(pem, default_backend())

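Taken together, the ssl_certificates.py hunks above widen key handling from RSA-only to RSA, DSA, and EC keys. A minimal standalone sketch of that classification using the cryptography package (illustration only, not the project's load_pem/get_ssl_certificates functions):

import re
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
from cryptography.x509 import load_pem_x509_certificate

def classify_pem(pem: bytes) -> str:
    # Sniff the PEM header, then load and branch on the concrete object type.
    header = re.match(rb"-----BEGIN (.*?)-----", pem)
    if not header:
        raise ValueError("File is not a valid PEM-formatted file.")
    pem_type = header.group(1)
    if pem_type.endswith(b"PRIVATE KEY"):
        # Matches "PRIVATE KEY", "RSA PRIVATE KEY", "EC PRIVATE KEY", ...
        key = serialization.load_pem_private_key(pem, password=None, backend=default_backend())
        if isinstance(key, (rsa.RSAPrivateKey, dsa.DSAPrivateKey, ec.EllipticCurvePrivateKey)):
            return "supported private key"
        return "unsupported private key type"
    if pem_type == b"CERTIFICATE":
        load_pem_x509_certificate(pem, default_backend())
        return "certificate"
    return "other PEM block"
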
@@ -293,26 +293,45 @@ def run_network_checks(env, output):
 	# The user might have ended up on an IP address that was previously in use
 	# by a spammer, or the user may be deploying on a residential network. We
 	# will not be able to reliably send mail in these cases.
+	rev_ip4 = ".".join(reversed(env['PUBLIC_IP'].split('.')))
+	zen = query_dns(rev_ip4+'.zen.spamhaus.org', 'A', nxdomain=None, retry = False)
+	evaluate_spamhaus_lookup(env['PUBLIC_IP'], 'IPv4', rev_ip4, output, zen)
+
+	if not env['PUBLIC_IPV6']:
+		return
+
+	from ipaddress import IPv6Address
+
+	rev_ip6 = ".".join(reversed(IPv6Address(env['PUBLIC_IPV6']).exploded.split(':')))
+	zen = query_dns(rev_ip6+'.zen.spamhaus.org', 'A', nxdomain=None, retry = False)
+	evaluate_spamhaus_lookup(env['PUBLIC_IPV6'], 'IPv6', rev_ip6, output, zen)
+
+
+def evaluate_spamhaus_lookup(lookupaddress, lookuptype, lookupdomain, output, zen):
 	# See https://www.spamhaus.org/news/article/807/using-our-public-mirrors-check-your-return-codes-now. for
 	# information on spamhaus return codes
-	rev_ip4 = ".".join(reversed(env['PUBLIC_IP'].split('.')))
-	zen = query_dns(rev_ip4+'.zen.spamhaus.org', 'A', nxdomain=None)
 	if zen is None:
-		output.print_ok("IP address is not blacklisted by zen.spamhaus.org.")
+		output.print_ok(f"{lookuptype} address is not blacklisted by zen.spamhaus.org.")
 	elif zen == "[timeout]":
-		output.print_warning("Connection to zen.spamhaus.org timed out. Could not determine whether this box's IP address is blacklisted. Please try again later.")
+		output.print_warning(f"""Connection to zen.spamhaus.org timed out. Could not determine whether this box's
+			{lookuptype} address is blacklisted. Please try again later.""")
 	elif zen == "[Not Set]":
-		output.print_warning("Could not connect to zen.spamhaus.org. Could not determine whether this box's IP address is blacklisted. Please try again later.")
+		output.print_warning(f"""Could not connect to zen.spamhaus.org. Could not determine whether this box's
+			{lookuptype} address is blacklisted. Please try again later.""")
 	elif zen == "127.255.255.252":
-		output.print_warning("Incorrect spamhaus query: %s. Could not determine whether this box's IP address is blacklisted." % (rev_ip4+'.zen.spamhaus.org'))
+		output.print_warning(f"""Incorrect spamhaus query: {lookupdomain + '.zen.spamhaus.org'}. Could not determine whether
+			this box's {lookuptype} address is blacklisted.""")
 	elif zen == "127.255.255.254":
-		output.print_warning("Mail-in-a-Box is configured to use a public DNS server. This is not supported by spamhaus. Could not determine whether this box's IP address is blacklisted.")
+		output.print_warning(f"""Mail-in-a-Box is configured to use a public DNS server. This is not supported by
+			spamhaus. Could not determine whether this box's {lookuptype} address is blacklisted.""")
 	elif zen == "127.255.255.255":
-		output.print_warning("Too many queries have been performed on the spamhaus server. Could not determine whether this box's IP address is blacklisted.")
+		output.print_warning(f"""Too many queries have been performed on the spamhaus server. Could not determine
+			whether this box's {lookuptype} address is blacklisted.""")
 	else:
-		output.print_error("""The IP address of this machine {} is listed in the Spamhaus Block List (code {}),
-			which may prevent recipients from receiving your email. See http://www.spamhaus.org/query/ip/{}.""".format(env['PUBLIC_IP'], zen, env['PUBLIC_IP']))
+		output.print_error(f"""The {lookuptype} address of this machine {lookupaddress} is listed in the Spamhaus Block
+			List (code {zen}), which may prevent recipients from receiving your email. See
+			http://www.spamhaus.org/query/ip/{lookupaddress}.""")
 
 
 def run_domain_checks(rounded_time, env, output, pool, domains_to_check=None):
 	# Get the list of domains we handle mail for.

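For context on the refactor above: the box builds the usual DNSBL query name by reversing the address and appending zen.spamhaus.org, then maps the special 127.255.255.x answers to diagnostics. A hypothetical standalone version of the IPv4 half using dnspython rather than the project's query_dns helper (the function name and the error mapping here are illustrative):

import dns.exception
import dns.resolver

def spamhaus_ipv4_status(public_ip):
    # 203.0.113.5 -> 5.113.0.203.zen.spamhaus.org
    rev_ip4 = ".".join(reversed(public_ip.split(".")))
    name = rev_ip4 + ".zen.spamhaus.org"
    try:
        answers = dns.resolver.resolve(name, "A")
    except dns.resolver.NXDOMAIN:
        return "not listed"                    # the 'zen is None' case above
    except (dns.exception.Timeout, dns.resolver.NoNameservers, dns.resolver.NoAnswer):
        return "lookup failed"                 # roughly the [timeout]/[Not Set] cases
    codes = {rr.to_text() for rr in answers}
    if "127.255.255.252" in codes:
        return "incorrect query"
    if "127.255.255.254" in codes:
        return "public DNS resolver not supported by Spamhaus"
    if "127.255.255.255" in codes:
        return "query limit exceeded"
    return "listed: " + ", ".join(sorted(codes))
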
@@ -532,6 +551,8 @@ def check_dns_zone(domain, env, output, dns_zonefiles):
 	# Check that each custom secondary nameserver resolves the IP address.
 
 	if custom_secondary_ns and not probably_external_dns:
+		SOARecord = query_dns(domain, "SOA", at=env['PUBLIC_IP'])# Explicitly ask the local dns server.
+
 		for ns in custom_secondary_ns:
 			# We must first resolve the nameserver to an IP address so we can query it.
 			ns_ips = query_dns(ns, "A")

@@ -541,15 +562,36 @@ def check_dns_zone(domain, env, output, dns_zonefiles):
 			# Choose the first IP if nameserver returns multiple
 			ns_ip = ns_ips.split('; ')[0]
 
+			checkSOA = True
+
 			# Now query it to see what it says about this domain.
 			ip = query_dns(domain, "A", at=ns_ip, nxdomain=None)
 			if ip == correct_ip:
-				output.print_ok("Secondary nameserver %s resolved the domain correctly." % ns)
+				output.print_ok(f"Secondary nameserver {ns} resolved the domain correctly.")
 			elif ip is None:
-				output.print_error("Secondary nameserver %s is not configured to resolve this domain." % ns)
+				output.print_error(f"Secondary nameserver {ns} is not configured to resolve this domain.")
+				# No need to check SOA record if not configured as nameserver
+				checkSOA = False
+			elif ip == '[timeout]':
+				output.print_error(f"Secondary nameserver {ns} did not resolve this domain, result: {ip}")
+				checkSOA = False
 			else:
 				output.print_error(f"Secondary nameserver {ns} is not configured correctly. (It resolved this domain as {ip}. It should be {correct_ip}.)")
+
+			if checkSOA:
+				# Check that secondary DNS server is synchronized with our primary DNS server. Simplified by checking the SOA record which has a version number
+				SOASecondary = query_dns(domain, "SOA", at=ns_ip)
+
+				if SOARecord == SOASecondary:
+					output.print_ok(f"Secondary nameserver {ns} has consistent SOA record.")
+				elif SOARecord == '[Not Set]':
+					output.print_error(f"Secondary nameserver {ns} has no SOA record configured.")
+				elif SOARecord == '[timeout]':
+					output.print_error(f"Secondary nameserver {ns} timed out on checking SOA record.")
+				else:
+					output.print_error(f"""Secondary nameserver {ns} has inconsistent SOA record (primary: {SOARecord} versus secondary: {SOASecondary}).
+						Check that synchronization between secondary and primary DNS servers is properly set-up.""")
 
 def check_dns_zone_suggestions(domain, env, output, dns_zonefiles, domains_with_a_records):
 	# Warn if a custom DNS record is preventing this or the automatic www redirect from
 	# being served.

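The SOA comparison above works because an SOA record carries the zone's serial (version) number, so a secondary that returns the same SOA as the primary is serving the same zone version. A hypothetical standalone check with dnspython (the project uses its own query_dns wrapper instead):

import dns.exception
import dns.resolver

def soa_serial(domain, nameserver_ip):
    # Ask one specific server for the zone's SOA and return its serial, or None on failure.
    resolver = dns.resolver.Resolver(configure=False)
    resolver.nameservers = [nameserver_ip]
    try:
        answer = resolver.resolve(domain, "SOA")
    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer, dns.resolver.NoNameservers, dns.exception.Timeout):
        return None
    return answer[0].serial

def secondary_in_sync(domain, primary_ip, secondary_ip):
    primary = soa_serial(domain, primary_ip)
    secondary = soa_serial(domain, secondary_ip)
    return primary is not None and primary == secondary
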
@@ -126,7 +126,7 @@ minute=$((RANDOM % 60))  # avoid overloading mailinabox.email
 cat > /etc/cron.d/mailinabox-nightly << EOF;
 # Mail-in-a-Box --- Do not edit / will be overwritten on update.
 # Run nightly tasks: backup, status checks.
-$minute 3 * * * root (cd $PWD && management/daily_tasks.sh)
+$minute 1 * * * root (cd $PWD && management/daily_tasks.sh)
 EOF
 
 # Start the management server.

@@ -219,3 +219,12 @@ fi
 if [ ! -f "$STORAGE_ROOT/ssl/dh2048.pem" ]; then
 	openssl dhparam -out "$STORAGE_ROOT/ssl/dh2048.pem" 2048
 fi
+
+# Cleanup expired SSL certificates from $STORAGE_ROOT/ssl daily
+cat > /etc/cron.daily/mailinabox-ssl-cleanup << EOF;
+#!/bin/bash
+# Mail-in-a-Box
+# Cleanup expired SSL certificates
+$(pwd)/tools/ssl_cleanup
+EOF
+chmod +x /etc/cron.daily/mailinabox-ssl-cleanup

@@ -92,6 +92,13 @@ fi
 # (See https://discourse.mailinabox.email/t/journalctl-reclaim-space-on-small-mailinabox/6728/11.)
 tools/editconf.py /etc/systemd/journald.conf MaxRetentionSec=10day
 
+# ### Improve server privacy
+
+# Disable MOTD adverts to prevent revealing server information in MOTD request headers
+# See https://ma.ttias.be/what-exactly-being-sent-ubuntu-motd/
+tools/editconf.py /etc/default/motd-news ENABLED=0
+rm -f /var/cache/motd-news
+
 # ### Add PPAs.
 
 # We install some non-standard Ubuntu packages maintained by other

@@ -34,7 +34,7 @@ echo "Installing Roundcube (webmail)..."
 apt_install \
 	dbconfig-common \
 	php"${PHP_VER}"-cli php"${PHP_VER}"-sqlite3 php"${PHP_VER}"-intl php"${PHP_VER}"-common php"${PHP_VER}"-curl php"${PHP_VER}"-imap \
-	php"${PHP_VER}"-gd php"${PHP_VER}"-pspell php"${PHP_VER}"-mbstring libjs-jquery libjs-jquery-mousewheel libmagic1 \
+	php"${PHP_VER}"-gd php"${PHP_VER}"-pspell php"${PHP_VER}"-mbstring php"${PHP_VER}"-xml libjs-jquery libjs-jquery-mousewheel libmagic1 \
 	sqlite3
 
 apt_install php"${PHP_VER}"-ldap

@@ -49,8 +49,8 @@ apt_install php"${PHP_VER}"-ldap
 # https://github.com/mstilkerich/rcmcarddav/releases
 # The easiest way to get the package hashes is to run this script and get the hash from
 # the error message.
-VERSION=1.6.8
-HASH=00586f5163b3f6c1b0798be745982e3547b1b24a
+VERSION=1.6.9
+HASH=b63f74209cf287402f6f44b85877388899261f3c
 PERSISTENT_LOGIN_VERSION=version-5.3.0
 HTML5_NOTIFIER_VERSION=68d9ca194212e15b3c7225eb6085dbcf02fd13d7 # version 0.6.4+
 CARDDAV_VERSION=4.4.3

@@ -120,4 +120,6 @@ restart_service php"$PHP_VER"-fpm
 
 # Fix states after upgrade
 
-hide_output php"$PHP_VER" /usr/local/lib/z-push/z-push-admin.php -a fixstates
+if [ $needs_update == 1 ]; then
+	hide_output php"$PHP_VER" /usr/local/lib/z-push/z-push-admin.php -a fixstates
+fi

tools/ssl_cleanup (new executable file, 17 lines)
@@ -0,0 +1,17 @@
+#!/bin/bash
+# Cleanup SSL certificates which expired more than 7 days ago from $STORAGE_ROOT/ssl and move them to $STORAGE_ROOT/ssl.expired
+
+source /etc/mailinabox.conf
+shopt -s extglob
+
+retain_after="$(date --date="7 days ago" +%Y%m%d)"
+
+mkdir -p $STORAGE_ROOT/ssl.expired
+for file in $STORAGE_ROOT/ssl/*-+([0-9])-+([0-9a-f]).pem; do
+	pem="$(basename "$file")"
+	not_valid_after="$(cut -d- -f1 <<< "${pem: -21}")"
+
+	if [ "$not_valid_after" -lt "$retain_after" ]; then
+		mv "$file" "$STORAGE_ROOT/ssl.expired/${pem}"
+	fi
+done

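The script's glob and its "${pem: -21}" slice imply a certificate filename layout of the form <name>-<YYYYMMDD not-valid-after date>-<8 hex chars>.pem, so the last 21 characters are always "YYYYMMDD-xxxxxxxx.pem"; that layout is inferred from the script itself, not stated elsewhere in this diff. A small illustrative Python equivalent of the date test (the filename below is made up):

from datetime import date, timedelta

def expired_more_than_7_days_ago(pem_filename):
    # "example.com-20240101-0123abcd.pem" -> suffix "20240101-0123abcd.pem"
    suffix = pem_filename[-21:]
    not_valid_after = suffix.split("-")[0]          # "20240101"
    retain_after = (date.today() - timedelta(days=7)).strftime("%Y%m%d")
    # Fixed-width YYYYMMDD strings compare correctly as plain strings.
    return not_valid_after < retain_after

print(expired_more_than_7_days_ago("example.com-20240101-0123abcd.pem"))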