Merge branch 'master' into management-memory-usage

yodax 2015-12-26 08:38:04 -05:00
commit 8078799c7f
18 changed files with 633 additions and 580 deletions

View File

@ -7,12 +7,15 @@ Still In Development
Mail:
* Updated Roundcube to version 1.1.3.
* Auto-create RFC2142 aliases for abuse@.
Control panel:
* When IPv6 is enabled, check that system services are accessible over IPv6 too, that the box's hostname resolves over IPv6, and that reverse DNS is set up correctly for IPv6.
* Explanatory text for setting up a secondary nameserver has been added/fixed.
* DNS checks now have a timeout in case a DNS server is not responding, so the checks don't stall indefinitely.
* Better messages if external DNS is used and, weirdly, custom secondary nameservers are set.
* Add POP to the mail client settings documentation.
System:
@ -22,6 +25,8 @@ System:
* If ownCloud sends out email, it will use the box's administrative address now (admin@yourboxname).
* Z-Push (Exchange/ActiveSync) logs now exclude warnings and are rotated to save disk space.
* Fix a pip command that might not have installed all necessary Python packages.
* The control panel and backup would not work on Google Compute Engine because GCE instances install a conflicting boto package.
* Added a new command `management/backup.py --restore` to restore files from a backup to a target directory (command line arguments are passed to `duplicity restore`).
v0.14 (November 4, 2015)
------------------------
@ -392,7 +397,7 @@ v0.02 (September 21, 2014)
* Better logic for determining when to take a full backup.
* Reduce DNS TTL, not that it seems to really matter.
* Add SSHFP DNS records.
* Add an API for setting custom DNS records
* Update to ownCloud 7.0.2.
* Some things were broken if the machine had an IPv6 address.
* Use a dialogs library to ask users questions during setup.

View File

@ -1,5 +1,11 @@
# Fail2Ban configuration file for Mail-in-a-Box
[DEFAULT]
# Whitelist our own IP addresses. 127.0.0.1/8 is the default. But our status checks
# ping services over the public interface so we should whitelist that address of
# ours too. The string is substituted during installation.
ignoreip = 127.0.0.1/8 PUBLIC_IP
# JAILS
[ssh]
@ -17,3 +23,7 @@ enabled = true
filter = dovecotimap
findtime = 30
maxretry = 20
[recidive]
enabled = true
maxretry = 10
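The ignoreip comment above notes that the PUBLIC_IP placeholder is substituted during installation. A minimal sketch of that substitution step, assuming the template lives at conf/fail2ban/jail.local and the box's address is exported in a PUBLIC_IP environment variable (both assumptions; the actual setup script may do this differently):

import os

# Read the jail template, replace the PUBLIC_IP placeholder with the box's
# real public address, and write the result where fail2ban expects it.
public_ip = os.environ["PUBLIC_IP"]              # assumed to be exported by setup
with open("conf/fail2ban/jail.local") as f:      # assumed template path
    jail_conf = f.read().replace("PUBLIC_IP", public_ip)
with open("/etc/fail2ban/jail.local", "w") as f:
    f.write(jail_conf)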

View File

@ -12,7 +12,7 @@ import os, os.path, shutil, glob, re, datetime
import dateutil.parser, dateutil.relativedelta, dateutil.tz
import rtyaml
from utils import exclusive_process, load_environment, shell, wait_for_service
from utils import exclusive_process, load_environment, shell, wait_for_service, fix_boto
def backup_status(env):
# Root folder
@ -314,6 +314,18 @@ def run_duplicity_verification():
env["STORAGE_ROOT"],
], get_env(env))
def run_duplicity_restore(args):
env = load_environment()
config = get_backup_config(env)
backup_cache_dir = os.path.join(env["STORAGE_ROOT"], 'backup', 'cache')
shell('check_call', [
"/usr/bin/duplicity",
"restore",
"--archive-dir", backup_cache_dir,
config["target"],
] + args,
get_env(env))
def list_target_files(config):
import urllib.parse
try:
@ -326,6 +338,7 @@ def list_target_files(config):
elif p.scheme == "s3":
# match to a Region
fix_boto() # must call prior to importing boto
import boto.s3
from boto.exception import BotoServerError
for region in boto.s3.regions():
@ -437,6 +450,16 @@ if __name__ == "__main__":
# are readable, and b) report if they are up to date.
run_duplicity_verification()
elif sys.argv[-1] == "--status":
# Show backup status.
ret = backup_status(load_environment())
print(rtyaml.dump(ret["backups"]))
elif len(sys.argv) >= 2 and sys.argv[1] == "--restore":
# Run duplicity restore. Rest of command line passed as arguments
# to duplicity. The restore path should be specified.
run_duplicity_restore(sys.argv[2:])
else:
# Perform a backup. Add --full to force a full backup rather than
# possibly performing an incremental backup.
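A minimal usage sketch for the new --restore flag: everything after --restore is handed straight to `duplicity restore`, so duplicity's own options apply unchanged. The paths, the sudo invocation, and the --time value below are assumed examples, not taken from this commit:

import subprocess

# Restore the box's backed-up files as they were three days ago into /tmp/restored-files.
# backup.py builds "duplicity restore --archive-dir <cache> <target-url>" and appends
# everything after --restore, so these are ordinary duplicity arguments.
subprocess.check_call([
    "sudo", "management/backup.py", "--restore",
    "--time", "3D",            # duplicity option: use the backup state from 3 days ago
    "/tmp/restored-files",     # restore destination (passed through to duplicity)
])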

View File

@ -94,6 +94,7 @@ def index():
no_users_exist = (len(get_mail_users(env)) == 0)
no_admins_exist = (len(get_admins(env)) == 0)
utils.fix_boto() # must call prior to importing boto
import boto.s3
backup_s3_hosts = [(r.name, r.endpoint) for r in boto.s3.regions()]
@ -318,17 +319,20 @@ def dns_get_dump():
@app.route('/ssl/csr/<domain>', methods=['POST'])
@authorized_personnel_only
def ssl_get_csr(domain):
from web_update import create_csr
from ssl_certificates import create_csr
ssl_private_key = os.path.join(os.path.join(env["STORAGE_ROOT"], 'ssl', 'ssl_private_key.pem'))
return create_csr(domain, ssl_private_key, env)
@app.route('/ssl/install', methods=['POST'])
@authorized_personnel_only
def ssl_install_cert():
from web_update import install_cert
from web_update import get_web_domains
from ssl_certificates import install_cert
domain = request.form.get('domain')
ssl_cert = request.form.get('cert')
ssl_chain = request.form.get('chain')
if domain not in get_web_domains(env):
return "Invalid domain name."
return install_cert(domain, ssl_cert, ssl_chain, env)
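Both endpoints above accept form-encoded fields and return a plain-text status. A hedged sketch of driving them with the requests library; the daemon address and the credentials are assumptions, not something this commit specifies:

import requests

BASE = "http://127.0.0.1:10222"           # assumed local address of the management daemon
AUTH = ("admin@example.com", "secret")    # an admin user's email/password (assumed)

# Ask the daemon for a CSR for one of the box's domains.
csr = requests.post(BASE + "/ssl/csr/www.example.com", auth=AUTH).text

# ...after a CA signs the CSR, install the signed certificate plus its chain.
resp = requests.post(BASE + "/ssl/install", auth=AUTH, data={
    "domain": "www.example.com",
    "cert": open("signed_cert.pem").read(),
    "chain": open("intermediate_chain.pem").read(),
})
print(resp.text)   # "OK" (possibly followed by service restart notes) or an error such as "Invalid domain name."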
# WEB

View File

@ -51,21 +51,13 @@ def get_dns_zones(env):
return zonefiles
def do_dns_update(env, force=False):
# What domains (and their zone filenames) should we build?
domains = get_dns_domains(env)
zonefiles = get_dns_zones(env)
# Custom records to add to zones.
additional_records = list(get_custom_dns_config(env))
from web_update import get_default_www_redirects
www_redirect_domains = get_default_www_redirects(env)
# Write zone files.
os.makedirs('/etc/nsd/zones', exist_ok=True)
zonefiles = []
updated_domains = []
for i, (domain, zonefile) in enumerate(zonefiles):
# Build the records to put in the zone.
records = build_zone(domain, domains, additional_records, www_redirect_domains, env)
for (domain, zonefile, records) in build_zones(env):
# The final set of files will be signed.
zonefiles.append((domain, zonefile + ".signed"))
# See if the zone has changed, and if so update the serial number
# and write the zone file.
@ -73,14 +65,6 @@ def do_dns_update(env, force=False):
# Zone was not updated. There were no changes.
continue
# If this is a .justtesting.email domain, then post the update.
try:
justtestingdotemail(domain, records)
except:
# Hmm. Might be a network issue. If we stop now, will we end
# up in an inconsistent state? Let's just continue.
pass
# Mark that we just updated this domain.
updated_domains.append(domain)
@ -95,14 +79,8 @@ def do_dns_update(env, force=False):
# and return True so we get a chance to re-sign it.
sign_zone(domain, zonefile, env)
# Now that all zones are signed (some might not have changed and so didn't
# just get signed now, but were before) update the zone filename so nsd.conf
# uses the signed file.
for i in range(len(zonefiles)):
zonefiles[i][1] += ".signed"
# Write the main nsd.conf file.
if write_nsd_conf(zonefiles, additional_records, env):
if write_nsd_conf(zonefiles, list(get_custom_dns_config(env)), env):
# Make sure updated_domains contains *something* if we wrote an updated
# nsd.conf so that we know to restart nsd.
if len(updated_domains) == 0:
@ -112,8 +90,8 @@ def do_dns_update(env, force=False):
if len(updated_domains) > 0:
shell('check_call', ["/usr/sbin/service", "nsd", "restart"])
# Write the OpenDKIM configuration tables.
if write_opendkim_tables(domains, env):
# Write the OpenDKIM configuration tables for all of the domains.
if write_opendkim_tables([domain for domain, zonefile in zonefiles], env):
# Settings changed. Kick opendkim.
shell('check_call', ["/usr/sbin/service", "opendkim", "restart"])
if len(updated_domains) == 0:
@ -132,6 +110,22 @@ def do_dns_update(env, force=False):
########################################################################
def build_zones(env):
# What domains (and their zone filenames) should we build?
domains = get_dns_domains(env)
zonefiles = get_dns_zones(env)
# Custom records to add to zones.
additional_records = list(get_custom_dns_config(env))
from web_update import get_web_domains
www_redirect_domains = set(get_web_domains(env)) - set(get_web_domains(env, include_www_redirects=False))
# Build DNS records for each zone.
for domain, zonefile in zonefiles:
# Build the records to put in the zone.
records = build_zone(domain, domains, additional_records, www_redirect_domains, env)
yield (domain, zonefile, records)
def build_zone(domain, all_domains, additional_records, www_redirect_domains, env, is_zone=True):
records = []
@ -861,57 +855,9 @@ def get_custom_dns_record(custom_dns, qname, rtype):
########################################################################
def justtestingdotemail(domain, records):
# If the domain is a subdomain of justtesting.email, which we own,
# automatically populate the zone where it is set up on dns4e.com.
# Ideally if dns4e.com supported NS records we would just have it
# delegate DNS to us, but instead we will populate the whole zone.
import subprocess, json, urllib.parse
if not domain.endswith(".justtesting.email"):
return
for subdomain, querytype, value, explanation in records:
if querytype in ("NS",): continue
if subdomain in ("www", "ns1", "ns2"): continue # don't do unnecessary things
if subdomain == None:
subdomain = domain
else:
subdomain = subdomain + "." + domain
if querytype == "TXT":
# nsd requires parentheses around txt records with multiple parts,
# but DNS4E requires there be no parentheses; also it goes into
# nsd with a newline and a tab, which we replace with a space here
value = re.sub("^\s*\(\s*([\w\W]*)\)", r"\1", value)
value = re.sub("\s+", " ", value)
else:
continue
print("Updating DNS for %s/%s..." % (subdomain, querytype))
resp = json.loads(subprocess.check_output([
"curl",
"-s",
"https://api.dns4e.com/v7/%s/%s" % (urllib.parse.quote(subdomain), querytype.lower()),
"--user", "2ddbd8e88ed1495fa0ec:A97TDJV26CVUJS6hqAs0CKnhj4HvjTM7MwAAg8xb",
"--data", "record=%s" % urllib.parse.quote(value),
]).decode("utf8"))
print("\t...", resp.get("message", "?"))
########################################################################
def build_recommended_dns(env):
ret = []
domains = get_dns_domains(env)
zonefiles = get_dns_zones(env)
additional_records = list(get_custom_dns_config(env))
from web_update import get_default_www_redirects
www_redirect_domains = get_default_www_redirects(env)
for domain, zonefile in zonefiles:
records = build_zone(domain, domains, additional_records, www_redirect_domains, env)
for (domain, zonefile, records) in build_zones(env):
# remove records that we don't display
records = [r for r in records if r[3] is not False]
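Since zone construction is now centralized in build_zones(env), the generated records can be inspected without touching nsd. A small dump sketch, assuming it is run from the management directory with a valid environment:

from utils import load_environment
from dns_update import build_zones

env = load_environment()
for domain, zonefile, records in build_zones(env):
    print("zone %s (%s):" % (domain, zonefile))
    for qname, rtype, value, explanation in records:
        # build_zone() emits (qname, rtype, value, explanation) tuples; a qname of
        # None denotes the zone apex.
        print("  %s %s %s" % (qname or "@", rtype, value))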

View File

@ -77,7 +77,7 @@ def prettify_idn_email_address(email):
def is_dcv_address(email):
email = email.lower()
for localpart in ("admin", "administrator", "postmaster", "hostmaster", "webmaster"):
for localpart in ("admin", "administrator", "postmaster", "hostmaster", "webmaster", "abuse"):
if email.startswith(localpart+"@") or email.startswith(localpart+"+"):
return True
return False
@ -520,17 +520,21 @@ def get_required_aliases(env):
# email on that domain are the required aliases or a catch-all/domain-forwarder.
real_mail_domains = get_mail_domains(env,
filter_aliases = lambda alias :
not alias.startswith("postmaster@") and not alias.startswith("admin@")
not alias.startswith("postmaster@")
and not alias.startswith("admin@")
and not alias.startswith("abuse@")
and not alias.startswith("@")
)
# Create postmaster@ and admin@ for all domains we serve mail on.
# postmaster@ is assumed to exist by our Postfix configuration. admin@
# isn't anything, but it might save the user some trouble e.g. when
# Create postmaster@, admin@ and abuse@ for all domains we serve
# mail on. postmaster@ is assumed to exist by our Postfix configuration.
# admin@ isn't anything, but it might save the user some trouble, e.g. when
# buying an SSL certificate.
# abuse@ is part of RFC2142: https://www.ietf.org/rfc/rfc2142.txt
for domain in real_mail_domains:
aliases.add("postmaster@" + domain)
aliases.add("admin@" + domain)
aliases.add("abuse@" + domain)
return aliases
@ -572,7 +576,7 @@ def kick(env, mail_result=None):
# longer have any other email addresses for.
for address, forwards_to, *_ in existing_alias_records:
user, domain = address.split("@")
if user in ("postmaster", "admin") \
if user in ("postmaster", "admin", "abuse") \
and address not in required_aliases \
and forwards_to == get_system_administrator(env):
remove_mail_alias(address, env, do_kick=False)

View File

@ -0,0 +1,382 @@
# Utilities for installing and selecting SSL certificates.
import os, os.path, re, shutil
from utils import shell, safe_domain_name
def get_ssl_certificates(env):
# Scan all of the installed SSL certificates and map every domain
# that the certificates are good for to the best certificate for
# the domain.
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.x509 import Certificate
# The certificates are all stored here:
ssl_root = os.path.join(env["STORAGE_ROOT"], 'ssl')
# List all of the files in the SSL directory and one level deep.
def get_file_list():
for fn in os.listdir(ssl_root):
fn = os.path.join(ssl_root, fn)
if os.path.isfile(fn):
yield fn
elif os.path.isdir(fn):
for fn1 in os.listdir(fn):
fn1 = os.path.join(fn, fn1)
if os.path.isfile(fn1):
yield fn1
# Remember stuff.
private_keys = { }
certificates = [ ]
# Scan each of the files to find private keys and certificates.
# We must load all of the private keys first before processing
# certificates so that we can check that we have a private key
# available before using a certificate.
for fn in get_file_list():
try:
pem = load_pem(load_cert_chain(fn)[0])
except ValueError:
# Not a valid PEM format for a PEM type we care about.
continue
# Remember where we got this object.
pem._filename = fn
# Is it a private key?
if isinstance(pem, RSAPrivateKey):
private_keys[pem.public_key().public_numbers()] = pem
# Is it a certificate?
if isinstance(pem, Certificate):
certificates.append(pem)
# Process the certificates.
domains = { }
for cert in certificates:
# What domains is this certificate good for?
cert_domains, primary_domain = get_certificate_domains(cert)
cert._primary_domain = primary_domain
# Is there a private key file for this certificate?
private_key = private_keys.get(cert.public_key().public_numbers())
if not private_key:
continue
cert._private_key = private_key
# Add this cert to the list of certs usable for the domains.
for domain in cert_domains:
domains.setdefault(domain, []).append(cert)
# Sort the certificates to prefer good ones.
import datetime
now = datetime.datetime.utcnow()
ret = { }
for domain, cert_list in domains.items():
cert_list.sort(key = lambda cert : (
# must be valid NOW
cert.not_valid_before <= now <= cert.not_valid_after,
# prefer one that is not self-signed
cert.issuer != cert.subject,
# prefer one with the expiration furthest into the future so
# that we can easily rotate to new certs as we get them
cert.not_valid_after,
# in case a certificate is installed in multiple paths,
# prefer the... lexicographically last one?
cert._filename,
), reverse=True)
cert = cert_list.pop(0)
ret[domain] = {
"private-key": cert._private_key._filename,
"certificate": cert._filename,
"primary-domain": cert._primary_domain,
}
return ret
def get_domain_ssl_files(domain, ssl_certificates, env, allow_missing_cert=False):
# Get the default paths.
ssl_private_key = os.path.join(os.path.join(env["STORAGE_ROOT"], 'ssl', 'ssl_private_key.pem'))
ssl_certificate = os.path.join(os.path.join(env["STORAGE_ROOT"], 'ssl', 'ssl_certificate.pem'))
if domain == env['PRIMARY_HOSTNAME']:
# The primary domain must use the server certificate because
# it is hard-coded in some service configuration files.
return ssl_private_key, ssl_certificate, None
wildcard_domain = re.sub("^[^\.]+", "*", domain)
if domain in ssl_certificates:
cert_info = ssl_certificates[domain]
cert_type = "multi-domain"
elif wildcard_domain in ssl_certificates:
cert_info = ssl_certificates[wildcard_domain]
cert_type = "wildcard"
elif not allow_missing_cert:
# No certificate is available for this domain! Return default files.
ssl_via = "Using certificate for %s." % env['PRIMARY_HOSTNAME']
return ssl_private_key, ssl_certificate, ssl_via
else:
# No certificate is available - and warn appropriately.
return None
# 'via' is a hint to the user about which certificate is in use for the domain
if cert_info['certificate'] == os.path.join(env["STORAGE_ROOT"], 'ssl', 'ssl_certificate.pem'):
# Using the server certificate.
via = "Using same %s certificate as for %s." % (cert_type, env['PRIMARY_HOSTNAME'])
elif cert_info['primary-domain'] != domain and cert_info['primary-domain'] in ssl_certificates and cert_info == ssl_certificates[cert_info['primary-domain']]:
via = "Using same %s certificate as for %s." % (cert_type, cert_info['primary-domain'])
else:
via = None # don't show a hint - show expiration info instead
return cert_info['private-key'], cert_info['certificate'], via
def create_csr(domain, ssl_key, env):
return shell("check_output", [
"openssl", "req", "-new",
"-key", ssl_key,
"-sha256",
"-subj", "/C=%s/ST=/L=/O=/CN=%s" % (env["CSR_COUNTRY"], domain)])
def install_cert(domain, ssl_cert, ssl_chain, env):
# Write the combined cert+chain to a temporary path and validate that it is OK.
# The certificate always goes above the chain.
import tempfile
fd, fn = tempfile.mkstemp('.pem')
os.write(fd, (ssl_cert + '\n' + ssl_chain).encode("ascii"))
os.close(fd)
# Do validation on the certificate before installing it.
ssl_private_key = os.path.join(os.path.join(env["STORAGE_ROOT"], 'ssl', 'ssl_private_key.pem'))
cert_status, cert_status_details = check_certificate(domain, fn, ssl_private_key)
if cert_status != "OK":
if cert_status == "SELF-SIGNED":
cert_status = "This is a self-signed certificate. I can't install that."
os.unlink(fn)
if cert_status_details is not None:
cert_status += " " + cert_status_details
return cert_status
# Where to put it?
# Make a unique path for the certificate.
from cryptography.hazmat.primitives import hashes
from binascii import hexlify
cert = load_pem(load_cert_chain(fn)[0])
all_domains, cn = get_certificate_domains(cert)
path = "%s-%s-%s.pem" % (
safe_domain_name(cn), # common name, which should be filename safe because it is IDNA-encoded, but in case of a malformed cert make sure it's ok to use as a filename
cert.not_valid_after.date().isoformat().replace("-", ""), # expiration date
hexlify(cert.fingerprint(hashes.SHA256())).decode("ascii")[0:8], # fingerprint prefix
)
ssl_certificate = os.path.join(os.path.join(env["STORAGE_ROOT"], 'ssl', path))
# Install the certificate.
os.makedirs(os.path.dirname(ssl_certificate), exist_ok=True)
shutil.move(fn, ssl_certificate)
ret = ["OK"]
# When updating the cert for PRIMARY_HOSTNAME, symlink it from the system
# certificate path, which is hard-coded for various purposes, and then
# update DNS (because of the DANE TLSA record), postfix, and dovecot,
# which all use the file.
if domain == env['PRIMARY_HOSTNAME']:
# Update symlink.
system_ssl_certificate = os.path.join(os.path.join(env["STORAGE_ROOT"], 'ssl', 'ssl_certificate.pem'))
os.unlink(system_ssl_certificate)
os.symlink(ssl_certificate, system_ssl_certificate)
# Update DNS & restart postfix and dovecot so they pick up the new file.
from dns_update import do_dns_update
ret.append( do_dns_update(env) )
shell('check_call', ["/usr/sbin/service", "postfix", "restart"])
shell('check_call', ["/usr/sbin/service", "dovecot", "restart"])
ret.append("mail services restarted")
# Update the web configuration so nginx picks up the new certificate file.
from web_update import do_web_update
ret.append( do_web_update(env) )
return "\n".join(ret)
def check_certificate(domain, ssl_certificate, ssl_private_key, warn_if_expiring_soon=True, rounded_time=False, just_check_domain=False):
# Check that the ssl_certificate & ssl_private_key files are good
# for the provided domain.
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.x509 import Certificate
# The ssl_certificate file may contain a chain of certificates. We'll
# need to split that up before we can pass anything to openssl or
# parse them in Python. Parse it with the cryptography library.
try:
ssl_cert_chain = load_cert_chain(ssl_certificate)
cert = load_pem(ssl_cert_chain[0])
if not isinstance(cert, Certificate): raise ValueError("This is not a certificate file.")
except ValueError as e:
return ("There is a problem with the certificate file: %s" % str(e), None)
# First check that the domain name is one of the names allowed by
# the certificate.
if domain is not None:
certificate_names, cert_primary_name = get_certificate_domains(cert)
# Check that the domain appears among the acceptable names, or a wildcard
# form of the domain name (which is a stricter check than the specs but
# should work in normal cases).
wildcard_domain = re.sub("^[^\.]+", "*", domain)
if domain not in certificate_names and wildcard_domain not in certificate_names:
return ("The certificate is for the wrong domain name. It is for %s."
% ", ".join(sorted(certificate_names)), None)
# Second, check that the certificate matches the private key.
if ssl_private_key is not None:
try:
priv_key = load_pem(open(ssl_private_key, 'rb').read())
except ValueError as e:
return ("The private key file %s is not a private key file: %s" % (ssl_private_key, str(e)), None)
if not isinstance(priv_key, RSAPrivateKey):
return ("The private key file %s is not a private key file." % ssl_private_key, None)
if priv_key.public_key().public_numbers() != cert.public_key().public_numbers():
return ("The certificate does not correspond to the private key at %s." % ssl_private_key, None)
# We could also use the openssl command line tool to get the modulus
# listed in each file. The output of each command below looks like "Modulus=XXXXX".
# $ openssl rsa -inform PEM -noout -modulus -in ssl_private_key
# $ openssl x509 -in ssl_certificate -noout -modulus
# Third, check if the certificate is self-signed. Return a special flag string.
if cert.issuer == cert.subject:
return ("SELF-SIGNED", None)
# When selecting which certificate to use for non-primary domains, we check if the primary
# certificate or a www-parent-domain certificate is good for the domain. There's no need
# to run extra checks beyond this point.
if just_check_domain:
return ("OK", None)
# Check that the certificate hasn't expired. The datetimes returned by the
# certificate are 'naive' and in UTC. We need to get the current time in UTC.
import datetime
now = datetime.datetime.utcnow()
if not(cert.not_valid_before <= now <= cert.not_valid_after):
return ("The certificate has expired or is not yet valid. It is valid from %s to %s." % (cert.not_valid_before, cert.not_valid_after), None)
# Next validate that the certificate is valid. This checks whether the certificate
# is self-signed, that the chain of trust makes sense, that it is signed by a CA
# that Ubuntu has installed on this machine's list of CAs, and I think that it hasn't
# expired.
# The certificate chain has to be passed separately and is given via STDIN.
# This command returns a non-zero exit status in most cases, so trap errors.
retcode, verifyoutput = shell('check_output', [
"openssl",
"verify", "-verbose",
"-purpose", "sslserver", "-policy_check",]
+ ([] if len(ssl_cert_chain) == 1 else ["-untrusted", "/proc/self/fd/0"])
+ [ssl_certificate],
input=b"\n\n".join(ssl_cert_chain[1:]),
trap=True)
if "self signed" in verifyoutput:
# Certificate is self-signed. Probably we detected this above.
return ("SELF-SIGNED", None)
elif retcode != 0:
if "unable to get local issuer certificate" in verifyoutput:
return ("The certificate is missing an intermediate chain or the intermediate chain is incorrect or incomplete. (%s)" % verifyoutput, None)
# There is some unknown problem. Return the `openssl verify` raw output.
return ("There is a problem with the SSL certificate.", verifyoutput.strip())
else:
# `openssl verify` returned a zero exit status so the cert is currently
# good.
# But is it expiring soon?
cert_expiration_date = cert.not_valid_after
ndays = (cert_expiration_date-now).days
if not rounded_time or ndays < 7:
expiry_info = "The certificate expires in %d days on %s." % (ndays, cert_expiration_date.strftime("%x"))
elif ndays <= 14:
expiry_info = "The certificate expires in less than two weeks, on %s." % cert_expiration_date.strftime("%x")
elif ndays <= 31:
expiry_info = "The certificate expires in less than a month, on %s." % cert_expiration_date.strftime("%x")
else:
expiry_info = "The certificate expires on %s." % cert_expiration_date.strftime("%x")
if ndays <= 31 and warn_if_expiring_soon:
return ("The certificate is expiring soon: " + expiry_info, None)
# Return the special OK code.
return ("OK", expiry_info)
def load_cert_chain(pemfile):
# A certificate .pem file may contain a chain of certificates.
# Load the file and split them apart.
re_pem = rb"(-+BEGIN (?:.+)-+[\r\n]+(?:[A-Za-z0-9+/=]{1,64}[\r\n]+)+-+END (?:.+)-+[\r\n]+)"
with open(pemfile, "rb") as f:
pem = f.read() + b"\n" # ensure trailing newline
pemblocks = re.findall(re_pem, pem)
if len(pemblocks) == 0:
raise ValueError("File does not contain valid PEM data.")
return pemblocks
def load_pem(pem):
# Parse a "---BEGIN .... END---" PEM string and return a Python object for it
# using classes from the cryptography package.
from cryptography.x509 import load_pem_x509_certificate
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.backends import default_backend
pem_type = re.match(b"-+BEGIN (.*?)-+[\r\n]", pem)
if pem_type is None:
raise ValueError("File is not a valid PEM-formatted file.")
pem_type = pem_type.group(1)
if pem_type in (b"RSA PRIVATE KEY", b"PRIVATE KEY"):
return serialization.load_pem_private_key(pem, password=None, backend=default_backend())
if pem_type == b"CERTIFICATE":
return load_pem_x509_certificate(pem, default_backend())
raise ValueError("Unsupported PEM object type: " + pem_type.decode("ascii", "replace"))
def get_certificate_domains(cert):
from cryptography.x509 import DNSName, ExtensionNotFound, OID_COMMON_NAME, OID_SUBJECT_ALTERNATIVE_NAME
import idna
names = set()
cn = None
# The domain may be found in the Subject Common Name (CN). This comes back as an IDNA (ASCII)
# string, which is the format we store domains in - so good.
try:
cn = cert.subject.get_attributes_for_oid(OID_COMMON_NAME)[0].value
names.add(cn)
except IndexError:
# No common name? Certificate is probably generated incorrectly.
# But we'll let it error-out when it doesn't find the domain.
pass
# ... or be one of the Subject Alternative Names. The cryptography library handily IDNA-decodes
# the names for us. We must encode back to ASCII, but wildcard certificates can't pass through
# IDNA encoding/decoding so we must special-case. See https://github.com/pyca/cryptography/pull/2071.
def idna_decode_dns_name(dns_name):
if dns_name.startswith("*."):
return "*." + idna.encode(dns_name[2:]).decode('ascii')
else:
return idna.encode(dns_name).decode('ascii')
try:
sans = cert.extensions.get_extension_for_oid(OID_SUBJECT_ALTERNATIVE_NAME).value.get_values_for_type(DNSName)
for san in sans:
names.add(idna_decode_dns_name(san))
except ExtensionNotFound:
pass
return names, cn
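The module's entry points compose like this: get_ssl_certificates(env) maps every covered domain to its best available certificate, get_domain_ssl_files() picks the key/cert pair (plus a human-readable hint) for one domain, and check_certificate() reports a (status, details) verdict. A short sketch of the caller pattern, assuming a working management environment; the domain name is an assumed example:

from utils import load_environment
from ssl_certificates import get_ssl_certificates, get_domain_ssl_files, check_certificate

env = load_environment()

# {domain: {"private-key": ..., "certificate": ..., "primary-domain": ...}, ...}
certs = get_ssl_certificates(env)

key_file, cert_file, hint = get_domain_ssl_files("mail.example.com", certs, env)
status, details = check_certificate("mail.example.com", cert_file, key_file)
print(status, details)   # e.g. ("OK", "The certificate expires in 90 days on ...")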

View File

@ -4,8 +4,6 @@
# SSL certificates have been signed, etc., and if not tells the user
# what to do next.
__ALL__ = ['check_certificate']
import sys, os, os.path, re, subprocess, datetime, multiprocessing.pool
import dns.reversename, dns.resolver
@ -14,7 +12,8 @@ import idna
import psutil
from dns_update import get_dns_zones, build_tlsa_record, get_custom_dns_config, get_secondary_dns, get_custom_dns_record
from web_update import get_web_domains, get_default_www_redirects, get_ssl_certificates, get_domain_ssl_files, get_domains_with_a_records
from web_update import get_web_domains, get_domains_with_a_records
from ssl_certificates import get_ssl_certificates, get_domain_ssl_files, check_certificate
from mailconfig import get_mail_domains, get_mail_aliases
from utils import shell, sort_domains, load_env_vars_from_file, load_settings
@ -105,45 +104,60 @@ def check_service(i, service, env):
# Skip check (no port, e.g. no sshd).
return (i, None, None, None)
import socket
output = BufferedOutput()
running = False
fatal = False
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(1)
try:
try:
s.connect((
"127.0.0.1" if not service["public"] else env['PUBLIC_IP'],
service["port"]))
running = True
except OSError as e1:
if service["public"] and service["port"] != 53:
# For public services (except DNS), try the private IP as a fallback.
s1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s1.settimeout(1)
try:
s1.connect(("127.0.0.1", service["port"]))
output.print_error("%s is running but is not publicly accessible at %s:%d (%s)." % (service['name'], env['PUBLIC_IP'], service['port'], str(e1)))
except:
raise e1
finally:
s1.close()
else:
raise
except OSError as e:
output.print_error("%s is not running (%s; port %d)." % (service['name'], str(e), service['port']))
# Helper function to make a connection to the service, since we try
# up to three ways (localhost, IPv4 address, IPv6 address).
def try_connect(ip):
# Connect to the given IP address on the service's port with a one-second timeout.
import socket
s = socket.socket(socket.AF_INET if ":" not in ip else socket.AF_INET6, socket.SOCK_STREAM)
s.settimeout(1)
try:
s.connect((ip, service["port"]))
return True
except OSError as e:
# timed out or some other odd error
return False
finally:
s.close()
if service["public"]:
# Service should be publicly accessible.
if try_connect(env["PUBLIC_IP"]):
# IPv4 ok.
if not env.get("PUBLIC_IPV6") or service.get("ipv6") is False or try_connect(env["PUBLIC_IPV6"]):
# No IPv6, or service isn't meant to run on IPv6, or IPv6 is good.
running = True
# IPv4 ok but IPv6 failed. Try the PRIVATE_IPV6 address to see if the service is bound to the interface.
elif service["port"] != 53 and try_connect(env["PRIVATE_IPV6"]):
output.print_error("%s is running (and available over IPv4 and the local IPv6 address), but it is not publicly accessible at %s:%d." % (service['name'], env['PUBLIC_IP'], service['port']))
else:
output.print_error("%s is running and available over IPv4 but is not accessible over IPv6 at %s port %d." % (service['name'], env['PUBLIC_IPV6'], service['port']))
# IPv4 failed. Try the private IP to see if the service is running but not accessible (except DNS because a different service runs on the private IP).
elif service["port"] != 53 and try_connect("127.0.0.1"):
output.print_error("%s is running but is not publicly accessible at %s:%d." % (service['name'], env['PUBLIC_IP'], service['port']))
else:
output.print_error("%s is not running (port %d)." % (service['name'], service['port']))
# Why is nginx not running?
if service["port"] in (80, 443):
if not running and service["port"] in (80, 443):
output.print_line(shell('check_output', ['nginx', '-t'], capture_stderr=True, trap=True)[1].strip())
# Flag if local DNS is not running.
if service["port"] == 53 and service["public"] == False:
fatal = True
finally:
s.close()
else:
# Service should be running locally.
if try_connect("127.0.0.1"):
running = True
else:
output.print_error("%s is not running (port %d)." % (service['name'], service['port']))
# Flag if local DNS is not running.
if not running and service["port"] == 53 and service["public"] == False:
fatal = True
return (i, running, fatal, output)
@ -264,7 +278,7 @@ def run_domain_checks(rounded_time, env, output, pool):
dns_domains = set(dns_zonefiles)
# Get the list of domains we serve HTTPS for.
web_domains = set(get_web_domains(env) + get_default_www_redirects(env))
web_domains = set(get_web_domains(env))
domains_to_check = mail_domains | dns_domains | web_domains
@ -325,6 +339,7 @@ def check_primary_hostname_dns(domain, env, output, dns_domains, dns_zonefiles):
ip = query_dns(domain, "A")
ns_ips = query_dns("ns1." + domain, "A") + '/' + query_dns("ns2." + domain, "A")
my_ips = env['PUBLIC_IP'] + ((" / "+env['PUBLIC_IPV6']) if env.get("PUBLIC_IPV6") else "")
# Check that the ns1/ns2 hostnames resolve to A records. This information probably
# comes from the TLD since the information is set at the registrar as glue records.
@ -347,24 +362,29 @@ def check_primary_hostname_dns(domain, env, output, dns_domains, dns_zonefiles):
public DNS to update after a change."""
% (env['PRIMARY_HOSTNAME'], env['PRIMARY_HOSTNAME'], env['PUBLIC_IP'], ns_ips))
# Check that PRIMARY_HOSTNAME resolves to PUBLIC_IP in public DNS.
if ip == env['PUBLIC_IP']:
output.print_ok("Domain resolves to box's IP address. [%s%s]" % (env['PRIMARY_HOSTNAME'], env['PUBLIC_IP']))
# Check that PRIMARY_HOSTNAME resolves to PUBLIC_IP[V6] in public DNS.
ipv6 = query_dns(domain, "AAAA") if env.get("PUBLIC_IPV6") else None
if ip == env['PUBLIC_IP'] and ipv6 in (None, env['PUBLIC_IPV6']):
output.print_ok("Domain resolves to box's IP address. [%s%s]" % (env['PRIMARY_HOSTNAME'], my_ips))
else:
output.print_error("""This domain must resolve to your box's IP address (%s) in public DNS but it currently resolves
to %s. It may take several hours for public DNS to update after a change. This problem may result from other
issues listed here."""
% (env['PUBLIC_IP'], ip))
issues listed above."""
% (my_ips, ip + ((" / " + ipv6) if ipv6 is not None else "")))
# Check reverse DNS on the PRIMARY_HOSTNAME. Note that it might not be
# Check reverse DNS matches the PRIMARY_HOSTNAME. Note that it might not be
# a DNS zone if it is a subdomain of another domain we have a zone for.
ipaddr_rev = dns.reversename.from_address(env['PUBLIC_IP'])
existing_rdns = query_dns(ipaddr_rev, "PTR")
if existing_rdns == domain:
output.print_ok("Reverse DNS is set correctly at ISP. [%s%s]" % (env['PUBLIC_IP'], env['PRIMARY_HOSTNAME']))
else:
existing_rdns_v4 = query_dns(dns.reversename.from_address(env['PUBLIC_IP']), "PTR")
existing_rdns_v6 = query_dns(dns.reversename.from_address(env['PUBLIC_IPV6']), "PTR") if env.get("PUBLIC_IPV6") else None
if existing_rdns_v4 == domain and existing_rdns_v6 in (None, domain):
output.print_ok("Reverse DNS is set correctly at ISP. [%s%s]" % (my_ips, env['PRIMARY_HOSTNAME']))
elif existing_rdns_v4 == existing_rdns_v6 or existing_rdns_v6 is None:
output.print_error("""Your box's reverse DNS is currently %s, but it should be %s. Your ISP or cloud provider will have instructions
on setting up reverse DNS for your box at %s.""" % (existing_rdns, domain, env['PUBLIC_IP']) )
on setting up reverse DNS for your box.""" % (existing_rdns_v4, domain) )
else:
output.print_error("""Your box's reverse DNS is currently %s (IPv4) and %s (IPv6), but it should be %s. Your ISP or cloud provider will have instructions
on setting up reverse DNS for your box.""" % (existing_rdns_v4, existing_rdns_v6, domain) )
# Check the TLSA record.
tlsa_qname = "_25._tcp." + domain
@ -692,181 +712,6 @@ def check_ssl_cert(domain, rounded_time, ssl_certificates, env, output):
output.print_line(cert_status_details)
output.print_line("")
def check_certificate(domain, ssl_certificate, ssl_private_key, warn_if_expiring_soon=True, rounded_time=False, just_check_domain=False):
# Check that the ssl_certificate & ssl_private_key files are good
# for the provided domain.
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.x509 import Certificate
# The ssl_certificate file may contain a chain of certificates. We'll
# need to split that up before we can pass anything to openssl or
# parse them in Python. Parse it with the cryptography library.
try:
ssl_cert_chain = load_cert_chain(ssl_certificate)
cert = load_pem(ssl_cert_chain[0])
if not isinstance(cert, Certificate): raise ValueError("This is not a certificate file.")
except ValueError as e:
return ("There is a problem with the certificate file: %s" % str(e), None)
# First check that the domain name is one of the names allowed by
# the certificate.
if domain is not None:
certificate_names, cert_primary_name = get_certificate_domains(cert)
# Check that the domain appears among the acceptable names, or a wildcard
# form of the domain name (which is a stricter check than the specs but
# should work in normal cases).
wildcard_domain = re.sub("^[^\.]+", "*", domain)
if domain not in certificate_names and wildcard_domain not in certificate_names:
return ("The certificate is for the wrong domain name. It is for %s."
% ", ".join(sorted(certificate_names)), None)
# Second, check that the certificate matches the private key.
if ssl_private_key is not None:
try:
priv_key = load_pem(open(ssl_private_key, 'rb').read())
except ValueError as e:
return ("The private key file %s is not a private key file: %s" % (ssl_private_key, str(e)), None)
if not isinstance(priv_key, RSAPrivateKey):
return ("The private key file %s is not a private key file." % ssl_private_key, None)
if priv_key.public_key().public_numbers() != cert.public_key().public_numbers():
return ("The certificate does not correspond to the private key at %s." % ssl_private_key, None)
# We could also use the openssl command line tool to get the modulus
# listed in each file. The output of each command below looks like "Modulus=XXXXX".
# $ openssl rsa -inform PEM -noout -modulus -in ssl_private_key
# $ openssl x509 -in ssl_certificate -noout -modulus
# Third, check if the certificate is self-signed. Return a special flag string.
if cert.issuer == cert.subject:
return ("SELF-SIGNED", None)
# When selecting which certificate to use for non-primary domains, we check if the primary
# certificate or a www-parent-domain certificate is good for the domain. There's no need
# to run extra checks beyond this point.
if just_check_domain:
return ("OK", None)
# Check that the certificate hasn't expired. The datetimes returned by the
# certificate are 'naive' and in UTC. We need to get the current time in UTC.
now = datetime.datetime.utcnow()
if not(cert.not_valid_before <= now <= cert.not_valid_after):
return ("The certificate has expired or is not yet valid. It is valid from %s to %s." % (cert.not_valid_before, cert.not_valid_after), None)
# Next validate that the certificate is valid. This checks whether the certificate
# is self-signed, that the chain of trust makes sense, that it is signed by a CA
# that Ubuntu has installed on this machine's list of CAs, and I think that it hasn't
# expired.
# The certificate chain has to be passed separately and is given via STDIN.
# This command returns a non-zero exit status in most cases, so trap errors.
retcode, verifyoutput = shell('check_output', [
"openssl",
"verify", "-verbose",
"-purpose", "sslserver", "-policy_check",]
+ ([] if len(ssl_cert_chain) == 1 else ["-untrusted", "/proc/self/fd/0"])
+ [ssl_certificate],
input=b"\n\n".join(ssl_cert_chain[1:]),
trap=True)
if "self signed" in verifyoutput:
# Certificate is self-signed. Probably we detected this above.
return ("SELF-SIGNED", None)
elif retcode != 0:
if "unable to get local issuer certificate" in verifyoutput:
return ("The certificate is missing an intermediate chain or the intermediate chain is incorrect or incomplete. (%s)" % verifyoutput, None)
# There is some unknown problem. Return the `openssl verify` raw output.
return ("There is a problem with the SSL certificate.", verifyoutput.strip())
else:
# `openssl verify` returned a zero exit status so the cert is currently
# good.
# But is it expiring soon?
cert_expiration_date = cert.not_valid_after
ndays = (cert_expiration_date-now).days
if not rounded_time or ndays < 7:
expiry_info = "The certificate expires in %d days on %s." % (ndays, cert_expiration_date.strftime("%x"))
elif ndays <= 14:
expiry_info = "The certificate expires in less than two weeks, on %s." % cert_expiration_date.strftime("%x")
elif ndays <= 31:
expiry_info = "The certificate expires in less than a month, on %s." % cert_expiration_date.strftime("%x")
else:
expiry_info = "The certificate expires on %s." % cert_expiration_date.strftime("%x")
if ndays <= 31 and warn_if_expiring_soon:
return ("The certificate is expiring soon: " + expiry_info, None)
# Return the special OK code.
return ("OK", expiry_info)
def load_cert_chain(pemfile):
# A certificate .pem file may contain a chain of certificates.
# Load the file and split them apart.
re_pem = rb"(-+BEGIN (?:.+)-+[\r\n]+(?:[A-Za-z0-9+/=]{1,64}[\r\n]+)+-+END (?:.+)-+[\r\n]+)"
with open(pemfile, "rb") as f:
pem = f.read() + b"\n" # ensure trailing newline
pemblocks = re.findall(re_pem, pem)
if len(pemblocks) == 0:
raise ValueError("File does not contain valid PEM data.")
return pemblocks
def load_pem(pem):
# Parse a "---BEGIN .... END---" PEM string and return a Python object for it
# using classes from the cryptography package.
from cryptography.x509 import load_pem_x509_certificate
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.backends import default_backend
pem_type = re.match(b"-+BEGIN (.*?)-+[\r\n]", pem)
if pem_type is None:
raise ValueError("File is not a valid PEM-formatted file.")
pem_type = pem_type.group(1)
if pem_type in (b"RSA PRIVATE KEY", b"PRIVATE KEY"):
return serialization.load_pem_private_key(pem, password=None, backend=default_backend())
if pem_type == b"CERTIFICATE":
return load_pem_x509_certificate(pem, default_backend())
raise ValueError("Unsupported PEM object type: " + pem_type.decode("ascii", "replace"))
def get_certificate_domains(cert):
from cryptography.x509 import DNSName, ExtensionNotFound, OID_COMMON_NAME, OID_SUBJECT_ALTERNATIVE_NAME
import idna
names = set()
cn = None
# The domain may be found in the Subject Common Name (CN). This comes back as an IDNA (ASCII)
# string, which is the format we store domains in - so good.
try:
cn = cert.subject.get_attributes_for_oid(OID_COMMON_NAME)[0].value
names.add(cn)
except IndexError:
# No common name? Certificate is probably generated incorrectly.
# But we'll let it error-out when it doesn't find the domain.
pass
# ... or be one of the Subject Alternative Names. The cryptography library handily IDNA-decodes
# the names for us. We must encode back to ASCII, but wildcard certificates can't pass through
# IDNA encoding/decoding so we must special-case. See https://github.com/pyca/cryptography/pull/2071.
def idna_decode_dns_name(dns_name):
if dns_name.startswith("*."):
return "*." + idna.encode(dns_name[2:]).decode('ascii')
else:
return idna.encode(dns_name).decode('ascii')
try:
sans = cert.extensions.get_extension_for_oid(OID_SUBJECT_ALTERNATIVE_NAME).value.get_values_for_type(DNSName)
for san in sans:
names.add(idna_decode_dns_name(san))
except ExtensionNotFound:
pass
return names, cn
_apt_updates = None
def list_apt_updates(apt_update=True):
# See if we have this information cached recently.

View File

@ -86,7 +86,7 @@
</tbody>
</table>
<p style="margin-top: 1.5em"><small>hostmaster@, postmaster@, and admin@ email addresses are required on some domains.</small></p>
<p style="margin-top: 1.5em"><small>hostmaster@, postmaster@, admin@ and abuse@ email addresses are required on some domains.</small></p>
<div style="display: none">
<table>

View File

@ -29,7 +29,7 @@
<tr><th>Protocol/Method</th> <td>IMAP</td></tr>
<tr><th>Mail server</th> <td>{{hostname}}</td>
<tr><th>IMAP Port</th> <td>993</td></tr>
<tr><th>IMAP Security</th> <td>SSL</td></tr>
<tr><th>IMAP Security</th> <td>SSL or TLS</td></tr>
<tr><th>SMTP Port</th> <td>587</td></tr>
<tr><th>SMTP Security</th> <td>STARTTLS <small>(&ldquo;always&rdquo; or &ldquo;required&rdquo;, if prompted)</small></td></tr>
<tr><th>Username:</th> <td>Your whole email address.</td></tr>
@ -38,6 +38,8 @@
<p>In addition to setting up your email, you&rsquo;ll also need to set up <a href="#sync_guide" onclick="return show_panel(this);">contacts and calendar synchronization</a> separately.</p>
<p>As an alternative to IMAP you can also use the POP protocol: choose POP as the protocol, port 995, and SSL or TLS security in your mail client. The SMTP settings and usernames and passwords remain the same. However, we recommend you use IMAP instead.</p>
<h4>Exchange/ActiveSync settings</h4>
<p>On iOS devices, devices on this <a href="http://z-push.org/compatibility/">compatibility list</a>, or using Outlook 2007 or later on Windows 7 and later, you may set up your mail as an Exchange or ActiveSync server. However, we&rsquo;ve found this to be more buggy than using IMAP as described above. If you encounter any problems, please use the manual settings above.</p>

View File

@ -245,11 +245,17 @@ def wait_for_service(port, public, env, timeout):
return False
time.sleep(min(timeout/4, 1))
def fix_boto():
# Google Compute Engine instances install some Python-2-only boto plugins that
# conflict with boto running under Python 3. Disable boto's default configuration
# file prior to importing boto so that GCE's plugin is not loaded:
import os
os.environ["BOTO_CONFIG"] = "/etc/boto3.cfg"
if __name__ == "__main__":
from dns_update import get_dns_domains
from web_update import get_web_domains, get_default_www_redirects
from web_update import get_web_domains
env = load_environment()
domains = get_dns_domains(env) | set(get_web_domains(env) + get_default_www_redirects(env))
domains = sort_domains(domains, env)
domains = get_web_domains(env)
for domain in domains:
print(domain)

View File

@ -2,26 +2,36 @@
# domains for which a mail account has been set up.
########################################################################
import os, os.path, shutil, re, tempfile, rtyaml
import os.path, re, rtyaml
from mailconfig import get_mail_domains
from dns_update import get_custom_dns_config, do_dns_update, get_dns_zones
from dns_update import get_custom_dns_config, get_dns_zones
from ssl_certificates import get_ssl_certificates, get_domain_ssl_files, check_certificate
from utils import shell, safe_domain_name, sort_domains
def get_web_domains(env):
# What domains should we serve websites for?
def get_web_domains(env, include_www_redirects=True):
# What domains should we serve HTTP(S) for?
domains = set()
# At the least it's the PRIMARY_HOSTNAME so we can serve webmail
# as well as Z-Push for Exchange ActiveSync.
domains.add(env['PRIMARY_HOSTNAME'])
# Also serve web for all mail domains so that we might at least
# Serve web for all mail domains so that we might at least
# provide auto-discover of email settings, and also a static website
# if the user wants to make one. These will require an SSL cert.
# if the user wants to make one.
domains |= get_mail_domains(env)
if include_www_redirects:
# Add 'www.' subdomains that we want to provide default redirects
# to the main domain for. We'll add 'www.' to any DNS zones, i.e.
# the topmost of each domain we serve.
domains |= set('www.' + zone for zone, zonefile in get_dns_zones(env))
# ...Unless the domain has an A/AAAA record that maps it to a different
# IP address than this box. Remove those domains from our list.
domains |= (get_mail_domains(env) - get_domains_with_a_records(env))
domains -= get_domains_with_a_records(env)
# Ensure the PRIMARY_HOSTNAME is in the list so we can serve webmail
# as well as Z-Push for Exchange ActiveSync. This can't be removed
# by a custom A/AAAA record and is never a 'www.' redirect.
domains.add(env['PRIMARY_HOSTNAME'])
# Sort the list so the nginx conf gets written in a stable order.
domains = sort_domains(domains, env)
@ -50,15 +60,6 @@ def get_web_domains_with_root_overrides(env):
root_overrides[domain] = (type, value)
return root_overrides
def get_default_www_redirects(env):
# Returns a list of www subdomains that we want to provide default redirects
# for, i.e. any www's that aren't domains the user has actually configured
# to serve for real. Which would be unusual.
web_domains = set(get_web_domains(env))
www_domains = set('www.' + zone for zone, zonefile in get_dns_zones(env))
return sort_domains(www_domains - web_domains - get_domains_with_a_records(env), env)
def do_web_update(env):
# Pre-load what SSL certificates we will use for each domain.
ssl_certificates = get_ssl_certificates(env)
@ -77,16 +78,20 @@ def do_web_update(env):
# Add configuration all other web domains.
has_root_proxy_or_redirect = get_web_domains_with_root_overrides(env)
web_domains_not_redirect = get_web_domains(env, include_www_redirects=False)
for domain in get_web_domains(env):
if domain == env['PRIMARY_HOSTNAME']: continue # handled above
if domain not in has_root_proxy_or_redirect:
nginx_conf += make_domain_config(domain, [template0, template1], ssl_certificates, env)
if domain == env['PRIMARY_HOSTNAME']:
# PRIMARY_HOSTNAME is handled above.
continue
if domain in web_domains_not_redirect:
# This is a regular domain.
if domain not in has_root_proxy_or_redirect:
nginx_conf += make_domain_config(domain, [template0, template1], ssl_certificates, env)
else:
nginx_conf += make_domain_config(domain, [template0], ssl_certificates, env)
else:
nginx_conf += make_domain_config(domain, [template0], ssl_certificates, env)
# Add default www redirects.
for domain in get_default_www_redirects(env):
nginx_conf += make_domain_config(domain, [template0, template3], ssl_certificates, env)
# Add default 'www.' redirect.
nginx_conf += make_domain_config(domain, [template0, template3], ssl_certificates, env)
# Did the file change? If not, don't bother writing & restarting nginx.
nginx_conf_fn = "/etc/nginx/conf.d/local.conf"
@ -185,214 +190,12 @@ def get_web_root(domain, env, test_exists=True):
if os.path.exists(root) or not test_exists: break
return root
def get_ssl_certificates(env):
# Scan all of the installed SSL certificates and map every domain
# that the certificates are good for to the best certificate for
# the domain.
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.x509 import Certificate
# The certificates are all stored here:
ssl_root = os.path.join(env["STORAGE_ROOT"], 'ssl')
# List all of the files in the SSL directory and one level deep.
def get_file_list():
for fn in os.listdir(ssl_root):
fn = os.path.join(ssl_root, fn)
if os.path.isfile(fn):
yield fn
elif os.path.isdir(fn):
for fn1 in os.listdir(fn):
fn1 = os.path.join(fn, fn1)
if os.path.isfile(fn1):
yield fn1
# Remember stuff.
private_keys = { }
certificates = [ ]
# Scan each of the files to find private keys and certificates.
# We must load all of the private keys first before processing
# certificates so that we can check that we have a private key
# available before using a certificate.
from status_checks import load_cert_chain, load_pem
for fn in get_file_list():
try:
pem = load_pem(load_cert_chain(fn)[0])
except ValueError:
# Not a valid PEM format for a PEM type we care about.
continue
# Remember where we got this object.
pem._filename = fn
# Is it a private key?
if isinstance(pem, RSAPrivateKey):
private_keys[pem.public_key().public_numbers()] = pem
# Is it a certificate?
if isinstance(pem, Certificate):
certificates.append(pem)
# Process the certificates.
domains = { }
from status_checks import get_certificate_domains
for cert in certificates:
# What domains is this certificate good for?
cert_domains, primary_domain = get_certificate_domains(cert)
cert._primary_domain = primary_domain
# Is there a private key file for this certificate?
private_key = private_keys.get(cert.public_key().public_numbers())
if not private_key:
continue
cert._private_key = private_key
# Add this cert to the list of certs usable for the domains.
for domain in cert_domains:
domains.setdefault(domain, []).append(cert)
# Sort the certificates to prefer good ones.
import datetime
now = datetime.datetime.utcnow()
ret = { }
for domain, cert_list in domains.items():
cert_list.sort(key = lambda cert : (
# must be valid NOW
cert.not_valid_before <= now <= cert.not_valid_after,
# prefer one that is not self-signed
cert.issuer != cert.subject,
# prefer one with the expiration furthest into the future so
# that we can easily rotate to new certs as we get them
cert.not_valid_after,
# in case a certificate is installed in multiple paths,
# prefer the... lexicographically last one?
cert._filename,
), reverse=True)
cert = cert_list.pop(0)
ret[domain] = {
"private-key": cert._private_key._filename,
"certificate": cert._filename,
"primary-domain": cert._primary_domain,
}
return ret
def get_domain_ssl_files(domain, ssl_certificates, env, allow_missing_cert=False):
# Get the default paths.
ssl_private_key = os.path.join(os.path.join(env["STORAGE_ROOT"], 'ssl', 'ssl_private_key.pem'))
ssl_certificate = os.path.join(os.path.join(env["STORAGE_ROOT"], 'ssl', 'ssl_certificate.pem'))
if domain == env['PRIMARY_HOSTNAME']:
# The primary domain must use the server certificate because
# it is hard-coded in some service configuration files.
return ssl_private_key, ssl_certificate, None
wildcard_domain = re.sub("^[^\.]+", "*", domain)
if domain in ssl_certificates:
cert_info = ssl_certificates[domain]
cert_type = "multi-domain"
elif wildcard_domain in ssl_certificates:
cert_info = ssl_certificates[wildcard_domain]
cert_type = "wildcard"
elif not allow_missing_cert:
# No certificate is available for this domain! Return default files.
ssl_via = "Using certificate for %s." % env['PRIMARY_HOSTNAME']
return ssl_private_key, ssl_certificate, ssl_via
else:
# No certificate is available - and warn appropriately.
return None
# 'via' is a hint to the user about which certificate is in use for the domain
if cert_info['certificate'] == os.path.join(env["STORAGE_ROOT"], 'ssl', 'ssl_certificate.pem'):
# Using the server certificate.
via = "Using same %s certificate as for %s." % (cert_type, env['PRIMARY_HOSTNAME'])
elif cert_info['primary-domain'] != domain and cert_info['primary-domain'] in ssl_certificates and cert_info == ssl_certificates[cert_info['primary-domain']]:
via = "Using same %s certificate as for %s." % (cert_type, cert_info['primary-domain'])
else:
via = None # don't show a hint - show expiration info instead
return cert_info['private-key'], cert_info['certificate'], via
def create_csr(domain, ssl_key, env):
return shell("check_output", [
"openssl", "req", "-new",
"-key", ssl_key,
"-sha256",
"-subj", "/C=%s/ST=/L=/O=/CN=%s" % (env["CSR_COUNTRY"], domain)])
def install_cert(domain, ssl_cert, ssl_chain, env):
if domain not in get_web_domains(env) + get_default_www_redirects(env):
return "Invalid domain name."
# Write the combined cert+chain to a temporary path and validate that it is OK.
# The certificate always goes above the chain.
import tempfile, os
fd, fn = tempfile.mkstemp('.pem')
os.write(fd, (ssl_cert + '\n' + ssl_chain).encode("ascii"))
os.close(fd)
# Do validation on the certificate before installing it.
from status_checks import check_certificate
ssl_private_key = os.path.join(os.path.join(env["STORAGE_ROOT"], 'ssl', 'ssl_private_key.pem'))
cert_status, cert_status_details = check_certificate(domain, fn, ssl_private_key)
if cert_status != "OK":
if cert_status == "SELF-SIGNED":
cert_status = "This is a self-signed certificate. I can't install that."
os.unlink(fn)
if cert_status_details is not None:
cert_status += " " + cert_status_details
return cert_status
# Where to put it?
if domain == env['PRIMARY_HOSTNAME']:
ssl_certificate = os.path.join(os.path.join(env["STORAGE_ROOT"], 'ssl', 'ssl_certificate.pem'))
else:
# Make a unique path for the certificate.
from status_checks import load_cert_chain, load_pem, get_certificate_domains
from cryptography.hazmat.primitives import hashes
from binascii import hexlify
cert = load_pem(load_cert_chain(fn)[0])
all_domains, cn = get_certificate_domains(cert)
path = "%s-%s-%s" % (
cn, # common name
cert.not_valid_after.date().isoformat().replace("-", ""), # expiration date
hexlify(cert.fingerprint(hashes.SHA256())).decode("ascii")[0:8], # fingerprint prefix
)
ssl_certificate = os.path.join(os.path.join(env["STORAGE_ROOT"], 'ssl', path, 'ssl_certificate.pem'))
# Install the certificate.
os.makedirs(os.path.dirname(ssl_certificate), exist_ok=True)
shutil.move(fn, ssl_certificate)
ret = ["OK"]
# When updating the cert for PRIMARY_HOSTNAME, also update DNS because it is
# used in the DANE TLSA record and restart postfix and dovecot which use
# that certificate.
if domain == env['PRIMARY_HOSTNAME']:
ret.append( do_dns_update(env) )
shell('check_call', ["/usr/sbin/service", "postfix", "restart"])
shell('check_call', ["/usr/sbin/service", "dovecot", "restart"])
ret.append("mail services restarted")
# Kick nginx so it sees the cert.
ret.append( do_web_update(env) )
return "\n".join(ret)
def get_web_domains_info(env):
has_root_proxy_or_redirect = get_web_domains_with_root_overrides(env)
www_redirects = set(get_web_domains(env)) - set(get_web_domains(env, include_www_redirects=False))
has_root_proxy_or_redirect = set(get_web_domains_with_root_overrides(env))
# for the SSL config panel, get cert status
def check_cert(domain):
from status_checks import check_certificate
ssl_certificates = get_ssl_certificates(env)
x = get_domain_ssl_files(domain, ssl_certificates, env, allow_missing_cert=True)
if x is None: return ("danger", "No Certificate Installed")
@ -415,15 +218,7 @@ def get_web_domains_info(env):
"root": get_web_root(domain, env),
"custom_root": get_web_root(domain, env, test_exists=False),
"ssl_certificate": check_cert(domain),
"static_enabled": domain not in has_root_proxy_or_redirect,
"static_enabled": domain not in (www_redirects | has_root_proxy_or_redirect),
}
for domain in get_web_domains(env)
] + \
[
{
"domain": domain,
"ssl_certificate": check_cert(domain),
"static_enabled": False,
}
for domain in get_default_www_redirects(env)
]
]

View File

@ -88,18 +88,19 @@ sed -i "s/#port = 110/port = 0/" /etc/dovecot/conf.d/10-master.conf
# this are minimal. But for good measure, let's go to 4 minutes to halve the
# bandwidth and number of times the device's networking might be woken up.
# The risk is that if the connection is silent for too long it might be reset
# by a peer. See #129 and http://razor.occams.info/blog/2014/08/09/how-bad-is-imap-idle/.
# by a peer. See [#129](https://github.com/mail-in-a-box/mailinabox/issues/129)
# and [How bad is IMAP IDLE](http://razor.occams.info/blog/2014/08/09/how-bad-is-imap-idle/).
tools/editconf.py /etc/dovecot/conf.d/20-imap.conf \
imap_idle_notify_interval="4 mins"
# Set POP3 UIDL
# UIDLs are used by POP3 clients to keep track of what messages they've downloaded.
# Set POP3 UIDL.
# UIDLs are used by POP3 clients to keep track of what messages they've downloaded.
# For new POP3 servers, the easiest way to set up UIDLs is to use IMAP's UIDVALIDITY
# and UID values, the default in Dovecot.
tools/editconf.py /etc/dovecot/conf.d/20-pop3.conf \
pop3_uidl_format="%08Xu%08Xv"
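
To make the format string concrete, a small sketch of the resulting UIDL, assuming Dovecot expands %u to the message's IMAP UID and %v to the mailbox's UIDVALIDITY:

    # %08Xu%08Xv: eight hex digits of the message's IMAP UID, then eight of the mailbox's UIDVALIDITY.
    uid, uidvalidity = 42, 1234567890            # hypothetical values
    print("%08X%08X" % (uid, uidvalidity))       # "0000002A499602D2"
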
# Full Text Search - Enable full text search of mail using dovecot's lucene plugin,
# Full Text Search - Enable full text search of mail using dovecot's lucene plugin,
# which *we* package and distribute (dovecot-lucene package).
tools/editconf.py /etc/dovecot/conf.d/10-mail.conf \
mail_plugins="\$mail_plugins fts fts_lucene"

View File

@ -4,13 +4,14 @@ source setup/functions.sh
echo "Installing Mail-in-a-Box system management daemon..."
# build-essential libssl-dev libffi-dev python3-dev: Required to pip install cryptography.
apt_install python3-flask links duplicity libyaml-dev python3-dnspython python3-dateutil \
build-essential libssl-dev libffi-dev python3-dev python-pip
hide_output pip3 install --upgrade rtyaml "email_validator>=1.0.0" "idna>=2.0.0" "cryptography>=1.0.2" boto psutil
# Switch the Python 2 boto to the package manager's copy, not PyPI's.
if [ -f /usr/local/lib/python2.7/dist-packages/boto/__init__.py ]; then hide_output pip uninstall -y boto; fi
# duplicity uses Python 2, so we need the Python 2 package of boto
hide_output pip install --upgrade boto
# build-essential libssl-dev libffi-dev python3-dev: Required to pip install cryptography.
apt_install python3-flask links duplicity python-boto libyaml-dev python3-dnspython python3-dateutil \
build-essential libssl-dev libffi-dev python3-dev python-pip
hide_output pip3 install --upgrade rtyaml "email_validator>=1.0.0" "idna>=2.0.0" "cryptography>=1.0.2" boto psutil
# email_validator is repeated in setup/questions.sh
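
If in doubt about which boto copy wins after this change, a tiny diagnostic sketch (not part of the setup script) can be run under both python2 and python3:

    # Print which boto package the interpreter actually resolves.
    import importlib
    boto = importlib.import_module("boto")
    print(boto.__file__)                         # should point at a dist-packages path once the pip copy is gone
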

View File

@ -111,6 +111,32 @@ def migration_9(env):
db = os.path.join(env["STORAGE_ROOT"], 'mail/users.sqlite')
shell("check_call", ["sqlite3", db, "ALTER TABLE aliases ADD permitted_senders TEXT"])
def migration_10(env):
# Clean up the SSL certificates directory.
# Move the primary certificate to a new name and then
# symlink it to the system certificate path.
import datetime
system_certificate = os.path.join(env["STORAGE_ROOT"], 'ssl/ssl_certificate.pem')
if not os.path.islink(system_certificate): # not already a symlink
new_path = os.path.join(env["STORAGE_ROOT"], 'ssl', env['PRIMARY_HOSTNAME'] + "-" + datetime.datetime.now().date().isoformat().replace("-", "") + ".pem")
print("Renamed", system_certificate, "to", new_path, "and created a symlink for the original location.")
shutil.move(system_certificate, new_path)
os.symlink(new_path, system_certificate)
# Flatten the directory structure. For any directory
# that contains a single file named ssl_certificate.pem,
# move the file out and name it the same as the directory,
# and remove the directory.
for sslcert in glob.glob(os.path.join( env["STORAGE_ROOT"], 'ssl/*/ssl_certificate.pem' )):
d = os.path.dirname(sslcert)
if len(os.listdir(d)) == 1:
# This certificate is the only file in that directory.
newname = os.path.join(env["STORAGE_ROOT"], 'ssl', os.path.basename(d) + '.pem')
if not os.path.exists(newname):
shutil.move(sslcert, newname)
os.rmdir(d)
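
As a hypothetical illustration of the flattening step (assuming the default STORAGE_ROOT of /home/user-data):

    # Hypothetical example of the rename performed above.
    import os
    sslcert = "/home/user-data/ssl/www.example.com/ssl_certificate.pem"
    d = os.path.dirname(sslcert)
    newname = os.path.join("/home/user-data/ssl", os.path.basename(d) + ".pem")
    print(newname)                               # /home/user-data/ssl/www.example.com.pem
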
def get_current_migration():
ver = 0
while True:

View File

@ -207,8 +207,6 @@ if [ "$PUBLIC_IPV6" = "auto" ]; then
PUBLIC_IPV6=$(get_publicip_from_web_service 6 || get_default_privateip 6)
fi
if [ "$PRIMARY_HOSTNAME" = "auto" ]; then
# Use reverse DNS to get this machine's hostname. Install bind9-host early.
hide_output apt-get -y install bind9-host
PRIMARY_HOSTNAME=$(get_default_hostname)
elif [ "$PRIMARY_HOSTNAME" = "auto-easy" ]; then
# Generate a probably-unique subdomain under our justtesting.email domain.
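
The script presumably relies on the host utility for the lookup (hence installing bind9-host early); the same reverse-DNS idea in a minimal Python sketch with a hypothetical address:

    import socket
    ip = "203.0.113.10"                          # hypothetical public IP
    try:
        hostname = socket.gethostbyaddr(ip)[0]   # PTR lookup
    except (socket.herror, socket.gaierror):
        hostname = None
    print(hostname)
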

View File

@ -77,12 +77,17 @@ if [ ! -f $STORAGE_ROOT/ssl/ssl_certificate.pem ]; then
-sha256 -subj "/C=$CSR_COUNTRY/ST=/L=/O=/CN=$PRIMARY_HOSTNAME"
# Generate the self-signed certificate.
CERT=$STORAGE_ROOT/ssl/$PRIMARY_HOSTNAME-selfsigned-$(date --rfc-3339=date | sed s/-//g).pem
hide_output \
openssl x509 -req -days 365 \
-in $CSR -signkey $STORAGE_ROOT/ssl/ssl_private_key.pem -out $STORAGE_ROOT/ssl/ssl_certificate.pem
-in $CSR -signkey $STORAGE_ROOT/ssl/ssl_private_key.pem -out $CERT
# Delete the certificate signing request because it has no other purpose.
rm -f $CSR
# Delete the certificate signing request because it has no other purpose.
rm -f $CSR
# Symlink the certificate into the system certificate path, so system services
# can find it.
ln -s $CERT $STORAGE_ROOT/ssl/ssl_certificate.pem
fi
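
To confirm the result afterwards, a small diagnostic sketch (assuming the default STORAGE_ROOT of /home/user-data):

    import os
    link = "/home/user-data/ssl/ssl_certificate.pem"     # assumes the default STORAGE_ROOT
    print(os.path.islink(link))                          # True once the symlink is in place
    print(os.path.realpath(link))                        # the dated certificate file it points to
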
# Generate some Diffie-Hellman cipher bits.

View File

@ -1,3 +1,4 @@
source /etc/mailinabox.conf
source setup/functions.sh # load our functions
# Basic System Configuration
@ -11,12 +12,9 @@ source setup/functions.sh # load our functions
# text search plugin for (and by) dovecot, which is not available in
# Ubuntu currently.
#
# Add that to the system's list of repositories using add-apt-repository.
# But add-apt-repository may not be installed. If it's not available,
# then install it. But we have to run apt-get update before we try to
# install anything so the package index is up to date. After adding the
# PPA, we have to run apt-get update *again* to load the PPA's index,
# so this must precede the apt-get update line below.
# So, first ensure add-apt-repository is installed, then use it to install
# the [mail-in-a-box ppa](https://launchpad.net/~mail-in-a-box/+archive/ubuntu/ppa).
if [ ! -f /usr/bin/add-apt-repository ]; then
echo "Installing add-apt-repository..."
@ -198,7 +196,9 @@ restart_service resolvconf
# ### Fail2Ban Service
# Configure the Fail2Ban installation to prevent dumb brute-force attacks against dovecot, postfix, and ssh.
cp conf/fail2ban/jail.local /etc/fail2ban/jail.local
cat conf/fail2ban/jail.local \
| sed "s/PUBLIC_IP/$PUBLIC_IP/g" \
> /etc/fail2ban/jail.local
cp conf/fail2ban/dovecotimap.conf /etc/fail2ban/filter.d/dovecotimap.conf
restart_service fail2ban
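
The sed pipeline is plain token substitution on the jail template; an equivalent sketch with a hypothetical IP:

    # Substitute the machine's public IP into the Fail2Ban jail template (the installer uses $PUBLIC_IP).
    public_ip = "203.0.113.10"                   # hypothetical value
    with open("conf/fail2ban/jail.local") as f:
        conf = f.read().replace("PUBLIC_IP", public_ip)
    with open("/etc/fail2ban/jail.local", "w") as f:
        f.write(conf)
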