2014-06-23 00:11:24 +00:00
|
|
|
#!/usr/bin/python3
|
|
|
|
#
|
2014-06-22 15:34:36 +00:00
|
|
|
# Checks that the upstream DNS has been set correctly and that
|
2014-06-23 00:11:24 +00:00
|
|
|
# SSL certificates have been signed, etc., and if not tells the user
|
2014-06-22 15:34:36 +00:00
|
|
|
# what to do next.
|
|
|
|
|
2014-06-23 10:53:09 +00:00
|
|
|
# NOTE(review): `__ALL__` has no special meaning to Python; the export list
# convention is lowercase `__all__`. As written, `from <module> import *`
# ignores this and imports every public name — confirm intent before renaming.
__ALL__ = ['check_certificate']
|
|
|
|
|
2015-03-08 21:56:28 +00:00
|
|
|
import sys, os, os.path, re, subprocess, datetime, multiprocessing.pool
|
2014-06-22 15:34:36 +00:00
|
|
|
|
|
|
|
import dns.reversename, dns.resolver
|
2014-09-21 12:51:27 +00:00
|
|
|
import dateutil.parser, dateutil.tz
|
2015-06-30 12:45:58 +00:00
|
|
|
import idna
|
2014-06-22 15:34:36 +00:00
|
|
|
|
2015-05-03 01:10:28 +00:00
|
|
|
from dns_update import get_dns_zones, build_tlsa_record, get_custom_dns_config, get_secondary_dns
|
2015-06-04 12:06:02 +00:00
|
|
|
from web_update import get_web_domains, get_default_www_redirects, get_domain_ssl_files
|
2014-06-22 16:28:55 +00:00
|
|
|
from mailconfig import get_mail_domains, get_mail_aliases
|
2014-06-22 15:34:36 +00:00
|
|
|
|
2014-08-01 12:15:02 +00:00
|
|
|
from utils import shell, sort_domains, load_env_vars_from_file
|
2014-06-22 15:34:36 +00:00
|
|
|
|
2015-03-08 21:56:28 +00:00
|
|
|
def run_checks(rounded_values, env, output, pool):
	"""Run all status checks, writing results to `output`.

	rounded_values: when truthy, downstream checks report rounded/approximate
	  figures (see check_free_disk_space).
	env: the environment dict (PRIMARY_HOSTNAME, PUBLIC_IP, STORAGE_ROOT, ...).
	output: an object with add_heading/print_ok/print_error/... methods.
	pool: a multiprocessing pool used to parallelize service & domain checks.
	"""
	# run systems checks
	output.add_heading("System")

	# check that services are running
	if not run_services_checks(env, output, pool):
		# If critical services are not running, stop. If bind9 isn't running,
		# all later DNS checks will timeout and that will take forever to
		# go through, and if running over the web will cause a fastcgi timeout.
		return

	# clear bind9's DNS cache so our DNS checks are up to date
	# (ignore errors; if bind9/rndc isn't running we'd already report
	# that in run_services checks.)
	shell('check_call', ["/usr/sbin/rndc", "flush"], trap=True)

	run_system_checks(rounded_values, env, output)

	# perform other checks asynchronously

	run_network_checks(env, output)
	run_domain_checks(rounded_values, env, output, pool)
|
2014-06-23 19:39:20 +00:00
|
|
|
|
2015-02-01 19:18:32 +00:00
|
|
|
def get_ssh_port():
	"""Return the port sshd listens on (as an int), or None.

	None is returned when sshd is not installed or when no "port" setting
	appears in `sshd -T` output.
	"""
	try:
		sshd_dump = shell('check_output', ['sshd', '-T'])
	except FileNotFoundError:
		# sshd is not installed. That's ok.
		return None

	# Scan the whitespace-separated token stream for "port" and return the
	# integer token immediately following it.
	tokens = iter(sshd_dump.split())
	for token in tokens:
		if token == "port":
			value = next(tokens, None)
			if value is None:
				# "port" was the very last token; treat as not found.
				break
			return int(value)

	# Did not find port!
	return None
|
|
|
|
|
2015-02-18 16:42:18 +00:00
|
|
|
def run_services_checks(env, output, pool):
	"""Check that system services are running.

	Each service is probed by check_service (TCP connect) in parallel via
	`pool`. Returns False when a fatal service (local DNS) is down, which
	tells the caller to abort the remaining checks.
	"""
	# "public" services are probed on PUBLIC_IP; the rest on 127.0.0.1.
	# A falsy port (e.g. get_ssh_port() returning None) skips that check.
	services = [
		{ "name": "Local DNS (bind9)", "port": 53, "public": False, },
		#{ "name": "NSD Control", "port": 8952, "public": False, },
		{ "name": "Local DNS Control (bind9/rndc)", "port": 953, "public": False, },
		{ "name": "Dovecot LMTP LDA", "port": 10026, "public": False, },
		{ "name": "Postgrey", "port": 10023, "public": False, },
		{ "name": "Spamassassin", "port": 10025, "public": False, },
		{ "name": "OpenDKIM", "port": 8891, "public": False, },
		{ "name": "OpenDMARC", "port": 8893, "public": False, },
		{ "name": "Memcached", "port": 11211, "public": False, },
		{ "name": "Sieve (dovecot)", "port": 4190, "public": False, },
		{ "name": "Mail-in-a-Box Management Daemon", "port": 10222, "public": False, },

		{ "name": "SSH Login (ssh)", "port": get_ssh_port(), "public": True, },
		{ "name": "Public DNS (nsd4)", "port": 53, "public": True, },
		{ "name": "Incoming Mail (SMTP/postfix)", "port": 25, "public": True, },
		{ "name": "Outgoing Mail (SMTP 587/postfix)", "port": 587, "public": True, },
		#{ "name": "Postfix/master", "port": 10587, "public": True, },
		{ "name": "IMAPS (dovecot)", "port": 993, "public": True, },
		{ "name": "HTTP Web (nginx)", "port": 80, "public": True, },
		{ "name": "HTTPS Web (nginx)", "port": 443, "public": True, },
	]

	all_running = True
	fatal = False
	# Fan out the probes; each returns (index, running, fatal, buffered output).
	# Sorting by index restores the original service order for display.
	ret = pool.starmap(check_service, ((i, service, env) for i, service in enumerate(services)), chunksize=1)
	for i, running, fatal2, output2 in sorted(ret):
		if output2 is None: continue # skip check (e.g. no port was set, e.g. no sshd)
		all_running = all_running and running
		fatal = fatal or fatal2
		output2.playback(output)

	if all_running:
		output.print_ok("All system services are running.")

	return not fatal
|
|
|
|
|
|
|
|
def check_service(i, service, env):
	"""Probe one service by opening a TCP connection to its port.

	Returns (i, running, fatal, output) where `output` is a BufferedOutput
	of any messages, or (i, None, None, None) when the check is skipped.
	Runs inside a worker pool, so results are buffered rather than printed.
	"""
	if not service["port"]:
		# Skip check (no port, e.g. no sshd).
		return (i, None, None, None)

	import socket
	output = BufferedOutput()
	running = False
	fatal = False
	s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
	# Short timeout so a dead service doesn't stall the whole status check.
	s.settimeout(1)
	try:
		try:
			# Public services are checked on the public IP; private ones on localhost.
			s.connect((
				"127.0.0.1" if not service["public"] else env['PUBLIC_IP'],
				service["port"]))
			running = True
		except OSError as e1:
			if service["public"] and service["port"] != 53:
				# For public services (except DNS), try the private IP as a fallback.
				s1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
				s1.settimeout(1)
				try:
					s1.connect(("127.0.0.1", service["port"]))
					# Reachable locally but not publicly: report a distinct error.
					output.print_error("%s is running but is not publicly accessible at %s:%d (%s)." % (service['name'], env['PUBLIC_IP'], service['port'], str(e1)))
				except:
					# Not reachable locally either; re-raise the original public-IP error.
					raise e1
				finally:
					s1.close()
			else:
				raise

	except OSError as e:
		output.print_error("%s is not running (%s; port %d)." % (service['name'], str(e), service['port']))

		# Why is nginx not running?
		if service["port"] in (80, 443):
			output.print_line(shell('check_output', ['nginx', '-t'], capture_stderr=True, trap=True)[1].strip())

		# Flag if local DNS is not running.
		if service["port"] == 53 and service["public"] == False:
			fatal = True
	finally:
		s.close()

	return (i, running, fatal, output)
|
2015-01-11 13:04:14 +00:00
|
|
|
|
2015-03-08 21:56:28 +00:00
|
|
|
def run_system_checks(rounded_values, env, output):
	"""Run the host-level checks: SSH config, pending updates, the
	administrator alias, and free disk space."""
	check_ssh_password(env, output)
	check_software_updates(env, output)
	check_system_aliases(env, output)
	check_free_disk_space(rounded_values, env, output)
|
2014-06-23 19:39:20 +00:00
|
|
|
|
2015-01-31 19:56:39 +00:00
|
|
|
def check_ssh_password(env, output):
	"""Warn if the SSH daemon permits password-based login.

	Reads /etc/ssh/sshd_config directly and reports the result on `output`.
	Does nothing when the config file is absent (openssh-server not installed).
	"""
	# Check that SSH login with password is disabled. The openssh-server
	# package may not be installed so check that before trying to access
	# the configuration file.
	if not os.path.exists("/etc/ssh/sshd_config"):
		return
	# Use a context manager so the file handle is closed (the original
	# leaked it), and raw strings so \n/\s are regex metacharacters rather
	# than (deprecated) string escapes.
	with open("/etc/ssh/sshd_config") as f:
		sshd = f.read()
	if re.search(r"\nPasswordAuthentication\s+yes", sshd) \
		or not re.search(r"\nPasswordAuthentication\s+no", sshd):
		output.print_error("""The SSH server on this machine permits password-based login. A more secure
way to log in is using a public key. Add your SSH public key to $HOME/.ssh/authorized_keys, check
that you can log in without a password, set the option 'PasswordAuthentication no' in
/etc/ssh/sshd_config, and then restart the openssh via 'sudo service ssh restart'.""")
	else:
		output.print_ok("SSH disallows password-based login.")
|
2014-06-23 19:39:20 +00:00
|
|
|
|
2015-01-31 19:56:39 +00:00
|
|
|
def check_software_updates(env, output):
	"""Report pending apt package updates and whether a reboot is required."""
	# Check for any software package updates.
	# apt_update=False: rely on the existing package index rather than
	# refreshing it here.
	pkgs = list_apt_updates(apt_update=False)
	if os.path.exists("/var/run/reboot-required"):
		output.print_error("System updates have been installed and a reboot of the machine is required.")
	elif len(pkgs) == 0:
		output.print_ok("System software is up to date.")
	else:
		output.print_error("There are %d software packages that can be updated." % len(pkgs))
		for p in pkgs:
			output.print_line("%s (%s)" % (p["package"], p["version"]))
|
2014-08-21 11:09:51 +00:00
|
|
|
|
2015-01-31 19:56:39 +00:00
|
|
|
def check_system_aliases(env, output):
	"""Verify the administrator@PRIMARY_HOSTNAME mail alias exists."""
	# Check that the administrator alias exists since that's where all
	# admin email is automatically directed.
	check_alias_exists("System administrator address", "administrator@" + env['PRIMARY_HOSTNAME'], env, output)
|
2014-07-16 13:19:32 +00:00
|
|
|
|
2015-03-08 21:56:28 +00:00
|
|
|
def check_free_disk_space(rounded_values, env, output):
	"""Report free disk space on the STORAGE_ROOT filesystem.

	rounded_values: when truthy, report only a rounded "less than N%"
	figure instead of the exact GB remaining.
	Severity: ok above 30% free, warning above 15%, error below that.
	"""
	# Check free disk space.
	st = os.statvfs(env['STORAGE_ROOT'])
	bytes_total = st.f_blocks * st.f_frsize
	bytes_free = st.f_bavail * st.f_frsize
	if not rounded_values:
		disk_msg = "The disk has %s GB space remaining." % str(round(bytes_free/1024.0/1024.0/1024.0*10.0)/10)
	else:
		# Percent free, rounded up to the nearest 10%. BUGFIX: the original
		# expression divided the free fraction by 10 (bytes_free/bytes_total/10),
		# which always rounded to 0 or 1 and so always reported "less than 0%"
		# or "less than 10%". Multiplying by 10 gives percent/10 as intended.
		disk_msg = "The disk has less than %s%% space left." % str(round(bytes_free/bytes_total*10 + .5)*10)
	if bytes_free > .3 * bytes_total:
		output.print_ok(disk_msg)
	elif bytes_free > .15 * bytes_total:
		output.print_warning(disk_msg)
	else:
		output.print_error(disk_msg)
|
2014-10-12 21:31:58 +00:00
|
|
|
|
2015-02-18 16:42:18 +00:00
|
|
|
def run_network_checks(env, output):
	"""Check outbound SMTP reachability and Spamhaus blacklist status."""
	# Also see setup/network-checks.sh.

	output.add_heading("Network")

	# Stop if we cannot make an outbound connection on port 25. Many residential
	# networks block outbound port 25 to prevent their network from sending spam.
	# See if we can reach one of Google's MTAs with a 5-second timeout.
	code, ret = shell("check_call", ["/bin/nc", "-z", "-w5", "aspmx.l.google.com", "25"], trap=True)
	if ret == 0:
		output.print_ok("Outbound mail (SMTP port 25) is not blocked.")
	else:
		output.print_error("""Outbound mail (SMTP port 25) seems to be blocked by your network. You
will not be able to send any mail. Many residential networks block port 25 to prevent hijacked
machines from being able to send spam. A quick connection test to Google's mail server on port 25
failed.""")

	# Stop if the IPv4 address is listed in the ZEN Spamhaus Block List.
	# The user might have ended up on an IP address that was previously in use
	# by a spammer, or the user may be deploying on a residential network. We
	# will not be able to reliably send mail in these cases.
	# DNSBL lookup: query the reversed IPv4 octets under zen.spamhaus.org;
	# NXDOMAIN (None) means not listed.
	rev_ip4 = ".".join(reversed(env['PUBLIC_IP'].split('.')))
	zen = query_dns(rev_ip4+'.zen.spamhaus.org', 'A', nxdomain=None)
	if zen is None:
		output.print_ok("IP address is not blacklisted by zen.spamhaus.org.")
	else:
		output.print_error("""The IP address of this machine %s is listed in the Spamhaus Block List (code %s),
which may prevent recipients from receiving your email. See http://www.spamhaus.org/query/ip/%s."""
			% (env['PUBLIC_IP'], zen, env['PUBLIC_IP']))
|
2014-08-19 11:16:49 +00:00
|
|
|
|
2015-03-08 21:56:28 +00:00
|
|
|
def run_domain_checks(rounded_time, env, output, pool):
	"""Run per-domain checks (DNS, mail, web) for every domain we manage,
	in parallel across `pool`, replaying buffered results in sorted order."""
	# Get the list of domains we handle mail for.
	mail_domains = get_mail_domains(env)

	# Get the list of domains we serve DNS zones for (i.e. does not include subdomains).
	dns_zonefiles = dict(get_dns_zones(env))
	dns_domains = set(dns_zonefiles)

	# Get the list of domains we serve HTTPS for.
	web_domains = set(get_web_domains(env) + get_default_www_redirects(env))

	domains_to_check = mail_domains | dns_domains | web_domains

	# Serial version:
	#for domain in sort_domains(domains_to_check, env):
	#	run_domain_checks_on_domain(domain, rounded_time, env, dns_domains, dns_zonefiles, mail_domains, web_domains)

	# Parallelize the checks across a worker pool.
	args = ((domain, rounded_time, env, dns_domains, dns_zonefiles, mail_domains, web_domains)
		for domain in domains_to_check)
	ret = pool.starmap(run_domain_checks_on_domain, args, chunksize=1)
	ret = dict(ret) # (domain, output) => { domain: output }
	for domain in sort_domains(ret, env):
		ret[domain].playback(output)
|
|
|
|
|
2015-03-08 21:56:28 +00:00
|
|
|
def run_domain_checks_on_domain(domain, rounded_time, env, dns_domains, dns_zonefiles, mail_domains, web_domains):
	"""Run all applicable checks for one domain.

	Runs inside a worker pool, so results are collected into a
	BufferedOutput and returned as (domain, output) for later playback.
	"""
	output = BufferedOutput()

	# The domain is IDNA-encoded, but for display use Unicode.
	output.add_heading(idna.decode(domain.encode('ascii')))

	if domain == env["PRIMARY_HOSTNAME"]:
		check_primary_hostname_dns(domain, env, output, dns_domains, dns_zonefiles)

	if domain in dns_domains:
		check_dns_zone(domain, env, output, dns_zonefiles)

	if domain in mail_domains:
		check_mail_domain(domain, env, output)

	if domain in web_domains:
		check_web_domain(domain, rounded_time, env, output)

	# DNSSEC suggestions go last (see check_dns_zone_suggestions).
	if domain in dns_domains:
		check_dns_zone_suggestions(domain, env, output, dns_zonefiles)

	return (domain, output)
|
2014-10-01 12:09:43 +00:00
|
|
|
|
2015-01-31 19:56:39 +00:00
|
|
|
def check_primary_hostname_dns(domain, env, output, dns_domains, dns_zonefiles):
	"""Checks specific to the box's PRIMARY_HOSTNAME: glue records, A record,
	reverse DNS, DANE TLSA for port 25, and the hostmaster@ alias."""
	# If a DS record is set on the zone containing this domain, check DNSSEC now.
	has_dnssec = False
	for zone in dns_domains:
		if zone == domain or domain.endswith("." + zone):
			if query_dns(zone, "DS", nxdomain=None) is not None:
				has_dnssec = True
				check_dnssec(zone, env, output, dns_zonefiles, is_checking_primary=True)

	ip = query_dns(domain, "A")
	ns_ips = query_dns("ns1." + domain, "A") + '/' + query_dns("ns2." + domain, "A")

	# Check that the ns1/ns2 hostnames resolve to A records. This information probably
	# comes from the TLD since the information is set at the registrar as glue records.
	# We're probably not actually checking that here but instead checking that we, as
	# the nameserver, are reporting the right info --- but if the glue is incorrect this
	# will probably fail.
	if ns_ips == env['PUBLIC_IP'] + '/' + env['PUBLIC_IP']:
		output.print_ok("Nameserver glue records are correct at registrar. [ns1/ns2.%s ↦ %s]" % (env['PRIMARY_HOSTNAME'], env['PUBLIC_IP']))

	elif ip == env['PUBLIC_IP']:
		# The NS records are not what we expect, but the domain resolves correctly, so
		# the user may have set up external DNS. List this discrepancy as a warning.
		output.print_warning("""Nameserver glue records (ns1.%s and ns2.%s) should be configured at your domain name
registrar as having the IP address of this box (%s). They currently report addresses of %s. If you have set up External DNS, this may be OK."""
			% (env['PRIMARY_HOSTNAME'], env['PRIMARY_HOSTNAME'], env['PUBLIC_IP'], ns_ips))

	else:
		output.print_error("""Nameserver glue records are incorrect. The ns1.%s and ns2.%s nameservers must be configured at your domain name
registrar as having the IP address %s. They currently report addresses of %s. It may take several hours for
public DNS to update after a change."""
			% (env['PRIMARY_HOSTNAME'], env['PRIMARY_HOSTNAME'], env['PUBLIC_IP'], ns_ips))

	# Check that PRIMARY_HOSTNAME resolves to PUBLIC_IP in public DNS.
	if ip == env['PUBLIC_IP']:
		output.print_ok("Domain resolves to box's IP address. [%s ↦ %s]" % (env['PRIMARY_HOSTNAME'], env['PUBLIC_IP']))
	else:
		output.print_error("""This domain must resolve to your box's IP address (%s) in public DNS but it currently resolves
to %s. It may take several hours for public DNS to update after a change. This problem may result from other
issues listed here."""
			% (env['PUBLIC_IP'], ip))

	# Check reverse DNS on the PRIMARY_HOSTNAME. Note that it might not be
	# a DNS zone if it is a subdomain of another domain we have a zone for.
	ipaddr_rev = dns.reversename.from_address(env['PUBLIC_IP'])
	existing_rdns = query_dns(ipaddr_rev, "PTR")
	if existing_rdns == domain:
		output.print_ok("Reverse DNS is set correctly at ISP. [%s ↦ %s]" % (env['PUBLIC_IP'], env['PRIMARY_HOSTNAME']))
	else:
		output.print_error("""Your box's reverse DNS is currently %s, but it should be %s. Your ISP or cloud provider will have instructions
on setting up reverse DNS for your box at %s.""" % (existing_rdns, domain, env['PUBLIC_IP']) )

	# Check the TLSA record.
	tlsa_qname = "_25._tcp." + domain
	tlsa25 = query_dns(tlsa_qname, "TLSA", nxdomain=None)
	tlsa25_expected = build_tlsa_record(env)
	if tlsa25 == tlsa25_expected:
		output.print_ok("""The DANE TLSA record for incoming mail is correct (%s).""" % tlsa_qname,)
	elif tlsa25 is None:
		if has_dnssec:
			# Omit a warning about it not being set if DNSSEC isn't enabled,
			# since TLSA shouldn't be used without DNSSEC.
			output.print_warning("""The DANE TLSA record for incoming mail is not set. This is optional.""")
	else:
		output.print_error("""The DANE TLSA record for incoming mail (%s) is not correct. It is '%s' but it should be '%s'.
It may take several hours for public DNS to update after a change."""
			% (tlsa_qname, tlsa25, tlsa25_expected))

	# Check that the hostmaster@ email address exists.
	check_alias_exists("Hostmaster contact address", "hostmaster@" + domain, env, output)
|
2014-06-22 16:28:55 +00:00
|
|
|
|
2015-04-09 13:04:36 +00:00
|
|
|
def check_alias_exists(alias_name, alias, env, output):
	"""Check that `alias` is configured as a mail alias with a destination.

	alias_name: human-readable label used in the status messages.
	alias: the full address (e.g. "hostmaster@example.com") to look up.
	"""
	# get_mail_aliases yields (address, receivers, ...) tuples; only the
	# first two fields matter here. Use a dict comprehension instead of the
	# original dict([...]) wrapper (flake8-comprehensions C404).
	mail_aliases = {address: receivers for address, receivers, *_ in get_mail_aliases(env)}
	if alias in mail_aliases:
		if mail_aliases[alias]:
			output.print_ok("%s exists as a mail alias. [%s ↦ %s]" % (alias_name, alias, mail_aliases[alias]))
		else:
			# The alias exists but its destination is empty.
			output.print_error("""You must set the destination of the mail alias for %s to direct email to you or another administrator.""" % alias)
	else:
		output.print_error("""You must add a mail alias for %s which directs email to you or another administrator.""" % alias)
|
2014-06-22 16:28:55 +00:00
|
|
|
|
2015-01-31 19:56:39 +00:00
|
|
|
def check_dns_zone(domain, env, output, dns_zonefiles):
	"""Check registrar NS settings (and DNSSEC, when enabled) for a zone we serve."""
	# If a DS record is set at the registrar, check DNSSEC first because it will affect the NS query.
	# If it is not set, we suggest it last.
	if query_dns(domain, "DS", nxdomain=None) is not None:
		check_dnssec(domain, env, output, dns_zonefiles)

	# We provide a DNS zone for the domain. It should have NS records set up
	# at the domain name's registrar pointing to this box. The secondary DNS
	# server may be customized. Unfortunately this may not check the domain's
	# whois information -- we may be getting the NS records from us rather than
	# the TLD, and so we're not actually checking the TLD. For that we'd need
	# to do a DNS trace.
	ip = query_dns(domain, "A")
	secondary_ns = get_secondary_dns(get_custom_dns_config(env), mode="NS") or ["ns2." + env['PRIMARY_HOSTNAME']]
	existing_ns = query_dns(domain, "NS")
	# query_dns returns NS values joined with "; "; build the expected value
	# in the same sorted, joined form for comparison.
	correct_ns = "; ".join(sorted(["ns1." + env['PRIMARY_HOSTNAME']] + secondary_ns))
	if existing_ns.lower() == correct_ns.lower():
		output.print_ok("Nameservers are set correctly at registrar. [%s]" % correct_ns)
	elif ip == env['PUBLIC_IP']:
		# The domain resolves correctly, so maybe the user is using External DNS.
		output.print_warning("""The nameservers set on this domain at your domain name registrar should be %s. They are currently %s.
If you are using External DNS, this may be OK."""
			% (correct_ns, existing_ns) )
	else:
		output.print_error("""The nameservers set on this domain are incorrect. They are currently %s. Use your domain name registrar's
control panel to set the nameservers to %s."""
			% (existing_ns, correct_ns) )
|
|
|
|
|
2015-01-31 19:56:39 +00:00
|
|
|
def check_dns_zone_suggestions(domain, env, output, dns_zonefiles):
	"""Suggest enabling DNSSEC when no DS record is set at the registrar."""
	# Since DNSSEC is optional, if a DS record is NOT set at the registrar suggest it.
	# (If it was set, we did the check earlier.)
	if query_dns(domain, "DS", nxdomain=None) is None:
		check_dnssec(domain, env, output, dns_zonefiles)
|
2014-10-01 12:09:43 +00:00
|
|
|
|
|
|
|
|
2015-01-31 19:56:39 +00:00
|
|
|
def check_dnssec(domain, env, output, dns_zonefiles, is_checking_primary=False):
	"""Compare the DS record published at the registrar with our expected values.

	Reads the pre-generated .ds file for the zone, queries public DNS for the
	actual DS record, and prints ok/warning/error plus the correct DS
	parameters for the user to enter at their registrar.

	is_checking_primary: when True (called from check_primary_hostname_dns),
	suppress ok/not-set output and print only a short error on mismatch.
	"""
	# See if the domain has a DS record set at the registrar. The DS record may have
	# several forms. We have to be prepared to check for any valid record. We've
	# pre-generated all of the valid digests --- read them in.
	# (with-statements added so the file handles are closed; the original leaked them.)
	with open('/etc/nsd/zones/' + dns_zonefiles[domain] + '.ds') as f:
		ds_correct = f.read().strip().split("\n")
	digests = { }
	for rr_ds in ds_correct:
		# The 5th tab-separated field is "keytag alg digalg digest".
		# ds_keytag/ds_alg from the final iteration are reused below; all
		# rows describe the same KSK, so they agree across iterations.
		ds_keytag, ds_alg, ds_digalg, ds_digest = rr_ds.split("\t")[4].split(" ")
		digests[ds_digalg] = ds_digest

	# Some registrars may want the public key so they can compute the digest. The DS
	# record that we suggest using is for the KSK (and that's how the DS records were generated).
	alg_name_map = { '7': 'RSASHA1-NSEC3-SHA1', '8': 'RSASHA256' }
	dnssec_keys = load_env_vars_from_file(os.path.join(env['STORAGE_ROOT'], 'dns/dnssec/%s.conf' % alg_name_map[ds_alg]))
	with open(os.path.join(env['STORAGE_ROOT'], 'dns/dnssec/' + dnssec_keys['KSK'] + '.key')) as f:
		dnssec_pubkey = f.read().split("\t")[3].split(" ")[3]

	# Query public DNS for the DS record at the registrar.
	ds = query_dns(domain, "DS", nxdomain=None)
	ds_looks_valid = ds and len(ds.split(" ")) == 4
	if ds_looks_valid: ds = ds.split(" ")
	if ds_looks_valid and ds[0] == ds_keytag and ds[1] == ds_alg and ds[3] == digests.get(ds[2]):
		if is_checking_primary: return
		output.print_ok("DNSSEC 'DS' record is set correctly at registrar.")
	else:
		if ds is None:  # was `ds == None`; identity comparison is correct for None
			if is_checking_primary: return
			output.print_warning("""This domain's DNSSEC DS record is not set. The DS record is optional. The DS record activates DNSSEC.
To set a DS record, you must follow the instructions provided by your domain name registrar and provide to them this information:""")
		else:
			if is_checking_primary:
				output.print_error("""The DNSSEC 'DS' record for %s is incorrect. See further details below.""" % domain)
				return
			output.print_error("""This domain's DNSSEC DS record is incorrect. The chain of trust is broken between the public DNS system
and this machine's DNS server. It may take several hours for public DNS to update after a change. If you did not recently
make a change, you must resolve this immediately by following the instructions provided by your domain name registrar and
provide to them this information:""")
		# Print the parameters the registrar needs, annotating any observed
		# mismatches against what public DNS currently returns.
		output.print_line("")
		output.print_line("Key Tag: " + ds_keytag + ("" if not ds_looks_valid or ds[0] == ds_keytag else " (Got '%s')" % ds[0]))
		output.print_line("Key Flags: KSK")
		output.print_line(
			("Algorithm: %s / %s" % (ds_alg, alg_name_map[ds_alg]))
			+ ("" if not ds_looks_valid or ds[1] == ds_alg else " (Got '%s')" % ds[1]))
		# see http://www.iana.org/assignments/dns-sec-alg-numbers/dns-sec-alg-numbers.xhtml
		output.print_line("Digest Type: 2 / SHA-256")
		# http://www.ietf.org/assignments/ds-rr-types/ds-rr-types.xml
		output.print_line("Digest: " + digests['2'])
		if ds_looks_valid and ds[3] != digests.get(ds[2]):
			output.print_line("(Got digest type %s and digest %s which do not match.)" % (ds[2], ds[3]))
		output.print_line("Public Key: ")
		output.print_line(dnssec_pubkey, monospace=True)
		output.print_line("")
		output.print_line("Bulk/Record Format:")
		output.print_line("" + ds_correct[0])
		output.print_line("")
|
|
|
|
|
|
|
|
def check_mail_domain(domain, env, output):
    """Check that public DNS routes mail for `domain` to this box.

    Verifies the MX record points at PRIMARY_HOSTNAME (or that the MX
    fallback via the A record works), that a postmaster@ contact exists
    unless the whole domain is aliased, and that the domain is not listed
    on the Spamhaus Domain Block List. Results are reported via `output`.
    """

    # Check the MX record.
    recommended_mx = "10 " + env['PRIMARY_HOSTNAME']
    mx = query_dns(domain, "MX", nxdomain=None)

    if mx is None:
        mxhost = None
    else:
        # query_dns returns a semicolon-delimited list
        # of priority-host pairs.
        mxhost = mx.split('; ')[0].split(' ')[1]

    # Fixed: compare to None with 'is', not '=='.
    if mxhost is None:
        # A missing MX record is okay on the primary hostname because
        # the primary hostname's A record (the MX fallback) is... itself,
        # which is what we want the MX to be.
        if domain == env['PRIMARY_HOSTNAME']:
            output.print_ok("Domain's email is directed to this domain. [%s has no MX record, which is ok]" % (domain,))

        # And a missing MX record is okay on other domains if the A record
        # matches the A record of the PRIMARY_HOSTNAME. Actually this will
        # probably confuse DANE TLSA, but we'll let that slide for now.
        else:
            domain_a = query_dns(domain, "A", nxdomain=None)
            primary_a = query_dns(env['PRIMARY_HOSTNAME'], "A", nxdomain=None)
            if domain_a is not None and domain_a == primary_a:
                output.print_ok("Domain's email is directed to this domain. [%s has no MX record but its A record is OK]" % (domain,))
            else:
                output.print_error("""This domain's DNS MX record is not set. It should be '%s'. Mail will not
be delivered to this box. It may take several hours for public DNS to update after a
change. This problem may result from other issues listed here.""" % (recommended_mx,))

    elif mxhost == env['PRIMARY_HOSTNAME']:
        good_news = "Domain's email is directed to this domain. [%s ↦ %s]" % (domain, mx)
        if mx != recommended_mx:
            good_news += " This configuration is non-standard. The recommended configuration is '%s'." % (recommended_mx,)
        output.print_ok(good_news)

    else:
        output.print_error("""This domain's DNS MX record is incorrect. It is currently set to '%s' but should be '%s'. Mail will not
be delivered to this box. It may take several hours for public DNS to update after a change. This problem may result from
other issues listed here.""" % (mx, recommended_mx))

    # Check that the postmaster@ email address exists. Not required if the domain has a
    # catch-all address or domain alias.
    if "@" + domain not in [address for address, *_ in get_mail_aliases(env)]:
        check_alias_exists("Postmaster contact address", "postmaster@" + domain, env, output)

    # Stop if the domain is listed in the Spamhaus Domain Block List.
    # The user might have chosen a domain that was previously in use by a spammer
    # and will not be able to reliably send mail.
    dbl = query_dns(domain+'.dbl.spamhaus.org', "A", nxdomain=None)
    if dbl is None:
        output.print_ok("Domain is not blacklisted by dbl.spamhaus.org.")
    else:
        output.print_error("""This domain is listed in the Spamhaus Domain Block List (code %s),
which may prevent recipients from receiving your mail.
See http://www.spamhaus.org/dbl/ and http://www.spamhaus.org/query/domain/%s.""" % (dbl, domain))
|
|
|
2015-03-08 21:56:28 +00:00
|
|
|
def check_web_domain(domain, rounded_time, env, output):
    """Check that `domain` is ready to serve a website from this box."""
    # The A record only needs checking here for non-primary domains: the
    # primary hostname's A record is already validated (more strictly,
    # because mail requires it) elsewhere. For every other domain a correct
    # A record is required to reach its website.
    if domain != env['PRIMARY_HOSTNAME']:
        resolved_ip = query_dns(domain, "A")
        if resolved_ip != env['PUBLIC_IP']:
            output.print_error("""This domain should resolve to your box's IP address (%s) if you would like the box to serve
webmail or a website on this domain. The domain currently resolves to %s in public DNS. It may take several hours for
public DNS to update after a change. This problem may result from other issues listed here.""" % (env['PUBLIC_IP'], resolved_ip))
        else:
            output.print_ok("Domain resolves to this box's IP address. [%s ↦ %s]" % (domain, env['PUBLIC_IP']))

    # PRIMARY_HOSTNAME needs a signed SSL certificate because that's where
    # users log in with IMAP or webmail; any other domain we serve a website
    # for needs one as well.
    check_ssl_cert(domain, rounded_time, env, output)
|
2014-06-22 15:34:36 +00:00
|
|
|
def query_dns(qname, rtype, nxdomain='[Not Set]'):
    """Resolve `qname`/`rtype` and return a normalized answer string.

    Multiple answers are sorted and joined with "; " so they can be compared
    against a well-known expected value. Returns `nxdomain` when there is no
    answer and "[timeout]" when the query times out.
    """
    # Make the qname absolute by appending a period. Without this,
    # dns.resolver.query will fall back a failed lookup to a second query
    # with this machine's hostname appended, which has caused false-positive
    # Spamhaus reports. Reverse lookups pass a dns.name.Name instance which
    # is already absolute, so leave those untouched.
    if isinstance(qname, str):
        qname += "."

    # Do the query.
    try:
        answers = dns.resolver.query(qname, rtype)
    except (dns.resolver.NoNameservers, dns.resolver.NXDOMAIN, dns.resolver.NoAnswer):
        # Host did not have an answer for this query; not sure what the
        # difference is between the exceptions.
        return nxdomain
    except dns.exception.Timeout:
        return "[timeout]"

    # Strip trailing periods from each answer (that's how qnames are encoded
    # in DNS but it's confusing for us), sort for a stable order, and join.
    records = [str(answer).rstrip('.') for answer in answers]
    return "; ".join(sorted(records))
|
|
2015-03-08 21:56:28 +00:00
|
|
|
def check_ssl_cert(domain, rounded_time, env, output):
    # Check that SSL certificate is signed.
    #
    # Reports through `output`: an OK line when the certificate installed
    # for `domain` validates, fingerprint-verification instructions when it
    # is self-signed, or an error with details otherwise. `rounded_time` is
    # passed through to check_certificate to coarsen expiry reporting.

    # Skip the check if the A record is not pointed here.
    if query_dns(domain, "A", None) not in (env['PUBLIC_IP'], None): return

    # Where is the SSL stored?
    ssl_key, ssl_certificate, ssl_via = get_domain_ssl_files(domain, env)

    if not os.path.exists(ssl_certificate):
        output.print_error("The SSL certificate file for this domain is missing.")
        return

    # Check that the certificate is good.

    cert_status, cert_status_details = check_certificate(domain, ssl_certificate, ssl_key, rounded_time=rounded_time)

    if cert_status == "OK":
        # The certificate is ok. The details has expiry info.
        output.print_ok("SSL certificate is signed & valid. %s %s" % (ssl_via if ssl_via else "", cert_status_details))

    elif cert_status == "SELF-SIGNED":
        # Offer instructions for purchasing a signed certificate.

        # Get the certificate's fingerprint via openssl so the user can
        # manually confirm the security exception in their client.
        fingerprint = shell('check_output', [
            "openssl",
            "x509",
            "-in", ssl_certificate,
            "-noout",
            "-fingerprint"
            ])
        fingerprint = re.sub(".*Fingerprint=", "", fingerprint).strip()

        if domain == env['PRIMARY_HOSTNAME']:
            # Self-signed on the primary hostname is an error: mail clients
            # connect here and will warn on every connection.
            output.print_error("""The SSL certificate for this domain is currently self-signed. You will get a security
warning when you check or send email and when visiting this domain in a web browser (for webmail or
static site hosting). Use the SSL Certificates page in this control panel to install a signed SSL certificate.
You may choose to leave the self-signed certificate in place and confirm the security exception, but check that
the certificate fingerprint matches the following:""")
            output.print_line("")
            output.print_line(" " + fingerprint, monospace=True)
        else:
            # On other domains a self-signed certificate is only a warning:
            # it matters only if a website is actually served there.
            output.print_warning("""The SSL certificate for this domain is currently self-signed. Visitors to a website on
this domain will get a security warning. If you are not serving a website on this domain, then it is
safe to leave the self-signed certificate in place. Use the SSL Certificates page in this control panel to
install a signed SSL certificate.""")

    else:
        # Some other problem (wrong domain, expired, broken chain, ...).
        output.print_error("The SSL certificate has a problem: " + cert_status)
        if cert_status_details:
            output.print_line("")
            output.print_line(cert_status_details)
            output.print_line("")
|
|
|
2015-06-21 14:36:41 +00:00
|
|
|
def check_certificate(domain, ssl_certificate, ssl_private_key, warn_if_expiring_soon=True, rounded_time=False, just_check_domain=False):
    # Check that the ssl_certificate & ssl_private_key files are good
    # for the provided domain.
    #
    # Returns a tuple (status, details):
    #   ("OK", expiry_info)      -- the certificate validates;
    #   ("SELF-SIGNED", None)    -- special flag for self-signed certs;
    #   (error_message, details) -- anything else; details may be None.
    #
    # `domain` may be None to skip the name check; `ssl_private_key` may be
    # None to skip the key-match check. With `just_check_domain`, only the
    # name and key checks are run. `rounded_time` coarsens expiry reporting
    # so output is stable across runs; `warn_if_expiring_soon` turns a
    # <=31-day expiry into a non-OK status.

    from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
    from cryptography.x509 import Certificate, DNSName, ExtensionNotFound, OID_COMMON_NAME, OID_SUBJECT_ALTERNATIVE_NAME
    import idna

    # The ssl_certificate file may contain a chain of certificates. We'll
    # need to split that up before we can pass anything to openssl or
    # parse them in Python. Parse it with the cryptography library.
    try:
        ssl_cert_chain = load_cert_chain(ssl_certificate)
        cert = load_pem(ssl_cert_chain[0])
        if not isinstance(cert, Certificate): raise ValueError("This is not a certificate file.")
    except ValueError as e:
        return ("There is a problem with the certificate file: %s" % str(e), None)

    # First check that the domain name is one of the names allowed by
    # the certificate.
    if domain is not None:
        # The domain may be found in the Subject Common Name (CN). This comes back as an IDNA (ASCII)
        # string, which is the format we store domains in - so good.
        certificate_names = set()
        try:
            certificate_names.add(
                cert.subject.get_attributes_for_oid(OID_COMMON_NAME)[0].value
                )
        except IndexError:
            # No common name? Certificate is probably generated incorrectly.
            # But we'll let it error-out when it doesn't find the domain.
            pass

        # ... or be one of the Subject Alternative Names. The cryptography library handily IDNA-decodes
        # the names for us. We must encode back to ASCII, but wildcard certificates can't pass through
        # IDNA encoding/decoding so we must special-case. See https://github.com/pyca/cryptography/pull/2071.
        def idna_decode_dns_name(dns_name):
            if dns_name.startswith("*."):
                return "*." + idna.encode(dns_name[2:]).decode('ascii')
            else:
                return idna.encode(dns_name).decode('ascii')

        try:
            sans = cert.extensions.get_extension_for_oid(OID_SUBJECT_ALTERNATIVE_NAME).value.get_values_for_type(DNSName)
            for san in sans:
                certificate_names.add(idna_decode_dns_name(san))
        except ExtensionNotFound:
            pass

        # Check that the domain appears among the acceptable names, or a wildcard
        # form of the domain name (which is a stricter check than the specs but
        # should work in normal cases).
        wildcard_domain = re.sub("^[^\.]+", "*", domain)
        if domain not in certificate_names and wildcard_domain not in certificate_names:
            return ("The certificate is for the wrong domain name. It is for %s."
                % ", ".join(sorted(certificate_names)), None)

    # Second, check that the certificate matches the private key.
    if ssl_private_key is not None:
        try:
            priv_key = load_pem(open(ssl_private_key, 'rb').read())
        except ValueError as e:
            return ("The private key file %s is not a private key file: %s" % (ssl_private_key, str(e)), None)

        if not isinstance(priv_key, RSAPrivateKey):
            return ("The private key file %s is not a private key file." % ssl_private_key, None)

        # Compare the public numbers: the key pair matches iff the public
        # key embedded in the certificate equals the private key's public half.
        if priv_key.public_key().public_numbers() != cert.public_key().public_numbers():
            return ("The certificate does not correspond to the private key at %s." % ssl_private_key, None)

        # We could also use the openssl command line tool to get the modulus
        # listed in each file. The output of each command below looks like "Modulus=XXXXX".
        # $ openssl rsa -inform PEM -noout -modulus -in ssl_private_key
        # $ openssl x509 -in ssl_certificate -noout -modulus

    # Third, check if the certificate is self-signed. Return a special flag string.
    if cert.issuer == cert.subject:
        return ("SELF-SIGNED", None)

    # When selecting which certificate to use for non-primary domains, we check if the primary
    # certificate or a www-parent-domain certificate is good for the domain. There's no need
    # to run extra checks beyond this point.
    if just_check_domain:
        return ("OK", None)

    # Check that the certificate hasn't expired. The datetimes returned by the
    # certificate are 'naive' and in UTC. We need to get the current time in UTC.
    now = datetime.datetime.utcnow()
    if not(cert.not_valid_before <= now <= cert.not_valid_after):
        return ("The certificate has expired or is not yet valid. It is valid from %s to %s." % (cert.not_valid_before, cert.not_valid_after), None)

    # Next validate that the certificate is valid. This checks whether the certificate
    # is self-signed, that the chain of trust makes sense, that it is signed by a CA
    # that Ubuntu has installed on this machine's list of CAs, and I think that it hasn't
    # expired.

    # The certificate chain has to be passed separately and is given via STDIN.
    # This command returns a non-zero exit status in most cases, so trap errors.
    retcode, verifyoutput = shell('check_output', [
        "openssl",
        "verify", "-verbose",
        "-purpose", "sslserver", "-policy_check",]
        + ([] if len(ssl_cert_chain) == 1 else ["-untrusted", "/proc/self/fd/0"])
        + [ssl_certificate],
        input=b"\n\n".join(ssl_cert_chain[1:]),
        trap=True)

    if "self signed" in verifyoutput:
        # Certificate is self-signed. Probably we detected this above.
        return ("SELF-SIGNED", None)

    elif retcode != 0:
        if "unable to get local issuer certificate" in verifyoutput:
            return ("The certificate is missing an intermediate chain or the intermediate chain is incorrect or incomplete. (%s)" % verifyoutput, None)

        # There is some unknown problem. Return the `openssl verify` raw output.
        return ("There is a problem with the SSL certificate.", verifyoutput.strip())

    else:
        # `openssl verify` returned a zero exit status so the cert is currently
        # good.

        # But is it expiring soon?
        cert_expiration_date = cert.not_valid_after
        ndays = (cert_expiration_date-now).days
        # When rounded_time is set, report coarse buckets so the saved output
        # doesn't change every day (which would trigger change notices).
        if not rounded_time or ndays < 7:
            expiry_info = "The certificate expires in %d days on %s." % (ndays, cert_expiration_date.strftime("%x"))
        elif ndays <= 14:
            expiry_info = "The certificate expires in less than two weeks, on %s." % cert_expiration_date.strftime("%x")
        elif ndays <= 31:
            expiry_info = "The certificate expires in less than a month, on %s." % cert_expiration_date.strftime("%x")
        else:
            expiry_info = "The certificate expires on %s." % cert_expiration_date.strftime("%x")

        if ndays <= 31 and warn_if_expiring_soon:
            return ("The certificate is expiring soon: " + expiry_info, None)

        # Return the special OK code.
        return ("OK", expiry_info)
|
|
|
2015-06-21 14:36:41 +00:00
|
|
|
def load_cert_chain(pemfile):
|
|
|
|
# A certificate .pem file may contain a chain of certificates.
|
|
|
|
# Load the file and split them apart.
|
2015-07-13 21:04:34 +00:00
|
|
|
re_pem = rb"(-+BEGIN (?:.+)-+[\r\n]+(?:[A-Za-z0-9+/=]{1,64}[\r\n]+)+-+END (?:.+)-+[\r\n]+)"
|
2015-06-21 14:36:41 +00:00
|
|
|
with open(pemfile, "rb") as f:
|
|
|
|
pem = f.read() + b"\n" # ensure trailing newline
|
|
|
|
pemblocks = re.findall(re_pem, pem)
|
|
|
|
if len(pemblocks) == 0:
|
|
|
|
raise ValueError("File does not contain valid PEM data.")
|
|
|
|
return pemblocks
|
|
|
|
|
|
|
|
def load_pem(pem):
    """Parse one "---BEGIN ... END---" PEM block (bytes) into an object.

    Returns a cryptography-package private key for "RSA PRIVATE KEY" /
    "PRIVATE KEY" blocks or an x509 certificate for "CERTIFICATE" blocks.
    Raises ValueError for anything else.
    """
    from cryptography.x509 import load_pem_x509_certificate
    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.backends import default_backend

    header = re.match(b"-+BEGIN (.*?)-+[\r\n]", pem)
    if header is None:
        raise ValueError("File is not a valid PEM-formatted file.")

    object_type = header.group(1)
    if object_type in (b"RSA PRIVATE KEY", b"PRIVATE KEY"):
        return serialization.load_pem_private_key(pem, password=None, backend=default_backend())
    if object_type == b"CERTIFICATE":
        return load_pem_x509_certificate(pem, default_backend())
    raise ValueError("Unsupported PEM object type: " + object_type.decode("ascii", "replace"))
|
|
|
|
2014-08-21 11:09:51 +00:00
|
|
|
# Cache of (timestamp, package list) for list_apt_updates.
_apt_updates = None

def list_apt_updates(apt_update=True):
    """Return the list of pending apt package updates, cached for 8 hours.

    Each entry is a dict with keys "package", "version" (available), and
    "current_version". When `apt_update` is False, skips the slow
    `apt-get update` refresh (used by the web status checks page).
    """
    global _apt_updates

    # Serve the cached answer if it is still fresh.
    cutoff = datetime.datetime.now() - datetime.timedelta(hours=8)
    if _apt_updates is not None and _apt_updates[0] > cutoff:
        return _apt_updates[1]

    # Refresh the package list. This should be running daily anyway, so on
    # the status checks page don't do it because it is slow.
    if apt_update:
        shell("check_call", ["/usr/bin/apt-get", "-qq", "update"])

    # Simulate an upgrade to see what apt-get would install.
    simulated_install = shell("check_output", ["/usr/bin/apt-get", "-qq", "-s", "upgrade"])
    pkgs = []
    for raw_line in simulated_install.split('\n'):
        if raw_line.strip() == "":
            continue
        if re.match(r'^Conf .*', raw_line):
            # "Conf" lines are not informative; skip them.
            continue
        match = re.match(r'^Inst (.*) \[(.*)\] \((\S*)', raw_line)
        if match:
            pkgs.append({ "package": match.group(1), "version": match.group(3), "current_version": match.group(2) })
        else:
            # Unrecognized line; surface it verbatim so nothing is hidden.
            pkgs.append({ "package": "[" + raw_line + "]", "version": "", "current_version": "" })

    # Cache for future requests.
    _apt_updates = (datetime.datetime.now(), pkgs)

    return pkgs
|
|
|
|
2015-06-25 13:42:22 +00:00
|
|
|
def what_version_is_this(env):
    """Return the `git describe` tag of this Mail-in-a-Box installation.

    Git may not be installed and Mail-in-a-Box may not have been cloned from
    github, so this function may raise all sorts of exceptions.
    """
    # The install directory is two levels up from this file.
    miab_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    described = shell("check_output", ["/usr/bin/git", "describe"], env={"GIT_DIR": os.path.join(miab_dir, '.git')})
    return described.strip()
|
|
|
|
|
|
|
def get_latest_miab_version():
    """Fetch the current released Mail-in-a-Box version.

    Downloads https://mailinabox.email/bootstrap.sh and extracts the TAG=
    assignment that names the current product version.

    Raises ValueError if the script does not contain a TAG= line, and the
    usual urllib exceptions on network failure.
    """
    import urllib.request
    # A timeout prevents the status checks from hanging indefinitely when
    # the remote server is unreachable (urlopen has no default timeout).
    response = urllib.request.urlopen("https://mailinabox.email/bootstrap.sh?ping=1", timeout=10).read()
    m = re.search(b'TAG=(.*)', response)
    if m is None:
        # Previously this raised an opaque AttributeError on .group(1).
        raise ValueError("Could not find TAG= in the bootstrap script.")
    return m.group(1).decode("utf8")
|
|
|
|
2015-03-08 21:56:28 +00:00
|
|
|
def run_and_output_changes(env, pool, send_via_email):
    """Run the status checks and report what changed since the last run.

    Compares the current check output against the cached output from the
    previous run, prints (or emails, when `send_via_email` is True) the
    differences grouped by heading, and stores the current output for the
    next comparison.
    """
    import json
    from difflib import SequenceMatcher

    if not send_via_email:
        out = ConsoleOutput()
    else:
        # Collect output in memory so it can be emailed if non-empty.
        import io
        out = FileOutput(io.StringIO(""), 70)

    # Run status checks.
    cur = BufferedOutput()
    run_checks(True, env, cur, pool)

    # Load previously saved status checks.
    cache_fn = "/var/cache/mailinabox/status_checks.json"
    if os.path.exists(cache_fn):
        # Use a context manager so the file handle is closed deterministically.
        with open(cache_fn) as f:
            prev = json.load(f)
    else:
        # Fixed: on the very first run there is no cache file; 'prev' was
        # previously left unassigned, raising NameError below.
        prev = []

    # Group the serial output into categories by the headings.
    def group_by_heading(lines):
        from collections import OrderedDict
        ret = OrderedDict()
        k = []
        ret["No Category"] = k
        for line_type, line_args, line_kwargs in lines:
            if line_type == "add_heading":
                k = []
                ret[line_args[0]] = k
            else:
                k.append((line_type, line_args, line_kwargs))
        return ret
    prev_status = group_by_heading(prev)
    cur_status = group_by_heading(cur.buf)

    # Compare the previous to the current status checks
    # category by category.
    for category, cur_lines in cur_status.items():
        if category not in prev_status:
            out.add_heading(category + " -- Added")
            BufferedOutput(with_lines=cur_lines).playback(out)
        else:
            # Actual comparison starts here...
            prev_lines = prev_status[category]
            def stringify(lines):
                # JSON-encode each recorded call so SequenceMatcher can
                # compare lines as opaque strings.
                return [json.dumps(line) for line in lines]
            diff = SequenceMatcher(None, stringify(prev_lines), stringify(cur_lines)).get_opcodes()
            for op, i1, i2, j1, j2 in diff:
                if op == "replace":
                    out.add_heading(category + " -- Previously:")
                elif op == "delete":
                    out.add_heading(category + " -- Removed")
                if op in ("replace", "delete"):
                    BufferedOutput(with_lines=prev_lines[i1:i2]).playback(out)

                if op == "replace":
                    out.add_heading(category + " -- Currently:")
                elif op == "insert":
                    out.add_heading(category + " -- Added")
                if op in ("replace", "insert"):
                    BufferedOutput(with_lines=cur_lines[j1:j2]).playback(out)

    for category, prev_lines in prev_status.items():
        if category not in cur_status:
            out.add_heading(category)
            out.print_warning("This section was removed.")

    if send_via_email:
        # If there were changes, send off an email.
        buf = out.buf.getvalue()
        if len(buf) > 0:
            # create MIME message
            from email.message import Message
            msg = Message()
            msg['From'] = "\"%s\" <administrator@%s>" % (env['PRIMARY_HOSTNAME'], env['PRIMARY_HOSTNAME'])
            msg['To'] = "administrator@%s" % env['PRIMARY_HOSTNAME']
            msg['Subject'] = "[%s] Status Checks Change Notice" % env['PRIMARY_HOSTNAME']
            msg.set_payload(buf, "UTF-8")

            # send to administrator@
            import smtplib
            mailserver = smtplib.SMTP('localhost', 25)
            mailserver.ehlo()
            mailserver.sendmail(
                "administrator@%s" % env['PRIMARY_HOSTNAME'],  # MAIL FROM
                "administrator@%s" % env['PRIMARY_HOSTNAME'],  # RCPT TO
                msg.as_string())
            mailserver.quit()

    # Store the current status checks output for next time.
    os.makedirs(os.path.dirname(cache_fn), exist_ok=True)
    with open(cache_fn, "w") as f:
        json.dump(cur.buf, f, indent=True)
|
|
|
|
|
|
class FileOutput:
    """Renders status-check output to a file-like object, word-wrapping
    messages to a fixed column width."""

    def __init__(self, buf, width):
        self.buf = buf
        self.width = width

    def add_heading(self, heading):
        # A blank line, the heading, then an '=' underline of equal length.
        self.buf.write("\n")
        self.buf.write(heading + "\n")
        self.buf.write("=" * len(heading) + "\n")

    def print_ok(self, message):
        self.print_block(message, first_line="✓ ")

    def print_error(self, message):
        self.print_block(message, first_line="✖ ")

    def print_warning(self, message):
        self.print_block(message, first_line="? ")

    def print_block(self, message, first_line="  "):
        # Emit the prefix, then the message word-wrapped so continuation
        # lines fit within self.width columns.
        self.buf.write(first_line)
        # Collapse embedded newlines (and their leading indentation) into
        # single spaces so triple-quoted message literals flow as one block.
        message = re.sub("\n\s*", " ", message)
        column = 0
        for word in re.split("(\s+)", message):
            if column + len(word) > self.width - 1 - len(first_line):
                # Wrap: start a new, slightly indented continuation line.
                self.buf.write("\n")
                self.buf.write(" ")
                column = 0
            if column == 0 and word.strip() == "":
                # Don't begin a line with whitespace.
                continue
            self.buf.write(word)
            column += len(word)
        self.buf.write("\n")

    def print_line(self, message, monospace=False):
        # monospace is accepted for interface compatibility; plain file
        # output renders all text identically.
        for line in message.split("\n"):
            self.print_block(line)
|
|
|
2015-03-08 21:56:28 +00:00
|
|
|
class ConsoleOutput(FileOutput):
    """FileOutput bound to stdout, auto-detecting the terminal width."""

    def __init__(self):
        self.buf = sys.stdout
        try:
            # Ask the terminal for its size via stty ("rows cols").
            self.width = int(shell('check_output', ['stty', 'size']).split()[1])
        except Exception:
            # No terminal (e.g. run from cron) or stty failed; use a fixed
            # width. Was a bare 'except:', which would also have swallowed
            # KeyboardInterrupt and SystemExit.
            self.width = 76
|
|
|
2015-01-31 20:40:20 +00:00
|
|
|
class BufferedOutput:
    """Records output-method calls so they can be replayed later against a
    real output object (e.g. FileOutput)."""

    # The output methods that may be recorded.
    _RECORDABLE = ("add_heading", "print_ok", "print_error", "print_warning", "print_block", "print_line")

    def __init__(self, with_lines=None):
        # Start from previously recorded lines if provided; otherwise empty.
        self.buf = with_lines if with_lines else []

    def __getattr__(self, attr):
        if attr not in ("add_heading", "print_ok", "print_error", "print_warning", "print_block", "print_line"):
            raise AttributeError
        # Hand back a recorder that appends the call and its arguments.
        def recorder(*args, **kwargs):
            self.buf.append((attr, args, kwargs))
        return recorder

    def playback(self, output):
        """Re-issue every recorded call against `output`, in order."""
        for method, args, kwargs in self.buf:
            getattr(output, method)(*args, **kwargs)
|
|
|
2015-02-18 16:42:18 +00:00
|
|
|
|
2014-06-22 15:34:36 +00:00
|
|
|
if __name__ == "__main__":
|
|
|
|
from utils import load_environment
|
2015-03-08 21:56:28 +00:00
|
|
|
|
2014-08-17 22:43:57 +00:00
|
|
|
env = load_environment()
|
2015-03-08 21:56:28 +00:00
|
|
|
pool = multiprocessing.pool.Pool(processes=10)
|
|
|
|
|
2014-08-17 22:43:57 +00:00
|
|
|
if len(sys.argv) == 1:
|
2015-03-08 21:56:28 +00:00
|
|
|
run_checks(False, env, ConsoleOutput(), pool)
|
|
|
|
|
|
|
|
elif sys.argv[1] == "--show-changes":
|
|
|
|
run_and_output_changes(env, pool, sys.argv[-1] == "--smtp")
|
|
|
|
|
2014-08-17 22:43:57 +00:00
|
|
|
elif sys.argv[1] == "--check-primary-hostname":
|
|
|
|
# See if the primary hostname appears resolvable and has a signed certificate.
|
|
|
|
domain = env['PRIMARY_HOSTNAME']
|
|
|
|
if query_dns(domain, "A") != env['PUBLIC_IP']:
|
|
|
|
sys.exit(1)
|
2015-02-17 00:40:43 +00:00
|
|
|
ssl_key, ssl_certificate, ssl_via = get_domain_ssl_files(domain, env)
|
2014-08-17 22:43:57 +00:00
|
|
|
if not os.path.exists(ssl_certificate):
|
|
|
|
sys.exit(1)
|
2015-05-14 19:16:31 +00:00
|
|
|
cert_status, cert_status_details = check_certificate(domain, ssl_certificate, ssl_key, warn_if_expiring_soon=False)
|
2014-08-17 22:43:57 +00:00
|
|
|
if cert_status != "OK":
|
|
|
|
sys.exit(1)
|
|
|
|
sys.exit(0)
|
2015-06-25 13:42:22 +00:00
|
|
|
|
|
|
|
elif sys.argv[1] == "--version":
|
|
|
|
print(what_version_is_this(env))
|