First big pass on PEP8'ing all the things.

All PEP8 errors (except line length) have been fixed except one.  That
one will require a little bit of refactoring.
This commit is contained in:
Jack Twilley 2015-02-22 13:20:02 -08:00
parent 7ec662c83f
commit 86a31cd978
17 changed files with 3672 additions and 3337 deletions

View File

@ -1,129 +1,134 @@
import base64, os, os.path, hmac import base64
import os
import os.path
import hmac
from flask import make_response from flask import make_response
import utils import utils
from mailconfig import get_mail_password, get_mail_user_privileges from mailconfig import get_mail_password, get_mail_user_privileges
DEFAULT_KEY_PATH = '/var/lib/mailinabox/api.key' DEFAULT_KEY_PATH = '/var/lib/mailinabox/api.key'
DEFAULT_AUTH_REALM = 'Mail-in-a-Box Management Server' DEFAULT_AUTH_REALM = 'Mail-in-a-Box Management Server'
class KeyAuthService:
    """Generate an API key for authenticating clients.

    Clients must read the key from the key file and send the key with all
    HTTP requests. The key is passed as the username field in the standard
    HTTP Basic Auth header.
    """

    def __init__(self):
        self.auth_realm = DEFAULT_AUTH_REALM
        self.key = self._generate_key()
        self.key_path = DEFAULT_KEY_PATH

    def write_key(self):
        """Write key to file so authorized clients can get the key.

        The key file is created with mode 0640 so that additional users can
        be authorized to access the API by granting group/ACL read
        permissions on the key file.
        """
        def create_file_with_mode(path, mode):
            # Based on answer by A-B-B: http://stackoverflow.com/a/15015748
            old_umask = os.umask(0)
            try:
                return os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, mode), 'w')
            finally:
                os.umask(old_umask)

        os.makedirs(os.path.dirname(self.key_path), exist_ok=True)

        with create_file_with_mode(self.key_path, 0o640) as key_file:
            key_file.write(self.key + '\n')

    def authenticate(self, request, env):
        """Test whether the client key passed in the HTTP Authorization
        header matches the service key, or whether the username/password
        passed in the header matches an administrator user.

        Returns a tuple of the user's email address and list of user
        privileges (e.g. ('my@email', []) or ('my@email', ['admin']));
        raises a ValueError on login failure.

        If the user used an API key, the user's email is returned as None.
        """
        def decode(s):
            return base64.b64decode(s.encode('ascii')).decode('ascii')

        def parse_basic_auth(header):
            # Returns (None, None) unless the header is a well-formed
            # HTTP Basic Auth header.
            if " " not in header:
                return None, None
            scheme, credentials = header.split(maxsplit=1)
            if scheme != 'Basic':
                return None, None

            credentials = decode(credentials)
            if ":" not in credentials:
                return None, None
            username, password = credentials.split(':', maxsplit=1)
            return username, password

        header = request.headers.get('Authorization')
        if not header:
            raise ValueError("No authorization header provided.")

        username, password = parse_basic_auth(header)

        if username in (None, ""):
            raise ValueError("Authorization header invalid.")
        elif username == self.key:
            # The user passed the API key which grants administrative privs.
            return (None, ["admin"])
        else:
            # The user is trying to log in with a username and user-specific
            # API key or password. Raises or returns privs.
            return (username, self.get_user_credentials(username, password, env))

    def get_user_credentials(self, email, pw, env):
        # Validate a user's credentials. On success returns a list of
        # privileges (e.g. [] or ['admin']). On failure raises a ValueError
        # with a login error message.

        # Sanity check.
        if email == "" or pw == "":
            raise ValueError("Enter an email address and password.")

        # The password might be a user-specific API key. compare_digest is a
        # constant-time comparison, which avoids leaking key bytes via timing.
        if hmac.compare_digest(self.create_user_key(email), pw):
            # OK.
            pass
        else:
            # Get the hashed password of the user. Raises a ValueError if the
            # email address does not correspond to a user.
            pw_hash = get_mail_password(email, env)

            # Authenticate.
            try:
                # Use 'doveadm pw' to check credentials. doveadm will return
                # a non-zero exit status if the credentials are no good,
                # and check_call will raise an exception in that case.
                utils.shell('check_call', [
                    "/usr/bin/doveadm", "pw",
                    "-p", pw,
                    "-t", pw_hash,
                ])
            except Exception:
                # Login failed. (Was a bare 'except:', which would also have
                # swallowed KeyboardInterrupt/SystemExit.)
                raise ValueError("Invalid password.")

        # Get privileges for authorization.
        # (This call should never fail on a valid user. But if it did fail,
        # it would return a tuple of an error message and an HTTP status
        # code rather than a list.)
        privs = get_mail_user_privileges(email, env)
        if isinstance(privs, tuple):
            raise Exception("Error getting privileges.")

        # Return a list of privileges.
        return privs

    def create_user_key(self, email):
        # Derive a deterministic, user-specific API key from the service key.
        return hmac.new(self.key.encode('ascii'), b"AUTH:" + email.encode("utf8"), digestmod="sha1").hexdigest()

    def _generate_key(self):
        # 32 bytes of OS-provided randomness, base64-encoded (44 chars).
        raw_key = os.urandom(32)
        return base64.b64encode(raw_key).decode('ascii')

View File

@ -9,8 +9,15 @@
# backup/secret_key.txt) to STORAGE_ROOT/backup/encrypted. # backup/secret_key.txt) to STORAGE_ROOT/backup/encrypted.
# 5) STORAGE_ROOT/backup/after-backup is executed if it exists. # 5) STORAGE_ROOT/backup/after-backup is executed if it exists.
import os, os.path, shutil, glob, re, datetime import os
import dateutil.parser, dateutil.relativedelta, dateutil.tz import os.path
import shutil
import glob
import re
import datetime
import dateutil.parser
import dateutil.relativedelta
import dateutil.tz
from utils import exclusive_process, load_environment, shell from utils import exclusive_process, load_environment, shell
@ -18,195 +25,209 @@ from utils import exclusive_process, load_environment, shell
# that depends on it is this many days old. # that depends on it is this many days old.
keep_backups_for_days = 3 keep_backups_for_days = 3
def backup_status(env):
    """Summarize the current state of backups.

    Scans STORAGE_ROOT/backup/duplicity, groups the duplicity files into
    backup sets by date, sums their (and their encrypted copies') sizes,
    and annotates each set with when it will be deleted. Returns a dict
    with directory paths, the local timezone name, and the backup list
    sorted newest-first. Raises ValueError on an unrecognized filename.
    """
    now = datetime.datetime.now(dateutil.tz.tzlocal())

    def reldate(date, ref, clip):
        # Human-readable age of `date` relative to `ref`; `clip` is
        # returned when `date` is in the future relative to `ref`.
        if ref < date:
            return clip
        rd = dateutil.relativedelta.relativedelta(ref, date)
        if rd.months > 1:
            return "%d months, %d days" % (rd.months, rd.days)
        if rd.months == 1:
            return "%d month, %d days" % (rd.months, rd.days)
        if rd.days >= 7:
            return "%d days" % rd.days
        if rd.days > 1:
            return "%d days, %d hours" % (rd.days, rd.hours)
        if rd.days == 1:
            return "%d day, %d hours" % (rd.days, rd.hours)
        return "%d hours, %d minutes" % (rd.hours, rd.minutes)

    backups = {}
    basedir = os.path.join(env['STORAGE_ROOT'], 'backup/duplicity/')
    encdir = os.path.join(env['STORAGE_ROOT'], 'backup/encrypted/')
    # os.listdir fails if directory does not exist
    os.makedirs(basedir, exist_ok=True)
    for fn in os.listdir(basedir):
        m = re.match(r"duplicity-(full|full-signatures|(inc|new-signatures)\.(?P<incbase>\d+T\d+Z)\.to)\.(?P<date>\d+T\d+Z)\.", fn)
        if not m:
            raise ValueError(fn)

        key = m.group("date")
        if key not in backups:
            date = dateutil.parser.parse(m.group("date"))
            backups[key] = {
                "date": m.group("date"),
                "date_str": date.strftime("%x %X"),
                "date_delta": reldate(date, now, "the future?"),
                "full": m.group("incbase") is None,
                "previous": m.group("incbase"),
                "size": 0,
                "encsize": 0,
            }
        backups[key]["size"] += os.path.getsize(os.path.join(basedir, fn))

        # Also check encrypted size.
        encfn = os.path.join(encdir, fn + ".enc")
        if os.path.exists(encfn):
            backups[key]["encsize"] += os.path.getsize(encfn)

    # Ensure the rows are sorted reverse chronologically.
    # This is relied on by should_force_full() and the next step.
    backups = sorted(backups.values(), key=lambda b: b["date"], reverse=True)

    # When will a backup be deleted?
    saw_full = False
    deleted_in = None
    days_ago = now - datetime.timedelta(days=keep_backups_for_days)
    for bak in backups:
        if deleted_in:
            # Subsequent backups are deleted when the most recent increment
            # in the chain would be deleted.
            bak["deleted_in"] = deleted_in
        if bak["full"]:
            # Reset when we get to a full backup. A new chain starts next.
            saw_full = True
            deleted_in = None
        elif saw_full and not deleted_in:
            # Mark deleted_in only on the first increment after a full backup.
            deleted_in = reldate(days_ago, dateutil.parser.parse(bak["date"]), "on next daily backup")
            bak["deleted_in"] = deleted_in

    return {
        "directory": basedir,
        "encpwfile": os.path.join(env['STORAGE_ROOT'], 'backup/secret_key.txt'),
        "encdirectory": encdir,
        "tz": now.tzname(),
        "backups": backups,
    }
def should_force_full(env):
    """Decide whether the next backup should be a full backup.

    A full backup is forced when the total size of the increments since
    the most recent full backup exceeds half the size of that full
    backup, or when no full backup exists yet.
    """
    accumulated = 0
    # backup_status() returns backups newest-first, so increments are
    # seen before the full backup they build on.
    for entry in backup_status(env)["backups"]:
        if entry["full"]:
            # Reached the most recent full backup: compare the summed
            # increment sizes against half of it.
            return accumulated > .5 * entry["size"]
        # Still scanning increments; keep accumulating their sizes.
        accumulated += entry["size"]
    else:
        # No full backup exists at all, so make one.
        # (I love for/else blocks. Here it's just to show off.)
        return True
def perform_backup(full_backup):
    """Run a duplicity backup of STORAGE_ROOT, then encrypt the output.

    Stops dovecot/postfix while duplicity runs, prunes expired backups,
    encrypts new backup files with the secret key, removes stale
    encrypted files, and finally runs the optional 'after-backup' hook
    as STORAGE_USER. `full_backup` forces a full (vs incremental) run.
    """
    env = load_environment()

    # Only one backup may run at a time.
    exclusive_process("backup")

    # Ensure the backup directory exists.
    backup_dir = os.path.join(env["STORAGE_ROOT"], 'backup')
    backup_duplicity_dir = os.path.join(backup_dir, 'duplicity')
    os.makedirs(backup_duplicity_dir, exist_ok=True)

    # On the first run, always do a full backup. Incremental
    # will fail. Otherwise do a full backup when the size of
    # the increments since the most recent full backup are
    # large.
    full_backup = full_backup or should_force_full(env)

    # Stop services.
    shell('check_call', ["/usr/sbin/service", "dovecot", "stop"])
    shell('check_call', ["/usr/sbin/service", "postfix", "stop"])

    # Update the backup mirror directory which mirrors the current
    # STORAGE_ROOT (but excluding the backups themselves!).
    try:
        shell('check_call', [
            "/usr/bin/duplicity",
            "full" if full_backup else "incr",
            "--no-encryption",
            "--archive-dir", "/tmp/duplicity-archive-dir",
            "--name", "mailinabox",
            "--exclude", backup_dir,
            "--volsize", "100",
            "--verbosity", "warning",
            env["STORAGE_ROOT"],
            "file://" + backup_duplicity_dir
        ])
    finally:
        # Start services again even if the backup failed.
        shell('check_call', ["/usr/sbin/service", "dovecot", "start"])
        shell('check_call', ["/usr/sbin/service", "postfix", "start"])

    # Remove old backups. This deletes all backup data no longer needed
    # from more than `keep_backups_for_days` days ago. Must do this before
    # destroying the cache directory or else this command will re-create it.
    shell('check_call', [
        "/usr/bin/duplicity",
        "remove-older-than",
        "%dD" % keep_backups_for_days,
        "--archive-dir", "/tmp/duplicity-archive-dir",
        "--name", "mailinabox",
        "--force",
        "--verbosity", "warning",
        "file://" + backup_duplicity_dir
    ])

    # Remove duplicity's cache directory because it's redundant with our backup directory.
    shutil.rmtree("/tmp/duplicity-archive-dir")

    # Encrypt all of the new files.
    backup_encrypted_dir = os.path.join(backup_dir, 'encrypted')
    os.makedirs(backup_encrypted_dir, exist_ok=True)
    for fn in os.listdir(backup_duplicity_dir):
        fn2 = os.path.join(backup_encrypted_dir, fn) + ".enc"
        # Skip files already encrypted on a previous run.
        if os.path.exists(fn2):
            continue

        # Encrypt the backup using the backup private key.
        shell('check_call', [
            "/usr/bin/openssl",
            "enc",
            "-aes-256-cbc",
            "-a",
            "-salt",
            "-in", os.path.join(backup_duplicity_dir, fn),
            "-out", fn2,
            "-pass", "file:%s" % os.path.join(backup_dir, "secret_key.txt"),
        ])

        # The backup can be decrypted with:
        # openssl enc -d -aes-256-cbc -a -in latest.tgz.enc -out /dev/stdout -pass file:secret_key.txt | tar -z

    # Remove encrypted backups whose plaintext source no longer exists.
    for fn in os.listdir(backup_encrypted_dir):
        fn2 = os.path.join(backup_duplicity_dir, fn.replace(".enc", ""))
        if os.path.exists(fn2):
            continue
        os.unlink(os.path.join(backup_encrypted_dir, fn))

    # Execute a post-backup script that does the copying to a remote server.
    # Run as the STORAGE_USER user, not as root. Pass our settings in
    # environment variables so the script has access to STORAGE_ROOT.
    post_script = os.path.join(backup_dir, 'after-backup')
    if os.path.exists(post_script):
        shell('check_call',
              ['su', env['STORAGE_USER'], '-c', post_script],
              env=env)


if __name__ == "__main__":
    import sys
    full_backup = "--full" in sys.argv
    perform_backup(full_backup)

View File

@ -1,12 +1,16 @@
#!/usr/bin/python3 #!/usr/bin/python3
import os, os.path, re, json import os
import os.path
import re
import json
from functools import wraps from functools import wraps
from flask import Flask, request, render_template, abort, Response from flask import Flask, request, render_template, abort, Response
import auth, utils import auth
import utils
from mailconfig import get_mail_users, get_mail_users_ex, get_admins, add_mail_user, set_mail_password, remove_mail_user from mailconfig import get_mail_users, get_mail_users_ex, get_admins, add_mail_user, set_mail_password, remove_mail_user
from mailconfig import get_mail_user_privileges, add_remove_mail_user_privilege from mailconfig import get_mail_user_privileges, add_remove_mail_user_privilege
from mailconfig import get_mail_aliases, get_mail_aliases_ex, get_mail_domains, add_mail_alias, remove_mail_alias from mailconfig import get_mail_aliases, get_mail_aliases_ex, get_mail_domains, add_mail_alias, remove_mail_alias
@ -24,175 +28,192 @@ auth_service = auth.KeyAuthService()
# We may deploy via a symbolic link, which confuses flask's template finding.
me = __file__
try:
    me = os.readlink(__file__)
except OSError:
    # Not a symlink; keep __file__ as-is.
    pass

app = Flask(__name__, template_folder=os.path.abspath(os.path.join(os.path.dirname(me), "templates")))
# Decorator to protect views that require a user with 'admin' privileges.
def authorized_personnel_only(viewfunc):
    @wraps(viewfunc)
    def newview(*args, **kwargs):
        # Authenticate the passed credentials, which is either the API
        # key or a username:password pair.
        error = None
        try:
            email, privs = auth_service.authenticate(request, env)
        except ValueError as e:
            # Authentication failed.
            privs = []
            error = str(e)

        # Authorized to access an API view?
        if "admin" in privs:
            # Call view func.
            return viewfunc(*args, **kwargs)
        elif not error:
            error = "You are not an administrator."

        # Not authorized. Return a 401 (send auth) and a prompt to
        # authorize by default.
        status = 401
        headers = {
            'WWW-Authenticate': 'Basic realm="{0}"'.format(auth_service.auth_realm),
            'X-Reason': error,
        }

        if request.headers.get('X-Requested-With') == 'XMLHttpRequest':
            # Don't issue a 401 to an AJAX request because the user will
            # be prompted for credentials, which is not helpful.
            status = 403
            headers = None

        if request.headers.get('Accept') in (None, "", "*/*"):
            # Return plain text output.
            return Response(error+"\n", status=status, mimetype='text/plain', headers=headers)
        else:
            # Return JSON output.
            return Response(json.dumps({
                "status": "error",
                "reason": error,
            })+"\n", status=status, mimetype='application/json', headers=headers)

    return newview
@app.errorhandler(401)
def unauthorized(error):
    # Route all 401s through the auth service so the response carries
    # the proper authentication challenge.
    return auth_service.make_unauthorized_response()


def json_response(data):
    # Helper: serialize `data` to JSON with a 200 OK status.
    return Response(json.dumps(data), status=200, mimetype='application/json')
################################### ###################################
# Control Panel (unauthenticated views) # Control Panel (unauthenticated views)
@app.route('/')
def index():
    # Render the control panel. This route does not require user
    # authentication so it must be safe!
    no_admins_exist = (len(get_admins(env)) == 0)
    return render_template(
        'index.html',
        hostname=env['PRIMARY_HOSTNAME'],
        storage_root=env['STORAGE_ROOT'],
        no_admins_exist=no_admins_exist,
    )
@app.route('/me')
def me():
    # Is the caller authorized?
    try:
        email, privs = auth_service.authenticate(request, env)
    except ValueError as e:
        return json_response({
            "status": "invalid",
            "reason": str(e),
        })

    resp = {
        "status": "ok",
        "email": email,
        "privileges": privs,
    }

    # Is authorized as admin? Return an API key for future use.
    if "admin" in privs:
        resp["api_key"] = auth_service.create_user_key(email)

    # Return.
    return json_response(resp)
# MAIL

@app.route('/mail/users')
@authorized_personnel_only
def mail_users():
    # JSON listing (with archived/slow info) or plain newline list.
    if request.args.get("format", "") == "json":
        return json_response(get_mail_users_ex(env, with_archived=True, with_slow_info=True))
    else:
        return "".join(x+"\n" for x in get_mail_users(env))


@app.route('/mail/users/add', methods=['POST'])
@authorized_personnel_only
def mail_users_add():
    try:
        return add_mail_user(request.form.get('email', ''), request.form.get('password', ''), request.form.get('privileges', ''), env)
    except ValueError as e:
        # Validation failure becomes a 400 with the message.
        return (str(e), 400)


@app.route('/mail/users/password', methods=['POST'])
@authorized_personnel_only
def mail_users_password():
    try:
        return set_mail_password(request.form.get('email', ''), request.form.get('password', ''), env)
    except ValueError as e:
        # Validation failure becomes a 400 with the message.
        return (str(e), 400)


@app.route('/mail/users/remove', methods=['POST'])
@authorized_personnel_only
def mail_users_remove():
    return remove_mail_user(request.form.get('email', ''), env)
@app.route('/mail/users/privileges')
@authorized_personnel_only
def mail_user_privs():
    privs = get_mail_user_privileges(request.args.get('email', ''), env)
    # error
    if isinstance(privs, tuple):
        return privs
    return "\n".join(privs)


@app.route('/mail/users/privileges/add', methods=['POST'])
@authorized_personnel_only
def mail_user_privs_add():
    return add_remove_mail_user_privilege(request.form.get('email', ''), request.form.get('privilege', ''), "add", env)


@app.route('/mail/users/privileges/remove', methods=['POST'])
@authorized_personnel_only
def mail_user_privs_remove():
    return add_remove_mail_user_privilege(request.form.get('email', ''), request.form.get('privilege', ''), "remove", env)
@app.route('/mail/aliases')
@authorized_personnel_only
def mail_aliases():
    """List mail aliases: JSON when ?format=json, else tab-separated text."""
    if request.args.get("format", "") == "json":
        return json_response(get_mail_aliases_ex(env))
    rows = [x + "\t" + y + "\n" for x, y in get_mail_aliases(env)]
    return "".join(rows)
@app.route('/mail/aliases/add', methods=['POST'])
@authorized_personnel_only
def mail_aliases_add():
    """Create a mail alias from POSTed form fields; update an existing
    alias only when update_if_exists=1 is passed."""
    source = request.form.get('source', '')
    destination = request.form.get('destination', '')
    update = request.form.get('update_if_exists', '') == '1'
    return add_mail_alias(source, destination, env, update_if_exists=update)
@app.route('/mail/aliases/remove', methods=['POST'])
@authorized_personnel_only
def mail_aliases_remove():
    """Delete the alias named by the POSTed 'source' field."""
    source = request.form.get('source', '')
    return remove_mail_alias(source, env)
@app.route('/mail/domains') @app.route('/mail/domains')
@authorized_personnel_only @authorized_personnel_only
@ -201,172 +222,196 @@ def mail_domains():
# DNS # DNS
@app.route('/dns/zones')
@authorized_personnel_only
def dns_zones():
    """Return the list of DNS zone names managed by this box as JSON."""
    from dns_update import get_dns_zones
    zone_names = [zone[0] for zone in get_dns_zones(env)]
    return json_response(zone_names)
@app.route('/dns/update', methods=['POST'])
@authorized_personnel_only
def dns_update():
    """Regenerate DNS zone files; force=1 rewrites even when unchanged.

    Any failure is surfaced to the client as a 500 with the error text.
    """
    from dns_update import do_dns_update
    force = request.form.get('force', '') == '1'
    try:
        return do_dns_update(env, force=force)
    except Exception as e:
        return (str(e), 500)
@app.route('/dns/secondary-nameserver')
@authorized_personnel_only
def dns_get_secondary_nameserver():
    """Return the configured secondary nameserver hostname (may be null)."""
    from dns_update import get_custom_dns_config
    hostname = get_custom_dns_config(env).get("_secondary_nameserver")
    return json_response({"hostname": hostname})
@app.route('/dns/secondary-nameserver', methods=['POST'])
@authorized_personnel_only
def dns_set_secondary_nameserver():
    """Set (or clear) the secondary nameserver; 400 on an invalid hostname."""
    from dns_update import set_secondary_dns
    hostname = request.form.get('hostname')
    try:
        return set_secondary_dns(hostname, env)
    except ValueError as e:
        return (str(e), 400)
@app.route('/dns/set')
@authorized_personnel_only
def dns_get_records():
    """Return all custom DNS records as a JSON list of qname/rtype/value."""
    from dns_update import get_custom_dns_config, get_custom_records
    additional_records = get_custom_dns_config(env)
    records = get_custom_records(None, additional_records, env)
    # Each record is indexed positionally: qname, rtype, value.
    return json_response(
        [{"qname": r[0], "rtype": r[1], "value": r[2]} for r in records])
@app.route('/dns/set/<qname>', methods=['POST'])
@app.route('/dns/set/<qname>/<rtype>', methods=['POST'])
@app.route('/dns/set/<qname>/<rtype>/<value>', methods=['POST'])
@authorized_personnel_only
def dns_set_record(qname, rtype="A", value=None):
    """Set, update, or delete a custom DNS record.

    The value comes from the URL, else the POST body, else the client's
    own address (makes dynamic DNS easy). Passing '' or '__delete__'
    explicitly requests deletion. Returns 400 on validation errors.
    """
    from dns_update import do_dns_update, set_custom_dns_record
    try:
        if value is None:
            value = request.form.get("value")
        if value is None:
            # normally REMOTE_ADDR but we're behind nginx as a reverse proxy
            value = request.environ.get("HTTP_X_FORWARDED_FOR")
        if value in ('', '__delete__'):
            # request deletion
            value = None
        if set_custom_dns_record(qname, rtype, value, env):
            return do_dns_update(env)
        return "OK"
    except ValueError as e:
        return (str(e), 400)
@app.route('/dns/dump')
@authorized_personnel_only
def dns_get_dump():
    """Return the full recommended DNS configuration as JSON."""
    from dns_update import build_recommended_dns
    dump = build_recommended_dns(env)
    return json_response(dump)
# SSL # SSL
@app.route('/ssl/csr/<domain>', methods=['POST'])
@authorized_personnel_only
def ssl_get_csr(domain):
    """Generate a certificate signing request for the domain's private key."""
    from web_update import get_domain_ssl_files, create_csr
    # Only the key is needed here; the cert and its provenance are unused.
    ssl_key, ssl_certificate, ssl_via = get_domain_ssl_files(domain, env)
    return create_csr(domain, ssl_key, env)
@app.route('/ssl/install', methods=['POST'])
@authorized_personnel_only
def ssl_install_cert():
    """Install a POSTed PEM certificate (and optional chain) for a domain."""
    from web_update import install_cert
    form = request.form
    domain = form.get('domain')
    ssl_cert = form.get('cert')
    ssl_chain = form.get('chain')
    return install_cert(domain, ssl_cert, ssl_chain, env)
# WEB # WEB
@app.route('/web/domains')
@authorized_personnel_only
def web_get_domains():
    """Return info about every domain the web server is configured for."""
    from web_update import get_web_domains_info
    info = get_web_domains_info(env)
    return json_response(info)
@app.route('/web/update', methods=['POST'])
@authorized_personnel_only
def web_update():
    """Regenerate the nginx configuration and reload the web server."""
    from web_update import do_web_update
    return do_web_update(env)
# System # System
@app.route('/system/status', methods=["POST"])
@authorized_personnel_only
def system_status():
    """Run all system status checks and return the results as JSON."""
    from status_checks import run_checks

    class WebOutput:
        """Collects check results as a list of JSON-serializable dicts."""
        def __init__(self):
            self.items = []

        def _add(self, item_type, text):
            # Every top-level item carries an 'extra' list for detail lines.
            self.items.append({"type": item_type, "text": text, "extra": []})

        def add_heading(self, heading):
            self._add("heading", heading)

        def print_ok(self, message):
            self._add("ok", message)

        def print_error(self, message):
            self._add("error", message)

        def print_warning(self, message):
            self._add("warning", message)

        def print_line(self, message, monospace=False):
            # Detail lines attach to the most recently added item.
            self.items[-1]["extra"].append({"text": message, "monospace": monospace})

    output = WebOutput()
    run_checks(env, output, pool)
    return json_response(output.items)
@app.route('/system/updates')
@authorized_personnel_only
def show_updates():
    """List available apt package updates, one 'package (version)' per line."""
    from status_checks import list_apt_updates
    lines = ["%s (%s)\n" % (p["package"], p["version"])
             for p in list_apt_updates()]
    return "".join(lines)
@app.route('/system/update-packages', methods=["POST"])
@authorized_personnel_only
def do_updates():
    """Refresh the apt index and upgrade all packages, returning apt's output."""
    utils.shell("check_call", ["/usr/bin/apt-get", "-qq", "update"])
    # Non-interactive so apt never prompts while the daemon runs it.
    noninteractive = {"DEBIAN_FRONTEND": "noninteractive"}
    return utils.shell("check_output",
                       ["/usr/bin/apt-get", "-y", "upgrade"],
                       env=noninteractive)
@app.route('/system/backup/status')
@authorized_personnel_only
def backup_status():
    """Return the current backup status as JSON."""
    from backup import backup_status
    return json_response(backup_status(env))
# APP # APP
if __name__ == '__main__':
    # Environment overrides for local development/testing.
    if "DEBUG" in os.environ:
        app.debug = True
    if "APIKEY" in os.environ:
        auth_service.key = os.environ["APIKEY"]

    # In production, send warnings and errors to syslog.
    if not app.debug:
        app.logger.addHandler(utils.create_syslog_handler())

    # For testing on the command line, you can use `curl` like so:
    # curl --user $(</var/lib/mailinabox/api.key): http://localhost:10222/mail/users
    auth_service.write_key()

    # For testing in the browser, you can copy the API key that's output to the
    # debug console and enter that as the username
    app.logger.info('API key: ' + auth_service.key)

    # Start the application server. Listens on 127.0.0.1 (IPv4 only).
    app.run(port=10222)

File diff suppressed because it is too large Load Diff

View File

@ -1,121 +1,130 @@
#!/usr/bin/python3 #!/usr/bin/python3
import re, os.path import re
import os.path
import dateutil.parser import dateutil.parser
import mailconfig import mailconfig
import utils import utils
def scan_mail_log(logger, env):
    """Scan the system mail logs and report recent activity to `logger`.

    Reads /var/log/mail.log and its first rotation, aggregates per-service
    events into a collector dict via scan_mail_log_line, then reports
    recent IMAP logins, greylisted mail, rejected mail, and any
    unrecognized services that appeared in the log.
    """
    collector = {
        "other-services": set(),
        "imap-logins": {},
        "postgrey": {},
        "rejected-mail": {},
    }

    # Addresses that can actually receive mail here: users plus aliases.
    collector["real_mail_addresses"] = set(mailconfig.get_mail_users(env)) | set(alias[0] for alias in mailconfig.get_mail_aliases(env))

    for fn in ('/var/log/mail.log.1', '/var/log/mail.log'):
        if not os.path.exists(fn):
            continue
        with open(fn, 'rb') as log:
            for line in log:
                line = line.decode("utf8", errors='replace')
                scan_mail_log_line(line.strip(), collector)

    if collector["imap-logins"]:
        logger.add_heading("Recent IMAP Logins")
        # Message typo fixed: was "remote IP adddress is show."
        logger.print_block("The most recent login from each remote IP address is shown.")
        for k in utils.sort_email_addresses(collector["imap-logins"], env):
            for ip, date in sorted(collector["imap-logins"][k].items(), key=lambda kv: kv[1]):
                logger.print_line(k + "\t" + str(date) + "\t" + ip)

    if collector["postgrey"]:
        logger.add_heading("Greylisted Mail")
        logger.print_block("The following mail was greylisted, meaning the emails were temporarily rejected. Legitimate senders will try again within ten minutes.")
        logger.print_line("recipient" + "\t" + "received" + "\t" + "sender" + "\t" + "delivered")
        for recipient in utils.sort_email_addresses(collector["postgrey"], env):
            for (client_address, sender), (first_date, delivered_date) in sorted(collector["postgrey"][recipient].items(), key=lambda kv: kv[1][0]):
                logger.print_line(recipient + "\t" + str(first_date) + "\t" + sender + "\t" + (("delivered " + str(delivered_date)) if delivered_date else "no retry yet"))

    if collector["rejected-mail"]:
        logger.add_heading("Rejected Mail")
        logger.print_block("The following incoming mail was rejected.")
        for k in utils.sort_email_addresses(collector["rejected-mail"], env):
            for date, sender, message in collector["rejected-mail"][k]:
                logger.print_line(k + "\t" + str(date) + "\t" + sender + "\t" + message)

    if len(collector["other-services"]) > 0:
        logger.add_heading("Other")
        logger.print_block("Unrecognized services in the log: " + ", ".join(collector["other-services"]))
def scan_mail_log_line(line, collector):
    """Parse one syslog line and route it to the right per-service handler."""
    m = re.match(r"(\S+ \d+ \d+:\d+:\d+) (\S+) (\S+?)(\[\d+\])?: (.*)", line)
    if not m:
        return

    date, system, service, pid, log = m.groups()
    date = dateutil.parser.parse(date)

    # Services we actively inspect.
    handlers = {
        "dovecot": scan_dovecot_line,
        "postgrey": scan_postgrey_line,
        "postfix/smtpd": scan_postfix_smtpd_line,
    }
    # Routine services whose chatter we deliberately ignore.
    ignored = ("postfix/qmgr", "postfix/pickup", "postfix/cleanup",
               "postfix/scache", "spampd", "postfix/anvil",
               "postfix/master", "opendkim", "postfix/lmtp",
               "postfix/tlsmgr")

    if service in handlers:
        handlers[service](date, log, collector)
    elif service in ignored:
        pass
    else:
        # Remember anything we didn't recognize so it can be reported.
        collector["other-services"].add(service)
def scan_dovecot_line(date, log, collector):
    """Record the most recent IMAP login per user per remote IP address."""
    m = re.match("imap-login: Login: user=<(.*?)>, method=PLAIN, rip=(.*?),", log)
    if not m:
        return
    login, ip = m.groups()
    # Logins from localhost are webmail/z-push, not real remote clients.
    if ip == "127.0.0.1":
        return
    collector["imap-logins"].setdefault(login, {})[ip] = date
def scan_postgrey_line(date, log, collector):
    """Track greylisting: when mail was first deferred and when it was retried."""
    m = re.match("action=(greylist|pass), reason=(.*?), (?:delay=\d+, )?client_name=(.*), client_address=(.*), sender=(.*), recipient=(.*)", log)
    if not m:
        return
    action, reason, client_name, client_address, sender, recipient = m.groups()
    key = (client_address, sender)
    entries = collector["postgrey"]
    if action == "greylist" and reason == "new":
        # First sighting: record the deferral time, no delivery yet.
        entries.setdefault(recipient, {})[key] = (date, None)
    elif action == "pass" and reason == "triplet found" and key in entries.get(recipient, {}):
        # Sender retried: keep the original deferral time, note delivery time.
        entries[recipient][key] = (entries[recipient][key][0], date)
def scan_postfix_smtpd_line(date, log, collector):
    """Record incoming mail that postfix rejected, with simplified reasons."""
    m = re.match("NOQUEUE: reject: RCPT from .*?: (.*?); from=<(.*?)> to=<(.*?)>", log)
    if not m:
        return
    message, sender, recipient = m.groups()
    # only log mail to real recipients
    if recipient not in collector["real_mail_addresses"]:
        return
    # skip this, is reported in the greylisting report
    if "Recipient address rejected: Greylisted" in message:
        return
    # simplify spamhaus IP-block messages
    zen = re.search(r"Client host \[(.*?)\] blocked using zen.spamhaus.org; (.*)", message)
    if zen:
        message = "ip blocked: " + zen.group(2)
    # simplify spamhaus domain-block messages
    dbl = re.search(r"Sender address \[.*@(.*)\] blocked using dbl.spamhaus.org; (.*)", message)
    if dbl:
        message = "domain blocked: " + dbl.group(2)
    collector["rejected-mail"].setdefault(recipient, []).append((date, sender, message))
if __name__ == "__main__":
    # Command-line entry point: print the report to the console.
    from status_checks import ConsoleOutput
    env = utils.load_environment()
    scan_mail_log(ConsoleOutput(), env)

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -2,33 +2,39 @@ import os.path
CONF_DIR = os.path.join(os.path.dirname(__file__), "../conf") CONF_DIR = os.path.join(os.path.dirname(__file__), "../conf")
def load_environment():
    """Return the box's settings from /etc/mailinabox.conf as an ordered dict."""
    return load_env_vars_from_file("/etc/mailinabox.conf")
def load_env_vars_from_file(fn):
    """Load settings from a KEY=VALUE file into an OrderedDict.

    Each line is split on the first '=' only, so values may contain '='.
    When a key appears more than once, the first occurrence wins
    (setdefault). Fix: the file is now opened with a context manager so
    the handle is closed deterministically instead of leaking.
    """
    import collections
    env = collections.OrderedDict()
    with open(fn) as f:
        for line in f:
            env.setdefault(*line.strip().split("=", 1))
    return env
def save_environment(env):
    """Write the settings dict back to /etc/mailinabox.conf as KEY=VALUE lines."""
    lines = ["%s=%s\n" % (k, v) for k, v in env.items()]
    with open("/etc/mailinabox.conf", "w") as f:
        f.writelines(lines)
def safe_domain_name(name):
    """Sanitize a domain name so it is safe to use as a file name on disk."""
    # Percent-encode everything non-alphanumeric (safe='' even encodes '/').
    from urllib.parse import quote
    return quote(name, safe='')
def sort_domains(domain_names, env): def sort_domains(domain_names, env):
# Put domain names in a nice sorted order. For web_update, PRIMARY_HOSTNAME # Put domain names in a nice sorted order. For web_update, PRIMARY_HOSTNAME
# must appear first so it becomes the nginx default server. # must appear first so it becomes the nginx default server.
# First group PRIMARY_HOSTNAME and its subdomains, then parent domains of PRIMARY_HOSTNAME, then other domains. # First group PRIMARY_HOSTNAME and its subdomains, then parent domains of PRIMARY_HOSTNAME, then other domains.
groups = ( [], [], [] ) groups = ([], [], [])
for d in domain_names: for d in domain_names:
if d == env['PRIMARY_HOSTNAME'] or d.endswith("." + env['PRIMARY_HOSTNAME']): if d == env['PRIMARY_HOSTNAME'] or d.endswith("." + env['PRIMARY_HOSTNAME']):
groups[0].append(d) groups[0].append(d)
@ -44,13 +50,14 @@ def sort_domains(domain_names, env):
ret = [] ret = []
for d in top_domains: for d in top_domains:
ret.append(d) ret.append(d)
ret.extend( sort_group([s for s in group if s.endswith("." + d)]) ) ret.extend(sort_group([s for s in group if s.endswith("." + d)]))
return ret return ret
groups = [sort_group(g) for g in groups] groups = [sort_group(g) for g in groups]
return groups[0] + groups[1] + groups[2] return groups[0] + groups[1] + groups[2]
def sort_email_addresses(email_addresses, env): def sort_email_addresses(email_addresses, env):
email_addresses = set(email_addresses) email_addresses = set(email_addresses)
domains = set(email.split("@", 1)[1] for email in email_addresses if "@" in email) domains = set(email.split("@", 1)[1] for email in email_addresses if "@" in email)
@ -59,13 +66,17 @@ def sort_email_addresses(email_addresses, env):
domain_emails = set(email for email in email_addresses if email.endswith("@" + domain)) domain_emails = set(email for email in email_addresses if email.endswith("@" + domain))
ret.extend(sorted(domain_emails)) ret.extend(sorted(domain_emails))
email_addresses -= domain_emails email_addresses -= domain_emails
ret.extend(sorted(email_addresses)) # whatever is left # whatever is left
ret.extend(sorted(email_addresses))
return ret return ret
def exclusive_process(name): def exclusive_process(name):
# Ensure that a process named `name` does not execute multiple # Ensure that a process named `name` does not execute multiple
# times concurrently. # times concurrently.
import os, sys, atexit import os
import sys
import atexit
pidfile = '/var/run/mailinabox-%s.pid' % name pidfile = '/var/run/mailinabox-%s.pid' % name
mypid = os.getpid() mypid = os.getpid()
@ -95,7 +106,8 @@ def exclusive_process(name):
try: try:
existing_pid = int(f.read().strip()) existing_pid = int(f.read().strip())
except ValueError: except ValueError:
pass # No valid integer in the file. # No valid integer in the file.
pass
# Check if the pid in it is valid. # Check if the pid in it is valid.
if existing_pid: if existing_pid:
@ -108,7 +120,7 @@ def exclusive_process(name):
f.write(str(mypid)) f.write(str(mypid))
f.truncate() f.truncate()
atexit.register(clear_my_pid, pidfile) atexit.register(clear_my_pid, pidfile)
def clear_my_pid(pidfile): def clear_my_pid(pidfile):
import os import os
@ -118,26 +130,32 @@ def clear_my_pid(pidfile):
def is_pid_valid(pid):
    """Checks whether a pid is a valid process ID of a currently running process."""
    # adapted from http://stackoverflow.com/questions/568271/how-to-check-if-there-exists-a-process-with-a-given-pid
    import os
    import errno
    if pid <= 0:
        raise ValueError('Invalid PID.')
    try:
        # Signal 0 performs error checking only; it never signals the process.
        os.kill(pid, 0)
    except OSError as err:
        if err.errno == errno.ESRCH:
            return False   # no such process
        if err.errno == errno.EPERM:
            return True    # process exists, we just can't signal it
        raise              # EINVAL or other unexpected failure
    return True
def shell(method, cmd_args, env={}, capture_stderr=False, return_bytes=False, trap=False, input=None): def shell(method, cmd_args, env={}, capture_stderr=False, return_bytes=False, trap=False, input=None):
# A safe way to execute processes. # A safe way to execute processes.
# Some processes like apt-get require being given a sane PATH. # Some processes like apt-get require being given a sane PATH.
import subprocess import subprocess
env.update({ "PATH": "/sbin:/bin:/usr/sbin:/usr/bin" }) env.update({"PATH": "/sbin:/bin:/usr/sbin:/usr/bin"})
kwargs = { kwargs = {
'env': env, 'env': env,
'stderr': None if not capture_stderr else subprocess.STDOUT, 'stderr': None if not capture_stderr else subprocess.STDOUT,
@ -154,18 +172,21 @@ def shell(method, cmd_args, env={}, capture_stderr=False, return_bytes=False, tr
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
ret = e.output ret = e.output
code = e.returncode code = e.returncode
if not return_bytes and isinstance(ret, bytes): ret = ret.decode("utf8") if not return_bytes and isinstance(ret, bytes):
ret = ret.decode("utf8")
if not trap: if not trap:
return ret return ret
else: else:
return code, ret return code, ret
def create_syslog_handler():
    """Build a logging handler that forwards WARNING-and-above to local syslog."""
    import logging.handlers
    h = logging.handlers.SysLogHandler(address='/dev/log')
    h.setLevel(logging.WARNING)
    return h
def du(path): def du(path):
# Computes the size of all files in the path, like the `du` command. # Computes the size of all files in the path, like the `du` command.
# Based on http://stackoverflow.com/a/17936789. Takes into account # Based on http://stackoverflow.com/a/17936789. Takes into account

View File

@ -2,297 +2,313 @@
# domains for which a mail account has been set up. # domains for which a mail account has been set up.
######################################################################## ########################################################################
import os, os.path, shutil, re, tempfile, rtyaml import os
import os.path
import shutil
import re
import tempfile
import rtyaml
from mailconfig import get_mail_domains from mailconfig import get_mail_domains
from dns_update import get_custom_dns_config, do_dns_update from dns_update import get_custom_dns_config, do_dns_update
from utils import shell, safe_domain_name, sort_domains from utils import shell, safe_domain_name, sort_domains
def get_web_domains(env):
    """Return the sorted list of domains this box should serve websites for."""
    # Always serve the primary hostname for webmail and Z-Push
    # (Exchange ActiveSync).
    domains = {env['PRIMARY_HOSTNAME']}

    # Serve web for every mail domain too, for email auto-discover and an
    # optional static website. Each of these will need an SSL certificate.
    domains |= get_mail_domains(env)

    # Drop any domain whose custom DNS points it at a machine other than
    # this box: a non-"local" string value, any CNAME, or an A/AAAA record
    # whose value isn't "local".
    dns = get_custom_dns_config(env)
    for domain, value in dns.items():
        if domain not in domains:
            continue
        points_elsewhere = (
            (isinstance(value, str) and (value != "local"))
            or (isinstance(value, dict) and ("CNAME" in value))
            or (isinstance(value, dict) and ("A" in value) and (value["A"] != "local"))
            or (isinstance(value, dict) and ("AAAA" in value) and (value["AAAA"] != "local")))
        if points_elsewhere:
            domains.remove(domain)

    # Sort with PRIMARY_HOSTNAME first so it becomes nginx's default_server.
    return sort_domains(domains, env)
def do_web_update(env, ok_status="web updated\n"):
    """Regenerate the nginx configuration and reload nginx if it changed.

    Returns ok_status on success, or "" when the configuration was
    already up to date and nginx was not touched.
    """
    def read_template(relpath):
        # Read a bundled configuration template relative to this module,
        # closing the file handle promptly (the originals leaked handles).
        with open(os.path.join(os.path.dirname(__file__), relpath)) as f:
            return f.read()

    # Build an nginx configuration file.
    nginx_conf = read_template("../conf/nginx-top.conf")

    # Add configuration for each web domain.
    template1 = read_template("../conf/nginx.conf")
    template2 = read_template("../conf/nginx-primaryonly.conf")
    for domain in get_web_domains(env):
        nginx_conf += make_domain_config(domain, template1, template2, env)

    # Did the file change? If not, don't bother writing & restarting nginx.
    nginx_conf_fn = "/etc/nginx/conf.d/local.conf"
    if os.path.exists(nginx_conf_fn):
        with open(nginx_conf_fn) as f:
            if f.read() == nginx_conf:
                return ""

    # Save the file.
    with open(nginx_conf_fn, "w") as f:
        f.write(nginx_conf)

    # Kick nginx. Since this might be called from the web admin
    # don't do a 'restart'. That would kill the connection before
    # the API returns its response. A 'reload' should be good
    # enough and doesn't break any open connections.
    shell('check_call', ["/usr/sbin/service", "nginx", "reload"])

    return ok_status
def make_domain_config(domain, template, template_for_primaryhost, env):
    """Build the nginx server-block configuration text for one domain.

    `template` is the generic per-domain template; `template_for_primaryhost`
    is extra configuration inserted only for PRIMARY_HOSTNAME. Returns the
    assembled configuration as a string.
    """
    # Where will its root directory be for static files?
    root = get_web_root(domain, env)

    # What private key and SSL certificate will we use for this domain?
    ssl_key, ssl_certificate, ssl_via = get_domain_ssl_files(domain, env)

    # For hostnames created after the initial setup, ensure we have an SSL certificate
    # available. Make a self-signed one now if one doesn't exist.
    ensure_ssl_certificate_exists(domain, ssl_key, ssl_certificate, env)

    # Put pieces together. Raw string: "\s" is an invalid escape in a
    # plain string literal.
    nginx_conf_parts = re.split(r"\s*# ADDITIONAL DIRECTIVES HERE\s*", template)
    nginx_conf = nginx_conf_parts[0] + "\n"
    if domain == env['PRIMARY_HOSTNAME']:
        nginx_conf += template_for_primaryhost + "\n"

    # Replace substitution strings in the template & return.
    nginx_conf = nginx_conf.replace("$STORAGE_ROOT", env['STORAGE_ROOT'])
    nginx_conf = nginx_conf.replace("$HOSTNAME", domain.encode("idna").decode("ascii"))
    nginx_conf = nginx_conf.replace("$ROOT", root)
    nginx_conf = nginx_conf.replace("$SSL_KEY", ssl_key)
    nginx_conf = nginx_conf.replace("$SSL_CERTIFICATE", ssl_certificate)

    # Because the certificate may change, we should recognize this so we
    # can trigger an nginx update.
    def hashfile(filepath):
        # SHA-1 hex digest of a file's contents; 'with' closes the handle.
        import hashlib
        sha1 = hashlib.sha1()
        with open(filepath, 'rb') as f:
            sha1.update(f.read())
        return sha1.hexdigest()
    nginx_conf += "# ssl files sha1: %s / %s\n" % (hashfile(ssl_key), hashfile(ssl_certificate))

    # Add in any user customizations in YAML format.
    nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
    if os.path.exists(nginx_conf_custom_fn):
        with open(nginx_conf_custom_fn) as f:
            yaml = rtyaml.load(f)
        if domain in yaml:
            yaml = yaml[domain]
            for path, url in yaml.get("proxies", {}).items():
                nginx_conf += "\tlocation %s {\n\t\tproxy_pass %s;\n\t}\n" % (path, url)
            for path, url in yaml.get("redirects", {}).items():
                nginx_conf += "\trewrite %s %s permanent;\n" % (path, url)

    # Add in any user customizations in the includes/ folder.
    nginx_conf_custom_include = os.path.join(env["STORAGE_ROOT"], "www", safe_domain_name(domain) + ".conf")
    if os.path.exists(nginx_conf_custom_include):
        nginx_conf += "\tinclude %s;\n" % (nginx_conf_custom_include)

    # Ending.
    nginx_conf += nginx_conf_parts[1]

    return nginx_conf
def get_web_root(domain, env, test_exists=True):
    """Return the static-content directory for *domain*.

    Prefers STORAGE_ROOT/www/<domain>; falls back to STORAGE_ROOT/www/default
    when the per-domain directory does not exist (unless test_exists is False,
    in which case the per-domain path is returned unconditionally).
    """
    for candidate in (domain, 'default'):
        root = os.path.join(env["STORAGE_ROOT"], "www", safe_domain_name(candidate))
        if not test_exists or os.path.exists(root):
            break
    return root
def get_domain_ssl_files(domain, env, allow_shared_cert=True):
    """Choose the SSL private key and certificate paths for *domain*.

    Returns (ssl_key, ssl_certificate, ssl_via) where ssl_via is a human-
    readable note when a shared/parent certificate is being reused, else None.
    """
    # What SSL private key will we use? Allow the user to override this, but
    # in many cases using the same private key for all domains would be fine.
    # Don't allow the user to override the key for PRIMARY_HOSTNAME because
    # that's what's in the main file.
    ssl_key = os.path.join(env["STORAGE_ROOT"], 'ssl/ssl_private_key.pem')
    ssl_key_is_alt = False
    alt_key = os.path.join(env["STORAGE_ROOT"], 'ssl/%s/private_key.pem' % safe_domain_name(domain))
    if domain != env['PRIMARY_HOSTNAME'] and os.path.exists(alt_key):
        ssl_key = alt_key
        ssl_key_is_alt = True

    # What SSL certificate will we use?
    ssl_certificate_primary = os.path.join(env["STORAGE_ROOT"], 'ssl/ssl_certificate.pem')
    ssl_via = None
    if domain == env['PRIMARY_HOSTNAME']:
        # For PRIMARY_HOSTNAME, use the one we generated at set-up time.
        ssl_certificate = ssl_certificate_primary
    else:
        # For other domains, we'll probably use a certificate in a different path.
        ssl_certificate = os.path.join(env["STORAGE_ROOT"], 'ssl/%s/ssl_certificate.pem' % safe_domain_name(domain))

        # But we can be smart and reuse the main SSL certificate if is has
        # a Subject Alternative Name matching this domain. Don't do this if
        # the user has uploaded a different private key for this domain.
        if not ssl_key_is_alt and allow_shared_cert:
            from status_checks import check_certificate
            if check_certificate(domain, ssl_certificate_primary, None)[0] == "OK":
                ssl_certificate = ssl_certificate_primary
                ssl_via = "Using multi/wildcard certificate of %s." % env['PRIMARY_HOSTNAME']

            # For a 'www.' domain, see if we can reuse the cert of the parent.
            elif domain.startswith('www.'):
                ssl_certificate_parent = os.path.join(env["STORAGE_ROOT"], 'ssl/%s/ssl_certificate.pem' % safe_domain_name(domain[4:]))
                if os.path.exists(ssl_certificate_parent) and check_certificate(domain, ssl_certificate_parent, None)[0] == "OK":
                    ssl_certificate = ssl_certificate_parent
                    ssl_via = "Using multi/wildcard certificate of %s." % domain[4:]

    return ssl_key, ssl_certificate, ssl_via
def ensure_ssl_certificate_exists(domain, ssl_key, ssl_certificate, env):
    """Create a self-signed certificate at *ssl_certificate* if none exists.

    No-op for PRIMARY_HOSTNAME (its certificate is made at setup time), for
    the shared primary certificate path, and when the file already exists.
    """
    # For domains besides PRIMARY_HOSTNAME, generate a self-signed certificate if
    # a certificate doesn't already exist. See setup/mail.sh for documentation.
    if domain == env['PRIMARY_HOSTNAME']:
        return

    # Sanity check. Shouldn't happen. A non-primary domain might use this
    # certificate (see above), but then the certificate should exist anyway.
    if ssl_certificate == os.path.join(env["STORAGE_ROOT"], 'ssl/ssl_certificate.pem'):
        return

    if os.path.exists(ssl_certificate):
        return

    os.makedirs(os.path.dirname(ssl_certificate), exist_ok=True)

    # Generate a new self-signed certificate using the same private key that we already have.

    # Start with a CSR written to a temporary file.
    with tempfile.NamedTemporaryFile(mode="w") as csr_fp:
        csr_fp.write(create_csr(domain, ssl_key, env))
        # since we won't close until after running 'openssl x509', since close triggers delete.
        csr_fp.flush()

        # And then make the certificate.
        shell("check_call", [
            "openssl", "x509", "-req",
            "-days", "365",
            "-in", csr_fp.name,
            "-signkey", ssl_key,
            "-out", ssl_certificate])
def create_csr(domain, ssl_key, env):
    """Return a certificate signing request (PEM text) for *domain*.

    Runs `openssl req` with the given private key; the CN is the
    IDNA-encoded domain and the country comes from env["CSR_COUNTRY"].
    """
    return shell("check_output", [
        "openssl", "req", "-new",
        "-key", ssl_key,
        "-out", "/dev/stdout",
        "-sha256",
        "-subj", "/C=%s/ST=/L=/O=/CN=%s" % (env["CSR_COUNTRY"], domain.encode("idna").decode("ascii"))])
def install_cert(domain, ssl_cert, ssl_chain, env):
    """Validate and install a user-supplied certificate (+chain) for *domain*.

    Returns an error message string on failure, otherwise a newline-joined
    summary of the actions taken (DNS update, service restarts, web update).
    """
    if domain not in get_web_domains(env):
        return "Invalid domain name."

    # Write the combined cert+chain to a temporary path and validate that it is OK.
    # The certificate always goes above the chain.
    import tempfile
    import os
    fd, fn = tempfile.mkstemp('.pem')
    os.write(fd, (ssl_cert + '\n' + ssl_chain).encode("ascii"))
    os.close(fd)

    # Do validation on the certificate before installing it.
    from status_checks import check_certificate
    ssl_key, ssl_certificate, ssl_via = get_domain_ssl_files(domain, env, allow_shared_cert=False)
    cert_status, cert_status_details = check_certificate(domain, fn, ssl_key)
    if cert_status != "OK":
        if cert_status == "SELF-SIGNED":
            cert_status = "This is a self-signed certificate. I can't install that."
        # Remove the temporary file on the failure path; on success it is
        # moved into place below instead.
        os.unlink(fn)
        if cert_status_details is not None:
            cert_status += " " + cert_status_details
        return cert_status

    # Copy the certificate to its expected location.
    os.makedirs(os.path.dirname(ssl_certificate), exist_ok=True)
    shutil.move(fn, ssl_certificate)

    ret = []

    # When updating the cert for PRIMARY_HOSTNAME, also update DNS because it is
    # used in the DANE TLSA record and restart postfix and dovecot which use
    # that certificate.
    if domain == env['PRIMARY_HOSTNAME']:
        ret.append(do_dns_update(env))

        shell('check_call', ["/usr/sbin/service", "postfix", "restart"])
        shell('check_call', ["/usr/sbin/service", "dovecot", "restart"])
        ret.append("mail services restarted")

    # Kick nginx so it sees the cert.
    ret.append(do_web_update(env, ok_status=""))

    return "\n".join(r for r in ret if r.strip() != "")
def get_web_domains_info(env):
    """Return a list of per-domain dicts describing web hosting status.

    Each entry has the domain, its (effective and configured) web roots,
    an SSL status tuple of (css-class, message) for the admin panel, and
    whether static hosting is enabled (i.e. '/' is not proxied/redirected).
    """
    # load custom settings so we can tell what domains have a redirect or proxy set up on '/',
    # which means static hosting is not happening
    custom_settings = {}
    nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
    if os.path.exists(nginx_conf_custom_fn):
        custom_settings = rtyaml.load(open(nginx_conf_custom_fn))

    def has_root_proxy_or_redirect(domain):
        # Truthy when the user configured a redirect or proxy on '/'.
        return custom_settings.get(domain, {}).get('redirects', {}).get('/') or custom_settings.get(domain, {}).get('proxies', {}).get('/')

    # for the SSL config panel, get cert status
    def check_cert(domain):
        # Returns a (bootstrap-style severity, message) tuple.
        from status_checks import check_certificate
        ssl_key, ssl_certificate, ssl_via = get_domain_ssl_files(domain, env)
        if not os.path.exists(ssl_certificate):
            return ("danger", "No Certificate Installed")
        cert_status, cert_status_details = check_certificate(domain, ssl_certificate, ssl_key)
        if cert_status == "OK":
            if not ssl_via:
                return ("success", "Signed & valid. " + cert_status_details)
            else:
                # This is an alternate domain but using the same cert as the primary domain.
                return ("success", "Signed & valid. " + ssl_via)
        elif cert_status == "SELF-SIGNED":
            return ("warning", "Self-signed. Get a signed certificate to stop warnings.")
        else:
            return ("danger", "Certificate has a problem: " + cert_status)

    return [
        {
            "domain": domain,
            "root": get_web_root(domain, env),
            "custom_root": get_web_root(domain, env, test_exists=False),
            "ssl_certificate": check_cert(domain),
            "static_enabled": not has_root_proxy_or_redirect(domain),
        }
        for domain in get_web_domains(env)
    ]

View File

@ -5,134 +5,150 @@
# We have to be careful here that any dependencies are already installed in the previous # We have to be careful here that any dependencies are already installed in the previous
# version since this script runs before all other aspects of the setup script. # version since this script runs before all other aspects of the setup script.
import sys, os, os.path, glob, re, shutil import sys
import os
import os.path
import glob
import re
import shutil
sys.path.insert(0, 'management') sys.path.insert(0, 'management')
from utils import load_environment, save_environment, shell from utils import load_environment, save_environment, shell
def migration_1(env):
    """Re-arrange where we store SSL certificates. There was a typo also."""

    def move_file(fn, domain_name_escaped, file_type):
        # Moves an SSL-related file into the right place.
        # Note: the parameter is used here; previously it was named
        # `filename` and silently shadowed by the loop's `file_type`.
        fn1 = os.path.join(env["STORAGE_ROOT"], 'ssl', domain_name_escaped, file_type)
        os.makedirs(os.path.dirname(fn1), exist_ok=True)
        shutil.move(fn, fn1)

    # Migrate the 'domains' directory.
    for sslfn in glob.glob(os.path.join(env["STORAGE_ROOT"], 'ssl/domains/*')):
        fn = os.path.basename(sslfn)
        m = re.match(r"(.*)_(certifiate.pem|cert_sign_req.csr|private_key.pem)$", fn)
        if m:
            # get the new name for the file
            domain_name, file_type = m.groups()
            # typo in the original filename
            if file_type == "certifiate.pem":
                file_type = "ssl_certificate.pem"
            # nicer name
            if file_type == "cert_sign_req.csr":
                file_type = "certificate_signing_request.csr"
            move_file(sslfn, domain_name, file_type)

    # Move the old domains directory if it is now empty.
    try:
        os.rmdir(os.path.join(env["STORAGE_ROOT"], 'ssl/domains'))
    except OSError:
        # Directory not empty (or already gone) — leave it alone.
        pass
def migration_2(env):
    """Delete per-mailbox .dovecot.sieve scripts (and compiled .svbin forms).

    These were formerly copies of our spam -> Spam script; it is now installed
    as a global script and managesieve is used, so the old files are irrelevant.
    """
    mailbox_pattern = os.path.join(env["STORAGE_ROOT"], 'mail/mailboxes/*/*')
    for leftover in ('.dovecot.sieve', '.dovecot.svbin'):
        for path in glob.glob(os.path.join(mailbox_pattern, leftover)):
            os.unlink(path)
def migration_3(env):
    """Intentional no-op.

    The migration ID moved from /etc/mailinabox.conf to
    $STORAGE_ROOT/mailinabox.version so that the ID stays with the data
    files whose format it describes; writing that file is handled by the
    main function, so there is nothing to do here.
    """
def migration_4(env):
    """Add a 'privileges' column to the mail users table.

    This is where administrative privileges are stored; existing rows get
    an empty string (no privileges).
    """
    db = os.path.join(env["STORAGE_ROOT"], 'mail/users.sqlite')
    shell("check_call", ["sqlite3", db, "ALTER TABLE users ADD privileges TEXT NOT NULL DEFAULT ''"])
def migration_5(env):
    """Restrict the backup secret key to owner-only access.

    The key was previously created world-readable; tighten it to 0600.
    """
    secret_key_path = os.path.join(env["STORAGE_ROOT"], 'backup/secret_key.txt')
    os.chmod(secret_key_path, 0o600)
def migration_6(env):
    """Rename the DNSSEC keys.conf file to be algorithm-specific.

    We now generate multiple DNSSEC keys for different algorithms, since
    TLDs may not support them all (.email only supports RSA/SHA-256).
    """
    dnssec_dir = os.path.join(env["STORAGE_ROOT"], 'dns/dnssec')
    old_path = os.path.join(dnssec_dir, 'keys.conf')
    new_path = os.path.join(dnssec_dir, 'RSASHA1-NSEC3-SHA1.conf')
    shutil.move(old_path, new_path)
def get_current_migration():
    """Return the number of the highest migration defined in this module.

    Migrations are module-level functions named ``migration_<N>``; the
    result is the largest consecutive N for which one exists (0 if none).
    """
    version = 0
    while ("migration_%d" % (version + 1)) in globals():
        version += 1
    return version
def run_migrations():
    """Run all pending migrations, recording progress after each one.

    Reads the current migration number from $STORAGE_ROOT/mailinabox.version
    (falling back to the legacy MIGRATIONID environment field), then runs
    each successive migration_<N> function. Exits the process on permission
    or migration failure.
    """
    if not os.access("/etc/mailinabox.conf", os.W_OK, effective_ids=True):
        print("This script must be run as root.", file=sys.stderr)
        sys.exit(1)

    env = load_environment()

    migration_id_file = os.path.join(env['STORAGE_ROOT'], 'mailinabox.version')
    if os.path.exists(migration_id_file):
        with open(migration_id_file) as f:
            ourver = int(f.read().strip())
    else:
        # Load the legacy location of the migration ID. We'll drop support
        # for this eventually.
        ourver = int(env.get("MIGRATIONID", "0"))

    while True:
        next_ver = (ourver + 1)
        migration_func = globals().get("migration_%d" % next_ver)

        if not migration_func:
            # No more migrations to run.
            break

        print()
        print("Running migration to Mail-in-a-Box #%d..." % next_ver)

        try:
            migration_func(env)
        except Exception as e:
            print()
            print("Error running the migration script:")
            print()
            print(e)
            print()
            print("Your system may be in an inconsistent state now. We're terribly sorry. A re-install from a backup might be the best way to continue.")
            sys.exit(1)

        ourver = next_ver

        # Write out our current version now. Do this sooner rather than later
        # in case of any problems.
        with open(migration_id_file, "w") as f:
            f.write(str(ourver) + "\n")

        # Delete the legacy location of this field.
        if "MIGRATIONID" in env:
            del env["MIGRATIONID"]
            save_environment(env)

        # iterate and try next version...
# Command-line entry point: report the latest known migration number or
# perform all pending migrations, depending on the last argument.
if __name__ == "__main__":
    if sys.argv[-1] == "--current":
        # Return the number of the highest migration.
        print(str(get_current_migration()))
    elif sys.argv[-1] == "--migrate":
        # Perform migrations.
        run_migrations()

View File

@ -7,100 +7,110 @@
# where ipaddr is the IP address of your Mail-in-a-Box # where ipaddr is the IP address of your Mail-in-a-Box
# and hostname is the domain name to check the DNS for. # and hostname is the domain name to check the DNS for.
import sys, re, difflib import sys
import dns.reversename, dns.resolver import re
import difflib
import dns.reversename
import dns.resolver
# Parse command-line arguments: the box's IP address, the hostname to
# check, and an optional primary hostname (defaults to the hostname).
if len(sys.argv) < 3:
    print("Usage: tests/dns.py ipaddress hostname [primary hostname]")
    sys.exit(1)

ipaddr, hostname = sys.argv[1:3]
primary_hostname = hostname
if len(sys.argv) == 4:
    primary_hostname = sys.argv[3]
def test(server, description):
    """Check the standard DNS records for `hostname` against *server*.

    Builds the expected (qname, rtype, answer) triples from the
    module-level ipaddr/hostname/primary_hostname and delegates to test2.
    """
    tests = [
        (hostname, "A", ipaddr),
        #(hostname, "NS", "ns1.%s.;ns2.%s." % (primary_hostname, primary_hostname)),
        ("ns1." + primary_hostname, "A", ipaddr),
        ("ns2." + primary_hostname, "A", ipaddr),
        ("www." + hostname, "A", ipaddr),
        (hostname, "MX", "10 " + primary_hostname + "."),
        (hostname, "TXT", "\"v=spf1 mx -all\""),
        ("mail._domainkey." + hostname, "TXT", "\"v=DKIM1; k=rsa; s=email; \" \"p=__KEY__\""),
        #("_adsp._domainkey." + hostname, "TXT", "\"dkim=all\""),
        ("_dmarc." + hostname, "TXT", "\"v=DMARC1; p=quarantine\""),
    ]
    return test2(tests, server, description)
def test_ptr(server, description):
    """Check the reverse-DNS (PTR) record for `ipaddr` against *server*."""
    ipaddr_rev = dns.reversename.from_address(ipaddr)
    tests = [
        (ipaddr_rev, "PTR", hostname+'.'),
    ]
    return test2(tests, server, description)
def test2(tests, server, description):
    """Run a list of (qname, rtype, expected) DNS checks against *server*.

    Prints a tabular report of every mismatching record and returns True
    only when all answers matched (i.e. no report header was printed).
    """
    all_ok = True
    resolver = dns.resolver.get_default_resolver()
    resolver.nameservers = [server]
    for qname, rtype, expected_answer in tests:
        # Perform the query and flatten the answer set into one string.
        try:
            answers = dns.resolver.query(qname, rtype)
        except dns.resolver.NoNameservers:
            # The server itself was unreachable; abort entirely.
            print("Could not connect to %s for DNS query." % server)
            sys.exit(1)
        except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer):
            # No record of this name/type; note it and compare anyway.
            answers = ["[no value]"]
        response = ";".join(str(a) for a in answers)
        # Normalize the DKIM public key so any valid key compares equal.
        response = re.sub(r"(\"p=).*(\")", r"\1__KEY__\2", response)
        # Normalize TXT records (DNSSEC signing inserts empty text
        # string components).
        response = response.replace("\"\" ", "")
        if response == expected_answer:
            continue
        # First mismatch: emit the report header once.
        if all_ok:
            print("Incorrect DNS Response from", description)
            print()
            print("QUERY ", "RESPONSE ", "CORRECT VALUE", sep='\t')
            all_ok = False
        print((qname + "/" + rtype).ljust(20), response.ljust(12), expected_answer, sep='\t')
    # True means success (no mismatches were reported).
    return all_ok
# Run the checks in order of increasing distance from the box, stopping
# at the first failing layer with advice on what to fix.

# 1. Test the response from the machine itself.
if not test(ipaddr, "Mail-in-a-Box"):
    print()
    print("Please run the Mail-in-a-Box setup script on %s again." % hostname)
    sys.exit(1)
print("The Mail-in-a-Box provided correct DNS answers.")
print()

# 2. If those settings are OK, also test Google's Public DNS
# to see if the machine is hooked up to recursive DNS properly.
if not test("8.8.8.8", "Google Public DNS"):
    print()
    print("Check that the nameserver settings for %s are correct at your domain registrar. It may take a few hours for Google Public DNS to update after changes on your Mail-in-a-Box." % hostname)
    sys.exit(1)
print("Your domain registrar or DNS host appears to be configured correctly as well. Public DNS provides the same answers.")
print()

# 3. And if that's OK, also check reverse DNS (the PTR record).
if not test_ptr("8.8.8.8", "Google Public DNS (Reverse DNS)"):
    print()
    print("The reverse DNS for %s is not correct. Consult your ISP for how to set the reverse DNS (also called the PTR record) for %s to %s." % (hostname, hostname, ipaddr))
    sys.exit(1)
print("And the reverse DNS for the domain is correct.")
print()

print("DNS is OK.")

View File

@ -1,28 +1,34 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# Tests sending and receiving mail by sending a test message to yourself. # Tests sending and receiving mail by sending a test message to yourself.
import sys, imaplib, smtplib, uuid, time import sys
import socket, dns.reversename, dns.resolver import imaplib
import smtplib
import uuid
import time
import socket
import dns.reversename
import dns.resolver
if len(sys.argv) < 3: if len(sys.argv) < 3:
print("Usage: tests/mail.py hostname emailaddress password") print("Usage: tests/mail.py hostname emailaddress password")
sys.exit(1) sys.exit(1)
host, emailaddress, pw = sys.argv[1:4] host, emailaddress, pw = sys.argv[1:4]
# Attempt to login with IMAP. Our setup uses email addresses # Attempt to login with IMAP. Our setup uses email addresses
# as IMAP/SMTP usernames. # as IMAP/SMTP usernames.
try: try:
M = imaplib.IMAP4_SSL(host) M = imaplib.IMAP4_SSL(host)
M.login(emailaddress, pw) M.login(emailaddress, pw)
except OSError as e: except OSError as e:
print("Connection error:", e) print("Connection error:", e)
sys.exit(1) sys.exit(1)
except imaplib.IMAP4.error as e: except imaplib.IMAP4.error as e:
# any sort of login error # any sort of login error
e = ", ".join(a.decode("utf8") for a in e.args) e = ", ".join(a.decode("utf8") for a in e.args)
print("IMAP error:", e) print("IMAP error:", e)
sys.exit(1) sys.exit(1)
M.select() M.select()
print("IMAP login is OK.") print("IMAP login is OK.")
@ -35,10 +41,10 @@ To: {emailto}
Subject: {subject} Subject: {subject}
This is a test message. It should be automatically deleted by the test script.""".format( This is a test message. It should be automatically deleted by the test script.""".format(
emailaddress=emailaddress, emailaddress=emailaddress,
emailto=emailto, emailto=emailto,
subject=mailsubject, subject=mailsubject,
) )
# Connect to the server on the SMTP submission TLS port. # Connect to the server on the SMTP submission TLS port.
server = smtplib.SMTP(host, 587) server = smtplib.SMTP(host, 587)
@ -46,20 +52,21 @@ server = smtplib.SMTP(host, 587)
server.starttls() server.starttls()
# Verify that the EHLO name matches the server's reverse DNS. # Verify that the EHLO name matches the server's reverse DNS.
ipaddr = socket.gethostbyname(host) # IPv4 only! ipaddr = socket.gethostbyname(host) # IPv4 only!
reverse_ip = dns.reversename.from_address(ipaddr) # e.g. "1.0.0.127.in-addr.arpa." reverse_ip = dns.reversename.from_address(ipaddr) # e.g. "1.0.0.127.in-addr.arpa."
try: try:
reverse_dns = dns.resolver.query(reverse_ip, 'PTR')[0].target.to_text(omit_final_dot=True) # => hostname reverse_dns = dns.resolver.query(reverse_ip, 'PTR')[0].target.to_text(omit_final_dot=True) # => hostname
except dns.resolver.NXDOMAIN: except dns.resolver.NXDOMAIN:
print("Reverse DNS lookup failed for %s. SMTP EHLO name check skipped." % ipaddr) print("Reverse DNS lookup failed for %s. SMTP EHLO name check skipped." % ipaddr)
reverse_dns = None reverse_dns = None
if reverse_dns is not None: if reverse_dns is not None:
server.ehlo_or_helo_if_needed() # must send EHLO before getting the server's EHLO name server.ehlo_or_helo_if_needed() # must send EHLO before getting the server's EHLO name
helo_name = server.ehlo_resp.decode("utf8").split("\n")[0] # first line is the EHLO name helo_name = server.ehlo_resp.decode("utf8").split("\n")[0] # first line is the EHLO name
if helo_name != reverse_dns: if helo_name != reverse_dns:
print("The server's EHLO name does not match its reverse hostname. Check DNS settings.") print("The server's EHLO name does not match its reverse hostname. Check DNS settings.")
else: else:
print("SMTP EHLO name (%s) is OK." % helo_name) print("SMTP EHLO name (%s) is OK." % helo_name)
# Login and send a test email. # Login and send a test email.
server.login(emailaddress, pw) server.login(emailaddress, pw)
@ -68,40 +75,40 @@ server.quit()
print("SMTP submission is OK.") print("SMTP submission is OK.")
while True: while True:
# Wait so the message can propagate to the inbox. # Wait so the message can propagate to the inbox.
time.sleep(10) time.sleep(10)
# Read the subject lines of all of the emails in the inbox # Read the subject lines of all of the emails in the inbox
# to find our test message, and then delete it. # to find our test message, and then delete it.
found = False found = False
typ, data = M.search(None, 'ALL') typ, data = M.search(None, 'ALL')
for num in data[0].split(): for num in data[0].split():
typ, data = M.fetch(num, '(BODY[HEADER.FIELDS (SUBJECT)])') typ, data = M.fetch(num, '(BODY[HEADER.FIELDS (SUBJECT)])')
imapsubjectline = data[0][1].strip().decode("utf8") imapsubjectline = data[0][1].strip().decode("utf8")
if imapsubjectline == "Subject: " + mailsubject: if imapsubjectline == "Subject: " + mailsubject:
# We found our test message. # We found our test message.
found = True found = True
# To test DKIM, download the whole message body. Unfortunately,
# pydkim doesn't actually work. # pydkim doesn't actually work.
# You must 'sudo apt-get install python3-dkim python3-dnspython' first. # You must 'sudo apt-get install python3-dkim python3-dnspython' first.
#typ, msgdata = M.fetch(num, '(RFC822)') #typ, msgdata = M.fetch(num, '(RFC822)')
#msg = msgdata[0][1] #msg = msgdata[0][1]
#if dkim.verify(msg): #if dkim.verify(msg):
# print("DKIM signature on the test message is OK (verified).") # print("DKIM signature on the test message is OK (verified).")
#else: #else:
# print("DKIM signature on the test message failed verification.") # print("DKIM signature on the test message failed verification.")
# Delete the test message. # Delete the test message.
M.store(num, '+FLAGS', '\\Deleted') M.store(num, '+FLAGS', '\\Deleted')
M.expunge() M.expunge()
break break
if found: if found:
break break
print("Test message not present in the inbox yet...") print("Test message not present in the inbox yet...")
M.close() M.close()
M.logout() M.logout()

View File

@ -1,5 +1,6 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import smtplib, sys import smtplib
import sys
if len(sys.argv) < 3: if len(sys.argv) < 3:
print("Usage: tests/smtp_server.py host email.to email.from") print("Usage: tests/smtp_server.py host email.to email.from")
@ -16,4 +17,3 @@ server = smtplib.SMTP(host, 25)
server.set_debuglevel(1) server.set_debuglevel(1)
server.sendmail(fromaddr, [toaddr], msg) server.sendmail(fromaddr, [toaddr], msg)
server.quit() server.quit()

View File

@ -18,14 +18,15 @@
# lines while the lines start with whitespace, e.g.: # lines while the lines start with whitespace, e.g.:
# #
# NAME VAL # NAME VAL
# UE # UE
import sys, re import sys
import re
# sanity check # sanity check
if len(sys.argv) < 3: if len(sys.argv) < 3:
print("usage: python3 editconf.py /etc/file.conf [-s] [-w] [-t] NAME=VAL [NAME=VAL ...]") print("usage: python3 editconf.py /etc/file.conf [-s] [-w] [-t] NAME=VAL [NAME=VAL ...]")
sys.exit(1) sys.exit(1)
# parse command line arguments # parse command line arguments
filename = sys.argv[1] filename = sys.argv[1]
@ -37,22 +38,22 @@ comment_char = "#"
folded_lines = False folded_lines = False
testing = False testing = False
while settings[0][0] == "-" and settings[0] != "--": while settings[0][0] == "-" and settings[0] != "--":
opt = settings.pop(0) opt = settings.pop(0)
if opt == "-s": if opt == "-s":
# Space is the delimiter # Space is the delimiter
delimiter = " " delimiter = " "
delimiter_re = r"\s+" delimiter_re = r"\s+"
elif opt == "-w": elif opt == "-w":
# Line folding is possible in this file. # Line folding is possible in this file.
folded_lines = True folded_lines = True
elif opt == "-c": elif opt == "-c":
# Specifies a different comment character. # Specifies a different comment character.
comment_char = settings.pop(0) comment_char = settings.pop(0)
elif opt == "-t": elif opt == "-t":
testing = True testing = True
else: else:
print("Invalid option.") print("Invalid option.")
sys.exit(1) sys.exit(1)
# create the new config file in memory # create the new config file in memory
@ -61,67 +62,69 @@ buf = ""
input_lines = list(open(filename)) input_lines = list(open(filename))
while len(input_lines) > 0: while len(input_lines) > 0:
line = input_lines.pop(0) line = input_lines.pop(0)
# If this configuration file uses folded lines, append any folded lines # If this configuration file uses folded lines, append any folded lines
# into our input buffer. # into our input buffer.
if folded_lines and line[0] not in (comment_char, " ", ""): if folded_lines and line[0] not in (comment_char, " ", ""):
while len(input_lines) > 0 and input_lines[0][0] in " \t": while len(input_lines) > 0 and input_lines[0][0] in " \t":
line += input_lines.pop(0) line += input_lines.pop(0)
# See if this line is for any settings passed on the command line. # See if this line is for any settings passed on the command line.
for i in range(len(settings)): for i in range(len(settings)):
# Check that this line contain this setting from the command-line arguments. # Check that this line contain this setting from the command-line arguments.
name, val = settings[i].split("=", 1) name, val = settings[i].split("=", 1)
m = re.match( m = re.match(
"(\s*)" "(\s*)" +
+ "(" + re.escape(comment_char) + "\s*)?" "(" + re.escape(comment_char) + "\s*)?" +
+ re.escape(name) + delimiter_re + "(.*?)\s*$", re.escape(name) + delimiter_re + "(.*?)\s*$",
line, re.S) line, re.S)
if not m: continue if not m:
indent, is_comment, existing_val = m.groups() continue
indent, is_comment, existing_val = m.groups()
# If this is already the setting, do nothing.
if is_comment is None and existing_val == val:
# It may be that we've already inserted this setting higher
# in the file so check for that first.
if i in found:
break
buf += line
found.add(i)
break
# comment-out the existing line (also comment any folded lines)
if is_comment is None:
buf += comment_char + line.rstrip().replace("\n", "\n" + comment_char) + "\n"
else:
# the line is already commented, pass it through
buf += line
# if this option oddly appears more than once, don't add the setting again
if i in found:
break
# add the new setting
buf += indent + name + delimiter + val + "\n"
# note that we've applied this option
found.add(i)
break
else:
# If did not match any setting names, pass this line through.
buf += line
# If this is already the setting, do nothing.
if is_comment is None and existing_val == val:
# It may be that we've already inserted this setting higher
# in the file so check for that first.
if i in found: break
buf += line
found.add(i)
break
# comment-out the existing line (also comment any folded lines)
if is_comment is None:
buf += comment_char + line.rstrip().replace("\n", "\n" + comment_char) + "\n"
else:
# the line is already commented, pass it through
buf += line
# if this option oddly appears more than once, don't add the setting again
if i in found:
break
# add the new setting
buf += indent + name + delimiter + val + "\n"
# note that we've applied this option
found.add(i)
break
else:
# If did not match any setting names, pass this line through.
buf += line
# Put any settings we didn't see at the end of the file. # Put any settings we didn't see at the end of the file.
for i in range(len(settings)): for i in range(len(settings)):
if i not in found: if i not in found:
name, val = settings[i].split("=", 1) name, val = settings[i].split("=", 1)
buf += name + delimiter + val + "\n" buf += name + delimiter + val + "\n"
if not testing: if not testing:
# Write out the new file. # Write out the new file.
with open(filename, "w") as f: with open(filename, "w") as f:
f.write(buf) f.write(buf)
else: else:
# Just print the new file to stdout. # Just print the new file to stdout.
print(buf) print(buf)

View File

@ -1,124 +1,132 @@
#!/usr/bin/python3 #!/usr/bin/python3
import sys, getpass, urllib.request, urllib.error, json import sys
import getpass
import urllib.request
import urllib.error
import json
def mgmt(cmd, data=None, is_json=False):
    """Call the management daemon's REST API.

    cmd     -- URL path, e.g. "/mail/users/add".
    data    -- dict of POST form fields; None issues a GET.
    is_json -- when True, decode the response body as JSON.

    Exits the process with status 1 on any HTTP error.
    """
    # The base URL for the management daemon. (Listens on IPv4 only.)
    mgmt_uri = 'http://127.0.0.1:10222'
    setup_key_auth(mgmt_uri)

    body = urllib.parse.urlencode(data).encode("utf8") if data else None
    req = urllib.request.Request(mgmt_uri + cmd, body)
    try:
        response = urllib.request.urlopen(req)
    except urllib.error.HTTPError as e:
        if e.code == 401:
            # Show the server's explanation for the auth failure, if any.
            try:
                print(e.read().decode("utf8"))
            except:
                pass
            print("The management daemon refused access. The API key file may be out of sync. Try 'service mailinabox restart'.", file=sys.stderr)
        elif hasattr(e, 'read'):
            print(e.read().decode('utf8'), file=sys.stderr)
        else:
            print(e, file=sys.stderr)
        sys.exit(1)

    resp = response.read().decode('utf8')
    if is_json:
        resp = json.loads(resp)
    return resp
def read_password():
    """Prompt twice for a password until both entries agree; return it."""
    pw = getpass.getpass('password: ')
    confirmation = getpass.getpass(' (again): ')
    while pw != confirmation:
        print('Passwords not the same. Try again.')
        pw = getpass.getpass('password: ')
        confirmation = getpass.getpass(' (again): ')
    return pw
def setup_key_auth(mgmt_uri):
    """Install a urllib opener that sends the local API key as HTTP Basic auth.

    The key is read from the management daemon's key file; it is passed as
    the Basic-auth username with an empty password.
    """
    key = open('/var/lib/mailinabox/api.key').read().strip()

    handler = urllib.request.HTTPBasicAuthHandler()
    handler.add_password(
        realm='Mail-in-a-Box Management Server',
        uri=mgmt_uri,
        user=key,
        passwd='')
    urllib.request.install_opener(urllib.request.build_opener(handler))
# Dispatch on the command-line subcommand. Every action goes through the
# management daemon via mgmt(); unknown argument shapes fall through to
# the error branch at the bottom.
if len(sys.argv) < 2:
    # No subcommand given: print usage help.
    print("Usage: ")
    print(" tools/mail.py user (lists users)")
    print(" tools/mail.py user add user@domain.com [password]")
    print(" tools/mail.py user password user@domain.com [password]")
    print(" tools/mail.py user remove user@domain.com")
    print(" tools/mail.py user make-admin user@domain.com")
    print(" tools/mail.py user remove-admin user@domain.com")
    print(" tools/mail.py user admins (lists admins)")
    print(" tools/mail.py alias (lists aliases)")
    print(" tools/mail.py alias add incoming.name@domain.com sent.to@other.domain.com")
    print(" tools/mail.py alias add incoming.name@domain.com 'sent.to@other.domain.com, multiple.people@other.domain.com'")
    print(" tools/mail.py alias remove incoming.name@domain.com")
    print()
    print("Removing a mail user does not delete their mail folders on disk. It only prevents IMAP/SMTP login.")
    print()

elif sys.argv[1] == "user" and len(sys.argv) == 2:
    # Dump a list of users, one per line. Mark admins with an asterisk.
    users = mgmt("/mail/users?format=json", is_json=True)
    for domain in users:
        for user in domain["users"]:
            if user['status'] == 'inactive':
                continue
            print(user['email'], end='')
            if "admin" in user['privileges']:
                print("*", end='')
            print()

elif sys.argv[1] == "user" and sys.argv[2] in ("add", "password"):
    # Collect the email and password from argv, prompting for whatever
    # was not supplied on the command line.
    if len(sys.argv) >= 5:
        email, pw = sys.argv[3:5]
    else:
        email = sys.argv[3] if len(sys.argv) >= 4 else input("email: ")
        pw = read_password()
    if sys.argv[2] == "add":
        print(mgmt("/mail/users/add", {"email": email, "password": pw}))
    elif sys.argv[2] == "password":
        print(mgmt("/mail/users/password", {"email": email, "password": pw}))

elif sys.argv[1] == "user" and sys.argv[2] == "remove" and len(sys.argv) == 4:
    print(mgmt("/mail/users/remove", {"email": sys.argv[3]}))

elif sys.argv[1] == "user" and sys.argv[2] in ("make-admin", "remove-admin") and len(sys.argv) == 4:
    # Both subcommands map onto the privileges add/remove endpoint.
    action = "add" if sys.argv[2] == "make-admin" else "remove"
    print(mgmt("/mail/users/privileges/" + action, {"email": sys.argv[3], "privilege": "admin"}))

elif sys.argv[1] == "user" and sys.argv[2] == "admins":
    # Dump a list of admin users.
    users = mgmt("/mail/users?format=json", is_json=True)
    for domain in users:
        for user in domain["users"]:
            if "admin" in user['privileges']:
                print(user['email'])

elif sys.argv[1] == "alias" and len(sys.argv) == 2:
    print(mgmt("/mail/aliases"))

elif sys.argv[1] == "alias" and sys.argv[2] == "add" and len(sys.argv) == 5:
    print(mgmt("/mail/aliases/add", {"source": sys.argv[3], "destination": sys.argv[4]}))

elif sys.argv[1] == "alias" and sys.argv[2] == "remove" and len(sys.argv) == 4:
    print(mgmt("/mail/aliases/remove", {"source": sys.argv[3]}))

else:
    print("Invalid command-line arguments.")
    sys.exit(1)

View File

@ -4,7 +4,11 @@
# access log to see how many people are installing Mail-in-a-Box each day, by # access log to see how many people are installing Mail-in-a-Box each day, by
# looking at accesses to the bootstrap.sh script. # looking at accesses to the bootstrap.sh script.
import re, glob, gzip, os.path, json import re
import glob
import gzip
import os.path
import json
import dateutil.parser import dateutil.parser
outfn = "/home/user-data/www/mailinabox.email/install-stats.json" outfn = "/home/user-data/www/mailinabox.email/install-stats.json"
@ -15,35 +19,35 @@ accesses = set()
# Scan the current and rotated access logs. # Scan the current and rotated access logs.
for fn in glob.glob("/var/log/nginx/access.log*"): for fn in glob.glob("/var/log/nginx/access.log*"):
# Gunzip if necessary. # Gunzip if necessary.
if fn.endswith(".gz"): if fn.endswith(".gz"):
f = gzip.open(fn) f = gzip.open(fn)
else: else:
f = open(fn, "rb") f = open(fn, "rb")
# Loop through the lines in the access log. # Loop through the lines in the access log.
with f: with f:
for line in f: for line in f:
# Find lines that are GETs on /bootstrap.sh by either curl or wget. # Find lines that are GETs on /bootstrap.sh by either curl or wget.
m = re.match(rb"(?P<ip>\S+) - - \[(?P<date>.*?)\] \"GET /bootstrap.sh HTTP/.*\" 200 \d+ .* \"(?:curl|wget)", line, re.I) m = re.match(rb"(?P<ip>\S+) - - \[(?P<date>.*?)\] \"GET /bootstrap.sh HTTP/.*\" 200 \d+ .* \"(?:curl|wget)", line, re.I)
if m: if m:
date, time = m.group("date").decode("ascii").split(":", 1) date, time = m.group("date").decode("ascii").split(":", 1)
date = dateutil.parser.parse(date).date().isoformat() date = dateutil.parser.parse(date).date().isoformat()
ip = m.group("ip").decode("ascii") ip = m.group("ip").decode("ascii")
accesses.add( (date, ip) ) accesses.add((date, ip))
# Aggregate by date. # Aggregate by date.
by_date = { } by_date = {}
for date, ip in accesses: for date, ip in accesses:
by_date[date] = by_date.get(date, 0) + 1 by_date[date] = by_date.get(date, 0) + 1
# Since logs are rotated, store the statistics permanently in a JSON file. # Since logs are rotated, store the statistics permanently in a JSON file.
# Load in the stats from an existing file. # Load in the stats from an existing file.
if os.path.exists(outfn): if os.path.exists(outfn):
existing_data = json.load(open(outfn)) existing_data = json.load(open(outfn))
for date, count in existing_data: for date, count in existing_data:
if date not in by_date: if date not in by_date:
by_date[date] = count by_date[date] = count
# Turn into a list rather than a dict structure to make it ordered. # Turn into a list rather than a dict structure to make it ordered.
by_date = sorted(by_date.items()) by_date = sorted(by_date.items())
@ -53,4 +57,4 @@ by_date.pop(-1)
# Write out. # Write out.
with open(outfn, "w") as f: with open(outfn, "w") as f:
json.dump(by_date, f, sort_keys=True, indent=True) json.dump(by_date, f, sort_keys=True, indent=True)

View File

@ -3,12 +3,14 @@
# Generate documentation for how this machine works by # Generate documentation for how this machine works by
# parsing our bash scripts! # parsing our bash scripts!
import cgi, re import cgi
import re
import markdown import markdown
from modgrammar import * from modgrammar import *
def generate_documentation(): def generate_documentation():
print("""<!DOCTYPE html> print("""<!DOCTYPE html>
<html> <html>
<head> <head>
<meta charset="utf-8"> <meta charset="utf-8">
@ -21,93 +23,93 @@ def generate_documentation():
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap-theme.min.css"> <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap-theme.min.css">
<style> <style>
@import url(https://fonts.googleapis.com/css?family=Iceland); @import url(https://fonts.googleapis.com/css?family=Iceland);
@import url(https://fonts.googleapis.com/css?family=Raleway:400,700); @import url(https://fonts.googleapis.com/css?family=Raleway:400,700);
@import url(https://fonts.googleapis.com/css?family=Ubuntu:300,500); @import url(https://fonts.googleapis.com/css?family=Ubuntu:300,500);
body { body {
font-family: Raleway, sans-serif; font-family: Raleway, sans-serif;
font-size: 16px; font-size: 16px;
color: #555; color: #555;
} }
h2, h3 { h2, h3 {
margin-top: .25em; margin-top: .25em;
margin-bottom: .75em; margin-bottom: .75em;
} }
p { p {
margin-bottom: 1em; margin-bottom: 1em;
} }
.intro p { .intro p {
margin: 1.5em 0; margin: 1.5em 0;
} }
li { li {
margin-bottom: .33em; margin-bottom: .33em;
} }
.sourcefile { .sourcefile {
padding-top: 1.5em; padding-top: 1.5em;
padding-bottom: 1em; padding-bottom: 1em;
font-size: 90%; font-size: 90%;
text-align: right; text-align: right;
} }
.sourcefile a { .sourcefile a {
color: red; color: red;
} }
.instructions .row.contd { .instructions .row.contd {
border-top: 1px solid #E0E0E0; border-top: 1px solid #E0E0E0;
} }
.prose { .prose {
padding-top: 1em; padding-top: 1em;
padding-bottom: 1em; padding-bottom: 1em;
} }
.terminal { .terminal {
background-color: #EEE; background-color: #EEE;
padding-top: 1em; padding-top: 1em;
padding-bottom: 1em; padding-bottom: 1em;
} }
ul { ul {
padding-left: 1.25em; padding-left: 1.25em;
} }
pre { pre {
color: black; color: black;
border: 0; border: 0;
background: none; background: none;
font-size: 100%; font-size: 100%;
} }
div.write-to { div.write-to {
margin: 0 0 1em .5em; margin: 0 0 1em .5em;
} }
div.write-to p { div.write-to p {
padding: .5em; padding: .5em;
margin: 0; margin: 0;
} }
div.write-to .filename { div.write-to .filename {
padding: .25em .5em; padding: .25em .5em;
background-color: #666; background-color: #666;
color: white; color: white;
font-family: monospace; font-family: monospace;
font-weight: bold; font-weight: bold;
} }
div.write-to .filename span { div.write-to .filename span {
font-family: sans-serif; font-family: sans-serif;
font-weight: normal; font-weight: normal;
} }
div.write-to pre { div.write-to pre {
margin: 0; margin: 0;
padding: .5em; padding: .5em;
border: 1px solid #999; border: 1px solid #999;
border-radius: 0; border-radius: 0;
font-size: 90%; font-size: 90%;
} }
pre.shell > div:before { pre.shell > div:before {
content: "$ "; content: "$ ";
color: #666; color: #666;
} }
</style> </style>
</head> </head>
<body> <body>
@ -123,359 +125,408 @@ def generate_documentation():
<div class="container instructions"> <div class="container instructions">
""") """)
parser = Source.parser() parser = Source.parser()
for line in open("setup/start.sh"): for line in open("setup/start.sh"):
try: try:
fn = parser.parse_string(line).filename() fn = parser.parse_string(line).filename()
except: except:
continue continue
if fn in ("setup/start.sh", "setup/preflight.sh", "setup/questions.sh", "setup/firstuser.sh", "setup/management.sh"): if fn in ("setup/start.sh", "setup/preflight.sh", "setup/questions.sh", "setup/firstuser.sh", "setup/management.sh"):
continue continue
import sys import sys
print(fn, file=sys.stderr) print(fn, file=sys.stderr)
print(BashScript.parse(fn)) print(BashScript.parse(fn))
print(""" print("""
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.10.1/jquery.min.js"></script> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.10.1/jquery.min.js"></script>
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/js/bootstrap.min.js"></script> <script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/js/bootstrap.min.js"></script>
<script> <script>
$(function() { $(function() {
$('.terminal').each(function() { $('.terminal').each(function() {
$(this).outerHeight( $(this).parent().innerHeight() ); $(this).outerHeight( $(this).parent().innerHeight() );
}); });
}) })
</script> </script>
</body> </body>
</html> </html>
""") """)
class HashBang(Grammar):
    """Matches a leading '#!' interpreter line."""
    grammar = (L('#!'), REST_OF_LINE, EOL)

    def value(self):
        # Shebang lines carry no documentation value; render nothing.
        return ""
def strip_indent(s):
    """Remove the common leading indentation from every line of *s*.

    Tabs are first expanded to four spaces.  Blank lines are ignored
    when computing the shared indent; if every line is empty, the text
    is returned with only the tab expansion applied.
    """
    expanded = s.replace("\t", "    ")
    lines = expanded.split("\n")
    # Leading-whitespace width of each non-empty line.
    indents = [len(line) - len(line.lstrip()) for line in lines if len(line) > 0]
    margin = min(indents) if indents else 0
    return "\n".join(line[margin:] for line in lines)
class Comment(Grammar):
    """One or more consecutive '#' comment lines, rendered as Markdown."""
    grammar = ONE_OR_MORE(ZERO_OR_MORE(SPACE), L('#'), REST_OF_LINE, EOL)

    def value(self):
        # A run of '#' characters with no other text (a visual divider)
        # becomes just a blank line in the output.
        if self.string.replace("#", "").strip() == "":
            return "\n"
        # x[2] is the REST_OF_LINE element of each matched comment line.
        lines = [x[2].string for x in self[0]]
        content = "\n".join(lines)
        content = strip_indent(content)
        return markdown.markdown(content, output_format="html4") + "\n\n"
# Token for file paths as they appear in 'source' / editconf commands.
FILENAME = WORD('a-z0-9-/.')
class Source(Grammar):
    """A '. file' or 'source file' line: inline the sourced script's docs."""
    grammar = ((L('.') | L('source')), L(' '), FILENAME, Comment | EOL)

    def filename(self):
        # Element 2 is the FILENAME token.
        return self[2].string.strip()

    def value(self):
        # Recursively render the referenced script.
        return BashScript.parse(self.filename())
class CatEOF(Grammar):
    """A 'cat > file <<EOF ... EOF' heredoc, rendered as a write-to box."""
    grammar = (ZERO_OR_MORE(SPACE), L('cat '), L('>') | L('>>'), L(' '), ANY_EXCEPT(WHITESPACE), L(" <<"), OPTIONAL(SPACE), L("EOF"), EOL, REPEAT(ANY, greedy=False), EOL, L("EOF"), EOL)

    def value(self):
        # self[9] is the heredoc body, self[4] the target filename,
        # self[2] distinguishes '>' (overwrite) from '>>' (append).
        content = self[9].string
        # un-escape bash-escaped characters
        content = re.sub(r"\\([$])", r"\1", content)
        return "<div class='write-to'><div class='filename'>%s <span>(%s)</span></div><pre>%s</pre></div>\n" \
            % (self[4].string,
               "overwrite" if ">>" not in self[2].string else "append to",
               cgi.escape(content))
class HideOutput(Grammar):
    """'hide_output cmd': render the wrapped command as if unwrapped."""
    grammar = (L("hide_output "), REF("BashElement"))

    def value(self):
        # Delegate to the wrapped BashElement.
        return self[1].value()
class EchoLine(Grammar):
    """An 'echo ...' line; shown only when it pipes or redirects output."""
    grammar = (OPTIONAL(SPACE), L("echo "), REST_OF_LINE, EOL)

    def value(self):
        # Plain status echoes are noise; keep only echoes whose output
        # feeds a pipeline or is redirected to a file.
        feeds_something = "|" in self.string or ">" in self.string
        if not feeds_something:
            return ""
        return "<pre class='shell'><div>" + recode_bash(self.string.strip()) + "</div></pre>\n"
class EditConf(Grammar):
    """A 'tools/editconf.py FILE [flags] NAME=VALUE ...' invocation,
    rendered as a settings-change box."""
    grammar = (
        L('tools/editconf.py '),
        FILENAME,
        SPACE,
        OPTIONAL((LIST_OF(
            L("-w") | L("-s") | L("-c ;"),
            sep=SPACE,
        ), SPACE)),
        REST_OF_LINE,
        OPTIONAL(SPACE),
        EOL
    )

    def value(self):
        options = []
        # -s means settings in the target file are space-separated
        # rather than NAME=VALUE.
        eq = "="
        if self[3] and "-s" in self[3].string:
            eq = " "
        # Raw string: "\s" in a plain string is an invalid escape sequence.
        for opt in re.split(r"\s+", self[4].string):
            k, v = opt.split("=", 1)
            # not sure why newlines are getting doubled
            v = re.sub(r"\n+", "", fixup_tokens(v))
            options.append("%s%s%s" % (k, eq, v))
        return "<div class='write-to'><div class='filename'>" + self[1].string + " <span>(change settings)</span></div><pre>" + "\n".join(cgi.escape(s) for s in options) + "</pre></div>\n"
class CaptureOutput(Grammar):
    """A 'VAR=$(command)' assignment, rendered as a capture box."""
    grammar = OPTIONAL(SPACE), WORD("A-Za-z_"), L('=$('), REST_OF_LINE, L(")"), OPTIONAL(L(';')), EOL

    def value(self):
        # self[1] is the variable name, self[3] the command inside $( ).
        # Break ';'-joined commands onto separate lines for readability.
        command = self[3].string.replace("; ", "\n")
        return "<div class='write-to'><div class='filename'>$" + self[1].string + "=</div><pre>" + cgi.escape(command) + "</pre></div>\n"
class SedReplace(Grammar):
    """A sed -i "s/pattern/replacement/" substitution, shown as an edit box."""
    grammar = OPTIONAL(SPACE), L('sed -i "s/'), OPTIONAL(L('^')), ONE_OR_MORE(WORD("-A-Za-z0-9 #=\\{};.*$_!()")), L('/'), ONE_OR_MORE(WORD("-A-Za-z0-9 #=\\{};.*$_!()")), L('/"'), SPACE, FILENAME, EOL

    def value(self):
        # self[3] is the search pattern, self[5] the replacement,
        # self[8] the file being edited.  '.*' is shown as an ellipsis;
        # literal \n/\t in the replacement are rendered as real whitespace.
        return "<div class='write-to'><div class='filename'>edit<br>" + self[8].string + "</div><p>replace</p><pre>" + cgi.escape(self[3].string.replace(".*", ". . .")) + "</pre><p>with</p><pre>" + cgi.escape(self[5].string.replace("\\n", "\n").replace("\\t", "\t")) + "</pre></div>\n"
class EchoPipe(Grammar):
    """An 'echo ... | cmd' pipeline, rendered as a two-line shell box."""
    grammar = OPTIONAL(SPACE), L("echo "), REST_OF_LINE, L(' | '), REST_OF_LINE, EOL

    def value(self):
        # Quote each word of the echoed text individually.
        text = " ".join("\"%s\"" % s for s in self[2].string.split(" "))
        # "\\<br>" was previously written "\<br>", an invalid escape
        # sequence; the runtime string (a literal backslash before the
        # line-continuation <br>) is unchanged.
        return "<pre class='shell'><div>echo " + recode_bash(text) + " \\<br> | " + recode_bash(self[4].string) + "</div></pre>\n"
def shell_line(bash):
    """Render one shell command as a terminal-style <pre> block."""
    command = recode_bash(bash.strip())
    return "<pre class='shell'><div>%s</div></pre>\n" % command
class AptGet(Grammar):
    """An 'apt_install pkgs...' helper call, shown as apt-get install."""
    grammar = (ZERO_OR_MORE(SPACE), L("apt_install "), REST_OF_LINE, EOL)

    def value(self):
        # Collapse whitespace: package lists may span continuation lines.
        return shell_line("apt-get install -y " + re.sub(r"\s+", " ", self[2].string))
class UfwAllow(Grammar):
    """A 'ufw_allow port' helper call, shown as the underlying ufw command."""
    grammar = (ZERO_OR_MORE(SPACE), L("ufw_allow "), REST_OF_LINE, EOL)

    def value(self):
        return shell_line("ufw allow " + self[2].string)
class RestartService(Grammar):
    """A 'restart_service name' helper call, shown as a service restart."""
    grammar = (ZERO_OR_MORE(SPACE), L("restart_service "), REST_OF_LINE, EOL)

    def value(self):
        return shell_line("service " + self[2].string + " restart")
class OtherLine(Grammar):
    """Fallback for any remaining line: shown verbatim as a shell command."""
    grammar = (REST_OF_LINE, EOL)

    def value(self):
        stripped = self.string.strip()
        # Skip blank lines and the boilerplate 'source' lines that every
        # setup script starts with.
        if stripped == "":
            return ""
        if ("source setup/functions.sh" in self.string
                or "source /etc/mailinabox.conf" in self.string):
            return ""
        return "<pre class='shell'><div>" + recode_bash(stripped) + "</div></pre>\n"
class BashElement(Grammar):
    """Union of all recognized line types, tried in priority order."""
    grammar = Comment | CatEOF | EchoPipe | EchoLine | HideOutput | EditConf | SedReplace | AptGet | UfwAllow | RestartService | OtherLine

    def value(self):
        # Delegate to whichever alternative matched.
        return self[0].value()
# Map some special characters to private-use Unicode code points so that
# later tokenization/parsing never sees them; fixup_tokens() reverses this.
bash_special_characters1 = {
    "\n": "\uE000",
    " ": "\uE001",
}
bash_special_characters2 = {
    "$": "\uE010",
}
bash_escapes = {
    "n": "\uE020",
    "t": "\uE021",
}


def quasitokenize(bashscript):
    """Normalize *bashscript* so the line-oriented grammar can parse it.

    Quote characters are removed, whitespace and '$' inside quotes are
    recoded to private-use code points, top-level ';' separators become
    newlines, tabs and runs of spaces collapse to single spaces, and
    escaped newlines become plain spaces (line continuation).  Comments
    and heredoc ("<<EOF") bodies pass through with only the recoding
    applied.
    """
    # Make a parse of bash easier by making the tokenization easy.
    newscript = ""
    quote_mode = None
    escape_next = False
    line_comment = False
    subshell = 0
    for c in bashscript:
        if line_comment:
            # We're in a comment until the end of the line.
            newscript += c
            if c == '\n':
                line_comment = False
        elif escape_next:
            # Previous character was a \. Normally the next character
            # comes through literally, but escaped newlines are line
            # continuations and some escapes are for special characters
            # which we'll recode and then turn back into escapes later.
            if c == "\n":
                c = " "
            elif c in bash_escapes:
                c = bash_escapes[c]
            newscript += c
            escape_next = False
        elif c == "\\":
            # Escaping next character.
            escape_next = True
        elif quote_mode is None and c in ('"', "'"):
            # Starting a quoted word.
            quote_mode = c
        elif c == quote_mode:
            # Ending a quoted word.
            quote_mode = None
        elif quote_mode is not None and quote_mode != "EOF" and c in bash_special_characters1:
            # Replace special tokens within quoted words so that they
            # don't interfere with tokenization later.
            newscript += bash_special_characters1[c]
        elif quote_mode is None and c == '#':
            # Start of a line comment.
            newscript += c
            line_comment = True
        elif quote_mode is None and c == ';' and subshell == 0:
            # End of a statement.
            newscript += "\n"
        elif quote_mode is None and c == '(':
            # Start of a subshell.
            newscript += c
            subshell += 1
        elif quote_mode is None and c == ')':
            # End of a subshell.
            newscript += c
            subshell -= 1
        elif quote_mode is None and c == '\t':
            # Make tabs into spaces, collapsing runs.  endswith() also
            # guards the empty-string case (a script beginning with
            # whitespace previously raised IndexError on newscript[-1]).
            if not newscript.endswith(" "):
                newscript += " "
        elif quote_mode is None and c == ' ':
            # Collapse consecutive spaces.
            if not newscript.endswith(" "):
                newscript += " "
        elif c in bash_special_characters2:
            newscript += bash_special_characters2[c]
        else:
            # All other characters.
            newscript += c

        # "<< EOF" escaping: treat heredoc bodies as quoted text.
        # (Raw string: "\s" in a plain string is an invalid escape.)
        if quote_mode is None and re.search(r"<<\s*EOF\n$", newscript):
            quote_mode = "EOF"
        elif quote_mode == "EOF" and re.search("\nEOF\n$", newscript):
            quote_mode = None

    return newscript
def recode_bash(s):
    """Re-quote a quasitokenized command line and escape it for HTML."""
    def requote(tok):
        # Double up backslashes, then re-escape the characters that were
        # recoded to private-use code points before decoding them back.
        tok = tok.replace("\\", "\\\\")
        for c in bash_special_characters2:
            tok = tok.replace(c, "\\" + c)
        tok = fixup_tokens(tok)
        if " " in tok or '"' in tok:
            # Tokens containing spaces or quotes get double-quoted.
            tok = tok.replace("\"", "\\\"")
            tok = '"' + tok + '"'
        else:
            tok = tok.replace("'", "\\'")
        return tok
    # NOTE(review): cgi.escape was removed in Python 3.8;
    # html.escape(..., quote=False) is the equivalent — confirm before
    # upgrading the interpreter.
    return cgi.escape(" ".join(requote(tok) for tok in s.split(" ")))
def fixup_tokens(s):
    """Undo the private-use code point substitutions made by quasitokenize()."""
    # Decode the recoded literal characters back to themselves.
    for table in (bash_special_characters1, bash_special_characters2):
        for plain, encoded in table.items():
            s = s.replace(encoded, plain)
    # Recoded \n / \t escapes become literal backslash escapes again.
    for letter, encoded in bash_escapes.items():
        s = s.replace(encoded, "\\" + letter)
    return s
class BashScript(Grammar):
    """A whole setup script: an optional shebang plus repeated elements."""
    grammar = (OPTIONAL(HashBang), REPEAT(BashElement))

    def value(self):
        # self[1] is the REPEAT of BashElements; render each one.
        return [line.value() for line in self[1]]

    @staticmethod
    def parse(fn):
        """Parse the bash script at path *fn* and return its HTML rendering.

        Scripts that are pure plumbing are skipped (returns "").
        """
        if fn in ("setup/functions.sh", "/etc/mailinabox.conf"):
            return ""
        # Use a context manager so the file handle is closed promptly
        # (previously open(fn).read() leaked the handle).
        with open(fn) as f:
            string = f.read()

        # tokenize
        # Raw strings below: "\s"/"\w" in plain strings are invalid escapes.
        string = re.sub(".* #NODOC\n", "", string)  # drop lines tagged #NODOC
        string = re.sub(r"\n\s*if .*then.*|\n\s*fi|\n\s*else|\n\s*elif .*", "", string)
        string = quasitokenize(string)
        string = re.sub("hide_output ", "", string)

        parser = BashScript.parser()
        result = parser.parse_string(string)

        v = "<div class='row'><div class='col-xs-12 sourcefile'>view the bash source for the following section at <a href=\"%s\">%s</a></div></div>\n" \
            % ("https://github.com/mail-in-a-box/mailinabox/tree/master/" + fn, fn)

        # Two-column layout state machine:
        #   mode 0 = between rows, 1 = inside a prose column,
        #   mode 2 = inside a terminal column.
        mode = 0
        for item in result.value():
            if item.strip() == "":
                pass
            elif item.startswith("<p") and not item.startswith("<pre"):
                clz = ""
                if mode == 2:
                    v += "</div>\n"  # col
                    v += "</div>\n"  # row
                    mode = 0
                    clz = "contd"
                if mode == 0:
                    v += "<div class='row %s'>\n" % clz
                    v += "<div class='col-md-6 prose'>\n"
                v += item
                mode = 1
            elif item.startswith("<h"):
                if mode != 0:
                    v += "</div>\n"  # col
                    v += "</div>\n"  # row
                v += "<div class='row'>\n"
                v += "<div class='col-md-6 header'>\n"
                v += item
                v += "</div>\n"  # col
                v += "<div class='col-md-6 terminal'> </div>\n"
                v += "</div>\n"  # row
                mode = 0
            else:
                if mode == 0:
                    v += "<div class='row'>\n"
                    v += "<div class='col-md-offset-6 col-md-6 terminal'>\n"
                elif mode == 1:
                    v += "</div>\n"
                    v += "<div class='col-md-6 terminal'>\n"
                mode = 2
                v += item

        v += "</div>\n"  # col
        v += "</div>\n"  # row

        v = fixup_tokens(v)

        # Merge adjacent shell blocks and re-align <pre> contents.
        v = v.replace("</pre>\n<pre class='shell'>", "")
        v = re.sub(r"<pre>([\w\W]*?)</pre>", lambda m: "<pre>" + strip_indent(m.group(1)) + "</pre>", v)

        # Substitute friendly placeholders for environment variables.
        v = re.sub(r"(\$?)PRIMARY_HOSTNAME", r"<b>box.yourdomain.com</b>", v)
        v = re.sub(r"\$STORAGE_ROOT", r"<b>$STORE</b>", v)
        v = re.sub(r"\$CSR_COUNTRY", r"<b>US</b>", v)
        v = v.replace("`pwd`", "<code><b>/path/to/mailinabox</b></code>")

        return v
def wrap_lines(text, cols=60):
    """Word-wrap *text* to at most *cols* columns using bash-style
    backslash line continuations.

    Whitespace runs are preserved as written, except at a wrap point
    where the whitespace that would begin the new line is dropped.
    """
    ret = ""
    # Raw string: "\s" in a plain string is an invalid escape sequence.
    # The capturing group keeps the whitespace separators in the result.
    words = re.split(r"(\s+)", text)
    linelen = 0
    for w in words:
        if linelen + len(w) > cols - 1:
            # Continue on the next line, slightly indented.
            ret += " \\\n"
            ret += " "
            linelen = 0
        if linelen == 0 and w.strip() == "":
            continue  # never start a line with whitespace
        ret += w
        linelen += len(w)
    return ret
# Entry point: print the generated HTML documentation for the setup scripts.
if __name__ == '__main__':
    generate_documentation()