First big pass on PEP8'ing all the things.
All PEP8 errors (other than line-length warnings) have been fixed except one; that one will require a little refactoring.
parent 7ec662c83f
commit 86a31cd978
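The changes below are mechanical PEP8 cleanups: comma-combined imports are split one per line, one-line `if condition: statement` forms are expanded into two-line blocks, stray spaces inside braces and around keyword `=` signs are dropped, and two blank lines separate top-level definitions. As a rough illustration only (the `first_match` helper and its names are hypothetical, not taken from this commit), the before/after pattern looks like this:

# Style before the pass (what the removed lines look like):
#   import os, os.path, re
#   def first_match(pattern, lines):
#       for ln in lines:
#           m = re.match(pattern, ln)
#           if m: return m
#       return None
#
# Style after the pass (what the added lines look like):
import os
import os.path
import re


def first_match(pattern, lines):
    # Scan lines in order and return the first regex match, else None.
    for ln in lines:
        m = re.match(pattern, ln)
        # The one-line "if m: return m" becomes a two-line block.
        if m:
            return m
    return None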
@@ -1,4 +1,7 @@
import base64, os, os.path, hmac
import base64
import os
import os.path
import hmac

from flask import make_response


@@ -8,6 +11,7 @@ from mailconfig import get_mail_password, get_mail_user_privileges
DEFAULT_KEY_PATH = '/var/lib/mailinabox/api.key'
DEFAULT_AUTH_REALM = 'Mail-in-a-Box Management Server'


class KeyAuthService:
"""Generate an API key for authenticating clients


@@ -116,7 +120,8 @@ class KeyAuthService:
# (This call should never fail on a valid user. But if it did fail, it would
# return a tuple of an error message and an HTTP status code.)
privs = get_mail_user_privileges(email, env)
if isinstance(privs, tuple): raise Exception("Error getting privileges.")
if isinstance(privs, tuple):
raise Exception("Error getting privileges.")

# Return a list of privileges.
return privs

@@ -9,8 +9,15 @@
# backup/secret_key.txt) to STORAGE_ROOT/backup/encrypted.
# 5) STORAGE_ROOT/backup/after-backup is executd if it exists.

import os, os.path, shutil, glob, re, datetime
import dateutil.parser, dateutil.relativedelta, dateutil.tz
import os
import os.path
import shutil
import glob
import re
import datetime
import dateutil.parser
import dateutil.relativedelta
import dateutil.tz

from utils import exclusive_process, load_environment, shell


@@ -18,6 +25,7 @@ from utils import exclusive_process, load_environment, shell
# that depends on it is this many days old.
keep_backups_for_days = 3


def backup_status(env):
# What is the current status of backups?
# Loop through all of the files in STORAGE_ROOT/backup/duplicity to

@@ -25,23 +33,32 @@ def backup_status(env):
# see how large the storage is.

now = datetime.datetime.now(dateutil.tz.tzlocal())

def reldate(date, ref, clip):
if ref < date: return clip
if ref < date:
return clip
rd = dateutil.relativedelta.relativedelta(ref, date)
if rd.months > 1: return "%d months, %d days" % (rd.months, rd.days)
if rd.months == 1: return "%d month, %d days" % (rd.months, rd.days)
if rd.days >= 7: return "%d days" % rd.days
if rd.days > 1: return "%d days, %d hours" % (rd.days, rd.hours)
if rd.days == 1: return "%d day, %d hours" % (rd.days, rd.hours)
if rd.months > 1:
return "%d months, %d days" % (rd.months, rd.days)
if rd.months == 1:
return "%d month, %d days" % (rd.months, rd.days)
if rd.days >= 7:
return "%d days" % rd.days
if rd.days > 1:
return "%d days, %d hours" % (rd.days, rd.hours)
if rd.days == 1:
return "%d day, %d hours" % (rd.days, rd.hours)
return "%d hours, %d minutes" % (rd.hours, rd.minutes)

backups = { }
backups = {}
basedir = os.path.join(env['STORAGE_ROOT'], 'backup/duplicity/')
encdir = os.path.join(env['STORAGE_ROOT'], 'backup/encrypted/')
os.makedirs(basedir, exist_ok=True) # os.listdir fails if directory does not exist
# os.listdir fails if directory does not exist
os.makedirs(basedir, exist_ok=True)
for fn in os.listdir(basedir):
m = re.match(r"duplicity-(full|full-signatures|(inc|new-signatures)\.(?P<incbase>\d+T\d+Z)\.to)\.(?P<date>\d+T\d+Z)\.", fn)
if not m: raise ValueError(fn)
if not m:
raise ValueError(fn)

key = m.group("date")
if key not in backups:

@@ -65,7 +82,7 @@ def backup_status(env):

# Ensure the rows are sorted reverse chronologically.
# This is relied on by should_force_full() and the next step.
backups = sorted(backups.values(), key = lambda b : b["date"], reverse=True)
backups = sorted(backups.values(), key=lambda b: b["date"], reverse=True)

# When will a backup be deleted?
saw_full = False

@@ -93,6 +110,7 @@ def backup_status(env):
"backups": backups,
}


def should_force_full(env):
# Force a full backup when the total size of the increments
# since the last full backup is greater than half the size

@@ -112,6 +130,7 @@ def should_force_full(env):
# (I love for/else blocks. Here it's just to show off.)
return True


def perform_backup(full_backup):
env = load_environment()


@@ -174,7 +193,8 @@ def perform_backup(full_backup):
os.makedirs(backup_encrypted_dir, exist_ok=True)
for fn in os.listdir(backup_duplicity_dir):
fn2 = os.path.join(backup_encrypted_dir, fn) + ".enc"
if os.path.exists(fn2): continue
if os.path.exists(fn2):
continue

# Encrypt the backup using the backup private key.
shell('check_call', [

@@ -194,7 +214,8 @@ def perform_backup(full_backup):
# Remove encrypted backups that are no longer needed.
for fn in os.listdir(backup_encrypted_dir):
fn2 = os.path.join(backup_duplicity_dir, fn.replace(".enc", ""))
if os.path.exists(fn2): continue
if os.path.exists(fn2):
continue
os.unlink(os.path.join(backup_encrypted_dir, fn))

# Execute a post-backup script that does the copying to a remote server.

@ -1,12 +1,16 @@
|
|||
#!/usr/bin/python3
|
||||
|
||||
import os, os.path, re, json
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import json
|
||||
|
||||
from functools import wraps
|
||||
|
||||
from flask import Flask, request, render_template, abort, Response
|
||||
|
||||
import auth, utils
|
||||
import auth
|
||||
import utils
|
||||
from mailconfig import get_mail_users, get_mail_users_ex, get_admins, add_mail_user, set_mail_password, remove_mail_user
|
||||
from mailconfig import get_mail_user_privileges, add_remove_mail_user_privilege
|
||||
from mailconfig import get_mail_aliases, get_mail_aliases_ex, get_mail_domains, add_mail_alias, remove_mail_alias
|
||||
|
@ -30,6 +34,7 @@ except OSError:
|
|||
|
||||
app = Flask(__name__, template_folder=os.path.abspath(os.path.join(os.path.dirname(me), "templates")))
|
||||
|
||||
|
||||
# Decorator to protect views that require a user with 'admin' privileges.
|
||||
def authorized_personnel_only(viewfunc):
|
||||
@wraps(viewfunc)
|
||||
|
@ -75,10 +80,12 @@ def authorized_personnel_only(viewfunc):
|
|||
|
||||
return newview
|
||||
|
||||
|
||||
@app.errorhandler(401)
|
||||
def unauthorized(error):
|
||||
return auth_service.make_unauthorized_response()
|
||||
|
||||
|
||||
def json_response(data):
|
||||
return Response(json.dumps(data), status=200, mimetype='application/json')
|
||||
|
||||
|
@ -86,17 +93,20 @@ def json_response(data):
|
|||
|
||||
# Control Panel (unauthenticated views)
|
||||
|
||||
|
||||
@app.route('/')
|
||||
def index():
|
||||
# Render the control panel. This route does not require user authentication
|
||||
# so it must be safe!
|
||||
no_admins_exist = (len(get_admins(env)) == 0)
|
||||
return render_template('index.html',
|
||||
return render_template(
|
||||
'index.html',
|
||||
hostname=env['PRIMARY_HOSTNAME'],
|
||||
storage_root=env['STORAGE_ROOT'],
|
||||
no_admins_exist=no_admins_exist,
|
||||
)
|
||||
|
||||
|
||||
@app.route('/me')
|
||||
def me():
|
||||
# Is the caller authorized?
|
||||
|
@ -123,6 +133,7 @@ def me():
|
|||
|
||||
# MAIL
|
||||
|
||||
|
||||
@app.route('/mail/users')
|
||||
@authorized_personnel_only
|
||||
def mail_users():
|
||||
|
@ -131,6 +142,7 @@ def mail_users():
|
|||
else:
|
||||
return "".join(x+"\n" for x in get_mail_users(env))
|
||||
|
||||
|
||||
@app.route('/mail/users/add', methods=['POST'])
|
||||
@authorized_personnel_only
|
||||
def mail_users_add():
|
||||
|
@ -139,6 +151,7 @@ def mail_users_add():
|
|||
except ValueError as e:
|
||||
return (str(e), 400)
|
||||
|
||||
|
||||
@app.route('/mail/users/password', methods=['POST'])
|
||||
@authorized_personnel_only
|
||||
def mail_users_password():
|
||||
|
@ -147,6 +160,7 @@ def mail_users_password():
|
|||
except ValueError as e:
|
||||
return (str(e), 400)
|
||||
|
||||
|
||||
@app.route('/mail/users/remove', methods=['POST'])
|
||||
@authorized_personnel_only
|
||||
def mail_users_remove():
|
||||
|
@ -157,14 +171,18 @@ def mail_users_remove():
|
|||
@authorized_personnel_only
|
||||
def mail_user_privs():
|
||||
privs = get_mail_user_privileges(request.args.get('email', ''), env)
|
||||
if isinstance(privs, tuple): return privs # error
|
||||
# error
|
||||
if isinstance(privs, tuple):
|
||||
return privs
|
||||
return "\n".join(privs)
|
||||
|
||||
|
||||
@app.route('/mail/users/privileges/add', methods=['POST'])
|
||||
@authorized_personnel_only
|
||||
def mail_user_privs_add():
|
||||
return add_remove_mail_user_privilege(request.form.get('email', ''), request.form.get('privilege', ''), "add", env)
|
||||
|
||||
|
||||
@app.route('/mail/users/privileges/remove', methods=['POST'])
|
||||
@authorized_personnel_only
|
||||
def mail_user_privs_remove():
|
||||
|
@ -179,6 +197,7 @@ def mail_aliases():
|
|||
else:
|
||||
return "".join(x+"\t"+y+"\n" for x, y in get_mail_aliases(env))
|
||||
|
||||
|
||||
@app.route('/mail/aliases/add', methods=['POST'])
|
||||
@authorized_personnel_only
|
||||
def mail_aliases_add():
|
||||
|
@ -189,11 +208,13 @@ def mail_aliases_add():
|
|||
update_if_exists=(request.form.get('update_if_exists', '') == '1')
|
||||
)
|
||||
|
||||
|
||||
@app.route('/mail/aliases/remove', methods=['POST'])
|
||||
@authorized_personnel_only
|
||||
def mail_aliases_remove():
|
||||
return remove_mail_alias(request.form.get('source', ''), env)
|
||||
|
||||
|
||||
@app.route('/mail/domains')
|
||||
@authorized_personnel_only
|
||||
def mail_domains():
|
||||
|
@ -201,12 +222,14 @@ def mail_domains():
|
|||
|
||||
# DNS
|
||||
|
||||
|
||||
@app.route('/dns/zones')
|
||||
@authorized_personnel_only
|
||||
def dns_zones():
|
||||
from dns_update import get_dns_zones
|
||||
return json_response([z[0] for z in get_dns_zones(env)])
|
||||
|
||||
|
||||
@app.route('/dns/update', methods=['POST'])
|
||||
@authorized_personnel_only
|
||||
def dns_update():
|
||||
|
@ -216,11 +239,13 @@ def dns_update():
|
|||
except Exception as e:
|
||||
return (str(e), 500)
|
||||
|
||||
|
||||
@app.route('/dns/secondary-nameserver')
|
||||
@authorized_personnel_only
|
||||
def dns_get_secondary_nameserver():
|
||||
from dns_update import get_custom_dns_config
|
||||
return json_response({ "hostname": get_custom_dns_config(env).get("_secondary_nameserver") })
|
||||
return json_response({"hostname": get_custom_dns_config(env).get("_secondary_nameserver")})
|
||||
|
||||
|
||||
@app.route('/dns/secondary-nameserver', methods=['POST'])
|
||||
@authorized_personnel_only
|
||||
|
@ -231,6 +256,7 @@ def dns_set_secondary_nameserver():
|
|||
except ValueError as e:
|
||||
return (str(e), 400)
|
||||
|
||||
|
||||
@app.route('/dns/set')
|
||||
@authorized_personnel_only
|
||||
def dns_get_records():
|
||||
|
@ -243,6 +269,7 @@ def dns_get_records():
|
|||
"value": r[2],
|
||||
} for r in records])
|
||||
|
||||
|
||||
@app.route('/dns/set/<qname>', methods=['POST'])
|
||||
@app.route('/dns/set/<qname>/<rtype>', methods=['POST'])
|
||||
@app.route('/dns/set/<qname>/<rtype>/<value>', methods=['POST'])
|
||||
|
@ -256,7 +283,8 @@ def dns_set_record(qname, rtype="A", value=None):
|
|||
if value is None:
|
||||
value = request.form.get("value")
|
||||
if value is None:
|
||||
value = request.environ.get("HTTP_X_FORWARDED_FOR") # normally REMOTE_ADDR but we're behind nginx as a reverse proxy
|
||||
# normally REMOTE_ADDR but we're behind nginx as a reverse proxy
|
||||
value = request.environ.get("HTTP_X_FORWARDED_FOR")
|
||||
if value == '' or value == '__delete__':
|
||||
# request deletion
|
||||
value = None
|
||||
|
@ -266,6 +294,7 @@ def dns_set_record(qname, rtype="A", value=None):
|
|||
except ValueError as e:
|
||||
return (str(e), 400)
|
||||
|
||||
|
||||
@app.route('/dns/dump')
|
||||
@authorized_personnel_only
|
||||
def dns_get_dump():
|
||||
|
@ -274,6 +303,7 @@ def dns_get_dump():
|
|||
|
||||
# SSL
|
||||
|
||||
|
||||
@app.route('/ssl/csr/<domain>', methods=['POST'])
|
||||
@authorized_personnel_only
|
||||
def ssl_get_csr(domain):
|
||||
|
@ -281,6 +311,7 @@ def ssl_get_csr(domain):
|
|||
ssl_key, ssl_certificate, ssl_via = get_domain_ssl_files(domain, env)
|
||||
return create_csr(domain, ssl_key, env)
|
||||
|
||||
|
||||
@app.route('/ssl/install', methods=['POST'])
|
||||
@authorized_personnel_only
|
||||
def ssl_install_cert():
|
||||
|
@ -292,12 +323,14 @@ def ssl_install_cert():
|
|||
|
||||
# WEB
|
||||
|
||||
|
||||
@app.route('/web/domains')
|
||||
@authorized_personnel_only
|
||||
def web_get_domains():
|
||||
from web_update import get_web_domains_info
|
||||
return json_response(get_web_domains_info(env))
|
||||
|
||||
|
||||
@app.route('/web/update', methods=['POST'])
|
||||
@authorized_personnel_only
|
||||
def web_update():
|
||||
|
@ -306,27 +339,36 @@ def web_update():
|
|||
|
||||
# System
|
||||
|
||||
|
||||
@app.route('/system/status', methods=["POST"])
|
||||
@authorized_personnel_only
|
||||
def system_status():
|
||||
from status_checks import run_checks
|
||||
|
||||
class WebOutput:
|
||||
def __init__(self):
|
||||
self.items = []
|
||||
|
||||
def add_heading(self, heading):
|
||||
self.items.append({ "type": "heading", "text": heading, "extra": [] })
|
||||
self.items.append({"type": "heading", "text": heading, "extra": []})
|
||||
|
||||
def print_ok(self, message):
|
||||
self.items.append({ "type": "ok", "text": message, "extra": [] })
|
||||
self.items.append({"type": "ok", "text": message, "extra": []})
|
||||
|
||||
def print_error(self, message):
|
||||
self.items.append({ "type": "error", "text": message, "extra": [] })
|
||||
self.items.append({"type": "error", "text": message, "extra": []})
|
||||
|
||||
def print_warning(self, message):
|
||||
self.items.append({ "type": "warning", "text": message, "extra": [] })
|
||||
self.items.append({"type": "warning", "text": message, "extra": []})
|
||||
|
||||
def print_line(self, message, monospace=False):
|
||||
self.items[-1]["extra"].append({ "text": message, "monospace": monospace })
|
||||
self.items[-1]["extra"].append({"text": message, "monospace": monospace})
|
||||
|
||||
output = WebOutput()
|
||||
run_checks(env, output, pool)
|
||||
return json_response(output.items)
|
||||
|
||||
|
||||
@app.route('/system/updates')
|
||||
@authorized_personnel_only
|
||||
def show_updates():
|
||||
|
@ -336,6 +378,7 @@ def show_updates():
|
|||
% (p["package"], p["version"])
|
||||
for p in list_apt_updates())
|
||||
|
||||
|
||||
@app.route('/system/update-packages', methods=["POST"])
|
||||
@authorized_personnel_only
|
||||
def do_updates():
|
||||
|
@ -344,6 +387,7 @@ def do_updates():
|
|||
"DEBIAN_FRONTEND": "noninteractive"
|
||||
})
|
||||
|
||||
|
||||
@app.route('/system/backup/status')
|
||||
@authorized_personnel_only
|
||||
def backup_status():
|
||||
|
@ -353,8 +397,10 @@ def backup_status():
|
|||
# APP
|
||||
|
||||
if __name__ == '__main__':
|
||||
if "DEBUG" in os.environ: app.debug = True
|
||||
if "APIKEY" in os.environ: auth_service.key = os.environ["APIKEY"]
|
||||
if "DEBUG" in os.environ:
|
||||
app.debug = True
|
||||
if "APIKEY" in os.environ:
|
||||
auth_service.key = os.environ["APIKEY"]
|
||||
|
||||
if not app.debug:
|
||||
app.logger.addHandler(utils.create_syslog_handler())
|
||||
|
@ -369,4 +415,3 @@ if __name__ == '__main__':
|
|||
|
||||
# Start the application server. Listens on 127.0.0.1 (IPv4 only).
|
||||
app.run(port=10222)
|
||||
|
||||
|
|
|
@ -4,7 +4,13 @@
|
|||
# and mail aliases and restarts nsd.
|
||||
########################################################################
|
||||
|
||||
import os, os.path, urllib.parse, datetime, re, hashlib, base64
|
||||
import os
|
||||
import os.path
|
||||
import urllib.parse
|
||||
import datetime
|
||||
import re
|
||||
import hashlib
|
||||
import base64
|
||||
import ipaddress
|
||||
import rtyaml
|
||||
import dns.resolver
|
||||
|
@ -12,6 +18,7 @@ import dns.resolver
|
|||
from mailconfig import get_mail_domains
|
||||
from utils import shell, load_env_vars_from_file, safe_domain_name, sort_domains
|
||||
|
||||
|
||||
def get_dns_domains(env):
|
||||
# Add all domain names in use by email users and mail aliases and ensure
|
||||
# PRIMARY_HOSTNAME is in the list.
|
||||
|
@ -20,6 +27,7 @@ def get_dns_domains(env):
|
|||
domains.add(env['PRIMARY_HOSTNAME'])
|
||||
return domains
|
||||
|
||||
|
||||
def get_dns_zones(env):
|
||||
# What domains should we create DNS zones for? Never create a zone for
|
||||
# a domain & a subdomain of that domain.
|
||||
|
@ -28,7 +36,7 @@ def get_dns_zones(env):
|
|||
# Exclude domains that are subdomains of other domains we know. Proceed
|
||||
# by looking at shorter domains first.
|
||||
zone_domains = set()
|
||||
for domain in sorted(domains, key=lambda d : len(d)):
|
||||
for domain in sorted(domains, key=lambda d: len(d)):
|
||||
for d in zone_domains:
|
||||
if domain.endswith("." + d):
|
||||
# We found a parent domain already in the list.
|
||||
|
@ -45,22 +53,25 @@ def get_dns_zones(env):
|
|||
# Sort the list so that the order is nice and so that nsd.conf has a
|
||||
# stable order so we don't rewrite the file & restart the service
|
||||
# meaninglessly.
|
||||
zone_order = sort_domains([ zone[0] for zone in zonefiles ], env)
|
||||
zonefiles.sort(key = lambda zone : zone_order.index(zone[0]) )
|
||||
zone_order = sort_domains([zone[0] for zone in zonefiles], env)
|
||||
zonefiles.sort(key=lambda zone: zone_order.index(zone[0]))
|
||||
|
||||
return zonefiles
|
||||
|
||||
|
||||
def get_custom_dns_config(env):
|
||||
try:
|
||||
return rtyaml.load(open(os.path.join(env['STORAGE_ROOT'], 'dns/custom.yaml')))
|
||||
except:
|
||||
return { }
|
||||
return {}
|
||||
|
||||
|
||||
def write_custom_dns_config(config, env):
|
||||
config_yaml = rtyaml.dump(config)
|
||||
with open(os.path.join(env['STORAGE_ROOT'], 'dns/custom.yaml'), "w") as f:
|
||||
f.write(config_yaml)
|
||||
|
||||
|
||||
def do_dns_update(env, force=False):
|
||||
# What domains (and their zone filenames) should we build?
|
||||
domains = get_dns_domains(env)
|
||||
|
@ -137,6 +148,7 @@ def do_dns_update(env, force=False):
|
|||
|
||||
########################################################################
|
||||
|
||||
|
||||
def build_zone(domain, all_domains, additional_records, env, is_zone=True):
|
||||
records = []
|
||||
|
||||
|
@ -156,7 +168,6 @@ def build_zone(domain, all_domains, additional_records, env, is_zone=True):
|
|||
secondary_ns = additional_records.get("_secondary_nameserver", "ns2." + env["PRIMARY_HOSTNAME"])
|
||||
records.append((None, "NS", secondary_ns+'.', False))
|
||||
|
||||
|
||||
# In PRIMARY_HOSTNAME...
|
||||
if domain == env["PRIMARY_HOSTNAME"]:
|
||||
# Define ns1 and ns2.
|
||||
|
@ -171,7 +182,8 @@ def build_zone(domain, all_domains, additional_records, env, is_zone=True):
|
|||
# Set the A/AAAA records. Do this early for the PRIMARY_HOSTNAME so that the user cannot override them
|
||||
# and we can provide different explanatory text.
|
||||
records.append((None, "A", env["PUBLIC_IP"], "Required. Sets the IP address of the box."))
|
||||
if env.get("PUBLIC_IPV6"): records.append((None, "AAAA", env["PUBLIC_IPV6"], "Required. Sets the IPv6 address of the box."))
|
||||
if env.get("PUBLIC_IPV6"):
|
||||
records.append((None, "AAAA", env["PUBLIC_IPV6"], "Required. Sets the IPv6 address of the box."))
|
||||
|
||||
# Add a DANE TLSA record for SMTP.
|
||||
records.append(("_25._tcp", "TLSA", build_tlsa_record(env), "Recommended when DNSSEC is enabled. Advertises to mail servers connecting to the box that mandatory encryption should be used."))
|
||||
|
@ -194,7 +206,7 @@ def build_zone(domain, all_domains, additional_records, env, is_zone=True):
|
|||
subdomain_qname = subdomain[0:-len("." + domain)]
|
||||
subzone = build_zone(subdomain, [], additional_records, env, is_zone=False)
|
||||
for child_qname, child_rtype, child_value, child_explanation in subzone:
|
||||
if child_qname == None:
|
||||
if child_qname is None:
|
||||
child_qname = subdomain_qname
|
||||
else:
|
||||
child_qname += "." + subdomain_qname
|
||||
|
@ -208,7 +220,8 @@ def build_zone(domain, all_domains, additional_records, env, is_zone=True):
|
|||
|
||||
# The user may set other records that don't conflict with our settings.
|
||||
for qname, rtype, value in get_custom_records(domain, additional_records, env):
|
||||
if has_rec(qname, rtype): continue
|
||||
if has_rec(qname, rtype):
|
||||
continue
|
||||
records.append((qname, rtype, value, "(Set by user.)"))
|
||||
|
||||
# Add defaults if not overridden by the user's custom settings (and not otherwise configured).
|
||||
|
@ -220,8 +233,12 @@ def build_zone(domain, all_domains, additional_records, env, is_zone=True):
|
|||
("www", "AAAA", env.get('PUBLIC_IPV6'), "Optional. Sets the IPv6 address that www.%s resolves to, e.g. for web hosting." % domain),
|
||||
]
|
||||
for qname, rtype, value, explanation in defaults:
|
||||
if value is None or value.strip() == "": continue # skip IPV6 if not set
|
||||
if not is_zone and qname == "www": continue # don't create any default 'www' subdomains on what are themselves subdomains
|
||||
# skip IPV6 if not set
|
||||
if value is None or value.strip() == "":
|
||||
continue
|
||||
# don't create any default 'www' subdomains on what are themselves subdomains
|
||||
if not is_zone and qname == "www":
|
||||
continue
|
||||
# Set the default record, but not if:
|
||||
# (1) there is not a user-set record of the same type already
|
||||
# (2) there is not a CNAME record already, since you can't set both and who knows what takes precedence
|
||||
|
@ -248,23 +265,25 @@ def build_zone(domain, all_domains, additional_records, env, is_zone=True):
|
|||
if not has_rec(dmarc_qname, "TXT", prefix="v=DMARC1; "):
|
||||
records.append((dmarc_qname, "TXT", 'v=DMARC1; p=reject', "Prevents unauthorized use of this domain name for outbound mail by requiring a valid DKIM signature."))
|
||||
|
||||
|
||||
# Sort the records. The None records *must* go first in the nsd zone file. Otherwise it doesn't matter.
|
||||
records.sort(key = lambda rec : list(reversed(rec[0].split(".")) if rec[0] is not None else ""))
|
||||
records.sort(key=lambda rec: list(reversed(rec[0].split(".")) if rec[0] is not None else ""))
|
||||
|
||||
return records
|
||||
|
||||
########################################################################
|
||||
|
||||
|
||||
def get_custom_records(domain, additional_records, env):
|
||||
for qname, value in additional_records.items():
|
||||
# We don't count the secondary nameserver config (if present) as a record - that would just be
|
||||
# confusing to users. Instead it is accessed/manipulated directly via (get/set)_custom_dns_config.
|
||||
if qname == "_secondary_nameserver": continue
|
||||
if qname == "_secondary_nameserver":
|
||||
continue
|
||||
|
||||
# Is this record for the domain or one of its subdomains?
|
||||
# If `domain` is None, return records for all domains.
|
||||
if domain is not None and qname != domain and not qname.endswith("." + domain): continue
|
||||
if domain is not None and qname != domain and not qname.endswith("." + domain):
|
||||
continue
|
||||
|
||||
# Turn the fully qualified domain name in the YAML file into
|
||||
# our short form (None => domain, or a relative QNAME) if
|
||||
|
@ -280,7 +299,7 @@ def get_custom_records(domain, additional_records, env):
|
|||
if isinstance(value, str):
|
||||
values = [("A", value)]
|
||||
if value == "local" and env.get("PUBLIC_IPV6"):
|
||||
values.append( ("AAAA", value) )
|
||||
values.append(("AAAA", value))
|
||||
|
||||
# A mapping creates multiple records.
|
||||
elif isinstance(value, dict):
|
||||
|
@ -296,12 +315,15 @@ def get_custom_records(domain, additional_records, env):
|
|||
if rtype == "A" and value2 == "local":
|
||||
value2 = env["PUBLIC_IP"]
|
||||
if rtype == "AAAA" and value2 == "local":
|
||||
if "PUBLIC_IPV6" not in env: continue # no IPv6 address is available so don't set anything
|
||||
# no IPv6 address is available so don't set anything
|
||||
if "PUBLIC_IPV6" not in env:
|
||||
continue
|
||||
value2 = env["PUBLIC_IPV6"]
|
||||
yield (qname, rtype, value2)
|
||||
|
||||
########################################################################
|
||||
|
||||
|
||||
def build_tlsa_record(env):
|
||||
# A DANE TLSA record in DNS specifies that connections on a port
|
||||
# must use TLS and the certificate must match a particular certificate.
|
||||
|
@ -325,6 +347,7 @@ def build_tlsa_record(env):
|
|||
# 1: The certificate is SHA256'd here.
|
||||
return "3 0 1 " + certhash
|
||||
|
||||
|
||||
def build_sshfp_records():
|
||||
# The SSHFP record is a way for us to embed this server's SSH public
|
||||
# key fingerprint into the DNS so that remote hosts have an out-of-band
|
||||
|
@ -349,7 +372,8 @@ def build_sshfp_records():
|
|||
# to the zone file (that trigger bumping the serial number).
|
||||
keys = shell("check_output", ["ssh-keyscan", "localhost"])
|
||||
for key in sorted(keys.split("\n")):
|
||||
if key.strip() == "" or key[0] == "#": continue
|
||||
if key.strip() == "" or key[0] == "#":
|
||||
continue
|
||||
try:
|
||||
host, keytype, pubkey = key.split(" ")
|
||||
yield "%d %d ( %s )" % (
|
||||
|
@ -364,6 +388,7 @@ def build_sshfp_records():
|
|||
|
||||
########################################################################
|
||||
|
||||
|
||||
def write_nsd_zone(domain, zonefile, records, env, force):
|
||||
# On the $ORIGIN line, there's typically a ';' comment at the end explaining
|
||||
# what the $ORIGIN line does. Any further data after the domain confuses
|
||||
|
@ -375,7 +400,6 @@ def write_nsd_zone(domain, zonefile, records, env, force):
|
|||
# For the refresh through TTL fields, a good reference is:
|
||||
# http://www.peerwisdom.org/2013/05/15/dns-understanding-the-soa-record/
|
||||
|
||||
|
||||
zone = """
|
||||
$ORIGIN {domain}.
|
||||
$TTL 1800 ; default time to live
|
||||
|
@ -472,10 +496,12 @@ $TTL 1800 ; default time to live
|
|||
with open(zonefile, "w") as f:
|
||||
f.write(zone)
|
||||
|
||||
return True # file is updated
|
||||
# file is updated
|
||||
return True
|
||||
|
||||
########################################################################
|
||||
|
||||
|
||||
def write_nsd_conf(zonefiles, additional_records, env):
|
||||
# Basic header.
|
||||
nsdconf = """
|
||||
|
@ -494,7 +520,8 @@ server:
|
|||
# might have other network interfaces for e.g. tunnelling, we have
|
||||
# to be specific about the network interfaces that nsd binds to.
|
||||
for ipaddr in (env.get("PRIVATE_IP", "") + " " + env.get("PRIVATE_IPV6", "")).split(" "):
|
||||
if ipaddr == "": continue
|
||||
if ipaddr == "":
|
||||
continue
|
||||
nsdconf += " ip-address: %s\n" % ipaddr
|
||||
|
||||
# Append the zones.
|
||||
|
@ -517,7 +544,6 @@ zone:
|
|||
provide-xfr: %s NOKEY
|
||||
""" % (ipaddr, ipaddr)
|
||||
|
||||
|
||||
# Check if the nsd.conf is changing. If it isn't changing,
|
||||
# return False to flag that no change was made.
|
||||
with open("/etc/nsd/nsd.conf") as f:
|
||||
|
@ -531,9 +557,9 @@ zone:
|
|||
|
||||
########################################################################
|
||||
|
||||
|
||||
def dnssec_choose_algo(domain, env):
|
||||
if '.' in domain and domain.rsplit('.')[-1] in \
|
||||
("email", "guide", "fund"):
|
||||
if '.' in domain and domain.rsplit('.')[-1] in ("email", "guide", "fund"):
|
||||
# At GoDaddy, RSASHA256 is the only algorithm supported
|
||||
# for .email and .guide.
|
||||
# A variety of algorithms are supported for .fund. This
|
||||
|
@ -544,6 +570,7 @@ def dnssec_choose_algo(domain, env):
|
|||
# on existing users. We'll probably want to migrate to SHA256 later.
|
||||
return "RSASHA1-NSEC3-SHA1"
|
||||
|
||||
|
||||
def sign_zone(domain, zonefile, env):
|
||||
algo = dnssec_choose_algo(domain, env)
|
||||
dnssec_keys = load_env_vars_from_file(os.path.join(env['STORAGE_ROOT'], 'dns/dnssec/%s.conf' % algo))
|
||||
|
@ -562,27 +589,34 @@ def sign_zone(domain, zonefile, env):
|
|||
# we (root) can read.
|
||||
files_to_kill = []
|
||||
for key in ("KSK", "ZSK"):
|
||||
if dnssec_keys.get(key, "").strip() == "": raise Exception("DNSSEC is not properly set up.")
|
||||
if dnssec_keys.get(key, "").strip() == "":
|
||||
raise Exception("DNSSEC is not properly set up.")
|
||||
oldkeyfn = os.path.join(env['STORAGE_ROOT'], 'dns/dnssec/' + dnssec_keys[key])
|
||||
newkeyfn = '/tmp/' + dnssec_keys[key].replace("_domain_", domain)
|
||||
dnssec_keys[key] = newkeyfn
|
||||
for ext in (".private", ".key"):
|
||||
if not os.path.exists(oldkeyfn + ext): raise Exception("DNSSEC is not properly set up.")
|
||||
if not os.path.exists(oldkeyfn + ext):
|
||||
raise Exception("DNSSEC is not properly set up.")
|
||||
with open(oldkeyfn + ext, "r") as fr:
|
||||
keydata = fr.read()
|
||||
keydata = keydata.replace("_domain_", domain) # trick ldns-signkey into letting our generic key be used by this zone
|
||||
# trick ldns-signkey into letting our generic key be used by this zone
|
||||
keydata = keydata.replace("_domain_", domain)
|
||||
fn = newkeyfn + ext
|
||||
prev_umask = os.umask(0o77) # ensure written file is not world-readable
|
||||
# ensure written file is not world-readable
|
||||
prev_umask = os.umask(0o77)
|
||||
try:
|
||||
with open(fn, "w") as fw:
|
||||
fw.write(keydata)
|
||||
finally:
|
||||
os.umask(prev_umask) # other files we write should be world-readable
|
||||
# other files we write should be world-readable
|
||||
os.umask(prev_umask)
|
||||
files_to_kill.append(fn)
|
||||
|
||||
# Do the signing.
|
||||
expiry_date = (datetime.datetime.now() + datetime.timedelta(days=30)).strftime("%Y%m%d")
|
||||
shell('check_call', ["/usr/bin/ldns-signzone",
|
||||
shell('check_call', [
|
||||
"/usr/bin/ldns-signzone",
|
||||
|
||||
# expire the zone after 30 days
|
||||
"-e", expiry_date,
|
||||
|
||||
|
@ -607,7 +641,8 @@ def sign_zone(domain, zonefile, env):
|
|||
# actually be deployed. Preferebly the first.
|
||||
with open("/etc/nsd/zones/" + zonefile + ".ds", "w") as f:
|
||||
for digest_type in ('2', '1'):
|
||||
rr_ds = shell('check_output', ["/usr/bin/ldns-key2ds",
|
||||
rr_ds = shell('check_output', [
|
||||
"/usr/bin/ldns-key2ds",
|
||||
"-n", # output to stdout
|
||||
"-" + digest_type, # 1=SHA1, 2=SHA256
|
||||
dnssec_keys["KSK"] + ".key"
|
||||
|
@ -620,6 +655,7 @@ def sign_zone(domain, zonefile, env):
|
|||
|
||||
########################################################################
|
||||
|
||||
|
||||
def write_opendkim_tables(domains, env):
|
||||
# Append a record to OpenDKIM's KeyTable and SigningTable for each domain
|
||||
# that we send mail from (zones and all subdomains).
|
||||
|
@ -638,8 +674,7 @@ def write_opendkim_tables(domains, env):
|
|||
# Elsewhere we set the DMARC policy for each domain such that mail claiming
|
||||
# to be From: the domain must be signed with a DKIM key on the same domain.
|
||||
# So we must have a separate KeyTable entry for each domain.
|
||||
"SigningTable":
|
||||
"".join(
|
||||
"SigningTable": "".join(
|
||||
"*@{domain} {domain}\n".format(domain=domain)
|
||||
for domain in domains
|
||||
),
|
||||
|
@ -647,8 +682,7 @@ def write_opendkim_tables(domains, env):
|
|||
# The KeyTable specifies the signing domain, the DKIM selector, and the
|
||||
# path to the private key to use for signing some mail. Per DMARC, the
|
||||
# signing domain must match the sender's From: domain.
|
||||
"KeyTable":
|
||||
"".join(
|
||||
"KeyTable": "".join(
|
||||
"{domain} {domain}:mail:{key_file}\n".format(domain=domain, key_file=opendkim_key_file)
|
||||
for domain in domains
|
||||
),
|
||||
|
@ -673,6 +707,7 @@ def write_opendkim_tables(domains, env):
|
|||
|
||||
########################################################################
|
||||
|
||||
|
||||
def set_custom_dns_record(qname, rtype, value, env):
|
||||
# validate qname
|
||||
for zone, fn in get_dns_zones(env):
|
||||
|
@ -689,8 +724,10 @@ def set_custom_dns_record(qname, rtype, value, env):
|
|||
if value is not None:
|
||||
if rtype in ("A", "AAAA"):
|
||||
v = ipaddress.ip_address(value)
|
||||
if rtype == "A" and not isinstance(v, ipaddress.IPv4Address): raise ValueError("That's an IPv6 address.")
|
||||
if rtype == "AAAA" and not isinstance(v, ipaddress.IPv6Address): raise ValueError("That's an IPv4 address.")
|
||||
if rtype == "A" and not isinstance(v, ipaddress.IPv4Address):
|
||||
raise ValueError("That's an IPv6 address.")
|
||||
if rtype == "AAAA" and not isinstance(v, ipaddress.IPv6Address):
|
||||
raise ValueError("That's an IPv4 address.")
|
||||
elif rtype in ("CNAME", "TXT", "SRV"):
|
||||
# anything goes
|
||||
pass
|
||||
|
@ -710,7 +747,7 @@ def set_custom_dns_record(qname, rtype, value, env):
|
|||
config[qname] = value
|
||||
else:
|
||||
# Add this record. This is the qname's first record.
|
||||
config[qname] = { rtype: value }
|
||||
config[qname] = {rtype: value}
|
||||
else:
|
||||
if isinstance(config[qname], str):
|
||||
# This is a short-form 'qname: value' implicit-A record.
|
||||
|
@ -728,7 +765,7 @@ def set_custom_dns_record(qname, rtype, value, env):
|
|||
config[qname] = value
|
||||
else:
|
||||
# Expand short form so we can add a new record type.
|
||||
config[qname] = { "A": config[qname], rtype: value }
|
||||
config[qname] = {"A": config[qname], rtype: value}
|
||||
else:
|
||||
# This is the qname: { ... } (dict) format.
|
||||
if value is None:
|
||||
|
@ -754,6 +791,7 @@ def set_custom_dns_record(qname, rtype, value, env):
|
|||
|
||||
########################################################################
|
||||
|
||||
|
||||
def set_secondary_dns(hostname, env):
|
||||
config = get_custom_dns_config(env)
|
||||
|
||||
|
@ -786,16 +824,21 @@ def justtestingdotemail(domain, records):
|
|||
# Ideally if dns4e.com supported NS records we would just have it
|
||||
# delegate DNS to us, but instead we will populate the whole zone.
|
||||
|
||||
import subprocess, json, urllib.parse
|
||||
import subprocess
|
||||
import json
|
||||
import urllib.parse
|
||||
|
||||
if not domain.endswith(".justtesting.email"):
|
||||
return
|
||||
|
||||
for subdomain, querytype, value, explanation in records:
|
||||
if querytype in ("NS",): continue
|
||||
if subdomain in ("www", "ns1", "ns2"): continue # don't do unnecessary things
|
||||
if querytype in ("NS",):
|
||||
continue
|
||||
# don't do unnecessary things
|
||||
if subdomain in ("www", "ns1", "ns2"):
|
||||
continue
|
||||
|
||||
if subdomain == None:
|
||||
if subdomain is None:
|
||||
subdomain = domain
|
||||
else:
|
||||
subdomain = subdomain + "." + domain
|
||||
|
@ -821,6 +864,7 @@ def justtestingdotemail(domain, records):
|
|||
|
||||
########################################################################
|
||||
|
||||
|
||||
def build_recommended_dns(env):
|
||||
ret = []
|
||||
domains = get_dns_domains(env)
|
||||
|
@ -833,11 +877,11 @@ def build_recommended_dns(env):
|
|||
records = [r for r in records if r[3] is not False]
|
||||
|
||||
# put Required at the top, then Recommended, then everythiing else
|
||||
records.sort(key = lambda r : 0 if r[3].startswith("Required.") else (1 if r[3].startswith("Recommended.") else 2))
|
||||
records.sort(key=lambda r: 0 if r[3].startswith("Required.") else (1 if r[3].startswith("Recommended.") else 2))
|
||||
|
||||
# expand qnames
|
||||
for i in range(len(records)):
|
||||
if records[i][0] == None:
|
||||
if records[i][0] is None:
|
||||
qname = domain
|
||||
else:
|
||||
qname = records[i][0] + "." + domain
|
||||
|
|
|
@@ -1,23 +1,26 @@
#!/usr/bin/python3

import re, os.path
import re
import os.path
import dateutil.parser

import mailconfig
import utils


def scan_mail_log(logger, env):
collector = {
"other-services": set(),
"imap-logins": { },
"postgrey": { },
"rejected-mail": { },
"imap-logins": {},
"postgrey": {},
"rejected-mail": {},
}

collector["real_mail_addresses"] = set(mailconfig.get_mail_users(env)) | set(alias[0] for alias in mailconfig.get_mail_aliases(env))

for fn in ('/var/log/mail.log.1', '/var/log/mail.log'):
if not os.path.exists(fn): continue
if not os.path.exists(fn):
continue
with open(fn, 'rb') as log:
for line in log:
line = line.decode("utf8", errors='replace')

@@ -27,7 +30,7 @@ def scan_mail_log(logger, env):
logger.add_heading("Recent IMAP Logins")
logger.print_block("The most recent login from each remote IP adddress is show.")
for k in utils.sort_email_addresses(collector["imap-logins"], env):
for ip, date in sorted(collector["imap-logins"][k].items(), key = lambda kv : kv[1]):
for ip, date in sorted(collector["imap-logins"][k].items(), key=lambda kv: kv[1]):
logger.print_line(k + "\t" + str(date) + "\t" + ip)

if collector["postgrey"]:

@@ -35,7 +38,7 @@ def scan_mail_log(logger, env):
logger.print_block("The following mail was greylisted, meaning the emails were temporarily rejected. Legitimate senders will try again within ten minutes.")
logger.print_line("recipient" + "\t" + "received" + "\t" + "sender" + "\t" + "delivered")
for recipient in utils.sort_email_addresses(collector["postgrey"], env):
for (client_address, sender), (first_date, delivered_date) in sorted(collector["postgrey"][recipient].items(), key = lambda kv : kv[1][0]):
for (client_address, sender), (first_date, delivered_date) in sorted(collector["postgrey"][recipient].items(), key=lambda kv: kv[1][0]):
logger.print_line(recipient + "\t" + str(first_date) + "\t" + sender + "\t" + (("delivered " + str(delivered_date)) if delivered_date else "no retry yet"))

if collector["rejected-mail"]:

@@ -49,9 +52,11 @@ def scan_mail_log(logger, env):
logger.add_heading("Other")
logger.print_block("Unrecognized services in the log: " + ", ".join(collector["other-services"]))


def scan_mail_log_line(line, collector):
m = re.match(r"(\S+ \d+ \d+:\d+:\d+) (\S+) (\S+?)(\[\d+\])?: (.*)", line)
if not m: return
if not m:
return

date, system, service, pid, log = m.groups()
date = dateutil.parser.parse(date)

@@ -66,14 +71,16 @@ def scan_mail_log_line(line, collector):
scan_postfix_smtpd_line(date, log, collector)

elif service in ("postfix/qmgr", "postfix/pickup", "postfix/cleanup",
"postfix/scache", "spampd", "postfix/anvil", "postfix/master",
"opendkim", "postfix/lmtp", "postfix/tlsmgr"):
"postfix/scache", "spampd", "postfix/anvil",
"postfix/master", "opendkim", "postfix/lmtp",
"postfix/tlsmgr"):
# nothing to look at
pass

else:
collector["other-services"].add(service)


def scan_dovecot_line(date, log, collector):
m = re.match("imap-login: Login: user=<(.*?)>, method=PLAIN, rip=(.*?),", log)
if m:

@@ -81,6 +88,7 @@ def scan_dovecot_line(date, log, collector):
if ip != "127.0.0.1": # local login from webmail/zpush
collector["imap-logins"].setdefault(login, {})[ip] = date


def scan_postgrey_line(date, log, collector):
m = re.match("action=(greylist|pass), reason=(.*?), (?:delay=\d+, )?client_name=(.*), client_address=(.*), sender=(.*), recipient=(.*)", log)
if m:

@@ -91,6 +99,7 @@ def scan_postgrey_line(date, log, collector):
elif action == "pass" and reason == "triplet found" and key in collector["postgrey"].get(recipient, {}):
collector["postgrey"][recipient][key] = (collector["postgrey"][recipient][key][0], date)


def scan_postfix_smtpd_line(date, log, collector):
m = re.match("NOQUEUE: reject: RCPT from .*?: (.*?); from=<(.*?)> to=<(.*?)>", log)
if m:

@@ -112,7 +121,7 @@ def scan_postfix_smtpd_line(date, log, collector):
if m:
message = "domain blocked: " + m.group(2)

collector["rejected-mail"].setdefault(recipient, []).append( (date, sender, message) )
collector["rejected-mail"].setdefault(recipient, []).append((date, sender, message))


if __name__ == "__main__":

@ -1,8 +1,13 @@
|
|||
#!/usr/bin/python3
|
||||
|
||||
import subprocess, shutil, os, sqlite3, re
|
||||
import subprocess
|
||||
import shutil
|
||||
import os
|
||||
import sqlite3
|
||||
import re
|
||||
import utils
|
||||
|
||||
|
||||
def validate_email(email, mode=None):
|
||||
# There are a lot of characters permitted in email addresses, but
|
||||
# Dovecot's sqlite driver seems to get confused if there are any
|
||||
|
@ -10,7 +15,8 @@ def validate_email(email, mode=None):
|
|||
# the mailbox path name is based on the email address, the address
|
||||
# shouldn't be absurdly long and must not have a forward slash.
|
||||
|
||||
if len(email) > 255: return False
|
||||
if len(email) > 255:
|
||||
return False
|
||||
|
||||
if mode == 'user':
|
||||
# For Dovecot's benefit, only allow basic characters.
|
||||
|
@ -40,7 +46,8 @@ def validate_email(email, mode=None):
|
|||
|
||||
# Check the regular expression.
|
||||
m = re.match(ADDR_SPEC, email)
|
||||
if not m: return False
|
||||
if not m:
|
||||
return False
|
||||
|
||||
# Check that the domain part is IDNA-encodable.
|
||||
localpart, domainpart = m.groups()
|
||||
|
@ -51,6 +58,7 @@ def validate_email(email, mode=None):
|
|||
|
||||
return True
|
||||
|
||||
|
||||
def sanitize_idn_email_address(email):
|
||||
# Convert an IDNA-encoded email address (domain part) into Unicode
|
||||
# before storing in our database. Chrome may IDNA-ize <input type="email">
|
||||
|
@ -65,6 +73,7 @@ def sanitize_idn_email_address(email):
|
|||
# leave unchanged.
|
||||
return email
|
||||
|
||||
|
||||
def open_database(env, with_connection=False):
|
||||
conn = sqlite3.connect(env["STORAGE_ROOT"] + "/mail/users.sqlite")
|
||||
if not with_connection:
|
||||
|
@ -72,13 +81,15 @@ def open_database(env, with_connection=False):
|
|||
else:
|
||||
return conn, conn.cursor()
|
||||
|
||||
|
||||
def get_mail_users(env):
|
||||
# Returns a flat, sorted list of all user accounts.
|
||||
c = open_database(env)
|
||||
c.execute('SELECT email FROM users')
|
||||
users = [ row[0] for row in c.fetchall() ]
|
||||
users = [row[0] for row in c.fetchall()]
|
||||
return utils.sort_email_addresses(users, env)
|
||||
|
||||
|
||||
def get_mail_users_ex(env, with_archived=False, with_slow_info=False):
|
||||
# Returns a complex data structure of all user accounts, optionally
|
||||
# including archived (status="inactive") accounts.
|
||||
|
@ -134,7 +145,8 @@ def get_mail_users_ex(env, with_archived=False, with_slow_info=False):
|
|||
for user in os.listdir(os.path.join(root, domain)):
|
||||
email = user + "@" + domain
|
||||
mbox = os.path.join(root, domain, user)
|
||||
if email in active_accounts: continue
|
||||
if email in active_accounts:
|
||||
continue
|
||||
user = {
|
||||
"email": email,
|
||||
"privileges": "",
|
||||
|
@ -146,7 +158,7 @@ def get_mail_users_ex(env, with_archived=False, with_slow_info=False):
|
|||
user["mailbox_size"] = utils.du(mbox)
|
||||
|
||||
# Group by domain.
|
||||
domains = { }
|
||||
domains = {}
|
||||
for user in users:
|
||||
domain = get_domain(user["email"])
|
||||
if domain not in domains:
|
||||
|
@ -161,10 +173,11 @@ def get_mail_users_ex(env, with_archived=False, with_slow_info=False):
|
|||
|
||||
# Sort users within each domain first by status then lexicographically by email address.
|
||||
for domain in domains:
|
||||
domain["users"].sort(key = lambda user : (user["status"] != "active", user["email"]))
|
||||
domain["users"].sort(key=lambda user: (user["status"] != "active", user["email"]))
|
||||
|
||||
return domains
|
||||
|
||||
|
||||
def get_admins(env):
|
||||
# Returns a set of users with admin privileges.
|
||||
users = set()
|
||||
|
@ -174,16 +187,19 @@ def get_admins(env):
|
|||
users.add(user["email"])
|
||||
return users
|
||||
|
||||
|
||||
def get_mail_aliases(env):
|
||||
# Returns a sorted list of tuples of (alias, forward-to string).
|
||||
c = open_database(env)
|
||||
c.execute('SELECT source, destination FROM aliases')
|
||||
aliases = { row[0]: row[1] for row in c.fetchall() } # make dict
|
||||
# make dict
|
||||
aliases = {row[0]: row[1] for row in c.fetchall()}
|
||||
|
||||
# put in a canonical order: sort by domain, then by email address lexicographically
|
||||
aliases = [ (source, aliases[source]) for source in utils.sort_email_addresses(aliases.keys(), env) ]
|
||||
aliases = [(source, aliases[source]) for source in utils.sort_email_addresses(aliases.keys(), env)]
|
||||
return aliases
|
||||
|
||||
|
||||
def get_mail_aliases_ex(env):
|
||||
# Returns a complex data structure of all mail aliases, similar
|
||||
# to get_mail_users_ex.
|
||||
|
@ -227,17 +243,19 @@ def get_mail_aliases_ex(env):
|
|||
|
||||
# Sort aliases within each domain first by required-ness then lexicographically by source address.
|
||||
for domain in domains:
|
||||
domain["aliases"].sort(key = lambda alias : (alias["required"], alias["source"]))
|
||||
domain["aliases"].sort(key=lambda alias: (alias["required"], alias["source"]))
|
||||
return domains
|
||||
|
||||
|
||||
def get_mail_alias_map(env):
|
||||
aliases = { }
|
||||
aliases = {}
|
||||
for alias, targets in get_mail_aliases(env):
|
||||
for em in targets.split(","):
|
||||
em = em.strip().lower()
|
||||
aliases.setdefault(em, []).append(alias)
|
||||
return aliases
|
||||
|
||||
|
||||
def evaluate_mail_alias_map(email, aliases, env):
|
||||
ret = set()
|
||||
for alias in aliases.get(email.lower(), []):
|
||||
|
@ -245,15 +263,18 @@ def evaluate_mail_alias_map(email, aliases, env):
|
|||
ret |= evaluate_mail_alias_map(alias, aliases, env)
|
||||
return ret
|
||||
|
||||
|
||||
def get_domain(emailaddr):
|
||||
return emailaddr.split('@', 1)[1]
|
||||
|
||||
def get_mail_domains(env, filter_aliases=lambda alias : True):
|
||||
|
||||
def get_mail_domains(env, filter_aliases=lambda alias: True):
|
||||
return set(
|
||||
[get_domain(addr) for addr in get_mail_users(env)]
|
||||
+ [get_domain(source) for source, target in get_mail_aliases(env) if filter_aliases((source, target)) ]
|
||||
[get_domain(addr) for addr in get_mail_users(env)] +
|
||||
[get_domain(source) for source, target in get_mail_aliases(env) if filter_aliases((source, target))]
|
||||
)
|
||||
|
||||
|
||||
def add_mail_user(email, pw, privs, env):
|
||||
# accept IDNA domain names but normalize to Unicode before going into database
|
||||
email = sanitize_idn_email_address(email)
|
||||
|
@ -273,7 +294,8 @@ def add_mail_user(email, pw, privs, env):
|
|||
privs = privs.split("\n")
|
||||
for p in privs:
|
||||
validation = validate_privilege(p)
|
||||
if validation: return validation
|
||||
if validation:
|
||||
return validation
|
||||
|
||||
# get the database
|
||||
conn, c = open_database(env, with_connection=True)
|
||||
|
@ -311,6 +333,7 @@ def add_mail_user(email, pw, privs, env):
|
|||
# Update things in case any new domains are added.
|
||||
return kick(env, "mail user added")
|
||||
|
||||
|
||||
def set_mail_password(email, pw, env):
|
||||
# accept IDNA domain names but normalize to Unicode before going into database
|
||||
email = sanitize_idn_email_address(email)
|
||||
|
@ -329,12 +352,14 @@ def set_mail_password(email, pw, env):
|
|||
conn.commit()
|
||||
return "OK"
|
||||
|
||||
|
||||
def hash_password(pw):
|
||||
# Turn the plain password into a Dovecot-format hashed password, meaning
|
||||
# something like "{SCHEME}hashedpassworddata".
|
||||
# http://wiki2.dovecot.org/Authentication/PasswordSchemes
|
||||
return utils.shell('check_output', ["/usr/bin/doveadm", "pw", "-s", "SHA512-CRYPT", "-p", pw]).strip()
|
||||
|
||||
|
||||
def get_mail_password(email, env):
|
||||
# Gets the hashed password for a user. Passwords are stored in Dovecot's
|
||||
# password format, with a prefixed scheme.
|
||||
|
@ -347,6 +372,7 @@ def get_mail_password(email, env):
|
|||
raise ValueError("That's not a user (%s)." % email)
|
||||
return rows[0][0]
|
||||
|
||||
|
||||
def remove_mail_user(email, env):
|
||||
# accept IDNA domain names but normalize to Unicode before going into database
|
||||
email = sanitize_idn_email_address(email)
|
||||
|
@ -361,9 +387,11 @@ def remove_mail_user(email, env):
|
|||
# Update things in case any domains are removed.
|
||||
return kick(env, "mail user removed")
|
||||
|
||||
|
||||
def parse_privs(value):
|
||||
return [p for p in value.split("\n") if p.strip() != ""]
|
||||
|
||||
|
||||
def get_mail_user_privileges(email, env):
|
||||
# accept IDNA domain names but normalize to Unicode before going into database
|
||||
email = sanitize_idn_email_address(email)
|
||||
|
@ -376,22 +404,27 @@ def get_mail_user_privileges(email, env):
|
|||
return ("That's not a user (%s)." % email, 400)
|
||||
return parse_privs(rows[0][0])
|
||||
|
||||
|
||||
def validate_privilege(priv):
|
||||
if "\n" in priv or priv.strip() == "":
|
||||
return ("That's not a valid privilege (%s)." % priv, 400)
|
||||
return None
|
||||
|
||||
|
||||
def add_remove_mail_user_privilege(email, priv, action, env):
|
||||
# accept IDNA domain names but normalize to Unicode before going into database
|
||||
email = sanitize_idn_email_address(email)
|
||||
|
||||
# validate
|
||||
validation = validate_privilege(priv)
|
||||
if validation: return validation
|
||||
if validation:
|
||||
return validation
|
||||
|
||||
# get existing privs, but may fail
|
||||
privs = get_mail_user_privileges(email, env)
|
||||
if isinstance(privs, tuple): return privs # error
|
||||
# error
|
||||
if isinstance(privs, tuple):
|
||||
return privs
|
||||
|
||||
# update privs set
|
||||
if action == "add":
|
||||
|
@ -411,6 +444,7 @@ def add_remove_mail_user_privilege(email, priv, action, env):
|
|||
|
||||
return "OK"
|
||||
|
||||
|
||||
def add_mail_alias(source, destination, env, update_if_exists=False, do_kick=True):
|
||||
# accept IDNA domain names but normalize to Unicode before going into database
|
||||
source = sanitize_idn_email_address(source)
|
||||
|
@ -434,8 +468,10 @@ def add_mail_alias(source, destination, env, update_if_exists=False, do_kick=Tru
|
|||
for line in destination.split("\n"):
|
||||
for email in line.split(","):
|
||||
email = email.strip()
|
||||
email = sanitize_idn_email_address(email) # Unicode => IDNA
|
||||
if email == "": continue
|
||||
# Unicode => IDNA
|
||||
email = sanitize_idn_email_address(email)
|
||||
if email == "":
|
||||
continue
|
||||
if not validate_email(email):
|
||||
return ("Invalid destination email address (%s)." % email, 400)
|
||||
dests.append(email)
|
||||
|
@ -461,6 +497,7 @@ def add_mail_alias(source, destination, env, update_if_exists=False, do_kick=Tru
|
|||
# Update things in case any new domains are added.
|
||||
return kick(env, return_status)
|
||||
|
||||
|
||||
def remove_mail_alias(source, env, do_kick=True):
|
||||
# accept IDNA domain names but normalize to Unicode before going into database
|
||||
source = sanitize_idn_email_address(source)
|
||||
|
@ -476,9 +513,11 @@ def remove_mail_alias(source, env, do_kick=True):
|
|||
# Update things in case any domains are removed.
|
||||
return kick(env, "alias removed")
|
||||
|
||||
|
||||
def get_system_administrator(env):
|
||||
return "administrator@" + env['PRIMARY_HOSTNAME']
|
||||
|
||||
|
||||
def get_required_aliases(env):
|
||||
# These are the aliases that must exist.
|
||||
aliases = set()
|
||||
|
@ -489,8 +528,9 @@ def get_required_aliases(env):
|
|||
# Get a list of domains we serve mail for, except ones for which the only
|
||||
# email on that domain is a postmaster/admin alias to the administrator
|
||||
# or a wildcard alias (since it will forward postmaster/admin).
|
||||
# JMT: no clean way to PEP8 wrap this
|
||||
real_mail_domains = get_mail_domains(env,
|
||||
filter_aliases = lambda alias :
|
||||
filter_aliases=lambda alias:
|
||||
((not alias[0].startswith("postmaster@") and not alias[0].startswith("admin@")) or alias[1] != get_system_administrator(env))
|
||||
and not alias[0].startswith("@")
|
||||
)
|
||||
|
@ -505,6 +545,7 @@ def get_required_aliases(env):
|
|||
|
||||
return aliases
|
||||
|
||||
|
||||
def kick(env, mail_result=None):
|
||||
results = []
|
||||
|
||||
|
@ -533,7 +574,6 @@ def kick(env, mail_result=None):
|
|||
add_mail_alias(source, administrator, env, do_kick=False)
|
||||
results.append("added alias %s (=> %s)\n" % (source, administrator))
|
||||
|
||||
|
||||
for alias in required_aliases:
|
||||
ensure_admin_alias_exists(alias)
|
||||
|
||||
|
@ -541,22 +581,21 @@ def kick(env, mail_result=None):
|
|||
# longer have any other email addresses for.
|
||||
for source, target in existing_aliases:
|
||||
user, domain = source.split("@")
|
||||
if user in ("postmaster", "admin") \
|
||||
and source not in required_aliases \
|
||||
and target == get_system_administrator(env):
|
||||
if user in ("postmaster", "admin") and source not in required_aliases and target == get_system_administrator(env):
|
||||
remove_mail_alias(source, env, do_kick=False)
|
||||
results.append("removed alias %s (was to %s; domain no longer used for email)\n" % (source, target))
|
||||
|
||||
# Update DNS and nginx in case any domains are added/removed.
|
||||
|
||||
from dns_update import do_dns_update
|
||||
results.append( do_dns_update(env) )
|
||||
results.append(do_dns_update(env))
|
||||
|
||||
from web_update import do_web_update
|
||||
results.append( do_web_update(env) )
|
||||
results.append(do_web_update(env))
|
||||
|
||||
return "".join(s for s in results if s != "")
|
||||
|
||||
|
||||
def validate_password(pw):
|
||||
# validate password
|
||||
if pw.strip() == "":
|
||||
|
|
|
@ -6,10 +6,17 @@
|
|||
|
||||
__ALL__ = ['check_certificate']
|
||||
|
||||
import os, os.path, re, subprocess, datetime, multiprocessing.pool
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import subprocess
|
||||
import datetime
|
||||
import multiprocessing.pool
|
||||
|
||||
import dns.reversename, dns.resolver
|
||||
import dateutil.parser, dateutil.tz
|
||||
import dns.reversename
|
||||
import dns.resolver
|
||||
import dateutil.parser
|
||||
import dateutil.tz
|
||||
|
||||
from dns_update import get_dns_zones, build_tlsa_record, get_custom_dns_config
|
||||
from web_update import get_web_domains, get_domain_ssl_files
|
||||
|
@ -17,6 +24,7 @@ from mailconfig import get_mail_domains, get_mail_aliases
|
|||
|
||||
from utils import shell, sort_domains, load_env_vars_from_file
|
||||
|
||||
|
||||
def run_checks(env, output, pool):
|
||||
# run systems checks
|
||||
output.add_heading("System")
|
||||
|
@ -40,6 +48,7 @@ def run_checks(env, output, pool):
|
|||
run_network_checks(env, output)
|
||||
run_domain_checks(env, output, pool)
|
||||
|
||||
|
||||
def get_ssh_port():
|
||||
# Returns ssh port
|
||||
output = shell('check_output', ['sshd', '-T'])
|
||||
|
@@ -51,30 +60,31 @@ def get_ssh_port():
if e == "port":
returnNext = True


def run_services_checks(env, output, pool):
# Check that system services are running.

services = [
{ "name": "Local DNS (bind9)", "port": 53, "public": False, },
#{ "name": "NSD Control", "port": 8952, "public": False, },
{ "name": "Local DNS Control (bind9/rndc)", "port": 953, "public": False, },
{ "name": "Dovecot LMTP LDA", "port": 10026, "public": False, },
{ "name": "Postgrey", "port": 10023, "public": False, },
{ "name": "Spamassassin", "port": 10025, "public": False, },
{ "name": "OpenDKIM", "port": 8891, "public": False, },
{ "name": "OpenDMARC", "port": 8893, "public": False, },
{ "name": "Memcached", "port": 11211, "public": False, },
{ "name": "Sieve (dovecot)", "port": 4190, "public": True, },
{ "name": "Mail-in-a-Box Management Daemon", "port": 10222, "public": False, },
{"name": "Local DNS (bind9)", "port": 53, "public": False, },
# {"name": "NSD Control", "port": 8952, "public": False, },
{"name": "Local DNS Control (bind9/rndc)", "port": 953, "public": False, },
{"name": "Dovecot LMTP LDA", "port": 10026, "public": False, },
{"name": "Postgrey", "port": 10023, "public": False, },
{"name": "Spamassassin", "port": 10025, "public": False, },
{"name": "OpenDKIM", "port": 8891, "public": False, },
{"name": "OpenDMARC", "port": 8893, "public": False, },
{"name": "Memcached", "port": 11211, "public": False, },
{"name": "Sieve (dovecot)", "port": 4190, "public": True, },
{"name": "Mail-in-a-Box Management Daemon", "port": 10222, "public": False, },

{ "name": "SSH Login (ssh)", "port": get_ssh_port(), "public": True, },
{ "name": "Public DNS (nsd4)", "port": 53, "public": True, },
{ "name": "Incoming Mail (SMTP/postfix)", "port": 25, "public": True, },
{ "name": "Outgoing Mail (SMTP 587/postfix)", "port": 587, "public": True, },
#{ "name": "Postfix/master", "port": 10587, "public": True, },
{ "name": "IMAPS (dovecot)", "port": 993, "public": True, },
{ "name": "HTTP Web (nginx)", "port": 80, "public": True, },
{ "name": "HTTPS Web (nginx)", "port": 443, "public": True, },
{"name": "SSH Login (ssh)", "port": get_ssh_port(), "public": True, },
{"name": "Public DNS (nsd4)", "port": 53, "public": True, },
{"name": "Incoming Mail (SMTP/postfix)", "port": 25, "public": True, },
{"name": "Outgoing Mail (SMTP 587/postfix)", "port": 587, "public": True, },
#{"name": "Postfix/master", "port": 10587, "public": True, },
{"name": "IMAPS (dovecot)", "port": 993, "public": True, },
{"name": "HTTP Web (nginx)", "port": 80, "public": True, },
{"name": "HTTPS Web (nginx)", "port": 443, "public": True, },
]

all_running = True
|
@ -90,6 +100,7 @@ def run_services_checks(env, output, pool):
|
|||
|
||||
return not fatal
|
||||
|
||||
|
||||
def check_service(i, service, env):
import socket
output = BufferedOutput()

@@ -126,19 +137,21 @@ def check_service(i, service, env):
output.print_line(shell('check_output', ['nginx', '-t'], capture_stderr=True, trap=True)[1].strip())

# Flag if local DNS is not running.
if service["port"] == 53 and service["public"] == False:
if service["port"] == 53 and service["public"] is False:
fatal = True
finally:
s.close()

return (i, running, fatal, output)

def run_system_checks(env, output):
|
||||
check_ssh_password(env, output)
|
||||
check_software_updates(env, output)
|
||||
check_system_aliases(env, output)
|
||||
check_free_disk_space(env, output)
|
||||
|
||||
|
||||
def check_ssh_password(env, output):
|
||||
# Check that SSH login with password is disabled. The openssh-server
|
||||
# package may not be installed so check that before trying to access
|
||||
|
@ -146,8 +159,7 @@ def check_ssh_password(env, output):
|
|||
if not os.path.exists("/etc/ssh/sshd_config"):
|
||||
return
|
||||
sshd = open("/etc/ssh/sshd_config").read()
|
||||
if re.search("\nPasswordAuthentication\s+yes", sshd) \
|
||||
or not re.search("\nPasswordAuthentication\s+no", sshd):
|
||||
if re.search("\nPasswordAuthentication\s+yes", sshd) or not re.search("\nPasswordAuthentication\s+no", sshd):
|
||||
output.print_error("""The SSH server on this machine permits password-based login. A more secure
|
||||
way to log in is using a public key. Add your SSH public key to $HOME/.ssh/authorized_keys, check
|
||||
that you can log in without a password, set the option 'PasswordAuthentication no' in
|
||||
|
@ -155,6 +167,7 @@ def check_ssh_password(env, output):
|
|||
else:
|
||||
output.print_ok("SSH disallows password-based login.")
|
||||
|
||||
|
||||
def check_software_updates(env, output):
|
||||
# Check for any software package updates.
|
||||
pkgs = list_apt_updates(apt_update=False)
|
||||
|
@ -167,11 +180,13 @@ def check_software_updates(env, output):
|
|||
for p in pkgs:
|
||||
output.print_line("%s (%s)" % (p["package"], p["version"]))
|
||||
|
||||
|
||||
def check_system_aliases(env, output):
|
||||
# Check that the administrator alias exists since that's where all
|
||||
# admin email is automatically directed.
|
||||
check_alias_exists("administrator@" + env['PRIMARY_HOSTNAME'], env, output)
|
||||
|
||||
|
||||
def check_free_disk_space(env, output):
|
||||
# Check free disk space.
|
||||
st = os.statvfs(env['STORAGE_ROOT'])
|
||||
|
@ -185,6 +200,7 @@ def check_free_disk_space(env, output):
|
|||
else:
|
||||
output.print_error(disk_msg)
|
||||
|
||||
|
||||
def run_network_checks(env, output):
|
||||
# Also see setup/network-checks.sh.
|
||||
|
||||
|
@ -215,6 +231,7 @@ def run_network_checks(env, output):
|
|||
which may prevent recipients from receiving your email. See http://www.spamhaus.org/query/ip/%s."""
|
||||
% (env['PUBLIC_IP'], zen, env['PUBLIC_IP']))
|
||||
|
||||
|
||||
def run_domain_checks(env, output, pool):
|
||||
# Get the list of domains we handle mail for.
|
||||
mail_domains = get_mail_domains(env)
|
||||
|
@ -236,10 +253,12 @@ def run_domain_checks(env, output, pool):
|
|||
args = ((domain, env, dns_domains, dns_zonefiles, mail_domains, web_domains)
|
||||
for domain in domains_to_check)
|
||||
ret = pool.starmap(run_domain_checks_on_domain, args, chunksize=1)
|
||||
ret = dict(ret) # (domain, output) => { domain: output }
|
||||
# (domain, output) => { domain: output }
|
||||
ret = dict(ret)
|
||||
for domain in sort_domains(ret, env):
|
||||
ret[domain].playback(output)
|
||||
|
||||
|
||||
def run_domain_checks_on_domain(domain, env, dns_domains, dns_zonefiles, mail_domains, web_domains):
|
||||
output = BufferedOutput()
|
||||
|
||||
|
@ -262,6 +281,7 @@ def run_domain_checks_on_domain(domain, env, dns_domains, dns_zonefiles, mail_do
|
|||
|
||||
return (domain, output)
|
||||
|
||||
|
||||
def check_primary_hostname_dns(domain, env, output, dns_domains, dns_zonefiles):
|
||||
# If a DS record is set on the zone containing this domain, check DNSSEC now.
|
||||
for zone in dns_domains:
|
||||
|
@ -300,8 +320,7 @@ def check_primary_hostname_dns(domain, env, output, dns_domains, dns_zonefiles):
|
|||
if existing_rdns == domain:
|
||||
output.print_ok("Reverse DNS is set correctly at ISP. [%s => %s]" % (env['PUBLIC_IP'], env['PRIMARY_HOSTNAME']))
|
||||
else:
|
||||
output.print_error("""Your box's reverse DNS is currently %s, but it should be %s. Your ISP or cloud provider will have instructions
|
||||
on setting up reverse DNS for your box at %s.""" % (existing_rdns, domain, env['PUBLIC_IP']) )
|
||||
output.print_error("""Your box's reverse DNS is currently %s, but it should be %s. Your ISP or cloud provider will have instructions on setting up reverse DNS for your box at %s.""" % (existing_rdns, domain, env['PUBLIC_IP']))
|
||||
|
||||
# Check the TLSA record.
|
||||
tlsa_qname = "_25._tcp." + domain
|
||||
|
@ -319,6 +338,7 @@ def check_primary_hostname_dns(domain, env, output, dns_domains, dns_zonefiles):
|
|||
# Check that the hostmaster@ email address exists.
|
||||
check_alias_exists("hostmaster@" + domain, env, output)
|
||||
|
||||
|
||||
def check_alias_exists(alias, env, output):
|
||||
mail_alises = dict(get_mail_aliases(env))
|
||||
if alias in mail_alises:
|
||||
|
@ -326,6 +346,7 @@ def check_alias_exists(alias, env, output):
|
|||
else:
|
||||
output.print_error("""You must add a mail alias for %s and direct email to you or another administrator.""" % alias)
|
||||
|
||||
|
||||
def check_dns_zone(domain, env, output, dns_zonefiles):
|
||||
# If a DS record is set at the registrar, check DNSSEC first because it will affect the NS query.
|
||||
# If it is not set, we suggest it last.
|
||||
|
@ -349,7 +370,8 @@ def check_dns_zone(domain, env, output, dns_zonefiles):
|
|||
else:
|
||||
output.print_error("""The nameservers set on this domain are incorrect. They are currently %s. Use your domain name registrar's
|
||||
control panel to set the nameservers to %s."""
|
||||
% (existing_ns, correct_ns) )
|
||||
% (existing_ns, correct_ns))
|
||||
|
||||
|
||||
def check_dns_zone_suggestions(domain, env, output, dns_zonefiles):
|
||||
# Since DNSSEC is optional, if a DS record is NOT set at the registrar suggest it.
|
||||
|
@ -363,27 +385,30 @@ def check_dnssec(domain, env, output, dns_zonefiles, is_checking_primary=False):
|
|||
# several forms. We have to be prepared to check for any valid record. We've
|
||||
# pre-generated all of the valid digests --- read them in.
|
||||
ds_correct = open('/etc/nsd/zones/' + dns_zonefiles[domain] + '.ds').read().strip().split("\n")
|
||||
digests = { }
|
||||
digests = {}
|
||||
for rr_ds in ds_correct:
|
||||
ds_keytag, ds_alg, ds_digalg, ds_digest = rr_ds.split("\t")[4].split(" ")
|
||||
digests[ds_digalg] = ds_digest
|
||||
|
||||
# Some registrars may want the public key so they can compute the digest. The DS
|
||||
# record that we suggest using is for the KSK (and that's how the DS records were generated).
|
||||
alg_name_map = { '7': 'RSASHA1-NSEC3-SHA1', '8': 'RSASHA256' }
|
||||
alg_name_map = {'7': 'RSASHA1-NSEC3-SHA1', '8': 'RSASHA256'}
|
||||
dnssec_keys = load_env_vars_from_file(os.path.join(env['STORAGE_ROOT'], 'dns/dnssec/%s.conf' % alg_name_map[ds_alg]))
|
||||
dnsssec_pubkey = open(os.path.join(env['STORAGE_ROOT'], 'dns/dnssec/' + dnssec_keys['KSK'] + '.key')).read().split("\t")[3].split(" ")[3]
|
||||
|
||||
# Query public DNS for the DS record at the registrar.
|
||||
ds = query_dns(domain, "DS", nxdomain=None)
|
||||
ds_looks_valid = ds and len(ds.split(" ")) == 4
|
||||
if ds_looks_valid: ds = ds.split(" ")
|
||||
if ds_looks_valid:
|
||||
ds = ds.split(" ")
|
||||
if ds_looks_valid and ds[0] == ds_keytag and ds[1] == ds_alg and ds[3] == digests.get(ds[2]):
|
||||
if is_checking_primary: return
|
||||
if is_checking_primary:
|
||||
return
|
||||
output.print_ok("DNSSEC 'DS' record is set correctly at registrar.")
|
||||
else:
|
||||
if ds == None:
|
||||
if is_checking_primary: return
|
||||
if ds is None:
|
||||
if is_checking_primary:
|
||||
return
|
||||
output.print_error("""This domain's DNSSEC DS record is not set. The DS record is optional. The DS record activates DNSSEC.
|
||||
To set a DS record, you must follow the instructions provided by your domain name registrar and provide to them this information:""")
|
||||
else:
|
||||
|
@ -398,8 +423,8 @@ def check_dnssec(domain, env, output, dns_zonefiles, is_checking_primary=False):
|
|||
output.print_line("Key Tag: " + ds_keytag + ("" if not ds_looks_valid or ds[0] == ds_keytag else " (Got '%s')" % ds[0]))
|
||||
output.print_line("Key Flags: KSK")
|
||||
output.print_line(
|
||||
("Algorithm: %s / %s" % (ds_alg, alg_name_map[ds_alg]))
|
||||
+ ("" if not ds_looks_valid or ds[1] == ds_alg else " (Got '%s')" % ds[1]))
|
||||
("Algorithm: %s / %s" % (ds_alg, alg_name_map[ds_alg])) +
|
||||
("" if not ds_looks_valid or ds[1] == ds_alg else " (Got '%s')" % ds[1]))
|
||||
# see http://www.iana.org/assignments/dns-sec-alg-numbers/dns-sec-alg-numbers.xhtml
|
||||
output.print_line("Digest Type: 2 / SHA-256")
|
||||
# http://www.ietf.org/assignments/ds-rr-types/ds-rr-types.xml
|
||||
|
@ -413,6 +438,7 @@ def check_dnssec(domain, env, output, dns_zonefiles, is_checking_primary=False):
|
|||
output.print_line("" + ds_correct[0])
|
||||
output.print_line("")
|
||||
|
||||
|
||||
def check_mail_domain(domain, env, output):
|
||||
# Check the MX record.
|
||||
|
||||
|
@ -422,7 +448,7 @@ def check_mail_domain(domain, env, output):
|
|||
if mx == expected_mx:
|
||||
output.print_ok("Domain's email is directed to this domain. [%s => %s]" % (domain, mx))
|
||||
|
||||
elif mx == None:
|
||||
elif mx is None:
|
||||
# A missing MX record is okay on the primary hostname because
|
||||
# the primary hostname's A record (the MX fallback) is... itself,
|
||||
# which is what we want the MX to be.
|
||||
|
@ -435,7 +461,7 @@ def check_mail_domain(domain, env, output):
|
|||
else:
|
||||
domain_a = query_dns(domain, "A", nxdomain=None)
|
||||
primary_a = query_dns(env['PRIMARY_HOSTNAME'], "A", nxdomain=None)
|
||||
if domain_a != None and domain_a == primary_a:
|
||||
if domain_a is not None and domain_a == primary_a:
|
||||
output.print_ok("Domain's email is directed to this domain. [%s has no MX record but its A record is OK]" % (domain,))
|
||||
else:
|
||||
output.print_error("""This domain's DNS MX record is not set. It should be '%s'. Mail will not
|
||||
|
@ -463,6 +489,7 @@ def check_mail_domain(domain, env, output):
|
|||
which may prevent recipients from receiving your mail.
|
||||
See http://www.spamhaus.org/dbl/ and http://www.spamhaus.org/query/domain/%s.""" % (dbl, domain))
|
||||
|
||||
|
||||
def check_web_domain(domain, env, output):
|
||||
# See if the domain's A record resolves to our PUBLIC_IP. This is already checked
|
||||
# for PRIMARY_HOSTNAME, for which it is required for mail specifically. For it and
|
||||
|
@ -481,6 +508,7 @@ def check_web_domain(domain, env, output):
|
|||
# website for also needs a signed certificate.
|
||||
check_ssl_cert(domain, env, output)
|
||||
|
||||
|
||||
def query_dns(qname, rtype, nxdomain='[Not Set]'):
# Make the qname absolute by appending a period. Without this, dns.resolver.query
# will fall back a failed lookup to a second query with this machine's hostname

@@ -506,11 +534,13 @@ def query_dns(qname, rtype, nxdomain='[Not Set]'):
# can compare to a well known order.
return "; ".join(sorted(str(r).rstrip('.') for r in response))
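
A minimal sketch (not part of the commit) of the absolute-qname trick that query_dns relies on, assuming dnspython is installed; older dnspython exposes dns.resolver.query, while newer releases call the same operation dns.resolver.resolve:

import dns.resolver

def lookup(qname, rtype="A"):
    # An absolute name (trailing period) prevents fallback to local search domains.
    if not qname.endswith("."):
        qname += "."
    try:
        response = dns.resolver.query(qname, rtype)
    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer):
        return None
    # Sort multi-valued answers so they compare stably, as query_dns does.
    return "; ".join(sorted(str(r).rstrip(".") for r in response))
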
def check_ssl_cert(domain, env, output):
|
||||
# Check that SSL certificate is signed.
|
||||
|
||||
# Skip the check if the A record is not pointed here.
|
||||
if query_dns(domain, "A", None) not in (env['PUBLIC_IP'], None): return
|
||||
if query_dns(domain, "A", None) not in (env['PUBLIC_IP'], None):
|
||||
return
|
||||
|
||||
# Where is the SSL stored?
|
||||
ssl_key, ssl_certificate, ssl_via = get_domain_ssl_files(domain, env)
|
||||
|
@ -560,6 +590,7 @@ def check_ssl_cert(domain, env, output):
|
|||
output.print_line(cert_status_details)
|
||||
output.print_line("")
|
||||
|
||||
|
||||
def check_certificate(domain, ssl_certificate, ssl_private_key):
|
||||
# Use openssl verify to check the status of a certificate.
|
||||
|
||||
|
@ -640,7 +671,7 @@ def check_certificate(domain, ssl_certificate, ssl_private_key):
|
|||
|
||||
cert = open(ssl_certificate).read()
|
||||
m = re.match(r'(-*BEGIN CERTIFICATE-*.*?-*END CERTIFICATE-*)(.*)', cert, re.S)
|
||||
if m == None:
|
||||
if m is None:
|
||||
return ("The certificate file is an invalid PEM certificate.", None)
|
||||
mycert, chaincerts = m.groups()
|
||||
|
||||
|
@ -649,7 +680,7 @@ def check_certificate(domain, ssl_certificate, ssl_private_key):
|
|||
retcode, verifyoutput = shell('check_output', [
|
||||
"openssl",
|
||||
"verify", "-verbose",
|
||||
"-purpose", "sslserver", "-policy_check",]
|
||||
"-purpose", "sslserver", "-policy_check", ]
|
||||
+ ([] if chaincerts.strip() == "" else ["-untrusted", "/dev/stdin"])
|
||||
+ [ssl_certificate],
|
||||
input=chaincerts.encode('ascii'),
|
||||
|
@ -679,6 +710,8 @@ def check_certificate(domain, ssl_certificate, ssl_private_key):
|
|||
return ("OK", expiry_info)
|
||||
|
||||
_apt_updates = None
|
||||
|
||||
|
||||
def list_apt_updates(apt_update=True):
|
||||
# See if we have this information cached recently.
|
||||
# Keep the information for 8 hours.
|
||||
|
@ -703,9 +736,9 @@ def list_apt_updates(apt_update=True):
|
|||
continue
|
||||
m = re.match(r'^Inst (.*) \[(.*)\] \((\S*)', line)
|
||||
if m:
|
||||
pkgs.append({ "package": m.group(1), "version": m.group(3), "current_version": m.group(2) })
|
||||
pkgs.append({"package": m.group(1), "version": m.group(3), "current_version": m.group(2)})
|
||||
else:
|
||||
pkgs.append({ "package": "[" + line + "]", "version": "", "current_version": "" })
|
||||
pkgs.append({"package": "[" + line + "]", "version": "", "current_version": ""})
|
||||
|
||||
# Cache for future requests.
|
||||
_apt_updates = (datetime.datetime.now(), pkgs)
|
||||
|
@ -743,7 +776,8 @@ class ConsoleOutput:
|
|||
print()
|
||||
print(" ", end="")
|
||||
linelen = 0
|
||||
if linelen == 0 and w.strip() == "": continue
|
||||
if linelen == 0 and w.strip() == "":
|
||||
continue
|
||||
print(w, end="")
|
||||
linelen += len(w)
|
||||
print()
|
||||
|
@ -752,17 +786,21 @@ class ConsoleOutput:
|
|||
for line in message.split("\n"):
|
||||
self.print_block(line)
|
||||
|
||||
|
||||
class BufferedOutput:
# Record all of the instance method calls so we can play them back later.
def __init__(self):
self.buf = []

def __getattr__(self, attr):
if attr not in ("add_heading", "print_ok", "print_error", "print_warning", "print_block", "print_line"):
raise AttributeError
# Return a function that just records the call & arguments to our buffer.

def w(*args, **kwargs):
self.buf.append((attr, args, kwargs))
return w

def playback(self, output):
for attr, args, kwargs in self.buf:
getattr(output, attr)(*args, **kwargs)

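An illustrative sketch (not part of the commit) of how BufferedOutput defers printing: each method call is recorded and later replayed against a real output object, which is how the parallel domain and service checks keep their results in order.

buffered = BufferedOutput()
buffered.add_heading("Example")
buffered.print_ok("This line is stored, not printed yet.")
# Later, on the main thread, replay the recorded calls in order:
buffered.playback(ConsoleOutput())
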
@ -787,5 +825,3 @@ if __name__ == "__main__":
|
|||
if cert_status != "OK":
|
||||
sys.exit(1)
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
|
|
|
@@ -2,33 +2,39 @@ import os.path

CONF_DIR = os.path.join(os.path.dirname(__file__), "../conf")


def load_environment():
# Load settings from /etc/mailinabox.conf.
return load_env_vars_from_file("/etc/mailinabox.conf")


def load_env_vars_from_file(fn):
# Load settings from a KEY=VALUE file.
import collections
env = collections.OrderedDict()
for line in open(fn): env.setdefault(*line.strip().split("=", 1))
for line in open(fn):
env.setdefault(*line.strip().split("=", 1))
return env

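An illustrative sketch (not part of the commit) of the KEY=VALUE format that load_env_vars_from_file parses; the values shown here are hypothetical. Each line is split on the first "=" only, and setdefault keeps the first occurrence of a repeated key.

# /etc/mailinabox.conf might contain lines such as:
#   STORAGE_ROOT=/home/user-data
#   PRIMARY_HOSTNAME=box.example.com
#   PUBLIC_IP=203.0.113.1
env = load_env_vars_from_file("/etc/mailinabox.conf")
print(env["PRIMARY_HOSTNAME"])
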
def save_environment(env):
with open("/etc/mailinabox.conf", "w") as f:
for k, v in env.items():
f.write("%s=%s\n" % (k, v))


def safe_domain_name(name):
# Sanitize a domain name so it is safe to use as a file name on disk.
import urllib.parse
return urllib.parse.quote(name, safe='')

def sort_domains(domain_names, env):
|
||||
# Put domain names in a nice sorted order. For web_update, PRIMARY_HOSTNAME
|
||||
# must appear first so it becomes the nginx default server.
|
||||
|
||||
# First group PRIMARY_HOSTNAME and its subdomains, then parent domains of PRIMARY_HOSTNAME, then other domains.
|
||||
groups = ( [], [], [] )
|
||||
groups = ([], [], [])
|
||||
for d in domain_names:
|
||||
if d == env['PRIMARY_HOSTNAME'] or d.endswith("." + env['PRIMARY_HOSTNAME']):
|
||||
groups[0].append(d)
|
||||
|
@ -44,13 +50,14 @@ def sort_domains(domain_names, env):
|
|||
ret = []
|
||||
for d in top_domains:
|
||||
ret.append(d)
|
||||
ret.extend( sort_group([s for s in group if s.endswith("." + d)]) )
|
||||
ret.extend(sort_group([s for s in group if s.endswith("." + d)]))
|
||||
return ret
|
||||
|
||||
groups = [sort_group(g) for g in groups]
|
||||
|
||||
return groups[0] + groups[1] + groups[2]
|
||||
|
||||
|
||||
def sort_email_addresses(email_addresses, env):
|
||||
email_addresses = set(email_addresses)
|
||||
domains = set(email.split("@", 1)[1] for email in email_addresses if "@" in email)
|
||||
|
@ -59,13 +66,17 @@ def sort_email_addresses(email_addresses, env):
|
|||
domain_emails = set(email for email in email_addresses if email.endswith("@" + domain))
|
||||
ret.extend(sorted(domain_emails))
|
||||
email_addresses -= domain_emails
|
||||
ret.extend(sorted(email_addresses)) # whatever is left
|
||||
# whatever is left
|
||||
ret.extend(sorted(email_addresses))
|
||||
return ret
|
||||
|
||||
|
||||
def exclusive_process(name):
|
||||
# Ensure that a process named `name` does not execute multiple
|
||||
# times concurrently.
|
||||
import os, sys, atexit
|
||||
import os
|
||||
import sys
|
||||
import atexit
|
||||
pidfile = '/var/run/mailinabox-%s.pid' % name
|
||||
mypid = os.getpid()
|
||||
|
||||
|
@ -95,7 +106,8 @@ def exclusive_process(name):
|
|||
try:
|
||||
existing_pid = int(f.read().strip())
|
||||
except ValueError:
|
||||
pass # No valid integer in the file.
|
||||
# No valid integer in the file.
|
||||
pass
|
||||
|
||||
# Check if the pid in it is valid.
|
||||
if existing_pid:
|
||||
|
@@ -118,26 +130,32 @@ def clear_my_pid(pidfile):
def is_pid_valid(pid):
"""Checks whether a pid is a valid process ID of a currently running process."""
# adapted from http://stackoverflow.com/questions/568271/how-to-check-if-there-exists-a-process-with-a-given-pid
import os, errno
if pid <= 0: raise ValueError('Invalid PID.')
import os
import errno
if pid <= 0:
raise ValueError('Invalid PID.')
try:
os.kill(pid, 0)
except OSError as err:
if err.errno == errno.ESRCH: # No such process
# No such process
if err.errno == errno.ESRCH:
return False
elif err.errno == errno.EPERM: # Not permitted to send signal
# Not permitted to send signal
elif err.errno == errno.EPERM:
return True
else: # EINVAL
# EINVAL
else:
raise
else:
return True

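An illustrative sketch (not part of the commit) of how exclusive_process uses is_pid_valid: a leftover pidfile only blocks a new run if the pid it records still belongs to a live process. The pid value below is hypothetical.

existing_pid = 12345  # value read from the stale pidfile
if is_pid_valid(existing_pid):
    print("Another instance is still running; exiting.")
else:
    print("Stale pidfile; safe to take over.")
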
def shell(method, cmd_args, env={}, capture_stderr=False, return_bytes=False, trap=False, input=None):
# A safe way to execute processes.
# Some processes like apt-get require being given a sane PATH.
import subprocess

env.update({ "PATH": "/sbin:/bin:/usr/sbin:/usr/bin" })
env.update({"PATH": "/sbin:/bin:/usr/sbin:/usr/bin"})
kwargs = {
'env': env,
'stderr': None if not capture_stderr else subprocess.STDOUT,

@@ -154,18 +172,21 @@ def shell(method, cmd_args, env={}, capture_stderr=False, return_bytes=False, tr
except subprocess.CalledProcessError as e:
ret = e.output
code = e.returncode
if not return_bytes and isinstance(ret, bytes): ret = ret.decode("utf8")
if not return_bytes and isinstance(ret, bytes):
ret = ret.decode("utf8")
if not trap:
return ret
else:
return code, ret

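An illustrative sketch (not part of the commit) of typical shell() calls as they appear elsewhere in these scripts: 'check_output' returns the command's output and raises on failure, while trap=True makes it return an (exit code, output) pair instead of raising.

out = shell('check_output', ['sshd', '-T'])
code, out = shell('check_output', ['nginx', '-t'], capture_stderr=True, trap=True)
shell('check_call', ["/usr/sbin/service", "postfix", "restart"])
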
def create_syslog_handler():
|
||||
import logging.handlers
|
||||
handler = logging.handlers.SysLogHandler(address='/dev/log')
|
||||
handler.setLevel(logging.WARNING)
|
||||
return handler
|
||||
|
||||
|
||||
def du(path):
|
||||
# Computes the size of all files in the path, like the `du` command.
|
||||
# Based on http://stackoverflow.com/a/17936789. Takes into account
|
||||
|
|
|
@ -2,12 +2,18 @@
|
|||
# domains for which a mail account has been set up.
|
||||
########################################################################
|
||||
|
||||
import os, os.path, shutil, re, tempfile, rtyaml
|
||||
import os
|
||||
import os.path
|
||||
import shutil
|
||||
import re
|
||||
import tempfile
|
||||
import rtyaml
|
||||
|
||||
from mailconfig import get_mail_domains
|
||||
from dns_update import get_custom_dns_config, do_dns_update
|
||||
from utils import shell, safe_domain_name, sort_domains
|
||||
|
||||
|
||||
def get_web_domains(env):
|
||||
# What domains should we serve websites for?
|
||||
domains = set()
|
||||
|
@ -25,11 +31,9 @@ def get_web_domains(env):
|
|||
# IP address than this box. Remove those domains from our list.
|
||||
dns = get_custom_dns_config(env)
|
||||
for domain, value in dns.items():
|
||||
if domain not in domains: continue
|
||||
if (isinstance(value, str) and (value != "local")) \
|
||||
or (isinstance(value, dict) and ("CNAME" in value)) \
|
||||
or (isinstance(value, dict) and ("A" in value) and (value["A"] != "local")) \
|
||||
or (isinstance(value, dict) and ("AAAA" in value) and (value["AAAA"] != "local")):
|
||||
if domain not in domains:
|
||||
continue
|
||||
if (isinstance(value, str) and (value != "local")) or (isinstance(value, dict) and ("CNAME" in value)) or (isinstance(value, dict) and ("A" in value) and (value["A"] != "local")) or (isinstance(value, dict) and ("AAAA" in value) and (value["AAAA"] != "local")):
|
||||
domains.remove(domain)
|
||||
|
||||
# Sort the list. Put PRIMARY_HOSTNAME first so it becomes the
|
||||
|
@ -38,6 +42,7 @@ def get_web_domains(env):
|
|||
|
||||
return domains
|
||||
|
||||
|
||||
def do_web_update(env, ok_status="web updated\n"):
|
||||
# Build an nginx configuration file.
|
||||
nginx_conf = open(os.path.join(os.path.dirname(__file__), "../conf/nginx-top.conf")).read()
|
||||
|
@ -67,6 +72,7 @@ def do_web_update(env, ok_status="web updated\n"):
|
|||
|
||||
return ok_status
|
||||
|
||||
|
||||
def make_domain_config(domain, template, template_for_primaryhost, env):
|
||||
# How will we configure this domain.
|
||||
|
||||
|
@ -128,13 +134,16 @@ def make_domain_config(domain, template, template_for_primaryhost, env):
|
|||
|
||||
return nginx_conf
|
||||
|
||||
|
||||
def get_web_root(domain, env, test_exists=True):
|
||||
# Try STORAGE_ROOT/web/domain_name if it exists, but fall back to STORAGE_ROOT/web/default.
|
||||
for test_domain in (domain, 'default'):
|
||||
root = os.path.join(env["STORAGE_ROOT"], "www", safe_domain_name(test_domain))
|
||||
if os.path.exists(root) or not test_exists: break
|
||||
if os.path.exists(root) or not test_exists:
|
||||
break
|
||||
return root
|
||||
|
||||
|
||||
def get_domain_ssl_files(domain, env, allow_shared_cert=True):
|
||||
# What SSL private key will we use? Allow the user to override this, but
|
||||
# in many cases using the same private key for all domains would be fine.
|
||||
|
@ -175,6 +184,7 @@ def get_domain_ssl_files(domain, env, allow_shared_cert=True):
|
|||
|
||||
return ssl_key, ssl_certificate, ssl_via
|
||||
|
||||
|
||||
def ensure_ssl_certificate_exists(domain, ssl_key, ssl_certificate, env):
|
||||
# For domains besides PRIMARY_HOSTNAME, generate a self-signed certificate if
|
||||
# a certificate doesn't already exist. See setup/mail.sh for documentation.
|
||||
|
@ -197,7 +207,8 @@ def ensure_ssl_certificate_exists(domain, ssl_key, ssl_certificate, env):
|
|||
# Start with a CSR written to a temporary file.
|
||||
with tempfile.NamedTemporaryFile(mode="w") as csr_fp:
|
||||
csr_fp.write(create_csr(domain, ssl_key, env))
|
||||
csr_fp.flush() # since we won't close until after running 'openssl x509', since close triggers delete.
|
||||
# since we won't close until after running 'openssl x509', since close triggers delete.
|
||||
csr_fp.flush()
|
||||
|
||||
# And then make the certificate.
|
||||
shell("check_call", [
|
||||
|
@ -207,6 +218,7 @@ def ensure_ssl_certificate_exists(domain, ssl_key, ssl_certificate, env):
|
|||
"-signkey", ssl_key,
|
||||
"-out", ssl_certificate])
|
||||
|
||||
|
||||
def create_csr(domain, ssl_key, env):
|
||||
return shell("check_output", [
|
||||
"openssl", "req", "-new",
|
||||
|
@ -215,13 +227,15 @@ def create_csr(domain, ssl_key, env):
|
|||
"-sha256",
|
||||
"-subj", "/C=%s/ST=/L=/O=/CN=%s" % (env["CSR_COUNTRY"], domain.encode("idna").decode("ascii"))])
|
||||
|
||||
|
||||
def install_cert(domain, ssl_cert, ssl_chain, env):
|
||||
if domain not in get_web_domains(env):
|
||||
return "Invalid domain name."
|
||||
|
||||
# Write the combined cert+chain to a temporary path and validate that it is OK.
|
||||
# The certificate always goes above the chain.
|
||||
import tempfile, os
|
||||
import tempfile
|
||||
import os
|
||||
fd, fn = tempfile.mkstemp('.pem')
|
||||
os.write(fd, (ssl_cert + '\n' + ssl_chain).encode("ascii"))
|
||||
os.close(fd)
|
||||
|
@ -248,23 +262,25 @@ def install_cert(domain, ssl_cert, ssl_chain, env):
|
|||
# used in the DANE TLSA record and restart postfix and dovecot which use
|
||||
# that certificate.
|
||||
if domain == env['PRIMARY_HOSTNAME']:
|
||||
ret.append( do_dns_update(env) )
|
||||
ret.append(do_dns_update(env))
|
||||
|
||||
shell('check_call', ["/usr/sbin/service", "postfix", "restart"])
|
||||
shell('check_call', ["/usr/sbin/service", "dovecot", "restart"])
|
||||
ret.append("mail services restarted")
|
||||
|
||||
# Kick nginx so it sees the cert.
|
||||
ret.append( do_web_update(env, ok_status="") )
|
||||
ret.append(do_web_update(env, ok_status=""))
|
||||
return "\n".join(r for r in ret if r.strip() != "")
|
||||
|
||||
|
||||
def get_web_domains_info(env):
|
||||
# load custom settings so we can tell what domains have a redirect or proxy set up on '/',
|
||||
# which means static hosting is not happening
|
||||
custom_settings = { }
|
||||
custom_settings = {}
|
||||
nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
|
||||
if os.path.exists(nginx_conf_custom_fn):
|
||||
custom_settings = rtyaml.load(open(nginx_conf_custom_fn))
|
||||
|
||||
def has_root_proxy_or_redirect(domain):
|
||||
return custom_settings.get(domain, {}).get('redirects', {}).get('/') or custom_settings.get(domain, {}).get('proxies', {}).get('/')
|
||||
|
||||
|
|
|
@ -5,37 +5,48 @@
|
|||
# We have to be careful here that any dependencies are already installed in the previous
|
||||
# version since this script runs before all other aspects of the setup script.
|
||||
|
||||
import sys, os, os.path, glob, re, shutil
|
||||
import sys
|
||||
import os
|
||||
import os.path
|
||||
import glob
|
||||
import re
|
||||
import shutil
|
||||
|
||||
sys.path.insert(0, 'management')
|
||||
from utils import load_environment, save_environment, shell
|
||||
|
||||
|
||||
def migration_1(env):
|
||||
# Re-arrange where we store SSL certificates. There was a typo also.
|
||||
|
||||
def move_file(fn, domain_name_escaped, filename):
|
||||
# Moves an SSL-related file into the right place.
|
||||
fn1 = os.path.join( env["STORAGE_ROOT"], 'ssl', domain_name_escaped, file_type)
|
||||
fn1 = os.path.join(env["STORAGE_ROOT"], 'ssl', domain_name_escaped, file_type)
|
||||
os.makedirs(os.path.dirname(fn1), exist_ok=True)
|
||||
shutil.move(fn, fn1)
|
||||
|
||||
# Migrate the 'domains' directory.
|
||||
for sslfn in glob.glob(os.path.join( env["STORAGE_ROOT"], 'ssl/domains/*' )):
|
||||
for sslfn in glob.glob(os.path.join(env["STORAGE_ROOT"], 'ssl/domains/*')):
|
||||
fn = os.path.basename(sslfn)
|
||||
m = re.match("(.*)_(certifiate.pem|cert_sign_req.csr|private_key.pem)$", fn)
|
||||
if m:
|
||||
# get the new name for the file
|
||||
domain_name, file_type = m.groups()
|
||||
if file_type == "certifiate.pem": file_type = "ssl_certificate.pem" # typo
|
||||
if file_type == "cert_sign_req.csr": file_type = "certificate_signing_request.csr" # nicer
|
||||
# typo
|
||||
if file_type == "certifiate.pem":
|
||||
file_type = "ssl_certificate.pem"
|
||||
# nicer
|
||||
if file_type == "cert_sign_req.csr":
|
||||
file_type = "certificate_signing_request.csr"
|
||||
move_file(sslfn, domain_name, file_type)
|
||||
|
||||
# Move the old domains directory if it is now empty.
|
||||
try:
|
||||
os.rmdir(os.path.join( env["STORAGE_ROOT"], 'ssl/domains'))
|
||||
os.rmdir(os.path.join(env["STORAGE_ROOT"], 'ssl/domains'))
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def migration_2(env):
|
||||
# Delete the .dovecot_sieve script everywhere. This was formerly a copy of our spam -> Spam
|
||||
# script. We now install it as a global script, and we use managesieve, so the old file is
|
||||
|
@ -45,21 +56,25 @@ def migration_2(env):
|
|||
for fn in glob.glob(os.path.join(env["STORAGE_ROOT"], 'mail/mailboxes/*/*/.dovecot.svbin')):
|
||||
os.unlink(fn)
|
||||
|
||||
|
||||
def migration_3(env):
|
||||
# Move the migration ID from /etc/mailinabox.conf to $STORAGE_ROOT/mailinabox.version
|
||||
# so that the ID stays with the data files that it describes the format of. The writing
|
||||
# of the file will be handled by the main function.
|
||||
pass
|
||||
|
||||
|
||||
def migration_4(env):
|
||||
# Add a new column to the mail users table where we can store administrative privileges.
|
||||
db = os.path.join(env["STORAGE_ROOT"], 'mail/users.sqlite')
|
||||
shell("check_call", ["sqlite3", db, "ALTER TABLE users ADD privileges TEXT NOT NULL DEFAULT ''"])
|
||||
|
||||
|
||||
def migration_5(env):
|
||||
# The secret key for encrypting backups was world readable. Fix here.
|
||||
os.chmod(os.path.join(env["STORAGE_ROOT"], 'backup/secret_key.txt'), 0o600)
|
||||
|
||||
|
||||
def migration_6(env):
|
||||
# We now will generate multiple DNSSEC keys for different algorithms, since TLDs may
|
||||
# not support them all. .email only supports RSA/SHA-256. Rename the keys.conf file
|
||||
|
@ -67,6 +82,7 @@ def migration_6(env):
|
|||
basepath = os.path.join(env["STORAGE_ROOT"], 'dns/dnssec')
|
||||
shutil.move(os.path.join(basepath, 'keys.conf'), os.path.join(basepath, 'RSASHA1-NSEC3-SHA1.conf'))
|
||||
|
||||
|
||||
def get_current_migration():
|
||||
ver = 0
|
||||
while True:
|
||||
|
@ -76,6 +92,7 @@ def get_current_migration():
|
|||
return ver
|
||||
ver = next_ver
|
||||
|
||||
|
||||
def run_migrations():
|
||||
if not os.access("/etc/mailinabox.conf", os.W_OK, effective_ids=True):
|
||||
print("This script must be run as root.", file=sys.stderr)
|
||||
|
@ -135,4 +152,3 @@ if __name__ == "__main__":
|
|||
elif sys.argv[-1] == "--migrate":
|
||||
# Perform migrations.
|
||||
run_migrations()
|
||||
|
||||
|
|
|
@ -7,8 +7,11 @@
|
|||
# where ipaddr is the IP address of your Mail-in-a-Box
|
||||
# and hostname is the domain name to check the DNS for.
|
||||
|
||||
import sys, re, difflib
|
||||
import dns.reversename, dns.resolver
|
||||
import sys
|
||||
import re
|
||||
import difflib
|
||||
import dns.reversename
|
||||
import dns.resolver
|
||||
|
||||
if len(sys.argv) < 3:
|
||||
print("Usage: tests/dns.py ipaddress hostname [primary hostname]")
|
||||
|
@ -19,6 +22,7 @@ primary_hostname = hostname
|
|||
if len(sys.argv) == 4:
|
||||
primary_hostname = sys.argv[3]
|
||||
|
||||
|
||||
def test(server, description):
|
||||
tests = [
|
||||
(hostname, "A", ipaddr),
|
||||
|
@ -34,6 +38,7 @@ def test(server, description):
|
|||
]
|
||||
return test2(tests, server, description)
|
||||
|
||||
|
||||
def test_ptr(server, description):
|
||||
ipaddr_rev = dns.reversename.from_address(ipaddr)
|
||||
tests = [
|
||||
|
@ -41,6 +46,7 @@ def test_ptr(server, description):
|
|||
]
|
||||
return test2(tests, server, description)
|
||||
|
||||
|
||||
def test2(tests, server, description):
|
||||
first = True
|
||||
resolver = dns.resolver.get_default_resolver()
|
||||
|
@ -58,15 +64,18 @@ def test2(tests, server, description):
|
|||
# difference is between the two exceptions
|
||||
response = ["[no value]"]
|
||||
response = ";".join(str(r) for r in response)
|
||||
response = re.sub(r"(\"p=).*(\")", r"\1__KEY__\2", response) # normalize DKIM key
|
||||
response = response.replace("\"\" ", "") # normalize TXT records (DNSSEC signing inserts empty text string components)
|
||||
# normalize DKIM key
|
||||
response = re.sub(r"(\"p=).*(\")", r"\1__KEY__\2", response)
|
||||
# normalize TXT records (DNSSEC signing inserts empty text
|
||||
# string components)
|
||||
response = response.replace("\"\" ", "")
|
||||
|
||||
# is it right?
|
||||
if response == expected_answer:
|
||||
#print(server, ":", qname, rtype, "?", response)
|
||||
continue
|
||||
|
||||
# show prolem
|
||||
# show problem
|
||||
if first:
|
||||
print("Incorrect DNS Response from", description)
|
||||
print()
|
||||
|
@ -74,7 +83,8 @@ def test2(tests, server, description):
|
|||
first = False
|
||||
|
||||
print((qname + "/" + rtype).ljust(20), response.ljust(12), expected_answer, sep='\t')
|
||||
return first # success
|
||||
# success
|
||||
return first
|
||||
|
||||
# Test the response from the machine itself.
|
||||
if not test(ipaddr, "Mail-in-a-Box"):
|
||||
|
|
|
@ -1,8 +1,14 @@
|
|||
#!/usr/bin/env python3
|
||||
# Tests sending and receiving mail by sending a test message to yourself.
|
||||
|
||||
import sys, imaplib, smtplib, uuid, time
|
||||
import socket, dns.reversename, dns.resolver
|
||||
import sys
|
||||
import imaplib
|
||||
import smtplib
|
||||
import uuid
|
||||
import time
|
||||
import socket
|
||||
import dns.reversename
|
||||
import dns.resolver
|
||||
|
||||
if len(sys.argv) < 3:
|
||||
print("Usage: tests/mail.py hostname emailaddress password")
|
||||
|
@ -48,6 +54,7 @@ server.starttls()
|
|||
# Verify that the EHLO name matches the server's reverse DNS.
|
||||
ipaddr = socket.gethostbyname(host) # IPv4 only!
|
||||
reverse_ip = dns.reversename.from_address(ipaddr) # e.g. "1.0.0.127.in-addr.arpa."
|
||||
|
||||
try:
|
||||
reverse_dns = dns.resolver.query(reverse_ip, 'PTR')[0].target.to_text(omit_final_dot=True) # => hostname
|
||||
except dns.resolver.NXDOMAIN:
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
#!/usr/bin/env python3
|
||||
import smtplib, sys
|
||||
import smtplib
|
||||
import sys
|
||||
|
||||
if len(sys.argv) < 3:
|
||||
print("Usage: tests/smtp_server.py host email.to email.from")
|
||||
|
@ -16,4 +17,3 @@ server = smtplib.SMTP(host, 25)
|
|||
server.set_debuglevel(1)
|
||||
server.sendmail(fromaddr, [toaddr], msg)
|
||||
server.quit()
|
||||
|
||||
|
|
|
@ -20,7 +20,8 @@
|
|||
# NAME VAL
|
||||
# UE
|
||||
|
||||
import sys, re
|
||||
import sys
|
||||
import re
|
||||
|
||||
# sanity check
|
||||
if len(sys.argv) < 3:
|
||||
|
@@ -74,18 +75,20 @@ while len(input_lines) > 0:
# Check that this line contain this setting from the command-line arguments.
name, val = settings[i].split("=", 1)
m = re.match(
"(\s*)"
+ "(" + re.escape(comment_char) + "\s*)?"
+ re.escape(name) + delimiter_re + "(.*?)\s*$",
"(\s*)" +
"(" + re.escape(comment_char) + "\s*)?" +
re.escape(name) + delimiter_re + "(.*?)\s*$",
line, re.S)
if not m: continue
if not m:
continue
indent, is_comment, existing_val = m.groups()

# If this is already the setting, do nothing.
if is_comment is None and existing_val == val:
# It may be that we've already inserted this setting higher
# in the file so check for that first.
if i in found: break
if i in found:
break
buf += line
found.add(i)
break

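An illustrative sketch (not part of the commit) of what this editconf.py regex matches; the delimiter, comment character, and setting name below are hypothetical stand-ins for what the tool derives from its command-line arguments.

import re

comment_char = "#"
delimiter_re = "\s*=\s*"
name = "PasswordAuthentication"
for line in ["#PasswordAuthentication=yes\n", "PasswordAuthentication=no\n"]:
    m = re.match(
        "(\s*)" +
        "(" + re.escape(comment_char) + "\s*)?" +
        re.escape(name) + delimiter_re + "(.*?)\s*$",
        line, re.S)
    # m.groups() -> (leading indent, "#" if the line was commented out, current value)
    print(m.groups() if m else None)
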
@ -1,6 +1,11 @@
|
|||
#!/usr/bin/python3
|
||||
|
||||
import sys, getpass, urllib.request, urllib.error, json
|
||||
import sys
|
||||
import getpass
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
import json
|
||||
|
||||
|
||||
def mgmt(cmd, data=None, is_json=False):
|
||||
# The base URL for the management daemon. (Listens on IPv4 only.)
|
||||
|
@ -24,9 +29,11 @@ def mgmt(cmd, data=None, is_json=False):
|
|||
print(e, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
resp = response.read().decode('utf8')
|
||||
if is_json: resp = json.loads(resp)
|
||||
if is_json:
|
||||
resp = json.loads(resp)
|
||||
return resp
|
||||
|
||||
|
||||
def read_password():
|
||||
first = getpass.getpass('password: ')
|
||||
second = getpass.getpass(' (again): ')
|
||||
|
@ -36,6 +43,7 @@ def read_password():
|
|||
second = getpass.getpass(' (again): ')
|
||||
return first
|
||||
|
||||
|
||||
def setup_key_auth(mgmt_uri):
|
||||
key = open('/var/lib/mailinabox/api.key').read().strip()
|
||||
|
||||
|
@ -70,7 +78,8 @@ elif sys.argv[1] == "user" and len(sys.argv) == 2:
|
|||
users = mgmt("/mail/users?format=json", is_json=True)
|
||||
for domain in users:
|
||||
for user in domain["users"]:
|
||||
if user['status'] == 'inactive': continue
|
||||
if user['status'] == 'inactive':
|
||||
continue
|
||||
print(user['email'], end='')
|
||||
if "admin" in user['privileges']:
|
||||
print("*", end='')
|
||||
|
@ -87,19 +96,19 @@ elif sys.argv[1] == "user" and sys.argv[2] in ("add", "password"):
|
|||
email, pw = sys.argv[3:5]
|
||||
|
||||
if sys.argv[2] == "add":
|
||||
print(mgmt("/mail/users/add", { "email": email, "password": pw }))
|
||||
print(mgmt("/mail/users/add", {"email": email, "password": pw}))
|
||||
elif sys.argv[2] == "password":
|
||||
print(mgmt("/mail/users/password", { "email": email, "password": pw }))
|
||||
print(mgmt("/mail/users/password", {"email": email, "password": pw}))
|
||||
|
||||
elif sys.argv[1] == "user" and sys.argv[2] == "remove" and len(sys.argv) == 4:
|
||||
print(mgmt("/mail/users/remove", { "email": sys.argv[3] }))
|
||||
print(mgmt("/mail/users/remove", {"email": sys.argv[3]}))
|
||||
|
||||
elif sys.argv[1] == "user" and sys.argv[2] in ("make-admin", "remove-admin") and len(sys.argv) == 4:
|
||||
if sys.argv[2] == "make-admin":
|
||||
action = "add"
|
||||
else:
|
||||
action = "remove"
|
||||
print(mgmt("/mail/users/privileges/" + action, { "email": sys.argv[3], "privilege": "admin" }))
|
||||
print(mgmt("/mail/users/privileges/" + action, {"email": sys.argv[3], "privilege": "admin"}))
|
||||
|
||||
elif sys.argv[1] == "user" and sys.argv[2] == "admins":
|
||||
# Dump a list of admin users.
|
||||
|
@ -113,12 +122,11 @@ elif sys.argv[1] == "alias" and len(sys.argv) == 2:
|
|||
print(mgmt("/mail/aliases"))
|
||||
|
||||
elif sys.argv[1] == "alias" and sys.argv[2] == "add" and len(sys.argv) == 5:
|
||||
print(mgmt("/mail/aliases/add", { "source": sys.argv[3], "destination": sys.argv[4] }))
|
||||
print(mgmt("/mail/aliases/add", {"source": sys.argv[3], "destination": sys.argv[4]}))
|
||||
|
||||
elif sys.argv[1] == "alias" and sys.argv[2] == "remove" and len(sys.argv) == 4:
|
||||
print(mgmt("/mail/aliases/remove", { "source": sys.argv[3] }))
|
||||
print(mgmt("/mail/aliases/remove", {"source": sys.argv[3]}))
|
||||
|
||||
else:
|
||||
print("Invalid command-line arguments.")
|
||||
sys.exit(1)
|
||||
|
||||
|
|
|
@ -4,7 +4,11 @@
|
|||
# access log to see how many people are installing Mail-in-a-Box each day, by
|
||||
# looking at accesses to the bootstrap.sh script.
|
||||
|
||||
import re, glob, gzip, os.path, json
|
||||
import re
|
||||
import glob
|
||||
import gzip
|
||||
import os.path
|
||||
import json
|
||||
import dateutil.parser
|
||||
|
||||
outfn = "/home/user-data/www/mailinabox.email/install-stats.json"
|
||||
|
@ -30,10 +34,10 @@ for fn in glob.glob("/var/log/nginx/access.log*"):
|
|||
date, time = m.group("date").decode("ascii").split(":", 1)
|
||||
date = dateutil.parser.parse(date).date().isoformat()
|
||||
ip = m.group("ip").decode("ascii")
|
||||
accesses.add( (date, ip) )
|
||||
accesses.add((date, ip))
|
||||
|
||||
# Aggregate by date.
|
||||
by_date = { }
|
||||
by_date = {}
|
||||
for date, ip in accesses:
|
||||
by_date[date] = by_date.get(date, 0) + 1
|
||||
|
||||
|
|
|
@ -3,10 +3,12 @@
|
|||
# Generate documentation for how this machine works by
|
||||
# parsing our bash scripts!
|
||||
|
||||
import cgi, re
|
||||
import cgi
|
||||
import re
|
||||
import markdown
|
||||
from modgrammar import *
|
||||
|
||||
|
||||
def generate_documentation():
|
||||
print("""<!DOCTYPE html>
|
||||
<html>
|
||||
|
@ -151,11 +153,14 @@ def generate_documentation():
|
|||
</html>
|
||||
""")
|
||||
|
||||
|
||||
class HashBang(Grammar):
|
||||
grammar = (L('#!'), REST_OF_LINE, EOL)
|
||||
|
||||
def value(self):
|
||||
return ""
|
||||
|
||||
|
||||
def strip_indent(s):
|
||||
s = s.replace("\t", " ")
|
||||
lines = s.split("\n")
|
||||
|
@ -167,8 +172,10 @@ def strip_indent(s):
|
|||
lines = [line[min_indent:] for line in lines]
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
class Comment(Grammar):
|
||||
grammar = ONE_OR_MORE(ZERO_OR_MORE(SPACE), L('#'), REST_OF_LINE, EOL)
|
||||
|
||||
def value(self):
|
||||
if self.string.replace("#", "").strip() == "":
|
||||
return "\n"
|
||||
|
@ -179,35 +186,46 @@ class Comment(Grammar):
|
|||
|
||||
FILENAME = WORD('a-z0-9-/.')
|
||||
|
||||
|
||||
class Source(Grammar):
|
||||
grammar = ((L('.') | L('source')), L(' '), FILENAME, Comment | EOL)
|
||||
|
||||
def filename(self):
|
||||
return self[2].string.strip()
|
||||
|
||||
def value(self):
|
||||
return BashScript.parse(self.filename())
|
||||
|
||||
|
||||
class CatEOF(Grammar):
|
||||
grammar = (ZERO_OR_MORE(SPACE), L('cat '), L('>') | L('>>'), L(' '), ANY_EXCEPT(WHITESPACE), L(" <<"), OPTIONAL(SPACE), L("EOF"), EOL, REPEAT(ANY, greedy=False), EOL, L("EOF"), EOL)
|
||||
|
||||
def value(self):
|
||||
content = self[9].string
|
||||
content = re.sub(r"\\([$])", r"\1", content) # un-escape bash-escaped characters
|
||||
# un-escape bash-escaped characters
|
||||
content = re.sub(r"\\([$])", r"\1", content)
|
||||
return "<div class='write-to'><div class='filename'>%s <span>(%s)</span></div><pre>%s</pre></div>\n" \
|
||||
% (self[4].string,
|
||||
"overwrite" if ">>" not in self[2].string else "append to",
|
||||
cgi.escape(content))
|
||||
|
||||
|
||||
class HideOutput(Grammar):
|
||||
grammar = (L("hide_output "), REF("BashElement"))
|
||||
|
||||
def value(self):
|
||||
return self[1].value()
|
||||
|
||||
|
||||
class EchoLine(Grammar):
|
||||
grammar = (OPTIONAL(SPACE), L("echo "), REST_OF_LINE, EOL)
|
||||
|
||||
def value(self):
|
||||
if "|" in self.string or ">" in self.string:
|
||||
return "<pre class='shell'><div>" + recode_bash(self.string.strip()) + "</div></pre>\n"
|
||||
return ""
|
||||
|
||||
|
||||
class EditConf(Grammar):
|
||||
grammar = (
|
||||
L('tools/editconf.py '),
|
||||
|
@ -221,61 +239,86 @@ class EditConf(Grammar):
|
|||
OPTIONAL(SPACE),
|
||||
EOL
|
||||
)
|
||||
|
||||
def value(self):
|
||||
conffile = self[1]
|
||||
options = []
|
||||
eq = "="
|
||||
if self[3] and "-s" in self[3].string: eq = " "
|
||||
if self[3] and "-s" in self[3].string:
|
||||
eq = " "
|
||||
for opt in re.split("\s+", self[4].string):
|
||||
k, v = opt.split("=", 1)
|
||||
v = re.sub(r"\n+", "", fixup_tokens(v)) # not sure why newlines are getting doubled
|
||||
# not sure why newlines are getting doubled
|
||||
v = re.sub(r"\n+", "", fixup_tokens(v))
|
||||
options.append("%s%s%s" % (k, eq, v))
|
||||
return "<div class='write-to'><div class='filename'>" + self[1].string + " <span>(change settings)</span></div><pre>" + "\n".join(cgi.escape(s) for s in options) + "</pre></div>\n"
|
||||
|
||||
|
||||
class CaptureOutput(Grammar):
|
||||
grammar = OPTIONAL(SPACE), WORD("A-Za-z_"), L('=$('), REST_OF_LINE, L(")"), OPTIONAL(L(';')), EOL
|
||||
|
||||
def value(self):
|
||||
cmd = self[3].string
|
||||
cmd = cmd.replace("; ", "\n")
|
||||
return "<div class='write-to'><div class='filename'>$" + self[1].string + "=</div><pre>" + cgi.escape(cmd) + "</pre></div>\n"
|
||||
|
||||
|
||||
class SedReplace(Grammar):
|
||||
grammar = OPTIONAL(SPACE), L('sed -i "s/'), OPTIONAL(L('^')), ONE_OR_MORE(WORD("-A-Za-z0-9 #=\\{};.*$_!()")), L('/'), ONE_OR_MORE(WORD("-A-Za-z0-9 #=\\{};.*$_!()")), L('/"'), SPACE, FILENAME, EOL
|
||||
|
||||
def value(self):
|
||||
return "<div class='write-to'><div class='filename'>edit<br>" + self[8].string + "</div><p>replace</p><pre>" + cgi.escape(self[3].string.replace(".*", ". . .")) + "</pre><p>with</p><pre>" + cgi.escape(self[5].string.replace("\\n", "\n").replace("\\t", "\t")) + "</pre></div>\n"
|
||||
|
||||
|
||||
class EchoPipe(Grammar):
|
||||
grammar = OPTIONAL(SPACE), L("echo "), REST_OF_LINE, L(' | '), REST_OF_LINE, EOL
|
||||
|
||||
def value(self):
|
||||
text = " ".join("\"%s\"" % s for s in self[2].string.split(" "))
|
||||
return "<pre class='shell'><div>echo " + recode_bash(text) + " \<br> | " + recode_bash(self[4].string) + "</div></pre>\n"
|
||||
|
||||
|
||||
def shell_line(bash):
|
||||
return "<pre class='shell'><div>" + recode_bash(bash.strip()) + "</div></pre>\n"
|
||||
|
||||
|
||||
class AptGet(Grammar):
|
||||
grammar = (ZERO_OR_MORE(SPACE), L("apt_install "), REST_OF_LINE, EOL)
|
||||
|
||||
def value(self):
|
||||
return shell_line("apt-get install -y " + re.sub(r"\s+", " ", self[2].string))
|
||||
|
||||
|
||||
class UfwAllow(Grammar):
|
||||
grammar = (ZERO_OR_MORE(SPACE), L("ufw_allow "), REST_OF_LINE, EOL)
|
||||
|
||||
def value(self):
|
||||
return shell_line("ufw allow " + self[2].string)
|
||||
|
||||
|
||||
class RestartService(Grammar):
|
||||
grammar = (ZERO_OR_MORE(SPACE), L("restart_service "), REST_OF_LINE, EOL)
|
||||
|
||||
def value(self):
|
||||
return shell_line("service " + self[2].string + " restart")
|
||||
|
||||
|
||||
class OtherLine(Grammar):
|
||||
grammar = (REST_OF_LINE, EOL)
|
||||
|
||||
def value(self):
|
||||
if self.string.strip() == "": return ""
|
||||
if "source setup/functions.sh" in self.string: return ""
|
||||
if "source /etc/mailinabox.conf" in self.string: return ""
|
||||
if self.string.strip() == "":
|
||||
return ""
|
||||
if "source setup/functions.sh" in self.string:
|
||||
return ""
|
||||
if "source /etc/mailinabox.conf" in self.string:
|
||||
return ""
|
||||
return "<pre class='shell'><div>" + recode_bash(self.string.strip()) + "</div></pre>\n"
|
||||
|
||||
|
||||
class BashElement(Grammar):
|
||||
grammar = Comment | CatEOF | EchoPipe | EchoLine | HideOutput | EditConf | SedReplace | AptGet | UfwAllow | RestartService | OtherLine
|
||||
|
||||
def value(self):
|
||||
return self[0].value()
|
||||
|
||||
|
@ -292,6 +335,7 @@ bash_escapes = {
|
|||
"t": "\uE021",
|
||||
}
|
||||
|
||||
|
||||
def quasitokenize(bashscript):
|
||||
# Make a parse of bash easier by making the tokenization easy.
|
||||
newscript = ""
|
||||
|
@ -366,6 +410,7 @@ def quasitokenize(bashscript):
|
|||
|
||||
return newscript
|
||||
|
||||
|
||||
def recode_bash(s):
|
||||
def requote(tok):
|
||||
tok = tok.replace("\\", "\\\\")
|
||||
|
@ -374,12 +419,13 @@ def recode_bash(s):
|
|||
tok = fixup_tokens(tok)
|
||||
if " " in tok or '"' in tok:
|
||||
tok = tok.replace("\"", "\\\"")
|
||||
tok = '"' + tok +'"'
|
||||
tok = '"' + tok + '"'
|
||||
else:
|
||||
tok = tok.replace("'", "\\'")
|
||||
return tok
|
||||
return cgi.escape(" ".join(requote(tok) for tok in s.split(" ")))
|
||||
|
||||
|
||||
def fixup_tokens(s):
|
||||
for c, enc in bash_special_characters1.items():
|
||||
s = s.replace(enc, c)
|
||||
|
@ -389,14 +435,17 @@ def fixup_tokens(s):
|
|||
s = s.replace(c, "\\" + esc)
|
||||
return s
|
||||
|
||||
|
||||
class BashScript(Grammar):
|
||||
grammar = (OPTIONAL(HashBang), REPEAT(BashElement))
|
||||
|
||||
def value(self):
|
||||
return [line.value() for line in self[1]]
|
||||
|
||||
@staticmethod
|
||||
def parse(fn):
|
||||
if fn in ("setup/functions.sh", "/etc/mailinabox.conf"): return ""
|
||||
if fn in ("setup/functions.sh", "/etc/mailinabox.conf"):
|
||||
return ""
|
||||
string = open(fn).read()
|
||||
|
||||
# tokenize
|
||||
|
@ -454,7 +503,7 @@ class BashScript(Grammar):
|
|||
v = fixup_tokens(v)
|
||||
|
||||
v = v.replace("</pre>\n<pre class='shell'>", "")
|
||||
v = re.sub("<pre>([\w\W]*?)</pre>", lambda m : "<pre>" + strip_indent(m.group(1)) + "</pre>", v)
|
||||
v = re.sub("<pre>([\w\W]*?)</pre>", lambda m: "<pre>" + strip_indent(m.group(1)) + "</pre>", v)
|
||||
|
||||
v = re.sub(r"(\$?)PRIMARY_HOSTNAME", r"<b>box.yourdomain.com</b>", v)
|
||||
v = re.sub(r"\$STORAGE_ROOT", r"<b>$STORE</b>", v)
|
||||
|
@ -463,6 +512,7 @@ class BashScript(Grammar):
|
|||
|
||||
return v
|
||||
|
||||
|
||||
def wrap_lines(text, cols=60):
|
||||
ret = ""
|
||||
words = re.split("(\s+)", text)
|
||||
|
@ -472,7 +522,8 @@ def wrap_lines(text, cols=60):
|
|||
ret += " \\\n"
|
||||
ret += " "
|
||||
linelen = 0
|
||||
if linelen == 0 and w.strip() == "": continue
|
||||
if linelen == 0 and w.strip() == "":
|
||||
continue
|
||||
ret += w
|
||||
linelen += len(w)
|
||||
return ret
|
||||
|
|
Loading…
Reference in New Issue