import os.path

CONF_DIR = os.path.join(os.path.dirname(__file__), "../conf")

def load_environment():
    # Load settings from /etc/mailinabox.conf.
    return load_env_vars_from_file("/etc/mailinabox.conf")

def load_env_vars_from_file(fn):
    # Load settings from a KEY=VALUE file.
    import collections
    env = collections.OrderedDict()
    for line in open(fn): env.setdefault(*line.strip().split("=", 1))
    return env
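
# Illustrative sketch (hypothetical file contents, not part of the original
# module): a conf file with the lines
#   PRIMARY_HOSTNAME=box.example.com
#   PUBLIC_IP=192.0.2.1
# loads as OrderedDict([('PRIMARY_HOSTNAME', 'box.example.com'),
# ('PUBLIC_IP', '192.0.2.1')]). setdefault() keeps the first value if a key
# appears more than once.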

def save_environment(env):
    with open("/etc/mailinabox.conf", "w") as f:
        for k, v in env.items():
            f.write("%s=%s\n" % (k, v))
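
# Illustrative note: save_environment(load_environment()) rewrites
# /etc/mailinabox.conf with the same KEY=VALUE pairs it was read from,
# assuming the file contains only well-formed KEY=VALUE lines.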

def safe_domain_name(name):
    # Sanitize a domain name so it is safe to use as a file name on disk.
    import urllib.parse
    return urllib.parse.quote(name, safe='')

def unsafe_domain_name(name_encoded):
    import urllib.parse
    return urllib.parse.unquote(name_encoded)
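
# Illustrative example (hypothetical names): characters that are unsafe in a
# file name are percent-encoded, e.g. safe_domain_name("a/b.example") returns
# "a%2Fb.example", while a plain name like "example.com" passes through
# unchanged; unsafe_domain_name("a%2Fb.example") reverses the encoding.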

def sort_domains(domain_names, env):
    # Put domain names in a nice sorted order. For web_update, PRIMARY_HOSTNAME
    # must appear first so it becomes the nginx default server.

    # First group PRIMARY_HOSTNAME and its subdomains, then parent domains of
    # PRIMARY_HOSTNAME, then other domains.
    groups = ([], [], [])
    for d in domain_names:
        if d == env['PRIMARY_HOSTNAME'] or d.endswith("." + env['PRIMARY_HOSTNAME']):
            groups[0].append(d)
        elif env['PRIMARY_HOSTNAME'].endswith("." + d):
            groups[1].append(d)
        else:
            groups[2].append(d)

    # Within each group, sort parent domains before their subdomains and,
    # beyond that, sort lexicographically.
    def sort_group(group):
        # Find the top-most domains: those that are not a subdomain of any
        # other domain in the group.
        top_domains = sorted(d for d in group if len([s for s in group if d.endswith("." + s)]) == 0)
        ret = []
        for d in top_domains:
            ret.append(d)
            ret.extend(sort_group([s for s in group if s.endswith("." + d)]))
        return ret

    groups = [sort_group(g) for g in groups]

    return groups[0] + groups[1] + groups[2]
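
# Illustrative example (hypothetical domains): with env['PRIMARY_HOSTNAME'] set
# to "box.example.com",
#   sort_domains(["example.com", "box.example.com", "mail.box.example.com", "other.org"], env)
# returns ["box.example.com", "mail.box.example.com", "example.com", "other.org"]:
# the primary hostname and its subdomains come first, then its parent domains,
# then everything else.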

def sort_email_addresses(email_addresses, env):
    email_addresses = set(email_addresses)
    domains = set(email.split("@", 1)[1] for email in email_addresses if "@" in email)
    ret = []
    for domain in sort_domains(domains, env):
        domain_emails = set(email for email in email_addresses if email.endswith("@" + domain))
        ret.extend(sorted(domain_emails))
        email_addresses -= domain_emails
    ret.extend(sorted(email_addresses)) # whatever is left
    return ret
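
# Illustrative example (hypothetical addresses): with the same PRIMARY_HOSTNAME,
#   sort_email_addresses(["b@example.com", "a@box.example.com", "a@example.com"], env)
# returns ["a@box.example.com", "a@example.com", "b@example.com"]: addresses are
# grouped by domain in sort_domains() order and sorted alphabetically within
# each domain.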

def exclusive_process(name):
    # Ensure that a process named `name` does not execute multiple
    # times concurrently.
    import os, sys, atexit
    pidfile = '/var/run/mailinabox-%s.pid' % name
    mypid = os.getpid()

    # Attempt to get a lock on ourself so that the concurrency check
    # itself is not executed in parallel.
    with open(__file__, 'r+') as flock:
        # Try to get a lock. This blocks until a lock is acquired. The
        # lock is held until the flock file is closed at the end of the
        # with block.
        os.lockf(flock.fileno(), os.F_LOCK, 0)

        # While we have a lock, look at the pid file. First attempt
        # to write our pid to a pidfile if no file already exists there.
        try:
            with open(pidfile, 'x') as f:
                # Successfully opened a new file. Since the file is new,
                # there is no concurrent process. Write our pid.
                f.write(str(mypid))
                atexit.register(clear_my_pid, pidfile)
                return
        except FileExistsError:
            # The pid file already exists, but it may contain a stale
            # pid of a terminated process.
            with open(pidfile, 'r+') as f:
                # Read the pid in the file.
                existing_pid = None
                try:
                    existing_pid = int(f.read().strip())
                except ValueError:
                    pass # No valid integer in the file.

                # Check if the pid in it is valid.
                if existing_pid:
                    if is_pid_valid(existing_pid):
                        print("Another %s is already running (pid %d)." % (name, existing_pid), file=sys.stderr)
                        sys.exit(1)

                # Write our pid.
                f.seek(0)
                f.write(str(mypid))
                f.truncate()
                atexit.register(clear_my_pid, pidfile)
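
# Illustrative usage (hypothetical daemon name): a long-running task would call
# exclusive_process("status_checks") at startup; a second invocation while the
# first is still running prints "Another status_checks is already running ..."
# and exits with status 1.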

def clear_my_pid(pidfile):
    import os
    os.unlink(pidfile)

def is_pid_valid(pid):
    """Checks whether a pid is a valid process ID of a currently running process."""
    # adapted from http://stackoverflow.com/questions/568271/how-to-check-if-there-exists-a-process-with-a-given-pid
    import os, errno
    if pid <= 0: raise ValueError('Invalid PID.')
    try:
        os.kill(pid, 0)
    except OSError as err:
        if err.errno == errno.ESRCH: # No such process
            return False
        elif err.errno == errno.EPERM: # Not permitted to send signal
            return True
        else: # EINVAL
            raise
    else:
        return True
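
# Illustrative example: is_pid_valid(os.getpid()) is True for the calling
# process; a pid whose process has exited returns False (ESRCH), and a pid
# owned by another user returns True (EPERM still means the process exists).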

def shell(method, cmd_args, env={}, capture_stderr=False, return_bytes=False, trap=False, input=None):
    # A safe way to execute processes.
    # Some processes like apt-get require being given a sane PATH.
    import subprocess

    env.update({ "PATH": "/sbin:/bin:/usr/sbin:/usr/bin" })
    kwargs = {
        'env': env,
        'stderr': None if not capture_stderr else subprocess.STDOUT,
    }
    if method == "check_output" and input is not None:
        kwargs['input'] = input

    if not trap:
        ret = getattr(subprocess, method)(cmd_args, **kwargs)
    else:
        try:
            ret = getattr(subprocess, method)(cmd_args, **kwargs)
            code = 0
        except subprocess.CalledProcessError as e:
            ret = e.output
            code = e.returncode
    if not return_bytes and isinstance(ret, bytes): ret = ret.decode("utf8")
    if not trap:
        return ret
    else:
        return code, ret
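
# Illustrative usage (hypothetical commands):
#   shell("check_output", ["/bin/ls", "-l"]) returns the command's stdout as a str;
#   shell("check_call", ["/usr/bin/apt-get", "update"]) raises
#   subprocess.CalledProcessError on failure, unless trap=True, in which case a
#   (returncode, output) tuple is returned instead.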

def create_syslog_handler():
    import logging.handlers
    handler = logging.handlers.SysLogHandler(address='/dev/log')
    handler.setLevel(logging.WARNING)
    return handler
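
# Illustrative usage: attach the handler to a logger so that warnings and
# errors are forwarded to syslog via /dev/log, e.g.
#   import logging
#   logging.getLogger().addHandler(create_syslog_handler())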