chore(python open): Refactor open and gzip.open to use context manager (#2203)
Co-authored-by: Hugh Secker-Walker <hsw+miac@hodain.net>
commit 820a39b865
parent 57047d96e9
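Every hunk below applies the same shape of change: a bare open() or gzip.open() whose file object was only closed when garbage-collected is wrapped in a with block, so the handle is closed deterministically, including when an exception is raised mid-read. A minimal sketch of the before/after shape (illustrative only, not lifted verbatim from any one file):

    # before: the file object returned by open() is never explicitly closed
    config = rtyaml.load(open(path))

    # after: the context manager closes the handle on exit, error or not
    with open(path, 'r') as f:
        config = rtyaml.load(f)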
@@ -531,7 +531,8 @@ def get_backup_config(env, for_save=False, for_ui=False):
 	# Merge in anything written to custom.yaml.
 	try:
-		custom_config = rtyaml.load(open(os.path.join(backup_root, 'custom.yaml')))
+		with open(os.path.join(backup_root, 'custom.yaml'), 'r') as f:
+			custom_config = rtyaml.load(f)
 		if not isinstance(custom_config, dict): raise ValueError() # caught below
 		config.update(custom_config)
 	except:
@@ -556,7 +557,8 @@ def get_backup_config(env, for_save=False, for_ui=False):
 		config["target"] = "file://" + config["file_target_directory"]
 	ssh_pub_key = os.path.join('/root', '.ssh', 'id_rsa_miab.pub')
 	if os.path.exists(ssh_pub_key):
-		config["ssh_pub_key"] = open(ssh_pub_key, 'r').read()
+		with open(ssh_pub_key, 'r') as f:
+			config["ssh_pub_key"] = f.read()

 	return config
@@ -47,7 +47,8 @@ def read_password():
 	return first

 def setup_key_auth(mgmt_uri):
-	key = open('/var/lib/mailinabox/api.key').read().strip()
+	with open('/var/lib/mailinabox/api.key', 'r') as f:
+		key = f.read().strip()

 	auth_handler = urllib.request.HTTPBasicAuthHandler()
 	auth_handler.add_password(
@@ -815,7 +815,8 @@ def write_opendkim_tables(domains, env):

 def get_custom_dns_config(env, only_real_records=False):
 	try:
-		custom_dns = rtyaml.load(open(os.path.join(env['STORAGE_ROOT'], 'dns/custom.yaml')))
+		with open(os.path.join(env['STORAGE_ROOT'], 'dns/custom.yaml'), 'r') as f:
+			custom_dns = rtyaml.load(f)
 		if not isinstance(custom_dns, dict): raise ValueError() # caught below
 	except:
 		return [ ]
@@ -73,7 +73,8 @@ def scan_files(collector):
 			continue
 		elif fn[-3:] == '.gz':
 			tmp_file = tempfile.NamedTemporaryFile()
-			shutil.copyfileobj(gzip.open(fn), tmp_file)
+			with gzip.open(fn, 'rb') as f:
+				shutil.copyfileobj(f, tmp_file)

 		if VERBOSE:
 			print("Processing file", fn, "...")
@@ -535,7 +535,8 @@ def check_certificate(domain, ssl_certificate, ssl_private_key, warn_if_expiring
 	# Second, check that the certificate matches the private key.
 	if ssl_private_key is not None:
 		try:
-			priv_key = load_pem(open(ssl_private_key, 'rb').read())
+			with open(ssl_private_key, 'rb') as f:
+				priv_key = load_pem(f.read())
 		except ValueError as e:
 			return ("The private key file %s is not a private key file: %s" % (ssl_private_key, str(e)), None)
@@ -207,7 +207,8 @@ def check_ssh_password(env, output):
 	# the configuration file.
 	if not os.path.exists("/etc/ssh/sshd_config"):
 		return
-	sshd = open("/etc/ssh/sshd_config").read()
+	with open("/etc/ssh/sshd_config", "r") as f:
+		sshd = f.read()
 	if re.search("\nPasswordAuthentication\s+yes", sshd) \
 		or not re.search("\nPasswordAuthentication\s+no", sshd):
 			output.print_error("""The SSH server on this machine permits password-based login. A more secure
@@ -594,7 +595,8 @@ def check_dnssec(domain, env, output, dns_zonefiles, is_checking_primary=False):
 	# record that we suggest using is for the KSK (and that's how the DS records were generated).
 	# We'll also give the nice name for the key algorithm.
 	dnssec_keys = load_env_vars_from_file(os.path.join(env['STORAGE_ROOT'], 'dns/dnssec/%s.conf' % alg_name_map[ds_alg]))
-	dnsssec_pubkey = open(os.path.join(env['STORAGE_ROOT'], 'dns/dnssec/' + dnssec_keys['KSK'] + '.key')).read().split("\t")[3].split(" ")[3]
+	with open(os.path.join(env['STORAGE_ROOT'], 'dns/dnssec/' + dnssec_keys['KSK'] + '.key'), 'r') as f:
+		dnsssec_pubkey = f.read().split("\t")[3].split(" ")[3]

 	expected_ds_records[ (ds_keytag, ds_alg, ds_digalg, ds_digest) ] = {
 		"record": rr_ds,
@@ -956,7 +958,8 @@ def run_and_output_changes(env, pool):
 	# Load previously saved status checks.
 	cache_fn = "/var/cache/mailinabox/status_checks.json"
 	if os.path.exists(cache_fn):
-		prev = json.load(open(cache_fn))
+		with open(cache_fn, 'r') as f:
+			prev = json.load(f)

 		# Group the serial output into categories by the headings.
 		def group_by_heading(lines):
@@ -14,7 +14,9 @@ def load_env_vars_from_file(fn):
 	# Load settings from a KEY=VALUE file.
 	import collections
 	env = collections.OrderedDict()
-	for line in open(fn): env.setdefault(*line.strip().split("=", 1))
+	with open(fn, 'r') as f:
+		for line in f:
+			env.setdefault(*line.strip().split("=", 1))
 	return env

 def save_environment(env):
@@ -34,7 +36,8 @@ def load_settings(env):
 	import rtyaml
 	fn = os.path.join(env['STORAGE_ROOT'], 'settings.yaml')
 	try:
-		config = rtyaml.load(open(fn, "r"))
+		with open(fn, "r") as f:
+			config = rtyaml.load(f)
 		if not isinstance(config, dict): raise ValueError() # caught below
 		return config
 	except:
@@ -63,7 +63,8 @@ def get_web_domains_with_root_overrides(env):
 	root_overrides = { }
 	nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
 	if os.path.exists(nginx_conf_custom_fn):
-		custom_settings = rtyaml.load(open(nginx_conf_custom_fn))
+		with open(nginx_conf_custom_fn, 'r') as f:
+			custom_settings = rtyaml.load(f)
 		for domain, settings in custom_settings.items():
 			for type, value in [('redirect', settings.get('redirects', {}).get('/')),
 				('proxy', settings.get('proxies', {}).get('/'))]:
@@ -75,13 +76,18 @@ def do_web_update(env):
 	# Pre-load what SSL certificates we will use for each domain.
 	ssl_certificates = get_ssl_certificates(env)

+	# Helper for reading config files and templates
+	def read_conf(conf_fn):
+		with open(os.path.join(os.path.dirname(__file__), "../conf", conf_fn), "r") as f:
+			return f.read()
+
 	# Build an nginx configuration file.
-	nginx_conf = open(os.path.join(os.path.dirname(__file__), "../conf/nginx-top.conf")).read()
+	nginx_conf = read_conf("nginx-top.conf")

 	# Load the templates.
-	template0 = open(os.path.join(os.path.dirname(__file__), "../conf/nginx.conf")).read()
-	template1 = open(os.path.join(os.path.dirname(__file__), "../conf/nginx-alldomains.conf")).read()
-	template2 = open(os.path.join(os.path.dirname(__file__), "../conf/nginx-primaryonly.conf")).read()
+	template0 = read_conf("nginx.conf")
+	template1 = read_conf("nginx-alldomains.conf")
+	template2 = read_conf("nginx-primaryonly.conf")
 	template3 = "\trewrite ^(.*) https://$REDIRECT_DOMAIN$1 permanent;\n"

 	# Add the PRIMARY_HOST configuration first so it becomes nginx's default server.
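Note on the hunk above: instead of repeating a with block for each of the four nginx config reads, the change adds a small read_conf() helper so the context manager lives in one place. A self-contained sketch of the same idea (the ../conf layout is assumed from the paths in the diff):

    import os

    def read_conf(conf_fn):
        # read a template that sits in ../conf relative to this module
        with open(os.path.join(os.path.dirname(__file__), "../conf", conf_fn), "r") as f:
            return f.read()

    nginx_conf = read_conf("nginx-top.conf")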
@@ -141,11 +147,8 @@ def make_domain_config(domain, templates, ssl_certificates, env):
 		def hashfile(filepath):
 			import hashlib
 			sha1 = hashlib.sha1()
-			f = open(filepath, 'rb')
-			try:
+			with open(filepath, 'rb') as f:
 				sha1.update(f.read())
-			finally:
-				f.close()
 			return sha1.hexdigest()
 		nginx_conf_extra += "\t# ssl files sha1: %s / %s\n" % (hashfile(tls_cert["private-key"]), hashfile(tls_cert["certificate"]))
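Note: hashfile() is the one spot where the old code already closed the handle via try/finally; the with statement is the equivalent, shorter spelling. Roughly equivalent sketch:

    import hashlib

    def hashfile(filepath):
        sha1 = hashlib.sha1()
        # the with block behaves like try: ... finally: f.close()
        with open(filepath, 'rb') as f:
            sha1.update(f.read())
        return sha1.hexdigest()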
@@ -153,7 +156,8 @@ def make_domain_config(domain, templates, ssl_certificates, env):
 	hsts = "yes"
 	nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
 	if os.path.exists(nginx_conf_custom_fn):
-		yaml = rtyaml.load(open(nginx_conf_custom_fn))
+		with open(nginx_conf_custom_fn, 'r') as f:
+			yaml = rtyaml.load(f)
 		if domain in yaml:
 			yaml = yaml[domain]
@@ -76,7 +76,8 @@ for setting in settings:

 found = set()
 buf = ""
-input_lines = list(open(filename))
+with open(filename, "r") as f:
+	input_lines = list(f)

 while len(input_lines) > 0:
 	line = input_lines.pop(0)
@@ -17,13 +17,8 @@ accesses = set()
 # Scan the current and rotated access logs.
 for fn in glob.glob("/var/log/nginx/access.log*"):
 	# Gunzip if necessary.
-	if fn.endswith(".gz"):
-		f = gzip.open(fn)
-	else:
-		f = open(fn, "rb")
-
 	# Loop through the lines in the access log.
-	with f:
+	with (gzip.open if fn.endswith(".gz") else open)(fn, "rb") as f:
 		for line in f:
 			# Find lines that are GETs on the bootstrap script by either curl or wget.
 			# (Note that we purposely skip ...?ping=1 requests which is the admin panel querying us for updates.)
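Note: the hunk above collapses the gzipped/plain branch into a single with statement by selecting the opener with a conditional expression and then calling it; both gzip.open and open return a binary file object in "rb" mode, so the loop body does not change. A standalone sketch of the trick (log paths as in the diff):

    import glob
    import gzip

    for fn in glob.glob("/var/log/nginx/access.log*"):
        # pick the right opener for rotated (.gz) vs. current logs, then open it
        with (gzip.open if fn.endswith(".gz") else open)(fn, "rb") as f:
            for line in f:
                pass  # each line is bytes from the access log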
@@ -43,7 +38,8 @@ for date, ip in accesses:
 # Since logs are rotated, store the statistics permanently in a JSON file.
 # Load in the stats from an existing file.
 if os.path.exists(outfn):
-	existing_data = json.load(open(outfn))
+	with open(outfn, "r") as f:
+		existing_data = json.load(f)
 	for date, count in existing_data:
 		if date not in by_date:
 			by_date[date] = count
@@ -124,13 +124,14 @@ def generate_documentation():
 	""")

 	parser = Source.parser()
-	for line in open("setup/start.sh"):
-		try:
-			fn = parser.parse_string(line).filename()
-		except:
-			continue
-		if fn in ("setup/start.sh", "setup/preflight.sh", "setup/questions.sh", "setup/firstuser.sh", "setup/management.sh"):
-			continue
+	with open("setup/start.sh", "r") as start_file:
+		for line in start_file:
+			try:
+				fn = parser.parse_string(line).filename()
+			except:
+				continue
+			if fn in ("setup/start.sh", "setup/preflight.sh", "setup/questions.sh", "setup/firstuser.sh", "setup/management.sh"):
+				continue

-		import sys
-		print(fn, file=sys.stderr)
+			import sys
+			print(fn, file=sys.stderr)
@@ -401,7 +402,8 @@ class BashScript(Grammar):
 	@staticmethod
 	def parse(fn):
 		if fn in ("setup/functions.sh", "/etc/mailinabox.conf"): return ""
-		string = open(fn).read()
+		with open(fn, "r") as f:
+			string = f.read()

 		# tokenize
 		string = re.sub(".* #NODOC\n", "", string)