#!/usr/bin/python3
#
# This is a tool Josh uses on his box serving mailinabox.email to parse the nginx
# access log to see how many people are installing Mail-in-a-Box each day, by
# looking at accesses to the bootstrap.sh script (which is currently at the URL
# .../setup.sh).
import datetime
import glob
import gzip
import json
import os.path
import re

import dateutil.parser
# Where the aggregated (date, install count) statistics are stored permanently.
outfn = "/home/user-data/www/mailinabox.email/install-stats.json"

# Matches a successful (200) GET of the bootstrap script by either curl or wget.
# The URL changed in January 2016 ("/bootstrap.sh" -> "/setup.sh"), so we accept
# both. "...?ping=1" requests — the admin panel querying us for updates — are
# purposely skipped because the path must be followed immediately by " HTTP/".
access_line_re = re.compile(
	rb"(?P<ip>\S+) - - \[(?P<date>.*?)\] \"GET /(bootstrap\.sh|setup\.sh) HTTP/.*\" 200 \d+ .* \"(?:curl|wget)",
	re.I)


def parse_access_log_line(line):
	"""Parse one raw nginx access-log line (bytes).

	Returns a (date, ip) pair — the date as an ISO-format string (e.g.
	"2014-10-21") and the client IP address as a string — for lines that
	look like a Mail-in-a-Box install, or None for all other lines."""
	m = access_line_re.match(line)
	if not m:
		return None

	# The log timestamp looks like "21/Oct/2014:14:02:19 +0000". Keep just
	# the date part (before the first colon) and normalize it to ISO format.
	# nginx always writes English month abbreviations, so a fixed strptime
	# format replaces the third-party dateutil parser here.
	date = m.group("date").decode("ascii").split(":", 1)[0]
	date = datetime.datetime.strptime(date, "%d/%b/%Y").date().isoformat()
	return (date, m.group("ip").decode("ascii"))


def scan_access_logs():
	"""Scan the current and rotated nginx access logs.

	Returns the set of unique (date, ip) pairs for installs, so that
	multiple accesses from the same install on the same day are not
	double-counted."""
	accesses = set()
	for fn in glob.glob("/var/log/nginx/access.log*"):
		# Rotated logs are gzipped; open everything in binary mode so the
		# regex (a bytes pattern) can be applied uniformly.
		if fn.endswith(".gz"):
			f = gzip.open(fn, "rb")
		else:
			f = open(fn, "rb")
		with f:
			for line in f:
				access = parse_access_log_line(line)
				if access:
					accesses.add(access)
	return accesses


def load_existing_stats():
	"""Return the {date: count} statistics previously written to outfn,
	or an empty dict if the file does not exist yet. (The file stores a
	list of [date, count] pairs.)"""
	if not os.path.exists(outfn):
		return {}
	with open(outfn) as f:  # was json.load(open(...)): leaked the handle
		return dict(json.load(f))


def main():
	# Aggregate the unique (date, ip) accesses into installs per day.
	by_date = {}
	for date, _ip in scan_access_logs():
		by_date[date] = by_date.get(date, 0) + 1

	# Since logs are rotated, merge in the statistics stored permanently by
	# previous runs. Counts freshly computed from the logs take precedence
	# for any date that still appears in the logs.
	for date, count in load_existing_stats().items():
		if date not in by_date:
			by_date[date] = count

	# Turn into a sorted list of (date, count) pairs rather than a dict so
	# the JSON output is ordered chronologically.
	by_date = sorted(by_date.items())

	# Pop the last entry because today's stats are incomplete — but only if
	# there is anything at all (pop(-1) on an empty list raises IndexError).
	if by_date:
		by_date.pop(-1)

	# Write out the permanent statistics file.
	with open(outfn, "w") as f:
		json.dump(by_date, f, sort_keys=True, indent=True)


if __name__ == "__main__":
	main()