1
0
mirror of https://github.com/mail-in-a-box/mailinabox.git synced 2026-03-04 15:54:48 +01:00

Add IMAP connection reporting

Fix binsizes and barwidths on timeseries charts
Fix timezone issue in timeseries scales
This commit is contained in:
downtownallday
2021-04-08 12:53:32 -04:00
parent ac811bcbd1
commit 721dd1273f
18 changed files with 358 additions and 123 deletions

View File

@@ -6,6 +6,7 @@ class Timeseries(object):
# start_date: 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'
# start: 'YYYY-MM-DD HH:MM:SS'
self.start = self.full_datetime_str(start_date, False)
self.start_unixepoch = self.unix_time(self.start)
# end_date: 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'
# end: 'YYYY-MM-DD HH:MM:SS'
@@ -16,20 +17,12 @@ class Timeseries(object):
# timefmt is a format string for sqlite strftime() that puts a
# sqlite datetime into a "bin" date
self.timefmt='%Y-%m-%d'
self.timefmt = '%Y-%m-%d %H:%M:%S'
# parsefmt is a date parser string to be used to re-interpret
# "bin" grouping dates (data.dates) to native dates
parsefmt='%Y-%m-%d'
b = self.binsizeWithUnit()
if b['unit'] == 'hour':
self.timefmt+=' %H:00:00'
parsefmt+=' %H:%M:%S'
elif b['unit'] == 'minute':
self.timefmt+=' %H:%M:00'
parsefmt+=' %H:%M:%S'
# "bin" grouping dates (data.dates) to native dates. server
# always returns utc dates
parsefmt = '%Y-%m-%d %H:%M:%S'
self.dates = [] # dates must be "bin" date strings
self.series = []
@@ -54,6 +47,14 @@ class Timeseries(object):
d = d + datetime.timedelta(days=1)
return d.strftime('%Y-%m-%d 00:00:00')
def unix_time(self, full_datetime_str):
    '''Convert a "YYYY-MM-DD HH:MM:SS" string, interpreted as UTC, to an
    integer unix epoch timestamp.

    full_datetime_str: a full datetime string such as produced by
    full_datetime_str() / self.start / self.end
    '''
    # Attach timezone.utc explicitly instead of appending " UTC" and
    # parsing it with %Z: strptime's %Z only recognizes a limited,
    # platform-dependent set of timezone names, while .replace() is
    # unambiguous and yields the same epoch value.
    d = datetime.datetime.strptime(
        full_datetime_str,
        '%Y-%m-%d %H:%M:%S'
    ).replace(tzinfo=datetime.timezone.utc)
    return int(d.timestamp())
def binsizeWithUnit(self):
# normalize binsize (which is a time span in minutes)
days = int(self.binsize / (24 * 60))
@@ -96,12 +97,17 @@ class Timeseries(object):
'''
i = bisect.bisect_right(self.dates, date_str)
if i == len(self.dates):
if len(self.dates)>0 and self.dates[i-1] == date_str:
return i-1
elif i == len(self.dates):
self.dates.append(date_str)
return i
if self.dates[i] == date_str:
return i
self.dates.insert(i, date_str)
else:
self.dates.insert(i, date_str)
''' add zero values to all series for the new date '''
for series in self.series:
series['values'].insert(i, 0)
return i
def add_series(self, id, name):
@@ -111,6 +117,8 @@ class Timeseries(object):
'values': []
}
self.series.append(s)
for date in self.dates:
s['values'].append(0)
return s

View File

@@ -2,13 +2,36 @@
-- returns count of failed_login_attempt in each 'bin', which is the
-- connection time rounded (as defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
count(*) AS `count`
FROM mta_connection
WHERE
disposition='failed_login_attempt' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
ORDER BY connect_time
SELECT bin, sum(count) AS `count`
FROM (
  -- failed login attempts recorded for MTA connections
  SELECT
    strftime('{timefmt}',
      :start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
      'unixepoch'
    ) AS `bin`,
    count(*) AS `count`
  FROM mta_connection
  WHERE
    disposition='failed_login_attempt' AND
    connect_time >= :start_date AND
    connect_time < :end_date
  GROUP BY bin
  -- UNION ALL, not UNION: plain UNION removes duplicate rows, so a
  -- (bin, count) pair produced by both tables would be counted once
  -- instead of summed, undercounting the bin
  UNION ALL
  -- failed login attempts recorded for IMAP connections
  SELECT
    strftime('{timefmt}',
      :start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
      'unixepoch'
    ) AS `bin`,
    count(*) AS `count`
  FROM imap_connection
  WHERE
    disposition='failed_login_attempt' AND
    connect_time >= :start_date AND
    connect_time < :end_date
  GROUP BY bin
)
GROUP BY bin
ORDER BY bin

View File

@@ -2,13 +2,35 @@
-- returns count of suspected_scanner in each 'bin', which is the
-- connection time rounded (as defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
count(*) AS `count`
FROM mta_connection
WHERE
disposition='suspected_scanner' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
ORDER BY connect_time
SELECT bin, sum(count) AS `count`
FROM (
  -- suspected-scanner connections recorded for MTA connections
  SELECT
    strftime('{timefmt}',
      :start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
      'unixepoch'
    ) AS `bin`,
    count(*) AS `count`
  FROM mta_connection
  WHERE
    disposition='suspected_scanner' AND
    connect_time >= :start_date AND
    connect_time < :end_date
  -- group by the binned value, not the raw connect_time format,
  -- matching the companion failed_login_attempt query
  GROUP BY bin
  -- UNION ALL, not UNION: plain UNION removes duplicate rows, so a
  -- (bin, count) pair produced by both tables would be counted once
  -- instead of summed, undercounting the bin
  UNION ALL
  -- suspected-scanner connections recorded for IMAP connections
  SELECT
    strftime('{timefmt}',
      :start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
      'unixepoch'
    ) AS `bin`,
    count(*) AS `count`
  FROM imap_connection
  WHERE
    disposition='suspected_scanner' AND
    connect_time >= :start_date AND
    connect_time < :end_date
  GROUP BY bin
)
GROUP BY bin
ORDER BY bin

View File

@@ -0,0 +1,19 @@
-- pie chart for "connections by disposition"
--
-- returns a table of disposition along with its count, combined
-- across MTA and IMAP connections
SELECT disposition, sum(count) AS `count`
FROM (
  SELECT disposition, count(*) AS `count`
  FROM mta_connection
  WHERE connect_time>=:start_date AND connect_time<:end_date
  GROUP BY disposition
  -- UNION ALL, not UNION: plain UNION dedupes rows, so identical
  -- (disposition, count) pairs from the two tables would be counted
  -- once instead of summed
  UNION ALL
  SELECT disposition, count(*) AS `count`
  FROM imap_connection
  WHERE connect_time>=:start_date AND connect_time<:end_date
  GROUP BY disposition
)
GROUP BY disposition

View File

@@ -1,7 +1,10 @@
import logging
from .Timeseries import Timeseries
from .exceptions import InvalidArgsError
from .top import select_top
log = logging.getLogger(__name__)
with open(__file__.replace('.py','.1.sql')) as fp:
select_1 = fp.read()
@@ -20,6 +23,9 @@ with open(__file__.replace('.py','.5.sql')) as fp:
with open(__file__.replace('.py','.6.sql')) as fp:
select_6 = fp.read()
with open(__file__.replace('.py','.7.sql')) as fp:
select_7 = fp.read()
def flagged_connections(conn, args):
try:
@@ -35,9 +41,8 @@ def flagged_connections(conn, args):
c = conn.cursor()
# pie chart for "connections by disposition"
select = 'SELECT disposition, count(*) AS `count` FROM mta_connection WHERE connect_time>=:start_date AND connect_time<:end_date GROUP BY disposition'
connections_by_disposition = []
for row in c.execute(select, {'start_date':ts.start, 'end_date':ts.end}):
for row in c.execute(select_7, {'start_date':ts.start, 'end_date':ts.end}):
connections_by_disposition.append({
'name': row[0],
'value': row[1]
@@ -45,21 +50,27 @@ def flagged_connections(conn, args):
# timeseries = failed logins count
s_failed_login = ts.add_series('failed_login_attempt', 'failed login attempts')
for row in c.execute(select_1.format(timefmt=ts.timefmt), {
sql = select_1.format(timefmt=ts.timefmt)
for row in c.execute(sql, {
'start_date': ts.start,
'end_date': ts.end
'end_date': ts.end,
'start_unixepoch': ts.start_unixepoch,
'binsize': ts.binsize
}):
ts.append_date(row['bin'])
s_failed_login['values'].append(row['count'])
idx = ts.insert_date(row['bin'])
s_failed_login['values'][idx] = row['count']
# timeseries = suspected scanners count
s_scanner = ts.add_series('suspected_scanner', 'connections by suspected scanners')
for row in c.execute(select_2.format(timefmt=ts.timefmt), {
sql = select_2.format(timefmt=ts.timefmt)
for row in c.execute(sql, {
'start_date': ts.start,
'end_date': ts.end
'end_date': ts.end,
'start_unixepoch': ts.start_unixepoch,
'binsize': ts.binsize
}):
ts.insert_date(row['bin'])
s_scanner['values'].append(row['count'])
idx = ts.insert_date(row['bin'])
s_scanner['values'][idx] = row['count']
# pie chart for "disposition=='reject' grouped by failure_category"

View File

@@ -3,7 +3,10 @@
-- the connection time rounded (as defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
count(*) AS `count`
FROM mta_accept
JOIN mta_connection ON mta_connection.mta_conn_id = mta_accept.mta_conn_id
@@ -11,5 +14,5 @@ WHERE
mta_connection.service = 'smtpd' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
GROUP BY bin
ORDER BY connect_time

View File

@@ -40,10 +40,12 @@ def messages_received(conn, args):
try:
for row in c.execute(select_1.format(timefmt=ts.timefmt), {
'start_date':ts.start,
'end_date':ts.end
'end_date':ts.end,
'start_unixepoch':ts.start_unixepoch,
'binsize':ts.binsize
}):
ts.append_date(row['bin'])
s_received['values'].append(row['count'])
idx = ts.insert_date(row['bin'])
s_received['values'][idx] = row['count']
# top 10 senders (envelope_from) by message count

View File

@@ -3,7 +3,10 @@
-- time rounded (as defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) as `bin`,
count(*) AS `sent_count`
FROM mta_accept
JOIN mta_connection ON mta_connection.mta_conn_id = mta_accept.mta_conn_id
@@ -12,5 +15,5 @@ WHERE
(mta_connection.service = 'submission' OR mta_connection.service = 'pickup') AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
GROUP BY bin
ORDER BY connect_time

View File

@@ -4,7 +4,10 @@
-- defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
mta_delivery.service AS `delivery_service`,
count(*) AS `delivery_count`
FROM mta_accept
@@ -14,5 +17,5 @@ WHERE
(mta_connection.service = 'submission' OR mta_connection.service = 'pickup') AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time), mta_delivery.service
GROUP BY bin, mta_delivery.service
ORDER BY connect_time

View File

@@ -38,10 +38,12 @@ def messages_sent(conn, args):
try:
for row in c.execute(select_1.format(timefmt=ts.timefmt), {
'start_date':ts.start,
'end_date':ts.end
'end_date':ts.end,
'start_unixepoch':ts.start_unixepoch,
'binsize':ts.binsize
}):
ts.dates.append(row['bin'])
s_sent['values'].append(row['sent_count'])
idx = ts.insert_date(row['bin'])
s_sent['values'][idx] = row['sent_count']
date_idx = -1
@@ -49,25 +51,16 @@ def messages_sent(conn, args):
# query's WHERE and JOINs are the same
for row in c.execute(select_2.format(timefmt=ts.timefmt), {
'start_date':ts.start,
'end_date':ts.end
'end_date':ts.end,
'start_unixepoch':ts.start_unixepoch,
'binsize':ts.binsize
}):
if date_idx>=0 and ts.dates[date_idx] == row['bin']:
if row['delivery_service']=='smtp':
s_remote['values'][-1] = row['delivery_count']
elif row['delivery_service']=='lmtp':
s_local['values'][-1] = row['delivery_count']
else:
date_idx += 1
if date_idx >= len(ts.dates):
break
if row['delivery_service']=='smtp':
s_remote['values'].append(row['delivery_count'])
s_local['values'].append(0)
elif row['delivery_service']=='lmtp':
s_remote['values'].append(0)
s_local['values'].append(row['delivery_count'])
date_idx = ts.insert_date(row['bin'])
if row['delivery_service']=='smtp':
s_remote['values'][date_idx] = row['delivery_count']
elif row['delivery_service']=='lmtp':
s_local['values'][date_idx] = row['delivery_count']
top_senders1 = {
'start': ts.start,

View File

@@ -0,0 +1,20 @@
--
-- details on user imap connections
--
-- parameters:
--   :user_id    - the sasl_username to report on
--   :start_date - inclusive lower bound on connect_time
--   :end_date   - exclusive upper bound on connect_time
SELECT
connect_time,
-- fall back to the raw ip when the host name was not resolved
CASE WHEN remote_host='unknown' THEN remote_ip ELSE remote_host END AS `remote_host`,
sasl_method,
disconnect_reason,
connection_security,
disposition,
in_bytes,
out_bytes
FROM
imap_connection
WHERE
sasl_username = :user_id AND
connect_time >= :start_date AND
connect_time < :end_date
ORDER BY
connect_time

View File

@@ -7,6 +7,9 @@ with open(__file__.replace('.py','.1.sql')) as fp:
with open(__file__.replace('.py','.2.sql')) as fp:
select_2 = fp.read()
with open(__file__.replace('.py','.3.sql')) as fp:
select_3 = fp.read()
def user_activity(conn, args):
'''
@@ -162,8 +165,51 @@ def user_activity(conn, args):
received_mail['items'].append(v)
#
# imap connections by user
#
imap_details = {
'start': ts.start,
'end': ts.end,
'y': 'IMAP Details',
'fields': [
'connect_time',
'remote_host',
'sasl_method',
'disconnect_reason',
'connection_security',
'disposition',
'in_bytes',
'out_bytes'
],
'field_types': [
{ 'type':'datetime', 'format': '%Y-%m-%d %H:%M:%S' },# connect_time
'text/plain', # remote_host
'text/plain', # sasl_method
'text/plain', # disconnect_reason
'text/plain', # connection_security
'text/plain', # disposition
'number/size', # in_bytes,
'number/size', # out_bytes,
],
'items': []
}
for row in c.execute(select_3 + limit, {
'user_id': user_id,
'start_date': ts.start,
'end_date': ts.end
}):
v = []
for key in imap_details['fields']:
v.append(row[key])
imap_details['items'].append(v)
return {
'sent_mail': sent_mail,
'received_mail': received_mail
'received_mail': received_mail,
'imap_details': imap_details
}