1
0
mirror of https://github.com/mail-in-a-box/mailinabox.git synced 2025-04-05 00:27:25 +00:00

Merge pull request #13 from downtownallday/reporting

Reporting updates
This commit is contained in:
Downtown Allday 2021-04-13 06:59:30 -04:00 committed by GitHub
commit d4ff45c7fc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
58 changed files with 1881 additions and 434 deletions

View File

@ -14,7 +14,7 @@ from functools import wraps
from reporting.capture.db.SqliteConnFactory import SqliteConnFactory
import reporting.uidata as uidata
from mailconfig import get_mail_users
from mailconfig import ( get_mail_users, validate_email )
def add_reports(app, env, authorized_personnel_only):
@ -102,6 +102,18 @@ def add_reports(app, env, authorized_personnel_only):
finally:
db_conn_factory.close(conn)
@app.route('/reports/uidata/imap-details', methods=['POST'])
@authorized_personnel_only
@json_payload
def get_imap_details(payload):
    '''Return detailed imap connection data for the reports ui.

    `payload` is the parsed JSON request body; it is passed through to
    uidata.imap_details(), which validates it and queries the capture
    database. Returns a JSON response, or HTTP 400 when the payload
    fails validation.
    '''
    conn = db_conn_factory.connect()
    try:
        return jsonify(uidata.imap_details(conn, payload))
    except uidata.InvalidArgsError:
        # fix: the exception was bound (`as e`) but never used
        return ('invalid request', 400)
    finally:
        # always return the connection to the factory
        db_conn_factory.close(conn)
@app.route('/reports/uidata/flagged-connections', methods=['POST'])
@authorized_personnel_only
@json_payload
@ -178,7 +190,7 @@ def add_reports(app, env, authorized_personnel_only):
r = subprocess.run(["systemctl", "reload", "miabldap-capture"])
if r.returncode != 0:
log.warning('systemctl reload faild for miabldap-capture: code=%s', r.returncode)
log.warning('systemctl reload failed for miabldap-capture: code=%s', r.returncode)
else:
# wait a sec for daemon to pick up new config
# TODO: monitor runtime config for mtime change
@ -214,3 +226,41 @@ def add_reports(app, env, authorized_personnel_only):
return jsonify(uidata.capture_db_stats(conn))
finally:
db_conn_factory.close(conn)
@app.route('/reports/uidata/message-headers', methods=['POST'])
@authorized_personnel_only
@json_payload
def get_message_headers(payload):
    '''Return the headers of a stored message as plain text.

    The JSON payload must contain `user_id` (the mailbox owner) and
    `lmtp_id` (the LMTP delivery id recorded in the Received header).
    The message is located by asking doveadm to search the user's
    mailbox for a Received header containing that LMTP id.
    '''
    try:
        user_id = payload['user_id']
        lmtp_id = payload['lmtp_id']
    except KeyError:
        return ('invalid request', 400)

    # reject anything that is not a valid local mailbox address before
    # handing it to doveadm
    if not validate_email(user_id, mode="user"):
        return ('invalid email address', 400)

    # NOTE(review): lmtp_id is user-supplied and is passed as a search
    # term only; list-form argv avoids any shell interpretation
    r = subprocess.run(
        [
            "/usr/bin/doveadm",
            "fetch",
            "-u", user_id,
            "hdr",
            "HEADER", "received", "LMTP id " + lmtp_id
        ],
        encoding="utf8",
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    )
    if r.returncode != 0:
        log.error('retrieving message headers failed, code=%s, lmtp_id=%s, user_id=%s, stderr=%s', r.returncode, lmtp_id, user_id, r.stderr)
        return Response(r.stderr, status=400, mimetype='text/plain')
    else:
        out = r.stdout.strip()
        # doveadm prefixes its output with "hdr:\n" - strip it (5 chars)
        if out.startswith('hdr:\n'):
            out = out[5:]
        return Response(out, status=200, mimetype='text/plain')

View File

@ -1,2 +0,0 @@
tests/
run.sh

View File

@ -1,6 +1,7 @@
#!/usr/bin/env python3
from logs.TailFile import TailFile
from mail.InboundMailLogHandler import InboundMailLogHandler
from mail.PostfixLogHandler import PostfixLogHandler
from mail.DovecotLogHandler import DovecotLogHandler
from logs.ReadPositionStoreInFile import ReadPositionStoreInFile
from db.SqliteConnFactory import SqliteConnFactory
from db.SqliteEventStore import SqliteEventStore
@ -50,6 +51,7 @@ options = {
'daemon': True,
'log_level': logging.WARNING,
'log_file': "/var/log/mail.log",
'stop_at_eof': False,
'pos_file': "/var/lib/mailinabox/capture-pos.json",
'sqlite_file': os.path.join(CAPTURE_STORAGE_ROOT, 'capture.sqlite'),
'working_dir': "/var/run/mailinabox",
@ -122,6 +124,10 @@ def process_cmdline(options):
elif arg=='-logfile' and have_next:
argi+=1
options['log_file'] = sys.argv[argi]
elif arg=='-stopateof':
argi+=1
options['stop_at_eof'] = True
elif arg=='-posfile' and have_next:
argi+=1
@ -220,14 +226,21 @@ try:
)
mail_tail = TailFile(
options['log_file'],
position_store
position_store,
options['stop_at_eof']
)
inbound_mail_handler = InboundMailLogHandler(
postfix_log_handler = PostfixLogHandler(
event_store,
capture_enabled = options['config'].get('capture',True),
drop_disposition = options['config'].get('drop_disposition')
)
mail_tail.add_handler(inbound_mail_handler)
mail_tail.add_handler(postfix_log_handler)
dovecot_log_handler = DovecotLogHandler(
event_store,
capture_enabled = options['config'].get('capture',True),
drop_disposition = options['config'].get('drop_disposition')
)
mail_tail.add_handler(dovecot_log_handler)
pruner = Pruner(
db_conn_factory,
policy=options['config']['prune_policy']
@ -245,17 +258,6 @@ def terminate(sig, stack):
log.warning("shutting down due to SIGTERM")
log.debug("stopping mail_tail")
mail_tail.stop()
log.debug("stopping pruner")
pruner.stop()
log.debug("stopping position_store")
position_store.stop()
log.debug("stopping event_store")
event_store.stop()
try:
os.remove(options['_runtime_config_file'])
except Exception:
pass
log.info("stopped")
# reload settings handler
def reload(sig, stack):
@ -266,8 +268,10 @@ def reload(sig, stack):
options['config']['default_config'] = False
options['_config_file'] = config_default_file
log.info('%s records are in-progress',
inbound_mail_handler.get_inprogress_count())
log.info('%s mta records are in-progress',
postfix_log_handler.get_inprogress_count())
log.info('%s imap records are in-progress',
dovecot_log_handler.get_inprogress_count())
if options['_config_file']:
log.info('reloading %s', options['_config_file'])
@ -276,10 +280,16 @@ def reload(sig, stack):
pruner.set_policy(
newconfig['prune_policy']
)
inbound_mail_handler.set_capture_enabled(
postfix_log_handler.set_capture_enabled(
newconfig.get('capture', True)
)
inbound_mail_handler.update_drop_disposition(
postfix_log_handler.update_drop_disposition(
newconfig.get('drop_disposition', {})
)
dovecot_log_handler.set_capture_enabled(
newconfig.get('capture', True)
)
dovecot_log_handler.update_drop_disposition(
newconfig.get('drop_disposition', {})
)
write_config(options['_runtime_config_file'], newconfig)
@ -298,3 +308,15 @@ signal.signal(signal.SIGHUP, reload)
mail_tail.start()
mail_tail.join()
# gracefully close other threads
log.debug("stopping pruner")
pruner.stop()
log.debug("stopping position_store")
position_store.stop()
log.debug("stopping event_store")
event_store.stop()
try:
os.remove(options['_runtime_config_file'])
except Exception:
pass
log.info("stopped")

View File

@ -74,7 +74,6 @@ class EventStore(Prunable):
self.t.join()
def __del__(self):
log.debug('EventStore __del__')
self.interrupt.set()
self.have_event.set()

View File

@ -71,6 +71,23 @@ mta_delivery_fields = [
'failure_category',
]
imap_conn_fields = [
'service',
'service_tid',
'connect_time',
'disconnect_time',
'disconnect_reason',
'remote_host',
'remote_ip',
'sasl_method',
'sasl_username',
'remote_auth_success',
'remote_auth_attempts',
'connection_security',
'in_bytes',
'out_bytes',
'disposition'
]
db_info_create_table_stmt = "CREATE TABLE IF NOT EXISTS db_info(id INTEGER PRIMARY KEY AUTOINCREMENT, key TEXT NOT NULL, value TEXT NOT NULL)"
@ -162,7 +179,33 @@ schema_updates = [
[
"ALTER TABLE mta_delivery ADD COLUMN orig_to TEXT COLLATE NOCASE",
"UPDATE db_info SET value='1' WHERE key='schema_version'"
]
],
# update 2
[
"CREATE TABLE imap_connection(\
imap_conn_id INTEGER PRIMARY KEY AUTOINCREMENT,\
service TEXT NOT NULL, /* 'imap', 'imap-login', 'pop3-login' */\
service_tid TEXT,\
connect_time TEXT NOT NULL,\
disconnect_time TEXT,\
disconnect_reason TEXT,\
remote_host TEXT COLLATE NOCASE,\
remote_ip TEXT COLLATE NOCASE,\
sasl_method TEXT, /* eg. 'PLAIN' */\
sasl_username TEXT COLLATE NOCASE,\
remote_auth_success INTEGER, /* count of successes */\
remote_auth_attempts INTEGER, /* count of attempts */\
connection_security TEXT, /* eg 'TLS' */\
in_bytes INTEGER,\
out_bytes INTEGER,\
disposition TEXT /* 'ok','failed_login_attempt',etc */\
)",
"CREATE INDEX idx_imap_connection_connect_time ON imap_connection(connect_time, sasl_username COLLATE NOCASE)",
"UPDATE db_info SET value='2' WHERE key='schema_version'"
],
]
@ -209,9 +252,12 @@ class SqliteEventStore(EventStore):
def write_rec(self, conn, type, rec):
if type=='inbound_mail':
#log.debug('wrote inbound_mail record')
self.write_inbound_mail(conn, rec)
if type=='mta_mail':
self.write_mta_mail(conn, rec)
elif type=='imap_mail':
self.write_imap_mail(conn, rec)
elif type=='state':
''' rec: {
owner_id: int,
@ -246,7 +292,7 @@ class SqliteEventStore(EventStore):
return values
def write_inbound_mail(self, conn, rec):
def write_mta_mail(self, conn, rec):
c = None
try:
c = conn.cursor()
@ -282,6 +328,28 @@ class SqliteEventStore(EventStore):
def write_imap_mail(self, conn, rec):
    '''Insert one imap_connection row built from record `rec`.

    Commits on success; rolls back and re-raises on any sqlite error.
    '''
    c = None
    try:
        c = conn.cursor()
        # imap_connection
        insert = self._insert('imap_connection', imap_conn_fields)
        values = self._values(imap_conn_fields, rec)
        c.execute(insert, values)
        # fix: dropped the unused `conn_id = c.lastrowid` local
        conn.commit()
    except sqlite3.Error:
        conn.rollback()
        # bare raise preserves the original traceback
        raise
    finally:
        if c: c.close(); c=None
def write_state(self, conn, rec):
c = None
try:
@ -354,7 +422,9 @@ class SqliteEventStore(EventStore):
JOIN mta_connection ON mta_connection.mta_conn_id = mta_accept.mta_conn_id\
WHERE connect_time < ?)',
'DELETE FROM mta_connection WHERE connect_time < ?'
'DELETE FROM mta_connection WHERE connect_time < ?',
'DELETE FROM imap_connection WHERE connect_time < ?',
]
counts = []

View File

@ -29,7 +29,6 @@ class ReadPositionStoreInFile(ReadPositionStore):
self.t.start()
def __del__(self):
log.debug('ReadPositionStoreInFile __del__')
self.interrupt.set()
def stop(self):

View File

@ -14,12 +14,13 @@ of ReadLineHandler.
'''
class TailFile(threading.Thread):
def __init__(self, log_file, store=None):
def __init__(self, log_file, store=None, stop_at_eof=False):
''' log_file - the log file to monitor
store - a ReadPositionStore instance
'''
self.log_file = log_file
self.store = store
self.stop_at_eof = stop_at_eof
self.fp = None
self.inode = None
@ -31,7 +32,6 @@ class TailFile(threading.Thread):
super(TailFile, self).__init__(name=name, daemon=True)
def stop(self, do_join=True):
log.debug('TailFile stopping')
self.interrupt.set()
# close must be called to unblock the thread fp.readline() call
self._close()
@ -72,15 +72,11 @@ class TailFile(threading.Thread):
def _issue_callbacks(self, line):
for cb in self.callbacks:
if isinstance(cb, ReadLineHandler):
cb.handle(line)
else:
cb(line)
cb.handle(line)
def _notify_end_of_callbacks(self):
for cb in self.callbacks:
if isinstance(cb, ReadLineHandler):
cb.end_of_callbacks(self)
cb.end_of_callbacks(self)
def _restore_read_position(self):
if self.fp is None:
@ -122,6 +118,9 @@ class TailFile(threading.Thread):
line = self.fp.readline() # blocking
if line=='':
log.debug('got EOF')
if self.stop_at_eof:
self.interrupt.set()
# EOF - check if file was rotated
if self._is_rotated():
log.debug('rotated')
@ -144,6 +143,7 @@ class TailFile(threading.Thread):
self._issue_callbacks(line)
except Exception as e:
log.error('exception processing line: %s', line)
log.exception(e)
if self.interrupt.wait(1) is not True:
if self._is_rotated():

View File

@ -0,0 +1,128 @@
import logging
import re
import datetime
import traceback
import ipaddress
import threading
from logs.ReadLineHandler import ReadLineHandler
import logs.DateParser
from db.EventStore import EventStore
from util.DictQuery import DictQuery
from util.safe import (safe_int, safe_append, safe_del)
log = logging.getLogger(__name__)
class CommonHandler(ReadLineHandler):
    '''Base class shared by the postfix and dovecot log-line handlers.

    Provides the common machinery: the in-progress record queue
    (restored from, and saved to, the record store's state cache),
    the capture on/off switch, the drop-disposition table, and syslog
    date parsing.
    '''
    def __init__(self, state_cache_owner_id, record_store,
                 date_regexp = logs.DateParser.rsyslog_traditional_regexp,
                 date_parser_fn = logs.DateParser.rsyslog_traditional,
                 capture_enabled = True,
                 drop_disposition = None
    ):
        # id that partitions cached state in the record store so each
        # handler subclass gets its own saved queue
        self.state_cache_owner_id = state_cache_owner_id

        ''' EventStore instance for persisting "records" '''
        self.record_store = record_store
        self.set_capture_enabled(capture_enabled)

        # our in-progress record queue is a simple list
        self.recs = self.get_cached_state(clear=True)
        self.current_inprogress_recs = len(self.recs)

        # records that have these dispositions will be dropped (not
        # recorded in the record store)
        self.drop_disposition_lock = threading.Lock()
        self.drop_disposition = {
            'failed_login_attempt': False,
            'suspected_scanner': False,
            'reject': False
        }
        self.update_drop_disposition(drop_disposition)

        # regular expression that matches a syslog date (always anchored)
        self.date_regexp = date_regexp
        if date_regexp.startswith('^'):
            self.date_regexp = date_regexp[1:]

        # function that parses the syslog date
        self.date_parser_fn = date_parser_fn

    def get_inprogress_count(self):
        ''' thread-safe '''
        return self.current_inprogress_recs

    def update_inprogress_count(self):
        # refresh the cached count that get_inprogress_count() reports
        self.current_inprogress_recs = len(self.recs)

    def set_capture_enabled(self, capture_enabled):
        ''' thread-safe '''
        self.capture_enabled = capture_enabled

    def update_drop_disposition(self, drop_disposition):
        ''' thread-safe '''
        with self.drop_disposition_lock:
            self.drop_disposition.update(drop_disposition)

    def test_drop_disposition(self, disposition):
        # True when records with this disposition must not be stored
        with self.drop_disposition_lock:
            return self.drop_disposition.get(disposition, False)

    def datetime_as_str(self, d):
        # iso-8601 time friendly to sqlite3
        timestamp = d.isoformat(sep=' ', timespec='seconds')
        # strip the utc offset from the iso format (ie. remove "+00:00")
        idx = timestamp.find('+00:00')
        if idx>0:
            timestamp = timestamp[0:idx]
        return timestamp

    def parse_date(self, str):
        # we're expecting UTC times from date_parser()
        d = self.date_parser_fn(str)
        return self.datetime_as_str(d)

    def get_cached_state(self, clear=True):
        '''Load the saved in-progress record queue from the record store.

        Entries older than the stale cutoff are dropped. When `clear` is
        True the cache is emptied after it is read.
        '''
        conn = None
        try:
            # obtain the cached records from the record store
            conn = self.record_store.connect()
            recs = self.record_store.read_rec(conn, 'state', {
                "owner_id": self.state_cache_owner_id,
                "clear": clear
            })
            log.info('read %s incomplete records from cache %s', len(recs), self.state_cache_owner_id)

            # eliminate stale records - "stale" should be longer than
            # the "give-up" time for postfix (4-5 days)
            stale = datetime.timedelta(days=7)
            cutoff = self.datetime_as_str(
                datetime.datetime.now(datetime.timezone.utc) - stale
            )
            newlist = [ rec for rec in recs if rec['connect_time'] >= cutoff ]
            if len(newlist) < len(recs):
                log.warning('dropping %s stale incomplete records',
                            len(recs) - len(newlist))
            return newlist
        finally:
            if conn: self.record_store.close(conn)

    def save_state(self):
        # persist the in-progress queue so a restart can resume from it
        log.info('saving state to cache %s: %s records', self.state_cache_owner_id, len(self.recs))
        self.record_store.store('state', {
            'owner_id': self.state_cache_owner_id,
            'state': self.recs
        })

    def end_of_callbacks(self, thread):
        '''overrides ReadLineHandler method

        save incomplete records so we can pick up where we left off
        '''
        self.update_inprogress_count()
        self.save_state()

View File

@ -0,0 +1,534 @@
import logging
import re
import datetime
import ipaddress
import threading
from logs.ReadLineHandler import ReadLineHandler
import logs.DateParser
from db.EventStore import EventStore
from util.DictQuery import DictQuery
from util.safe import (safe_int, safe_append, safe_del)
from .CommonHandler import CommonHandler
log = logging.getLogger(__name__)
STATE_CACHE_OWNER_ID = 2
class DovecotLogHandler(CommonHandler):
'''
'''
def __init__(self, record_store,
             date_regexp = logs.DateParser.rsyslog_traditional_regexp,
             date_parser_fn = logs.DateParser.rsyslog_traditional,
             capture_enabled = True,
             drop_disposition = None
):
    '''Parse dovecot (imap/pop3) syslog output into imap_connection
    records.

    record_store - EventStore instance for persisting completed records
    date_regexp / date_parser_fn - syslog date format and its parser
    capture_enabled - initial on/off state for capturing
    drop_disposition - map of disposition name -> True to drop
    '''
    super(DovecotLogHandler, self).__init__(
        STATE_CACHE_OWNER_ID,
        record_store,
        date_regexp,
        date_parser_fn,
        capture_enabled,
        drop_disposition
    )

    # A "record" is composed by parsing all the syslog output from
    # the activity generated by dovecot (imap, pop3) from a single
    # remote connection. Once a full history of the connection is
    # seen, the record is written to the record_store.
    #
    # `recs` is an array holding incomplete, in-progress
    # "records". This array has the following format:
    #
    # (For convenience, it's easier to refer to the table column
    # names found in SqliteEventStore for the dict member names that
    # are used here since they're all visible in one place.)
    #
    # [{
    #    ... fields of the imap_connection table ...
    # }]
    #
    # IMPORTANT:
    #
    # No methods in this class are safe to call by any thread
    # other than the caller of handle(), unless marked as
    # thread-safe.
    #

    # maximum size of the in-progress record queue (should be the
    # same or greater than the maximum simultaneous dovecot/imap
    # connections allowed, which is dovecot settings
    # `process_limit` times `client_limit`, which defaults to 100
    # * 1000)
    self.max_inprogress_recs = 100 * 1000

    # 1a. imap-login: Info: Login: user=<keith@just1w.com>, method=PLAIN, rip=146.168.130.9, lip=192.155.92.185, mpid=5866, TLS, session=<IF3v7ze27dKSqIIJ>
    #   1=date
    #   2=service ("imap-login" or "pop3-login")
    #   3=sasl_username ("user@domain.com")
    #   4=sasl_method ("PLAIN")
    #   5=remote_ip ("146.168.130.9")
    #   6=local_ip
    #   7=service_tid ("5866")
    #   8=connection_security ("TLS")
    self.re_connect_success = re.compile('^' + self.date_regexp + ' (imap-login|pop3-login): Info: Login: user=<([^>]*)>, method=([^,]*), rip=([^,]+), lip=([^,]+), mpid=([^,]+), ([^,]+)')

    # 1a. imap-login: Info: Disconnected (auth failed, 1 attempts in 4 secs): user=<fernando@athigo.com>, method=PLAIN, rip=152.67.63.172, lip=192.155.92.185, TLS: Disconnected, session=<rho/Rjq2EqSYQz+s>
    #   1=date
    #   2=service ("imap-login" or "pop3-login")
    #   3=disconnect_reason
    #   4=remote_auth_attempts
    #   5=sasl_username
    #   6=sasl_method
    #   7=remote_ip
    #   8=local_ip
    #   9=connection_security
    self.re_connect_fail_1 = re.compile('^' + self.date_regexp + ' (imap-login|pop3-login): Info: (?:Disconnected|Aborted login) \(([^,]+), (\d+) attempts[^\)]*\): user=<([^>]*)>, method=([^,]+), rip=([^,]+), lip=([^,]+), ([^,]+)')

    # 2a. pop3-login: Info: Disconnected (no auth attempts in 2 secs): user=<>, rip=196.52.43.85, lip=192.155.92.185, TLS handshaking: SSL_accept() failed: error:14209102:SSL routines:tls_early_post_process_client_hello:unsupported protocol, session=<ehaSaDm2x9vENCtV>
    # 2b. imap-login: Info: Disconnected (no auth attempts in 2 secs): user=<>, rip=92.118.160.61, lip=192.155.92.185, TLS handshaking: SSL_accept() failed: error:14209102:SSL routines:tls_early_post_process_client_hello:unsupported protocol, session=<cvmKhjq2qtJcdqA9>
    #   1=date
    #   2=service ("imap-login" or "pop3-login")
    #   3=disconnect_reason
    #   4=sasl_username ("")
    #   5=remote_ip ("146.168.130.9")
    #   6=local_ip
    #   7=connection_security
    self.re_connect_fail_2 = re.compile('^' + self.date_regexp + ' (imap-login|pop3-login): Info: (?:Disconnected|Aborted login) \(([^\)]*)\): user=<([^>]*)>, rip=([^,]+), lip=([^,]+), ([^,]+)')

    # 3a. imap-login: Info: Disconnected (client didn't finish SASL auth, waited 0 secs): user=<>, method=PLAIN, rip=107.107.63.148, lip=192.155.92.185, TLS: SSL_read() syscall failed: Connection reset by peer, session=<rmBsIP21Zsdraz+U>
    #   1=date
    #   2=service ("imap-login" or "pop3-login")
    #   3=disconnect_reason
    #   4=sasl_username ("")
    #   5=sasl_method
    #   6=remote_ip ("146.168.130.9")
    #   7=local_ip
    #   8=connection_security
    self.re_connect_fail_3 = re.compile('^' + self.date_regexp + ' (imap-login|pop3-login): Info: (?:Disconnected|Aborted login) \(([^\)]*)\): user=<([^>]*)>, method=([^,]+), rip=([^,]+), lip=([^,]+), ([^,]+)')

    # 4a. pop3-login: Info: Disconnected: Too many bad commands (no auth attempts in 0 secs): user=<>, rip=83.97.20.35, lip=192.155.92.185, TLS, session=<BH8JRCi2nJ5TYRQj>
    #   1=date
    #   2=service ("imap-login" or "pop3-login")
    #   3=disconnect_reason
    #   4=sasl_username ("")
    #   5=remote_ip ("146.168.130.9")
    #   6=local_ip
    #   7=connection_security
    self.re_connect_fail_4 = re.compile('^' + self.date_regexp + ' (imap-login|pop3-login): Info: Disconnected: ([^\(]+) \(no auth attempts [^\)]+\): user=<([^>]*)>, rip=([^,]+), lip=([^,]+), ([^,]+)')

    # 5a. imap-login: Info: Disconnected: Too many bad commands (auth failed, 1 attempts in 4 secs): user=<fernando@athigo.com>, method=PLAIN, rip=152.67.63.172, lip=192.155.92.185, TLS: Disconnected, session=<rho/Rjq2EqSYQz+s>
    #   1=date
    #   2=service ("imap-login" or "pop3-login")
    #   3=disconnect_reason
    #   4=remote_auth_attempts
    #   5=sasl_username
    #   6=sasl_method
    #   7=remote_ip
    #   8=local_ip
    #   9=connection_security
    self.re_connect_fail_5 = re.compile('^' + self.date_regexp + ' (imap-login|pop3-login): Info: (?:Disconnected|Aborted login): ([^\(]+) \(auth failed, (\d+) attempts [^\)]+\): user=<([^>]*)>, method=([^,]+), rip=([^,]+), lip=([^,]+), ([^,]+)')

    # 1a. imap(jzw@just1w.com): Info: Logged out in=29 out=496
    #
    # 1b. imap(jzw@just1w.com): Info: Connection closed (IDLE running for 0.001 + waiting input for 5.949 secs, 2 B in + 10 B out, state=wait-input) in=477 out=6171
    # 1c. imap(jzw@just1w.com): Info: Connection closed (UID STORE finished 0.225 secs ago) in=8099 out=21714
    # 1d. imap(jzw@just1w.com): Info: Connection closed (LIST finished 115.637 secs ago) in=629 out=11130
    #
    # 1e. imap(jzw@just1w.com): Info: Connection closed (APPEND finished 0.606 secs ago) in=11792 out=10697
    #
    # 1f. imap(jzw@just1w.com): Info: Disconnected for inactivity in=1518 out=2962
    #
    # 1g. imap(keith@just1w.com): Info: Server shutting down. in=720 out=7287
    #   1=date
    #   2=service ("imap" or "pop3")
    #   3=sasl_username
    #   4=disconnect_reason ("Disconnected for inactivity")
    #   5=in_bytes
    #   6=out_bytes
    self.re_disconnect = re.compile('^' + self.date_regexp + ' (imap|pop3)\(([^\)]*)\): Info: ((?:Logged out|Connection closed|Disconnected|Server shutting down).*) in=(\d+) out=(\d+)')
def add_new_connection(self, imap_conn):
    '''Append an in-progress imap_connection record to the queue.

    When the queue grows past its soft limit (5% headroom over
    max_inprogress_recs), the oldest entries are discarded so memory
    stays bounded. Returns the queued record.
    '''
    queue_len = len(self.recs)
    threshold = self.max_inprogress_recs + ( queue_len * 0.05 )
    if queue_len > threshold:
        # trim enough of the oldest records to land 10% under the max
        drop = queue_len - self.max_inprogress_recs + int( self.max_inprogress_recs * 0.10 )
        log.warning('dropping %s old imap records', drop)
        self.recs = self.recs[min(queue_len, drop):]
    self.recs.append(imap_conn)
    return imap_conn
def remove_connection(self, imap_conn):
    '''Drop a completed imap_connection record from the in-progress queue.'''
    self.recs.remove(imap_conn)
def find_by(self, imap_conn_q, debug=False):
    '''find records using field-matching queries

    Returns the list of in-progress imap_conn records matching the
    DictQuery query list `imap_conn_q`, best match first.
    '''
    if debug:
        # BUG FIX: this previously logged the undefined name
        # `imap_accept_q`, raising NameError whenever debug was enabled
        log.debug('imap_conn_q: %s', imap_conn_q)

    # find all candidate recs with matching imap_conn_q, ordered by most
    # recent last
    candidates = DictQuery.find(self.recs, imap_conn_q, reverse=False)
    if len(candidates)==0:
        if debug: log.debug('no candidates')
        return []

    elif not candidates[0]['exact']:
        # there were no exact matches. apply autosets to the best
        # match requiring the fewest number of autosets (index 0)
        if debug: log.debug('no exact candidates')
        DictQuery.autoset(candidates[0])
        candidates[0]['exact'] = True
        candidates = [ candidates[0] ]

    else:
        # there is at least one exact match - remove all non-exact
        # candidates
        candidates = [
            candidate for candidate in candidates if candidate['exact']
        ]

    return [ candidate['item'] for candidate in candidates ]
def find_first(self, *args, **kwargs):
    '''Return the "best" find_by() match - the first entry of the
    ordered result list - or None when nothing matches.
    '''
    matches = self.find_by(*args, **kwargs)
    return matches[0] if matches else None
def match_connect_success(self, line):
    '''Match a successful imap/pop3 login line.

    On a match, queues and returns { 'imap_conn': rec } for the new
    connection; returns None (falsy) otherwise.
    '''
    #   1=date
    #   2=service ("imap-login" or "pop3-login")
    #   3=sasl_username ("user@domain.com")
    #   4=sasl_method ("PLAIN")
    #   5=remote_ip ("146.168.130.9")
    #   6=local_ip
    #   7=service_tid ("5866")
    #   8=connection_security ("TLS")
    m = self.re_connect_success.search(line)
    if m:
        imap_conn = {
            "connect_time": self.parse_date(m.group(1)),  # "YYYY-MM-DD HH:MM:SS"
            "service": m.group(2),
            "sasl_username": m.group(3),
            "sasl_method": m.group(4),
            # the log line carries no hostname - recorded as "unknown"
            "remote_host": "unknown",
            "remote_ip": m.group(5),
            "service_tid": m.group(7),
            "connection_security": m.group(8),
            "remote_auth_success": 1,
            "remote_auth_attempts": 1
        }
        self.add_new_connection(imap_conn)
        return { 'imap_conn': imap_conn }
def match_connect_fail(self, line):
    '''Match the various failed-login / aborted-login disconnect lines.

    Each pattern describes a connection that both opened and closed in
    a single log line, so the queued record is created already carrying
    its disconnect_time. Returns { 'imap_conn': rec } on a match, None
    otherwise. Patterns are tried in order 1-5; first match wins.
    '''
    m = self.re_connect_fail_1.search(line)
    if m:
        # 1a. imap-login: Info: Disconnected (auth failed, 1 attempts in 4 secs): user=<fernando@athigo.com>, method=PLAIN, rip=152.67.63.172, lip=192.155.92.185, TLS: Disconnected, session=<rho/Rjq2EqSYQz+s>
        #   1=date
        #   2=service ("imap-login" or "pop3-login")
        #   3=disconnect_reason
        #   4=remote_auth_attempts
        #   5=sasl_username
        #   6=sasl_method
        #   7=remote_ip
        #   8=local_ip
        #   9=connection_security
        d = self.parse_date(m.group(1))  # "YYYY-MM-DD HH:MM:SS"
        imap_conn = {
            "connect_time": d,
            "disconnect_time": d,
            "disconnect_reason": m.group(3),
            "service": m.group(2),
            "sasl_username": m.group(5),
            "sasl_method": m.group(6),
            "remote_host": "unknown",
            "remote_ip": m.group(7),
            "connection_security": m.group(9),
            "remote_auth_success": 0,
            "remote_auth_attempts": int(m.group(4))
        }
        self.add_new_connection(imap_conn)
        return { 'imap_conn': imap_conn }

    m = self.re_connect_fail_2.search(line)
    if m:
        # 2a. pop3-login: Info: Disconnected (no auth attempts in 2 secs): user=<>, rip=196.52.43.85, lip=192.155.92.185, TLS handshaking: SSL_accept() failed: error:14209102:SSL routines:tls_early_post_process_client_hello:unsupported protocol, session=<ehaSaDm2x9vENCtV>
        #   1=date
        #   2=service ("imap-login" or "pop3-login")
        #   3=disconnect_reason
        #   4=sasl_username ("")
        #   5=remote_ip ("146.168.130.9")
        #   6=local_ip
        #   7=connection_security
        d = self.parse_date(m.group(1))  # "YYYY-MM-DD HH:MM:SS"
        imap_conn = {
            "connect_time": d,
            "disconnect_time": d,
            "disconnect_reason": m.group(3),
            "service": m.group(2),
            "sasl_username": m.group(4),
            "remote_host": "unknown",
            "remote_ip": m.group(5),
            "connection_security": m.group(7),
            "remote_auth_success": 0,
            "remote_auth_attempts": 0
        }
        self.add_new_connection(imap_conn)
        return { 'imap_conn': imap_conn }

    m = self.re_connect_fail_3.search(line)
    if m:
        # 3a. imap-login: Info: Disconnected (client didn't finish SASL auth, waited 0 secs): user=<>, method=PLAIN, rip=107.107.63.148, lip=192.155.92.185, TLS: SSL_read() syscall failed: Connection reset by peer, session=<rmBsIP21Zsdraz+U>
        #   1=date
        #   2=service ("imap-login" or "pop3-login")
        #   3=disconnect_reason
        #   4=sasl_username ("")
        #   5=sasl_method
        #   6=remote_ip ("146.168.130.9")
        #   7=local_ip
        #   8=connection_security
        d = self.parse_date(m.group(1))  # "YYYY-MM-DD HH:MM:SS"
        imap_conn = {
            "connect_time": d,
            "disconnect_time": d,
            "disconnect_reason": m.group(3),
            "service": m.group(2),
            "sasl_username": m.group(4),
            "sasl_method": m.group(5),
            "remote_host": "unknown",
            "remote_ip": m.group(6),
            "connection_security": m.group(8),
            "remote_auth_success": 0,
            "remote_auth_attempts": 0
        }
        self.add_new_connection(imap_conn)
        return { 'imap_conn': imap_conn }

    m = self.re_connect_fail_4.search(line)
    if m:
        # 4a. pop3-login: Info: Disconnected: Too many bad commands (no auth attempts in 0 secs): user=<>, rip=83.97.20.35, lip=192.155.92.185, TLS, session=<BH8JRCi2nJ5TYRQj>
        #   1=date
        #   2=service ("imap-login" or "pop3-login")
        #   3=disconnect_reason
        #   4=sasl_username ("")
        #   5=remote_ip ("146.168.130.9")
        #   6=local_ip
        #   7=connection_security
        d = self.parse_date(m.group(1))  # "YYYY-MM-DD HH:MM:SS"
        imap_conn = {
            "connect_time": d,
            "disconnect_time": d,
            "disconnect_reason": m.group(3),
            "service": m.group(2),
            "sasl_username": m.group(4),
            "remote_host": "unknown",
            "remote_ip": m.group(5),
            "connection_security": m.group(6),
            "remote_auth_success": 0,
            "remote_auth_attempts": 0
        }
        self.add_new_connection(imap_conn)
        return { 'imap_conn': imap_conn }

    m = self.re_connect_fail_5.search(line)
    if m:
        # 5a. imap-login: Info: Disconnected: Too many bad commands (auth failed, 1 attempts in 4 secs): user=<fernando@athigo.com>, method=PLAIN, rip=152.67.63.172, lip=192.155.92.185, TLS: Disconnected, session=<rho/Rjq2EqSYQz+s>
        #   1=date
        #   2=service ("imap-login" or "pop3-login")
        #   3=disconnect_reason
        #   4=remote_auth_attempts
        #   5=sasl_username
        #   6=sasl_method
        #   7=remote_ip
        #   8=local_ip
        #   9=connection_security
        d = self.parse_date(m.group(1))  # "YYYY-MM-DD HH:MM:SS"
        imap_conn = {
            "connect_time": d,
            "disconnect_time": d,
            "disconnect_reason": m.group(3),
            "service": m.group(2),
            "sasl_username": m.group(5),
            "sasl_method": m.group(6),
            "remote_host": "unknown",
            "remote_ip": m.group(7),
            "connection_security": m.group(9),
            "remote_auth_success": 0,
            "remote_auth_attempts": int(m.group(4))
        }
        self.add_new_connection(imap_conn)
        return { 'imap_conn': imap_conn }
def match_disconnect(self, line):
    '''Match a disconnect/logout line and attach it to a queued record.

    Returns { 'imap_conn': rec } when the disconnect was paired with an
    in-progress connection, True when the line matched but no queued
    connection could be found (an "unmatched" result for log_match),
    or None when the line is not a disconnect at all.
    '''
    #   1=date
    #   2=service ("imap" or "pop3")
    #   3=sasl_username
    #   4=disconnect_reason ("Logged out")
    #   5=in_bytes
    #   6=out_bytes
    #
    # NOTE: there is no way to match up the disconnect with the
    # actual connection because Dovecot does not log a service_tid
    # or an ip address or anything else that could be used to
    # match the two up. We'll just assign the disconnect to the
    # oldest connection for the user.
    m = self.re_disconnect.search(line)
    if m:
        v = {
            "service": m.group(2),
            "disconnect_time": self.parse_date(m.group(1)),
            "disconnect_reason": m.group(4),
            "in_bytes": int(m.group(5)),
            "out_bytes": int(m.group(6))
        }
        # the corresponding login record's service carries a "-login"
        # suffix (eg "imap" disconnects pair with "imap-login")
        imap_conn_q = [
            { 'key':'service', 'value':m.group(2) + '-login' },
            { 'key':'sasl_username', 'value':m.group(3),
              'ignorecase': True }
        ]
        log.debug(imap_conn_q)
        imap_conn = self.find_first(imap_conn_q)
        if imap_conn:
            imap_conn.update(v)
            return { 'imap_conn': imap_conn }
        # matched the line but found no queued connection for the user
        return True
def store(self, imap_conn):
    '''Assign a final disposition to the record and persist it.

    Records whose disposition is configured for dropping (see
    test_drop_disposition) are removed from the queue without being
    written to the record store.
    '''
    if 'disposition' not in imap_conn:
        if imap_conn.get('remote_auth_success') == 0 and \
           imap_conn.get('remote_auth_attempts') == 0:
            # connected but never attempted to authenticate
            imap_conn.update({
                'disposition': 'suspected_scanner',
            })

        elif imap_conn.get('remote_auth_success') == 0 and \
             imap_conn.get('remote_auth_attempts', 0) > 0:
            imap_conn.update({
                'disposition': 'failed_login_attempt',
            })

        elif imap_conn.get('connection_security') != 'TLS' and \
             imap_conn.get('remote_ip') != '127.0.0.1':
            # unencrypted connection from a non-localhost peer
            imap_conn.update({
                'disposition': 'insecure'
            })

        else:
            imap_conn.update({
                'disposition': 'ok',
            })

    drop = self.test_drop_disposition(imap_conn['disposition'])

    if not drop:
        log.debug('store: %s', imap_conn)
        try:
            self.record_store.store('imap_mail', imap_conn)
        except Exception as e:
            # best-effort persistence: log and continue so the queue
            # entry is still released below
            log.exception(e)

    self.remove_connection(imap_conn)
def log_match(self, match_str, match_result, line):
    '''Log the outcome of a line-match attempt at an appropriate level.

    match_result of True means a regexp matched but no in-progress
    record could be associated; a dict is a normal match; falsy means
    the line was not recognized by `match_str`.
    '''
    if match_result is True:
        log.info('%s [unmatched]: %s', match_str, line)

    elif match_result:
        if match_result.get('deferred', False):
            log.debug('%s [deferred]: %s', match_str, line)

        elif 'imap_conn' in match_result:
            log.debug('%s: %s: %s', match_str, line, match_result['imap_conn'])

        else:
            # BUG FIX: the format string was missing its %s placeholder,
            # so match_result was silently dropped from the log output
            log.error('no imap_conn in match_result: %s', match_result)

    else:
        log.debug('%s: %s', match_str, line)
def test_end_of_rec(self, match_result):
    '''True when `match_result` carries a record that is now complete.'''
    if match_result and match_result is not True and not match_result.get('deferred', False):
        return self.end_of_rec(match_result['imap_conn'])
    return False
def end_of_rec(self, imap_conn):
    '''A record is "complete" once the client has disconnected.'''
    return 'disconnect_time' in imap_conn
def handle(self, line):
    '''overrides ReadLineHandler method

    This function is called by the main log reading thread in
    TailFile. All additional log reading is blocked until this
    function completes.

    The storage engine (`record_store`, a SqliteEventStore
    instance) does not block, so this function will return before
    the record is saved to disk.

    IMPORTANT:

    The data structures and methods in this class are not thread
    safe. It is not okay to call any of them when the instance is
    registered with TailFile.
    '''
    if not self.capture_enabled:
        return

    self.update_inprogress_count()
    log.debug('imap recs in progress: %s', len(self.recs))

    # try each matcher in turn; the first that recognizes the line wins
    match = self.match_connect_success(line)
    if match:
        self.log_match('connect', match, line)
        return

    match = self.match_connect_fail(line)
    if match:
        self.log_match('connect_fail', match, line)
        if self.test_end_of_rec(match):
            # we're done - not queued and disconnected ... save it
            self.store(match['imap_conn'])
        return

    match = self.match_disconnect(line)
    if match:
        self.log_match('disconnect', match, line)
        if self.test_end_of_rec(match):
            # we're done - not queued and disconnected ... save it
            self.store(match['imap_conn'])
        return

    self.log_match('IGNORED', None, line)

View File

@ -10,6 +10,7 @@ from db.EventStore import EventStore
from util.DictQuery import DictQuery
from util.safe import (safe_int, safe_append, safe_del)
from .PostfixLogParser import PostfixLogParser
from .CommonHandler import CommonHandler
log = logging.getLogger(__name__)
@ -17,7 +18,7 @@ log = logging.getLogger(__name__)
STATE_CACHE_OWNER_ID = 1
class InboundMailLogHandler(ReadLineHandler):
class PostfixLogHandler(CommonHandler):
'''
'''
def __init__(self, record_store,
@ -26,9 +27,20 @@ class InboundMailLogHandler(ReadLineHandler):
capture_enabled = True,
drop_disposition = None
):
''' EventStore instance for persisting "records" '''
self.record_store = record_store
self.set_capture_enabled(capture_enabled)
super(PostfixLogHandler, self).__init__(
STATE_CACHE_OWNER_ID,
record_store,
date_regexp,
date_parser_fn,
capture_enabled,
drop_disposition
)
# maximum size of the in-progress record queue
self.current_inprogress_recs = len(self.recs)
self.max_inprogress_recs = 100
# A "record" is composed by parsing all the syslog output from
# the activity generated by the MTA (postfix) from a single
@ -66,31 +78,6 @@ class InboundMailLogHandler(ReadLineHandler):
# thread-safe.
#
# our in-progress record queue is a simple list
self.recs = self.get_cached_state(clear=True)
# maximum size of the in-progress record queue
self.current_inprogress_recs = len(self.recs)
self.max_inprogress_recs = 100
# records that have these dispositions will be dropped (not
# recorded in the record store
self.drop_disposition_lock = threading.Lock()
self.drop_disposition = {
'failed_login_attempt': False,
'suspected_scanner': False,
'reject': False
}
self.update_drop_disposition(drop_disposition)
# regular expression that matches a syslog date (always anchored)
self.date_regexp = date_regexp
if date_regexp.startswith('^'):
self.date_regexp = date_regexp[1:]
# function that parses the syslog date
self.date_parser_fn = date_parser_fn
# 1. 1a. postfix/smtpd[13698]: connect from host.tld[1.2.3.4]
# 1=date
@ -233,70 +220,9 @@ class InboundMailLogHandler(ReadLineHandler):
def set_capture_enabled(self, capture_enabled):
''' thread-safe '''
self.capture_enabled = capture_enabled
def update_drop_disposition(self, drop_disposition):
''' thread-safe '''
with self.drop_disposition_lock:
self.drop_disposition.update(drop_disposition)
def get_inprogress_count(self):
''' thread-safe '''
return self.current_inprogress_recs
def datetime_as_str(self, d):
# iso-8601 time friendly to sqlite3
timestamp = d.isoformat(sep=' ', timespec='seconds')
# strip the utc offset from the iso format (ie. remove "+00:00")
idx = timestamp.find('+00:00')
if idx>0:
timestamp = timestamp[0:idx]
return timestamp
def parse_date(self, str):
# we're expecting UTC times from date_parser()
d = self.date_parser_fn(str)
return self.datetime_as_str(d)
def get_cached_state(self, clear=True):
conn = None
try:
# obtain the cached records from the record store
conn = self.record_store.connect()
recs = self.record_store.read_rec(conn, 'state', {
"owner_id": STATE_CACHE_OWNER_ID,
"clear": clear
})
log.info('read %s incomplete records from cache', len(recs))
# eliminate stale records - "stale" should be longer than
# the "give-up" time for postfix (4-5 days)
stale = datetime.timedelta(days=7)
cutoff = self.datetime_as_str(
datetime.datetime.now(datetime.timezone.utc) - stale
)
newlist = [ rec for rec in recs if rec['connect_time'] >= cutoff ]
if len(newlist) < len(recs):
log.warning('dropping %s stale incomplete records',
len(recs) - len(newlist))
return newlist
finally:
if conn: self.record_store.close(conn)
def save_state(self):
log.info('saving state to cache: %s records', len(self.recs))
conn = None
try:
conn = self.record_store.connect()
self.record_store.write_rec(conn, 'state', {
'owner_id': STATE_CACHE_OWNER_ID,
'state': self.recs
})
finally:
if conn: self.record_store.close(conn)
def add_new_connection(self, mta_conn):
@ -304,7 +230,7 @@ class InboundMailLogHandler(ReadLineHandler):
threshold = self.max_inprogress_recs + ( len(self.recs) * 0.05 )
if len(self.recs) > threshold:
backoff = len(self.recs) - self.max_inprogress_recs + int( self.max_inprogress_recs * 0.10 )
log.warning('dropping %s old records', backoff)
log.warning('dropping %s old mta records', backoff)
self.recs = self.recs[min(len(self.recs),backoff):]
self.recs.append(mta_conn)
@ -1314,14 +1240,12 @@ class InboundMailLogHandler(ReadLineHandler):
'disposition': 'ok',
})
drop = False
with self.drop_disposition_lock:
drop = self.drop_disposition.get(mta_conn['disposition'], False)
drop = self.test_drop_disposition(mta_conn['disposition'])
if not drop:
log.debug('store: %s', mta_conn)
try:
self.record_store.store('inbound_mail', mta_conn)
self.record_store.store('mta_mail', mta_conn)
except Exception as e:
log.exception(e)
@ -1389,9 +1313,9 @@ class InboundMailLogHandler(ReadLineHandler):
if not self.capture_enabled:
return
self.current_inprogress_recs = len(self.recs)
self.update_inprogress_count()
log.debug('recs in progress: %s, dds_by_tid=%s',
log.debug('mta recs in progress: %s, dds_by_tid=%s',
len(self.recs),
len(self.dds_by_tid)
)
@ -1474,11 +1398,3 @@ class InboundMailLogHandler(ReadLineHandler):
self.log_match('IGNORED', None, line)
def end_of_callbacks(self, thread):
'''overrides ReadLineHandler method
save incomplete records so we can pick up where we left off
'''
self.save_state()

View File

@ -0,0 +1,3 @@
*.log
pos.json
*.sqlite

View File

@ -0,0 +1,12 @@
{
"capture": true,
"prune_policy": {
"frequency_min": 2400,
"older_than_days": 30
},
"drop_disposition": {
"failed_login_attempt": false,
"suspected_scanner": false,
"reject": false
}
}

View File

@ -0,0 +1,35 @@
#!/bin/bash
# load a mail.log file into the current test vm's capture.sqlite
#
# usage: $0 /path/to/mail.log
#
# Stops the capture daemon, feeds the log to capture.py so its records
# land in $STORAGE_ROOT/reporting/capture.sqlite, then restarts the
# daemon.

if [ -z "$1" ]; then
    echo "usage: $0 /path/to/mail.log"
    exit 1
fi

log="$1"
if [ ! -e "$log" ]; then
    echo "Does not exist: $log"
    exit 1
fi

# mailinabox.conf supplies STORAGE_ROOT
if ! . /etc/mailinabox.conf; then
    echo "Could not load /etc/mailinabox.conf !!"
    exit 1
fi

# fix: message said "maibldap-capture" (typo) for the miabldap-capture unit
echo "Stopping miabldap-capture daemon"
systemctl stop miabldap-capture || exit 1

echo "Ensuring access to capture.sqlite"
# quote the assignment in case STORAGE_ROOT contains spaces
capture_db="$STORAGE_ROOT/reporting/capture.sqlite"
# fails (and aborts) if the db is missing, locked, or unreadable
sqlite3 "$capture_db" "select value from db_info where key='schema_version'" >/dev/null || exit 1

echo "Loading $log"
python3 ../capture.py -d -loglevel info -logfile "$log" -stopateof

echo "Starting miabldap-capture daemon"
systemctl start miabldap-capture

View File

@ -0,0 +1,34 @@
#!/bin/bash
#
# interactively load a mail.log file and create a capture.sqlite
# database in the current directory
#
# usage: $0 [mail.log] [-c] [extra capture.py args...]
#   -c : continue from the saved position instead of starting over

log="./mail.log"
pos="./pos.json"
sqlite="./capture.sqlite"
config="./config.json"

# prefer a local debug.log when one is present
if [ -e "./debug.log" ]; then
    log="./debug.log"
fi

# an explicit *.log as the first argument overrides the default
case "$1" in
    *.log )
        log="$1"
        shift
        ;;
esac

if [ "$1" != "-c" ]; then
    # Start over. Don't continue where we left off
    echo "STARTING OVER"
    rm -f "$pos"
    rm -f "$sqlite"
else
    shift
fi

echo "USING LOG: $log"
echo "DB: $sqlite"

# fix: use quoted "$@" so remaining arguments containing whitespace are
# forwarded to capture.py intact (unquoted $@ re-splits them)
python3 ../capture.py -d -loglevel info "$@" -logfile "$log" -posfile "$pos" -sqlitefile "$sqlite" -config "$config"

View File

@ -1,12 +1,17 @@
import { BvTable, ConnectionDisposition, DateFormatter } from "./charting.js";
import { spinner } from "../../ui-common/page-header.js";
Vue.component('capture-db-stats', {
export default Vue.component('capture-db-stats', {
props: {
},
components: {
spinner,
},
template:'<div>'+
'<template v-if="stats">'+
'<caption class="text-nowrap">Database date range</caption><div class="ml-2">First: {{stats.mta_connect.connect_time.min_str}}</div><div class="ml-2">Last: {{stats.mta_connect.connect_time.max_str}}</div>'+
'<caption class="text-nowrap">Database date range</caption><div class="ml-2">First: {{stats.db_stats.connect_time.min_str}}</div><div class="ml-2">Last: {{stats.db_stats.connect_time.max_str}}</div>'+
'<div class="mt-2">'+
' <b-table-lite small caption="Connections by disposition" caption-top :fields="row_counts.fields" :items=row_counts.items></b-table-lite>'+
'</div>'+
@ -37,9 +42,9 @@ Vue.component('capture-db-stats', {
// convert dates
var parser = d3.utcParse(this.stats.date_parse_format);
[ 'min', 'max' ].forEach( k => {
var d = parser(this.stats.mta_connect.connect_time[k]);
this.stats.mta_connect.connect_time[k] = d;
this.stats.mta_connect.connect_time[k+'_str'] =
var d = parser(this.stats.db_stats.connect_time[k]);
this.stats.db_stats.connect_time[k] = d;
this.stats.db_stats.connect_time[k+'_str'] =
d==null ? '-' : DateFormatter.dt_long(d);
});
@ -63,11 +68,11 @@ Vue.component('capture-db-stats', {
this.row_counts.fields[0].tdClass = 'text-capitalize';
const total = this.stats.mta_connect.count;
for (var name in this.stats.mta_connect.disposition)
const total = this.stats.db_stats.count;
for (var name in this.stats.db_stats.disposition)
{
const count =
this.stats.mta_connect.disposition[name].count;
this.stats.db_stats.disposition[name].count;
this.row_counts.items.push({
name: name,
count: count,
@ -80,7 +85,7 @@ Vue.component('capture-db-stats', {
})
this.row_counts.items.push({
name:'Total',
count:this.stats.mta_connect.count,
count:this.stats.db_stats.count,
percent:1,
'_rowVariant': 'primary'
});

View File

@ -1,4 +1,6 @@
Vue.component('chart-multi-line-timeseries', {
import { ChartPrefs, NumberFormatter, ChartVue } from "./charting.js";
export default Vue.component('chart-multi-line-timeseries', {
props: {
chart_data: { type:Object, required:false }, /* TimeseriesData */
width: { type:Number, default: ChartPrefs.default_width },
@ -57,7 +59,7 @@ Vue.component('chart-multi-line-timeseries', {
.text("no data");
}
this.xscale = d3.scaleUtc()
this.xscale = d3.scaleTime()
.domain(d3.extent(this.tsdata.dates))
.nice()
.range([this.margin.left, this.width - this.margin.right])
@ -78,25 +80,39 @@ Vue.component('chart-multi-line-timeseries', {
.call(this.yAxis.bind(this))
.attr("font-size", ChartPrefs.axis_font_size);
const line = d3.line()
.defined(d => !isNaN(d))
.x((d, i) => this.xscale(this.tsdata.dates[i]))
.y(d => this.yscale(d));
const path = svg.append("g")
.attr("fill", "none")
.attr("stroke-width", 1.5)
.attr("stroke-linejoin", "round")
.attr("stroke-linecap", "round")
.selectAll("path")
.data(this.tsdata.series)
.join("path")
.style("mix-blend-mode", "multiply")
.style("stroke", (d, i) => this.colors[i])
.attr("d", d => line(d.values))
;
svg.call(this.hover.bind(this), path);
if (this.tsdata.dates.length == 1) {
// special case
const g = svg.append("g")
.selectAll("circle")
.data(this.tsdata.series)
.join("circle")
.attr("fill", (d, i) => this.colors[i])
.attr("cx", this.xscale(this.tsdata.dates[0]))
.attr("cy", d => this.yscale(d.values[0]))
.attr("r", 2.5);
this.hover(svg, g);
}
else {
const line = d3.line()
.defined(d => !isNaN(d))
.x((d, i) => this.xscale(this.tsdata.dates[i]))
.y(d => this.yscale(d));
const path = svg.append("g")
.attr("fill", "none")
.attr("stroke-width", 1.5)
.attr("stroke-linejoin", "round")
.attr("stroke-linecap", "round")
.selectAll("path")
.data(this.tsdata.series)
.join("path")
.style("mix-blend-mode", "multiply")
.style("stroke", (d, i) => this.colors[i])
.attr("d", d => line(d.values))
;
svg.call(this.hover.bind(this), path);
}
},
xAxis: function(g) {
@ -160,8 +176,10 @@ Vue.component('chart-multi-line-timeseries', {
const xvalue = this.xscale.invert(pointer[0]); // date
const yvalue = this.yscale.invert(pointer[1]); // number
//const i = d3.bisectCenter(this.tsdata.dates, xvalue); // index
const i = d3.bisect(this.tsdata.dates, xvalue); // index
if (i >= this.tsdata.dates.length) return;
var i = d3.bisect(this.tsdata.dates, xvalue); // index
if (i<0 || i > this.tsdata.dates.length) return;
i = Math.min(this.tsdata.dates.length-1, i);
// closest series
var closest = null;
for (var sidx=0; sidx<this.tsdata.series.length; sidx++) {
@ -174,7 +192,7 @@ Vue.component('chart-multi-line-timeseries', {
}
}
const s = this.tsdata.series[closest.sidx];
if (i>= s.values.length) {
if (i<0 || i>= s.values.length) {
dot.attr("display", "none");
return;
}
@ -192,7 +210,7 @@ Vue.component('chart-multi-line-timeseries', {
function entered() {
path.style("mix-blend-mode", null).attr("stroke", "#ddd");
dot.attr("display", null);
//dot.attr("display", null);
}
function left() {

View File

@ -1,4 +1,7 @@
Vue.component('chart-pie', {
import { ChartPrefs, NumberFormatter, ChartVue } from "./charting.js";
export default Vue.component('chart-pie', {
/*
* chart_data: [
* { name: 'name', value: value },
@ -127,7 +130,7 @@ Vue.component('chart-pie', {
radius *= 0.7;
else
radius *= 0.8;
arcLabel = d3.arc().innerRadius(radius).outerRadius(radius);
var arcLabel = d3.arc().innerRadius(radius).outerRadius(radius);
svg.append("g")
.attr("stroke", "white")
@ -148,11 +151,15 @@ Vue.component('chart-pie', {
.data(arcs)
.join("text")
.attr("transform", d => `translate(${arcLabel.centroid(d)})`)
.call(text => text.append("tspan")
.call(text => text
.filter(d => (d.endAngle - d.startAngle) > 0.25)
.append("tspan")
.attr("y", "-0.4em")
.attr("font-weight", "bold")
.text(d => d.data.name))
.call(text => text.filter(d => (d.endAngle - d.startAngle) > 0.25).append("tspan")
.call(text => text
.filter(d => (d.endAngle - d.startAngle) > 0.25)
.append("tspan")
.attr("x", 0)
.attr("y", "0.7em")
.attr("fill-opacity", 0.7)

View File

@ -2,7 +2,10 @@
stacked bar chart
*/
Vue.component('chart-stacked-bar-timeseries', {
import { ChartPrefs, NumberFormatter, ChartVue } from "./charting.js";
export default Vue.component('chart-stacked-bar-timeseries', {
props: {
chart_data: { type:Object, required:false }, /* TimeseriesData */
width: { type:Number, default: ChartPrefs.default_width },
@ -101,13 +104,14 @@ Vue.component('chart-stacked-bar-timeseries', {
.text("no data");
}
this.xscale = d3.scaleUtc()
this.xscale = d3.scaleTime()
.domain(d3.extent(this.tsdata.dates))
.nice()
.range([this.margin.left, this.width - this.margin.right])
var barwidth = this.tsdata.barwidth(this.xscale, 1);
var padding = barwidth / 2;
var barwidth = this.tsdata.barwidth(this.xscale);
var padding_x = barwidth / 2;
var padding_y = ChartVue.get_yAxisLegendBounds(this.tsdata).height + 2;
this.yscale = d3.scaleLinear()
.domain([
@ -115,28 +119,30 @@ Vue.component('chart-stacked-bar-timeseries', {
d3.sum(this.tsdata.series, s => d3.max(s.values))
])
.range([
this.height - this.margin.bottom,
this.height - this.margin.bottom - padding_y,
this.margin.top,
]);
svg.append("g")
.call(this.xAxis.bind(this, padding))
var g = svg.append("g")
.attr("transform", `translate(0, ${padding_y})`);
g.append("g")
.call(this.xAxis.bind(this, padding_x))
.attr("font-size", ChartPrefs.axis_font_size);
svg.append("g")
.call(this.yAxis.bind(this))
g.append("g")
.call(this.yAxis.bind(this, padding_y))
.attr("font-size", ChartPrefs.axis_font_size);
for (var s_idx=0; s_idx<this.tsdata.series.length; s_idx++) {
svg.append("g")
g.append("g")
.datum(s_idx)
.attr("fill", this.colors[s_idx])
.selectAll("rect")
.data(this.stacked[s_idx])
.join("rect")
.attr("x", d => this.xscale(d.data.date) - barwidth/2 + padding)
.attr("y", d => this.yscale(d[1]))
.attr("x", d => this.xscale(d.data.date) - barwidth/2 + padding_x)
.attr("y", d => this.yscale(d[1]) + padding_y)
.attr("height", d => this.yscale(d[0]) - this.yscale(d[1]))
.attr("width", barwidth)
.call( hover.bind(this) )
@ -146,7 +152,13 @@ Vue.component('chart-stacked-bar-timeseries', {
;
}
var hovinfo = svg.append("g");
g.append("g")
.attr("transform", `translate(${this.margin.left}, 0)`)
.call(
g => ChartVue.add_yAxisLegend(g, this.tsdata, this.colors)
);
var hovinfo = g.append("g");
function hover(rect) {
if ("ontouchstart" in document) rect
@ -165,10 +177,11 @@ Vue.component('chart-stacked-bar-timeseries', {
var s_name = this.tsdata.series[s_idx].name;
var v = d.data[s_name];
var x = Number(rect.attr('x')) + barwidth/2;
//var y = Number(rect.attr('y')) + Number(rect.attr('height'))/2;
var y = Number(rect.attr('y'));
hovinfo.attr(
"transform",
`translate( ${x}, ${rect.attr('y')} )`)
`translate( ${x}, ${y} )`)
.append('text')
.attr("font-family", ChartPrefs.default_font_family)
.attr("font-size", ChartPrefs.default_font_size)
@ -203,18 +216,16 @@ Vue.component('chart-stacked-bar-timeseries', {
return x;
},
yAxis: function(g) {
yAxis: function(padding, g) {
var y = g.attr(
"transform",
`translate(${this.margin.left},0)`
`translate(${this.margin.left},${padding})`
).call(
d3.axisLeft(this.yscale)
.ticks(this.height/50)
).call(g =>
g.select(".domain").remove()
).call(g => {
ChartVue.add_yAxisLegend(g, this.tsdata, this.colors);
});
).call(
g => g.select(".domain").remove()
);
return y;
},

View File

@ -1,8 +1,9 @@
Vue.component('chart-table', {
export default Vue.component('chart-table', {
props: {
items: Array,
fields: Array,
caption: String
caption: String,
small: { type:Boolean, default:true }
},
/* <b-table-lite striped small :fields="fields_x" :items="items" caption-top><template #table-caption><span class="text-nowrap">{{caption}}</span></template></b-table>*/
@ -19,7 +20,7 @@ Vue.component('chart-table', {
var table = ce('b-table-lite', {
props: {
'striped': true,
'small': true,
'small': this.small,
'fields': this.fields_x,
'items': this.items,
'caption-top': true
@ -38,7 +39,7 @@ Vue.component('chart-table', {
if (this.items.length == 0) {
return [{
key: 'no data',
thClass: 'text-nowrap'
thClass: 'text-nowrap align-top'
}];
}
return this.fields;

View File

@ -1,5 +1,5 @@
class ChartPrefs {
export class ChartPrefs {
static get colors() {
// see: https://github.com/d3/d3-scale-chromatic
return d3.schemeSet2;
@ -40,7 +40,7 @@ class ChartPrefs {
};
class DateFormatter {
export class DateFormatter {
/*
* date and time
*/
@ -162,7 +162,7 @@ class DateFormatter {
};
class DateRange {
export class DateRange {
/*
* ranges
*/
@ -224,15 +224,24 @@ class DateRange {
else if (type == 'ytd')
return DateRange.ytd();
else if (type == 'last30days')
return DateRange.lastXdays(30);
return DateRange.lastXdays(29);
else if (type == 'last7days')
return DateRange.lastXdays(7)
return DateRange.lastXdays(6)
else if (type == 'today') {
var d = new Date();
return [ d, d ];
}
else if (type == 'yesterday') {
var d = new Date();
d.setTime(d.getTime() - (1 * 24 * 60 * 60 * 1000));
return [ d, d ];
}
return null;
}
};
class NumberFormatter {
export class NumberFormatter {
static format(v) {
return isNaN(v) || v===null ? "N/A" : v.toLocaleString(ChartPrefs.locales);
}
@ -310,7 +319,7 @@ class NumberFormatter {
});
class BvTable {
export class BvTable {
constructor(data, opt) {
opt = opt || {};
Object.assign(this, data);
@ -437,7 +446,7 @@ class BvTable {
};
class BvTableField {
export class BvTableField {
constructor(field, field_type) {
// this:
// key - required
@ -484,6 +493,8 @@ class BvTableField {
}
else if (ft.type == 'number') {
if (ft.subtype == 'plain' ||
ft.subtype === null ||
ft.subtype === undefined ||
ft.subtype == 'decimal' && isNaN(ft.places)
)
{
@ -619,7 +630,7 @@ class BvTableField {
};
class MailBvTable extends BvTable {
export class MailBvTable extends BvTable {
flag(key, fn) {
var field = this.get_field(key, true);
if (!field) return;
@ -727,7 +738,7 @@ class MailBvTable extends BvTable {
}
class ChartVue {
export class ChartVue {
static svg_attrs(viewBox) {
var attrs = {
@ -748,6 +759,15 @@ class ChartVue {
return svg;
}
static get_yAxisLegendBounds(data) {
const h = ChartPrefs.axis_font_size;
return {
width: h + 6,
height: h * data.series.length
};
}
static add_yAxisLegend(g, data, colors) {
//var gtick = g.select(".tick:last-of-type").append("g");
const h = ChartPrefs.axis_font_size;
@ -800,7 +820,7 @@ class ChartVue {
* }
*/
class TimeseriesData {
export class TimeseriesData {
constructor(data) {
Object.assign(this, data);
this.convert_dates();
@ -853,9 +873,8 @@ class TimeseriesData {
}
static binsizeOfRange(range) {
// target 100-120 datapoints
const target = 100;
const tolerance = 0.2; // 20%
// target roughly 75 datapoints
const target = 75;
if (typeof range[0] == 'string') {
var parser = d3.utcParse('%Y-%m-%d %H:%M:%S');
@ -865,27 +884,47 @@ class TimeseriesData {
const span_min = Math.ceil(
(range[1].getTime() - range[0].getTime()) / (1000*60*target)
);
const bin_days = Math.floor(span_min / (24*60));
const bin_hours = Math.floor((span_min - bin_days*24*60) / 60);
var bin_days = Math.floor(span_min / (24*60));
var bin_hours = Math.floor((span_min - bin_days*24*60) / 60);
if (bin_days >= 1) {
return bin_days * 24 * 60 +
(bin_hours > (24 * tolerance) ? bin_hours*60: 0);
if (bin_hours > 18) {
bin_days += 1;
bin_hours = 0;
}
else if (bin_hours > 6) {
bin_hours = 12;
}
else {
bin_hours = 0;
}
return bin_days * 24 * 60 + bin_hours*60;
}
const bin_mins = span_min - bin_days*24*60 - bin_hours*60;
if (bin_hours >= 1) {
return bin_hours * 60 +
(bin_mins > (60 * tolerance) ? bin_mins: 0 );
var bin_mins = span_min - bin_days*24*60 - bin_hours*60;
if (bin_mins > 45) {
bin_hours += 1
bin_mins = 0;
}
return bin_mins;
else if (bin_mins > 15) {
bin_mins = 30;
}
else {
bin_mins = 0;
}
return bin_hours * 60 + bin_mins;
}
barwidth(xscale, barspacing) {
barwidth(xscale, barspacing, max_width) {
/* get the width of a bar in a bar chart */
var start = this.range[0];
var end = this.range[1];
var bins = (end.getTime() - start.getTime()) / (1000 * this.binsizeTimespan());
return Math.max(1, (xscale.range()[1] - xscale.range()[0])/bins - (barspacing || 0));
if (this.dates.length == 0) return 0; // no data
barspacing = (barspacing === undefined) ? 2 : barspacing;
max_width = (max_width === undefined) ? 75 : max_width;
var first_date = this.dates[0];
var last_date = this.dates[this.dates.length-1];
var bins = (last_date.getTime() - first_date.getTime()) / (1000 * 60 * this.binsize) + 1;
if (bins == 1) return max_width;
return Math.min(max_width, Math.max(1, (xscale(last_date) - xscale(first_date))/bins - barspacing));
}
formatDateTimeLong(d) {
@ -947,7 +986,7 @@ class TimeseriesData {
};
class ConnectionDisposition {
export class ConnectionDisposition {
constructor(disposition) {
const data = {
'failed_login_attempt': {

View File

@ -1,4 +1,7 @@
Vue.component('date-range-picker', {
import { DateRange, DateFormatter } from "./charting.js";
export default Vue.component('date-range-picker', {
props: {
start_range: [ String, Array ], // "ytd", "mtd", "wtd", or [start, end] where start and end are strings in format YYYY-MM-DD in localti
recall_id: String, // save / recall from localStorage
@ -59,6 +62,8 @@ Vue.component('date-range-picker', {
range: range,
range_type: range_type,
options: [
{ value:'today', text:'Today' },
{ value:'yesterday', text:'Yesterday' },
{ value:'last7days', text:'Last 7 days' },
{ value:'last30days', text:'Last 30 days' },
{ value:'wtd', text:'Week-to-date' },
@ -153,16 +158,20 @@ Vue.component('date-range-picker', {
range_type_change: function(evt) {
// ui select callback
if (this.range_type == 'last7days')
this.range = DateRange.lastXdays_as_ymd(7);
else if (this.range_type == 'last30days')
this.range = DateRange.lastXdays_as_ymd(30);
else if (this.range_type == 'wtd')
this.range = DateRange.wtd_as_ymd();
else if (this.range_type == 'mtd')
this.range = DateRange.mtd_as_ymd();
else if (this.range_type == 'ytd')
this.range = DateRange.ytd_as_ymd();
var range = DateRange.rangeFromType(this.range_type);
if (range) {
this.range = range.map(d => DateFormatter.ymd(d));
}
// if (this.range_type == 'last7days')
// this.range = DateRange.lastXdays_as_ymd(7);
// else if (this.range_type == 'last30days')
// this.range = DateRange.lastXdays_as_ymd(30);
// else if (this.range_type == 'wtd')
// this.range = DateRange.wtd_as_ymd();
// else if (this.range_type == 'mtd')
// this.range = DateRange.mtd_as_ymd();
// else if (this.range_type == 'ytd')
// this.range = DateRange.ytd_as_ymd();
},
}

View File

@ -22,34 +22,10 @@
<script src="https://d3js.org/d3.v6.min.js"></script>
<script>axios.defaults.baseURL="/admin"</script>
<!-- our code -->
<script src="ui-common/exceptions.js"></script>
<script src="ui-common/authentication.js"></script>
<script src="ui-common/page-layout.js"></script>
<script src="ui-common/page-header.js"></script>
<script src="reports/ui/settings.js"></script>
<script src="reports/ui/capture-db-stats.js"></script>
<script src="reports/ui/charting.js"></script>
<script src="reports/ui/wbr-text.js"></script>
<script src="reports/ui/date-range-picker.js"></script>
<script src="reports/ui/chart-pie.js"></script>
<script src="reports/ui/chart-table.js"></script>
<script src="reports/ui/chart-stacked-bar-timeseries.js"></script>
<script src="reports/ui/chart-multi-line-timeseries.js"></script>
<script src="reports/ui/panel-messages-sent.js"></script>
<script src="reports/ui/panel-messages-received.js"></script>
<script src="reports/ui/panel-flagged-connections.js"></script>
<script src="reports/ui/panel-user-activity.js"></script>
<script src="reports/ui/panel-remote-sender-activity.js"></script>
<script src="reports/ui/reports-page-header.js"></script>
<script src="reports/ui/page-settings.js"></script>
<script src="reports/ui/page-reports-main.js"></script>
<script src="reports/ui/index.js"></script>
<script src="reports/ui/index.js" type="module"></script>
</head>
<body onload="init_app()">
<body>
<router-view id="app"></router-view>

View File

@ -2,14 +2,19 @@
* reports index page
*/
import page_settings from "./page-settings.js";
import page_reports_main from "./page-reports-main.js";
import { Me, init_authentication_interceptors } from "../../ui-common/authentication.js";
import { AuthenticationError } from "../../ui-common/exceptions.js";
import UserSettings from "./settings.js";
const app = {
router: new VueRouter({
routes: [
{ path: '/', component: Vue.component('page-reports-main') },
{ path: '/settings', component: Vue.component('page-settings') },
{ path: '/:panel', component: Vue.component('page-reports-main') },
{ path: '/', component: page_reports_main },
{ path: '/settings', component: page_settings },
{ path: '/:panel', component: page_reports_main },
],
scrollBehavior: function(to, from, savedPosition) {
if (savedPosition) {
@ -19,8 +24,8 @@ const app = {
}),
components: {
'page-settings': Vue.component('page-settings'),
'page-reports-main': Vue.component('page-reports-main'),
'page-settings': page_settings,
'page-reports-main': page_reports_main,
},
data: {
@ -64,12 +69,12 @@ const app = {
function init_app() {
init_axios_interceptors();
init_authentication_interceptors();
UserSettings.load().then(settings => {
new Vue(app).$mount('#app');
}).catch(error => {
alert('' + error);
});
}
UserSettings.load().then(settings => {
new Vue(app).$mount('#app');
}).catch(error => {
alert('' + error);
});

View File

@ -0,0 +1,57 @@
import { spinner } from "../../ui-common/page-header.js";
// Vue component that fetches and displays the raw headers of a
// delivered message, located by its LMTP delivery id and the
// recipient user's id (POSTs to reports/uidata/message-headers).
//
// NOTE(review): the template uses <spinner>, but this component has no
// `components:` option registering it — presumably spinner is
// registered globally elsewhere; verify.
export default Vue.component('message-headers-view', {
    props: {
        lmtp_id: String,  // LMTP delivery id of the message to locate
        user_id: String   // mailbox owner to search
    },
    // Shows a "searching" spinner while loading, otherwise the raw
    // header text in a <pre>.
    template:
        '<div>' +
        '<div class="text-center" v-if="loading">{{loading_msg}} <spinner></spinner></div>' +
        '<pre v-else>{{ message_headers }}</pre>' +
        '</div>',
    data: function() {
        return {
            loading: true,        // true while the POST is in flight
            loading_msg: '',      // progress text shown next to the spinner
            message_headers: ''   // result text (headers, or an error/notice)
        }
    },
    watch: {
        // re-fetch whenever the parent points us at a different message
        lmtp_id: function() {
            this.load(this.lmtp_id, this.user_id);
        }
    },
    mounted: function() {
        // initial fetch on first render
        this.load(this.lmtp_id, this.user_id);
    },
    methods: {
        // Fetch headers for (lmtp_id, user_id); all outcomes (success,
        // not-found, server error) are rendered into message_headers.
        load: function(lmtp_id, user_id) {
            if (!lmtp_id || !user_id) {
                this.message_headers = 'no data';
                return;
            }
            this.loading = true;
            this.loading_msg = 'Searching for message with LMTP ID ' + lmtp_id;
            axios.post('reports/uidata/message-headers', {
                lmtp_id,
                user_id
            }).then(response => {
                this.message_headers = response.data;
                // empty body means the server found nothing for that id
                if (this.message_headers == '')
                    this.message_headers = `Message with LMTP "${lmtp_id}" not found. It may have been deleted.`;
            }).catch(e => {
                // prefer the server's error text when available
                this.message_headers = '' + (e.response.data || e);
            }).finally( () => {
                this.loading = false;
            });
        }
    }
});

View File

@ -1,16 +1,31 @@
Vue.component('page-reports-main', function(resolve, reject) {
import page_layout from '../../ui-common/page-layout.js';
import reports_page_header from './reports-page-header.js';
import date_range_picker from './date-range-picker.js';
import capture_db_stats from './capture-db-stats.js';
import panel_messages_sent from './panel-messages-sent.js';
import panel_messages_received from './panel-messages-received.js';
import panel_flagged_connections from './panel-flagged-connections.js';
import panel_user_activity from './panel-user-activity.js';
import panel_remote_sender_activity from './panel-remote-sender-activity.js';
import { TimeseriesData } from './charting.js';
export default Vue.component('page-reports-main', function(resolve, reject) {
axios.get('reports/ui/page-reports-main.html').then((response) => { resolve({
template: response.data,
components: {
'page-layout': Vue.component('page-layout'),
'reports-page-header': Vue.component('reports-page-header'),
'date-range-picker': Vue.component('date-range-picker'),
'panel-messages-sent': Vue.component('panel-messages-sent'),
'panel-messages-received': Vue.component('panel-messages-received'),
'panel-flagged-connections': Vue.component('panel-flagged-connections'),
'panel-user-activity': Vue.component('panel-user-activity'),
'page-layout': page_layout,
'reports-page-header': reports_page_header,
'date-range-picker': date_range_picker,
'capture-db-stats': capture_db_stats,
'panel-messages-sent': panel_messages_sent,
'panel-messages-received': panel_messages_received,
'panel-flagged-connections': panel_flagged_connections,
'panel-user-activity': panel_user_activity,
'panel-remote-sender-activity': panel_remote_sender_activity,
},
data: function() {

View File

@ -1,11 +1,18 @@
Vue.component('page-settings', function(resolve, reject) {
import page_layout from '../../ui-common/page-layout.js';
import reports_page_header from './reports-page-header.js';
import UserSettings from "./settings.js";
import { CaptureConfig } from "./settings.js";
export default Vue.component('page-settings', function(resolve, reject) {
axios.get('reports/ui/page-settings.html').then((response) => { resolve({
template: response.data,
components: {
'page-layout': Vue.component('page-layout'),
'reports-page-header': Vue.component('reports-page-header'),
'page-layout': page_layout,
'reports-page-header': reports_page_header,
},
data: function() {

View File

@ -1,4 +1,13 @@
Vue.component('panel-flagged-connections', function(resolve, reject) {
import chart_multi_line_timeseries from "./chart-multi-line-timeseries.js";
import chart_stacked_bar_timeseries from "./chart-stacked-bar-timeseries.js";
import chart_pie from "./chart-pie.js";
import chart_table from "./chart-table.js";
import { ChartPrefs, TimeseriesData, BvTable, ConnectionDisposition } from "./charting.js";
export default Vue.component('panel-flagged-connections', function(resolve, reject) {
axios.get('reports/ui/panel-flagged-connections.html').then((response) => { resolve({
template: response.data,
@ -14,10 +23,10 @@ Vue.component('panel-flagged-connections', function(resolve, reject) {
},
components: {
'chart-multi-line-timeseries': Vue.component('chart-multi-line-timeseries'),
'chart-stacked-bar-timeseries': Vue.component('chart-stacked-bar-timeseries'),
'chart-pie': Vue.component('chart-pie'),
'chart-table': Vue.component('chart-table'),
'chart-multi-line-timeseries': chart_multi_line_timeseries,
'chart-stacked-bar-timeseries': chart_stacked_bar_timeseries,
'chart-pie': chart_pie,
'chart-table': chart_table,
},
computed: {

View File

@ -3,7 +3,13 @@
*/
Vue.component('panel-messages-received', function(resolve, reject) {
import chart_multi_line_timeseries from "./chart-multi-line-timeseries.js";
import chart_table from "./chart-table.js";
import { ChartPrefs, TimeseriesData, BvTable } from "./charting.js";
export default Vue.component('panel-messages-received', function(resolve, reject) {
axios.get('reports/ui/panel-messages-received.html').then((response) => { resolve({
template: response.data,
@ -19,10 +25,8 @@ Vue.component('panel-messages-received', function(resolve, reject) {
},
components: {
'chart-multi-line-timeseries': Vue.component('chart-multi-line-timeseries'),
// 'chart-stacked-bar-timeseries': Vue.component('chart-stacked-bar-timeseries'),
// 'chart-pie': Vue.component('chart-pie'),
'chart-table': Vue.component('chart-table'),
'chart-multi-line-timeseries': chart_multi_line_timeseries,
'chart-table': chart_table,
},
data: function() {

View File

@ -9,7 +9,15 @@
'loading' event=number
*/
Vue.component('panel-messages-sent', function(resolve, reject) {
import chart_multi_line_timeseries from "./chart-multi-line-timeseries.js";
import chart_stacked_bar_timeseries from "./chart-stacked-bar-timeseries.js";
import chart_pie from "./chart-pie.js";
import chart_table from "./chart-table.js";
import { ChartPrefs, TimeseriesData, BvTable } from "./charting.js";
export default Vue.component('panel-messages-sent', function(resolve, reject) {
axios.get('reports/ui/panel-messages-sent.html').then((response) => { resolve({
template: response.data,
@ -27,10 +35,10 @@ Vue.component('panel-messages-sent', function(resolve, reject) {
},
components: {
'chart-multi-line-timeseries': Vue.component('chart-multi-line-timeseries'),
'chart-stacked-bar-timeseries': Vue.component('chart-stacked-bar-timeseries'),
'chart-pie': Vue.component('chart-pie'),
'chart-table': Vue.component('chart-table'),
'chart-multi-line-timeseries': chart_multi_line_timeseries,
'chart-stacked-bar-timeseries': chart_stacked_bar_timeseries,
'chart-pie': chart_pie,
'chart-table': chart_table,
},
data: function() {
@ -50,7 +58,7 @@ Vue.component('panel-messages-sent', function(resolve, reject) {
},
height_recip: function() {
return this.height / 2;
return (this.height / 3) *2;
},
radius_recip_pie: function() {

View File

@ -28,10 +28,10 @@
<b-form-radio class="ml-1" v-model="sender_type" value="server" @change="update_recent_list()">Server</b-form-radio>
</div>
<b-input-group style="width:40em">
<b-form-input v-if="sender_type=='email'" class="h-auto" :autofocus="data_sender===null" list="panel-rsa-recent" v-model="email" placeholder="Enter an email address (envelope FROM)"></b-form-input>
<b-form-input v-else class="h-auto" :autofocus="data_sender===null" list="panel-rsa-recent" v-model="server" placeholder="Enter a hostname or ip address"></b-form-input>
<b-form-input v-if="sender_type=='email'" class="h-auto" :autofocus="data_sender===null" list="panel-rsa-recent" @change="email_changed" placeholder="Enter an email address (envelope FROM)"></b-form-input>
<b-form-input v-else class="h-auto" :autofocus="data_sender===null" list="panel-rsa-recent" @change="server_changed" placeholder="Enter a hostname or ip address"></b-form-input>
<b-input-group-append>
<b-button variant="primary" @click="change_sender" :disabled="sender_type=='email' && (email == '' || email==data_sender) || sender_type=='server' && (server =='' || server==data_sender)">Search</b-button>
<b-button variant="primary" @click="change_sender">Search</b-button>
</b-input-group-append>
</b-input-group>
<b-alert variant="warning" class="ml-2" :show="activity && activity.items.length>=get_row_limit()"><sup>*</sup> Tables limited to {{ get_row_limit() }} rows <router-link to="/settings"><b-icon icon="gear-fill"></b-icon></router-link></b-alert>

View File

@ -2,7 +2,11 @@
details on the activity of a remote sender (envelope from)
*/
Vue.component('panel-remote-sender-activity', function(resolve, reject) {
import UserSettings from "./settings.js";
import { MailBvTable, ConnectionDisposition } from "./charting.js";
export default Vue.component('panel-remote-sender-activity', function(resolve, reject) {
axios.get('reports/ui/panel-remote-sender-activity.html').then((response) => { resolve({
template: response.data,
@ -94,6 +98,14 @@ Vue.component('panel-remote-sender-activity', function(resolve, reject) {
},
methods: {
email_changed: function(evt) {
this.email = evt;
},
server_changed: function(evt) {
this.server = evt;
},
update_recent_list: function() {
this.recent_senders = UserSettings.get()
.get_recent_list(this.set_prefix + this.sender_type);

View File

@ -1,17 +1,21 @@
<div>
<b-modal ref="message_headers_modal" hide-header no-fade ok-only no-close-on-backdrop size="lg" scrollable>
<message-headers-view :user_id="data_user_id" :lmtp_id="lmtp_id"></message-headers-view>
</b-modal>
<datalist id="panel-ua-users">
<option v-for="user in all_users">{{ user }}</option>
</datalist>
<b-form @submit.prevent="getChartData()" class="d-flex">
<b-input-group class="mb-3" style="width:30em">
<b-form-input class="h-auto" :autofocus="data_user_id===null" list="panel-ua-users" v-model="user_id" placeholder="Enter a user id/email address"></b-form-input>
<b-form-input ref="user_id_input" class="h-auto" :autofocus="data_user_id===null" list="panel-ua-users" @change="sync_user_id" placeholder="Enter a user id/email address"></b-form-input>
<b-input-group-append>
<b-button variant="primary" @click="change_user">Change user</b-button>
</b-input-group-append>
</b-input-group>
<b-alert variant="warning" class="ml-2" :show="sent_mail && sent_mail.items.length>=get_row_limit() || received_mail && received_mail.items.length>=get_row_limit()"><sup>*</sup> Tables limited to {{ get_row_limit() }} rows <router-link to="/settings"><b-icon icon="gear-fill"></b-icon></router-link></b-alert>
<b-alert variant="warning" class="ml-2" :show="sent_mail && sent_mail.items.length>=get_row_limit() || received_mail && received_mail.items.length>=get_row_limit() || imap_details && imap_details.items.length>=get_row_limit()"><sup>*</sup> Tables limited to {{ get_row_limit() }} rows <router-link to="/settings"><b-icon icon="gear-fill"></b-icon></router-link></b-alert>
<b-form-checkbox class="ml-auto" v-model="show_only_flagged" @change="show_only_flagged_change()">Flagged only</b-form-checkbox>
</b-form>
@ -42,7 +46,7 @@
</b-table>
</b-tab>
<b-tab :title="`Received mail (${received_mail.items.length})`">
<b-tab>
<template #title>
Received mail<sup v-if="received_mail.items.length >= get_row_limit()">*</sup> ({{received_mail.items.length}})
</template>
@ -61,6 +65,7 @@
</template>
<template #row-details="row">
<b-card>
<div><strong>Remote sender</strong>: {{ row.item.remote_host }}[{{ row.item.remote_ip }}]</div>
<div><strong>Connection disposition</strong>: {{ disposition_formatter(row.item.disposition) }}</div>
<div v-if="row.item.orig_to"><strong>Sent to alias</strong>: {{ row.item.orig_to }}</div>
<div v-if="row.item.dkim_reason"><strong>Dkim reason</strong>: {{row.item.dkim_reason}}</div>
@ -68,9 +73,42 @@
<div v-if="row.item.postgrey_reason"><strong>Postgrey reason</strong>: {{row.item.postgrey_reason}}</div>
<div v-if="row.item.postgrey_delay"><strong>Postgrey delay</strong>: {{received_mail.x_fields.postgrey_delay.formatter(row.item.postgrey_delay)}}</div>
<div v-if="row.item.spam_result"><strong>Spam score</strong>: {{received_mail.x_fields.spam_score.formatter(row.item.spam_score)}}</div>
<div v-if="row.item.message_id"><strong>Message-ID</strong>: {{ row.item.message_id }}</div>
<div v-if="row.item.failure_info"><strong>Failure info</strong>: {{row.item.failure_info}}</div>
<div v-if="row.item.lmtp_id"><a href="#" @click.prevent.stop="show_message_headers(row.item.lmtp_id)">Message headers</a></div>
</b-card>
</template>
</b-table>
</b-tab>
<b-tab>
<template #title>
IMAP Connections
</template>
<b-table
tbody-tr-class="cursor-pointer"
selectable
select-mode="single"
:filter="show_only_flagged_filter"
:filter-function="table_filter_cb"
:items="imap_conn_summary.items"
:fields="imap_conn_summary.fields"
@row-clicked="load_imap_details">
</b-table>
<div v-if="imap_details" class="bg-white">
<div class="mt-3 text-center bg-info p-1">{{imap_details._desc}} ({{imap_details.items.length}} rows<sup v-if="imap_details.items.length >= get_row_limit()">*</sup>)</div>
<b-table
class="sticky-table-header-0"
small
:items="imap_details.items"
:fields="imap_details.fields">
</b-table>
</div>
</b-tab>
</b-tabs>
</div>

View File

@ -2,7 +2,14 @@
details on the activity of a user
*/
Vue.component('panel-user-activity', function(resolve, reject) {
import wbr_text from "./wbr-text.js";
import chart_table from "./chart-table.js";
import message_headers_view from "./message_headers_view.js";
import UserSettings from "./settings.js";
import { BvTable, MailBvTable, ConnectionDisposition } from "./charting.js";
export default Vue.component('panel-user-activity', function(resolve, reject) {
axios.get('reports/ui/panel-user-activity.html').then((response) => { resolve({
template: response.data,
@ -12,7 +19,9 @@ Vue.component('panel-user-activity', function(resolve, reject) {
},
components: {
'wbr-text': Vue.component('wbr-text'),
'wbr-text': wbr_text,
'message-headers-view': message_headers_view,
'chart-table': chart_table,
},
data: function() {
@ -20,7 +29,8 @@ Vue.component('panel-user-activity', function(resolve, reject) {
Number(this.$route.query.tab) :
0;
return {
user_id: this.$route.query.user || '', /* v-model */
//user_id: this.$route.query.user || '', /* v-model */
user_id: '', /* v-model */
tab_index: start_tab, /* v-model */
show_only_flagged: false,
show_only_flagged_filter: null,
@ -28,6 +38,9 @@ Vue.component('panel-user-activity', function(resolve, reject) {
data_date_range: null, /* date range for active table data */
sent_mail: null,
received_mail: null,
imap_conn_summary: null,
imap_details: null,
lmtp_id: null, /* for message headers modal */
all_users: [],
disposition_formatter: ConnectionDisposition.formatter,
};
@ -38,7 +51,7 @@ Vue.component('panel-user-activity', function(resolve, reject) {
const new_user = this.$route.query.user;
if (new_user && new_user != this.user_id) {
this.user_id = new_user;
this.sync_user_id(new_user);
this.getChartData(isNaN(new_tab) ? 0 : new_tab);
return;
}
@ -64,6 +77,10 @@ Vue.component('panel-user-activity', function(resolve, reject) {
this.getChartData();
}
},
mounted: function() {
this.sync_user_id(this.$route.query.user || '');
},
methods: {
update_route: function() {
@ -76,6 +93,13 @@ Vue.component('panel-user-activity', function(resolve, reject) {
this.$router.replace(route);
}
},
sync_user_id: function(user_id) {
// manually update "model" for <input> to avoid
// slowness with large tables
this.user_id = user_id;
this.$refs.user_id_input.value = user_id;
},
change_user: function() {
this.getChartData(0);
@ -124,12 +148,17 @@ Vue.component('panel-user-activity', function(resolve, reject) {
combine_received_mail_fields: function() {
// remove these fields
this.received_mail.combine_fields([
'remote_host',
'remote_ip',
'dkim_reason',
'dmarc_reason',
'failure_info',
'postgrey_reason',
'postgrey_delay',
'spam_score',
'orig_to'
'orig_to',
'message_id',
'lmtp_id',
]);
// combine fields 'envelope_from' and 'sasl_username'
var f = this.received_mail.combine_fields(
@ -143,6 +172,23 @@ Vue.component('panel-user-activity', function(resolve, reject) {
f.label = 'Envelope From (user)';
},
combine_imap_conn_summary_fields: function() {
// remove 'first_conn_time'
this.imap_conn_summary.combine_fields('first_connection_time');
// clear the label for the 'total' column (pct)
const f_total = this.imap_conn_summary.get_field('total');
f_total.label = '';
},
combine_imap_details_fields: function() {
// remove these fields
this.imap_details.combine_fields([
'remote_host',
'disposition',
]);
},
get_row_limit: function() {
return UserSettings.get().row_limit;
},
@ -235,7 +281,23 @@ Vue.component('panel-user-activity', function(resolve, reject) {
this.received_mail
.flag_fields()
.get_field('connect_time')
.add_tdClass('text-nowrap');
.add_tdClass('text-nowrap');
/* setup imap_conn_summary */
this.imap_conn_summary = new MailBvTable(
response.data.imap_conn_summary
);
this.combine_imap_conn_summary_fields();
this.imap_conn_summary.flag_fields();
['last_connection_time','count']
.forEach(name => {
const f = this.imap_conn_summary.get_field(name);
f.add_cls('text-nowrap', 'tdClass');
});
/* clear imap_details */
this.imap_details = null;
}).catch(error => {
this.$root.handleError(error);
@ -250,6 +312,45 @@ Vue.component('panel-user-activity', function(resolve, reject) {
row_clicked: function(item, index, event) {
item._showDetails = ! item._showDetails;
},
show_message_headers: function(lmtp_id) {
// set the lmtp_id that component message-headers-view
// searches for
this.lmtp_id = lmtp_id;
// show the modal dialog
this.$refs.message_headers_modal.show();
},
// Fetch per-connection IMAP details for one row of the connection
// summary table (invoked via the table's @row-clicked handler).
// `item` carries the clicked row's disposition and remote_host, which
// become server-side filters for the detail query.
load_imap_details: function(item, index, event) {
// bump the parent's loading counter while the request is in flight
this.$emit('loading', 1);
// clear any previously-shown details so stale rows never appear
// under the newly selected summary row
this.imap_details = null;
const promise = axios.post('reports/uidata/imap-details', {
row_limit: this.get_row_limit(),
user_id: this.user_id.trim(),
start_date: this.date_range[0],
end_date: this.date_range[1],
disposition: item.disposition,
remote_host: item.remote_host
}).then(response => {
this.imap_details = new MailBvTable(
response.data.imap_details
);
// drop redundant columns (remote_host/disposition are implied by
// the selected summary row)
this.combine_imap_details_fields();
this.imap_details.get_field('connect_time')
.add_tdClass('text-nowrap');
this.imap_details.get_field('disconnect_time')
.add_tdClass('text-nowrap');
// caption shown above the details table
this.imap_details._desc =
`${item.remote_host}/${item.disposition}`;
}).catch(error => {
this.$root.handleError(error);
}).finally( () => {
// always release the loading counter, success or failure
this.$emit('loading', -1);
});
}
}

View File

@ -1,10 +1,12 @@
Vue.component('reports-page-header', {
import page_header from '../../ui-common/page-header.js';
export default Vue.component('reports-page-header', {
props: {
loading_counter: { type:Number, required:true },
},
components: {
'page-header': Vue.component('page-header'),
'page-header': page_header,
},
template:

View File

@ -1,6 +1,9 @@
import { ValueError } from '../../ui-common/exceptions.js';
window.miabldap = window.miabldap || {};
class CaptureConfig {
export class CaptureConfig {
static get() {
return axios.get('/reports/capture/config').then(response => {
var cc = new CaptureConfig();
@ -11,7 +14,7 @@ class CaptureConfig {
};
class UserSettings {
export default class UserSettings {
static load() {
if (window.miabldap.user_settings) {
return Promise.resolve(window.miabldap.user_settings);

View File

@ -11,7 +11,7 @@
* browser to wrap at any character of the text.
*/
Vue.component('wbr-text', {
export default Vue.component('wbr-text', {
props: {
text: { type:String, required: true },
break_chars: { type:String, default:'@_.,:+=' },

View File

@ -6,6 +6,7 @@ class Timeseries(object):
# start_date: 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'
# start: 'YYYY-MM-DD HH:MM:SS'
self.start = self.full_datetime_str(start_date, False)
self.start_unixepoch = self.unix_time(self.start)
# end_date: 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'
# end: 'YYYY-MM-DD HH:MM:SS'
@ -16,20 +17,12 @@ class Timeseries(object):
# timefmt is a format string for sqlite strftime() that puts a
# sqlite datetime into a "bin" date
self.timefmt='%Y-%m-%d'
self.timefmt = '%Y-%m-%d %H:%M:%S'
# parsefmt is a date parser string to be used to re-interpret
# "bin" grouping dates (data.dates) to native dates
parsefmt='%Y-%m-%d'
b = self.binsizeWithUnit()
if b['unit'] == 'hour':
self.timefmt+=' %H:00:00'
parsefmt+=' %H:%M:%S'
elif b['unit'] == 'minute':
self.timefmt+=' %H:%M:00'
parsefmt+=' %H:%M:%S'
# "bin" grouping dates (data.dates) to native dates. server
# always returns utc dates
self.parsefmt = '%Y-%m-%d %H:%M:%S'
self.dates = [] # dates must be "bin" date strings
self.series = []
@ -38,7 +31,7 @@ class Timeseries(object):
'range': [ self.start, self.end ],
'range_parse_format': '%Y-%m-%d %H:%M:%S',
'binsize': self.binsize,
'date_parse_format': parsefmt,
'date_parse_format': self.parsefmt,
'y': desc,
'dates': self.dates,
'series': self.series
@ -54,6 +47,14 @@ class Timeseries(object):
d = d + datetime.timedelta(days=1)
return d.strftime('%Y-%m-%d 00:00:00')
def unix_time(self, full_datetime_str):
    """Return the unix epoch (integer seconds) for a UTC datetime string.

    `full_datetime_str` must be 'YYYY-MM-DD HH:MM:SS' expressed in UTC
    (the capture db stores all timestamps in UTC).

    Bug fix: the previous implementation appended ' UTC' and parsed with
    '%Z', but strptime('%Z') returns a *naive* datetime, and
    naive.timestamp() interprets the value in the server's local
    timezone - giving a wrong epoch on any host not running UTC.
    Attach timezone.utc explicitly instead so the result is
    timezone-independent.
    """
    d = datetime.datetime.strptime(
        full_datetime_str,
        '%Y-%m-%d %H:%M:%S'
    ).replace(tzinfo=datetime.timezone.utc)
    return int(d.timestamp())
def binsizeWithUnit(self):
# normalize binsize (which is a time span in minutes)
days = int(self.binsize / (24 * 60))
@ -96,12 +97,17 @@ class Timeseries(object):
'''
i = bisect.bisect_right(self.dates, date_str)
if i == len(self.dates):
if len(self.dates)>0 and self.dates[i-1] == date_str:
return i-1
elif i == len(self.dates):
self.dates.append(date_str)
return i
if self.dates[i] == date_str:
return i
self.dates.insert(i, date_str)
else:
self.dates.insert(i, date_str)
''' add zero values to all series for the new date '''
for series in self.series:
series['values'].insert(i, 0)
return i
def add_series(self, id, name):
@ -111,6 +117,8 @@ class Timeseries(object):
'values': []
}
self.series.append(s)
for date in self.dates:
s['values'].append(0)
return s

View File

@ -3,6 +3,7 @@ from .select_list_suggestions import select_list_suggestions
from .messages_sent import messages_sent
from .messages_received import messages_received
from .user_activity import user_activity
from .imap_details import imap_details
from .remote_sender_activity import remote_sender_activity
from .flagged_connections import flagged_connections
from .capture_db_stats import capture_db_stats

View File

@ -17,10 +17,10 @@ def capture_db_stats(conn):
if stats:
return stats
select_1 = 'SELECT min(connect_time) AS `min`, max(connect_time) AS `max`, count(*) AS `count` FROM mta_connection'
select_1 = 'SELECT min(`min`) AS `min`, max(`max`) AS `max`, sum(`count`) AS `count` FROM (SELECT min(connect_time) AS `min`, max(connect_time) AS `max`, count(*) AS `count` FROM mta_connection UNION SELECT min(connect_time) AS `min`, max(connect_time) AS `max`, count(*) AS `count` FROM imap_connection)'
# table scan
select_2 = 'SELECT disposition, count(*) AS `count` FROM mta_connection GROUP BY disposition'
select_2 = 'SELECT disposition, sum(count) as `count` FROM (SELECT disposition, count(*) AS `count` FROM mta_connection GROUP BY disposition UNION SELECT disposition, count(*) AS `count` FROM imap_connection GROUP BY disposition) GROUP BY disposition'
c = conn.cursor()
stats = {
@ -29,7 +29,7 @@ def capture_db_stats(conn):
}
try:
row = c.execute(select_1).fetchone()
stats['mta_connect'] = {
stats['db_stats'] = {
'connect_time': {
'min': row['min'],
'max': row['max'], # YYYY-MM-DD HH:MM:SS (utc)
@ -39,7 +39,7 @@ def capture_db_stats(conn):
}
for row in c.execute(select_2):
stats['mta_connect']['disposition'][row['disposition']] = {
stats['db_stats']['disposition'][row['disposition']] = {
'count': row['count']
}

View File

@ -2,13 +2,36 @@
-- returns count of failed_login_attempt in each 'bin', which is the
-- connection time rounded (as defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
count(*) AS `count`
FROM mta_connection
WHERE
disposition='failed_login_attempt' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
ORDER BY connect_time
SELECT bin, sum(count) AS `count`
FROM (
SELECT
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
count(*) AS `count`
FROM mta_connection
WHERE
disposition='failed_login_attempt' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY bin
UNION
SELECT
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
count(*) AS `count`
FROM imap_connection
WHERE
disposition='failed_login_attempt' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY bin
)
GROUP BY bin
ORDER BY bin

View File

@ -2,13 +2,35 @@
-- returns count of suspected_scanner in each 'bin', which is the
-- connection time rounded (as defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
count(*) AS `count`
FROM mta_connection
WHERE
disposition='suspected_scanner' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
ORDER BY connect_time
SELECT bin, sum(count) AS `count`
FROM (
SELECT
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
count(*) AS `count`
FROM mta_connection
WHERE
disposition='suspected_scanner' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
UNION
SELECT
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
count(*) AS `count`
FROM imap_connection
WHERE
disposition='suspected_scanner' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
)
GROUP BY bin
ORDER BY bin

View File

@ -0,0 +1,19 @@
-- pie chart for "connections by disposition"
--
-- returns a table of disposition along with its count, summed
-- across both the mta_connection and imap_connection tables
--
-- UNION ALL (not UNION) is required here: UNION de-duplicates rows,
-- so if both tables happened to produce an identical
-- (disposition, count) pair, one of them would be silently dropped
-- and the outer sum would undercount
SELECT disposition, sum(count) AS `count`
FROM (
  SELECT disposition, count(*) AS `count`
  FROM mta_connection
  WHERE connect_time>=:start_date AND connect_time<:end_date
  GROUP BY disposition
  UNION ALL
  SELECT disposition, count(*) AS `count`
  FROM imap_connection
  WHERE connect_time>=:start_date AND connect_time<:end_date
  GROUP BY disposition
)
GROUP BY disposition

View File

@ -1,7 +1,10 @@
import logging
from .Timeseries import Timeseries
from .exceptions import InvalidArgsError
from .top import select_top
log = logging.getLogger(__name__)
with open(__file__.replace('.py','.1.sql')) as fp:
select_1 = fp.read()
@ -20,6 +23,9 @@ with open(__file__.replace('.py','.5.sql')) as fp:
with open(__file__.replace('.py','.6.sql')) as fp:
select_6 = fp.read()
with open(__file__.replace('.py','.7.sql')) as fp:
select_7 = fp.read()
def flagged_connections(conn, args):
try:
@ -35,9 +41,8 @@ def flagged_connections(conn, args):
c = conn.cursor()
# pie chart for "connections by disposition"
select = 'SELECT disposition, count(*) AS `count` FROM mta_connection WHERE connect_time>=:start_date AND connect_time<:end_date GROUP BY disposition'
connections_by_disposition = []
for row in c.execute(select, {'start_date':ts.start, 'end_date':ts.end}):
for row in c.execute(select_7, {'start_date':ts.start, 'end_date':ts.end}):
connections_by_disposition.append({
'name': row[0],
'value': row[1]
@ -45,21 +50,27 @@ def flagged_connections(conn, args):
# timeseries = failed logins count
s_failed_login = ts.add_series('failed_login_attempt', 'failed login attempts')
for row in c.execute(select_1.format(timefmt=ts.timefmt), {
sql = select_1.format(timefmt=ts.timefmt)
for row in c.execute(sql, {
'start_date': ts.start,
'end_date': ts.end
'end_date': ts.end,
'start_unixepoch': ts.start_unixepoch,
'binsize': ts.binsize
}):
ts.append_date(row['bin'])
s_failed_login['values'].append(row['count'])
idx = ts.insert_date(row['bin'])
s_failed_login['values'][idx] = row['count']
# timeseries = suspected scanners count
s_scanner = ts.add_series('suspected_scanner', 'connections by suspected scanners')
for row in c.execute(select_2.format(timefmt=ts.timefmt), {
sql = select_2.format(timefmt=ts.timefmt)
for row in c.execute(sql, {
'start_date': ts.start,
'end_date': ts.end
'end_date': ts.end,
'start_unixepoch': ts.start_unixepoch,
'binsize': ts.binsize
}):
ts.insert_date(row['bin'])
s_scanner['values'].append(row['count'])
idx = ts.insert_date(row['bin'])
s_scanner['values'][idx] = row['count']
# pie chart for "disposition=='reject' grouped by failure_category"

View File

@ -0,0 +1,25 @@
--
-- details on user imap connections
--
-- named parameters:
--   :user_id     - required; matches sasl_username exactly
--   :start_date / :end_date - required; half-open range on connect_time
--   :remote_host - optional filter; pass NULL to match all hosts
--                  (matches either the hostname or the ip address)
--   :disposition - optional filter; pass NULL to match all dispositions
--
SELECT
connect_time,
disconnect_time,
-- fall back to the ip address when the reverse-dns name is unknown
CASE WHEN remote_host='unknown' THEN remote_ip ELSE remote_host END AS `remote_host`,
sasl_method,
disconnect_reason,
connection_security,
disposition,
in_bytes,
out_bytes
FROM
imap_connection
WHERE
sasl_username = :user_id AND
connect_time >= :start_date AND
connect_time < :end_date AND
(:remote_host IS NULL OR
remote_host = :remote_host OR remote_ip = :remote_host) AND
(:disposition IS NULL OR
disposition = :disposition)
ORDER BY
connect_time

View File

@ -0,0 +1,83 @@
from .Timeseries import Timeseries
from .exceptions import InvalidArgsError
with open(__file__.replace('.py','.1.sql')) as fp:
select_1 = fp.read()
def imap_details(conn, args):
    '''
    details on imap connections for a single user

    `conn` is an open capture-db connection whose rows are addressable
    by column name.

    required args: user_id, start_date, end_date
    optional args: remote_host, disposition (server-side filters;
                   omitted/None matches all), row_limit (default 1000)

    Returns {'imap_details': {...}} in the standard ui table shape
    (fields / field_types / items).

    Raises InvalidArgsError when a required argument is missing.
    '''
    try:
        user_id = args['user_id']
        # use Timeseries to get a normalized start/end range
        ts = Timeseries(
            'IMAP details',
            args['start_date'],
            args['end_date'],
            0
        )
        # optional filters - the SQL treats NULL as "match all"
        remote_host = args.get('remote_host')
        disposition = args.get('disposition')
    except KeyError:
        raise InvalidArgsError()

    # limit results. int() raises TypeError (not ValueError) for
    # non-string/non-numeric payload values such as None or a list,
    # so catch both and fall back to the default limit instead of
    # letting the request fail with a server error.
    try:
        limit = 'LIMIT ' + str(int(args.get('row_limit', 1000)))
    except (ValueError, TypeError):
        limit = 'LIMIT 1000'

    c = conn.cursor()

    imap_details = {
        'start': ts.start,
        'end': ts.end,
        'y': 'IMAP Details',
        'fields': [
            'connect_time',
            'disconnect_time',
            'remote_host',
            'sasl_method',
            'disconnect_reason',
            'connection_security',
            'disposition',
            'in_bytes',
            'out_bytes'
        ],
        # field_types parallels 'fields', one entry per column
        'field_types': [
            { 'type':'datetime', 'format': ts.parsefmt }, # connect_time
            { 'type':'datetime', 'format': ts.parsefmt }, # disconnect_time
            'text/plain',  # remote_host
            'text/plain',  # sasl_method
            'text/plain',  # disconnect_reason
            'text/plain',  # connection_security
            'text/plain',  # disposition
            'number/size', # in_bytes
            'number/size', # out_bytes
        ],
        'items': []
    }

    for row in c.execute(select_1 + limit, {
            'user_id': user_id,
            'start_date': ts.start,
            'end_date': ts.end,
            'remote_host': remote_host,
            'disposition': disposition
    }):
        # emit row values in the same order as 'fields'
        imap_details['items'].append(
            [ row[key] for key in imap_details['fields'] ]
        )

    return {
        'imap_details': imap_details
    }

View File

@ -3,7 +3,10 @@
-- the connection time rounded (as defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
count(*) AS `count`
FROM mta_accept
JOIN mta_connection ON mta_connection.mta_conn_id = mta_accept.mta_conn_id
@ -11,5 +14,5 @@ WHERE
mta_connection.service = 'smtpd' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
GROUP BY bin
ORDER BY connect_time

View File

@ -40,10 +40,12 @@ def messages_received(conn, args):
try:
for row in c.execute(select_1.format(timefmt=ts.timefmt), {
'start_date':ts.start,
'end_date':ts.end
'end_date':ts.end,
'start_unixepoch':ts.start_unixepoch,
'binsize':ts.binsize
}):
ts.append_date(row['bin'])
s_received['values'].append(row['count'])
idx = ts.insert_date(row['bin'])
s_received['values'][idx] = row['count']
# top 10 senders (envelope_from) by message count

View File

@ -3,7 +3,10 @@
-- time rounded (as defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) as `bin`,
count(*) AS `sent_count`
FROM mta_accept
JOIN mta_connection ON mta_connection.mta_conn_id = mta_accept.mta_conn_id
@ -12,5 +15,5 @@ WHERE
(mta_connection.service = 'submission' OR mta_connection.service = 'pickup') AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
GROUP BY bin
ORDER BY connect_time

View File

@ -4,7 +4,10 @@
-- defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
mta_delivery.service AS `delivery_service`,
count(*) AS `delivery_count`
FROM mta_accept
@ -14,5 +17,5 @@ WHERE
(mta_connection.service = 'submission' OR mta_connection.service = 'pickup') AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time), mta_delivery.service
GROUP BY bin, mta_delivery.service
ORDER BY connect_time

View File

@ -38,10 +38,12 @@ def messages_sent(conn, args):
try:
for row in c.execute(select_1.format(timefmt=ts.timefmt), {
'start_date':ts.start,
'end_date':ts.end
'end_date':ts.end,
'start_unixepoch':ts.start_unixepoch,
'binsize':ts.binsize
}):
ts.dates.append(row['bin'])
s_sent['values'].append(row['sent_count'])
idx = ts.insert_date(row['bin'])
s_sent['values'][idx] = row['sent_count']
date_idx = -1
@ -49,25 +51,16 @@ def messages_sent(conn, args):
# querie's WHERE and JOINs are the same
for row in c.execute(select_2.format(timefmt=ts.timefmt), {
'start_date':ts.start,
'end_date':ts.end
'end_date':ts.end,
'start_unixepoch':ts.start_unixepoch,
'binsize':ts.binsize
}):
if date_idx>=0 and ts.dates[date_idx] == row['bin']:
if row['delivery_service']=='smtp':
s_remote['values'][-1] = row['delivery_count']
elif row['delivery_service']=='lmtp':
s_local['values'][-1] = row['delivery_count']
else:
date_idx += 1
if date_idx >= len(ts.dates):
break
if row['delivery_service']=='smtp':
s_remote['values'].append(row['delivery_count'])
s_local['values'].append(0)
elif row['delivery_service']=='lmtp':
s_remote['values'].append(0)
s_local['values'].append(row['delivery_count'])
date_idx = ts.insert_date(row['bin'])
if row['delivery_service']=='smtp':
s_remote['values'][date_idx] = row['delivery_count']
elif row['delivery_service']=='lmtp':
s_local['values'][date_idx] = row['delivery_count']
top_senders1 = {
'start': ts.start,

View File

@ -4,10 +4,12 @@
SELECT
-- mta_connection
connect_time, mta_connection.service AS service, sasl_username, disposition,
remote_host, remote_ip,
-- mta_accept
envelope_from, spf_result, dkim_result, dkim_reason, dmarc_result, dmarc_reason,
message_id, failure_info,
-- mta_delivery
postgrey_result, postgrey_reason, postgrey_delay, spam_score, spam_result, message_size, orig_to
postgrey_result, postgrey_reason, postgrey_delay, spam_score, spam_result, message_size, orig_to, delivery_info
FROM mta_accept
JOIN mta_connection ON mta_accept.mta_conn_id = mta_connection.mta_conn_id
JOIN mta_delivery ON mta_accept.mta_accept_id = mta_delivery.mta_accept_id

View File

@ -0,0 +1,22 @@
--
-- imap connection summary
--
-- one row per (disposition, remote host) pair for the given user,
-- with connection counts, byte totals and first/last connection
-- times; :start_date/:end_date form a half-open range on connect_time
--
SELECT
count(*) as `count`,
disposition,
-- fall back to the ip address when the reverse-dns name is unknown;
-- the GROUP BY below must repeat this same expression so rows group
-- on the displayed value
CASE WHEN remote_host='unknown' THEN remote_ip ELSE remote_host END AS `remote_host`,
sum(in_bytes) as `in_bytes`,
sum(out_bytes) as `out_bytes`,
min(connect_time) as `first_connection_time`,
max(connect_time) as `last_connection_time`
FROM
imap_connection
WHERE
sasl_username = :user_id AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY
disposition,
CASE WHEN remote_host='unknown' THEN remote_ip ELSE remote_host END
ORDER BY
`count` DESC, disposition

View File

@ -7,6 +7,9 @@ with open(__file__.replace('.py','.1.sql')) as fp:
with open(__file__.replace('.py','.2.sql')) as fp:
select_2 = fp.read()
with open(__file__.replace('.py','.3.sql')) as fp:
select_3 = fp.read()
def user_activity(conn, args):
'''
@ -110,6 +113,8 @@ def user_activity(conn, args):
'connect_time',
'service',
'sasl_username',
'remote_host',
'remote_ip',
# mta_accept
'envelope_from',
@ -119,6 +124,8 @@ def user_activity(conn, args):
'dkim_reason',
'dmarc_result',
'dmarc_reason',
'message_id',
'failure_info',
# mta_delivery
'orig_to',
@ -128,18 +135,23 @@ def user_activity(conn, args):
'spam_score',
'spam_result',
'message_size',
'lmtp_id',
],
'field_types': [
{ 'type':'datetime', 'format': '%Y-%m-%d %H:%M:%S' },# connect_time
'text/plain', # mta_connection.service
'text/email', # sasl_username
'text/plain', # remote_host
'text/plain', # remote_ip
'text/email', # envelope_from
'text/plain', # disposition
'text/plain', # spf_result
'text/plain', # dkim_result
'text/plain', # dkim_result
'text/plain', # dmarc_result
'text/plain', # dmarc_result
'text/plain', # dmarc_reason
'text/plain', # message_id
'text/plain', # failure_info
'text/email', # orig_to
'text/plain', # postgrey_result
'text/plain', # postgrey_reason
@ -147,6 +159,7 @@ def user_activity(conn, args):
{ 'type':'decimal', 'places':2 }, # spam_score
'text/plain', # spam_result
'number/size', # message_size
'text/plain', # lmtp_id
],
'items': []
}
@ -158,12 +171,93 @@ def user_activity(conn, args):
}):
v = []
for key in received_mail['fields']:
v.append(row[key])
if key == 'lmtp_id':
# Extract the LMTP ID from delivery info, which looks
# like:
#
# "250 2.0.0 <user@domain.tld> oPHmBDvTaWA7UwAAlWWVsw
# Saved"
#
# When we know the LMTP ID, we can get the message
# headers using doveadm, like this:
#
# "/usr/bin/doveadm fetch -u "user@domain.tld" hdr
# HEADER received "LMTP id oPHmBDvTaWA7UwAAlWWVsw"
#
delivery_info = row['delivery_info']
valid = False
if delivery_info:
parts = delivery_info.split(' ')
if parts[0]=='250' and parts[1]=='2.0.0':
v.append(parts[-2])
valid = True
if not valid:
v.append(None)
else:
v.append(row[key])
received_mail['items'].append(v)
#
# IMAP connections by disposition, by remote host
# Disposition
# Remote host
# Count
# In bytes (sum)
# Out bytes (sum)
# % of total
#
imap_conn_summary = {
'start': ts.start,
'end': ts.end,
'y': 'IMAP connection summary by host and disposition',
'fields': [
'count',
'total',
'remote_host',
'disposition',
'first_connection_time',
'last_connection_time',
'in_bytes',
'out_bytes',
],
'field_types': [
'number', # count
{ 'type': 'number/percent', 'places': 1 }, # total
'text/plain', # remote_host
'text/plain', # disposition
{ 'type':'datetime', 'format': ts.parsefmt }, # first_conn_time
{ 'type':'datetime', 'format': ts.parsefmt }, # last_conn_time
'number/size', # in_bytes,
'number/size', # out_bytes,
],
'items': []
}
count_field_idx = 0
total_field_idx = 1
total = 0
for row in c.execute(select_3 + limit, {
'user_id': user_id,
'start_date': ts.start,
'end_date': ts.end
}):
v = []
for key in imap_conn_summary['fields']:
if key=='count':
total += row[key]
if key!='total':
v.append(row[key])
imap_conn_summary['items'].append(v)
for v in imap_conn_summary['items']:
v.insert(total_field_idx, v[count_field_idx] / total)
return {
'sent_mail': sent_mail,
'received_mail': received_mail
'received_mail': received_mail,
'imap_conn_summary': imap_conn_summary
}

View File

@ -1,4 +1,7 @@
class Me {
import { AuthenticationError } from './exceptions.js';
export class Me {
/* construct with return value from GET /me */
constructor(me) {
Object.assign(this, me);
@ -18,7 +21,7 @@ class Me {
* axios interceptors for authentication
*/
function init_axios_interceptors() {
export function init_authentication_interceptors() {
// requests: attach non-session based auth (admin panel)
axios.interceptors.request.use(request => {
@ -38,7 +41,8 @@ function init_axios_interceptors() {
});
// responses: redirect on authorization failure
// responses: handle authorization failures by throwing exceptions
// users should catch AuthenticationError exceptions
axios.interceptors.response.use(
response => {
if (response.data &&

View File

@ -1,13 +1,13 @@
class ValueError extends Error {
export class ValueError extends Error {
constructor(msg) {
super(msg);
}
};
class AssertionError extends Error {
export class AssertionError extends Error {
}
class AuthenticationError extends Error {
export class AuthenticationError extends Error {
constructor(caused_by_error, msg, response) {
super(msg);
this.caused_by = caused_by_error;

View File

@ -1,8 +1,8 @@
Vue.component('spinner', {
var spinner = Vue.component('spinner', {
template: '<span class="spinner-border spinner-border-sm"></span>'
});
Vue.component('page-header', function(resolve, reject) {
var header = Vue.component('page-header', function(resolve, reject) {
axios.get('ui-common/page-header.html').then((response) => { resolve({
props: {
@ -17,3 +17,5 @@ Vue.component('page-header', function(resolve, reject) {
});
});
export { spinner, header as default };

View File

@ -1,4 +1,4 @@
Vue.component('page-layout', function(resolve, reject) {
export default Vue.component('page-layout', function(resolve, reject) {
axios.get('ui-common/page-layout.html').then((response) => { resolve({
template: response.data,

View File

@ -27,6 +27,6 @@ fi
sed "s|%BIN%|$(pwd)|g" conf/miabldap-capture.service > /etc/systemd/system/miabldap-capture.service
systemctl daemon-reload
systemctl enable miabldap-capture
systemctl start miabldap-capture
hide_output systemctl daemon-reload
hide_output systemctl enable miabldap-capture
restart_service miabldap-capture