commit 9a9a699be6
Merge branch 'reporting' into postgrey-whitelist
management/reporting/capture/.gitignore (vendored, 2 deletions)
@@ -1,2 +0,0 @@
-tests/
-run.sh
management/reporting/capture/capture.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 from logs.TailFile import TailFile
-from mail.InboundMailLogHandler import InboundMailLogHandler
+from mail.PostfixLogHandler import PostfixLogHandler
+from mail.DovecotLogHandler import DovecotLogHandler
 from logs.ReadPositionStoreInFile import ReadPositionStoreInFile
 from db.SqliteConnFactory import SqliteConnFactory
 from db.SqliteEventStore import SqliteEventStore
@@ -50,6 +51,7 @@ options = {
     'daemon': True,
     'log_level': logging.WARNING,
     'log_file': "/var/log/mail.log",
+    'stop_at_eof': False,
     'pos_file': "/var/lib/mailinabox/capture-pos.json",
     'sqlite_file': os.path.join(CAPTURE_STORAGE_ROOT, 'capture.sqlite'),
     'working_dir': "/var/run/mailinabox",
@@ -122,6 +124,10 @@ def process_cmdline(options):
         elif arg=='-logfile' and have_next:
             argi+=1
             options['log_file'] = sys.argv[argi]

+        elif arg=='-stopateof':
+            argi+=1
+            options['stop_at_eof'] = True
+
         elif arg=='-posfile' and have_next:
             argi+=1
@@ -220,14 +226,21 @@ try:
     )
     mail_tail = TailFile(
         options['log_file'],
-        position_store
+        position_store,
+        options['stop_at_eof']
     )
-    inbound_mail_handler = InboundMailLogHandler(
+    postfix_log_handler = PostfixLogHandler(
         event_store,
         capture_enabled = options['config'].get('capture',True),
         drop_disposition = options['config'].get('drop_disposition')
     )
-    mail_tail.add_handler(inbound_mail_handler)
+    mail_tail.add_handler(postfix_log_handler)
+    dovecot_log_handler = DovecotLogHandler(
+        event_store,
+        capture_enabled = options['config'].get('capture',True),
+        drop_disposition = options['config'].get('drop_disposition')
+    )
+    mail_tail.add_handler(dovecot_log_handler)
     pruner = Pruner(
         db_conn_factory,
         policy=options['config']['prune_policy']
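The wiring above is the whole pipeline: one TailFile reader thread fans each log line out to every registered handler, and the new stop_at_eof flag turns the same pipeline into a one-shot batch loader. A minimal sketch of the handler contract this relies on (EchoHandler and the commented calls are illustrative, not part of the commit):

```python
# Sketch only: a handler needs handle() plus end_of_callbacks(), which
# TailFile calls for each line and at shutdown/EOF respectively.
class EchoHandler:
    def handle(self, line):
        print('line:', line.rstrip())

    def end_of_callbacks(self, thread):
        print('EOF; a real handler saves its in-progress state here')

# mail_tail = TailFile('/var/log/mail.log', position_store, True)  # stop_at_eof=True
# mail_tail.add_handler(EchoHandler())
# mail_tail.start(); mail_tail.join()  # join() returns at EOF when stop_at_eof is set
```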
@@ -245,17 +258,6 @@ def terminate(sig, stack):
     log.warning("shutting down due to SIGTERM")
     log.debug("stopping mail_tail")
     mail_tail.stop()
-    log.debug("stopping pruner")
-    pruner.stop()
-    log.debug("stopping position_store")
-    position_store.stop()
-    log.debug("stopping event_store")
-    event_store.stop()
-    try:
-        os.remove(options['_runtime_config_file'])
-    except Exception:
-        pass
-    log.info("stopped")

 # reload settings handler
 def reload(sig, stack):
@@ -266,8 +268,10 @@ def reload(sig, stack):
         options['config']['default_config'] = False
         options['_config_file'] = config_default_file

-    log.info('%s records are in-progress',
-             inbound_mail_handler.get_inprogress_count())
+    log.info('%s mta records are in-progress',
+             postfix_log_handler.get_inprogress_count())
+    log.info('%s imap records are in-progress',
+             dovecot_log_handler.get_inprogress_count())

     if options['_config_file']:
         log.info('reloading %s', options['_config_file'])
@@ -276,10 +280,16 @@ def reload(sig, stack):
             pruner.set_policy(
                 newconfig['prune_policy']
             )
-            inbound_mail_handler.set_capture_enabled(
+            postfix_log_handler.set_capture_enabled(
                 newconfig.get('capture', True)
             )
-            inbound_mail_handler.update_drop_disposition(
+            postfix_log_handler.update_drop_disposition(
                 newconfig.get('drop_disposition', {})
             )
+            dovecot_log_handler.set_capture_enabled(
+                newconfig.get('capture', True)
+            )
+            dovecot_log_handler.update_drop_disposition(
+                newconfig.get('drop_disposition', {})
+            )
             write_config(options['_runtime_config_file'], newconfig)
@@ -298,3 +308,15 @@ signal.signal(signal.SIGHUP, reload)
 mail_tail.start()
 mail_tail.join()

+# gracefully close other threads
+log.debug("stopping pruner")
+pruner.stop()
+log.debug("stopping position_store")
+position_store.stop()
+log.debug("stopping event_store")
+event_store.stop()
+try:
+    os.remove(options['_runtime_config_file'])
+except Exception:
+    pass
+log.info("stopped")
management/reporting/capture/db/EventStore.py
@@ -74,7 +74,6 @@ class EventStore(Prunable):
         self.t.join()

     def __del__(self):
-        log.debug('EventStore __del__')
         self.interrupt.set()
         self.have_event.set()

management/reporting/capture/db/SqliteEventStore.py
@@ -71,6 +71,23 @@ mta_delivery_fields = [
     'failure_category',
 ]

+imap_conn_fields = [
+    'service',
+    'service_tid',
+    'connect_time',
+    'disconnect_time',
+    'disconnect_reason',
+    'remote_host',
+    'remote_ip',
+    'sasl_method',
+    'sasl_username',
+    'remote_auth_success',
+    'remote_auth_attempts',
+    'connection_security',
+    'in_bytes',
+    'out_bytes',
+    'disposition'
+]
+
 db_info_create_table_stmt = "CREATE TABLE IF NOT EXISTS db_info(id INTEGER PRIMARY KEY AUTOINCREMENT, key TEXT NOT NULL, value TEXT NOT NULL)"

@@ -162,7 +179,33 @@ schema_updates = [
     [
         "ALTER TABLE mta_delivery ADD COLUMN orig_to TEXT COLLATE NOCASE",
         "UPDATE db_info SET value='1' WHERE key='schema_version'"
-    ]
+    ],
+
+    # update 2
+    [
+        "CREATE TABLE imap_connection(\
+        imap_conn_id INTEGER PRIMARY KEY AUTOINCREMENT,\
+        service TEXT NOT NULL, /* 'imap', 'imap-login', 'pop3-login' */\
+        service_tid TEXT,\
+        connect_time TEXT NOT NULL,\
+        disconnect_time TEXT,\
+        disconnect_reason TEXT,\
+        remote_host TEXT COLLATE NOCASE,\
+        remote_ip TEXT COLLATE NOCASE,\
+        sasl_method TEXT, /* eg. 'PLAIN' */\
+        sasl_username TEXT COLLATE NOCASE,\
+        remote_auth_success INTEGER, /* count of successes */\
+        remote_auth_attempts INTEGER, /* count of attempts */\
+        connection_security TEXT, /* eg 'TLS' */\
+        in_bytes INTEGER,\
+        out_bytes INTEGER,\
+        disposition TEXT /* 'ok','failed_login_attempt',etc */\
+        )",
+
+        "CREATE INDEX idx_imap_connection_connect_time ON imap_connection(connect_time, sasl_username COLLATE NOCASE)",
+
+        "UPDATE db_info SET value='2' WHERE key='schema_version'"
+    ],
+
 ]

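Each element of schema_updates is a list of statements that migrates the database one version forward, with the element's last statement recording the new version in db_info. The driver that walks this list is not shown in the hunk; a hedged sketch of the assumed mechanic:

```python
import sqlite3

def apply_schema_updates(conn, schema_updates, current_version):
    # assumption: element i migrates the schema to version i+1, which
    # matches the "# update 2" labeling and the UPDATE db_info statements
    for i in range(current_version, len(schema_updates)):
        for stmt in schema_updates[i]:
            conn.execute(stmt)
        conn.commit()
```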
@@ -209,9 +252,12 @@ class SqliteEventStore(EventStore):


     def write_rec(self, conn, type, rec):
-        if type=='inbound_mail':
-            #log.debug('wrote inbound_mail record')
-            self.write_inbound_mail(conn, rec)
+        if type=='mta_mail':
+            self.write_mta_mail(conn, rec)
+
+        elif type=='imap_mail':
+            self.write_imap_mail(conn, rec)
+
         elif type=='state':
             ''' rec: {
                    owner_id: int,
@@ -246,7 +292,7 @@ class SqliteEventStore(EventStore):
         return values


-    def write_inbound_mail(self, conn, rec):
+    def write_mta_mail(self, conn, rec):
         c = None
         try:
             c = conn.cursor()
@@ -282,6 +328,28 @@ class SqliteEventStore(EventStore):



+    def write_imap_mail(self, conn, rec):
+        c = None
+        try:
+            c = conn.cursor()
+
+            # imap_connection
+            insert = self._insert('imap_connection', imap_conn_fields)
+            values = self._values(imap_conn_fields, rec)
+            #log.debug('INSERT: %s VALUES: %s REC=%s', insert, values, rec)
+            c.execute(insert, values)
+            conn_id = c.lastrowid
+
+            conn.commit()
+
+        except sqlite3.Error as e:
+            conn.rollback()
+            raise e
+
+        finally:
+            if c: c.close(); c=None
+
+
     def write_state(self, conn, rec):
         c = None
         try:
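write_imap_mail leans on the `_insert` and `_values` helpers defined elsewhere in SqliteEventStore; presumably they pair the field list above with a parameterized statement, roughly like this sketch:

```python
def _insert(table, fields):
    # e.g. _insert('imap_connection', imap_conn_fields) ->
    #   "INSERT INTO imap_connection (service,service_tid,...) VALUES (?,?,...)"
    placeholders = ','.join('?' for _ in fields)
    return 'INSERT INTO %s (%s) VALUES (%s)' % (table, ','.join(fields), placeholders)

def _values(fields, rec):
    # missing keys become None, i.e. NULL in sqlite
    return [rec.get(f) for f in fields]
```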
@@ -354,7 +422,9 @@ class SqliteEventStore(EventStore):
          JOIN mta_connection ON mta_connection.mta_conn_id = mta_accept.mta_conn_id\
          WHERE connect_time < ?)',

-        'DELETE FROM mta_connection WHERE connect_time < ?'
+        'DELETE FROM mta_connection WHERE connect_time < ?',
+
+        'DELETE FROM imap_connection WHERE connect_time < ?',
     ]

     counts = []
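All of the DELETE statements take the same single bind parameter, so the pruner can presumably run them uniformly against one cutoff timestamp. A sketch, assuming `conn` is an open sqlite3 connection:

```python
def prune_older_than(conn, prune_statements, cutoff):
    # cutoff is an ISO-8601 string like '2025-03-01 00:00:00' -- the
    # same sqlite-friendly format datetime_as_str() produces
    counts = []
    for stmt in prune_statements:
        cur = conn.execute(stmt, (cutoff,))
        counts.append(cur.rowcount)
    conn.commit()
    return counts
```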
management/reporting/capture/logs/ReadPositionStoreInFile.py
@@ -29,7 +29,6 @@ class ReadPositionStoreInFile(ReadPositionStore):
         self.t.start()

     def __del__(self):
-        log.debug('ReadPositionStoreInFile __del__')
         self.interrupt.set()

     def stop(self):
management/reporting/capture/logs/TailFile.py
@@ -14,12 +14,13 @@ of ReadLineHandler.
 '''

 class TailFile(threading.Thread):
-    def __init__(self, log_file, store=None):
+    def __init__(self, log_file, store=None, stop_at_eof=False):
         ''' log_file - the log file to monitor
             store - a ReadPositionStore instance
         '''
         self.log_file = log_file
         self.store = store
+        self.stop_at_eof = stop_at_eof

         self.fp = None
         self.inode = None
@@ -31,7 +32,6 @@ class TailFile(threading.Thread):
         super(TailFile, self).__init__(name=name, daemon=True)

     def stop(self, do_join=True):
-        log.debug('TailFile stopping')
         self.interrupt.set()
         # close must be called to unblock the thread fp.readline() call
         self._close()
@@ -72,15 +72,11 @@ class TailFile(threading.Thread):

     def _issue_callbacks(self, line):
         for cb in self.callbacks:
-            if isinstance(cb, ReadLineHandler):
-                cb.handle(line)
-            else:
-                cb(line)
+            cb.handle(line)

     def _notify_end_of_callbacks(self):
         for cb in self.callbacks:
-            if isinstance(cb, ReadLineHandler):
-                cb.end_of_callbacks(self)
+            cb.end_of_callbacks(self)

     def _restore_read_position(self):
         if self.fp is None:
@@ -122,6 +118,9 @@ class TailFile(threading.Thread):
                     line = self.fp.readline() # blocking
                     if line=='':
                         log.debug('got EOF')
+                        if self.stop_at_eof:
+                            self.interrupt.set()
+
                         # EOF - check if file was rotated
                         if self._is_rotated():
                             log.debug('rotated')
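The `line==''` test works because file.readline() returns an empty string only at end-of-file (a blank log line still arrives as '\n'), which is what makes a polling tail, and now the stop_at_eof short-circuit, possible. The idea in isolation:

```python
import time

def tail(path, stop_at_eof=False):
    with open(path) as fp:
        while True:
            line = fp.readline()   # '' only at EOF
            if line == '':
                if stop_at_eof:
                    return         # batch mode: one pass and out
                time.sleep(1)      # daemon mode: wait for more data
                continue
            yield line
```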
@@ -144,6 +143,7 @@ class TailFile(threading.Thread):
                         self._issue_callbacks(line)

                 except Exception as e:
                     log.error('exception processing line: %s', line)
+                    log.exception(e)
                     if self.interrupt.wait(1) is not True:
                         if self._is_rotated():
management/reporting/capture/mail/CommonHandler.py (new file, 128 lines)
@@ -0,0 +1,128 @@
+import logging
+import re
+import datetime
+import traceback
+import ipaddress
+import threading
+from logs.ReadLineHandler import ReadLineHandler
+import logs.DateParser
+from db.EventStore import EventStore
+from util.DictQuery import DictQuery
+from util.safe import (safe_int, safe_append, safe_del)
+
+log = logging.getLogger(__name__)
+
+
+class CommonHandler(ReadLineHandler):
+    '''
+    '''
+    def __init__(self, state_cache_owner_id, record_store,
+                 date_regexp = logs.DateParser.rsyslog_traditional_regexp,
+                 date_parser_fn = logs.DateParser.rsyslog_traditional,
+                 capture_enabled = True,
+                 drop_disposition = None
+    ):
+        self.state_cache_owner_id = state_cache_owner_id
+
+        ''' EventStore instance for persisting "records" '''
+        self.record_store = record_store
+        self.set_capture_enabled(capture_enabled)
+
+        # our in-progress record queue is a simple list
+        self.recs = self.get_cached_state(clear=True)
+        self.current_inprogress_recs = len(self.recs)
+
+        # records that have these dispositions will be dropped (not
+        # recorded in the record store)
+        self.drop_disposition_lock = threading.Lock()
+        self.drop_disposition = {
+            'failed_login_attempt': False,
+            'suspected_scanner': False,
+            'reject': False
+        }
+        self.update_drop_disposition(drop_disposition)
+
+        # regular expression that matches a syslog date (always anchored)
+        self.date_regexp = date_regexp
+        if date_regexp.startswith('^'):
+            self.date_regexp = date_regexp[1:]
+
+        # function that parses the syslog date
+        self.date_parser_fn = date_parser_fn
+
+
+    def get_inprogress_count(self):
+        ''' thread-safe '''
+        return self.current_inprogress_recs
+
+    def update_inprogress_count(self):
+        self.current_inprogress_recs = len(self.recs)
+
+    def set_capture_enabled(self, capture_enabled):
+        ''' thread-safe '''
+        self.capture_enabled = capture_enabled
+
+    def update_drop_disposition(self, drop_disposition):
+        ''' thread-safe '''
+        with self.drop_disposition_lock:
+            self.drop_disposition.update(drop_disposition)
+
+    def test_drop_disposition(self, disposition):
+        with self.drop_disposition_lock:
+            return self.drop_disposition.get(disposition, False)
+
+    def datetime_as_str(self, d):
+        # iso-8601 time friendly to sqlite3
+        timestamp = d.isoformat(sep=' ', timespec='seconds')
+        # strip the utc offset from the iso format (ie. remove "+00:00")
+        idx = timestamp.find('+00:00')
+        if idx>0:
+            timestamp = timestamp[0:idx]
+        return timestamp
+
+    def parse_date(self, str):
+        # we're expecting UTC times from date_parser()
+        d = self.date_parser_fn(str)
+        return self.datetime_as_str(d)
+
+    def get_cached_state(self, clear=True):
+        conn = None
+        try:
+            # obtain the cached records from the record store
+            conn = self.record_store.connect()
+            recs = self.record_store.read_rec(conn, 'state', {
+                "owner_id": self.state_cache_owner_id,
+                "clear": clear
+            })
+            log.info('read %s incomplete records from cache %s', len(recs), self.state_cache_owner_id)
+
+            # eliminate stale records - "stale" should be longer than
+            # the "give-up" time for postfix (4-5 days)
+            stale = datetime.timedelta(days=7)
+            cutoff = self.datetime_as_str(
+                datetime.datetime.now(datetime.timezone.utc) - stale
+            )
+            newlist = [ rec for rec in recs if rec['connect_time'] >= cutoff ]
+            if len(newlist) < len(recs):
+                log.warning('dropping %s stale incomplete records',
+                            len(recs) - len(newlist))
+            return newlist
+        finally:
+            if conn: self.record_store.close(conn)
+
+    def save_state(self):
+        log.info('saving state to cache %s: %s records', self.state_cache_owner_id, len(self.recs))
+        self.record_store.store('state', {
+            'owner_id': self.state_cache_owner_id,
+            'state': self.recs
+        })
+
+    def end_of_callbacks(self, thread):
+        '''overrides ReadLineHandler method
+
+        save incomplete records so we can pick up where we left off
+
+        '''
+        self.update_inprogress_count()
+        self.save_state()
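CommonHandler now owns the record queue, drop-disposition filtering, date parsing, and state caching, so a concrete handler only has to pick a unique state-cache owner id and implement the line matching. A hypothetical minimal subclass (names illustrative):

```python
from mail.CommonHandler import CommonHandler

class ExampleLogHandler(CommonHandler):
    # owner ids 1 and 2 are taken by PostfixLogHandler and DovecotLogHandler
    STATE_CACHE_OWNER_ID = 99

    def __init__(self, record_store):
        super(ExampleLogHandler, self).__init__(
            ExampleLogHandler.STATE_CACHE_OWNER_ID, record_store)

    def handle(self, line):
        if not self.capture_enabled:
            return
        self.update_inprogress_count()
        # ...match `line` against service-specific regexes, queue the
        # record in self.recs, and store it once it is complete...
```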
management/reporting/capture/mail/DovecotLogHandler.py (new file, 534 lines)
@@ -0,0 +1,534 @@
+import logging
+import re
+import datetime
+import ipaddress
+import threading
+from logs.ReadLineHandler import ReadLineHandler
+import logs.DateParser
+from db.EventStore import EventStore
+from util.DictQuery import DictQuery
+from util.safe import (safe_int, safe_append, safe_del)
+from .CommonHandler import CommonHandler
+
+log = logging.getLogger(__name__)
+
+
+STATE_CACHE_OWNER_ID = 2
+
+class DovecotLogHandler(CommonHandler):
+    '''
+    '''
+    def __init__(self, record_store,
+                 date_regexp = logs.DateParser.rsyslog_traditional_regexp,
+                 date_parser_fn = logs.DateParser.rsyslog_traditional,
+                 capture_enabled = True,
+                 drop_disposition = None
+    ):
+        super(DovecotLogHandler, self).__init__(
+            STATE_CACHE_OWNER_ID,
+            record_store,
+            date_regexp,
+            date_parser_fn,
+            capture_enabled,
+            drop_disposition
+        )
+
+        # A "record" is composed by parsing all the syslog output from
+        # the activity generated by dovecot (imap, pop3) from a single
+        # remote connection. Once a full history of the connection is
+        # seen, the record is written to the record_store.
+        #
+        # `recs` is an array holding incomplete, in-progress
+        # "records". This array has the following format:
+        #
+        # (For convenience, it's easier to refer to the table column
+        # names found in SqliteEventStore for the dict member names that
+        # are used here since they're all visible in one place.)
+        #
+        # [{
+        #    ... fields of the imap_connection table ...
+        # }]
+        #
+        # IMPORTANT:
+        #
+        # No methods in this class are safe to call by any thread
+        # other than the caller of handle(), unless marked as
+        # thread-safe.
+        #
+
+        # maximum size of the in-progress record queue (should be the
+        # same or greater than the maximum simultaneous dovecot/imap
+        # connections allowed, which is dovecot settings
+        # `process_limit` times `client_limit`, which defaults to 100
+        # * 1000)
+        self.max_inprogress_recs = 100 * 1000
+
+
+        # 1a. imap-login: Info: Login: user=<keith@just1w.com>, method=PLAIN, rip=146.168.130.9, lip=192.155.92.185, mpid=5866, TLS, session=<IF3v7ze27dKSqIIJ>
+        # 1=date
+        # 2=service ("imap-login" or "pop3-login")
+        # 3=sasl_username ("user@domain.com")
+        # 4=sasl_method ("PLAIN")
+        # 5=remote_ip ("146.168.130.9")
+        # 6=local_ip
+        # 7=service_tid ("5866")
+        # 8=connection_security ("TLS")
+        self.re_connect_success = re.compile('^' + self.date_regexp + ' (imap-login|pop3-login): Info: Login: user=<([^>]*)>, method=([^,]*), rip=([^,]+), lip=([^,]+), mpid=([^,]+), ([^,]+)')
+
+        # 1a. imap-login: Info: Disconnected (auth failed, 1 attempts in 4 secs): user=<fernando@athigo.com>, method=PLAIN, rip=152.67.63.172, lip=192.155.92.185, TLS: Disconnected, session=<rho/Rjq2EqSYQz+s>
+        # 1=date
+        # 2=service ("imap-login" or "pop3-login")
+        # 3=disconnect_reason
+        # 4=remote_auth_attempts
+        # 5=sasl_username
+        # 6=sasl_method
+        # 7=remote_ip
+        # 8=local_ip
+        # 9=connection_security
+        self.re_connect_fail_1 = re.compile('^' + self.date_regexp + ' (imap-login|pop3-login): Info: (?:Disconnected|Aborted login) \(([^,]+), (\d+) attempts[^\)]*\): user=<([^>]*)>, method=([^,]+), rip=([^,]+), lip=([^,]+), ([^,]+)')
+
+        # 2a. pop3-login: Info: Disconnected (no auth attempts in 2 secs): user=<>, rip=196.52.43.85, lip=192.155.92.185, TLS handshaking: SSL_accept() failed: error:14209102:SSL routines:tls_early_post_process_client_hello:unsupported protocol, session=<ehaSaDm2x9vENCtV>
+        # 2b. imap-login: Info: Disconnected (no auth attempts in 2 secs): user=<>, rip=92.118.160.61, lip=192.155.92.185, TLS handshaking: SSL_accept() failed: error:14209102:SSL routines:tls_early_post_process_client_hello:unsupported protocol, session=<cvmKhjq2qtJcdqA9>
+        # 1=date
+        # 2=service ("imap-login" or "pop3-login")
+        # 3=disconnect_reason
+        # 4=sasl_username ("")
+        # 5=remote_ip ("146.168.130.9")
+        # 6=local_ip
+        # 7=connection_security
+        self.re_connect_fail_2 = re.compile('^' + self.date_regexp + ' (imap-login|pop3-login): Info: (?:Disconnected|Aborted login) \(([^\)]*)\): user=<([^>]*)>, rip=([^,]+), lip=([^,]+), ([^,]+)')
+
+        # 3a. imap-login: Info: Disconnected (client didn't finish SASL auth, waited 0 secs): user=<>, method=PLAIN, rip=107.107.63.148, lip=192.155.92.185, TLS: SSL_read() syscall failed: Connection reset by peer, session=<rmBsIP21Zsdraz+U>
+        # 1=date
+        # 2=service ("imap-login" or "pop3-login")
+        # 3=disconnect_reason
+        # 4=sasl_username ("")
+        # 5=sasl_method
+        # 6=remote_ip ("146.168.130.9")
+        # 7=local_ip
+        # 8=connection_security
+        self.re_connect_fail_3 = re.compile('^' + self.date_regexp + ' (imap-login|pop3-login): Info: (?:Disconnected|Aborted login) \(([^\)]*)\): user=<([^>]*)>, method=([^,]+), rip=([^,]+), lip=([^,]+), ([^,]+)')
+
+        # 4a. pop3-login: Info: Disconnected: Too many bad commands (no auth attempts in 0 secs): user=<>, rip=83.97.20.35, lip=192.155.92.185, TLS, session=<BH8JRCi2nJ5TYRQj>
+        # 1=date
+        # 2=service ("imap-login" or "pop3-login")
+        # 3=disconnect_reason
+        # 4=sasl_username ("")
+        # 5=remote_ip ("146.168.130.9")
+        # 6=local_ip
+        # 7=connection_security
+        self.re_connect_fail_4 = re.compile('^' + self.date_regexp + ' (imap-login|pop3-login): Info: Disconnected: ([^\(]+) \(no auth attempts [^\)]+\): user=<([^>]*)>, rip=([^,]+), lip=([^,]+), ([^,]+)')
+
+        # 5a. imap-login: Info: Disconnected: Too many bad commands (auth failed, 1 attempts in 4 secs): user=<fernando@athigo.com>, method=PLAIN, rip=152.67.63.172, lip=192.155.92.185, TLS: Disconnected, session=<rho/Rjq2EqSYQz+s>
+        # 1=date
+        # 2=service ("imap-login" or "pop3-login")
+        # 3=disconnect_reason
+        # 4=remote_auth_attempts
+        # 5=sasl_username
+        # 6=sasl_method
+        # 7=remote_ip
+        # 8=local_ip
+        # 9=connection_security
+        self.re_connect_fail_5 = re.compile('^' + self.date_regexp + ' (imap-login|pop3-login): Info: (?:Disconnected|Aborted login): ([^\(]+) \(auth failed, (\d+) attempts [^\)]+\): user=<([^>]*)>, method=([^,]+), rip=([^,]+), lip=([^,]+), ([^,]+)')
+
+
+        # 1a. imap(jzw@just1w.com): Info: Logged out in=29 out=496
+        #
+        # 1b. imap(jzw@just1w.com): Info: Connection closed (IDLE running for 0.001 + waiting input for 5.949 secs, 2 B in + 10 B out, state=wait-input) in=477 out=6171
+        # 1c. imap(jzw@just1w.com): Info: Connection closed (UID STORE finished 0.225 secs ago) in=8099 out=21714
+        # 1d. imap(jzw@just1w.com): Info: Connection closed (LIST finished 115.637 secs ago) in=629 out=11130
+        #
+        # 1e. imap(jzw@just1w.com): Info: Connection closed (APPEND finished 0.606 secs ago) in=11792 out=10697
+        #
+        # 1f. imap(jzw@just1w.com): Info: Disconnected for inactivity in=1518 out=2962
+        #
+        # 1g. imap(keith@just1w.com): Info: Server shutting down. in=720 out=7287
+        # 1=date
+        # 2=service ("imap" or "pop3")
+        # 3=sasl_username
+        # 4=disconnect_reason ("Disconnected for inactivity")
+        # 5=in_bytes
+        # 6=out_bytes
+        self.re_disconnect = re.compile('^' + self.date_regexp + ' (imap|pop3)\(([^\)]*)\): Info: ((?:Logged out|Connection closed|Disconnected|Server shutting down).*) in=(\d+) out=(\d+)')
+
+
+    def add_new_connection(self, imap_conn):
+        ''' queue an imap_connection record '''
+        threshold = self.max_inprogress_recs + ( len(self.recs) * 0.05 )
+        if len(self.recs) > threshold:
+            backoff = len(self.recs) - self.max_inprogress_recs + int( self.max_inprogress_recs * 0.10 )
+            log.warning('dropping %s old imap records', backoff)
+            self.recs = self.recs[min(len(self.recs),backoff):]
+
+        self.recs.append(imap_conn)
+        return imap_conn
+
+    def remove_connection(self, imap_conn):
+        ''' remove a imap_connection record from queue '''
+        self.recs.remove(imap_conn)
+
+    def find_by(self, imap_conn_q, debug=False):
+        '''find records using field-matching queries
+
+        return a list of imap_conn matching query `imap_conn_q`
+        '''
+
+        if debug:
+            log.debug('imap_conn_q: %s', imap_conn_q)
+
+        # find all candidate recs with matching imap_conn_q, ordered by most
+        # recent last
+        candidates = DictQuery.find(self.recs, imap_conn_q, reverse=False)
+        if len(candidates)==0:
+            if debug: log.debug('no candidates')
+            return []
+
+        elif not candidates[0]['exact']:
+            # there were no exact matches. apply autosets to the best
+            # match requiring the fewest number of autosets (index 0)
+            if debug: log.debug('no exact candidates')
+            DictQuery.autoset(candidates[0])
+            candidates[0]['exact'] = True
+            candidates = [ candidates[0] ]
+
+        else:
+            # there is at least one exact match - remove all non-exact
+            # candidates
+            candidates = [
+                candidate for candidate in candidates if candidate['exact']
+            ]
+
+        return [ candidate['item'] for candidate in candidates ]
+
+
+    def find_first(self, *args, **kwargs):
+        '''find the "best" result and return it - find_by() returns the list
+        ordered, with the first being the "best"
+
+        '''
+        r = self.find_by(*args, **kwargs)
+        if len(r)==0:
+            return None
+        return r[0]
+
+    def match_connect_success(self, line):
+        # 1=date
+        # 2=service ("imap-login" or "pop3-login")
+        # 3=sasl_username ("user@domain.com")
+        # 4=sasl_method ("PLAIN")
+        # 5=remote_ip ("146.168.130.9")
+        # 6=local_ip
+        # 7=service_tid ("5866")
+        # 8=connection_security ("TLS")
+        m = self.re_connect_success.search(line)
+        if m:
+            imap_conn = {
+                "connect_time": self.parse_date(m.group(1)), # "YYYY-MM-DD HH:MM:SS"
+                "service": m.group(2),
+                "sasl_username": m.group(3),
+                "sasl_method": m.group(4),
+                "remote_host": "unknown",
+                "remote_ip": m.group(5),
+                "service_tid": m.group(7),
+                "connection_security": m.group(8),
+                "remote_auth_success": 1,
+                "remote_auth_attempts": 1
+            }
+            self.add_new_connection(imap_conn)
+            return { 'imap_conn': imap_conn }
+
+    def match_connect_fail(self, line):
+        m = self.re_connect_fail_1.search(line)
+        if m:
+            # 1a. imap-login: Info: Disconnected (auth failed, 1 attempts in 4 secs): user=<fernando@athigo.com>, method=PLAIN, rip=152.67.63.172, lip=192.155.92.185, TLS: Disconnected, session=<rho/Rjq2EqSYQz+s>
+            # 1=date
+            # 2=service ("imap-login" or "pop3-login")
+            # 3=disconnect_reason
+            # 4=remote_auth_attempts
+            # 5=sasl_username
+            # 6=sasl_method
+            # 7=remote_ip
+            # 8=local_ip
+            # 9=connection_security
+            d = self.parse_date(m.group(1)) # "YYYY-MM-DD HH:MM:SS"
+            imap_conn = {
+                "connect_time": d,
+                "disconnect_time": d,
+                "disconnect_reason": m.group(3),
+                "service": m.group(2),
+                "sasl_username": m.group(5),
+                "sasl_method": m.group(6),
+                "remote_host": "unknown",
+                "remote_ip": m.group(7),
+                "connection_security": m.group(9),
+                "remote_auth_success": 0,
+                "remote_auth_attempts": int(m.group(4))
+            }
+            self.add_new_connection(imap_conn)
+            return { 'imap_conn': imap_conn }
+
+        m = self.re_connect_fail_2.search(line)
+        if m:
+            # 2a. pop3-login: Info: Disconnected (no auth attempts in 2 secs): user=<>, rip=196.52.43.85, lip=192.155.92.185, TLS handshaking: SSL_accept() failed: error:14209102:SSL routines:tls_early_post_process_client_hello:unsupported protocol, session=<ehaSaDm2x9vENCtV>
+            # 1=date
+            # 2=service ("imap-login" or "pop3-login")
+            # 3=disconnect_reason
+            # 4=sasl_username ("")
+            # 5=remote_ip ("146.168.130.9")
+            # 6=local_ip
+            # 7=connection_security
+            d = self.parse_date(m.group(1)) # "YYYY-MM-DD HH:MM:SS"
+            imap_conn = {
+                "connect_time": d,
+                "disconnect_time": d,
+                "disconnect_reason": m.group(3),
+                "service": m.group(2),
+                "sasl_username": m.group(4),
+                "remote_host": "unknown",
+                "remote_ip": m.group(5),
+                "connection_security": m.group(7),
+                "remote_auth_success": 0,
+                "remote_auth_attempts": 0
+            }
+            self.add_new_connection(imap_conn)
+            return { 'imap_conn': imap_conn }
+
+        m = self.re_connect_fail_3.search(line)
+        if m:
+            # 3a. imap-login: Info: Disconnected (client didn't finish SASL auth, waited 0 secs): user=<>, method=PLAIN, rip=107.107.63.148, lip=192.155.92.185, TLS: SSL_read() syscall failed: Connection reset by peer, session=<rmBsIP21Zsdraz+U>
+            # 1=date
+            # 2=service ("imap-login" or "pop3-login")
+            # 3=disconnect_reason
+            # 4=sasl_username ("")
+            # 5=sasl_method
+            # 6=remote_ip ("146.168.130.9")
+            # 7=local_ip
+            # 8=connection_security
+            d = self.parse_date(m.group(1)) # "YYYY-MM-DD HH:MM:SS"
+            imap_conn = {
+                "connect_time": d,
+                "disconnect_time": d,
+                "disconnect_reason": m.group(3),
+                "service": m.group(2),
+                "sasl_username": m.group(4),
+                "sasl_method": m.group(5),
+                "remote_host": "unknown",
+                "remote_ip": m.group(6),
+                "connection_security": m.group(8),
+                "remote_auth_success": 0,
+                "remote_auth_attempts": 0
+            }
+            self.add_new_connection(imap_conn)
+            return { 'imap_conn': imap_conn }
+
+        m = self.re_connect_fail_4.search(line)
+        if m:
+            # 4a. pop3-login: Info: Disconnected: Too many bad commands (no auth attempts in 0 secs): user=<>, rip=83.97.20.35, lip=192.155.92.185, TLS, session=<BH8JRCi2nJ5TYRQj>
+            # 1=date
+            # 2=service ("imap-login" or "pop3-login")
+            # 3=disconnect_reason
+            # 4=sasl_username ("")
+            # 5=remote_ip ("146.168.130.9")
+            # 6=local_ip
+            # 7=connection_security
+            d = self.parse_date(m.group(1)) # "YYYY-MM-DD HH:MM:SS"
+            imap_conn = {
+                "connect_time": d,
+                "disconnect_time": d,
+                "disconnect_reason": m.group(3),
+                "service": m.group(2),
+                "sasl_username": m.group(4),
+                "remote_host": "unknown",
+                "remote_ip": m.group(5),
+                "connection_security": m.group(6),
+                "remote_auth_success": 0,
+                "remote_auth_attempts": 0
+            }
+            self.add_new_connection(imap_conn)
+            return { 'imap_conn': imap_conn }
+
+        m = self.re_connect_fail_5.search(line)
+        if m:
+            # 5a. imap-login: Info: Disconnected: Too many bad commands (auth failed, 1 attempts in 4 secs): user=<fernando@athigo.com>, method=PLAIN, rip=152.67.63.172, lip=192.155.92.185, TLS: Disconnected, session=<rho/Rjq2EqSYQz+s>
+            # 1=date
+            # 2=service ("imap-login" or "pop3-login")
+            # 3=disconnect_reason
+            # 4=remote_auth_attempts
+            # 5=sasl_username
+            # 6=sasl_method
+            # 7=remote_ip
+            # 8=local_ip
+            # 9=connection_security
+            d = self.parse_date(m.group(1)) # "YYYY-MM-DD HH:MM:SS"
+            imap_conn = {
+                "connect_time": d,
+                "disconnect_time": d,
+                "disconnect_reason": m.group(3),
+                "service": m.group(2),
+                "sasl_username": m.group(5),
+                "sasl_method": m.group(6),
+                "remote_host": "unknown",
+                "remote_ip": m.group(7),
+                "connection_security": m.group(9),
+                "remote_auth_success": 0,
+                "remote_auth_attempts": int(m.group(4))
+            }
+            self.add_new_connection(imap_conn)
+            return { 'imap_conn': imap_conn }
+
+
+    def match_disconnect(self, line):
+        # 1=date
+        # 2=service ("imap" or "pop3")
+        # 3=sasl_username
+        # 4=disconnect_reason ("Logged out")
+        # 5=in_bytes
+        # 6=out_bytes
+        #
+        # NOTE: there is no way to match up the disconnect with the
+        # actual connection because Dovecot does not log a service_tid
+        # or an ip address or anything else that could be used to
+        # match the two up. We'll just assign the disconnect to the
+        # oldest connection for the user.
+        m = self.re_disconnect.search(line)
+        if m:
+            v = {
+                "service": m.group(2),
+                "disconnect_time": self.parse_date(m.group(1)),
+                "disconnect_reason": m.group(4),
+                "in_bytes": int(m.group(5)),
+                "out_bytes": int(m.group(6))
+            }
+            imap_conn_q = [
+                { 'key':'service', 'value':m.group(2) + '-login' },
+                { 'key':'sasl_username', 'value':m.group(3),
+                  'ignorecase': True }
+            ]
+            log.debug(imap_conn_q)
+
+            imap_conn = self.find_first(imap_conn_q)
+            if imap_conn:
+                imap_conn.update(v)
+                return { 'imap_conn': imap_conn }
+            return True
+
+
+    def store(self, imap_conn):
+        if 'disposition' not in imap_conn:
+            if imap_conn.get('remote_auth_success') == 0 and \
+               imap_conn.get('remote_auth_attempts') == 0:
+                imap_conn.update({
+                    'disposition': 'suspected_scanner',
+                })
+
+            elif imap_conn.get('remote_auth_success') == 0 and \
+                 imap_conn.get('remote_auth_attempts', 0) > 0:
+                imap_conn.update({
+                    'disposition': 'failed_login_attempt',
+                })
+
+            elif imap_conn.get('connection_security') != 'TLS' and \
+                 imap_conn.get('remote_ip') != '127.0.0.1':
+                imap_conn.update({
+                    'disposition': 'insecure'
+                })
+
+            else:
+                imap_conn.update({
+                    'disposition': 'ok',
+                })
+
+        drop = self.test_drop_disposition(imap_conn['disposition'])
+
+        if not drop:
+            log.debug('store: %s', imap_conn)
+            try:
+                self.record_store.store('imap_mail', imap_conn)
+            except Exception as e:
+                log.exception(e)
+
+        self.remove_connection(imap_conn)
+
+
+    def log_match(self, match_str, match_result, line):
+        if match_result is True:
+            log.info('%s [unmatched]: %s', match_str, line)
+
+        elif match_result:
+            if match_result.get('deferred', False):
+                log.debug('%s [deferred]: %s', match_str, line)
+
+            elif 'imap_conn' in match_result:
+                log.debug('%s: %s: %s', match_str, line, match_result['imap_conn'])
+            else:
+                log.error('no imap_conn in match_result: %s', match_result)
+        else:
+            log.debug('%s: %s', match_str, line)
+
+
+    def test_end_of_rec(self, match_result):
+        if not match_result or match_result is True or match_result.get('deferred', False):
+            return False
+        return self.end_of_rec(match_result['imap_conn'])
+
+    def end_of_rec(self, imap_conn):
+        '''a client must be disconnected for the record to be "complete"
+
+        '''
+        if 'disconnect_time' not in imap_conn:
+            return False
+
+        return True
+
+
+    def handle(self, line):
+        '''overrides ReadLineHandler method
+
+        This function is called by the main log reading thread in
+        TailFile. All additional log reading is blocked until this
+        function completes.
+
+        The storage engine (`record_store`, a SqliteEventStore
+        instance) does not block, so this function will return before
+        the record is saved to disk.
+
+        IMPORTANT:
+
+        The data structures and methods in this class are not thread
+        safe. It is not okay to call any of them when the instance is
+        registered with TailFile.
+
+        '''
+        if not self.capture_enabled:
+            return
+
+        self.update_inprogress_count()
+
+        log.debug('imap recs in progress: %s', len(self.recs))
+
+        match = self.match_connect_success(line)
+        if match:
+            self.log_match('connect', match, line)
+            return
+
+        match = self.match_connect_fail(line)
+        if match:
+            self.log_match('connect_fail', match, line)
+            if self.test_end_of_rec(match):
+                # we're done - not queued and disconnected ... save it
+                self.store(match['imap_conn'])
+            return
+
+        match = self.match_disconnect(line)
+        if match:
+            self.log_match('disconnect', match, line)
+            if self.test_end_of_rec(match):
+                # we're done - not queued and disconnected ... save it
+                self.store(match['imap_conn'])
+            return
+
+        self.log_match('IGNORED', None, line)
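The capture groups in re_connect_success line up with the numbered comments above. A quick self-contained check against sample line 1a, using a simplified stand-in for the rsyslog date expression (the real handler takes it from logs.DateParser):

```python
import re

date_regexp = r'(\w+ +\d+ [\d:]+) \S+'   # stand-in: "Mar  3 08:53:47 hostname"
re_connect_success = re.compile(
    '^' + date_regexp +
    ' (imap-login|pop3-login): Info: Login: user=<([^>]*)>, method=([^,]*),'
    ' rip=([^,]+), lip=([^,]+), mpid=([^,]+), ([^,]+)')

line = ('Mar  3 08:53:47 mail imap-login: Info: Login: user=<keith@just1w.com>,'
        ' method=PLAIN, rip=146.168.130.9, lip=192.155.92.185, mpid=5866, TLS,'
        ' session=<IF3v7ze27dKSqIIJ>')
m = re_connect_success.search(line)
print(m.group(2), m.group(3), m.group(5), m.group(8))
# -> imap-login keith@just1w.com 146.168.130.9 TLS
```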
management/reporting/capture/mail/PostfixLogHandler.py (renamed from InboundMailLogHandler.py)
@@ -10,6 +10,7 @@ from db.EventStore import EventStore
 from util.DictQuery import DictQuery
 from util.safe import (safe_int, safe_append, safe_del)
 from .PostfixLogParser import PostfixLogParser
+from .CommonHandler import CommonHandler

 log = logging.getLogger(__name__)

@@ -17,7 +18,7 @@ log = logging.getLogger(__name__)
 STATE_CACHE_OWNER_ID = 1


-class InboundMailLogHandler(ReadLineHandler):
+class PostfixLogHandler(CommonHandler):
     '''
     '''
     def __init__(self, record_store,
@@ -26,9 +27,20 @@ class InboundMailLogHandler(ReadLineHandler):
                  capture_enabled = True,
                  drop_disposition = None
     ):
-        ''' EventStore instance for persisting "records" '''
-        self.record_store = record_store
-        self.set_capture_enabled(capture_enabled)
+        super(PostfixLogHandler, self).__init__(
+            STATE_CACHE_OWNER_ID,
+            record_store,
+            date_regexp,
+            date_parser_fn,
+            capture_enabled,
+            drop_disposition
+        )
+
+        # maximum size of the in-progress record queue
+        self.current_inprogress_recs = len(self.recs)
+        self.max_inprogress_recs = 100
+

         # A "record" is composed by parsing all the syslog output from
         # the activity generated by the MTA (postfix) from a single
@@ -66,31 +78,6 @@ class InboundMailLogHandler(ReadLineHandler):
         # thread-safe.
         #

-        # our in-progress record queue is a simple list
-        self.recs = self.get_cached_state(clear=True)
-
-        # maximum size of the in-progress record queue
-        self.current_inprogress_recs = len(self.recs)
-        self.max_inprogress_recs = 100
-
-        # records that have these dispositions will be dropped (not
-        # recorded in the record store)
-        self.drop_disposition_lock = threading.Lock()
-        self.drop_disposition = {
-            'failed_login_attempt': False,
-            'suspected_scanner': False,
-            'reject': False
-        }
-        self.update_drop_disposition(drop_disposition)
-
-        # regular expression that matches a syslog date (always anchored)
-        self.date_regexp = date_regexp
-        if date_regexp.startswith('^'):
-            self.date_regexp = date_regexp[1:]
-
-        # function that parses the syslog date
-        self.date_parser_fn = date_parser_fn
-
-
         # 1. 1a. postfix/smtpd[13698]: connect from host.tld[1.2.3.4]
         # 1=date
@@ -233,70 +220,9 @@ class InboundMailLogHandler(ReadLineHandler):



-    def set_capture_enabled(self, capture_enabled):
-        ''' thread-safe '''
-        self.capture_enabled = capture_enabled
-
-    def update_drop_disposition(self, drop_disposition):
-        ''' thread-safe '''
-        with self.drop_disposition_lock:
-            self.drop_disposition.update(drop_disposition)
-
-    def get_inprogress_count(self):
-        ''' thread-safe '''
-        return self.current_inprogress_recs
-
-
-    def datetime_as_str(self, d):
-        # iso-8601 time friendly to sqlite3
-        timestamp = d.isoformat(sep=' ', timespec='seconds')
-        # strip the utc offset from the iso format (ie. remove "+00:00")
-        idx = timestamp.find('+00:00')
-        if idx>0:
-            timestamp = timestamp[0:idx]
-        return timestamp
-
-    def parse_date(self, str):
-        # we're expecting UTC times from date_parser()
-        d = self.date_parser_fn(str)
-        return self.datetime_as_str(d)
-
-    def get_cached_state(self, clear=True):
-        conn = None
-        try:
-            # obtain the cached records from the record store
-            conn = self.record_store.connect()
-            recs = self.record_store.read_rec(conn, 'state', {
-                "owner_id": STATE_CACHE_OWNER_ID,
-                "clear": clear
-            })
-            log.info('read %s incomplete records from cache', len(recs))
-
-            # eliminate stale records - "stale" should be longer than
-            # the "give-up" time for postfix (4-5 days)
-            stale = datetime.timedelta(days=7)
-            cutoff = self.datetime_as_str(
-                datetime.datetime.now(datetime.timezone.utc) - stale
-            )
-            newlist = [ rec for rec in recs if rec['connect_time'] >= cutoff ]
-            if len(newlist) < len(recs):
-                log.warning('dropping %s stale incomplete records',
-                            len(recs) - len(newlist))
-            return newlist
-        finally:
-            if conn: self.record_store.close(conn)
-
-    def save_state(self):
-        log.info('saving state to cache: %s records', len(self.recs))
-        conn = None
-        try:
-            conn = self.record_store.connect()
-            self.record_store.write_rec(conn, 'state', {
-                'owner_id': STATE_CACHE_OWNER_ID,
-                'state': self.recs
-            })
-        finally:
-            if conn: self.record_store.close(conn)
-
-
     def add_new_connection(self, mta_conn):
@@ -304,7 +230,7 @@ class InboundMailLogHandler(ReadLineHandler):
         threshold = self.max_inprogress_recs + ( len(self.recs) * 0.05 )
         if len(self.recs) > threshold:
             backoff = len(self.recs) - self.max_inprogress_recs + int( self.max_inprogress_recs * 0.10 )
-            log.warning('dropping %s old records', backoff)
+            log.warning('dropping %s old mta records', backoff)
             self.recs = self.recs[min(len(self.recs),backoff):]

         self.recs.append(mta_conn)
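Only the log message changed here, but the surrounding backoff arithmetic is worth pinning down: once the queue exceeds its maximum by 5%, enough of the oldest records are dropped to land 10% below the maximum. Worked through with max_inprogress_recs = 100:

```python
max_inprogress_recs = 100
recs = list(range(120))                                 # 120 queued records
threshold = max_inprogress_recs + (len(recs) * 0.05)    # 106
if len(recs) > threshold:
    backoff = len(recs) - max_inprogress_recs + int(max_inprogress_recs * 0.10)
    recs = recs[min(len(recs), backoff):]               # drops the 30 oldest
print(len(recs))                                        # -> 90
```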
@@ -1314,14 +1240,12 @@ class InboundMailLogHandler(ReadLineHandler):
                 'disposition': 'ok',
             })

-        drop = False
-        with self.drop_disposition_lock:
-            drop = self.drop_disposition.get(mta_conn['disposition'], False)
-
+        drop = self.test_drop_disposition(mta_conn['disposition'])

         if not drop:
             log.debug('store: %s', mta_conn)
             try:
-                self.record_store.store('inbound_mail', mta_conn)
+                self.record_store.store('mta_mail', mta_conn)
             except Exception as e:
                 log.exception(e)

@@ -1389,9 +1313,9 @@ class InboundMailLogHandler(ReadLineHandler):
         if not self.capture_enabled:
             return

-        self.current_inprogress_recs = len(self.recs)
+        self.update_inprogress_count()

-        log.debug('recs in progress: %s, dds_by_tid=%s',
+        log.debug('mta recs in progress: %s, dds_by_tid=%s',
                   len(self.recs),
                   len(self.dds_by_tid)
         )
@@ -1474,11 +1398,3 @@ class InboundMailLogHandler(ReadLineHandler):
         self.log_match('IGNORED', None, line)


-    def end_of_callbacks(self, thread):
-        '''overrides ReadLineHandler method
-
-        save incomplete records so we can pick up where we left off
-
-        '''
-        self.save_state()
-
management/reporting/capture/tests/.gitignore (new file, vendored)
@@ -0,0 +1,3 @@
+*.log
+pos.json
+*.sqlite
management/reporting/capture/tests/config.json (new file)
@@ -0,0 +1,12 @@
+{
+    "capture": true,
+    "prune_policy": {
+        "frequency_min": 2400,
+        "older_than_days": 30
+    },
+    "drop_disposition": {
+        "failed_login_attempt": false,
+        "suspected_scanner": false,
+        "reject": false
+    }
+}
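capture.py consumes this file via its -config flag (see run.sh below); it is plain JSON, so verifying a test configuration from Python is one call:

```python
import json

with open('config.json') as f:          # path as used from tests/
    config = json.load(f)

assert config['capture'] is True
assert config['prune_policy']['older_than_days'] == 30
assert config['drop_disposition']['reject'] is False
```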
management/reporting/capture/tests/load.sh (new executable file, 35 lines)
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+# load a mail.log file into the current test vm's capture.sqlite
+#
+if [ -z "$1" ]; then
+    echo "usage: $0 /path/to/mail.log"
+    exit 1
+fi
+
+log="$1"
+if [ ! -e "$log" ]; then
+    echo "Does not exist: $log"
+    exit 1
+fi
+
+. /etc/mailinabox.conf
+if [ $? -ne 0 ]; then
+    echo "Could not load /etc/mailinabox.conf !!"
+    exit 1
+fi
+
+
+echo "Stopping miabldap-capture daemon"
+systemctl stop miabldap-capture || exit 1
+
+echo "Ensuring access to capture.sqlite"
+capture_db=$STORAGE_ROOT/reporting/capture.sqlite
+sqlite3 "$capture_db" "select value from db_info where key='schema_version'" >/dev/null
+[ $? -ne 0 ] && exit 1
+
+echo "Loading $log"
+python3 ../capture.py -d -loglevel info -logfile "$log" -stopateof
+
+echo "Starting miabldap-capture daemon"
+systemctl start miabldap-capture
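The sqlite3 probe in load.sh doubles as an access check. The same schema_version query from Python, against a hypothetical database path (STORAGE_ROOT is commonly /home/user-data):

```python
import sqlite3

conn = sqlite3.connect('/home/user-data/reporting/capture.sqlite')  # hypothetical path
row = conn.execute(
    "SELECT value FROM db_info WHERE key='schema_version'").fetchone()
print(row)   # e.g. ('2',) after schema update 2 above
conn.close()
```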
management/reporting/capture/tests/run.sh (new executable file, 34 lines)
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+#
+# interactively load a mail.log file and create a capture.sqlite
+# database in the current directory
+
+log="./mail.log"
+pos="./pos.json"
+sqlite="./capture.sqlite"
+config="./config.json"
+
+if [ -e "./debug.log" ]; then
+    log="./debug.log"
+fi
+
+case "$1" in
+    *.log )
+        log="$1"
+        shift
+        ;;
+esac
+
+if [ "$1" != "-c" ]; then
+    # Start over. Don't continue where we left off
+    echo "STARTING OVER"
+    rm -f "$pos"
+    rm -f "$sqlite"
+else
+    shift
+fi
+
+echo "USING LOG: $log"
+echo "DB: $sqlite"
+python3 ../capture.py -d -loglevel info $@ -logfile "$log" -posfile "$pos" -sqlitefile "$sqlite" -config "$config"
management/reporting/ui/capture-db-stats.js
@@ -6,7 +6,7 @@ Vue.component('capture-db-stats', {

     template:'<div>'+
         '<template v-if="stats">'+
-        '<caption class="text-nowrap">Database date range</caption><div class="ml-2">First: {{stats.mta_connect.connect_time.min_str}}</div><div class="ml-2">Last: {{stats.mta_connect.connect_time.max_str}}</div>'+
+        '<caption class="text-nowrap">Database date range</caption><div class="ml-2">First: {{stats.db_stats.connect_time.min_str}}</div><div class="ml-2">Last: {{stats.db_stats.connect_time.max_str}}</div>'+
         '<div class="mt-2">'+
         '  <b-table-lite small caption="Connections by disposition" caption-top :fields="row_counts.fields" :items=row_counts.items></b-table-lite>'+
         '</div>'+
@@ -37,9 +37,9 @@ Vue.component('capture-db-stats', {
             // convert dates
             var parser = d3.utcParse(this.stats.date_parse_format);
             [ 'min', 'max' ].forEach( k => {
-                var d = parser(this.stats.mta_connect.connect_time[k]);
-                this.stats.mta_connect.connect_time[k] = d;
-                this.stats.mta_connect.connect_time[k+'_str'] =
+                var d = parser(this.stats.db_stats.connect_time[k]);
+                this.stats.db_stats.connect_time[k] = d;
+                this.stats.db_stats.connect_time[k+'_str'] =
                     d==null ? '-' : DateFormatter.dt_long(d);
             });

@@ -63,11 +63,11 @@ Vue.component('capture-db-stats', {
             this.row_counts.fields[0].tdClass = 'text-capitalize';


-            const total = this.stats.mta_connect.count;
-            for (var name in this.stats.mta_connect.disposition)
+            const total = this.stats.db_stats.count;
+            for (var name in this.stats.db_stats.disposition)
             {
                 const count =
-                      this.stats.mta_connect.disposition[name].count;
+                      this.stats.db_stats.disposition[name].count;
                 this.row_counts.items.push({
                     name: name,
                     count: count,
@@ -80,7 +80,7 @@ Vue.component('capture-db-stats', {
             })
             this.row_counts.items.push({
                 name:'Total',
-                count:this.stats.mta_connect.count,
+                count:this.stats.db_stats.count,
                 percent:1,
                 '_rowVariant': 'primary'
             });

management/reporting/ui/chart-multi-line-timeseries.js
@@ -57,7 +57,7 @@ Vue.component('chart-multi-line-timeseries', {
                 .text("no data");
         }

-        this.xscale = d3.scaleUtc()
+        this.xscale = d3.scaleTime()
            .domain(d3.extent(this.tsdata.dates))
            .nice()
            .range([this.margin.left, this.width - this.margin.right])
@@ -78,25 +78,39 @@ Vue.component('chart-multi-line-timeseries', {
            .call(this.yAxis.bind(this))
            .attr("font-size", ChartPrefs.axis_font_size);

-        const line = d3.line()
-            .defined(d => !isNaN(d))
-            .x((d, i) => this.xscale(this.tsdata.dates[i]))
-            .y(d => this.yscale(d));
-
-        const path = svg.append("g")
-            .attr("fill", "none")
-            .attr("stroke-width", 1.5)
-            .attr("stroke-linejoin", "round")
-            .attr("stroke-linecap", "round")
-            .selectAll("path")
-            .data(this.tsdata.series)
-            .join("path")
-            .style("mix-blend-mode", "multiply")
-            .style("stroke", (d, i) => this.colors[i])
-            .attr("d", d => line(d.values))
-            ;
-
-        svg.call(this.hover.bind(this), path);
+        if (this.tsdata.dates.length == 1) {
+            // special case
+            const g = svg.append("g")
+                .selectAll("circle")
+                .data(this.tsdata.series)
+                .join("circle")
+                .attr("fill", (d, i) => this.colors[i])
+                .attr("cx", this.xscale(this.tsdata.dates[0]))
+                .attr("cy", d => this.yscale(d.values[0]))
+                .attr("r", 2.5);
+            this.hover(svg, g);
+        }
+        else {
+            const line = d3.line()
+                .defined(d => !isNaN(d))
+                .x((d, i) => this.xscale(this.tsdata.dates[i]))
+                .y(d => this.yscale(d));
+
+            const path = svg.append("g")
+                .attr("fill", "none")
+                .attr("stroke-width", 1.5)
+                .attr("stroke-linejoin", "round")
+                .attr("stroke-linecap", "round")
+                .selectAll("path")
+                .data(this.tsdata.series)
+                .join("path")
+                .style("mix-blend-mode", "multiply")
+                .style("stroke", (d, i) => this.colors[i])
+                .attr("d", d => line(d.values))
+                ;
+
+            svg.call(this.hover.bind(this), path);
+        }
    },

    xAxis: function(g) {
@@ -160,8 +174,10 @@ Vue.component('chart-multi-line-timeseries', {
            const xvalue = this.xscale.invert(pointer[0]); // date
            const yvalue = this.yscale.invert(pointer[1]); // number
            //const i = d3.bisectCenter(this.tsdata.dates, xvalue); // index
-            const i = d3.bisect(this.tsdata.dates, xvalue); // index
-            if (i >= this.tsdata.dates.length) return;
+            var i = d3.bisect(this.tsdata.dates, xvalue); // index
+            if (i > this.tsdata.dates.length) return;
+            i = Math.min(this.tsdata.dates.length-1, i);
+
            // closest series
            var closest = null;
            for (var sidx=0; sidx<this.tsdata.series.length; sidx++) {
@@ -192,7 +208,7 @@ Vue.component('chart-multi-line-timeseries', {

        function entered() {
            path.style("mix-blend-mode", null).attr("stroke", "#ddd");
-            dot.attr("display", null);
+            //dot.attr("display", null);
        }

        function left() {
management/reporting/ui/chart-pie.js
@@ -148,11 +148,15 @@ Vue.component('chart-pie', {
            .data(arcs)
            .join("text")
            .attr("transform", d => `translate(${arcLabel.centroid(d)})`)
-            .call(text => text.append("tspan")
+            .call(text => text
+                  .filter(d => (d.endAngle - d.startAngle) > 0.25)
+                  .append("tspan")
                  .attr("y", "-0.4em")
                  .attr("font-weight", "bold")
                  .text(d => d.data.name))
-            .call(text => text.filter(d => (d.endAngle - d.startAngle) > 0.25).append("tspan")
+            .call(text => text
+                  .filter(d => (d.endAngle - d.startAngle) > 0.25)
+                  .append("tspan")
                  .attr("x", 0)
                  .attr("y", "0.7em")
                  .attr("fill-opacity", 0.7)
management/reporting/ui/chart-stacked-bar-timeseries.js
@@ -101,13 +101,14 @@ Vue.component('chart-stacked-bar-timeseries', {
                .text("no data");
        }

-        this.xscale = d3.scaleUtc()
+        this.xscale = d3.scaleTime()
            .domain(d3.extent(this.tsdata.dates))
            .nice()
            .range([this.margin.left, this.width - this.margin.right])

-        var barwidth = this.tsdata.barwidth(this.xscale, 1);
-        var padding = barwidth / 2;
+        var barwidth = this.tsdata.barwidth(this.xscale);
+        var padding_x = barwidth / 2;
+        var padding_y = ChartVue.get_yAxisLegendBounds(this.tsdata).height + 2;

        this.yscale = d3.scaleLinear()
            .domain([
@@ -115,28 +116,30 @@ Vue.component('chart-stacked-bar-timeseries', {
                d3.sum(this.tsdata.series, s => d3.max(s.values))
            ])
            .range([
-                this.height - this.margin.bottom,
+                this.height - this.margin.bottom - padding_y,
                this.margin.top,
            ]);

-        svg.append("g")
-            .call(this.xAxis.bind(this, padding))
+        var g = svg.append("g")
+            .attr("transform", `translate(0, ${padding_y})`);
+
+        g.append("g")
+            .call(this.xAxis.bind(this, padding_x))
            .attr("font-size", ChartPrefs.axis_font_size);

-        svg.append("g")
-            .call(this.yAxis.bind(this))
+        g.append("g")
+            .call(this.yAxis.bind(this, padding_y))
            .attr("font-size", ChartPrefs.axis_font_size);


        for (var s_idx=0; s_idx<this.tsdata.series.length; s_idx++) {
-            svg.append("g")
+            g.append("g")
                .datum(s_idx)
                .attr("fill", this.colors[s_idx])
                .selectAll("rect")
                .data(this.stacked[s_idx])
                .join("rect")
-                .attr("x", d => this.xscale(d.data.date) - barwidth/2 + padding)
-                .attr("y", d => this.yscale(d[1]))
+                .attr("x", d => this.xscale(d.data.date) - barwidth/2 + padding_x)
+                .attr("y", d => this.yscale(d[1]) + padding_y)
                .attr("height", d => this.yscale(d[0]) - this.yscale(d[1]))
                .attr("width", barwidth)
                .call( hover.bind(this) )
@@ -146,7 +149,13 @@ Vue.component('chart-stacked-bar-timeseries', {
                ;
        }

-        var hovinfo = svg.append("g");
+        g.append("g")
+            .attr("transform", `translate(${this.margin.left}, 0)`)
+            .call(
+                g => ChartVue.add_yAxisLegend(g, this.tsdata, this.colors)
+            );
+
+        var hovinfo = g.append("g");

        function hover(rect) {
            if ("ontouchstart" in document) rect
@@ -165,10 +174,11 @@ Vue.component('chart-stacked-bar-timeseries', {
                var s_name = this.tsdata.series[s_idx].name;
                var v = d.data[s_name];
                var x = Number(rect.attr('x')) + barwidth/2;
-
+                //var y = Number(rect.attr('y')) + Number(rect.attr('height'))/2;
+                var y = Number(rect.attr('y'));
                hovinfo.attr(
                    "transform",
-                    `translate( ${x}, ${rect.attr('y')} )`)
+                    `translate( ${x}, ${y} )`)
                    .append('text')
                    .attr("font-family", ChartPrefs.default_font_family)
                    .attr("font-size", ChartPrefs.default_font_size)
@@ -203,18 +213,16 @@ Vue.component('chart-stacked-bar-timeseries', {
            return x;
        },

-        yAxis: function(g) {
+        yAxis: function(padding, g) {
            var y = g.attr(
                "transform",
-                `translate(${this.margin.left},0)`
+                `translate(${this.margin.left},${padding})`
            ).call(
                d3.axisLeft(this.yscale)
                    .ticks(this.height/50)
-            ).call(g =>
-                g.select(".domain").remove()
-            ).call(g => {
-                ChartVue.add_yAxisLegend(g, this.tsdata, this.colors);
-            });
+            ).call(
+                g => g.select(".domain").remove()
+            );

            return y;
        },
management/reporting/ui/charting.js
@@ -224,9 +224,18 @@ class DateRange {
        else if (type == 'ytd')
            return DateRange.ytd();
        else if (type == 'last30days')
-            return DateRange.lastXdays(30);
+            return DateRange.lastXdays(29);
        else if (type == 'last7days')
-            return DateRange.lastXdays(7)
+            return DateRange.lastXdays(6)
+        else if (type == 'today') {
+            var d = new Date();
+            return [ d, d ];
+        }
+        else if (type == 'yesterday') {
+            var d = new Date();
+            d.setTime(d.getTime() - (1 * 24 * 60 * 60 * 1000));
+            return [ d, d ];
+        }
        return null;
    }
 };
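The switch from lastXdays(30) to lastXdays(29) (and 7 to 6) reads like an off-by-one fix for inclusive ranges: the window includes today, so going 29 days back spans exactly 30 calendar days. The same arithmetic, checked in Python:

```python
import datetime

today = datetime.date.today()
start = today - datetime.timedelta(days=29)   # lastXdays(29)
assert (today - start).days + 1 == 30         # 30 days, inclusive of today
```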
@@ -748,6 +757,15 @@ class ChartVue {
        return svg;
    }

+
+    static get_yAxisLegendBounds(data) {
+        const h = ChartPrefs.axis_font_size;
+        return {
+            width: h + 6,
+            height: h * data.series.length
+        };
+    }
+
    static add_yAxisLegend(g, data, colors) {
        //var gtick = g.select(".tick:last-of-type").append("g");
        const h = ChartPrefs.axis_font_size;
@@ -853,9 +871,8 @@ class TimeseriesData {
    }

    static binsizeOfRange(range) {
-        // target 100-120 datapoints
-        const target = 100;
-        const tolerance = 0.2; // 20%
+        // target roughly 75 datapoints
+        const target = 75;

        if (typeof range[0] == 'string') {
            var parser = d3.utcParse('%Y-%m-%d %H:%M:%S');
@ -865,27 +882,47 @@ class TimeseriesData {
|
||||
const span_min = Math.ceil(
|
||||
(range[1].getTime() - range[0].getTime()) / (1000*60*target)
|
||||
);
|
||||
const bin_days = Math.floor(span_min / (24*60));
|
||||
const bin_hours = Math.floor((span_min - bin_days*24*60) / 60);
|
||||
|
||||
var bin_days = Math.floor(span_min / (24*60));
|
||||
var bin_hours = Math.floor((span_min - bin_days*24*60) / 60);
|
||||
if (bin_days >= 1) {
|
||||
return bin_days * 24 * 60 +
|
||||
(bin_hours > (24 * tolerance) ? bin_hours*60: 0);
|
||||
if (bin_hours > 18) {
|
||||
bin_days += 1;
|
||||
bin_hours = 0;
|
||||
}
|
||||
else if (bin_hours > 6) {
|
||||
bin_hours = 12;
|
||||
}
|
||||
else {
|
||||
bin_hours = 0;
|
||||
}
|
||||
return bin_days * 24 * 60 + bin_hours*60;
|
||||
}
|
||||
|
||||
const bin_mins = span_min - bin_days*24*60 - bin_hours*60;
|
||||
if (bin_hours >= 1) {
|
||||
return bin_hours * 60 +
|
||||
(bin_mins > (60 * tolerance) ? bin_mins: 0 );
|
||||
var bin_mins = span_min - bin_days*24*60 - bin_hours*60;
|
||||
if (bin_mins > 45) {
|
||||
bin_hours += 1
|
||||
bin_mins = 0;
|
||||
}
|
||||
return bin_mins;
|
||||
else if (bin_mins > 15) {
|
||||
bin_mins = 30;
|
||||
}
|
||||
else {
|
||||
bin_mins = 0;
|
||||
}
|
||||
return bin_hours * 60 + bin_mins;
|
||||
}
|
||||
|
||||
barwidth(xscale, barspacing) {
|
||||
barwidth(xscale, barspacing, max_width) {
|
||||
/* get the width of a bar in a bar chart */
|
||||
var start = this.range[0];
|
||||
var end = this.range[1];
|
||||
var bins = (end.getTime() - start.getTime()) / (1000 * this.binsizeTimespan());
|
||||
return Math.max(1, (xscale.range()[1] - xscale.range()[0])/bins - (barspacing || 0));
|
||||
if (this.dates.length == 0) return 0; // no data
|
||||
barspacing = (barspacing === undefined) ? 2 : barspacing;
|
||||
max_width = (max_width === undefined) ? 75 : max_width;
|
||||
var first_date = this.dates[0];
|
||||
var last_date = this.dates[this.dates.length-1];
|
||||
var bins = (last_date.getTime() - first_date.getTime()) / (1000 * 60 * this.binsize) + 1;
|
||||
if (bins == 1) return max_width;
|
||||
return Math.min(max_width, Math.max(1, (xscale(last_date) - xscale(first_date))/bins - barspacing));
|
||||
}
|
||||
|
||||
formatDateTimeLong(d) {
|
||||
|
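The rewritten binsizeOfRange() drops the tolerance test in favor of snapping the raw span to a "friendly" bin size: whole or half days first, then whole or half hours. A Python rendering of the same rounding, shown only to make the branches easy to scan (the canonical version is the JavaScript above; span_min is the raw per-bin span in minutes):

    def snap_binsize(span_min: int) -> int:
        """Snap a raw per-bin timespan (minutes) to a friendly bin size.

        Mirrors the branches above: prefer whole/half days, then whole/half
        hours, dropping remainders below the rounding thresholds.
        """
        days, rem = divmod(span_min, 24 * 60)
        hours, mins = divmod(rem, 60)
        if days >= 1:
            if hours > 18:
                days, hours = days + 1, 0   # nearly a full extra day: round up
            elif hours > 6:
                hours = 12                  # mid-range: snap to half a day
            else:
                hours = 0                   # small remainder: drop it
            return days * 24 * 60 + hours * 60
        if mins > 45:
            hours, mins = hours + 1, 0      # nearly a full hour: round up
        elif mins > 15:
            mins = 30                       # snap to half an hour
        else:
            mins = 0
        return hours * 60 + mins

    # e.g. a raw span of 4 days 4 hours snaps down to exactly 4 days
    assert snap_binsize(4 * 24 * 60 + 4 * 60) == 4 * 24 * 60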
@@ -59,6 +59,8 @@ Vue.component('date-range-picker', {
range: range,
range_type: range_type,
options: [
{ value:'today', text:'Today' },
{ value:'yesterday', text:'Yesterday' },
{ value:'last7days', text:'Last 7 days' },
{ value:'last30days', text:'Last 30 days' },
{ value:'wtd', text:'Week-to-date' },
@@ -153,16 +155,20 @@ Vue.component('date-range-picker', {

range_type_change: function(evt) {
// ui select callback
if (this.range_type == 'last7days')
this.range = DateRange.lastXdays_as_ymd(7);
else if (this.range_type == 'last30days')
this.range = DateRange.lastXdays_as_ymd(30);
else if (this.range_type == 'wtd')
this.range = DateRange.wtd_as_ymd();
else if (this.range_type == 'mtd')
this.range = DateRange.mtd_as_ymd();
else if (this.range_type == 'ytd')
this.range = DateRange.ytd_as_ymd();
var range = DateRange.rangeFromType(this.range_type);
if (range) {
this.range = range.map(d => DateFormatter.ymd(d));
}
// if (this.range_type == 'last7days')
//     this.range = DateRange.lastXdays_as_ymd(7);
// else if (this.range_type == 'last30days')
//     this.range = DateRange.lastXdays_as_ymd(30);
// else if (this.range_type == 'wtd')
//     this.range = DateRange.wtd_as_ymd();
// else if (this.range_type == 'mtd')
//     this.range = DateRange.mtd_as_ymd();
// else if (this.range_type == 'ytd')
//     this.range = DateRange.ytd_as_ymd();
},

}

@@ -50,7 +50,7 @@ Vue.component('panel-messages-sent', function(resolve, reject) {
},

height_recip: function() {
return this.height / 2;
return (this.height / 3) *2;
},

radius_recip_pie: function() {

@@ -28,10 +28,10 @@
<b-form-radio class="ml-1" v-model="sender_type" value="server" @change="update_recent_list()">Server</b-form-radio>
</div>
<b-input-group style="width:40em">
<b-form-input v-if="sender_type=='email'" class="h-auto" :autofocus="data_sender===null" list="panel-rsa-recent" v-model="email" placeholder="Enter an email address (envelope FROM)"></b-form-input>
<b-form-input v-else class="h-auto" :autofocus="data_sender===null" list="panel-rsa-recent" v-model="server" placeholder="Enter a hostname or ip address"></b-form-input>
<b-form-input v-if="sender_type=='email'" class="h-auto" :autofocus="data_sender===null" list="panel-rsa-recent" @change="email_changed" placeholder="Enter an email address (envelope FROM)"></b-form-input>
<b-form-input v-else class="h-auto" :autofocus="data_sender===null" list="panel-rsa-recent" @change="server_changed" placeholder="Enter a hostname or ip address"></b-form-input>
<b-input-group-append>
<b-button variant="primary" @click="change_sender" :disabled="sender_type=='email' && (email == '' || email==data_sender) || sender_type=='server' && (server =='' || server==data_sender)">Search</b-button>
<b-button variant="primary" @click="change_sender">Search</b-button>
</b-input-group-append>
</b-input-group>
<b-alert variant="warning" class="ml-2" :show="activity && activity.items.length>=get_row_limit()"><sup>*</sup> Tables limited to {{ get_row_limit() }} rows <router-link to="/settings"><b-icon icon="gear-fill"></b-icon></router-link></b-alert>
@@ -94,6 +94,14 @@ Vue.component('panel-remote-sender-activity', function(resolve, reject) {
},

methods: {
email_changed: function(evt) {
this.email = evt;
},

server_changed: function(evt) {
this.server = evt;
},

update_recent_list: function() {
this.recent_senders = UserSettings.get()
.get_recent_list(this.set_prefix + this.sender_type);

@@ -6,12 +6,12 @@

<b-form @submit.prevent="getChartData()" class="d-flex">
<b-input-group class="mb-3" style="width:30em">
<b-form-input class="h-auto" :autofocus="data_user_id===null" list="panel-ua-users" v-model="user_id" placeholder="Enter a user id/email address"></b-form-input>
<b-form-input ref="user_id_input" class="h-auto" :autofocus="data_user_id===null" list="panel-ua-users" @change="sync_user_id" placeholder="Enter a user id/email address"></b-form-input>
<b-input-group-append>
<b-button variant="primary" @click="change_user">Change user</b-button>
</b-input-group-append>
</b-input-group>
<b-alert variant="warning" class="ml-2" :show="sent_mail && sent_mail.items.length>=get_row_limit() || received_mail && received_mail.items.length>=get_row_limit()"><sup>*</sup> Tables limited to {{ get_row_limit() }} rows <router-link to="/settings"><b-icon icon="gear-fill"></b-icon></router-link></b-alert>
<b-alert variant="warning" class="ml-2" :show="sent_mail && sent_mail.items.length>=get_row_limit() || received_mail && received_mail.items.length>=get_row_limit() || imap_details && imap_details.items.length>=get_row_limit()"><sup>*</sup> Tables limited to {{ get_row_limit() }} rows <router-link to="/settings"><b-icon icon="gear-fill"></b-icon></router-link></b-alert>
<b-form-checkbox class="ml-auto" v-model="show_only_flagged" @change="show_only_flagged_change()">Flagged only</b-form-checkbox>
</b-form>

@@ -42,7 +42,7 @@
</b-table>
</b-tab>

<b-tab :title="`Received mail (${received_mail.items.length})`">
<b-tab>
<template #title>
Received mail<sup v-if="received_mail.items.length >= get_row_limit()">*</sup> ({{received_mail.items.length}})
</template>
@@ -61,6 +61,7 @@
</template>
<template #row-details="row">
<b-card>
<div><strong>Remote host</strong>: {{ row.item.remote_host }}[{{ row.item.remote_ip }}]</div>
<div><strong>Connection disposition</strong>: {{ disposition_formatter(row.item.disposition) }}</div>
<div v-if="row.item.orig_to"><strong>Sent to alias</strong>: {{ row.item.orig_to }}</div>
<div v-if="row.item.dkim_reason"><strong>Dkim reason</strong>: {{row.item.dkim_reason}}</div>
@@ -68,9 +69,36 @@
<div v-if="row.item.postgrey_reason"><strong>Postgrey reason</strong>: {{row.item.postgrey_reason}}</div>
<div v-if="row.item.postgrey_delay"><strong>Postgrey delay</strong>: {{received_mail.x_fields.postgrey_delay.formatter(row.item.postgrey_delay)}}</div>
<div v-if="row.item.spam_result"><strong>Spam score</strong>: {{received_mail.x_fields.spam_score.formatter(row.item.spam_score)}}</div>
<div v-if="row.item.failure_info"><strong>Failure info</strong>: {{row.item.failure_info}}</div>
</b-card>
</template>
</b-table>
</b-tab>


<b-tab>
<template #title>
IMAP Connections<sup v-if="imap_details.items.length >= get_row_limit()">*</sup> ({{imap_details.items.length}})
</template>
<b-table
class="sticky-table-header-0 bg-light"
small
:filter="show_only_flagged_filter"
:filter-function="table_filter_cb"
tbody-tr-class="cursor-pointer"
details-td-class="cursor-default"
@row-clicked="row_clicked"
:items="imap_details.items"
:fields="imap_details.fields">
<template #row-details="row">
<b-card>
<div><strong>Connection disposition</strong>: {{ disposition_formatter(row.item.disposition) }}</div>
<div><strong>Connection security</strong>: {{ row.item.connection_security }}</div>
<div><strong>Disconnect reason</strong>: {{ row.item.disconnect_reason }}</div>
</b-card>
</template>
</b-table>
</b-tab>

</b-tabs>
</div>

@@ -20,7 +20,8 @@ Vue.component('panel-user-activity', function(resolve, reject) {
Number(this.$route.query.tab) :
0;
return {
user_id: this.$route.query.user || '', /* v-model */
//user_id: this.$route.query.user || '', /* v-model */
user_id: '', /* v-model */
tab_index: start_tab, /* v-model */
show_only_flagged: false,
show_only_flagged_filter: null,
@@ -28,6 +29,7 @@ Vue.component('panel-user-activity', function(resolve, reject) {
data_date_range: null, /* date range for active table data */
sent_mail: null,
received_mail: null,
imap_details: null,
all_users: [],
disposition_formatter: ConnectionDisposition.formatter,
};
@@ -38,7 +40,7 @@ Vue.component('panel-user-activity', function(resolve, reject) {
const new_user = this.$route.query.user;

if (new_user && new_user != this.user_id) {
this.user_id = new_user;
this.sync_user_id(new_user);
this.getChartData(isNaN(new_tab) ? 0 : new_tab);
return;
}
@@ -64,6 +66,10 @@ Vue.component('panel-user-activity', function(resolve, reject) {
this.getChartData();
}
},

mounted: function() {
this.sync_user_id(this.$route.query.user || '');
},

methods: {
update_route: function() {
@@ -76,6 +82,13 @@ Vue.component('panel-user-activity', function(resolve, reject) {
this.$router.replace(route);
}
},

sync_user_id: function(user_id) {
// manually update "model" for <input> to avoid
// slowness with large tables
this.user_id = user_id;
this.$refs.user_id_input.value = user_id;
},

change_user: function() {
this.getChartData(0);
@@ -124,8 +137,11 @@ Vue.component('panel-user-activity', function(resolve, reject) {
combine_received_mail_fields: function() {
// remove these fields
this.received_mail.combine_fields([
'remote_host',
'remote_ip',
'dkim_reason',
'dmarc_reason',
'failure_info',
'postgrey_reason',
'postgrey_delay',
'spam_score',
@@ -143,6 +159,15 @@ Vue.component('panel-user-activity', function(resolve, reject) {
f.label = 'Envelope From (user)';
},

combine_imap_details_fields: function() {
// remove these fields
this.imap_details.combine_fields([
'disconnect_reason',
'connection_security',
]);
},


get_row_limit: function() {
return UserSettings.get().row_limit;
},
@@ -235,7 +260,18 @@ Vue.component('panel-user-activity', function(resolve, reject) {
this.received_mail
.flag_fields()
.get_field('connect_time')
.add_tdClass('text-nowrap');

/* setup imap_details */
this.imap_details = new MailBvTable(
response.data.imap_details, {
_showDetails: true
});
this.combine_imap_details_fields();
this.imap_details
.flag_fields()
.get_field('connect_time')
.add_tdClass('text-nowrap');

}).catch(error => {
this.$root.handleError(error);
@@ -6,6 +6,7 @@ class Timeseries(object):
# start_date: 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'
# start: 'YYYY-MM-DD HH:MM:SS'
self.start = self.full_datetime_str(start_date, False)
self.start_unixepoch = self.unix_time(self.start)

# end_date: 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'
# end: 'YYYY-MM-DD HH:MM:SS'
@@ -16,20 +17,12 @@ class Timeseries(object):

# timefmt is a format string for sqlite strftime() that puts a
# sqlite datetime into a "bin" date
self.timefmt='%Y-%m-%d'
self.timefmt = '%Y-%m-%d %H:%M:%S'

# parsefmt is a date parser string to be used to re-interpret
# "bin" grouping dates (data.dates) to native dates
parsefmt='%Y-%m-%d'

b = self.binsizeWithUnit()

if b['unit'] == 'hour':
self.timefmt+=' %H:00:00'
parsefmt+=' %H:%M:%S'
elif b['unit'] == 'minute':
self.timefmt+=' %H:%M:00'
parsefmt+=' %H:%M:%S'
# "bin" grouping dates (data.dates) to native dates. server
# always returns utc dates
parsefmt = '%Y-%m-%d %H:%M:%S'

self.dates = [] # dates must be "bin" date strings
self.series = []
@@ -54,6 +47,14 @@ class Timeseries(object):
d = d + datetime.timedelta(days=1)
return d.strftime('%Y-%m-%d 00:00:00')

def unix_time(self, full_datetime_str):
d = datetime.datetime.strptime(
full_datetime_str + ' UTC',
'%Y-%m-%d %H:%M:%S %Z'
)
return int(d.timestamp())


def binsizeWithUnit(self):
# normalize binsize (which is a time span in minutes)
days = int(self.binsize / (24 * 60))
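With timefmt now fixed at full datetime resolution, binning moves out of strftime() string truncation and into integer arithmetic on Unix epochs: each connect_time is snapped to the start of its bin by flooring its offset from start_unixepoch to a multiple of the binsize. A small Python sketch of that snapping, mirroring the SQL expression used by the .sql queries below (hypothetical function name):

    import datetime

    def snap_to_bin(connect_time: int, start_unixepoch: int, binsize_min: int) -> int:
        """Floor a Unix timestamp to the start of its bin.

        Bins are binsize_min minutes wide, aligned to start_unixepoch; this
        mirrors the SQL expression
          :start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch)
                                  / (60 * :binsize) as int) * (60 * :binsize)
        (SQL cast() truncates toward zero, but connect_time is never earlier
        than the range start here, so truncation and floor agree).
        """
        binsize_sec = 60 * binsize_min
        return start_unixepoch + ((connect_time - start_unixepoch) // binsize_sec) * binsize_sec

    start = int(datetime.datetime(2021, 1, 1, tzinfo=datetime.timezone.utc).timestamp())
    t = start + 3 * 3600 + 17 * 60                         # 03:17:00 into the range
    assert snap_to_bin(t, start, 60) == start + 3 * 3600   # 60-minute bins -> 03:00:00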
@@ -96,12 +97,17 @@ class Timeseries(object):

'''
i = bisect.bisect_right(self.dates, date_str)
if i == len(self.dates):
if len(self.dates)>0 and self.dates[i-1] == date_str:
return i-1
elif i == len(self.dates):
self.dates.append(date_str)
return i
if self.dates[i] == date_str:
return i
self.dates.insert(i, date_str)
else:
self.dates.insert(i, date_str)

''' add zero values to all series for the new date '''
for series in self.series:
series['values'].insert(i, 0)

return i

def add_series(self, id, name):
@@ -111,6 +117,8 @@ class Timeseries(object):
'values': []
}
self.series.append(s)
for date in self.dates:
s['values'].append(0)
return s

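Because the UNIONed per-service queries can each contribute bins the other lacks, rows no longer arrive strictly append-only; insert_date() bisects into the sorted date list, zero-fills every existing series at the new position, and returns the index so callers assign instead of append. The same logic as a standalone sketch (a stripped-down stand-in for Timeseries, not the real class):

    import bisect

    class BinIndex:
        """Sorted bin-date strings plus parallel per-series value lists."""
        def __init__(self):
            self.dates = []
            self.series = []   # each: {'values': [...]} kept aligned with dates

        def insert_date(self, date_str):
            i = bisect.bisect_right(self.dates, date_str)
            if i > 0 and self.dates[i-1] == date_str:
                return i - 1                      # already present
            self.dates.insert(i, date_str)        # new bin (append or mid-list)
            for s in self.series:
                s['values'].insert(i, 0)          # zero-fill every series
            return i

    idx = BinIndex()
    idx.series.append({'values': []})
    assert idx.insert_date('2021-01-02') == 0
    assert idx.insert_date('2021-01-01') == 0    # inserted before existing bin
    assert idx.insert_date('2021-01-02') == 1    # duplicate returns existing slot
    assert idx.series[0]['values'] == [0, 0]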
@@ -17,10 +17,10 @@ def capture_db_stats(conn):
if stats:
return stats

select_1 = 'SELECT min(connect_time) AS `min`, max(connect_time) AS `max`, count(*) AS `count` FROM mta_connection'
select_1 = 'SELECT min(`min`) AS `min`, max(`max`) AS `max`, sum(`count`) AS `count` FROM (SELECT min(connect_time) AS `min`, max(connect_time) AS `max`, count(*) AS `count` FROM mta_connection UNION SELECT min(connect_time) AS `min`, max(connect_time) AS `max`, count(*) AS `count` FROM imap_connection)'

# table scan
select_2 = 'SELECT disposition, count(*) AS `count` FROM mta_connection GROUP BY disposition'
select_2 = 'SELECT disposition, sum(count) as `count` FROM (SELECT disposition, count(*) AS `count` FROM mta_connection GROUP BY disposition UNION SELECT disposition, count(*) AS `count` FROM imap_connection GROUP BY disposition) GROUP BY disposition'

c = conn.cursor()
stats = {
@@ -29,7 +29,7 @@ def capture_db_stats(conn):
}
try:
row = c.execute(select_1).fetchone()
stats['mta_connect'] = {
stats['db_stats'] = {
'connect_time': {
'min': row['min'],
'max': row['max'], # YYYY-MM-DD HH:MM:SS (utc)
@@ -39,7 +39,7 @@ def capture_db_stats(conn):
}

for row in c.execute(select_2):
stats['mta_connect']['disposition'][row['disposition']] = {
stats['db_stats']['disposition'][row['disposition']] = {
'count': row['count']
}

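capture_db_stats() now folds the new imap_connection table into the same summary as mta_connection by aggregating each table separately and re-aggregating the UNIONed rows. The shape of select_1, runnable against an in-memory database (illustrative two-column schema, not the real capture schema):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.executescript('''
      CREATE TABLE mta_connection  (connect_time TEXT, disposition TEXT);
      CREATE TABLE imap_connection (connect_time TEXT, disposition TEXT);
      INSERT INTO mta_connection  VALUES ('2021-01-01 00:00:00','ok'), ('2021-01-02 00:00:00','reject');
      INSERT INTO imap_connection VALUES ('2021-01-03 00:00:00','ok');
    ''')
    # aggregate each table, then aggregate the union of the two summaries
    row = conn.execute('''
      SELECT min(`min`) AS `min`, max(`max`) AS `max`, sum(`count`) AS `count`
      FROM (
        SELECT min(connect_time) AS `min`, max(connect_time) AS `max`, count(*) AS `count` FROM mta_connection
        UNION
        SELECT min(connect_time) AS `min`, max(connect_time) AS `max`, count(*) AS `count` FROM imap_connection
      )
    ''').fetchone()
    assert row == ('2021-01-01 00:00:00', '2021-01-03 00:00:00', 3)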
@@ -2,13 +2,36 @@
-- returns count of failed_login_attempt in each 'bin', which is the
-- connection time rounded (as defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
count(*) AS `count`
FROM mta_connection
WHERE
disposition='failed_login_attempt' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
ORDER BY connect_time

SELECT bin, sum(count) AS `count`
FROM (
SELECT
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
count(*) AS `count`
FROM mta_connection
WHERE
disposition='failed_login_attempt' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY bin

UNION

SELECT
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
count(*) AS `count`
FROM imap_connection
WHERE
disposition='failed_login_attempt' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY bin
)
GROUP BY bin
ORDER BY bin
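One subtlety worth noting in these UNIONed timeseries queries: UNION removes duplicate rows, so if the mta and imap subselects ever produced an identical (bin, count) pair, one copy would be dropped before the outer sum. UNION ALL keeps both; a toy demonstration of the difference:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE a (bin TEXT, `count` INT)')
    conn.execute('CREATE TABLE b (bin TEXT, `count` INT)')
    row = ('2021-01-01 00:00:00', 5)               # same (bin, count) in both tables
    conn.execute('INSERT INTO a VALUES (?,?)', row)
    conn.execute('INSERT INTO b VALUES (?,?)', row)

    union = conn.execute(
        'SELECT sum(`count`) FROM (SELECT * FROM a UNION SELECT * FROM b)').fetchone()[0]
    union_all = conn.execute(
        'SELECT sum(`count`) FROM (SELECT * FROM a UNION ALL SELECT * FROM b)').fetchone()[0]
    assert union == 5        # duplicate row collapsed before the sum
    assert union_all == 10   # both rows kept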
@@ -2,13 +2,35 @@
-- returns count of suspected_scanner in each 'bin', which is the
-- connection time rounded (as defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
count(*) AS `count`
FROM mta_connection
WHERE
disposition='suspected_scanner' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
ORDER BY connect_time
SELECT bin, sum(count) AS `count`
FROM (
SELECT
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
count(*) AS `count`
FROM mta_connection
WHERE
disposition='suspected_scanner' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)

UNION

SELECT
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
count(*) AS `count`
FROM imap_connection
WHERE
disposition='suspected_scanner' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
)
GROUP BY bin
ORDER BY bin
management/reporting/uidata/flagged_connections.7.sql (new file, 19 lines)
@@ -0,0 +1,19 @@
-- pie chart for "connections by disposition"
--
-- returns a table of disposition along with its count

SELECT disposition, sum(count) AS `count`
FROM (
SELECT disposition, count(*) AS `count`
FROM mta_connection
WHERE connect_time>=:start_date AND connect_time<:end_date
GROUP BY disposition

UNION

SELECT disposition, count(*) AS `count`
FROM imap_connection
WHERE connect_time>=:start_date AND connect_time<:end_date
GROUP BY disposition
)
GROUP BY disposition
@@ -1,7 +1,10 @@
import logging
from .Timeseries import Timeseries
from .exceptions import InvalidArgsError
from .top import select_top

log = logging.getLogger(__name__)

with open(__file__.replace('.py','.1.sql')) as fp:
select_1 = fp.read()

@@ -20,6 +23,9 @@ with open(__file__.replace('.py','.5.sql')) as fp:
with open(__file__.replace('.py','.6.sql')) as fp:
select_6 = fp.read()

with open(__file__.replace('.py','.7.sql')) as fp:
select_7 = fp.read()


def flagged_connections(conn, args):
try:
@@ -35,9 +41,8 @@ def flagged_connections(conn, args):
c = conn.cursor()

# pie chart for "connections by disposition"
select = 'SELECT disposition, count(*) AS `count` FROM mta_connection WHERE connect_time>=:start_date AND connect_time<:end_date GROUP BY disposition'
connections_by_disposition = []
for row in c.execute(select, {'start_date':ts.start, 'end_date':ts.end}):
for row in c.execute(select_7, {'start_date':ts.start, 'end_date':ts.end}):
connections_by_disposition.append({
'name': row[0],
'value': row[1]
@@ -45,21 +50,27 @@ def flagged_connections(conn, args):

# timeseries = failed logins count
s_failed_login = ts.add_series('failed_login_attempt', 'failed login attempts')
for row in c.execute(select_1.format(timefmt=ts.timefmt), {
sql = select_1.format(timefmt=ts.timefmt)
for row in c.execute(sql, {
'start_date': ts.start,
'end_date': ts.end
'end_date': ts.end,
'start_unixepoch': ts.start_unixepoch,
'binsize': ts.binsize
}):
ts.append_date(row['bin'])
s_failed_login['values'].append(row['count'])
idx = ts.insert_date(row['bin'])
s_failed_login['values'][idx] = row['count']

# timeseries = suspected scanners count
s_scanner = ts.add_series('suspected_scanner', 'connections by suspected scanners')
for row in c.execute(select_2.format(timefmt=ts.timefmt), {
sql = select_2.format(timefmt=ts.timefmt)
for row in c.execute(sql, {
'start_date': ts.start,
'end_date': ts.end
'end_date': ts.end,
'start_unixepoch': ts.start_unixepoch,
'binsize': ts.binsize
}):
ts.insert_date(row['bin'])
s_scanner['values'].append(row['count'])
idx = ts.insert_date(row['bin'])
s_scanner['values'][idx] = row['count']


# pie chart for "disposition=='reject' grouped by failure_category"
@@ -3,7 +3,10 @@
-- the connection time rounded (as defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
count(*) AS `count`
FROM mta_accept
JOIN mta_connection ON mta_connection.mta_conn_id = mta_accept.mta_conn_id
@@ -11,5 +14,5 @@ WHERE
mta_connection.service = 'smtpd' AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
GROUP BY bin
ORDER BY connect_time
@@ -40,10 +40,12 @@ def messages_received(conn, args):
try:
for row in c.execute(select_1.format(timefmt=ts.timefmt), {
'start_date':ts.start,
'end_date':ts.end
'end_date':ts.end,
'start_unixepoch':ts.start_unixepoch,
'binsize':ts.binsize
}):
ts.append_date(row['bin'])
s_received['values'].append(row['count'])
idx = ts.insert_date(row['bin'])
s_received['values'][idx] = row['count']


# top 10 senders (envelope_from) by message count
@@ -3,7 +3,10 @@
-- time rounded (as defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) as `bin`,
count(*) AS `sent_count`
FROM mta_accept
JOIN mta_connection ON mta_connection.mta_conn_id = mta_accept.mta_conn_id
@@ -12,5 +15,5 @@ WHERE
(mta_connection.service = 'submission' OR mta_connection.service = 'pickup') AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time)
GROUP BY bin
ORDER BY connect_time
@@ -4,7 +4,10 @@
-- defined by {timefmt})
--
SELECT
strftime('{timefmt}',connect_time) AS `bin`,
strftime('{timefmt}',
:start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
'unixepoch'
) AS `bin`,
mta_delivery.service AS `delivery_service`,
count(*) AS `delivery_count`
FROM mta_accept
@@ -14,5 +17,5 @@ WHERE
(mta_connection.service = 'submission' OR mta_connection.service = 'pickup') AND
connect_time >= :start_date AND
connect_time < :end_date
GROUP BY strftime('{timefmt}',connect_time), mta_delivery.service
GROUP BY bin, mta_delivery.service
ORDER BY connect_time
@@ -38,10 +38,12 @@ def messages_sent(conn, args):
try:
for row in c.execute(select_1.format(timefmt=ts.timefmt), {
'start_date':ts.start,
'end_date':ts.end
'end_date':ts.end,
'start_unixepoch':ts.start_unixepoch,
'binsize':ts.binsize
}):
ts.dates.append(row['bin'])
s_sent['values'].append(row['sent_count'])
idx = ts.insert_date(row['bin'])
s_sent['values'][idx] = row['sent_count']

date_idx = -1

@@ -49,25 +51,16 @@ def messages_sent(conn, args):
# queries' WHERE and JOINs are the same
for row in c.execute(select_2.format(timefmt=ts.timefmt), {
'start_date':ts.start,
'end_date':ts.end
'end_date':ts.end,
'start_unixepoch':ts.start_unixepoch,
'binsize':ts.binsize
}):
if date_idx>=0 and ts.dates[date_idx] == row['bin']:
if row['delivery_service']=='smtp':
s_remote['values'][-1] = row['delivery_count']
elif row['delivery_service']=='lmtp':
s_local['values'][-1] = row['delivery_count']

else:
date_idx += 1
if date_idx >= len(ts.dates):
break
if row['delivery_service']=='smtp':
s_remote['values'].append(row['delivery_count'])
s_local['values'].append(0)
elif row['delivery_service']=='lmtp':
s_remote['values'].append(0)
s_local['values'].append(row['delivery_count'])

date_idx = ts.insert_date(row['bin'])
if row['delivery_service']=='smtp':
s_remote['values'][date_idx] = row['delivery_count']
elif row['delivery_service']=='lmtp':
s_local['values'][date_idx] = row['delivery_count']


top_senders1 = {
'start': ts.start,
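The delivery-service loop collapses from manual date_idx bookkeeping to three lines because insert_date() now does the alignment: each row's bin is located (or created and zero-filled) and the returned index is written into the matching series. Replaying that flow with the BinIndex sketch from earlier (illustrative data):

    # reuses the BinIndex sketch from the insert_date note above
    idx = BinIndex()
    s_remote = {'values': []}
    s_local = {'values': []}
    idx.series.extend([s_remote, s_local])

    rows = [('2021-01-01 00:00:00', 'smtp', 4),    # (bin, delivery_service, delivery_count)
            ('2021-01-01 00:00:00', 'lmtp', 7),
            ('2021-01-02 00:00:00', 'lmtp', 2)]

    for bin_, service, delivery_count in rows:
        i = idx.insert_date(bin_)                  # aligns both series on the same bins
        if service == 'smtp':
            s_remote['values'][i] = delivery_count
        elif service == 'lmtp':
            s_local['values'][i] = delivery_count

    assert s_remote['values'] == [4, 0]            # zero where smtp had no bin
    assert s_local['values'] == [7, 2]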
@@ -4,8 +4,10 @@
SELECT
-- mta_connection
connect_time, mta_connection.service AS service, sasl_username, disposition,
remote_host, remote_ip,
-- mta_accept
envelope_from, spf_result, dkim_result, dkim_reason, dmarc_result, dmarc_reason,
failure_info,
-- mta_delivery
postgrey_result, postgrey_reason, postgrey_delay, spam_score, spam_result, message_size, orig_to
FROM mta_accept
management/reporting/uidata/user_activity.3.sql (new file, 20 lines)
@@ -0,0 +1,20 @@
--
-- details on user imap connections
--
SELECT
connect_time,
CASE WHEN remote_host='unknown' THEN remote_ip ELSE remote_host END AS `remote_host`,
sasl_method,
disconnect_reason,
connection_security,
disposition,
in_bytes,
out_bytes
FROM
imap_connection
WHERE
sasl_username = :user_id AND
connect_time >= :start_date AND
connect_time < :end_date
ORDER BY
connect_time
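user_activity.3.sql is executed with sqlite3 named parameters (see user_activity.py below); the CASE expression substitutes the raw IP whenever the reverse lookup stored 'unknown'. A minimal harness showing how such a parameterized query behaves (illustrative table and data only):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.row_factory = sqlite3.Row
    conn.executescript('''
      CREATE TABLE imap_connection (
        sasl_username TEXT, connect_time TEXT,
        remote_host TEXT, remote_ip TEXT
      );
      INSERT INTO imap_connection VALUES
        ('alice','2021-01-01 10:00:00','unknown','10.0.0.9');
    ''')
    select_3 = """
      SELECT connect_time,
             CASE WHEN remote_host='unknown' THEN remote_ip
                  ELSE remote_host END AS `remote_host`
      FROM imap_connection
      WHERE sasl_username = :user_id
        AND connect_time >= :start_date AND connect_time < :end_date
      ORDER BY connect_time
    """
    row = conn.execute(select_3, {
        'user_id': 'alice',
        'start_date': '2021-01-01 00:00:00',
        'end_date': '2021-01-02 00:00:00',
    }).fetchone()
    assert row['remote_host'] == '10.0.0.9'   # IP substituted for 'unknown'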
@@ -7,6 +7,9 @@ with open(__file__.replace('.py','.1.sql')) as fp:
with open(__file__.replace('.py','.2.sql')) as fp:
select_2 = fp.read()

with open(__file__.replace('.py','.3.sql')) as fp:
select_3 = fp.read()


def user_activity(conn, args):
'''
@@ -110,6 +113,8 @@ def user_activity(conn, args):
'connect_time',
'service',
'sasl_username',
'remote_host',
'remote_ip',

# mta_accept
'envelope_from',
@@ -119,6 +124,7 @@ def user_activity(conn, args):
'dkim_reason',
'dmarc_result',
'dmarc_reason',
'failure_info',

# mta_delivery
'orig_to',
@@ -133,13 +139,16 @@ def user_activity(conn, args):
{ 'type':'datetime', 'format': '%Y-%m-%d %H:%M:%S' },# connect_time
'text/plain', # mta_connection.service
'text/email', # sasl_username
'text/plain', # remote_host
'text/plain', # remote_ip
'text/email', # envelope_from
'text/plain', # disposition
'text/plain', # spf_result
'text/plain', # dkim_result
'text/plain', # dkim_reason
'text/plain', # dmarc_result
'text/plain', # dmarc_reason
'text/plain', # failure_info
'text/email', # orig_to
'text/plain', # postgrey_result
'text/plain', # postgrey_reason
@@ -162,8 +171,51 @@ def user_activity(conn, args):
received_mail['items'].append(v)


#
# imap connections by user
#

imap_details = {
'start': ts.start,
'end': ts.end,
'y': 'IMAP Details',
'fields': [
'connect_time',
'remote_host',
'sasl_method',
'disconnect_reason',
'connection_security',
'disposition',
'in_bytes',
'out_bytes'
],
'field_types': [
{ 'type':'datetime', 'format': '%Y-%m-%d %H:%M:%S' },# connect_time
'text/plain', # remote_host
'text/plain', # sasl_method
'text/plain', # disconnect_reason
'text/plain', # connection_security
'text/plain', # disposition
'number/size', # in_bytes
'number/size', # out_bytes
],
'items': []
}

for row in c.execute(select_3 + limit, {
'user_id': user_id,
'start_date': ts.start,
'end_date': ts.end
}):
v = []
for key in imap_details['fields']:
v.append(row[key])
imap_details['items'].append(v)



return {
'sent_mail': sent_mail,
'received_mail': received_mail
'received_mail': received_mail,
'imap_details': imap_details
}