diff --git a/management/reporting/ui/chart-multi-line-timeseries.js b/management/reporting/ui/chart-multi-line-timeseries.js
index cb71eb36..5ee65304 100644
--- a/management/reporting/ui/chart-multi-line-timeseries.js
+++ b/management/reporting/ui/chart-multi-line-timeseries.js
@@ -57,7 +57,7 @@ Vue.component('chart-multi-line-timeseries', {
                     .text("no data");
             }
 
-            this.xscale = d3.scaleUtc()
+            this.xscale = d3.scaleTime()
                 .domain(d3.extent(this.tsdata.dates))
                 .nice()
                 .range([this.margin.left, this.width - this.margin.right])
diff --git a/management/reporting/ui/chart-stacked-bar-timeseries.js b/management/reporting/ui/chart-stacked-bar-timeseries.js
index ea78d4a8..bdac98e2 100644
--- a/management/reporting/ui/chart-stacked-bar-timeseries.js
+++ b/management/reporting/ui/chart-stacked-bar-timeseries.js
@@ -101,13 +101,14 @@ Vue.component('chart-stacked-bar-timeseries', {
                     .text("no data");
             }
 
-            this.xscale = d3.scaleUtc()
+            this.xscale = d3.scaleTime()
                 .domain(d3.extent(this.tsdata.dates))
                 .nice()
                 .range([this.margin.left, this.width - this.margin.right])
 
-            var barwidth = this.tsdata.barwidth(this.xscale, 1);
-            var padding = barwidth / 2;
+            var barwidth = this.tsdata.barwidth(this.xscale);
+            var padding_x = barwidth / 2;
+            var padding_y = ChartVue.get_yAxisLegendBounds(this.tsdata).height + 2;
 
             this.yscale = d3.scaleLinear()
                 .domain([
@@ -115,28 +116,30 @@
                     d3.sum(this.tsdata.series, s => d3.max(s.values))
                 ])
                 .range([
-                    this.height - this.margin.bottom,
+                    this.height - this.margin.bottom - padding_y,
                     this.margin.top,
                 ]);
-
-            svg.append("g")
-                .call(this.xAxis.bind(this, padding))
+
+            var g = svg.append("g")
+                .attr("transform", `translate(0, ${padding_y})`);
+
+            g.append("g")
+                .call(this.xAxis.bind(this, padding_x))
                 .attr("font-size", ChartPrefs.axis_font_size);
 
-            svg.append("g")
-                .call(this.yAxis.bind(this))
+            g.append("g")
+                .call(this.yAxis.bind(this, padding_y))
                 .attr("font-size", ChartPrefs.axis_font_size);
 
-            for (var s_idx=0; s_idx
-                .attr("x", d => this.xscale(d.data.date) - barwidth/2 + padding)
-                .attr("y", d => this.yscale(d[1]))
+                .attr("x", d => this.xscale(d.data.date) - barwidth/2 + padding_x)
+                .attr("y", d => this.yscale(d[1]) + padding_y)
                 .attr("height", d => this.yscale(d[0]) - this.yscale(d[1]))
                 .attr("width", barwidth)
                 .call( hover.bind(this) )
@@ -146,7 +149,13 @@
                 ;
             }
 
-            var hovinfo = svg.append("g");
+            g.append("g")
+                .attr("transform", `translate(${this.margin.left}, 0)`)
+                .call(
+                    g => ChartVue.add_yAxisLegend(g, this.tsdata, this.colors)
+                );
+
+            var hovinfo = g.append("g");
 
             function hover(rect) {
                 if ("ontouchstart" in document) rect
@@ -165,10 +174,11 @@
                 var s_name = this.tsdata.series[s_idx].name;
                 var v = d.data[s_name];
                 var x = Number(rect.attr('x')) + barwidth/2;
-
+                //var y = Number(rect.attr('y')) + Number(rect.attr('height'))/2;
+                var y = Number(rect.attr('y'));
                 hovinfo.attr(
                         "transform",
-                        `translate( ${x}, ${rect.attr('y')} )`)
+                        `translate( ${x}, ${y} )`)
                     .append('text')
                     .attr("font-family", ChartPrefs.default_font_family)
                     .attr("font-size", ChartPrefs.default_font_size)
@@ -203,18 +213,16 @@
             return x;
         },
 
-        yAxis: function(g) {
+        yAxis: function(padding, g) {
            var y = g.attr(
                 "transform",
-                `translate(${this.margin.left},0)`
+                `translate(${this.margin.left},${padding})`
            ).call(
                 d3.axisLeft(this.yscale)
                     .ticks(this.height/50)
-            ).call(g =>
-                g.select(".domain").remove()
-            ).call(g => {
-                ChartVue.add_yAxisLegend(g, this.tsdata, this.colors);
-            });
+            ).call(
+                g => g.select(".domain").remove()
+            );
             return y;
         },
diff --git a/management/reporting/ui/charting.js b/management/reporting/ui/charting.js
index 0c1500e4..8a097ba8 100644
--- a/management/reporting/ui/charting.js
+++ b/management/reporting/ui/charting.js
@@ -748,6 +748,15 @@ class ChartVue {
 
         return svg;
     }
+
+    static get_yAxisLegendBounds(data) {
+        const h = ChartPrefs.axis_font_size;
+        return {
+            width: h + 6,
+            height: h * data.series.length
+        };
+    }
+
     static add_yAxisLegend(g, data, colors) {
         //var gtick = g.select(".tick:last-of-type").append("g");
         const h = ChartPrefs.axis_font_size;
@@ -853,9 +862,8 @@
     }
 
     static binsizeOfRange(range) {
-        // target 100-120 datapoints
-        const target = 100;
-        const tolerance = 0.2;  // 20%
+        // target roughly 75 datapoints
+        const target = 75;
 
         if (typeof range[0] == 'string') {
             var parser = d3.utcParse('%Y-%m-%d %H:%M:%S');
@@ -865,27 +873,46 @@
         const span_min = Math.ceil(
             (range[1].getTime() - range[0].getTime()) / (1000*60*target)
         );
-        const bin_days = Math.floor(span_min / (24*60));
-        const bin_hours = Math.floor((span_min - bin_days*24*60) / 60);
+
+        var bin_days = Math.floor(span_min / (24*60));
+        var bin_hours = Math.floor((span_min - bin_days*24*60) / 60);
         if (bin_days >= 1) {
-            return bin_days * 24 * 60 +
-                (bin_hours > (24 * tolerance) ? bin_hours*60: 0);
+            if (bin_hours > 18) {
+                bin_days += 1;
+                bin_hours = 0;
+            }
+            else if (bin_hours > 6) {
+                bin_hours = 12;
+            }
+            else {
+                bin_hours = 0;
+            }
+            return bin_days * 24 * 60 + bin_hours*60;
         }
-        const bin_mins = span_min - bin_days*24*60 - bin_hours*60;
-        if (bin_hours >= 1) {
-            return bin_hours * 60 +
-                (bin_mins > (60 * tolerance) ? bin_mins: 0 );
+        var bin_mins = span_min - bin_days*24*60 - bin_hours*60;
+        if (bin_mins > 45) {
+            bin_hours += 1;
+            bin_mins = 0;
         }
-        return bin_mins;
+        else if (bin_mins > 15) {
+            bin_mins = 30;
+        }
+        else {
+            bin_mins = 0;
+        }
+        return bin_hours * 60 + bin_mins;
     }
 
-    barwidth(xscale, barspacing) {
+    barwidth(xscale, barspacing, max_width) {
         /* get the width of a bar in a bar chart */
-        var start = this.range[0];
-        var end = this.range[1];
-        var bins = (end.getTime() - start.getTime()) / (1000 * this.binsizeTimespan());
-        return Math.max(1, (xscale.range()[1] - xscale.range()[0])/bins - (barspacing || 0));
+        if (this.dates.length == 0) return 0; // no data
+        barspacing = (barspacing === undefined) ? 2 : barspacing;
+        max_width = (max_width === undefined) ? 75 : max_width;
+        var first_date = this.dates[0];
+        var last_date = this.dates[this.dates.length-1];
+        var bins = (last_date.getTime() - first_date.getTime()) / (1000 * 60 * this.binsize);
+        return Math.min(max_width, Math.max(1, (xscale(last_date) - xscale(first_date))/bins - barspacing));
     }
 
     formatDateTimeLong(d) {
diff --git a/management/reporting/ui/panel-messages-sent.js b/management/reporting/ui/panel-messages-sent.js
index f9d55f14..18248d86 100644
--- a/management/reporting/ui/panel-messages-sent.js
+++ b/management/reporting/ui/panel-messages-sent.js
@@ -50,7 +50,7 @@ Vue.component('panel-messages-sent', function(resolve, reject) {
         },
 
         height_recip: function() {
-            return this.height / 2;
+            return (this.height / 3) *2;
         },
 
         radius_recip_pie: function() {
diff --git a/management/reporting/ui/panel-user-activity.html b/management/reporting/ui/panel-user-activity.html
index d9068a05..4437db64 100644
--- a/management/reporting/ui/panel-user-activity.html
+++ b/management/reporting/ui/panel-user-activity.html
@@ -11,7 +11,7 @@
       Change user
-      * Tables limited to {{ get_row_limit() }} rows
+      * Tables limited to {{ get_row_limit() }} rows
       Flagged only
@@ -42,7 +42,7 @@
-
+
@@ -72,5 +72,31 @@
+
+
+
+
+
+
+
+
+
diff --git a/management/reporting/ui/panel-user-activity.js b/management/reporting/ui/panel-user-activity.js
index 96011a9b..dcb9e786 100644
--- a/management/reporting/ui/panel-user-activity.js
+++ b/management/reporting/ui/panel-user-activity.js
@@ -28,6 +28,7 @@ Vue.component('panel-user-activity', function(resolve, reject) {
             data_date_range: null, /* date range for active table data */
             sent_mail: null,
             received_mail: null,
+            imap_details: null,
             all_users: [],
             disposition_formatter: ConnectionDisposition.formatter,
         };
@@ -147,6 +148,15 @@ Vue.component('panel-user-activity', function(resolve, reject) {
            f.label = 'Envelope From (user)';
        },
 
+        combine_imap_details_fields: function() {
+            // remove these fields
+            this.imap_details.combine_fields([
+                'disconnect_reason',
+                'connection_security',
+            ]);
+        },
+
+
        get_row_limit: function() {
            return UserSettings.get().row_limit;
        },
@@ -239,7 +249,18 @@
                this.received_mail
                    .flag_fields()
                    .get_field('connect_time')
-                    .add_tdClass('text-nowrap');
+                    .add_tdClass('text-nowrap');
+
+                /* setup imap_details */
+                this.imap_details = new MailBvTable(
+                    response.data.imap_details, {
+                        _showDetails: true
+                    });
+                this.combine_imap_details_fields();
+                this.imap_details
+                    .flag_fields()
+                    .get_field('connect_time')
+                    .add_tdClass('text-nowrap');
 
            }).catch(error => {
                this.$root.handleError(error);
diff --git a/management/reporting/uidata/Timeseries.py b/management/reporting/uidata/Timeseries.py
index b6730f61..73575187 100644
--- a/management/reporting/uidata/Timeseries.py
+++ b/management/reporting/uidata/Timeseries.py
@@ -6,6 +6,7 @@ class Timeseries(object):
         # start_date: 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'
         # start: 'YYYY-MM-DD HH:MM:SS'
         self.start = self.full_datetime_str(start_date, False)
+        self.start_unixepoch = self.unix_time(self.start)
 
         # end_date: 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'
         # end: 'YYYY-MM-DD HH:MM:SS'
@@ -16,20 +17,12 @@
 
         # timefmt is a format string for sqlite strftime() that puts a
         # sqlite datetime into a "bin" date
-        self.timefmt='%Y-%m-%d'
+        self.timefmt = '%Y-%m-%d %H:%M:%S'
 
         # parsefmt is a date parser string to be used to re-interpret
-        # "bin" grouping dates (data.dates) to native dates
-        parsefmt='%Y-%m-%d'
-
-        b = self.binsizeWithUnit()
-
-        if b['unit'] == 'hour':
-            self.timefmt+=' %H:00:00'
-            parsefmt+=' %H:%M:%S'
-        elif b['unit'] == 'minute':
-            self.timefmt+=' %H:%M:00'
-            parsefmt+=' %H:%M:%S'
+        # "bin" grouping dates (data.dates) to native dates. server
+        # always returns utc dates
+        parsefmt = '%Y-%m-%d %H:%M:%S'
 
         self.dates = []    # dates must be "bin" date strings
         self.series = []
@@ -54,6 +47,14 @@
             d = d + datetime.timedelta(days=1)
         return d.strftime('%Y-%m-%d 00:00:00')
 
+    def unix_time(self, full_datetime_str):
+        d = datetime.datetime.strptime(
+            full_datetime_str + ' UTC',
+            '%Y-%m-%d %H:%M:%S %Z'
+        )
+        return int(d.timestamp())
+
+
     def binsizeWithUnit(self):
         # normalize binsize (which is a time span in minutes)
         days = int(self.binsize / (24 * 60))
@@ -96,12 +97,17 @@
         '''
         i = bisect.bisect_right(self.dates, date_str)
-        if i == len(self.dates):
+        if len(self.dates)>0 and self.dates[i-1] == date_str:
+            return i-1
+        elif i == len(self.dates):
             self.dates.append(date_str)
-            return i
-        if self.dates[i] == date_str:
-            return i
-        self.dates.insert(i, date_str)
+        else:
+            self.dates.insert(i, date_str)
+
+        ''' add zero values to all series for the new date '''
+        for series in self.series:
+            series['values'].insert(i, 0)
+
         return i
 
     def add_series(self, id, name):
@@ -111,6 +117,8 @@
             'values': []
         }
         self.series.append(s)
+        for date in self.dates:
+            s['values'].append(0)
         return s
diff --git a/management/reporting/uidata/flagged_connections.1.sql b/management/reporting/uidata/flagged_connections.1.sql
index 398bbfb6..452d57c9 100644
--- a/management/reporting/uidata/flagged_connections.1.sql
+++ b/management/reporting/uidata/flagged_connections.1.sql
@@ -2,13 +2,36 @@
 -- returns count of failed_login_attempt in each 'bin', which is the
 -- connection time rounded (as defined by {timefmt})
 --
-SELECT
-    strftime('{timefmt}',connect_time) AS `bin`,
-    count(*) AS `count`
-FROM mta_connection
-WHERE
-    disposition='failed_login_attempt' AND
-    connect_time >= :start_date AND
-    connect_time < :end_date
-GROUP BY strftime('{timefmt}',connect_time)
-ORDER BY connect_time
+
+SELECT bin, sum(count) AS `count`
+FROM (
+    SELECT
+        strftime('{timefmt}',
+            :start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
+            'unixepoch'
+        ) AS `bin`,
+        count(*) AS `count`
+    FROM mta_connection
+    WHERE
+        disposition='failed_login_attempt' AND
+        connect_time >= :start_date AND
+        connect_time < :end_date
+    GROUP BY bin
+
+    UNION
+
+    SELECT
+        strftime('{timefmt}',
+            :start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
+            'unixepoch'
+        ) AS `bin`,
+        count(*) AS `count`
+    FROM imap_connection
+    WHERE
+        disposition='failed_login_attempt' AND
+        connect_time >= :start_date AND
+        connect_time < :end_date
+    GROUP BY bin
+)
+GROUP BY bin
+ORDER BY bin
diff --git a/management/reporting/uidata/flagged_connections.2.sql b/management/reporting/uidata/flagged_connections.2.sql
index fb99a7f0..9a737a9e 100644
--- a/management/reporting/uidata/flagged_connections.2.sql
+++ b/management/reporting/uidata/flagged_connections.2.sql
@@ -2,13 +2,35 @@
 -- returns count of suspected_scanner in each 'bin', which is the
 -- connection time rounded (as defined by {timefmt})
 --
-SELECT
-    strftime('{timefmt}',connect_time) AS `bin`,
-    count(*) AS `count`
-FROM mta_connection
-WHERE
-    disposition='suspected_scanner' AND
-    connect_time >= :start_date AND
-    connect_time < :end_date
-GROUP BY strftime('{timefmt}',connect_time)
-ORDER BY connect_time
+SELECT bin, sum(count) AS `count`
+FROM (
+    SELECT
+        strftime('{timefmt}',
+            :start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
+            'unixepoch'
+        ) AS `bin`,
+        count(*) AS `count`
+    FROM mta_connection
+    WHERE
+        disposition='suspected_scanner' AND
+        connect_time >= :start_date AND
+        connect_time < :end_date
+    GROUP BY strftime('{timefmt}',connect_time)
+
+    UNION
+
+    SELECT
+        strftime('{timefmt}',
+            :start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
+            'unixepoch'
+        ) AS `bin`,
+        count(*) AS `count`
+    FROM imap_connection
+    WHERE
+        disposition='suspected_scanner' AND
+        connect_time >= :start_date AND
+        connect_time < :end_date
+    GROUP BY strftime('{timefmt}',connect_time)
+)
+GROUP BY bin
+ORDER BY bin
diff --git a/management/reporting/uidata/flagged_connections.7.sql b/management/reporting/uidata/flagged_connections.7.sql
new file mode 100644
index 00000000..85c7e6b4
--- /dev/null
+++ b/management/reporting/uidata/flagged_connections.7.sql
@@ -0,0 +1,19 @@
+-- pie chart for "connections by disposition"
+--
+-- returns a table of disposition along with its count
+
+SELECT disposition, sum(count) AS `count`
+FROM (
+    SELECT disposition, count(*) AS `count`
+    FROM mta_connection
+    WHERE connect_time>=:start_date AND connect_time<:end_date
+    GROUP by disposition
+
+    UNION
+
+    SELECT disposition, count(*) AS `count`
+    FROM imap_connection
+    WHERE connect_time>=:start_date AND connect_time<:end_date
+    GROUP BY disposition
+)
+GROUP BY disposition
diff --git a/management/reporting/uidata/flagged_connections.py b/management/reporting/uidata/flagged_connections.py
index 240952f9..cf147491 100644
--- a/management/reporting/uidata/flagged_connections.py
+++ b/management/reporting/uidata/flagged_connections.py
@@ -1,7 +1,10 @@
+import logging
 from .Timeseries import Timeseries
 from .exceptions import InvalidArgsError
 from .top import select_top
 
+log = logging.getLogger(__name__)
+
 with open(__file__.replace('.py','.1.sql')) as fp:
     select_1 = fp.read()
 
@@ -20,6 +23,9 @@ with open(__file__.replace('.py','.5.sql')) as fp:
 with open(__file__.replace('.py','.6.sql')) as fp:
     select_6 = fp.read()
 
+with open(__file__.replace('.py','.7.sql')) as fp:
+    select_7 = fp.read()
+
 
 def flagged_connections(conn, args):
     try:
@@ -35,9 +41,8 @@ def flagged_connections(conn, args):
     c = conn.cursor()
 
     # pie chart for "connections by disposition"
-    select = 'SELECT disposition, count(*) AS `count` FROM mta_connection WHERE connect_time>=:start_date AND connect_time<:end_date GROUP BY disposition'
     connections_by_disposition = []
-    for row in c.execute(select, {'start_date':ts.start, 'end_date':ts.end}):
+    for row in c.execute(select_7, {'start_date':ts.start, 'end_date':ts.end}):
         connections_by_disposition.append({
             'name': row[0],
             'value': row[1]
@@ -45,21 +50,27 @@
 
     # timeseries = failed logins count
     s_failed_login = ts.add_series('failed_login_attempt', 'failed login attempts')
-    for row in c.execute(select_1.format(timefmt=ts.timefmt), {
+    sql = select_1.format(timefmt=ts.timefmt)
+    for row in c.execute(sql, {
         'start_date': ts.start,
-        'end_date': ts.end
+        'end_date': ts.end,
+        'start_unixepoch': ts.start_unixepoch,
+        'binsize': ts.binsize
     }):
-        ts.append_date(row['bin'])
-        s_failed_login['values'].append(row['count'])
+        idx = ts.insert_date(row['bin'])
+        s_failed_login['values'][idx] = row['count']
 
     # timeseries = suspected scanners count
     s_scanner = ts.add_series('suspected_scanner', 'connections by suspected scanners')
-    for row in c.execute(select_2.format(timefmt=ts.timefmt), {
+    sql = select_2.format(timefmt=ts.timefmt)
+    for row in c.execute(sql, {
         'start_date': ts.start,
-        'end_date': ts.end
+        'end_date': ts.end,
+        'start_unixepoch': ts.start_unixepoch,
+        'binsize': ts.binsize
     }):
-        ts.insert_date(row['bin'])
-        s_scanner['values'].append(row['count'])
+        idx = ts.insert_date(row['bin'])
+        s_scanner['values'][idx] = row['count']
 
     # pie chart for "disposition=='reject' grouped by failure_category"
diff --git a/management/reporting/uidata/messages_received.1.sql b/management/reporting/uidata/messages_received.1.sql
index 382158a8..6162815c 100644
--- a/management/reporting/uidata/messages_received.1.sql
+++ b/management/reporting/uidata/messages_received.1.sql
@@ -3,7 +3,10 @@
 -- the connection time rounded (as defined by {timefmt})
 --
 SELECT
-    strftime('{timefmt}',connect_time) AS `bin`,
+    strftime('{timefmt}',
+        :start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
+        'unixepoch'
+    ) AS `bin`,
     count(*) AS `count`
 FROM mta_accept
 JOIN mta_connection ON mta_connection.mta_conn_id = mta_accept.mta_conn_id
@@ -11,5 +14,5 @@ WHERE
     mta_connection.service = 'smtpd' AND
     connect_time >= :start_date AND
     connect_time < :end_date
-GROUP BY strftime('{timefmt}',connect_time)
+GROUP BY bin
 ORDER BY connect_time
diff --git a/management/reporting/uidata/messages_received.py b/management/reporting/uidata/messages_received.py
index 84d68d37..da01761b 100644
--- a/management/reporting/uidata/messages_received.py
+++ b/management/reporting/uidata/messages_received.py
@@ -40,10 +40,12 @@ def messages_received(conn, args):
     try:
         for row in c.execute(select_1.format(timefmt=ts.timefmt), {
                 'start_date':ts.start,
-                'end_date':ts.end
+                'end_date':ts.end,
+                'start_unixepoch':ts.start_unixepoch,
+                'binsize':ts.binsize
         }):
-            ts.append_date(row['bin'])
-            s_received['values'].append(row['count'])
+            idx = ts.insert_date(row['bin'])
+            s_received['values'][idx] = row['count']
 
 
     # top 10 senders (envelope_from) by message count
diff --git a/management/reporting/uidata/messages_sent.1.sql b/management/reporting/uidata/messages_sent.1.sql
index eb11fc20..d6907744 100644
--- a/management/reporting/uidata/messages_sent.1.sql
+++ b/management/reporting/uidata/messages_sent.1.sql
@@ -3,7 +3,10 @@
 -- time rounded (as defined by {timefmt})
 --
 SELECT
-    strftime('{timefmt}',connect_time) AS `bin`,
+    strftime('{timefmt}',
+        :start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
+        'unixepoch'
+    ) as `bin`,
     count(*) AS `sent_count`
 FROM mta_accept
 JOIN mta_connection ON mta_connection.mta_conn_id = mta_accept.mta_conn_id
@@ -12,5 +15,5 @@ WHERE
     (mta_connection.service = 'submission' OR mta_connection.service = 'pickup') AND
     connect_time >= :start_date AND
     connect_time < :end_date
-GROUP BY strftime('{timefmt}',connect_time)
+GROUP BY bin
 ORDER BY connect_time
diff --git a/management/reporting/uidata/messages_sent.2.sql b/management/reporting/uidata/messages_sent.2.sql
index de7b6584..3cb9e648 100644
--- a/management/reporting/uidata/messages_sent.2.sql
+++ b/management/reporting/uidata/messages_sent.2.sql
@@ -4,7 +4,10 @@
 -- defined by {timefmt})
 --
 SELECT
-    strftime('{timefmt}',connect_time) AS `bin`,
+    strftime('{timefmt}',
+        :start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch) / (60 * :binsize) as int) * (60 * :binsize),
+        'unixepoch'
+    ) AS `bin`,
     mta_delivery.service AS `delivery_service`,
     count(*) AS `delivery_count`
 FROM mta_accept
@@ -14,5 +17,5 @@ WHERE
     (mta_connection.service = 'submission' OR mta_connection.service = 'pickup') AND
     connect_time >= :start_date AND
     connect_time < :end_date
-GROUP BY strftime('{timefmt}',connect_time), mta_delivery.service
+GROUP BY bin, mta_delivery.service
 ORDER BY connect_time
diff --git a/management/reporting/uidata/messages_sent.py b/management/reporting/uidata/messages_sent.py
index 0892b1a8..8a3c039b 100644
--- a/management/reporting/uidata/messages_sent.py
+++ b/management/reporting/uidata/messages_sent.py
@@ -38,10 +38,12 @@ def messages_sent(conn, args):
     try:
         for row in c.execute(select_1.format(timefmt=ts.timefmt), {
                 'start_date':ts.start,
-                'end_date':ts.end
+                'end_date':ts.end,
+                'start_unixepoch':ts.start_unixepoch,
+                'binsize':ts.binsize
         }):
-            ts.dates.append(row['bin'])
-            s_sent['values'].append(row['sent_count'])
+            idx = ts.insert_date(row['bin'])
+            s_sent['values'][idx] = row['sent_count']
 
         date_idx = -1
 
@@ -49,25 +51,16 @@
         # querie's WHERE and JOINs are the same
         for row in c.execute(select_2.format(timefmt=ts.timefmt), {
             'start_date':ts.start,
-            'end_date':ts.end
+            'end_date':ts.end,
+            'start_unixepoch':ts.start_unixepoch,
+            'binsize':ts.binsize
         }):
-            if date_idx>=0 and ts.dates[date_idx] == row['bin']:
-                if row['delivery_service']=='smtp':
-                    s_remote['values'][-1] = row['delivery_count']
-                elif row['delivery_service']=='lmtp':
-                    s_local['values'][-1] = row['delivery_count']
-
-            else:
-                date_idx += 1
-                if date_idx >= len(ts.dates):
-                    break
-                if row['delivery_service']=='smtp':
-                    s_remote['values'].append(row['delivery_count'])
-                    s_local['values'].append(0)
-                elif row['delivery_service']=='lmtp':
-                    s_remote['values'].append(0)
-                    s_local['values'].append(row['delivery_count'])
-
+            date_idx = ts.insert_date(row['bin'])
+            if row['delivery_service']=='smtp':
+                s_remote['values'][date_idx] = row['delivery_count']
+            elif row['delivery_service']=='lmtp':
+                s_local['values'][date_idx] = row['delivery_count']
+
 
     top_senders1 = {
         'start': ts.start,
diff --git a/management/reporting/uidata/user_activity.3.sql b/management/reporting/uidata/user_activity.3.sql
new file mode 100644
index 00000000..fbd98e25
--- /dev/null
+++ b/management/reporting/uidata/user_activity.3.sql
@@ -0,0 +1,20 @@
+--
+-- details on user imap connections
+--
+SELECT
+    connect_time,
+    CASE WHEN remote_host='unknown' THEN remote_ip ELSE remote_host END AS `remote_host`,
+    sasl_method,
+    disconnect_reason,
+    connection_security,
+    disposition,
+    in_bytes,
+    out_bytes
+FROM
+    imap_connection
+WHERE
+    sasl_username = :user_id AND
+    connect_time >= :start_date AND
+    connect_time < :end_date
+ORDER BY
+    connect_time
diff --git a/management/reporting/uidata/user_activity.py b/management/reporting/uidata/user_activity.py
index d1c0b49c..8d2ea945 100644
--- a/management/reporting/uidata/user_activity.py
+++ b/management/reporting/uidata/user_activity.py
@@ -7,6 +7,9 @@ with open(__file__.replace('.py','.1.sql')) as fp:
 with open(__file__.replace('.py','.2.sql')) as fp:
     select_2 = fp.read()
 
+with open(__file__.replace('.py','.3.sql')) as fp:
+    select_3 = fp.read()
+
 
 def user_activity(conn, args):
     '''
@@ -162,8 +165,51 @@
 
         received_mail['items'].append(v)
 
+
+    #
+    # imap connections by user
+    #
+
+    imap_details = {
+        'start': ts.start,
+        'end': ts.end,
+        'y': 'IMAP Details',
+        'fields': [
+            'connect_time',
+            'remote_host',
+            'sasl_method',
+            'disconnect_reason',
+            'connection_security',
+            'disposition',
+            'in_bytes',
+            'out_bytes'
+        ],
+        'field_types': [
+            { 'type':'datetime', 'format': '%Y-%m-%d %H:%M:%S' },# connect_time
+            'text/plain',  # remote_host
+            'text/plain',  # sasl_method
+            'text/plain',  # disconnect_reason
+            'text/plain',  # connection_security
+            'text/plain',  # disposition
+            'number/size', # in_bytes,
+            'number/size', # out_bytes,
+        ],
+        'items': []
+    }
+
+    for row in c.execute(select_3 + limit, {
+            'user_id': user_id,
+            'start_date': ts.start,
+            'end_date': ts.end
+    }):
+        v = []
+        for key in imap_details['fields']:
+            v.append(row[key])
+        imap_details['items'].append(v)
+
+
     return {
         'sent_mail': sent_mail,
-        'received_mail': received_mail
+        'received_mail': received_mail,
+        'imap_details': imap_details
     }
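
The new SQL above bins rows by snapping each connect_time to the start of its bin with integer arithmetic on unix timestamps, anchored at the report's start (:start_unixepoch) and sized by :binsize minutes. Below is a minimal Python sketch of that same arithmetic; the function and variable names are illustrative only and are not part of the patch.

    import datetime

    def bin_start(connect_time, start, binsize_minutes):
        # mirrors the SQL expression:
        #   :start_unixepoch + cast((strftime('%s',connect_time) - :start_unixepoch)
        #       / (60 * :binsize) as int) * (60 * :binsize)
        # cast(... as int) truncates toward zero; for connect_time >= start this
        # matches Python's floor division used here
        start_epoch = int(start.timestamp())
        bin_secs = 60 * binsize_minutes
        offset = int(connect_time.timestamp()) - start_epoch
        return datetime.datetime.fromtimestamp(
            start_epoch + (offset // bin_secs) * bin_secs,
            tz=datetime.timezone.utc
        )

    # example: 30-minute bins anchored at midnight UTC
    start = datetime.datetime(2021, 1, 1, tzinfo=datetime.timezone.utc)
    t = datetime.datetime(2021, 1, 1, 10, 47, 12, tzinfo=datetime.timezone.utc)
    print(bin_start(t, start, 30))  # 2021-01-01 10:30:00+00:00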