diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..593ae64e --- /dev/null +++ b/.editorconfig @@ -0,0 +1,30 @@ +# EditorConfig helps developers define and maintain consistent +# coding styles between different editors and IDEs +# editorconfig.org + +root = true + +[*] +indent_style = space +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[Makefile] +indent_style = tab +indent_size = 4 + +[Vagrantfile] +indent_size = 2 + +[*.rb] +indent_size = 2 + +[*.py] +indent_style = tab + +[*.js] +indent_size = 2 + diff --git a/.gitignore b/.gitignore index e22c1d9f..f3cdb1bc 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,4 @@ management/__pycache__/ tools/__pycache__/ externals/ .env +.vagrant diff --git a/CHANGELOG.md b/CHANGELOG.md index 7c99020b..65524f67 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,195 @@ CHANGELOG ========= +In Development +-------------- + +Mail: + +* The CardDAV plugin has been added to Roundcube so that your ownCloud contacts are available in webmail. +* Upgraded to Roundcube 1.2.4 and updated the persistent login plugin. +* Allow larger messages to be checked by SpamAssassin. +* Dovecot's vsz memory limit has been increased proportional to system memory. +* Newly set user passwords must be at least eight characters. + +ownCloud: + +* Upgraded to ownCloud 9.1.4. + +Control Panel/Management: + +* The status checks page crashed when the mailinabox.email website was down - that's fixed. +* Made nightly re-provisioning of TLS certificates less noisy. +* Fixed bugs in rsync backup method and in the list of recent backups. +* Fixed incorrect status checks errors about IPv6 addresses. +* Fixed incorrect status checks errors for secondary nameservers if round-robin custom A records are set. +* The management mail_log.py tool has been rewritten. + +DNS: + +* Added support for DSA, ED25519, and custom SSHFP records. 
+ +System: + +* The SSH fail2ban jail was not activated. + +Installation: + +* At the end of installation, the SHA256 -- rather than SHA1 -- hash of the system's TLS certificate is shown. + +v0.21c (February 1, 2017) +------------------------- + +Installations and upgrades started failing about 10 days ago with the error "ImportError: No module named 'packaging'" after an upstream package (Python's setuptools) was updated by its maintainers. The updated package conflicted with Ubuntu 14.04's version of another package (Python's pip). This update upgrades both packages to remove the conflict. + +If you already encountered the error during installation or upgrade of Mail-in-a-Box, this update may not correct the problem on your existing system. See https://discourse.mailinabox.email/t/v0-21c-release-fixes-python-package-installation-issue/1881 for help if the problem persists after upgrading to this version of Mail-in-a-Box. + +v0.21b (December 4, 2016) +------------------------- + +This update corrects a first-time installation issue introduced in v0.21 caused by the new Exchange/ActiveSync feature. + +v0.21 (November 30, 2016) +------------------------- + +This version updates ownCloud, which may include security fixes, and makes some other smaller improvements. + +Mail: + +* Header privacy filters were improperly running on the contents of forwarded email --- that's fixed. +* We have another go at fixing a long-standing issue with training the spam filter (because of a file permissions issue). +* Exchange/ActiveSync will now use your display name set in Roundcube in the From: line of outgoing email. + +ownCloud: + +* Updated ownCloud to version 9.1.1. + +Control panel: + +* Backups can now be made using rsync-over-ssh! +* Status checks failed if the system doesn't support iptables or doesn't have ufw installed. +* Added support for SSHFP records when sshd listens on non-standard ports. 
+* Recommendations for TLS certificate providers were removed now that everyone mostly uses Let's Encrypt. + +System: + +* Ubuntu's "Upgrade to 16.04" notice is suppressed since you should not do that. +* Lowered memory requirements to 512MB, display a warning if system memory is below 768MB. + +v0.20 (September 23, 2016) +-------------------------- + +ownCloud: + +* Updated to ownCloud to 8.2.7. + +Control Panel: + +* Fixed a crash that occurs when there are IPv6 DNS records due to a bug in dnspython 1.14.0. +* Improved the wonky low disk space check. + +v0.19b (August 20, 2016) +------------------------ + +This update corrects a security issue introduced in v0.18. + +* A remote code execution vulnerability is corrected in how the munin system monitoring graphs are generated for the control panel. The vulnerability involves an administrative user visiting a carefully crafted URL. + +v0.19a (August 18, 2016) +------------------------ + +This update corrects a security issue in v0.19. + +* fail2ban won't start if Roundcube had not yet been used - new installations probably do not have fail2ban running. + +v0.19 (August 13, 2016) +----------------------- + +Mail: + +* Roundcube is updated to version 1.2.1. +* SSLv3 and RC4 are now no longer supported in incoming and outgoing mail (SMTP port 25). + +Control panel: + +* The users and aliases APIs are now documented on their control panel pages. +* The HSTS header was missing. +* New status checks were added for the ufw firewall. + +DNS: + +* Add SRV records for CardDAV/CalDAV to facilitate autoconfiguration (e.g. in DavDroid, whose latest version didn't seem to work to configure with entering just a hostname). + +System: + +* fail2ban jails added for SMTP submission, Roundcube, ownCloud, the control panel, and munin. +* Mail-in-a-Box can now be installed on the i686 architecture. 
+ +v0.18c (June 2, 2016) +--------------------- + +* Domain aliases (and misconfigured aliases/catch-alls with non-existent local targets) would accept mail and deliver it to new mailbox folders on disk even if the target address didn't correspond with an existing mail user, instead of rejecting the mail. This issue was introduced in v0.18. +* The Munin Monitoring link in the control panel now opens a new window. +* Added an undocumented before-backup script. + +v0.18b (May 16, 2016) +--------------------- + +* Fixed a Roundcube user accounts issue introduced in v0.18. + +v0.18 (May 15, 2016) +-------------------- + +ownCloud: + +* Updated to ownCloud to 8.2.3 + +Mail: + +* Roundcube is updated to version 1.1.5 and the Roundcube login screen now says "[hostname] Webmail" instead of "Mail-in-a-Box/Roundcube webmail". +* Fixed a long-standing issue with training the spam filter not working (because of a file permissions issue). + +Control panel: + +* Munin system monitoring graphs are now zoomable. +* When a reboot is required (due to Ubuntu security updates automatically installed), a Reboot Box button now appears on the System Status Checks page of the control panel. +* It is now possible to add SRV and secondary MX records in the Custom DNS page. +* Other minor fixes. + +System: + +* The fail2ban recidive jail, which blocks long-duration brute force attacks, now no longer sends the administrator emails (which were not helpful). + +Setup: + +* The system hostname is now set during setup. +* A swap file is now created if system memory is less than 2GB, 5GB of free disk space is available, and if no swap file yet exists. +* We now install Roundcube from the official GitHub repository instead of our own mirror, which we had previously created to solve problems with SourceForge. +* DKIM was incorrectly set up on machines where "localhost" was defined as something other than "127.0.0.1". 
+ +v0.17c (April 1, 2016) +---------------------- + +This update addresses some minor security concerns and some installation issues. + +ownCloud: + +* Block web access to the configuration parameters (config.php). There is no immediate impact (see [#776](https://github.com/mail-in-a-box/mailinabox/pull/776)), although advanced users may want to take note. + +Mail: + +* Roundcube html5_notifier plugin updated from version 0.6 to 0.6.2 to fix Roundcube getting stuck for some people. + +Control panel: + +* Prevent click-jacking of the management interface by adding HTTP headers. +* Failed login no longer reveals whether an account exists on the system. + +Setup: + +* Setup dialogs did not appear correctly when connecting to SSH using Putty on Windows. +* We now install Roundcube from our own mirror because Sourceforge's downloads experience frequent intermittent unavailability. + v0.17b (March 1, 2016) ---------------------- @@ -39,7 +228,6 @@ v0.16 (January 30, 2016) ------------------------ This update primarily adds automatic SSL (now "TLS") certificate provisioning from Let's Encrypt (https://letsencrypt.org/). -* The Sieve port is now open so tools like the Thunderbird Sieve program can be used to edit mail filters. Control Panel: @@ -478,4 +666,4 @@ v0.02 (September 21, 2014) -------------------------- v0.01 (August 19, 2014) ----------------------- -First release. +First versioned release after a year of unversioned development. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..2efdfdb9 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,48 @@ +# Mail-in-a-Box Code of Conduct + +Mail-in-a-Box is an open source community project about working, as a group, to empower ourselves and others to have control over our own digital communications. 
Just as we hope to increase technological diversity on the Internet through decentralization, we also believe that diverse viewpoints and voices among our community members foster innovation and creative solutions to the challenges we face. + +We are committed to providing a safe, welcoming, and harassment-free space for collaboration, for everyone, without regard to age, disability, economic situation, ethnicity, gender identity and expression, language fluency, level of knowledge or experience, nationality, personal appearance, race, religion, sexual identity and orientation, or any other attribute. Community comes first. This policy supersedes all other project goals. + +The maintainers of Mail-in-a-Box share the dual responsibility of leading by example and enforcing these policies as necessary to maintain an open and welcoming environment. All community members should be excellent to each other. + +## Scope + +This Code of Conduct applies to all places where Mail-in-a-Box community activity is occurring, including on GitHub, in discussion forums, on Slack, on social media, and in real life. The Code of Conduct applies not only on websites/at events run by the Mail-in-a-Box community (e.g. our GitHub organization, our Slack team) but also at any other location where the Mail-in-a-Box community is present (e.g. in issues of other GitHub organizations where Mail-in-a-Box community members are discussing problems related to Mail-in-a-Box, or real-life professional conferences), or whenever a Mail-in-a-Box community member is representing Mail-in-a-Box to the public at large or acting on behalf of Mail-in-a-Box. + +This code does not apply to activity on a server running Mail-in-a-Box software, unless your server is hosting a service for the Mail-in-a-Box community at large. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Showing empathy towards other community members +* Making room for new and quieter voices + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or advances +* Trolling, insulting/derogatory/unwelcome comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Aggressive and micro-aggressive behavior, such as unconstructive criticism, providing corrections that do not improve the conversation (sometimes referred to as "well actually"s), repeatedly interrupting or talking over someone else, feigning surprise at someone's lack of knowledge or awareness about a topic, or subtle prejudice (for example, comments like "That's so easy my grandmother could do it.", which is prejudicial toward grandmothers). +* Other conduct which could reasonably be considered inappropriate in a professional setting +* Retaliating against anyone who reports a violation of this code. + +We will not tolerate harassment. Harassment is any unwelcome or hostile behavior towards another person for any reason. This includes, but is not limited to, offensive verbal comments related to personal characteristics or choices, sexual images or comments, deliberate intimidation, bullying, stalking, following, harassing photography or recording, sustained disruption of discussion or events, nonconsensual publication of private comments, inappropriate physical contact, or unwelcome sexual attention. Conduct need not be intentional to be harassment. 
+ +## Enforcement + +We will remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not consistent with this Code of Conduct. We may ban, temporarily or permanently, any contributor for violating this code, when appropriate. + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project lead, [Joshua Tauberer](https://razor.occams.info/). All reports will be treated confidentially, impartially, consistently, and swiftly. + +Because the need for confidentiality for all parties involved in an enforcement action outweighs the goals of openness, limited information will be shared with the Mail-in-a-Box community regarding enforcement actions that have taken place. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant, version 1.4](http://contributor-covenant.org/version/1/4) and the code of conduct of [Code for DC](http://codefordc.org/resources/codeofconduct.html). + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a0b40f9c..8e8614a3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -5,3 +5,7 @@ This project is in the public domain. Copyright and related rights in the work w All contributions to this project must be released under the same CC0 wavier. By submitting a pull request or patch, you are agreeing to comply with this waiver of copyright interest. [CC0]: http://creativecommons.org/publicdomain/zero/1.0/ + +## Code of Conduct + +This project has a [Code of Conduct](CODE_OF_CONDUCT.md). Please review it when joining our community. diff --git a/README.md b/README.md index d8329054..81d8952d 100644 --- a/README.md +++ b/README.md @@ -9,15 +9,15 @@ Mail-in-a-Box helps individuals take back control of their email by defining a o * * * -I am trying to: +Our goals are to: * Make deploying a good mail server easy. * Promote [decentralization](http://redecentralize.org/), innovation, and privacy on the web. 
-* Have automated, auditable, and [idempotent](http://sharknet.us/2014/02/01/automated-configuration-management-challenges-with-idempotency/) configuration. +* Have automated, auditable, and [idempotent](https://sharknet.us/2014/02/01/automated-configuration-management-challenges-with-idempotency/) configuration. * **Not** make a totally unhackable, NSA-proof server. * **Not** make something customizable by power users. -This setup is what has been powering my own personal email since September 2013. +Additionally, this project has a [Code of Conduct](CODE_OF_CONDUCT.md), which supersedes the goals above. Please review it when joining our community. The Box ------- @@ -28,10 +28,10 @@ It is a one-click email appliance. There are no user-configurable setup options. The components installed are: -* SMTP ([postfix](http://www.postfix.org/)), IMAP ([dovecot](http://dovecot.org/)), CardDAV/CalDAV ([ownCloud](http://owncloud.org/)), Exchange ActiveSync ([z-push](https://github.com/fmbiete/Z-Push-contrib)) +* SMTP ([postfix](http://www.postfix.org/)), IMAP ([dovecot](http://dovecot.org/)), CardDAV/CalDAV ([ownCloud](https://owncloud.org/)), Exchange ActiveSync ([z-push](https://github.com/fmbiete/Z-Push-contrib)) * Webmail ([Roundcube](http://roundcube.net/)), static website hosting ([nginx](http://nginx.org/)) * Spam filtering ([spamassassin](https://spamassassin.apache.org/)), greylisting ([postgrey](http://postgrey.schweikert.ch/)) -* DNS ([nsd4](http://www.nlnetlabs.nl/projects/nsd/)) with [SPF](https://en.wikipedia.org/wiki/Sender_Policy_Framework), DKIM ([OpenDKIM](http://www.opendkim.org/)), [DMARC](https://en.wikipedia.org/wiki/DMARC), [DNSSEC](https://en.wikipedia.org/wiki/DNSSEC), [DANE TLSA](https://en.wikipedia.org/wiki/DNS-based_Authentication_of_Named_Entities), and [SSHFP](https://tools.ietf.org/html/rfc4255) records automatically set +* DNS ([nsd4](https://www.nlnetlabs.nl/projects/nsd/)) with [SPF](https://en.wikipedia.org/wiki/Sender_Policy_Framework), 
DKIM ([OpenDKIM](http://www.opendkim.org/)), [DMARC](https://en.wikipedia.org/wiki/DMARC), [DNSSEC](https://en.wikipedia.org/wiki/DNSSEC), [DANE TLSA](https://en.wikipedia.org/wiki/DNS-based_Authentication_of_Named_Entities), and [SSHFP](https://tools.ietf.org/html/rfc4255) records automatically set +* Backups ([duplicity](http://duplicity.nongnu.org/)), firewall ([ufw](https://launchpad.net/ufw)), intrusion protection ([fail2ban](http://www.fail2ban.org/wiki/index.php/Main_Page)), system monitoring ([munin](http://munin-monitoring.org/)) It also includes: @@ -59,7 +59,7 @@ by me: $ curl -s https://keybase.io/joshdata/key.asc | gpg --import gpg: key C10BDD81: public key "Joshua Tauberer <jt@occams.info>" imported - $ git verify-tag v0.17b + $ git verify-tag v0.21c gpg: Signature made ..... using RSA key ID C10BDD81 gpg: Good signature from "Joshua Tauberer <jt@occams.info>" gpg: WARNING: This key is not certified with a trusted signature! @@ -72,7 +72,7 @@ and on my [personal homepage](https://razor.occams.info/). (Of course, if this r Checkout the tag corresponding to the most recent release: - $ git checkout v0.17b + $ git checkout v0.21c Begin the installation. @@ -85,7 +85,7 @@ Post your question on the [discussion forum](https://discourse.mailinabox.email/ The Acknowledgements -------------------- -This project was inspired in part by the ["NSA-proof your email in 2 hours"](http://sealedabstract.com/code/nsa-proof-your-e-mail-in-2-hours/) blog post by Drew Crawford, [Sovereign](https://github.com/al3x/sovereign) by Alex Payne, and conversations with @shevski, @konklone, and @GregElin. +This project was inspired in part by the ["NSA-proof your email in 2 hours"](http://sealedabstract.com/code/nsa-proof-your-e-mail-in-2-hours/) blog post by Drew Crawford, [Sovereign](https://github.com/sovereign/sovereign) by Alex Payne, and conversations with @shevski, @konklone, and @GregElin. Mail-in-a-Box is similar to [iRedMail](http://www.iredmail.org/) and [Modoboa](https://github.com/tonioo/modoboa). 
@@ -95,5 +95,5 @@ The History * In 2007 I wrote a relatively popular Mozilla Thunderbird extension that added client-side SPF and DKIM checks to mail to warn users about possible phishing: [add-on page](https://addons.mozilla.org/en-us/thunderbird/addon/sender-verification-anti-phish/), [source](https://github.com/JoshData/thunderbird-spf). * In August 2013 I began Mail-in-a-Box by combining my own mail server configuration with the setup in ["NSA-proof your email in 2 hours"](http://sealedabstract.com/code/nsa-proof-your-e-mail-in-2-hours/) and making the setup steps reproducible with bash scripts. * Mail-in-a-Box was a semifinalist in the 2014 [Knight News Challenge](https://www.newschallenge.org/challenge/2014/submissions/mail-in-a-box), but it was not selected as a winner. -* Mail-in-a-Box hit the front page of Hacker News in [April](https://news.ycombinator.com/item?id=7634514) 2014, [September](https://news.ycombinator.com/item?id=8276171) 2014, and [May](https://news.ycombinator.com/item?id=9624267) 2015. +* Mail-in-a-Box hit the front page of Hacker News in [April](https://news.ycombinator.com/item?id=7634514) 2014, [September](https://news.ycombinator.com/item?id=8276171) 2014, [May](https://news.ycombinator.com/item?id=9624267) 2015, and [November](https://news.ycombinator.com/item?id=13050500) 2016. * FastCompany mentioned Mail-in-a-Box a [roundup of privacy projects](http://www.fastcompany.com/3047645/your-own-private-cloud) on June 26, 2015. diff --git a/Vagrantfile b/Vagrantfile index c6ef0ab9..b4bcb257 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -5,23 +5,27 @@ Vagrant.configure("2") do |config| config.vm.box = "ubuntu14.04" config.vm.box_url = "http://cloud-images.ubuntu.com/vagrant/trusty/current/trusty-server-cloudimg-amd64-vagrant-disk1.box" + if Vagrant.has_plugin?("vagrant-cachier") + # Configure cached packages to be shared between instances of the same base box. 
+ # More info on http://fgrehm.viewdocs.io/vagrant-cachier/usage + config.cache.scope = :box + end + # Network config: Since it's a mail server, the machine must be connected # to the public web. However, we currently don't want to expose SSH since # the machine's box will let anyone log into it. So instead we'll put the # machine on a private network. - config.vm.hostname = "mailinabox" + config.vm.hostname = "mailinabox.lan" config.vm.network "private_network", ip: "192.168.50.4" config.vm.provision :shell, :inline => <<-SH # Set environment variables so that the setup script does # not ask any questions during provisioning. We'll let the - # machine figure out its own public IP and it'll take a - # subdomain on our justtesting.email domain so we can get - # started quickly. + # machine figure out its own public IP. export NONINTERACTIVE=1 export PUBLIC_IP=auto export PUBLIC_IPV6=auto - export PRIMARY_HOSTNAME=auto-easy + export PRIMARY_HOSTNAME=auto #export SKIP_NETWORK_CHECKS=1 # Start the setup script. 
diff --git a/conf/fail2ban/dovecotimap.conf b/conf/fail2ban/filter.d/dovecotimap.conf similarity index 100% rename from conf/fail2ban/dovecotimap.conf rename to conf/fail2ban/filter.d/dovecotimap.conf diff --git a/conf/fail2ban/filter.d/miab-management-daemon.conf b/conf/fail2ban/filter.d/miab-management-daemon.conf new file mode 100644 index 00000000..0b0489c2 --- /dev/null +++ b/conf/fail2ban/filter.d/miab-management-daemon.conf @@ -0,0 +1,12 @@ +# Fail2Ban filter Mail-in-a-Box management daemon + +[INCLUDES] + +before = common.conf + +[Definition] + +_daemon = mailinabox + +failregex = Mail-in-a-Box Management Daemon: Failed login attempt from ip <HOST> - timestamp .* +ignoreregex = diff --git a/conf/fail2ban/filter.d/miab-munin.conf b/conf/fail2ban/filter.d/miab-munin.conf new file mode 100644 index 00000000..b254cc62 --- /dev/null +++ b/conf/fail2ban/filter.d/miab-munin.conf @@ -0,0 +1,7 @@ +[INCLUDES] + +before = common.conf + +[Definition] +failregex=<HOST> - .*GET /admin/munin/.* HTTP/1.1\" 401.* +ignoreregex = diff --git a/conf/fail2ban/filter.d/miab-owncloud.conf b/conf/fail2ban/filter.d/miab-owncloud.conf new file mode 100644 index 00000000..a9a13f2c --- /dev/null +++ b/conf/fail2ban/filter.d/miab-owncloud.conf @@ -0,0 +1,7 @@ +[INCLUDES] + +before = common.conf + +[Definition] +failregex=Login failed: .*Remote IP: '<HOST>[\)'] +ignoreregex = diff --git a/conf/fail2ban/filter.d/miab-postfix-submission.conf b/conf/fail2ban/filter.d/miab-postfix-submission.conf new file mode 100644 index 00000000..236e1331 --- /dev/null +++ b/conf/fail2ban/filter.d/miab-postfix-submission.conf @@ -0,0 +1,7 @@ +[INCLUDES] + +before = common.conf + +[Definition] +failregex=postfix/submission/smtpd.*warning.*\[<HOST>\]: .* authentication (failed|aborted) +ignoreregex = diff --git a/conf/fail2ban/filter.d/miab-roundcube.conf b/conf/fail2ban/filter.d/miab-roundcube.conf new file mode 100644 index 00000000..c6979c85 --- /dev/null +++ b/conf/fail2ban/filter.d/miab-roundcube.conf @@ -0,0 +1,9 @@ 
+[INCLUDES] + +before = common.conf + +[Definition] + +failregex = IMAP Error: Login failed for .*? from <HOST>\. AUTHENTICATE.* + +ignoreregex = diff --git a/conf/fail2ban/jail.local b/conf/fail2ban/jail.local deleted file mode 100644 index b9340e52..00000000 --- a/conf/fail2ban/jail.local +++ /dev/null @@ -1,29 +0,0 @@ -# Fail2Ban configuration file for Mail-in-a-Box - -[DEFAULT] -# Whitelist our own IP addresses. 127.0.0.1/8 is the default. But our status checks -# ping services over the public interface so we should whitelist that address of -# ours too. The string is substituted during installation. -ignoreip = 127.0.0.1/8 PUBLIC_IP - -# JAILS - -[ssh] -maxretry = 7 -bantime = 3600 - -[ssh-ddos] -enabled = true - -[sasl] -enabled = true - -[dovecot] -enabled = true -filter = dovecotimap -findtime = 30 -maxretry = 20 - -[recidive] -enabled = true -maxretry = 10 diff --git a/conf/fail2ban/jails.conf b/conf/fail2ban/jails.conf new file mode 100644 index 00000000..290a75bb --- /dev/null +++ b/conf/fail2ban/jails.conf @@ -0,0 +1,81 @@ +# Fail2Ban configuration file for Mail-in-a-Box. Do not edit. +# This file is re-generated on updates. + +[DEFAULT] +# Whitelist our own IP addresses. 127.0.0.1/8 is the default. But our status checks +# ping services over the public interface so we should whitelist that address of +# ours too. The string is substituted during installation. 
+ignoreip = 127.0.0.1/8 PUBLIC_IP + +[dovecot] +enabled = true +filter = dovecotimap +logpath = /var/log/mail.log +findtime = 30 +maxretry = 20 + +[miab-management] +enabled = true +filter = miab-management-daemon +port = http,https +logpath = /var/log/syslog +maxretry = 20 +findtime = 30 + +[miab-munin] +enabled = true +port = http,https +filter = miab-munin +logpath = /var/log/nginx/access.log +maxretry = 20 +findtime = 30 + +[miab-owncloud] +enabled = true +port = http,https +filter = miab-owncloud +logpath = STORAGE_ROOT/owncloud/owncloud.log +maxretry = 20 +findtime = 120 + +[miab-postfix587] +enabled = true +port = 587 +filter = miab-postfix-submission +logpath = /var/log/mail.log +maxretry = 20 +findtime = 30 + +[miab-roundcube] +enabled = true +port = http,https +filter = miab-roundcube +logpath = /var/log/roundcubemail/errors +maxretry = 20 +findtime = 30 + +[recidive] +enabled = true +maxretry = 10 +action = iptables-allports[name=recidive] +# In the recidive section of jail.conf the action contains: +# +# action = iptables-allports[name=recidive] +# sendmail-whois-lines[name=recidive, logpath=/var/log/fail2ban.log] +# +# The last line on the action will send an email to the configured address. This mail will +# notify the administrator that someone has been repeatedly triggering one of the other jails. +# By default we don't configure this address and no action is required from the admin anyway. +# So the notification is omitted. This will prevent messages appearing in the mail.log that mail +# can't be delivered to fail2ban@$HOSTNAME. 
+ +[sasl] +enabled = true + +[ssh] +enabled = true +maxretry = 7 +bantime = 3600 + +[ssh-ddos] +enabled = true diff --git a/conf/nginx-primaryonly.conf b/conf/nginx-primaryonly.conf index 2fb9972e..eb446251 100644 --- a/conf/nginx-primaryonly.conf +++ b/conf/nginx-primaryonly.conf @@ -6,6 +6,10 @@ location /admin/ { proxy_pass http://127.0.0.1:10222/; proxy_set_header X-Forwarded-For $remote_addr; + add_header X-Frame-Options "DENY"; + add_header X-Content-Type-Options nosniff; + add_header Content-Security-Policy "frame-ancestors 'none';"; + add_header Strict-Transport-Security max-age=31536000; } # ownCloud configuration. @@ -15,8 +19,11 @@ rewrite ^(/cloud/core/doc/[^\/]+/)$ $1/index.html; location /cloud/ { alias /usr/local/lib/owncloud/; - location ~ ^/(data|config|\.ht|db_structure\.xml|README) { - deny all; + location ~ ^/cloud/(build|tests|config|lib|3rdparty|templates|data|README)/ { + deny all; + } + location ~ ^/cloud/(?:\.|autotest|occ|issue|indie|db_|console) { + deny all; } } location ~ ^(/cloud)((?:/ocs)?/[^/]+\.php)(/.*)?$ { diff --git a/conf/zpush/backend_caldav.php b/conf/zpush/backend_caldav.php index 7bddded9..b10ebc3e 100644 --- a/conf/zpush/backend_caldav.php +++ b/conf/zpush/backend_caldav.php @@ -6,7 +6,7 @@ ************************************************/ define('CALDAV_PROTOCOL', 'https'); -define('CALDAV_SERVER', 'localhost'); +define('CALDAV_SERVER', '127.0.0.1'); define('CALDAV_PORT', '443'); define('CALDAV_PATH', '/caldav/calendars/%u/'); define('CALDAV_PERSONAL', 'PRINCIPAL'); diff --git a/conf/zpush/backend_carddav.php b/conf/zpush/backend_carddav.php index edf32901..4b166ad5 100644 --- a/conf/zpush/backend_carddav.php +++ b/conf/zpush/backend_carddav.php @@ -7,7 +7,7 @@ define('CARDDAV_PROTOCOL', 'https'); /* http or https */ -define('CARDDAV_SERVER', 'localhost'); +define('CARDDAV_SERVER', '127.0.0.1'); define('CARDDAV_PORT', '443'); define('CARDDAV_PATH', '/carddav/addressbooks/%u/'); define('CARDDAV_DEFAULT_PATH', 
'/carddav/addressbooks/%u/contacts/'); /* subdirectory of the main path */ diff --git a/conf/zpush/backend_imap.php b/conf/zpush/backend_imap.php index 3f69f53e..b1867625 100644 --- a/conf/zpush/backend_imap.php +++ b/conf/zpush/backend_imap.php @@ -5,10 +5,10 @@ * Descr : IMAP backend configuration file ************************************************/ -define('IMAP_SERVER', 'localhost'); +define('IMAP_SERVER', '127.0.0.1'); define('IMAP_PORT', 993); define('IMAP_OPTIONS', '/ssl/norsh/novalidate-cert'); -define('IMAP_DEFAULTFROM', ''); +define('IMAP_DEFAULTFROM', 'sql'); define('SYSTEM_MIME_TYPES_MAPPING', '/etc/mime.types'); define('IMAP_AUTOSEEN_ON_DELETE', false); @@ -23,15 +23,16 @@ define('IMAP_FOLDER_TRASH', 'TRASH'); define('IMAP_FOLDER_SPAM', 'SPAM'); define('IMAP_FOLDER_ARCHIVE', 'ARCHIVE'); - -// not used -define('IMAP_FROM_SQL_DSN', ''); +define('IMAP_FROM_SQL_DSN', 'sqlite:STORAGE_ROOT/mail/roundcube/roundcube.sqlite'); define('IMAP_FROM_SQL_USER', ''); define('IMAP_FROM_SQL_PASSWORD', ''); define('IMAP_FROM_SQL_OPTIONS', serialize(array(PDO::ATTR_PERSISTENT => true))); -define('IMAP_FROM_SQL_QUERY', "select first_name, last_name, mail_address from users where mail_address = '#username@#domain'"); -define('IMAP_FROM_SQL_FIELDS', serialize(array('first_name', 'last_name', 'mail_address'))); -define('IMAP_FROM_SQL_FROM', '#first_name #last_name <#mail_address>'); +define('IMAP_FROM_SQL_QUERY', "SELECT name, email FROM identities i INNER JOIN users u ON i.user_id = u.user_id WHERE u.username = '#username' AND i.standard = 1 AND i.del = 0 AND i.name <> ''"); +define('IMAP_FROM_SQL_FIELDS', serialize(array('name', 'email'))); +define('IMAP_FROM_SQL_FROM', '#name <#email>'); +define('IMAP_FROM_SQL_FULLNAME', '#name'); + +// not used define('IMAP_FROM_LDAP_SERVER', ''); define('IMAP_FROM_LDAP_SERVER_PORT', '389'); define('IMAP_FROM_LDAP_USER', 'cn=zpush,ou=servers,dc=zpush,dc=org'); @@ -40,11 +41,12 @@ define('IMAP_FROM_LDAP_BASE', 'dc=zpush,dc=org'); 
define('IMAP_FROM_LDAP_QUERY', '(mail=#username@#domain)'); define('IMAP_FROM_LDAP_FIELDS', serialize(array('givenname', 'sn', 'mail'))); define('IMAP_FROM_LDAP_FROM', '#givenname #sn <#mail>'); +define('IMAP_FROM_LDAP_FULLNAME', '#givenname #sn'); define('IMAP_SMTP_METHOD', 'sendmail'); global $imap_smtp_params; -$imap_smtp_params = array('host' => 'ssl://localhost', 'port' => 587, 'auth' => true, 'username' => 'imap_username', 'password' => 'imap_password'); +$imap_smtp_params = array('host' => 'ssl://127.0.0.1', 'port' => 587, 'auth' => true, 'username' => 'imap_username', 'password' => 'imap_password'); define('MAIL_MIMEPART_CRLF', "\r\n"); diff --git a/management/backup.py b/management/backup.py index 2bb499ef..d6189cfe 100755 --- a/management/backup.py +++ b/management/backup.py @@ -2,15 +2,22 @@ # This script performs a backup of all user data: # 1) System services are stopped. -# 2) An incremental encrypted backup is made using duplicity. -# 3) The stopped services are restarted. -# 4) STORAGE_ROOT/backup/after-backup is executd if it exists. +# 2) STORAGE_ROOT/backup/before-backup is executed if it exists. +# 3) An incremental encrypted backup is made using duplicity. +# 4) The stopped services are restarted. +# 5) STORAGE_ROOT/backup/after-backup is executed if it exists. 
import os, os.path, shutil, glob, re, datetime, sys import dateutil.parser, dateutil.relativedelta, dateutil.tz import rtyaml +from exclusiveprocess import Lock -from utils import exclusive_process, load_environment, shell, wait_for_service, fix_boto +from utils import load_environment, shell, wait_for_service, fix_boto + +rsync_ssh_options = [ + "--ssh-options='-i /root/.ssh/id_rsa_miab'", + "--rsync-options=-e \"/usr/bin/ssh -oStrictHostKeyChecking=no -oBatchMode=yes -p 22 -i /root/.ssh/id_rsa_miab\"", +] def backup_status(env): # Root folder @@ -32,6 +39,8 @@ def backup_status(env): def reldate(date, ref, clip): if ref < date: return clip rd = dateutil.relativedelta.relativedelta(ref, date) + if rd.years > 1: return "%d years, %d months" % (rd.years, rd.months) + if rd.years == 1: return "%d year, %d months" % (rd.years, rd.months) if rd.months > 1: return "%d months, %d days" % (rd.months, rd.days) if rd.months == 1: return "%d month, %d days" % (rd.months, rd.days) if rd.days >= 7: return "%d days" % rd.days @@ -51,6 +60,7 @@ def backup_status(env): "size": 0, # collection-status doesn't give us the size "volumes": keys[2], # number of archive volumes for this backup (not really helpful) } + code, collection_status = shell('check_output', [ "/usr/bin/duplicity", "collection-status", @@ -58,7 +68,7 @@ def backup_status(env): "--gpg-options", "--cipher-algo=AES256", "--log-fd", "1", config["target"], - ], + ] + rsync_ssh_options, get_env(env), trap=True) if code != 0: @@ -176,34 +186,37 @@ def get_passphrase(env): with open(os.path.join(backup_root, 'secret_key.txt')) as f: passphrase = f.readline().strip() if len(passphrase) < 43: raise Exception("secret_key.txt's first line is too short!") - + return passphrase def get_env(env): config = get_backup_config(env) - + env = { "PASSPHRASE" : get_passphrase(env) } - + if get_target_type(config) == 's3': env["AWS_ACCESS_KEY_ID"] = config["target_user"] env["AWS_SECRET_ACCESS_KEY"] = config["target_pass"] - + return 
env - + def get_target_type(config): protocol = config["target"].split(":")[0] return protocol - + def perform_backup(full_backup): env = load_environment() - exclusive_process("backup") + # Create a global exclusive lock so that the backup script + # cannot be run more than once. + Lock(die=True).forever() + config = get_backup_config(env) backup_root = os.path.join(env["STORAGE_ROOT"], 'backup') backup_cache_dir = os.path.join(backup_root, 'cache') backup_dir = os.path.join(backup_root, 'encrypted') - # Are backups dissbled? + # Are backups disabled? if config["target"] == "off": return @@ -258,6 +271,15 @@ def perform_backup(full_backup): service_command("postfix", "stop", quit=True) service_command("dovecot", "stop", quit=True) + # Execute a pre-backup script that copies files outside the homedir. + # Run as the STORAGE_USER user, not as root. Pass our settings in + # environment variables so the script has access to STORAGE_ROOT. + pre_script = os.path.join(backup_root, 'before-backup') + if os.path.exists(pre_script): + shell('check_call', + ['su', env['STORAGE_USER'], '-c', pre_script, config["target"]], + env=env) + + # Run a backup of STORAGE_ROOT (but excluding the backups themselves!). # --allow-source-mismatch is needed in case the box's hostname is changed # after the first backup. See #396. @@ -273,7 +295,7 @@ def perform_backup(full_backup): env["STORAGE_ROOT"], config["target"], "--allow-source-mismatch" - ], + ] + rsync_ssh_options, get_env(env)) finally: # Start services again. 
@@ -295,7 +317,7 @@ def perform_backup(full_backup): "--archive-dir", backup_cache_dir, "--force", config["target"] - ], + ] + rsync_ssh_options, get_env(env)) # From duplicity's manual: @@ -310,7 +332,7 @@ def perform_backup(full_backup): "--archive-dir", backup_cache_dir, "--force", config["target"] - ], + ] + rsync_ssh_options, get_env(env)) # Change ownership of backups to the user-data user, so that the after-bcakup @@ -349,7 +371,7 @@ def run_duplicity_verification(): "--exclude", backup_root, config["target"], env["STORAGE_ROOT"], - ], get_env(env)) + ] + rsync_ssh_options, get_env(env)) def run_duplicity_restore(args): env = load_environment() @@ -360,32 +382,74 @@ def run_duplicity_restore(args): "restore", "--archive-dir", backup_cache_dir, config["target"], - ] + args, + ] + rsync_ssh_options + args, get_env(env)) def list_target_files(config): import urllib.parse try: - p = urllib.parse.urlparse(config["target"]) + target = urllib.parse.urlparse(config["target"]) except ValueError: return "invalid target" - if p.scheme == "file": - return [(fn, os.path.getsize(os.path.join(p.path, fn))) for fn in os.listdir(p.path)] + if target.scheme == "file": + return [(fn, os.path.getsize(os.path.join(target.path, fn))) for fn in os.listdir(target.path)] - elif p.scheme == "s3": + elif target.scheme == "rsync": + rsync_fn_size_re = re.compile(r'.* ([^ ]*) [^ ]* [^ ]* (.*)') + rsync_target = '{host}:{path}' + + if not target.path.endswith('/'): + target_path = target.path + '/' + if target.path.startswith('/'): + target_path = target.path[1:] + + rsync_command = [ 'rsync', + '-e', + '/usr/bin/ssh -i /root/.ssh/id_rsa_miab -oStrictHostKeyChecking=no -oBatchMode=yes', + '--list-only', + '-r', + rsync_target.format( + host=target.netloc, + path=target_path) + ] + + code, listing = shell('check_output', rsync_command, trap=True, capture_stderr=True) + if code == 0: + ret = [] + for l in listing.split('\n'): + match = rsync_fn_size_re.match(l) + if match: + ret.append( 
(match.groups()[1], int(match.groups()[0].replace(',',''))) ) + return ret + else: + if 'Permission denied (publickey).' in listing: + reason = "Invalid user or check you correctly copied the SSH key." + elif 'No such file or directory' in listing: + reason = "Provided path {} is invalid.".format(target_path) + elif 'Network is unreachable' in listing: + reason = "The IP address {} is unreachable.".format(target.hostname) + elif 'Could not resolve hostname' in listing: + reason = "The hostname {} cannot be resolved.".format(target.hostname) + else: + reason = "Unknown error." \ "Please check running 'python management/backup.py --verify'" \ "from mailinabox sources to debug the issue." + raise ValueError("Connection to rsync host failed: {}".format(reason)) + + elif target.scheme == "s3": # match to a Region fix_boto() # must call prior to importing boto import boto.s3 from boto.exception import BotoServerError for region in boto.s3.regions(): - if region.endpoint == p.hostname: + if region.endpoint == target.hostname: break else: raise ValueError("Invalid S3 region/host.") - bucket = p.path[1:].split('/')[0] - path = '/'.join(p.path[1:].split('/')[1:]) + '/' + bucket = target.path[1:].split('/')[0] + path = '/'.join(target.path[1:].split('/')[1:]) + '/' # If no prefix is specified, set the path to '', otherwise boto won't list the files if path == '/': @@ -415,7 +479,7 @@ def list_target_files(config): def backup_set_custom(env, target, target_user, target_pass, min_age): config = get_backup_config(env, for_save=True) - + # min_age must be an int if isinstance(min_age, str): min_age = int(min_age) @@ -433,11 +497,11 @@ def backup_set_custom(env, target, target_user, target_pass, min_age): list_target_files(config) except ValueError as e: return str(e) - + write_backup_config(env, config) return "OK" - + def get_backup_config(env, for_save=False, for_ui=False): backup_root = os.path.join(env["STORAGE_ROOT"], 'backup') @@ -472,6 +536,9 @@ def get_backup_config(env, 
for_save=False, for_ui=False): if config["target"] == "local": # Expand to the full URL. config["target"] = "file://" + config["file_target_directory"] + ssh_pub_key = os.path.join('/root', '.ssh', 'id_rsa_miab.pub') + if os.path.exists(ssh_pub_key): + config["ssh_pub_key"] = open(ssh_pub_key, 'r').read() return config @@ -487,6 +554,12 @@ if __name__ == "__main__": # are readable, and b) report if they are up to date. run_duplicity_verification() + elif sys.argv[-1] == "--list": + # List the files stored in the backup target along with their + # sizes, one per line, tab-separated. + for fn, size in list_target_files(get_backup_config(load_environment())): + print("{}\t{}".format(fn, size)) + elif sys.argv[-1] == "--status": # Show backup status. ret = backup_status(load_environment()) diff --git a/management/daemon.py b/management/daemon.py index bf3c9134..3c712303 100755 --- a/management/daemon.py +++ b/management/daemon.py @@ -1,10 +1,11 @@ #!/usr/bin/python3 -import os, os.path, re, json +import os, os.path, re, json, time +import subprocess from functools import wraps -from flask import Flask, request, render_template, abort, Response, send_from_directory +from flask import Flask, request, render_template, abort, Response, send_from_directory, make_response import auth, utils, multiprocessing.pool from mailconfig import get_mail_users, get_mail_users_ex, get_admins, add_mail_user, set_mail_password, remove_mail_user @@ -43,7 +44,10 @@ def authorized_personnel_only(viewfunc): except ValueError as e: # Authentication failed. privs = [] - error = str(e) + error = "Incorrect username or password" + + # Write a line in the log recording the failed login + log_failed_login(request) # Authorized to access an API view? 
if "admin" in privs: @@ -117,9 +121,12 @@ def me(): try: email, privs = auth_service.authenticate(request, env) except ValueError as e: + # Log the failed login + log_failed_login(request) + return json_response({ "status": "invalid", - "reason": str(e), + "reason": "Incorrect username or password", }) resp = { @@ -453,6 +460,27 @@ def do_updates(): "DEBIAN_FRONTEND": "noninteractive" }) + +@app.route('/system/reboot', methods=["GET"]) +@authorized_personnel_only +def needs_reboot(): + from status_checks import is_reboot_needed_due_to_package_installation + if is_reboot_needed_due_to_package_installation(): + return json_response(True) + else: + return json_response(False) + +@app.route('/system/reboot', methods=["POST"]) +@authorized_personnel_only +def do_reboot(): + # To keep the attack surface low, we don't allow a remote reboot if one isn't necessary. + from status_checks import is_reboot_needed_due_to_package_installation + if is_reboot_needed_due_to_package_installation(): + return utils.shell("check_output", ["/sbin/shutdown", "-r", "now"], capture_stderr=True) + else: + return "No reboot is required, so it is not allowed." + + @app.route('/system/backup/status') @authorized_personnel_only def backup_status(): @@ -504,6 +532,77 @@ def munin(filename=""): if filename == "": filename = "index.html" return send_from_directory("/var/cache/munin/www", filename) +@app.route('/munin/cgi-graph/') +@authorized_personnel_only +def munin_cgi(filename): + """ Relay munin cgi dynazoom requests + /usr/lib/munin/cgi/munin-cgi-graph is a perl cgi script in the munin package + that is responsible for generating binary png images _and_ associated HTTP + headers based on parameters in the requesting URL. All output is written + to stdout which munin_cgi splits into response headers and binary response + data. + munin-cgi-graph reads environment variables to determine + what it should do. 
It expects a path to be in the env-var PATH_INFO, and a + querystring to be in the env-var QUERY_STRING. + munin-cgi-graph has several failure modes. Some write HTTP Status headers and + others return nonzero exit codes. + Situating munin_cgi between the user-agent and munin-cgi-graph enables keeping + the cgi script behind mailinabox's auth mechanisms and avoids additional + support infrastructure like spawn-fcgi. + """ + + COMMAND = 'su - munin --preserve-environment --shell=/bin/bash -c /usr/lib/munin/cgi/munin-cgi-graph' + # su changes user, we use the munin user here + # --preserve-environment retains the environment, which is where Popen's `env` data is + # --shell=/bin/bash ensures the shell used is bash + # -c "/usr/lib/munin/cgi/munin-cgi-graph" passes the command to run as munin + # "%s" is a placeholder for where the request's querystring will be added + + if filename == "": + return ("a path must be specified", 404) + + query_str = request.query_string.decode("utf-8", 'ignore') + + env = {'PATH_INFO': '/%s/' % filename, 'REQUEST_METHOD': 'GET', 'QUERY_STRING': query_str} + code, binout = utils.shell('check_output', + COMMAND.split(" ", 5), + # Using a maxsplit of 5 keeps the last arguments together + env=env, + return_bytes=True, + trap=True) + + if code != 0: + # nonzero returncode indicates error + app.logger.error("munin_cgi: munin-cgi-graph returned nonzero exit code, %s", code) + return ("error processing graph image", 500) + + # /usr/lib/munin/cgi/munin-cgi-graph returns both headers and binary png when successful. + # A double-Windows-style-newline always indicates the end of HTTP headers. 
+ headers, image_bytes = binout.split(b'\r\n\r\n', 1) + response = make_response(image_bytes) + for line in headers.splitlines(): + name, value = line.decode("utf8").split(':', 1) + response.headers[name] = value + if 'Status' in response.headers and '404' in response.headers['Status']: + app.logger.warning("munin_cgi: munin-cgi-graph returned 404 status code. PATH_INFO=%s", env['PATH_INFO']) + return response + +def log_failed_login(request): + # We need to figure out the ip to list in the message, all our calls are routed + # through nginx who will put the original ip in X-Forwarded-For. + # During setup we call the management interface directly to determine the user + # status. So we can't always use X-Forwarded-For because during setup that header + # will not be present. + if request.headers.getlist("X-Forwarded-For"): + ip = request.headers.getlist("X-Forwarded-For")[0] + else: + ip = request.remote_addr + + # We need to add a timestamp to the log message, otherwise /dev/log will eat the "duplicate" + # message. + app.logger.warning( "Mail-in-a-Box Management Daemon: Failed login attempt from ip %s - timestamp %s" % (ip, time.time())) + + # APP if __name__ == '__main__': diff --git a/management/daily_tasks.sh b/management/daily_tasks.sh index 3cea74b1..c0e88a8c 100755 --- a/management/daily_tasks.sh +++ b/management/daily_tasks.sh @@ -13,7 +13,7 @@ export LC_TYPE=en_US.UTF-8 management/backup.py | management/email_administrator.py "Backup Status" # Provision any new certificates for new domains or domains with expiring certificates. -management/ssl_certificates.py --headless | management/email_administrator.py "Error Provisioning TLS Certificate" +management/ssl_certificates.py -q --headless | management/email_administrator.py "Error Provisioning TLS Certificate" # Run status checks and email the administrator if anything changed. 
management/status_checks.py --show-changes | management/email_administrator.py "Status Checks Change Notice" diff --git a/management/dns_update.py b/management/dns_update.py index 6f4de318..6c16add1 100755 --- a/management/dns_update.py +++ b/management/dns_update.py @@ -175,9 +175,6 @@ def build_zone(domain, all_domains, additional_records, www_redirect_domains, en for value in build_sshfp_records(): records.append((None, "SSHFP", value, "Optional. Provides an out-of-band method for verifying an SSH key before connecting. Use 'VerifyHostKeyDNS yes' (or 'VerifyHostKeyDNS ask') when connecting with ssh.")) - # The MX record says where email for the domain should be delivered: Here! - records.append((None, "MX", "10 %s." % env["PRIMARY_HOSTNAME"], "Required. Specifies the hostname (and priority) of the machine that handles @%s mail." % domain)) - # Add DNS records for any subdomains of this domain. We should not have a zone for # both a domain and one of its subdomains. subdomains = [d for d in all_domains if d.endswith("." + domain)] @@ -244,6 +241,10 @@ def build_zone(domain, all_domains, additional_records, www_redirect_domains, en # Don't pin the list of records that has_rec checks against anymore. has_rec_base = records + # The MX record says where email for the domain should be delivered: Here! + if not has_rec(None, "MX", prefix="10 "): + records.append((None, "MX", "10 %s." % env["PRIMARY_HOSTNAME"], "Required. Specifies the hostname (and priority) of the machine that handles @%s mail." % domain)) + # SPF record: Permit the box ('mx', see above) to send mail on behalf of # the domain, and no one else. # Skip if the user has set a custom SPF record. @@ -273,6 +274,13 @@ def build_zone(domain, all_domains, additional_records, www_redirect_domains, en if not has_rec(dmarc_qname, "TXT", prefix="v=DMARC1; "): records.append((dmarc_qname, "TXT", 'v=DMARC1; p=reject', "Recommended. 
Prevents use of this domain name for outbound mail by specifying that the SPF rule should be honoured for mail from @%s." % (qname + "." + domain))) + # Add CardDAV/CalDAV SRV records on the non-primary hostname that points to the primary hostname. + # The SRV record format is priority (0, whatever), weight (0, whatever), port, service provider hostname (w/ trailing dot). + if domain != env["PRIMARY_HOSTNAME"]: + for dav in ("card", "cal"): + qname = "_" + dav + "davs._tcp" + if not has_rec(qname, "SRV"): + records.append((qname, "SRV", "0 0 443 " + env["PRIMARY_HOSTNAME"] + ".", "Recommended. Specifies the hostname of the server that handles CardDAV/CalDAV services for email addresses on this domain.")) # Sort the records. The None records *must* go first in the nsd zone file. Otherwise it doesn't matter. records.sort(key = lambda rec : list(reversed(rec[0].split(".")) if rec[0] is not None else "")) @@ -334,13 +342,25 @@ def build_sshfp_records(): "ssh-rsa": 1, "ssh-dss": 2, "ecdsa-sha2-nistp256": 3, + "ssh-ed25519": 4, } # Get our local fingerprints by running ssh-keyscan. The output looks # like the known_hosts file: hostname, keytype, fingerprint. The order # of the output is arbitrary, so sort it to prevent spurrious updates # to the zone file (that trigger bumping the serial number). 
- keys = shell("check_output", ["ssh-keyscan", "localhost"]) + + # scan the sshd_config and find the ssh ports (port 22 may be closed) + with open('/etc/ssh/sshd_config', 'r') as f: + ports = [] + t = f.readlines() + for line in t: + s = line.split() + if len(s) == 2 and s[0] == 'Port': + ports = ports + [s[1]] + # the keys are the same at each port, so we only need to get + # them at the first port found (may not be port 22) + keys = shell("check_output", ["ssh-keyscan", "-t", "rsa,dsa,ecdsa,ed25519", "-p", ports[0], "localhost"]) for key in sorted(keys.split("\n")): if key.strip() == "" or key[0] == "#": continue try: @@ -747,7 +767,7 @@ def set_custom_dns_record(qname, rtype, value, action, env): v = ipaddress.ip_address(value) # raises a ValueError if there's a problem if rtype == "A" and not isinstance(v, ipaddress.IPv4Address): raise ValueError("That's an IPv6 address.") if rtype == "AAAA" and not isinstance(v, ipaddress.IPv6Address): raise ValueError("That's an IPv4 address.") - elif rtype in ("CNAME", "TXT", "SRV", "MX"): + elif rtype in ("CNAME", "TXT", "SRV", "MX", "SSHFP"): # anything goes pass else: @@ -862,10 +882,10 @@ def set_secondary_dns(hostnames, env): return do_dns_update(env) -def get_custom_dns_record(custom_dns, qname, rtype): +def get_custom_dns_records(custom_dns, qname, rtype): for qname1, rtype1, value in custom_dns: if qname1 == qname and rtype1 == rtype: - return value + yield value return None ######################################################################## diff --git a/management/email_administrator.py b/management/email_administrator.py index 84d27460..b16fda1d 100755 --- a/management/email_administrator.py +++ b/management/email_administrator.py @@ -33,7 +33,7 @@ msg['Subject'] = "[%s] %s" % (env['PRIMARY_HOSTNAME'], subject) msg.set_payload(content, "UTF-8") # send -smtpclient = smtplib.SMTP('localhost', 25) +smtpclient = smtplib.SMTP('127.0.0.1', 25) smtpclient.ehlo() smtpclient.sendmail( admin_addr, # MAIL FROM diff --git 
a/management/mail_log.py b/management/mail_log.py index 22fb87af..c63692a1 100755 --- a/management/mail_log.py +++ b/management/mail_log.py @@ -1,136 +1,881 @@ #!/usr/bin/python3 +import argparse +import datetime +import gzip +import os.path +import re +import shutil +import tempfile +import textwrap +from collections import defaultdict, OrderedDict -from collections import defaultdict -import re, os.path import dateutil.parser +import time + +from dateutil.relativedelta import relativedelta -import mailconfig import utils -def scan_mail_log(logger, env): - collector = { - "other-services": set(), - "imap-logins": { }, - "postgrey": { }, - "rejected-mail": { }, - "activity-by-hour": { "imap-logins": defaultdict(int), "smtp-sends": defaultdict(int) }, - } - collector["real_mail_addresses"] = set(mailconfig.get_mail_users(env)) | set(alias[0] for alias in mailconfig.get_mail_aliases(env)) +LOG_FILES = ( + '/var/log/mail.log', + '/var/log/mail.log.1', + '/var/log/mail.log.2.gz', + '/var/log/mail.log.3.gz', + '/var/log/mail.log.4.gz', + '/var/log/mail.log.5.gz', + '/var/log/mail.log.6.gz', +) - for fn in ('/var/log/mail.log.1', '/var/log/mail.log'): - if not os.path.exists(fn): continue - with open(fn, 'rb') as log: - for line in log: - line = line.decode("utf8", errors='replace') - scan_mail_log_line(line.strip(), collector) +TIME_DELTAS = OrderedDict([ + ('all', datetime.timedelta(weeks=52)), + ('month', datetime.timedelta(weeks=4)), + ('2weeks', datetime.timedelta(days=14)), + ('week', datetime.timedelta(days=7)), + ('2days', datetime.timedelta(days=2)), + ('day', datetime.timedelta(days=1)), + ('12hours', datetime.timedelta(hours=12)), + ('6hours', datetime.timedelta(hours=6)), + ('hour', datetime.timedelta(hours=1)), + ('30min', datetime.timedelta(minutes=30)), + ('10min', datetime.timedelta(minutes=10)), + ('5min', datetime.timedelta(minutes=5)), + ('min', datetime.timedelta(minutes=1)), + ('today', datetime.datetime.now() - 
datetime.datetime.now().replace(hour=0, minute=0, second=0)) +]) - if collector["imap-logins"]: - logger.add_heading("Recent IMAP Logins") - logger.print_block("The most recent login from each remote IP adddress is show.") - for k in utils.sort_email_addresses(collector["imap-logins"], env): - for ip, date in sorted(collector["imap-logins"][k].items(), key = lambda kv : kv[1]): - logger.print_line(k + "\t" + str(date) + "\t" + ip) +# Start date > end date! +START_DATE = datetime.datetime.now() +END_DATE = None - if collector["postgrey"]: - logger.add_heading("Greylisted Mail") - logger.print_block("The following mail was greylisted, meaning the emails were temporarily rejected. Legitimate senders will try again within ten minutes.") - logger.print_line("recipient" + "\t" + "received" + "\t" + "sender" + "\t" + "delivered") - for recipient in utils.sort_email_addresses(collector["postgrey"], env): - for (client_address, sender), (first_date, delivered_date) in sorted(collector["postgrey"][recipient].items(), key = lambda kv : kv[1][0]): - logger.print_line(recipient + "\t" + str(first_date) + "\t" + sender + "\t" + (("delivered " + str(delivered_date)) if delivered_date else "no retry yet")) +VERBOSE = False - if collector["rejected-mail"]: - logger.add_heading("Rejected Mail") - logger.print_block("The following incoming mail was rejected.") - for k in utils.sort_email_addresses(collector["rejected-mail"], env): - for date, sender, message in collector["rejected-mail"][k]: - logger.print_line(k + "\t" + str(date) + "\t" + sender + "\t" + message) +# List of strings to filter users with +FILTERS = None - logger.add_heading("Activity by Hour") - for h in range(24): - logger.print_line("%d\t%d\t%d" % (h, collector["activity-by-hour"]["imap-logins"][h], collector["activity-by-hour"]["smtp-sends"][h] )) +# What to show by default +SCAN_OUT = True # Outgoing email +SCAN_IN = True # Incoming email +SCAN_CONN = False # IMAP and POP3 logins +SCAN_GREY = False # Greylisted 
email +SCAN_BLOCKED = False # Rejected email + + +def scan_files(collector): + """ Scan files until they run out or the earliest date is reached """ + + stop_scan = False + + for fn in LOG_FILES: + + tmp_file = None + + if not os.path.exists(fn): + continue + elif fn[-3:] == '.gz': + tmp_file = tempfile.NamedTemporaryFile() + shutil.copyfileobj(gzip.open(fn), tmp_file) + + print("Processing file", fn, "...") + fn = tmp_file.name if tmp_file else fn + + for line in reverse_readline(fn): + if scan_mail_log_line(line.strip(), collector) is False: + if stop_scan: + return + stop_scan = True + else: + stop_scan = False + + + +def scan_mail_log(env): + """ Scan the system's mail log files and collect interesting data + + This function scans the 2 most recent mail log files in /var/log/. + + Args: + env (dict): Dictionary containing MiaB settings + + """ + + collector = { + "scan_count": 0, # Number of lines scanned + "parse_count": 0, # Number of lines parsed (i.e. that had their contents examined) + "scan_time": time.time(), # The time in seconds the scan took + "sent_mail": OrderedDict(), # Data about email sent by users + "received_mail": OrderedDict(), # Data about email received by users + "dovecot": OrderedDict(), # Data about Dovecot activity + "postgrey": {}, # Data about greylisting of email addresses + "rejected": OrderedDict(), # Emails that were blocked + "known_addresses": None, # Addresses handled by the Miab installation + "other-services": set(), + } + + try: + import mailconfig + collector["known_addresses"] = (set(mailconfig.get_mail_users(env)) | + set(alias[0] for alias in mailconfig.get_mail_aliases(env))) + except ImportError: + pass + + print("Scanning from {:%Y-%m-%d %H:%M:%S} back to {:%Y-%m-%d %H:%M:%S}".format( + START_DATE, END_DATE) + ) + + # Scan the lines in the log files until the date goes out of range + scan_files(collector) + + if not collector["scan_count"]: + print("No log lines scanned...") + return + + collector["scan_time"] = 
time.time() - collector["scan_time"] + + print("{scan_count} Log lines scanned, {parse_count} lines parsed in {scan_time:.2f} " + "seconds\n".format(**collector)) + + # Print Sent Mail report + + if collector["sent_mail"]: + msg = "Sent email between {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}" + print_header(msg.format(END_DATE, START_DATE)) + + data = OrderedDict(sorted(collector["sent_mail"].items(), key=email_sort)) + + print_user_table( + data.keys(), + data=[ + ("sent", [u["sent_count"] for u in data.values()]), + ("hosts", [len(u["hosts"]) for u in data.values()]), + ], + sub_data=[ + ("sending hosts", [u["hosts"] for u in data.values()]), + ], + activity=[ + ("sent", [u["activity-by-hour"] for u in data.values()]), + ], + earliest=[u["earliest"] for u in data.values()], + latest=[u["latest"] for u in data.values()], + ) + + accum = defaultdict(int) + data = collector["sent_mail"].values() + + for h in range(24): + accum[h] = sum(d["activity-by-hour"][h] for d in data) + + print_time_table( + ["sent"], + [accum] + ) + + # Print Received Mail report + + if collector["received_mail"]: + msg = "Received email between {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}" + print_header(msg.format(END_DATE, START_DATE)) + + data = OrderedDict(sorted(collector["received_mail"].items(), key=email_sort)) + + print_user_table( + data.keys(), + data=[ + ("received", [u["received_count"] for u in data.values()]), + ], + activity=[ + ("sent", [u["activity-by-hour"] for u in data.values()]), + ], + earliest=[u["earliest"] for u in data.values()], + latest=[u["latest"] for u in data.values()], + ) + + accum = defaultdict(int) + for h in range(24): + accum[h] = sum(d["activity-by-hour"][h] for d in data.values()) + + print_time_table( + ["received"], + [accum] + ) + + # Print Dovecot report + + if collector["dovecot"]: + msg = "Email client logins between {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}" + print_header(msg.format(END_DATE, START_DATE)) + + data = 
OrderedDict(sorted(collector["dovecot"].items(), key=email_sort)) + + print_user_table( + data.keys(), + data=[ + ("imap", [u["imap"] for u in data.values()]), + ("pop3", [u["pop3"] for u in data.values()]), + ], + sub_data=[ + ("IMAP IP addresses", [[k + " (%d)" % v for k, v in u["imap-logins"].items()] + for u in data.values()]), + ("POP3 IP addresses", [[k + " (%d)" % v for k, v in u["pop3-logins"].items()] + for u in data.values()]), + ], + activity=[ + ("imap", [u["activity-by-hour"]["imap"] for u in data.values()]), + ("pop3", [u["activity-by-hour"]["pop3"] for u in data.values()]), + ], + earliest=[u["earliest"] for u in data.values()], + latest=[u["latest"] for u in data.values()], + ) + + accum = {"imap": defaultdict(int), "pop3": defaultdict(int), "both": defaultdict(int)} + for h in range(24): + accum["imap"][h] = sum(d["activity-by-hour"]["imap"][h] for d in data.values()) + accum["pop3"][h] = sum(d["activity-by-hour"]["pop3"][h] for d in data.values()) + accum["both"][h] = accum["imap"][h] + accum["pop3"][h] + + print_time_table( + ["imap", "pop3", " +"], + [accum["imap"], accum["pop3"], accum["both"]] + ) + + if collector["postgrey"]: + msg = "Greylisted Email {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}" + print_header(msg.format(END_DATE, START_DATE)) + + print(textwrap.fill( + "The following mail was greylisted, meaning the emails were temporarily rejected. 
" + "Legitimate senders will try again within ten minutes.", + width=80, initial_indent=" ", subsequent_indent=" " + ), end='\n\n') + + data = OrderedDict(sorted(collector["postgrey"].items(), key=email_sort)) + users = [] + received = [] + senders = [] + sender_clients = [] + delivered_dates = [] + + for recipient in data: + sorted_recipients = sorted(data[recipient].items(), key=lambda kv: kv[1][0] or kv[1][1]) + for (client_address, sender), (first_date, delivered_date) in sorted_recipients: + if first_date: + users.append(recipient) + received.append(first_date) + senders.append(sender) + delivered_dates.append(delivered_date) + sender_clients.append(client_address) + + print_user_table( + users, + data=[ + ("received", received), + ("sender", senders), + ("delivered", [str(d) or "no retry yet" for d in delivered_dates]), + ("sending host", sender_clients) + ], + delimit=True, + ) + + if collector["rejected"]: + msg = "Blocked Email {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}" + print_header(msg.format(END_DATE, START_DATE)) + + data = OrderedDict(sorted(collector["rejected"].items(), key=email_sort)) + + rejects = [] + + if VERBOSE: + for user_data in data.values(): + user_rejects = [] + for date, sender, message in user_data["blocked"]: + if len(sender) > 64: + sender = sender[:32] + "…" + sender[-32:] + user_rejects.append("%s - %s " % (date, sender)) + user_rejects.append(" %s" % message) + rejects.append(user_rejects) + + print_user_table( + data.keys(), + data=[ + ("blocked", [len(u["blocked"]) for u in data.values()]), + ], + sub_data=[ + ("blocked emails", rejects), + ], + earliest=[u["earliest"] for u in data.values()], + latest=[u["latest"] for u in data.values()], + ) + + if collector["other-services"] and VERBOSE and False: + print_header("Other services") + print("The following unkown services were found in the log file.") + print(" ", *sorted(list(collector["other-services"])), sep='\n│ ') - if len(collector["other-services"]) > 0: - 
logger.add_heading("Other") - logger.print_block("Unrecognized services in the log: " + ", ".join(collector["other-services"])) def scan_mail_log_line(line, collector): - m = re.match(r"(\S+ \d+ \d+:\d+:\d+) (\S+) (\S+?)(\[\d+\])?: (.*)", line) - if not m: return + """ Scan a log line and extract interesting data """ - date, system, service, pid, log = m.groups() - date = dateutil.parser.parse(date) - - if service == "dovecot": - scan_dovecot_line(date, log, collector) + m = re.match(r"(\w+[\s]+\d+ \d+:\d+:\d+) ([\w]+ )?([\w\-/]+)[^:]*: (.*)", line) - elif service == "postgrey": - scan_postgrey_line(date, log, collector) + if not m: + return True - elif service == "postfix/smtpd": - scan_postfix_smtpd_line(date, log, collector) + date, system, service, log = m.groups() + collector["scan_count"] += 1 - elif service == "postfix/submission/smtpd": - scan_postfix_submission_line(date, log, collector) + # print() + # print("date:", date) + # print("host:", system) + # print("service:", service) + # print("log:", log) - elif service in ("postfix/qmgr", "postfix/pickup", "postfix/cleanup", - "postfix/scache", "spampd", "postfix/anvil", "postfix/master", - "opendkim", "postfix/lmtp", "postfix/tlsmgr"): - # nothing to look at - pass + # Replaced the dateutil parser for a less clever way of parser that is roughly 4 times faster. 
+ # date = dateutil.parser.parse(date) + date = datetime.datetime.strptime(date, '%b %d %H:%M:%S') + date = date.replace(START_DATE.year) - else: - collector["other-services"].add(service) + # Check if the found date is within the time span we are scanning + if date > START_DATE: + # Don't process, but continue + return True + elif date < END_DATE: + # Don't process, and halt + return False + + if service == "postfix/submission/smtpd": + if SCAN_OUT: + scan_postfix_submission_line(date, log, collector) + elif service == "postfix/lmtp": + if SCAN_IN: + scan_postfix_lmtp_line(date, log, collector) + elif service in ("imap-login", "pop3-login"): + if SCAN_CONN: + scan_dovecot_line(date, log, collector, service[:4]) + elif service == "postgrey": + if SCAN_GREY: + scan_postgrey_line(date, log, collector) + elif service == "postfix/smtpd": + if SCAN_BLOCKED: + scan_postfix_smtpd_line(date, log, collector) + elif service in ("postfix/qmgr", "postfix/pickup", "postfix/cleanup", "postfix/scache", + "spampd", "postfix/anvil", "postfix/master", "opendkim", "postfix/lmtp", + "postfix/tlsmgr", "anvil"): + # nothing to look at + return True + else: + collector["other-services"].add(service) + return True + + collector["parse_count"] += 1 + return True -def scan_dovecot_line(date, log, collector): - m = re.match("imap-login: Login: user=<(.*?)>, method=PLAIN, rip=(.*?),", log) - if m: - login, ip = m.group(1), m.group(2) - if ip != "127.0.0.1": # local login from webmail/zpush - collector["imap-logins"].setdefault(login, {})[ip] = date - collector["activity-by-hour"]["imap-logins"][date.hour] += 1 def scan_postgrey_line(date, log, collector): - m = re.match("action=(greylist|pass), reason=(.*?), (?:delay=\d+, )?client_name=(.*), client_address=(.*), sender=(.*), recipient=(.*)", log) - if m: - action, reason, client_name, client_address, sender, recipient = m.groups() - key = (client_address, sender) - if action == "greylist" and reason == "new": - 
collector["postgrey"].setdefault(recipient, {})[key] = (date, None) - elif action == "pass" and reason == "triplet found" and key in collector["postgrey"].get(recipient, {}): - collector["postgrey"][recipient][key] = (collector["postgrey"][recipient][key][0], date) + """ Scan a postgrey log line and extract interesting data """ + + m = re.match("action=(greylist|pass), reason=(.*?), (?:delay=\d+, )?client_name=(.*), " + "client_address=(.*), sender=(.*), recipient=(.*)", + log) + + if m: + + action, reason, client_name, client_address, sender, user = m.groups() + + if user_match(user): + + # Might be useful to group services that use a lot of mail different servers on sub + # domains like 1.domein.com + + # if '.' in client_name: + # addr = client_name.split('.') + # if len(addr) > 2: + # client_name = '.'.join(addr[1:]) + + key = (client_address if client_name == 'unknown' else client_name, sender) + + rep = collector["postgrey"].setdefault(user, {}) + + if action == "greylist" and reason == "new": + rep[key] = (date, rep[key][1] if key in rep else None) + elif action == "pass": + rep[key] = (rep[key][0] if key in rep else None, date) + def scan_postfix_smtpd_line(date, log, collector): - m = re.match("NOQUEUE: reject: RCPT from .*?: (.*?); from=<(.*?)> to=<(.*?)>", log) - if m: - message, sender, recipient = m.groups() - if recipient in collector["real_mail_addresses"]: - # only log mail to real recipients + """ Scan a postfix smtpd log line and extract interesting data """ - # skip this, is reported in the greylisting report - if "Recipient address rejected: Greylisted" in message: - return + # Check if the incoming mail was rejected - # simplify this one - m = re.search(r"Client host \[(.*?)\] blocked using zen.spamhaus.org; (.*)", message) - if m: - message = "ip blocked: " + m.group(2) + m = re.match("NOQUEUE: reject: RCPT from .*?: (.*?); from=<(.*?)> to=<(.*?)>", log) - # simplify this one too - m = re.search(r"Sender address \[.*@(.*)\] blocked using 
dbl.spamhaus.org; (.*)", message) - if m: - message = "domain blocked: " + m.group(2) + if m: + message, sender, user = m.groups() + + # skip this, if reported in the greylisting report + if "Recipient address rejected: Greylisted" in message: + return + + # only log mail to known recipients + if user_match(user): + if collector["known_addresses"] is None or user in collector["known_addresses"]: + data = collector["rejected"].get( + user, + { + "blocked": [], + "earliest": None, + "latest": None, + } + ) + # simplify this one + m = re.search( + r"Client host \[(.*?)\] blocked using zen.spamhaus.org; (.*)", message + ) + if m: + message = "ip blocked: " + m.group(2) + else: + # simplify this one too + m = re.search( + r"Sender address \[.*@(.*)\] blocked using dbl.spamhaus.org; (.*)", message + ) + if m: + message = "domain blocked: " + m.group(2) + + if data["latest"] is None: + data["latest"] = date + data["earliest"] = date + data["blocked"].append((date, sender, message)) + + collector["rejected"][user] = data + + +def scan_dovecot_line(date, log, collector, prot): + """ Scan a dovecot log line and extract interesting data """ + + m = re.match("Info: Login: user=<(.*?)>, method=PLAIN, rip=(.*?),", log) + + if m: + # TODO: CHECK DIT + user, rip = m.groups() + + if user_match(user): + # Get the user data, or create it if the user is new + data = collector["dovecot"].get( + user, + { + "imap": 0, + "pop3": 0, + "earliest": None, + "latest": None, + "imap-logins": defaultdict(int), + "pop3-logins": defaultdict(int), + "activity-by-hour": { + "imap": defaultdict(int), + "pop3": defaultdict(int), + }, + } + ) + + data[prot] += 1 + data["activity-by-hour"][prot][date.hour] += 1 + + if data["latest"] is None: + data["latest"] = date + data["earliest"] = date + + if rip not in ("127.0.0.1", "::1") or True: + data["%s-logins" % prot][rip] += 1 + + collector["dovecot"][user] = data + + +def scan_postfix_lmtp_line(date, log, collector): + """ Scan a postfix lmtp log line 
and extract interesting data + + It is assumed that every log of postfix/lmtp indicates an email that was successfully + received by Postfix. + + """ + + m = re.match("([A-Z0-9]+): to=<(\S+)>, .* Saved", log) + + if m: + _, user = m.groups() + + if user_match(user): + # Get the user data, or create it if the user is new + data = collector["received_mail"].get( + user, + { + "received_count": 0, + "earliest": None, + "latest": None, + "activity-by-hour": defaultdict(int), + } + ) + + data["received_count"] += 1 + data["activity-by-hour"][date.hour] += 1 + + if data["latest"] is None: + data["latest"] = date + data["earliest"] = date + + collector["received_mail"][user] = data - collector["rejected-mail"].setdefault(recipient, []).append( (date, sender, message) ) def scan_postfix_submission_line(date, log, collector): - m = re.match("([A-Z0-9]+): client=(\S+), sasl_method=PLAIN, sasl_username=(\S+)", log) - if m: - procid, client, user = m.groups() - collector["activity-by-hour"]["smtp-sends"][date.hour] += 1 + """ Scan a postfix submission log line and extract interesting data + + Lines containing a sasl_method with the values PLAIN or LOGIN are assumed to indicate a sent + email. 
+ + """ + + # Match both the 'plain' and 'login' sasl methods, since both authentication methods are + # allowed by Dovecot + m = re.match("([A-Z0-9]+): client=(\S+), sasl_method=(PLAIN|LOGIN), sasl_username=(\S+)", log) + + if m: + _, client, method, user = m.groups() + + if user_match(user): + # Get the user data, or create it if the user is new + data = collector["sent_mail"].get( + user, + { + "sent_count": 0, + "hosts": set(), + "earliest": None, + "latest": None, + "activity-by-hour": defaultdict(int), + } + ) + + data["sent_count"] += 1 + data["hosts"].add(client) + data["activity-by-hour"][date.hour] += 1 + + if data["latest"] is None: + data["latest"] = date + data["earliest"] = date + + collector["sent_mail"][user] = data + + +# Utility functions + +def reverse_readline(filename, buf_size=8192): + """ A generator that returns the lines of a file in reverse order + + http://stackoverflow.com/a/23646049/801870 + + """ + + with open(filename) as fh: + segment = None + offset = 0 + fh.seek(0, os.SEEK_END) + file_size = remaining_size = fh.tell() + while remaining_size > 0: + offset = min(file_size, offset + buf_size) + fh.seek(file_size - offset) + buff = fh.read(min(remaining_size, buf_size)) + remaining_size -= buf_size + lines = buff.split('\n') + # the first line of the buffer is probably not a complete line so + # we'll save it and append it to the last line of the next buffer + # we read + if segment is not None: + # if the previous chunk starts right from the beginning of line + # do not concat the segment to the last line of new chunk + # instead, yield the segment first + if buff[-1] is not '\n': + lines[-1] += segment + else: + yield segment + segment = lines[0] + for index in range(len(lines) - 1, 0, -1): + if len(lines[index]): + yield lines[index] + # Don't yield None if the file was empty + if segment is not None: + yield segment + + +def user_match(user): + """ Check if the given user matches any of the filters """ + return FILTERS is None or 
any(u in user for u in FILTERS) + + +def email_sort(email): + """ Split the given email address into a reverse order tuple, for sorting i.e (domain, name) """ + return tuple(reversed(email[0].split('@'))) + + +def valid_date(string): + """ Validate the given date string fetched from the --startdate argument """ + try: + date = dateutil.parser.parse(string) + except ValueError: + raise argparse.ArgumentTypeError("Unrecognized date and/or time '%s'" % string) + return date + + +# Print functions + +def print_time_table(labels, data, do_print=True): + labels.insert(0, "hour") + data.insert(0, [str(h) for h in range(24)]) + + temp = "│ {:<%d} " % max(len(l) for l in labels) + lines = [] + + for label in labels: + lines.append(temp.format(label)) + + for h in range(24): + max_len = max(len(str(d[h])) for d in data) + base = "{:>%d} " % max(2, max_len) + + for i, d in enumerate(data): + lines[i] += base.format(d[h]) + + lines.insert(0, "┬") + lines.append("└" + (len(lines[-1]) - 2) * "─") + + if do_print: + print("\n".join(lines)) + else: + return lines + + +def print_user_table(users, data=None, sub_data=None, activity=None, latest=None, earliest=None, + delimit=False): + str_temp = "{:<32} " + lines = [] + data = data or [] + + col_widths = len(data) * [0] + col_left = len(data) * [False] + vert_pos = 0 + + do_accum = all(isinstance(n, (int, float)) for _, d in data for n in d) + data_accum = len(data) * ([0] if do_accum else [" "]) + + last_user = None + + for row, user in enumerate(users): + + if delimit: + if last_user and last_user != user: + lines.append(len(lines[-1]) * "…") + last_user = user + + line = "{:<32} ".format(user[:31] + "…" if len(user) > 32 else user) + + for col, (l, d) in enumerate(data): + if isinstance(d[row], str): + col_str = str_temp.format(d[row][:31] + "…" if len(d[row]) > 32 else d[row]) + col_left[col] = True + elif isinstance(d[row], datetime.datetime): + col_str = "{:<20}".format(str(d[row])) + col_left[col] = True + else: + temp = 
"{:>%s}" % max(5, len(l) + 1, len(str(d[row])) + 1) + col_str = temp.format(str(d[row])) + col_widths[col] = max(col_widths[col], len(col_str)) + line += col_str + + if do_accum: + data_accum[col] += d[row] + + try: + if None not in [latest, earliest]: + vert_pos = len(line) + e = earliest[row] + l = latest[row] + timespan = relativedelta(l, e) + if timespan.months: + temp = " │ {:0.1f} months" + line += temp.format(timespan.months + timespan.days / 30.0) + elif timespan.days: + temp = " │ {:0.1f} days" + line += temp.format(timespan.days + timespan.hours / 24.0) + elif (e.hour, e.minute) == (l.hour, l.minute): + temp = " │ {:%H:%M}" + line += temp.format(e) + else: + temp = " │ {:%H:%M} - {:%H:%M}" + line += temp.format(e, l) + + except KeyError: + pass + + lines.append(line.rstrip()) + + try: + if VERBOSE: + if sub_data is not None: + for l, d in sub_data: + if d[row]: + lines.append("┬") + lines.append("│ %s" % l) + lines.append("├─%s─" % (len(l) * "─")) + lines.append("│") + max_len = 0 + for v in list(d[row]): + lines.append("│ %s" % v) + max_len = max(max_len, len(v)) + lines.append("└" + (max_len + 1) * "─") + + if activity is not None: + lines.extend(print_time_table( + [label for label, _ in activity], + [data[row] for _, data in activity], + do_print=False + )) + + except KeyError: + pass + + header = str_temp.format("") + + for col, (l, _) in enumerate(data): + if col_left[col]: + header += l.ljust(max(5, len(l) + 1, col_widths[col])) + else: + header += l.rjust(max(5, len(l) + 1, col_widths[col])) + + if None not in (latest, earliest): + header += " │ timespan " + + lines.insert(0, header.rstrip()) + + table_width = max(len(l) for l in lines) + t_line = table_width * "─" + b_line = table_width * "─" + + if vert_pos: + t_line = t_line[:vert_pos + 1] + "┼" + t_line[vert_pos + 2:] + b_line = b_line[:vert_pos + 1] + ("┬" if VERBOSE else "┼") + b_line[vert_pos + 2:] + + lines.insert(1, t_line) + lines.append(b_line) + + # Print totals + + data_accum = 
[str(a) for a in data_accum] + footer = str_temp.format("Totals:" if do_accum else " ") + for row, (l, _) in enumerate(data): + temp = "{:>%d}" % max(5, len(l) + 1) + footer += temp.format(data_accum[row]) + + try: + if None not in [latest, earliest]: + max_l = max(latest) + min_e = min(earliest) + timespan = relativedelta(max_l, min_e) + if timespan.days: + temp = " │ {:0.2f} days" + footer += temp.format(timespan.days + timespan.hours / 24.0) + elif (min_e.hour, min_e.minute) == (max_l.hour, max_l.minute): + temp = " │ {:%H:%M}" + footer += temp.format(min_e) + else: + temp = " │ {:%H:%M} - {:%H:%M}" + footer += temp.format(min_e, max_l) + + except KeyError: + pass + + lines.append(footer) + + print("\n".join(lines)) + + +def print_header(msg): + print('\n' + msg) + print("═" * len(msg), '\n') + if __name__ == "__main__": - from status_checks import ConsoleOutput - env = utils.load_environment() - scan_mail_log(ConsoleOutput(), env) + try: + env_vars = utils.load_environment() + except FileNotFoundError: + env_vars = {} + + parser = argparse.ArgumentParser( + description="Scan the mail log files for interesting data. By default, this script " + "shows today's incoming and outgoing mail statistics. This script was (" + "re)written for the Mail-in-a-box email server." 
+ "https://github.com/mail-in-a-box/mailinabox", + add_help=False + ) + + # Switches to determine what to parse and what to ignore + + parser.add_argument("-r", "--received", help="Scan for received emails.", + action="store_true") + parser.add_argument("-s", "--sent", help="Scan for sent emails.", + action="store_true") + parser.add_argument("-l", "--logins", help="Scan for IMAP/POP logins.", + action="store_true") + parser.add_argument("-g", "--grey", help="Scan for greylisted emails.", + action="store_true") + parser.add_argument("-b", "--blocked", help="Scan for blocked emails.", + action="store_true") + + parser.add_argument("-t", "--timespan", choices=TIME_DELTAS.keys(), default='today', + metavar='