#####
# Download helpers (setup/functions-downloads.sh) plus the shared
# logging / argument-parsing additions (setup/functions.sh) carried by
# this patch, reconstructed from the mangled diff and cleaned up.
#####

# Verbosity level shared by the say_* helpers below.  Raised by passing
# -v/-verbose (possibly repeated) on the command line of any script that
# sources this file.
declare -i verbose=${verbose:-0}

# Consume leading -v/-verbose flags; stop at the first other argument.
while [ $# -gt 0 ]; do
	if [ "$1" == "-verbose" -o "$1" == "-v" ]; then
		let verbose+=1
		shift
	else
		break
	fi
done

# Print a fatal error message and exit.
#   $1 - message (optional; a generic message is printed when empty)
#   $2 - exit code (default 1)
die() {
	local msg="${1:-}"
	local rtn="${2:-1}"
	[ ! -z "$msg" ] && echo "FATAL: $msg" || \
		echo "An unrecoverable error occurred, exiting"
	exit $rtn
}

# Return 0 when any verbosity was requested, 1 otherwise.
is_verbose() {
	[ $verbose -gt 0 ] && return 0
	return 1
}

# Echo arguments only at verbosity level 2 or higher.  Always returns 0
# so it is safe under `set -e`.
say_debug() {
	[ $verbose -gt 1 ] && echo "$@"
	return 0
}

# Echo arguments only at verbosity level 1 or higher.  Always returns 0.
say_verbose() {
	[ $verbose -gt 0 ] && echo "$@"
	return 0
}

# Echo arguments unconditionally.
say() {
	echo "$@"
}


# Verify that a file matches an expected sha1 hash.
#
#   $1 - path of the file to check
#   $2 - expected sha1 hash (40 hex characters)
#   $3 - optional description of the download; when non-empty, a
#        human-readable error report is printed on mismatch
#
# Returns 0 when the hash matches, 1 otherwise.
verify_file_sha1sum() {
	local FILE="$1"
	local HASH="$2"
	local output_error_what="${3:-}"
	# sha1sum --check requires the canonical "<hash>  <file>" layout
	# (two separator characters); a single space is rejected by --strict.
	local CHECKSUM="$HASH  $FILE"
	if ! echo "$CHECKSUM" | sha1sum --check --strict > /dev/null; then
		if [ ! -z "$output_error_what" ]; then
			echo "------------------------------------------------------------"
			echo "$output_error_what unexpected checksum."
			echo "Found:"
			sha1sum "$FILE"
			echo
			echo "Expected:"
			echo "$HASH"
		fi
		return 1
	else
		return 0
	fi
}


# Download a link (URL) and optionally cache it.
#
# arguments:
#   1: the url to download
#
#   2: where to send output
#      'to-stdout': dump the url contents to stdout
#      'to-file'  : store the url contents in a file whose path is
#                   returned in global DOWNLOAD_FILE.  If caching is not
#                   enabled the caller is responsible for deleting the
#                   file when no longer needed (DOWNLOAD_FILE_REMOVE is
#                   set to "true" in that case).
#
#   3: whether to cache the request
#      'use-cache': cache to the directory given in argument 5, or to
#                   the default in global DOWNLOAD_CACHE_DIR
#      'no-cache' : do not cache (implied when no cache directory is
#                   configured at all)
#
#   4: the file name to use for the cache entry.  Defaults to the
#      basename of the url.
#
#   5: the cache directory.  Defaults to DOWNLOAD_CACHE_DIR; if neither
#      is set, no caching occurs.
#
#   6: expected sha1 hash of the download [optional].  Ignored for
#      'to-stdout' output.
#
# returns:
#   0 on success, 1 if downloading failed, 2 on hash mismatch
download_link() {
	local url="$1"
	local output_to="${2:-to-stdout}"
	local cache="${3:-use-cache}"
	local cache_file_name="${4:-$(basename "$url")}"
	local cache_dir="${5:-${DOWNLOAD_CACHE_DIR:-}}"
	local expected_hash="${6:-}"

	if [ -z "$cache_dir" ]; then
		say_debug "No cache directory configured, not caching" 1>&2
		cache="no-cache"

	elif [ "$cache" == "use-cache" ]; then
		if ! mkdir -p "$cache_dir" >/dev/null; then
			say_verbose "Could not create cache dir, not caching" 1>&2
			cache="no-cache"
		fi
		if [ ! -w "$cache_dir" ]; then
			say_verbose "Cache dir is not writable, not caching" 1>&2
			cache="no-cache"
		fi
	fi

	#
	# do not use the cache
	#
	if [ "$cache" != "use-cache" ]; then
		if [ "$output_to" == "to-stdout" ]; then
			DOWNLOAD_FILE=""
			DOWNLOAD_FILE_REMOVE="false"
			# -f: fail on http 4xx/5xx instead of emitting the
			# server's error page as if it were the download
			curl -f -s "$url" || return 1
			return 0
		fi

		# use an unpredictable temp name (mktemp) rather than a
		# guessable /tmp/download_file.$$.<epoch> path
		DOWNLOAD_FILE="$(mktemp)" || return 1
		DOWNLOAD_FILE_REMOVE="true"
		say_verbose "Download $url" 1>&2
		if ! curl -f -s "$url" > "$DOWNLOAD_FILE"; then
			# don't leak the temp file on a failed transfer
			rm -f "$DOWNLOAD_FILE"
			DOWNLOAD_FILE=""
			DOWNLOAD_FILE_REMOVE="false"
			return 1
		fi
		if [ ! -z "$expected_hash" ] && \
			   ! verify_file_sha1sum "$DOWNLOAD_FILE" "$expected_hash" "Download of $url"
		then
			rm -f "$DOWNLOAD_FILE"
			DOWNLOAD_FILE=""
			DOWNLOAD_FILE_REMOVE="false"
			return 2
		fi
		return 0
	fi

	#
	# use the cache
	#
	local cache_dst="$cache_dir/$cache_file_name"
	local tmp_dst
	tmp_dst="$(mktemp)" || return 1
	local code=1

	if [ -e "$cache_dst" ]; then
		# cache file exists - download with 'if-modified-since'
		say_verbose "Download (if-modified-since) $url" 1>&2
		curl -f -z "$cache_dst" -s "$url" > "$tmp_dst"
		code=$?

		if [ $code -eq 0 ]; then
			if [ -s "$tmp_dst" ]; then
				# non-empty download: server copy is newer, cache it
				say_verbose "Modified - caching to: $cache_dst" 1>&2
				rm -f "$cache_dst" >/dev/null && \
					mv "$tmp_dst" "$cache_dst" >/dev/null
				code=$?
			else
				# empty response: cache file is up-to-date
				say_verbose "Not modified" 1>&2
				rm -f "$tmp_dst" >/dev/null
			fi
		fi

	else
		# cache file does not exist yet
		say_verbose "Download $url" 1>&2
		curl -f -s "$url" > "$tmp_dst"
		code=$?
		if [ $code -eq 0 ]; then
			say_verbose "Caching to: $cache_dst" 1>&2
			rm -f "$cache_dst" >/dev/null && \
				mv "$tmp_dst" "$cache_dst" >/dev/null
			code=$?
		else
			rm -f "$tmp_dst" >/dev/null
		fi
	fi

	if [ $code -ne 0 ]; then
		rm -f "$tmp_dst" >/dev/null
		return 1
	fi

	if [ "$output_to" == "to-stdout" ]; then
		DOWNLOAD_FILE=""
		DOWNLOAD_FILE_REMOVE="false"
		cat "$cache_dst" || return 1
		return 0
	fi

	DOWNLOAD_FILE="$cache_dst"
	DOWNLOAD_FILE_REMOVE="false"
	if [ ! -z "$expected_hash" ] && \
		   ! verify_file_sha1sum "$DOWNLOAD_FILE" "$expected_hash" "Download of $url"
	then
		# discard the corrupt cache entry
		rm -f "$DOWNLOAD_FILE"
		DOWNLOAD_FILE=""
		return 2
	fi
	return 0
}


# Determine a url where Nextcloud can be downloaded for the version
# specified.  The url is placed into global variable DOWNLOAD_URL.
#
# Specify the version desired to 3 positions as the first argument with
# no leading "v" (eg "19.0.0"), or leave it blank for a url to the
# latest version for a fresh install.  If the latest minor of a specific
# major version is desired, set REQUIRED_NC_FOR_FRESH_INSTALLS to
# "latest-<major>", eg "latest-20".
#
# Unless DOWNLOAD_NEXTCLOUD_FROM_GITHUB is "true", this function always
# returns a link directed at Nextcloud's download servers.  Github
# lookups without an explicit version require jq to be installed.
#
# Specify the archive extension as the second argument (eg "zip" or
# "tar.bz2"; default "tar.bz2").
#
# on return:
#   DOWNLOAD_URL          - url for the requested download
#   DOWNLOAD_URL_CACHE_ID - id to pass to download_link as its
#                           cache_file_name argument
#   the return code is always 0
get_nc_download_url() {
	local ver="${1:-}"
	local ext="${2:-tar.bz2}"
	local url=""
	local url_cache_id=""

	if [ "${DOWNLOAD_NEXTCLOUD_FROM_GITHUB:-false}" == "true" ]; then
		# use the Github REST API to obtain the latest version and
		# link.  if unsuccessful, fall back to using Nextcloud.
		local github_ver=""
		if [ ! -z "$ver" ]; then
			github_ver="v${ver}"
			url="https://github.com/nextcloud/server/releases/download/${github_ver}/nextcloud-${ver}.${ext#.}"
			url_cache_id="nextcloud-${ver}.${ext#.}"

		elif [ -x "/usr/bin/jq" ]; then
			local latest="${REQUIRED_NC_FOR_FRESH_INSTALLS:-latest}"

			if [ "$latest" == "latest" ]; then
				github_ver=$(curl -s -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/nextcloud/server/tags 2>/dev/null | /usr/bin/jq -r '.[].name' | grep -v -i -E '(RC|beta)' | head -1) #eg: "v20.0.1"
			else
				local major=$(awk -F- '{print $2}' <<<"$latest")
				github_ver=$(curl -s -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/nextcloud/server/tags 2>/dev/null | /usr/bin/jq -r '.[].name' | grep "^v$major\\." | grep -v -i -E '(RC|beta)' | head -1) #eg: "v20.0.1"
			fi

			# a failed/blocked curl leaves github_ver empty but the
			# pipeline still exits 0 (head's status), so test the
			# value rather than $?
			if [ -z "$github_ver" ]; then
				say_verbose "Github API call failed! Using Nextcloud's server."
				# fall through and use nextcloud's download site
			else
				local github_plain_ver=$(awk -Fv '{print $2}' <<<"$github_ver")
				url="https://github.com/nextcloud/server/releases/download/$github_ver/nextcloud-${github_plain_ver}.${ext#.}"
				url_cache_id="nextcloud-${github_plain_ver}.${ext#.}"
			fi
		fi

		if [ ! -z "$url" ]; then
			# ensure the download exists - sometimes Github releases
			# only have sources and not a .bz2 file.  In that case we
			# have to revert to using nextcloud's download server
			local http_status
			http_status="$(curl -s -L --head -w "%{http_code}" "$url" | tail -1)"
			local code=$?
			if [ $code -ne 0 ]; then
				say_verbose "Problem contacting Github to verify a download url ($code)"
				url=""

			elif [ "$http_status" != "403" -a "$http_status" != "200" ]; then
				say_verbose "Github doesn't have a download for $github_ver ($http_status)"
				url=""

			else
				# Github returns an html page with a redirect link
				# .. we have to extract the link
				local content
				content=$(download_link "$url" to-stdout no-cache)
				if [ $? -ne 0 ]; then
					say_verbose "Unable to get Github download redir page"
					url=""

				else
					# feed the html to python via stdin instead of
					# interpolating it into the -c program text, where
					# any quote in the page would break (or inject
					# into) the script
					content=$(python3 -c "import sys; import xml.etree.ElementTree as ET; tree=ET.fromstring(sys.stdin.read()); els=tree.findall('.//a'); print(els[0].attrib['href'])" <<<"$content" 2>/dev/null)
					if [ $? -ne 0 ]; then
						say_verbose "Unable to parse Github redirect html"
						url=""

					else
						say_debug "Github redirected to $content"
						url="$content"
					fi
				fi
			fi
		fi
	fi

	# fallback: Nextcloud's own download servers
	if [ -z "$url" ]; then
		if [ -z "$ver" ]; then
			url="https://download.nextcloud.com/server/releases/${REQUIRED_NC_FOR_FRESH_INSTALLS:-latest}.${ext#.}"
			url_cache_id="${REQUIRED_NC_FOR_FRESH_INSTALLS:-latest}.${ext#.}"

		else
			url="https://download.nextcloud.com/server/releases/nextcloud-${ver}.${ext#.}"
			url_cache_id="nextcloud-${ver}.${ext#.}"
		fi
	fi

	DOWNLOAD_URL="$url"
	DOWNLOAD_URL_CACHE_ID="$url_cache_id"
	return 0
}
-z "$msg" ] && echo "FATAL: $msg" || echo "An unrecoverable error occurred, exiting" - exit ${rtn} -} - -say_debug() { - [ $verbose -gt 1 ] && echo $@ - return 0 -} - -say_verbose() { - [ $verbose -gt 0 ] && echo $@ - return 0 -} - -say() { - echo $@ -} ldap_debug_flag() { - [ $verbose -gt 1 ] && echo "-d 1" + [ ${verbose:-0} -gt 1 ] && echo "-d 1" } wait_slapd_start() { @@ -299,7 +277,7 @@ relocate_slapd_data() { # Re-create the config say_verbose "Create new slapd config" local xargs=() - [ $verbose -gt 0 ] && xargs+=(-d 10 -v) + [ ${verbose:-0} -gt 0 ] && xargs+=(-d 10 -v) slapadd -F "${MIAB_SLAPD_CONF}" ${xargs[@]} -n 0 -l "$TMP.2" 2>/dev/null || die "slapadd failed!" chown -R openldap:openldap "${MIAB_SLAPD_CONF}" rm -f "$TMP.2" @@ -370,7 +348,7 @@ add_schemas() { schema_to_ldif "$schema" "$ldif" "$cn" sed -i 's/\$ member \$/$ member $ rfc822MailMember $/' "$ldif" say_verbose "Adding '$cn' schema" - [ $verbose -gt 1 ] && cat "$ldif" + [ ${verbose:-0} -gt 1 ] && cat "$ldif" ldapadd -Q -Y EXTERNAL -H ldapi:/// -f "$ldif" >/dev/null rm -f "$ldif" fi @@ -384,7 +362,7 @@ add_schemas() { local ldif="/tmp/$cn.$$.ldif" schema_to_ldif "$schema" "$ldif" "$cn" say_verbose "Adding '$cn' schema" - [ $verbose -gt 1 ] && cat "$ldif" + [ ${verbose:-0} -gt 1 ] && cat "$ldif" ldapadd -Q -Y EXTERNAL -H ldapi:/// -f "$ldif" >/dev/null rm -f "$ldif" fi @@ -706,7 +684,7 @@ process_cmdline() { local s=${2:-all} local hide_attrs="(structuralObjectClass|entryUUID|creatorsName|createTimestamp|entryCSN|modifiersName|modifyTimestamp)" local slapcat_args=(-F "$MIAB_SLAPD_CONF" -o ldif-wrap=no) - [ $verbose -gt 0 ] && hide_attrs="(_____NEVERMATCHES)" + [ ${verbose:-0} -gt 0 ] && hide_attrs="(_____NEVERMATCHES)" if [ "$s" == "all" ]; then echo "" @@ -739,14 +717,14 @@ process_cmdline() { echo "" echo '--------------------------------' local attrs=(mail member mailRoutingAddress rfc822MailMember) - [ $verbose -gt 0 ] && attrs=() + [ ${verbose:-0} -gt 0 ] && attrs=() debug_search 
"(objectClass=mailGroup)" "$LDAP_ALIASES_BASE" ${attrs[@]} fi if [ "$s" == "permitted-senders" -o "$s" == "ps" ]; then echo "" echo '--------------------------------' local attrs=(mail member mailRoutingAddress rfc822MailMember) - [ $verbose -gt 0 ] && attrs=() + [ ${verbose:-0} -gt 0 ] && attrs=() debug_search "(objectClass=mailGroup)" "$LDAP_PERMITTED_SENDERS_BASE" ${attrs[@]} fi if [ "$s" == "domains" ]; then @@ -789,15 +767,6 @@ process_cmdline() { fi } -while [ $# -gt 0 ]; do - if [ "$1" == "-verbose" -o "$1" == "-v" ]; then - let verbose+=1 - shift - else - break - fi -done - [ $# -gt 0 ] && process_cmdline $@ diff --git a/setup/mods.available/coturn.sh b/setup/mods.available/coturn.sh new file mode 100755 index 00000000..a4e42085 --- /dev/null +++ b/setup/mods.available/coturn.sh @@ -0,0 +1,108 @@ +#!/bin/bash +# -*- indent-tabs-mode: t; tab-width: 4; -*- + +source setup/functions.sh || exit 1 # load our functions + +[ -e /etc/mailinabox.conf ] && source /etc/mailinabox.conf +[ -e /etc/cloudinabox.conf ] && source /etc/cloudinabox.conf + + +create_turnserver_conf() { + if [ ! -e "$STORAGE_ROOT/voip/turnserver.conf" ]; then + mkdir -p "$STORAGE_ROOT/voip" + cat > "$STORAGE_ROOT/voip/turnserver.conf" </etc/turnserver.conf </etc/logrotate.d/coturn < <<-SH +# Set environment variables so that the setup script does +# not ask any questions during provisioning. We'll let the +# machine figure out its own public IP. +export NONINTERACTIVE=1 +export PUBLIC_IP=auto +export PUBLIC_IPV6=auto +export PRIMARY_HOSTNAME=auto +export SKIP_NETWORK_CHECKS=1 + +# Start the setup script. +cd /mailinabox +setup/start.sh +echo "EXITCODE: $?" +SH + end + end + + end