1
0
mirror of https://github.com/mail-in-a-box/mailinabox.git synced 2025-04-03 00:07:05 +00:00

Change to the structure of the custom.yaml file

The structure of the custom.yaml file has been modified to simplify reading
and maintaining the backup.py code and to definitively fix problems with
backups on S3-compatible services not provided by AWS. In addition to the
improved code readability, backups are now verified to work on MinIO,
resolving issue #717.
This commit is contained in:
pappapisshu 2023-01-13 00:56:36 +01:00
parent da06fcbb09
commit 65e6469273
5 changed files with 726 additions and 297 deletions

View File

@ -227,6 +227,32 @@ paths:
text/html:
schema:
type: string
/system/ssh-public-key:
get:
tags:
- System
summary: Get system SSH public key
description: Returns system SSH public key.
operationId: getSystemSSHPublicKey
x-codeSamples:
- lang: curl
source: |
curl -X GET "https://{host}/admin/system/ssh-public-key" \
-u "<email>:<password>"
responses:
200:
description: Successful operation
content:
text/html:
schema:
$ref: '#/components/schemas/SystemSSHPublicKeyResponse'
example: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDb root@box.example.com\n
403:
description: Forbidden
content:
text/html:
schema:
type: string
/system/update-packages:
post:
tags:
@ -424,6 +450,32 @@ paths:
text/html:
schema:
type: string
/system/backup/info:
get:
tags:
- System
summary: Get system backup info
description: |
Returns the system backup info, such as the directory of the system where backups are stored.
operationId: getSystemBackupInfo
x-codeSamples:
- lang: curl
source: |
curl -X GET "https://{host}/admin/system/backup/info" \
-u "<email>:<password>"
responses:
200:
description: Successful operation
content:
application/json:
schema:
$ref: '#/components/schemas/SystemBackupInfoResponse'
403:
description: Forbidden
content:
text/html:
schema:
type: string
/system/backup/config:
get:
tags:
@ -442,7 +494,20 @@ paths:
content:
application/json:
schema:
$ref: '#/components/schemas/SystemBackupConfigResponse'
oneOf:
- $ref: '#/components/schemas/SystemBackupOffConfigResponse'
- $ref: '#/components/schemas/SystemBackupLocalConfigResponse'
- $ref: '#/components/schemas/SystemBackupRSyncConfigResponse'
- $ref: '#/components/schemas/SystemBackupS3ConfigResponse'
- $ref: '#/components/schemas/SystemBackupB2ConfigResponse'
discriminator:
propertyName: type
mapping:
'off': '#/components/schemas/SystemBackupOffConfigResponse'
local: '#/components/schemas/SystemBackupLocalConfigResponse'
rsync: '#/components/schemas/SystemBackupRSyncConfigResponse'
s3: '#/components/schemas/SystemBackupS3ConfigResponse'
b2: '#/components/schemas/SystemBackupB2ConfigResponse'
403:
description: Forbidden
content:
@ -460,49 +525,58 @@ paths:
content:
application/x-www-form-urlencoded:
schema:
$ref: '#/components/schemas/SystemBackupConfigUpdateRequest'
oneOf:
- $ref: '#/components/schemas/SystemBackupOffConfigUpdateRequest'
- $ref: '#/components/schemas/SystemBackupLocalConfigUpdateRequest'
- $ref: '#/components/schemas/SystemBackupRSyncConfigUpdateRequest'
- $ref: '#/components/schemas/SystemBackupS3ConfigUpdateRequest'
- $ref: '#/components/schemas/SystemBackupB2ConfigUpdateRequest'
discriminator:
propertyName: type
mapping:
'off': '#/components/schemas/SystemBackupOffConfigUpdateRequest'
local: '#/components/schemas/SystemBackupLocalConfigUpdateRequest'
rsync: '#/components/schemas/SystemBackupRSyncConfigUpdateRequest'
s3: '#/components/schemas/SystemBackupS3ConfigUpdateRequest'
b2: '#/components/schemas/SystemBackupB2ConfigUpdateRequest'
examples:
s3:
summary: S3 backup
value:
target: s3://s3.eu-central-1.amazonaws.com/box-example-com
target_user: ACCESS_KEY
target_pass: SECRET_ACCESS_KEY
target_region: eu-central-1
minAge: 3
local:
summary: Local backup
value:
target: local
target_user: ''
target_pass: ''
target_region: ''
minAge: 3
rsync:
summary: Rsync backup
value:
target: rsync://username@box.example.com//backups/box.example.com
target_user: ''
target_pass: ''
target_region: ''
minAge: 3
'off':
summary: Disable backups
value:
target: 'off'
target_user: ''
target_pass: ''
target_region: ''
minAge: 0
type: 'off'
local:
summary: Local backup
value:
type: local
min_age_in_days: 3
rsync:
summary: Rsync backup
value:
type: rsync
target_url: rsync://user@example.org/mail-in-a-box
min_age_in_days: 3
s3:
summary: S3 backup
value:
type: s3
target_url: s3://your-bucket-name/your-backup-directory
s3_access_key_id: ACCESS_KEY_ID
s3_secret_access_key: SECRET_ACCESS_KEY
s3_endpoint_url: https://objectstorage.example.org:9199
s3_region_name: region-name-1
min_age_in_days: 3
b2:
summary: B2 backup
value:
type: b2
target_url: b2://account_id:application_key@bucket_name/folder/
min_age_in_days: 3
x-codeSamples:
- lang: curl
source: |
curl -X POST "https://{host}/admin/system/backup/config" \
-d "target=<hostname>" \
-d "target_user=<string>" \
-d "target_pass=<password>" \
-d "target_region=<region>" \
-d "min_age=<integer>" \
-d "type=local" \
-d "min_age_in_days=<integer>" \
-u "<email>:<password>"
responses:
200:
@ -2380,6 +2454,10 @@ components:
example: |
libgnutls30 (3.5.18-1ubuntu1.4)
libxau6 (1:1.0.8-1ubuntu1)
SystemSSHPublicKeyResponse:
type: string
description: System SSH public key response.
example: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDb root@box.example.com\n
SystemUpdatePackagesResponse:
type: string
example: |
@ -2459,74 +2537,212 @@ components:
type: string
example: 'Digest Type: 2 / SHA-256'
description: System entry extra information.
SystemBackupConfigUpdateRequest:
SystemBackupOffConfigUpdateRequest:
type: object
required:
- type
properties:
type:
type: string
example: 'off'
description: Backup "off" config update request.
SystemBackupLocalConfigUpdateRequest:
type: object
required:
- target
- target_user
- target_pass
- target_region
- min_age
properties:
target:
type: string
format: hostname
example: s3://s3.eu-central-1.amazonaws.com/box-example-com
target_user:
type: string
example: username
target_pass:
type: string
example: password
format: password
target_region:
type: string
example: eu-central-1
min_age:
type: integer
format: int32
minimum: 1
example: 3
description: Backup config update request.
SystemBackupConfigUpdateResponse:
type: string
example: OK
description: Backup config update response.
SystemBackupConfigResponse:
type: object
required:
- enc_pw_file
- file_target_directory
- min_age_in_days
- ssh_pub_key
- target
properties:
enc_pw_file:
type:
type: string
example: /home/user-data/backup/secret_key.txt
file_target_directory:
type: string
example: /home/user-data/backup/encrypted
example: local
min_age_in_days:
type: integer
format: int32
minimum: 1
example: 3
ssh_pub_key:
description: Backup "local" config update request.
SystemBackupRSyncConfigUpdateRequest:
type: object
required:
- type
- target_url
- min_age_in_days
properties:
type:
type: string
example: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDb root@box.example.com\n
target:
example: rsync
target_url:
type: string
format: hostname
example: s3://s3.eu-central-1.amazonaws.com/box-example-com
target_user:
format: uri
example: rsync://user@example.org/mail-in-a-box
min_age_in_days:
type: integer
format: int32
minimum: 1
example: 3
description: Backup "rsync" config update request.
SystemBackupS3ConfigUpdateRequest:
type: object
required:
- type
- s3_access_key_id
- s3_secret_access_key
- min_age_in_days
- target_url
- s3_endpoint_url
properties:
type:
type: string
target_pass:
example: s3
target_url:
type: string
target_region:
format: uri
example: s3://your-bucket-name/your-backup-directory
s3_endpoint_url:
type: string
example: eu-central-1
description: Backup config response.
format: uri
example: https://objectstorage.example.org:9199
s3_region_name:
type: string
example: region-name-1
s3_access_key_id:
type: string
example: access_key_id
s3_secret_access_key:
type: string
example: secret_access_key
format: password
min_age_in_days:
type: integer
format: int32
minimum: 1
example: 3
description: Backup "s3" config update request.
SystemBackupB2ConfigUpdateRequest:
type: object
required:
- type
- target_url
- min_age_in_days
properties:
target_url:
type: string
format: uri
example: b2://account_id:application_key@bucket_name/folder/
type:
type: string
example: b2
min_age_in_days:
type: integer
format: int32
minimum: 1
example: 3
description: Backup "b2" config update request.
SystemBackupConfigUpdateResponse:
type: string
example: OK
description: Backup config update response.
SystemBackupOffConfigResponse:
type: object
required:
- type
properties:
type:
type: string
example: 'off'
description: Backup "off" config response.
SystemBackupLocalConfigResponse:
type: object
required:
- type
- target_url
- min_age_in_days
properties:
type:
type: string
example: local
target_url:
type: string
format: uri
example: file:///home/user-data/backup/encrypted
min_age_in_days:
type: integer
format: int32
minimum: 1
example: 3
description: Backup "local" config response.
SystemBackupRSyncConfigResponse:
type: object
required:
- type
- target_url
- min_age_in_days
properties:
type:
type: string
example: rsync
target_url:
type: string
format: uri
example: rsync://user@example.org/mail-in-a-box
min_age_in_days:
type: integer
format: int32
minimum: 1
example: 3
description: Backup "rsync" config response.
SystemBackupS3ConfigResponse:
type: object
required:
- type
- target_url
- s3_endpoint_url
- min_age_in_days
properties:
type:
type: string
example: s3
s3_access_key_id:
type: string
s3_secret_access_key:
type: string
target_url:
type: string
format: uri
example: s3://your-bucket-name/your-backup-directory
s3_endpoint_url:
type: string
format: uri
example: https://objectstorage.example.org:9199
s3_region_name:
type: string
example: region-name-1
min_age_in_days:
type: integer
format: int32
minimum: 1
example: 3
description: Backup "s3" config response.
SystemBackupB2ConfigResponse:
type: object
required:
- type
- target_url
- min_age_in_days
properties:
type:
type: string
example: b2
target_url:
type: string
format: uri
example: b2://account_id:application_key@bucket_name/folder/
min_age_in_days:
type: integer
format: int32
minimum: 1
example: 3
description: Backup "b2" config response.
SystemBackupStatusResponse:
type: object
required:
@ -2544,6 +2760,34 @@ components:
type: string
example: Something is wrong with the backup
description: Backup status response. Lists the status for all backups.
SystemBackupInfoResponse:
type: object
required:
- cache_directory
- configuration_file
- encrypted_directory
- encryption_key_file
- root_directory
properties:
cache_directory:
type: string
example: /home/user-data/backup/cache
configuration_file:
type: string
example: /home/user-data/backup/custom.yaml
encrypted_directory:
type: string
example: /home/user-data/backup/encrypted
encryption_key_file:
type: string
example: /home/user-data/backup/secret_key.txt
root_directory:
type: string
example: "/home/user-data/backup"
error:
type: string
example: Something is wrong with the backup
description: Backup info response. Lists the filesystem paths and files used by the backup system.
SystemBackupStatus:
type: object
required:
@ -2742,3 +2986,4 @@ components:
properties:
status:
type: string

View File

@ -14,10 +14,25 @@ from exclusiveprocess import Lock
from utils import load_environment, shell, wait_for_service
def get_backup_root_directory(env):
return os.path.join(env["STORAGE_ROOT"], 'backup')
def get_backup_cache_directory(env):
return os.path.join(get_backup_root_directory(env), 'cache')
def get_backup_encrypted_directory(env):
return os.path.join(get_backup_root_directory(env), 'encrypted')
def get_backup_configuration_file(env):
return os.path.join(get_backup_root_directory(env), 'custom.yaml')
def get_backup_encryption_key_file(env):
return os.path.join(get_backup_root_directory(env), 'secret_key.txt')
def backup_status(env):
# If backups are dissbled, return no status.
# If backups are disabled, return no status.
config = get_backup_config(env)
if config["target"] == "off":
if config["type"] == "off":
return { }
# Query duplicity to get a list of all full and incremental
@ -25,8 +40,8 @@ def backup_status(env):
backups = { }
now = datetime.datetime.now(dateutil.tz.tzlocal())
backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
backup_cache_dir = os.path.join(backup_root, 'cache')
backup_root = get_backup_root_directory(env)
backup_cache_dir = get_backup_cache_directory(env)
def reldate(date, ref, clip):
if ref < date: return clip
@ -59,7 +74,7 @@ def backup_status(env):
"--archive-dir", backup_cache_dir,
"--gpg-options", "--cipher-algo=AES256",
"--log-fd", "1",
get_duplicity_target_url(config),
config["target_url"],
] + get_duplicity_additional_args(env),
get_duplicity_env_vars(env),
trap=True)
@ -183,57 +198,28 @@ def get_passphrase(env):
# that line is long enough to be a reasonable passphrase. It
# only needs to be 43 base64-characters to match AES256's key
# length of 32 bytes.
backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
with open(os.path.join(backup_root, 'secret_key.txt')) as f:
with open(get_backup_encryption_key_file(env)) as f:
passphrase = f.readline().strip()
if len(passphrase) < 43: raise Exception("secret_key.txt's first line is too short!")
return passphrase
def get_duplicity_target_url(config):
target = config["target"]
if get_target_type(config) == "s3":
from urllib.parse import urlsplit, urlunsplit
target = list(urlsplit(target))
# Although we store the S3 hostname in the target URL,
# duplicity no longer accepts it in the target URL. The hostname in
# the target URL must be the bucket name. The hostname is passed
# via get_duplicity_additional_args. Move the first part of the
# path (the bucket name) into the hostname URL component, and leave
# the rest for the path.
target[1], target[2] = target[2].lstrip('/').split('/', 1)
target = urlunsplit(target)
return target
def get_duplicity_additional_args(env):
config = get_backup_config(env)
if get_target_type(config) == 'rsync':
if config["type"] == 'rsync':
return [
"--ssh-options= -i /root/.ssh/id_rsa_miab",
"--rsync-options= -e \"/usr/bin/ssh -oStrictHostKeyChecking=no -oBatchMode=yes -p 22 -i /root/.ssh/id_rsa_miab\"",
]
elif get_target_type(config) == 's3':
# See note about hostname in get_duplicity_target_url.
from urllib.parse import urlsplit, urlunsplit
target = urlsplit(config["target"])
endpoint_url = urlunsplit(("https", target.netloc, '', '', ''))
elif config["type"] == 's3':
additional_args = ["--s3-endpoint-url", config["s3_endpoint_url"]]
# The target_region parameter has been added since duplicity
# now requires it for most cases in which
# the S3-compatible service is not provided by AWS.
# Nevertheless, some users who use mail-in-a-box
# from before version v60 and who use AWS's S3 service
# may not have this parameter in the configuration.
if "target_region" in config:
region = config["target_region"]
return ["--s3-endpoint-url", endpoint_url, "--s3-region-name", region]
else:
return ["--s3-endpoint-url", endpoint_url]
if "s3_region_name" in config:
additional_args.append("--s3-region-name")
additional_args.append(config["s3_region_name"])
return additional_args
return []
@ -242,16 +228,12 @@ def get_duplicity_env_vars(env):
env = { "PASSPHRASE" : get_passphrase(env) }
if get_target_type(config) == 's3':
env["AWS_ACCESS_KEY_ID"] = config["target_user"]
env["AWS_SECRET_ACCESS_KEY"] = config["target_pass"]
if config["type"] == 's3':
env["AWS_ACCESS_KEY_ID"] = config["s3_access_key_id"]
env["AWS_SECRET_ACCESS_KEY"] = config["s3_secret_access_key"]
return env
def get_target_type(config):
protocol = config["target"].split(":")[0]
return protocol
def perform_backup(full_backup):
env = load_environment()
@ -260,12 +242,12 @@ def perform_backup(full_backup):
Lock(die=True).forever()
config = get_backup_config(env)
backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
backup_cache_dir = os.path.join(backup_root, 'cache')
backup_dir = os.path.join(backup_root, 'encrypted')
backup_root = get_backup_root_directory(env)
backup_cache_dir = get_backup_cache_directory(env)
backup_dir = get_backup_encrypted_directory(env)
# Are backups disabled?
if config["target"] == "off":
if config["type"] == "off":
return
# On the first run, always do a full backup. Incremental
@ -300,7 +282,7 @@ def perform_backup(full_backup):
pre_script = os.path.join(backup_root, 'before-backup')
if os.path.exists(pre_script):
shell('check_call',
['su', env['STORAGE_USER'], '-c', pre_script, config["target"]],
['su', env['STORAGE_USER'], '-c', pre_script, config["target_url"]],
env=env)
# Run a backup of STORAGE_ROOT (but excluding the backups themselves!).
@ -316,7 +298,7 @@ def perform_backup(full_backup):
"--volsize", "250",
"--gpg-options", "--cipher-algo=AES256",
env["STORAGE_ROOT"],
get_duplicity_target_url(config),
config["target_url"],
"--allow-source-mismatch"
] + get_duplicity_additional_args(env),
get_duplicity_env_vars(env))
@ -336,7 +318,7 @@ def perform_backup(full_backup):
"--verbosity", "error",
"--archive-dir", backup_cache_dir,
"--force",
get_duplicity_target_url(config)
config["target_url"]
] + get_duplicity_additional_args(env),
get_duplicity_env_vars(env))
@ -351,13 +333,13 @@ def perform_backup(full_backup):
"--verbosity", "error",
"--archive-dir", backup_cache_dir,
"--force",
get_duplicity_target_url(config)
config["target_url"]
] + get_duplicity_additional_args(env),
get_duplicity_env_vars(env))
# Change ownership of backups to the user-data user, so that the after-bcakup
# script can access them.
if get_target_type(config) == 'file':
if config["type"] == 'local':
shell('check_call', ["/bin/chown", "-R", env["STORAGE_USER"], backup_dir])
# Execute a post-backup script that does the copying to a remote server.
@ -366,7 +348,7 @@ def perform_backup(full_backup):
post_script = os.path.join(backup_root, 'after-backup')
if os.path.exists(post_script):
shell('check_call',
['su', env['STORAGE_USER'], '-c', post_script, config["target"]],
['su', env['STORAGE_USER'], '-c', post_script, config["target_url"]],
env=env)
# Our nightly cron job executes system status checks immediately after this
@ -378,9 +360,9 @@ def perform_backup(full_backup):
def run_duplicity_verification():
env = load_environment()
backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
backup_root = get_backup_root_directory(env)
config = get_backup_config(env)
backup_cache_dir = os.path.join(backup_root, 'cache')
backup_cache_dir = get_backup_cache_directory(env)
shell('check_call', [
"/usr/bin/duplicity",
@ -389,41 +371,47 @@ def run_duplicity_verification():
"--compare-data",
"--archive-dir", backup_cache_dir,
"--exclude", backup_root,
get_duplicity_target_url(config),
config["target_url"],
env["STORAGE_ROOT"],
] + get_duplicity_additional_args(env), get_duplicity_env_vars(env))
def run_duplicity_restore(args):
env = load_environment()
config = get_backup_config(env)
backup_cache_dir = os.path.join(env["STORAGE_ROOT"], 'backup', 'cache')
backup_cache_dir = get_backup_cache_directory(env)
shell('check_call', [
"/usr/bin/duplicity",
"restore",
"--archive-dir", backup_cache_dir,
get_duplicity_target_url(config),
config["target_url"],
] + get_duplicity_additional_args(env) + args,
get_duplicity_env_vars(env))
def list_target_files(config):
import urllib.parse
try:
target = urllib.parse.urlparse(config["target"])
except ValueError:
return "invalid target"
if config["type"] == "local":
import urllib.parse
try:
url = urllib.parse.urlparse(config["target_url"])
except ValueError:
return "invalid target"
if target.scheme == "file":
return [(fn, os.path.getsize(os.path.join(target.path, fn))) for fn in os.listdir(target.path)]
return [(fn, os.path.getsize(os.path.join(url.path, fn))) for fn in os.listdir(url.path)]
elif config["type"] == "rsync":
import urllib.parse
try:
url = urllib.parse.urlparse(config["target_url"])
except ValueError:
return "invalid target"
elif target.scheme == "rsync":
rsync_fn_size_re = re.compile(r'.* ([^ ]*) [^ ]* [^ ]* (.*)')
rsync_target = '{host}:{path}'
target_path = target.path
if not target_path.endswith('/'):
target_path = target_path + '/'
if target_path.startswith('/'):
target_path = target_path[1:]
url_path = url.path
if not url_path.endswith('/'):
url_path = url_path + '/'
if url_path.startswith('/'):
url_path = url_path[1:]
rsync_command = [ 'rsync',
'-e',
@ -431,8 +419,8 @@ def list_target_files(config):
'--list-only',
'-r',
rsync_target.format(
host=target.netloc,
path=target_path)
host=url.netloc,
path=url_path)
]
code, listing = shell('check_output', rsync_command, trap=True, capture_stderr=True)
@ -447,24 +435,29 @@ def list_target_files(config):
if 'Permission denied (publickey).' in listing:
reason = "Invalid user or check you correctly copied the SSH key."
elif 'No such file or directory' in listing:
reason = "Provided path {} is invalid.".format(target_path)
reason = "Provided path {} is invalid.".format(url_path)
elif 'Network is unreachable' in listing:
reason = "The IP address {} is unreachable.".format(target.hostname)
reason = "The IP address {} is unreachable.".format(url.hostname)
elif 'Could not resolve hostname' in listing:
reason = "The hostname {} cannot be resolved.".format(target.hostname)
reason = "The hostname {} cannot be resolved.".format(url.hostname)
else:
reason = "Unknown error." \
"Please check running 'management/backup.py --verify'" \
"from mailinabox sources to debug the issue."
raise ValueError("Connection to rsync host failed: {}".format(reason))
elif target.scheme == "s3":
elif config["type"] == "s3":
import urllib.parse
try:
url = urllib.parse.urlparse(config["target_url"])
except ValueError:
return "invalid target"
import boto3.s3
from botocore.exceptions import ClientError
# separate bucket from path in target
bucket = target.path[1:].split('/')[0]
path = '/'.join(target.path[1:].split('/')[1:]) + '/'
bucket = url.hostname
path = url.path
# If no prefix is specified, set the path to '', otherwise boto won't list the files
if path == '/':
@ -475,25 +468,42 @@ def list_target_files(config):
# connect to the region & bucket
try:
s3 = boto3.client('s3', \
endpoint_url=f'https://{target.hostname}', \
aws_access_key_id=config['target_user'], \
aws_secret_access_key=config['target_pass'])
s3 = None
if "s3_region_name" in config:
s3 = boto3.client('s3', \
endpoint_url=config["s3_endpoint_url"], \
region_name=config["s3_region_name"], \
aws_access_key_id=config["s3_access_key_id"], \
aws_secret_access_key=config["s3_secret_access_key"])
else:
s3 = boto3.client('s3', \
endpoint_url=config["s3_endpoint_url"], \
aws_access_key_id=config["s3_access_key_id"], \
aws_secret_access_key=config["s3_secret_access_key"])
bucket_objects = s3.list_objects_v2(Bucket=bucket, Prefix=path)['Contents']
backup_list = [(key['Key'][len(path):], key['Size']) for key in bucket_objects]
except ClientError as e:
raise ValueError(e)
return backup_list
elif target.scheme == 'b2':
elif config["type"] == "b2":
import urllib.parse
try:
url = urllib.parse.urlparse(config["target_url"])
except ValueError:
return "invalid target"
from b2sdk.v1 import InMemoryAccountInfo, B2Api
from b2sdk.v1.exception import NonExistentBucket
info = InMemoryAccountInfo()
b2_api = B2Api(info)
# Extract information from target
b2_application_keyid = target.netloc[:target.netloc.index(':')]
b2_application_key = target.netloc[target.netloc.index(':')+1:target.netloc.index('@')]
b2_bucket = target.netloc[target.netloc.index('@')+1:]
b2_application_keyid = url.netloc[:url.netloc.index(':')]
b2_application_key = url.netloc[url.netloc.index(':')+1:url.netloc.index('@')]
b2_bucket = url.netloc[url.netloc.index('@')+1:]
try:
b2_api.authorize_account("production", b2_application_keyid, b2_application_key)
@ -503,28 +513,46 @@ def list_target_files(config):
return [(key.file_name, key.size) for key, _ in bucket.ls()]
else:
raise ValueError(config["target"])
raise ValueError(config["type"])
def backup_set_custom(env, target, target_user, target_pass, target_region, min_age):
config = get_backup_config(env, for_save=True)
def set_off_backup_config(env):
config = {
"type": "off"
}
# min_age must be an int
if isinstance(min_age, str):
min_age = int(min_age)
write_backup_config(env, config)
config["target"] = target
config["target_user"] = target_user
config["target_pass"] = target_pass
config["target_region"] = target_region
config["min_age_in_days"] = min_age
return "OK"
def set_local_backup_config(env, min_age_in_days):
# min_age_in_days must be an int
if isinstance(min_age_in_days, str):
min_age_in_days = int(min_age_in_days)
config = {
"type": "local",
"min_age_in_days": min_age_in_days
}
write_backup_config(env, config)
return "OK"
def set_rsync_backup_config(env, min_age_in_days, target_url):
# min_age_in_days must be an int
if isinstance(min_age_in_days, str):
min_age_in_days = int(min_age_in_days)
config = {
"type": "rsync",
"target_url": target_url,
"min_age_in_days": min_age_in_days
}
# Validate.
try:
if config["target"] not in ("off", "local"):
# these aren't supported by the following function, which expects a full url in the target key,
# which is what is there except when loading the config prior to saving
list_target_files(config)
list_target_files(config)
except ValueError as e:
return str(e)
@ -532,49 +560,128 @@ def backup_set_custom(env, target, target_user, target_pass, target_region, min_
return "OK"
def get_backup_config(env, for_save=False, for_ui=False):
backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
def set_s3_backup_config(env, min_age_in_days, s3_access_key_id, s3_secret_access_key, target_url, s3_endpoint_url, s3_region_name=None):
# min_age_in_days must be an int
if isinstance(min_age_in_days, str):
min_age_in_days = int(min_age_in_days)
config = {
"type": "s3",
"target_url": target_url,
"s3_endpoint_url": s3_endpoint_url,
"s3_region_name": s3_region_name,
"s3_access_key_id": s3_access_key_id,
"s3_secret_access_key": s3_secret_access_key,
"min_age_in_days": min_age_in_days
}
if s3_region_name is not None:
config["s3_region_name"] = s3_region_name
# Validate.
try:
list_target_files(config)
except ValueError as e:
return str(e)
write_backup_config(env, config)
return "OK"
def set_b2_backup_config(env, min_age_in_days, target_url):
# min_age_in_days must be an int
if isinstance(min_age_in_days, str):
min_age_in_days = int(min_age_in_days)
config = {
"type": "b2",
"target_url": target_url,
"min_age_in_days": min_age_in_days
}
# Validate.
try:
list_target_files(config)
except ValueError as e:
return str(e)
write_backup_config(env, config)
return "OK"
def get_backup_config(env):
backup_root = get_backup_root_directory(env)
# Defaults.
config = {
"min_age_in_days": 3,
"target": "local",
"type": "local",
"min_age_in_days": 3
}
# Merge in anything written to custom.yaml.
try:
custom_config = rtyaml.load(open(os.path.join(backup_root, 'custom.yaml')))
custom_config = rtyaml.load(open(get_backup_configuration_file(env)))
if not isinstance(custom_config, dict): raise ValueError() # caught below
# Converting the previous configuration (which was not very clear)
# into the new configuration format which also provides
# a "type" attribute to distinguish the type of backup.
if "type" not in custom_config:
scheme = custom_config["target"].split(":")[0]
if scheme == "off":
custom_config = {
"type": "off"
}
elif scheme == "file":
custom_config = {
"type": "local",
"min_age_in_days": custom_config["min_age_in_days"]
}
elif scheme == "rsync":
custom_config = {
"type": "rsync",
"target_url": custom_config["target"],
"min_age_in_days": custom_config["min_age_in_days"]
}
elif scheme == "s3":
import urllib.parse
url = urllib.parse.urlparse(custom_config["target"])
target_url = url.scheme + ":/" + url.path
s3_endpoint_url = "https://" + url.netloc
s3_access_key_id = custom_config["target_user"]
s3_secret_access_key = custom_config["target_pass"]
custom_config = {
"type": "s3",
"target_url": target_url,
"s3_endpoint_url": s3_endpoint_url,
"s3_access_key_id": custom_config["target_user"],
"s3_secret_access_key": custom_config["target_pass"],
"min_age_in_days": custom_config["min_age_in_days"]
}
elif scheme == "b2":
custom_config = {
"type": "b2",
"target_url": custom_config["target"],
"min_age_in_days": custom_config["min_age_in_days"]
}
else:
raise ValueError("Unexpected scheme during the conversion of the previous config to the new format.")
config.update(custom_config)
except:
pass
# When updating config.yaml, don't do any further processing on what we find.
if for_save:
return config
# When passing this back to the admin to show the current settings, do not include
# authentication details. The user will have to re-enter it.
if for_ui:
for field in ("target_user", "target_pass"):
if field in config:
del config[field]
# helper fields for the admin
config["file_target_directory"] = os.path.join(backup_root, 'encrypted')
config["enc_pw_file"] = os.path.join(backup_root, 'secret_key.txt')
if config["target"] == "local":
# Expand to the full URL.
config["target"] = "file://" + config["file_target_directory"]
ssh_pub_key = os.path.join('/root', '.ssh', 'id_rsa_miab.pub')
if os.path.exists(ssh_pub_key):
config["ssh_pub_key"] = open(ssh_pub_key, 'r').read()
# Adding an implicit information (for "local" backup, the target_url corresponds
# to the encrypted directory) because in the backup_status function it is easier
# to pass to duplicity the value of "target_url" without worrying about
# distinguishing between "local" or "rsync" or "s3" or "b2" backup types.
if config["type"] == "local":
config["target_url"] = "file://" + get_backup_encrypted_directory(env)
return config
def write_backup_config(env, newconfig):
backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
with open(os.path.join(backup_root, 'custom.yaml'), "w") as f:
with open(get_backup_configuration_file(env), "w") as f:
f.write(rtyaml.dump(newconfig))
if __name__ == "__main__":

View File

@ -579,6 +579,13 @@ def show_updates():
% (p["package"], p["version"])
for p in list_apt_updates())
@app.route('/system/ssh-public-key')
@authorized_personnel_only
def get_ssh_public_key():
from utils import load_ssh_public_key
return load_ssh_public_key()
# return json_response({"ssh_public_key": load_ssh_public_key()})
@app.route('/system/update-packages', methods=["POST"])
@authorized_personnel_only
def do_updates():
@ -617,23 +624,70 @@ def backup_status():
except Exception as e:
return json_response({ "error": str(e) })
@app.route('/system/backup/info')
@authorized_personnel_only
def backup_info():
from backup import get_backup_root_directory, get_backup_cache_directory, get_backup_encrypted_directory, get_backup_configuration_file, get_backup_encryption_key_file
try:
info = {
"root_directory": get_backup_root_directory(env),
"cache_directory": get_backup_cache_directory(env),
"encrypted_directory": get_backup_encrypted_directory(env),
"configuration_file": get_backup_configuration_file(env),
"encryption_key_file": get_backup_encryption_key_file(env)
}
return json_response(info)
except Exception as e:
return json_response({ "error": str(e) })
@app.route('/system/backup/config', methods=["GET"])
@authorized_personnel_only
def backup_get_custom():
from backup import get_backup_config
return json_response(get_backup_config(env, for_ui=True))
config = get_backup_config(env)
# When passing this back to the admin to show the current settings, do not include
# authentication details. The user will have to re-enter it.
for field in ("s3_access_key_id", "s3_secret_access_key"):
if field in config:
del config[field]
return json_response(config)
@app.route('/system/backup/config', methods=["POST"])
@authorized_personnel_only
def backup_set_custom():
	"""Update the backup configuration from the admin-panel form.

	Dispatches on the posted 'type' field to the matching setter in
	backup.py ("off", "local", "rsync", "s3", "b2") and returns that
	setter's result as JSON, or {"error": ...} for an unknown type.

	BUGFIX: the stale pre-refactor implementation (an unconditional
	`return json_response(backup_set_custom(env, ...))`) preceded this
	dispatch code and made it unreachable; it has been removed.
	"""
	from backup import (set_off_backup_config, set_local_backup_config,
		set_rsync_backup_config, set_s3_backup_config, set_b2_backup_config)
	# Renamed from `type` to avoid shadowing the builtin.
	config_type = request.form.get('type', '')
	if config_type == "off":
		return json_response(set_off_backup_config(env))
	elif config_type == "local":
		return json_response(set_local_backup_config(env,
			request.form.get('min_age_in_days', '')
		))
	elif config_type == "rsync":
		return json_response(set_rsync_backup_config(env,
			request.form.get('min_age_in_days', ''),
			request.form.get('target_url', '')
		))
	elif config_type == "s3":
		return json_response(set_s3_backup_config(env,
			request.form.get('min_age_in_days', ''),
			request.form.get('s3_access_key_id', ''),
			request.form.get('s3_secret_access_key', ''),
			request.form.get('target_url', ''),
			request.form.get('s3_endpoint_url', ''),
			# region is optional; None means "not specified"
			request.form.get('s3_region_name', None)
		))
	elif config_type == "b2":
		return json_response(set_b2_backup_config(env,
			request.form.get('min_age_in_days', ''),
			request.form.get('target_url', '')
		))
	else:
		return json_response({"error": "unknown config type"})
@app.route('/system/privacy', methods=["GET"])
@authorized_personnel_only

View File

@ -9,7 +9,7 @@
<h3>Configuration</h3>
<form class="form-horizontal" role="form" onsubmit="set_custom_backup(); return false;">
<form class="form-horizontal" role="form" onsubmit="set_backup_configuration(); return false;">
<div class="form-group">
<label for="backup-target-type" class="col-sm-2 control-label">Backup to:</label>
<div class="col-sm-2">
@ -78,33 +78,33 @@
</div>
</div>
<div class="form-group backup-target-s3">
<label for="backup-target-s3-host" class="col-sm-2 control-label">S3 Host / Endpoint</label>
<label for="backup-target-url" class="col-sm-2 control-label">S3 Target URL</label>
<div class="col-sm-8">
<input type="text" placeholder="Endpoint" class="form-control" rows="1" id="backup-target-s3-host">
<input type="text" placeholder="s3://your-bucket-name/your-backup-directory" class="form-control" rows="1" id="backup-target-url">
</div>
</div>
<div class="form-group backup-target-s3">
<label for="backup-target-s3-region" class="col-sm-2 control-label">S3 Region</label>
<label for="backup-s3-endpoint-url" class="col-sm-2 control-label">S3 Endpoint URL</label>
<div class="col-sm-8">
<input type="text" placeholder="Region" class="form-control" rows="1" id="backup-target-s3-region">
<input type="text" placeholder="https://objectstorage.example.org:9199" class="form-control" rows="1" id="backup-s3-endpoint-url">
</div>
</div>
<div class="form-group backup-target-s3">
<label for="backup-target-s3-path" class="col-sm-2 control-label">S3 Path</label>
<label for="backup-s3-region-name" class="col-sm-2 control-label">S3 Region Name</label>
<div class="col-sm-8">
<input type="text" placeholder="your-bucket-name/backup-directory" class="form-control" rows="1" id="backup-target-s3-path">
<input type="text" placeholder="region-name-1" class="form-control" rows="1" id="backup-s3-region-name">
</div>
</div>
<div class="form-group backup-target-s3">
<label for="backup-target-user" class="col-sm-2 control-label">S3 Access Key</label>
<label for="backup-s3-access-key-id" class="col-sm-2 control-label">S3 Access Key Id</label>
<div class="col-sm-8">
<input type="text" class="form-control" rows="1" id="backup-target-user">
<input type="text" class="form-control" rows="1" id="backup-s3-access-key-id">
</div>
</div>
<div class="form-group backup-target-s3">
<label for="backup-target-pass" class="col-sm-2 control-label">S3 Secret Access Key</label>
<label for="backup-s3-secret-access-key" class="col-sm-2 control-label">S3 Secret Access Key</label>
<div class="col-sm-8">
<input type="text" class="form-control" rows="1" id="backup-target-pass">
<input type="text" class="form-control" rows="1" id="backup-s3-secret-access-key">
</div>
</div>
<!-- Backblaze -->
@ -186,7 +186,7 @@ function nice_size(bytes) {
}
function show_system_backup() {
show_custom_backup()
show_backup_configuration()
$('#backup-status tbody').html("<tr><td colspan='2' class='text-muted'>Loading...</td></tr>")
api(
@ -233,41 +233,57 @@ function show_system_backup() {
})
}
function show_backup_configuration() {
  // Hide every target-specific field group; toggle_form() re-shows the
  // group matching the selected backup type.
  $(".backup-target-local, .backup-target-rsync, .backup-target-s3, .backup-target-b2").hide();
  // Populate the SSH public key shown for rsync targets (plain-text response).
  api(
    "/system/ssh-public-key",
    "GET",
    { },
    function(r) {
      $("#ssh-pub-key").val(r);
    })
  // Show where encrypted backups and the encryption key live on this box.
  api(
    "/system/backup/info",
    "GET",
    { },
    function(r) {
      $(".backup-location").text(r.encrypted_directory);
      $(".backup-encpassword-file").text(r.encryption_key_file);
    })
  // Load the stored configuration and fill in the form for its type.
  api(
    "/system/backup/config",
    "GET",
    { },
    function(r) {
      if(r.type == "off") {
        $("#backup-target-type").val("off");
      } else if(r.type == "local") {
        $("#backup-target-type").val("local");
        $("#min-age").val(r.min_age_in_days);
      } else if(r.type == "rsync") {
        $("#min-age").val(r.min_age_in_days);
        $("#backup-target-type").val("rsync");
        // target_url looks like rsync://user@host//path — split it back apart.
        var path = r.target_url.substring(8).split('//');
        var host_parts = path.shift().split('@');
        $("#backup-target-rsync-user").val(host_parts[0]);
        $("#backup-target-rsync-host").val(host_parts[1]);
        $("#backup-target-rsync-path").val('/'+path[0]);
      } else if(r.type == "s3") {
        $("#backup-s3-access-key-id").val(r.s3_access_key_id);
        $("#backup-s3-secret-access-key").val(r.s3_secret_access_key);
        $("#backup-target-type").val("s3");
        $("#backup-target-url").val(r.target_url);
        // BUGFIX: selectors must match the input ids declared in the form
        // (#backup-s3-endpoint-url / #backup-s3-region-name); the previous
        // #backup-s3-endpoint / #backup-s3-region matched nothing, so the
        // stored endpoint and region were never displayed.
        $("#backup-s3-endpoint-url").val(r.s3_endpoint_url);
        $("#backup-s3-region-name").val(r.s3_region_name);
        $("#min-age").val(r.min_age_in_days);
      } else if(r.type == "b2") {
        $("#min-age").val(r.min_age_in_days);
        $("#backup-target-type").val("b2");
        // target_url looks like b2://keyid:appkey@bucket — split it back apart.
        var targetPath = r.target_url.substring(5);
        var b2_application_keyid = targetPath.split(':')[0];
        var b2_applicationkey = targetPath.split(':')[1].split('@')[0];
        var b2_bucket = targetPath.split('@')[1];
        $("#backup-target-b2-user").val(b2_application_keyid);
        $("#backup-target-b2-pass").val(b2_applicationkey);
        $("#backup-target-b2-bucket").val(b2_bucket);
      }
      toggle_form()
    })
}
function set_custom_backup() {
var target_type = $("#backup-target-type").val();
var target_user = $("#backup-target-user").val();
var target_pass = $("#backup-target-pass").val();
function set_backup_configuration() {
var target;
var target_region = '';
if (target_type == "local" || target_type == "off") {
target = target_type;
} else if (target_type == "s3") {
target = "s3://" + $("#backup-target-s3-host").val() + "/" + $("#backup-target-s3-path").val();
target_region = $("#backup-target-s3-region").val();
} else if (target_type == "rsync") {
target = "rsync://" + $("#backup-target-rsync-user").val() + "@" + $("#backup-target-rsync-host").val()
+ "/" + $("#backup-target-rsync-path").val();
target_user = '';
} else if (target_type == "b2") {
target = 'b2://' + $('#backup-target-b2-user').val() + ':' + $('#backup-target-b2-pass').val()
+ '@' + $('#backup-target-b2-bucket').val()
target_user = '';
target_pass = '';
var config = {
type: $("#backup-target-type").val()
};
if(config.type == "local") {
config["min_age_in_days"] = $("#min-age").val();
} else if(config.type == "rsync") {
config["target_url"] = "rsync://" + $("#backup-target-rsync-user").val() + "@" + $("#backup-target-rsync-host").val() + "/" + $("#backup-target-rsync-path").val();
config["min_age_in_days"] = $("#min-age").val();
} else if(config.type == "s3") {
config["s3_access_key_id"] = $("#backup-s3-access-key-id").val();
config["s3_secret_access_key"] = $("#backup-s3-secret-access-key").val();
config["min_age_in_days"] = $("#min-age").val();
config["target_url"] = $("#backup-target-url").val();
config["s3_endpoint_url"] = $("#backup-s3-endpoint-url").val();
if($("#backup-s3-region-name").val()) {
config["s3_region_name"] = $("#backup-s3-region-name").val();
}
} else if(config.type == "b2") {
config["target_url"] = "b2://" + $("#backup-target-b2-user").val() + ":" + $("#backup-target-b2-pass").val() + "@" + $("#backup-target-b2-bucket").val()
config["min_age_in_days"] = $("#min-age").val();
}
var min_age = $("#min-age").val();
api(
"/system/backup/config",
"POST",
{
target: target,
target_user: target_user,
target_pass: target_pass,
min_age: min_age,
target_region: target_region
},
config,
function(r) {
// use .text() --- it's a text response, not html
show_modal_error("Backup configuration", $("<p/>").text(r), function() { if (r == "OK") show_system_backup(); }); // refresh after modal on success

View File

@ -40,6 +40,18 @@ def load_settings(env):
except:
return { }
# THE SSH KEYS AT /root/.ssh
def load_ssh_public_key():
	"""Return the contents of root's Mail-in-a-Box SSH public key
	(/root/.ssh/id_rsa_miab.pub), or None if the key file does not exist."""
	ssh_public_key_file = os.path.join('/root', '.ssh', 'id_rsa_miab.pub')
	if os.path.exists(ssh_public_key_file):
		# Context manager closes the handle promptly instead of leaking it
		# until garbage collection; key files are ASCII, so utf-8 is safe.
		with open(ssh_public_key_file, 'r', encoding='utf-8') as f:
			return f.read()
	return None  # explicit: key has not been provisioned
def load_ssh_private_key():
	"""Return the contents of root's Mail-in-a-Box SSH private key
	(/root/.ssh/id_rsa_miab), or None if the key file does not exist."""
	ssh_private_key_file = os.path.join('/root', '.ssh', 'id_rsa_miab')
	if os.path.exists(ssh_private_key_file):
		# Context manager closes the handle promptly instead of leaking it
		# until garbage collection; key files are ASCII, so utf-8 is safe.
		with open(ssh_private_key_file, 'r', encoding='utf-8') as f:
			return f.read()
	return None  # explicit: key has not been provisioned
# UTILITIES
def safe_domain_name(name):