From 9327a1df4fa6b258c185dfff554aaf260c0a53c7 Mon Sep 17 00:00:00 2001
From: KiekerJan
Date: Mon, 12 Sep 2022 22:58:52 +0200
Subject: [PATCH] merge proposed boto changes

---
 management/backup.py | 45 +++++++++++++++------------------------------
 management/daemon.py |  5 +++--
 2 files changed, 18 insertions(+), 32 deletions(-)

diff --git a/management/backup.py b/management/backup.py
index b2fc1655..4887c662 100755
--- a/management/backup.py
+++ b/management/backup.py
@@ -451,46 +451,31 @@ def list_target_files(config):
 		raise ValueError("Connection to rsync host failed: {}".format(reason))
 
 	elif target.scheme == "s3":
-		# match to a Region
-		import boto.s3
-		from boto.exception import BotoServerError
-		custom_region = False
-		for region in boto.s3.regions():
-			if region.endpoint == target.hostname:
-				break
-		else:
-			# If region is not found this is a custom region
-			custom_region = True
-
+		import boto3.s3
+		from botocore.exceptions import ClientError
+
+		# separate bucket from path in target
 		bucket = target.path[1:].split('/')[0]
 		path = '/'.join(target.path[1:].split('/')[1:]) + '/'
-
-		# Create a custom region with custom endpoint
-		if custom_region:
-			from boto.s3.connection import S3Connection
-			region = boto.s3.S3RegionInfo(name=bucket, endpoint=target.hostname, connection_cls=S3Connection)
-
+		# If no prefix is specified, set the path to '', otherwise boto won't list the files
 		if path == '/':
 			path = ''
 
 		if bucket == "":
-			raise ValueError("Enter an S3 bucket name.")
+			raise ValueError(f"Enter an S3 bucket name. // {config['target']}")
 
 		# connect to the region & bucket
 		try:
-			conn = region.connect(aws_access_key_id=config["target_user"], aws_secret_access_key=config["target_pass"])
-			bucket = conn.get_bucket(bucket)
-		except BotoServerError as e:
-			if e.status == 403:
-				raise ValueError("Invalid S3 access key or secret access key.")
-			elif e.status == 404:
-				raise ValueError("Invalid S3 bucket name.")
-			elif e.status == 301:
-				raise ValueError("Incorrect region for this bucket.")
-			raise ValueError(e.reason)
-
-		return [(key.name[len(path):], key.size) for key in bucket.list(prefix=path)]
+			s3 = boto3.client('s3',
+				endpoint_url=f'https://{target.hostname}',
+				aws_access_key_id=config['target_user'],
+				aws_secret_access_key=config['target_pass'])
+			bucket_objects = s3.list_objects_v2(Bucket=bucket, Prefix=path)['Contents']
+			backup_list = [(key['Key'][len(path):], key['Size']) for key in bucket_objects]
+		except ClientError as e:
+			raise ValueError(e)
+		return backup_list
 
 	elif target.scheme == 'b2':
 		from b2sdk.v1 import InMemoryAccountInfo, B2Api
 		from b2sdk.v1.exception import NonExistentBucket
diff --git a/management/daemon.py b/management/daemon.py
index ecf6f10e..54275fd9 100755
--- a/management/daemon.py
+++ b/management/daemon.py
@@ -122,8 +122,9 @@ def index():
 	no_users_exist = (len(get_mail_users(env)) == 0)
 	no_admins_exist = (len(get_admins(env)) == 0)
 
-	import boto.s3
-	backup_s3_hosts = [(r.name, r.endpoint) for r in boto.s3.regions()]
+	import boto3.s3
+	from urllib.parse import urlparse
+	backup_s3_hosts = [(r, urlparse(boto3.client('s3', region_name=r).meta.endpoint_url).netloc) for r in boto3.session.Session().get_available_regions('s3')]
 
 	return render_template('index.html',
 		hostname=env['PRIMARY_HOSTNAME'],
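
Review note (not part of the patch): in the new backup.py branch, s3.list_objects_v2() returns at most 1000 keys per call and omits the 'Contents' key entirely when nothing matches the prefix, so the patched code raises KeyError on an empty bucket and truncates large backup sets. Below is a minimal sketch of the same listing using a boto3 paginator; it assumes the same target/config values that list_target_files() already has, and the helper name and parameters are illustrative only, not part of the patch.

    import boto3
    from botocore.exceptions import ClientError

    def list_s3_backups(hostname, bucket, prefix, access_key, secret_key):
        """Return (name, size) tuples for every object under prefix."""
        s3 = boto3.client('s3',
            endpoint_url=f'https://{hostname}',
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key)
        backup_list = []
        try:
            # The paginator follows continuation tokens, so keys beyond the
            # first 1000 are included as well.
            paginator = s3.get_paginator('list_objects_v2')
            for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
                # 'Contents' is absent from pages with no matching objects.
                for obj in page.get('Contents', []):
                    backup_list.append((obj['Key'][len(prefix):], obj['Size']))
        except ClientError as e:
            raise ValueError(e)
        return backup_list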
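Review note (not part of the patch): the daemon.py hunk constructs one boto3 client per region just to read meta.endpoint_url, which happens on every dashboard render. A sketch of an equivalent host list without the per-region clients, assuming the standard aws-partition endpoint pattern s3.<region>.amazonaws.com holds for every region returned:

    import boto3

    # get_available_regions('s3') covers the default aws partition only;
    # the endpoint pattern below is an assumption, not taken from the patch.
    backup_s3_hosts = [(r, f"s3.{r}.amazonaws.com")
                       for r in boto3.session.Session().get_available_regions('s3')]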