From 290b6bd7bd85cf392090581ea5c7ae20f49796d2 Mon Sep 17 00:00:00 2001
From: Steve Hay <hay.steve@gmail.com>
Date: Sun, 4 Sep 2022 14:28:56 -0400
Subject: [PATCH 1/2] port boto to boto3 and fix asyncio issue

---
 management/backup.py        | 47 +++++++++++++------------------------
 management/daemon.py        |  5 ++--
 management/status_checks.py |  2 +-
 3 files changed, 20 insertions(+), 34 deletions(-)

diff --git a/management/backup.py b/management/backup.py
index 2e88c8d0..5f5dc0b5 100755
--- a/management/backup.py
+++ b/management/backup.py
@@ -446,46 +446,31 @@ def list_target_files(config):
 			raise ValueError("Connection to rsync host failed: {}".format(reason))
 
 	elif target.scheme == "s3":
-		# match to a Region
-		import boto.s3
-		from boto.exception import BotoServerError
-		custom_region = False
-		for region in boto.s3.regions():
-			if region.endpoint == target.hostname:
-				break
-		else:
-			# If region is not found this is a custom region
-			custom_region = True
-
-		bucket = target.path[1:].split('/')[0]
-		path = '/'.join(target.path[1:].split('/')[1:]) + '/'
-
-		# Create a custom region with custom endpoint
-		if custom_region:
-			from boto.s3.connection import S3Connection
-			region = boto.s3.S3RegionInfo(name=bucket, endpoint=target.hostname, connection_cls=S3Connection)
+		import boto3.s3
+		from botocore.exceptions import ClientError
+		
+		# separate bucket from path in target
+		bucket = target.path.split('/')[1]
+		path = '/'.join(target.path.split('/')[2:]) + '/'
 
 		# If no prefix is specified, set the path to '', otherwise boto won't list the files
 		if path == '/':
 			path = ''
 
 		if bucket == "":
-			raise ValueError("Enter an S3 bucket name.")
+			raise ValueError(f"Enter an S3 bucket name. // {url}")
 
 		# connect to the region & bucket
 		try:
-			conn = region.connect(aws_access_key_id=config["target_user"], aws_secret_access_key=config["target_pass"])
-			bucket = conn.get_bucket(bucket)
-		except BotoServerError as e:
-			if e.status == 403:
-				raise ValueError("Invalid S3 access key or secret access key.")
-			elif e.status == 404:
-				raise ValueError("Invalid S3 bucket name.")
-			elif e.status == 301:
-				raise ValueError("Incorrect region for this bucket.")
-			raise ValueError(e.reason)
-
-		return [(key.name[len(path):], key.size) for key in bucket.list(prefix=path)]
+			s3 = boto3.client('s3', \
+				endpoint_url=f'https://{target.hostname}', \
+				aws_access_key_id=config['target_user'], \
+				aws_secret_access_key=config['target_pass'])
+			bucket_objects = s3.list_objects_v2(Bucket=bucket)['Contents']
+			backup_list = [(key['Key'][len(path):], key['Size']) for key in bucket_objects]
+		except ClientError as e:
+			raise ValueError(e)
+		return backup_list
 	elif target.scheme == 'b2':
 		from b2sdk.v1 import InMemoryAccountInfo, B2Api
 		from b2sdk.v1.exception import NonExistentBucket
diff --git a/management/daemon.py b/management/daemon.py
index 98c6689c..f9bc03a9 100755
--- a/management/daemon.py
+++ b/management/daemon.py
@@ -121,8 +121,9 @@ def index():
 	no_users_exist = (len(get_mail_users(env)) == 0)
 	no_admins_exist = (len(get_admins(env)) == 0)
 
-	import boto.s3
-	backup_s3_hosts = [(r.name, r.endpoint) for r in boto.s3.regions()]
+	import boto3.s3
+	from urllib.parse import urlparse
+	backup_s3_hosts = [(r, urlparse(boto3.client('s3', region_name=r).meta.endpoint_url).netloc) for r in boto3.session.Session().get_available_regions('s3')]
 
 	return render_template('index.html',
 		hostname=env['PRIMARY_HOSTNAME'],
diff --git a/management/status_checks.py b/management/status_checks.py
index 12b4440d..0d555441 100755
--- a/management/status_checks.py
+++ b/management/status_checks.py
@@ -715,7 +715,7 @@ def check_mail_domain(domain, env, output):
 		output.print_ok(good_news)
 
 		# Check MTA-STS policy.
-		loop = asyncio.get_event_loop()
+		loop = asyncio.new_event_loop()
 		sts_resolver = postfix_mta_sts_resolver.resolver.STSResolver(loop=loop)
 		valid, policy = loop.run_until_complete(sts_resolver.resolve(domain))
 		if valid == postfix_mta_sts_resolver.resolver.STSFetchResult.VALID:

From 8c1a7b7735370070f42c76060cd2c0fe2987e8ac Mon Sep 17 00:00:00 2001
From: Steve Hay <hay.steve@gmail.com>
Date: Mon, 5 Sep 2022 14:48:20 -0400
Subject: [PATCH 2/2] Revert parts of the patch that accidentally diverged from
 the original; pass Prefix=path to list_objects_v2

---
 management/backup.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/management/backup.py b/management/backup.py
index 5f5dc0b5..012ccccb 100755
--- a/management/backup.py
+++ b/management/backup.py
@@ -450,15 +450,15 @@ def list_target_files(config):
 		from botocore.exceptions import ClientError
 		
 		# separate bucket from path in target
-		bucket = target.path.split('/')[1]
-		path = '/'.join(target.path.split('/')[2:]) + '/'
+		bucket = target.path[1:].split('/')[0]
+		path = '/'.join(target.path[1:].split('/')[1:]) + '/'
 
 		# If no prefix is specified, set the path to '', otherwise boto won't list the files
 		if path == '/':
 			path = ''
 
 		if bucket == "":
-			raise ValueError(f"Enter an S3 bucket name. // {url}")
+			raise ValueError("Enter an S3 bucket name.")
 
 		# connect to the region & bucket
 		try:
@@ -466,7 +466,7 @@ def list_target_files(config):
 				endpoint_url=f'https://{target.hostname}', \
 				aws_access_key_id=config['target_user'], \
 				aws_secret_access_key=config['target_pass'])
-			bucket_objects = s3.list_objects_v2(Bucket=bucket)['Contents']
+			bucket_objects = s3.list_objects_v2(Bucket=bucket, Prefix=path)['Contents']
 			backup_list = [(key['Key'][len(path):], key['Size']) for key in bucket_objects]
 		except ClientError as e:
 			raise ValueError(e)