Change how the backup.py script deals with S3 backups.
If no static AWS credentials are specified, we try to create the boto3 client without explicitly passing static credentials. This way, we can benefit from dynamic credentials in AWS environments (e.g. using EC2 instance roles).
This commit is contained in:
parent
93380b243f
commit
32cfd1ed52
|
@ -492,10 +492,13 @@ def list_target_files(config):
|
||||||
|
|
||||||
# connect to the region & bucket
|
# connect to the region & bucket
|
||||||
try:
|
try:
|
||||||
s3 = boto3.client('s3', \
|
if config['target_user'] == "" and config['target_pass'] == "":
|
||||||
endpoint_url=f'https://{target.hostname}', \
|
s3 = boto3.client('s3', endpoint_url=f'https://{target.hostname}')
|
||||||
aws_access_key_id=config['target_user'], \
|
else:
|
||||||
aws_secret_access_key=config['target_pass'])
|
s3 = boto3.client('s3', \
|
||||||
|
endpoint_url=f'https://{target.hostname}', \
|
||||||
|
aws_access_key_id=config['target_user'], \
|
||||||
|
aws_secret_access_key=config['target_pass'])
|
||||||
bucket_objects = s3.list_objects_v2(Bucket=bucket, Prefix=path)['Contents']
|
bucket_objects = s3.list_objects_v2(Bucket=bucket, Prefix=path)['Contents']
|
||||||
backup_list = [(key['Key'][len(path):], key['Size']) for key in bucket_objects]
|
backup_list = [(key['Key'][len(path):], key['Size']) for key in bucket_objects]
|
||||||
except ClientError as e:
|
except ClientError as e:
|
||||||
|
|
Loading…
Reference in New Issue