diff --git a/cloudAPI/cloudManager.py b/cloudAPI/cloudManager.py index 656290782..ef37bfdcd 100755 --- a/cloudAPI/cloudManager.py +++ b/cloudAPI/cloudManager.py @@ -1813,11 +1813,19 @@ class CloudManager: aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys() - s3 = boto3.resource( - 's3', - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key - ) + if region.find('http') > -1: + s3 = boto3.resource( + 's3', + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + endpoint_url=region + ) + else: + s3 = boto3.resource( + 's3', + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + ) bucket = s3.Bucket(plan.bucket) key = '%s/%s/' % (plan.name, self.data['domainName']) @@ -1861,11 +1869,20 @@ class CloudManager: aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys() - s3 = boto3.resource( - 's3', - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key - ) + if region.find('http') > -1: + s3 = boto3.resource( + 's3', + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + endpoint_url=region + ) + else: + s3 = boto3.resource( + 's3', + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + ) + s3.Object(plan.bucket, self.data['backupFile']).delete() final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None"}) diff --git a/plogical/IncScheduler.py b/plogical/IncScheduler.py index 4fd07d81c..0f3b4a2ef 100644 --- a/plogical/IncScheduler.py +++ b/plogical/IncScheduler.py @@ -625,11 +625,21 @@ Automatic backup failed for %s on %s. 
ts = time.time() retentionSeconds = 86400 * plan.retention - s3 = boto3.resource( - 's3', - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key - ) + + if region.find('http') > -1: + s3 = boto3.resource( + 's3', + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + endpoint_url=region + ) + else: + s3 = boto3.resource( + 's3', + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + ) + bucket = s3.Bucket(plan.bucket) for file in bucket.objects.all(): @@ -642,12 +652,19 @@ Automatic backup failed for %s on %s. ### - client = boto3.client( - 's3', - aws_access_key_id = aws_access_key_id, - aws_secret_access_key = aws_secret_access_key, - #region_name=region - ) + if region.find('http') > -1: + client = boto3.client( + 's3', + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + endpoint_url=region + ) + else: + client = boto3.client( + 's3', + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + ) ## diff --git a/s3Backups/s3Backups.py b/s3Backups/s3Backups.py index 12036eac4..9b2c56d9e 100755 --- a/s3Backups/s3Backups.py +++ b/s3Backups/s3Backups.py @@ -151,12 +151,19 @@ class S3Backups(multi.Thread): aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys() - s3 = boto3.resource( - 's3', - aws_access_key_id = aws_access_key_id, - aws_secret_access_key = aws_secret_access_key, - region_name=region - ) + if region.find('http') > -1: + s3 = boto3.resource( + 's3', + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + endpoint_url=region + ) + else: + s3 = boto3.resource( + 's3', + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + ) json_data = "[" checker = 0