Bug fix: endpoint handling for AWS backups (use custom endpoint_url when region is a URL)

This commit is contained in:
Usman Nasir
2020-12-14 15:33:59 +05:00
parent 1839a9d205
commit de02d683dd
3 changed files with 68 additions and 27 deletions

View File

@@ -1813,11 +1813,19 @@ class CloudManager:
     aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()

-    s3 = boto3.resource(
-        's3',
-        aws_access_key_id=aws_access_key_id,
-        aws_secret_access_key=aws_secret_access_key
-    )
+    if region.find('http') > -1:
+        s3 = boto3.resource(
+            's3',
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+            endpoint_url=region
+        )
+    else:
+        s3 = boto3.resource(
+            's3',
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+        )

     bucket = s3.Bucket(plan.bucket)
     key = '%s/%s/' % (plan.name, self.data['domainName'])
@@ -1861,11 +1869,20 @@ class CloudManager:
     aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()

-    s3 = boto3.resource(
-        's3',
-        aws_access_key_id=aws_access_key_id,
-        aws_secret_access_key=aws_secret_access_key
-    )
+    if region.find('http') > -1:
+        s3 = boto3.resource(
+            's3',
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+            endpoint_url = region
+        )
+    else:
+        s3 = boto3.resource(
+            's3',
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+        )

     s3.Object(plan.bucket, self.data['backupFile']).delete()
     final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None"})

View File

@@ -625,11 +625,21 @@ Automatic backup failed for %s on %s.
     ts = time.time()
     retentionSeconds = 86400 * plan.retention

-    s3 = boto3.resource(
-        's3',
-        aws_access_key_id=aws_access_key_id,
-        aws_secret_access_key=aws_secret_access_key
-    )
+    if region.find('http') > -1:
+        s3 = boto3.resource(
+            's3',
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+            endpoint_url=region
+        )
+    else:
+        s3 = boto3.resource(
+            's3',
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+        )

     bucket = s3.Bucket(plan.bucket)
     for file in bucket.objects.all():
@@ -642,12 +652,19 @@ Automatic backup failed for %s on %s.
     ###

-    client = boto3.client(
-        's3',
-        aws_access_key_id = aws_access_key_id,
-        aws_secret_access_key = aws_secret_access_key,
-        #region_name=region
-    )
+    if region.find('http') > -1:
+        client = boto3.client(
+            's3',
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+            endpoint_url=region
+        )
+    else:
+        client = boto3.client(
+            's3',
+            aws_access_key_id = aws_access_key_id,
+            aws_secret_access_key = aws_secret_access_key,
+        )

     ##

View File

@@ -151,12 +151,19 @@ class S3Backups(multi.Thread):
     aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()

-    s3 = boto3.resource(
-        's3',
-        aws_access_key_id = aws_access_key_id,
-        aws_secret_access_key = aws_secret_access_key,
-        region_name=region
-    )
+    if region.find('http') > -1:
+        s3 = boto3.resource(
+            's3',
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+            endpoint_url=region
+        )
+    else:
+        s3 = boto3.resource(
+            's3',
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+        )

     json_data = "["
     checker = 0