mirror of https://github.com/usmannasir/cyberpanel.git (synced 2025-11-06 21:35:55 +01:00)

restore s3 backups
@@ -1791,3 +1791,106 @@ class CloudManager:
         except BaseException as msg:
             return self.ajaxPre(0, str(msg))
 
+    def fetchAWSKeys(self):
+        path = '/home/cyberpanel/.aws'
+        credentials = path + '/credentials'
+
+        data = open(credentials, 'r').readlines()
+
+        aws_access_key_id = data[1].split(' ')[2].strip(' ').strip('\n')
+        aws_secret_access_key = data[2].split(' ')[2].strip(' ').strip('\n')
+        region = data[3].split(' ')[2].strip(' ').strip('\n')
+
+        return aws_access_key_id, aws_secret_access_key, region
+
+    def getCurrentS3Backups(self):
+        try:
+
+            import boto3
+            from s3Backups.models import BackupPlan, BackupLogs
+            plan = BackupPlan.objects.get(name=self.data['planName'])
+
+            aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()
+
+            s3 = boto3.resource(
+                's3',
+                aws_access_key_id=aws_access_key_id,
+                aws_secret_access_key=aws_secret_access_key
+            )
+            bucket = s3.Bucket(plan.bucket)
+            key = '%s/%s/' % (plan.name, self.data['domainName'])
+
+            backups = []
+
+            for file in bucket.objects.filter(Prefix=key):
+                backups.append({'key': file.key, 'size': file.size})
+
+            json_data = "["
+            checker = 0
+
+            counter = 1
+            for items in backups:
+
+                dic = {'id': counter,
+                       'file': items['key'],
+                       'size': items['size'],
+                       }
+                counter = counter + 1
+
+                if checker == 0:
+                    json_data = json_data + json.dumps(dic)
+                    checker = 1
+                else:
+                    json_data = json_data + ',' + json.dumps(dic)
+
+            json_data = json_data + ']'
+            final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None", "data": json_data})
+            return HttpResponse(final_json)
+        except BaseException as msg:
+            final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
+            final_json = json.dumps(final_dic)
+            return HttpResponse(final_json)
+
+    def deleteS3Backup(self):
+        try:
+
+            import boto3
+            from s3Backups.models import BackupPlan, BackupLogs
+            plan = BackupPlan.objects.get(name=self.data['planName'])
+
+            aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()
+
+            s3 = boto3.resource(
+                's3',
+                aws_access_key_id=aws_access_key_id,
+                aws_secret_access_key=aws_secret_access_key
+            )
+            s3.Object(plan.bucket, self.data['backupFile']).delete()
+
+            final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None"})
+            return HttpResponse(final_json)
+        except BaseException as msg:
+            final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
+            final_json = json.dumps(final_dic)
+            return HttpResponse(final_json)
+
+    def SubmitS3BackupRestore(self):
+        try:
+
+            tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))
+
+            writeToFile = open(tempStatusPath, 'w')
+            writeToFile.write('Starting..,0')
+            writeToFile.close()
+
+            execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/backupUtilities.py"
+            execPath = execPath + " SubmitS3BackupRestore --backupDomain %s --backupFile '%s' --tempStoragePath %s --planName %s" % (self.data['domain'], self.data['backupFile'], tempStatusPath, self.data['planName'])
+            ProcessUtilities.popenExecutioner(execPath)
+
+            final_dic = {'status': 1, 'tempStatusPath': tempStatusPath}
+            final_json = json.dumps(final_dic)
+            return HttpResponse(final_json)
+
+        except BaseException as msg:
+            return self.ajaxPre(0, str(msg))
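Note on fetchAWSKeys: it parses the credentials file by line position instead of using configparser, so it depends on the exact layout CyberPanel writes to /home/cyberpanel/.aws/credentials. A minimal sketch of the assumed layout (values are placeholders, not from this commit):

    # Hypothetical file layout that the index-based parse above requires:
    sample = (
        "[default]\n"
        "aws_access_key_id = AKIAEXAMPLEKEY\n"
        "aws_secret_access_key = exampleSecretKey\n"
        "region = us-east-1\n"
    )

    data = sample.splitlines()
    # data[1] is 'aws_access_key_id = AKIAEXAMPLEKEY'; token [2] is the value.
    aws_access_key_id = data[1].split(' ')[2]
    aws_secret_access_key = data[2].split(' ')[2]
    region = data[3].split(' ')[2]
    print(aws_access_key_id, region)  # AKIAEXAMPLEKEY us-east-1

Any extra blank line or a renamed section shifts the indices; configparser would tolerate that.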
@@ -63,6 +63,12 @@ def router(request):
             return cm.deleteCloudBackup()
         elif controller == 'SubmitCloudBackupRestore':
             return cm.SubmitCloudBackupRestore()
+        elif controller == 'getCurrentS3Backups':
+            return cm.getCurrentS3Backups()
+        elif controller == 'deleteS3Backup':
+            return cm.deleteS3Backup()
+        elif controller == 'SubmitS3BackupRestore':
+            return cm.SubmitS3BackupRestore()
         elif controller == 'fetchWebsites':
             return cm.fetchWebsites()
         elif controller == 'fetchWebsiteDataJSON':
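Note: the three new controllers read their inputs from self.data, the JSON body that router() dispatches on. Only the field names below come from the handlers in this commit; the values, and the idea of posting them as single dicts, are illustrative assumptions:

    # Hypothetical request payloads; key names are taken from the code above.
    list_backups = {'controller': 'getCurrentS3Backups',
                    'planName': 'myplan', 'domainName': 'example.com'}
    delete_backup = {'controller': 'deleteS3Backup', 'planName': 'myplan',
                     'backupFile': 'myplan/example.com/backup-example.com-02.13.2018_10-24-52.tar.gz'}
    restore_backup = {'controller': 'SubmitS3BackupRestore', 'domain': 'example.com',
                      'planName': 'myplan',
                      'backupFile': 'myplan/example.com/backup-example.com-02.13.2018_10-24-52.tar.gz'}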
@@ -611,6 +611,7 @@ Automatic backup failed for %s on %s.
 def forceRunAWSBackup(planName):
     try:
 
+
         plan = BackupPlan.objects.get(name=planName)
         bucketName = plan.bucket.strip('\n').strip(' ')
         runTime = time.strftime("%d:%m:%Y")
@@ -618,8 +619,29 @@ Automatic backup failed for %s on %s.
         config = TransferConfig(multipart_threshold=1024 * 25, max_concurrency=10,
                                 multipart_chunksize=1024 * 25, use_threads=True)
 
+        ##
+
         aws_access_key_id, aws_secret_access_key, region = IncScheduler.fetchAWSKeys()
+
+        ts = time.time()
+        retentionSeconds = 86400 * plan.retention
+        s3 = boto3.resource(
+            's3',
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key
+        )
+        bucket = s3.Bucket(plan.bucket)
+
+        for file in bucket.objects.all():
+            result = float(ts - file.last_modified.timestamp())
+            if result > retentionSeconds:
+                BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
+                           msg='File %s expired and deleted according to your retention settings.' % (
+                               file.key)).save()
+                file.delete()
+
+        ###
+
         client = boto3.client(
             's3',
             aws_access_key_id = aws_access_key_id,
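The block above is the new retention pass: any object older than plan.retention days is logged and deleted before the fresh backups are uploaded. A self-contained sketch of the same age arithmetic (the retention value is a placeholder):

    import time
    from datetime import datetime, timedelta, timezone

    retention_days = 7                         # placeholder for plan.retention
    retentionSeconds = 86400 * retention_days  # days -> seconds

    ts = time.time()
    # Stand-in for file.last_modified on a bucket object (here: 10 days old).
    last_modified = datetime.now(timezone.utc) - timedelta(days=10)

    result = float(ts - last_modified.timestamp())
    print(result > retentionSeconds)           # True -> the object would be deleted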
@@ -629,7 +651,6 @@ Automatic backup failed for %s on %s.
 
         ##
 
-
         BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                    msg='Starting backup process..').save()
 
@@ -652,7 +673,7 @@ Automatic backup failed for %s on %s.
             finalResult = open(tempStatusPath, 'r').read()
 
             if result == 1:
-                key = plan.name + '/' + runTime + '/' + fileName.split('/')[-1]
+                key = plan.name + '/' + items.domain + '/' + fileName.split('/')[-1]
                 client.upload_file(
                     fileName,
                     bucketName,
@@ -675,26 +696,6 @@ Automatic backup failed for %s on %s.
         BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                    msg='Backup Process Finished.').save()
 
-        ###
-
-        s3 = boto3.resource(
-            's3',
-            aws_access_key_id=aws_access_key_id,
-            aws_secret_access_key=aws_secret_access_key,
-            region_name=region
-        )
-
-        ts = time.time()
-
-        retentionSeconds = 86400 * plan.retention
-
-        for bucket in s3.buckets.all():
-            if bucket.name == plan.bucket:
-                for file in bucket.objects.all():
-                    result = float(ts - file.last_modified.timestamp())
-                    if result > retentionSeconds:
-                        file.delete()
-                break
-
     except BaseException as msg:
         logging.writeToFile(str(msg) + ' [S3Backups.runBackupPlan]')
@@ -1,5 +1,7 @@
 import os, sys
 
+from s3transfer import TransferConfig
+
 sys.path.append('/usr/local/CyberCP')
 import django
 
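A caveat on the new import: the TransferConfig call elsewhere in this commit passes max_concurrency and use_threads, which is the keyword set documented for boto3's wrapper class rather than for the low-level s3transfer package. A sketch of the import that matches those keywords, in case the top-level one misbehaves:

    # Assumption: the boto3 wrapper is the intended class, since it is the one
    # documented to accept max_concurrency and use_threads.
    from boto3.s3.transfer import TransferConfig

    config = TransferConfig(multipart_threshold=1024 * 25, max_concurrency=10,
                            multipart_chunksize=1024 * 25, use_threads=True)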
@@ -1491,6 +1493,7 @@ class backupUtilities:
     ## Restore functions
 
     def SubmitCloudBackupRestore(self):
+        try:
             import json
             if os.path.exists(backupUtilities.CloudBackupConfigPath):
                 result = json.loads(open(backupUtilities.CloudBackupConfigPath, 'r').read())
@@ -1616,9 +1619,178 @@ class backupUtilities:
             ProcessUtilities.executioner(command)
 
             logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].')
+        except BaseException as msg:
+            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], '%s [404].' % str(msg))
+
     ### Cloud Backup functions ends
 
+    def fetchAWSKeys(self):
+        path = '/home/cyberpanel/.aws'
+        credentials = path + '/credentials'
+
+        data = open(credentials, 'r').readlines()
+
+        aws_access_key_id = data[1].split(' ')[2].strip(' ').strip('\n')
+        aws_secret_access_key = data[2].split(' ')[2].strip(' ').strip('\n')
+        region = data[3].split(' ')[2].strip(' ').strip('\n')
+
+        return aws_access_key_id, aws_secret_access_key, region
+
+    def SubmitS3BackupRestore(self):
+
+        try:
+            import json
+            if os.path.exists(backupUtilities.CloudBackupConfigPath):
+                result = json.loads(open(backupUtilities.CloudBackupConfigPath, 'r').read())
+                self.nice = result['nice']
+                self.cpu = result['cpu']
+                self.time = int(result['time'])
+            else:
+                self.nice = backupUtilities.NiceDefault
+                self.cpu = backupUtilities.CPUDefault
+                self.time = int(backupUtilities.time)
+
+            ### First Download file from S3
+
+            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
+                                                      'Starting file download from S3..,0')
+
+            import boto3
+            from s3Backups.models import BackupPlan
+            plan = BackupPlan.objects.get(name=self.extraArgs['planName'])
+
+            aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()
+
+            s3 = boto3.resource(
+                's3',
+                aws_access_key_id=aws_access_key_id,
+                aws_secret_access_key=aws_secret_access_key
+            )
+
+            self.BackupPath = '/home/cyberpanel/backups/%s/%s' % (self.extraArgs['domain'], self.extraArgs['backupFile'].split('/')[-1])
+
+            s3.Bucket(plan.bucket).download_file(self.extraArgs['backupFile'], self.BackupPath)
+
+            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
+                                                      'File download completed..,5')
+
+            self.website = Websites.objects.get(domain=self.extraArgs['domain'])
+
+            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
+                                                      'Extracting main archive..,0')
+
+            command = 'tar -xf %s -C %s' % (self.BackupPath, '/home/cyberpanel/backups/%s/' % (self.extraArgs['domain']))
+            ProcessUtilities.executioner(command)
+
+            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
+                                                      'Main Archive extracted,20')
+
+            self.extractedPath = '/home/cyberpanel/backups/%s/%s' % (self.extraArgs['domain'], self.extraArgs['backupFile'].split('/')[-1].rstrip('.tar.gz'))
+
+            self.dataPath = '%s/data' % (self.extractedPath)
+            self.databasesPath = '%s/databases' % (self.extractedPath)
+            self.emailsPath = '%s/emails' % (self.extractedPath)
+
+            ## Data
+
+            if os.path.exists(self.dataPath):
+                try:
+                    logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
+                                                              'Creating child domains if any..,20')
+                    childDomains = json.loads(open('%s/data.json' % (self.extractedPath), 'r').read())['ChildDomains']
+
+                    for child in childDomains:
+                        try:
+                            ch = ChildDomains.objects.get(domain=child['domain'])
+                        except:
+                            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
+                                                                      'Creating %s,20' % (child['domain']))
+                            virtualHostUtilities.createDomain(self.website.domain, child['domain'], child['php'], child['path'], 1, 0, 0,
+                                                              self.website.admin.userName, 0, "/home/cyberpanel/" + str(randint(1000, 9999)))
+
+                except BaseException as msg:
+                    logging.CyberCPLogFileWriter.writeToFile('%s [SubmitCloudBackupRestore:1533]' % str(msg))
+
+                homePath = '/home/%s' % (self.website.domain)
+                command = 'rm -rf %s' % (homePath)
+                ProcessUtilities.executioner(command)
+
+                command = 'mv %s/%s %s' % (self.dataPath, self.website.domain, '/home')
+                ProcessUtilities.executioner(command)
+
+                from filemanager.filemanager import FileManager
+
+                fm = FileManager(None, None)
+                fm.fixPermissions(self.website.domain)
+
+            ## Emails
+
+            if os.path.exists(self.emailsPath):
+                try:
+                    logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
+                                                              'Creating emails if any..,40')
+                    emails = json.loads(open('%s/emails.json' % (self.extractedPath), 'r').read())['emails']
+                    from mailServer.models import Domains, EUsers
+                    emailDomain = Domains.objects.get(domain=self.website.domain)
+
+                    for email in emails:
+                        try:
+                            eu = EUsers.objects.get(emailOwner=emailDomain, email=email['email'])
+                        except:
+                            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
+                                                                      'Creating %s,40' % (email['email']))
+                            emailAcct = EUsers(emailOwner=emailDomain, email=email['email'], password=email['password'])
+                            emailAcct.mail = 'maildir:/home/vmail/%s/%s/Maildir' % (self.website.domain, email['email'].split('@')[0])
+                            emailAcct.save()
+
+                    EmailsHome = '/home/vmail/%s' % (self.website.domain)
+
+                    command = 'rm -rf %s' % (EmailsHome)
+                    ProcessUtilities.executioner(command)
+
+                    command = 'mv %s/%s /home/vmail' % (self.emailsPath, self.website.domain)
+                    ProcessUtilities.executioner(command)
+
+                    command = 'chown -R vmail:vmail %s' % (EmailsHome)
+                    ProcessUtilities.executioner(command)
+
+                except BaseException as msg:
+                    logging.CyberCPLogFileWriter.writeToFile('%s [SubmitCloudBackupRestore:1533]' % str(msg))
+
+            ## Databases
+
+            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
+                                                      'Restoring databases if any..,70')
+
+            databases = json.loads(open('%s/databases.json' % (self.extractedPath), 'r').read())['databases']
+
+            for db in databases:
+
+                logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
+                                                          'Restoring database %s..,70' % (db['databaseName']))
+
+                mysqlUtilities.mysqlUtilities.submitDBDeletion(db['databaseName'])
+
+                if mysqlUtilities.mysqlUtilities.createDatabase(db['databaseName'], db['databaseUser'], "cyberpanel") == 0:
+                    raise BaseException("Failed to create Databases!")
+
+                newDB = Databases(website=self.website, dbName=db['databaseName'], dbUser=db['databaseUser'])
+                newDB.save()
+
+                mysqlUtilities.mysqlUtilities.restoreDatabaseBackup(db['databaseName'], self.databasesPath, db['password'])
+
+            command = 'rm -rf %s' % (self.extractedPath)
+            ProcessUtilities.executioner(command)
+
+            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].')
+        except BaseException as msg:
+            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], '%s [404].' % str(msg))
+
 def submitBackupCreation(tempStoragePath, backupName, backupPath, backupDomain):
     try:
         ## /home/example.com/backup/backup-example.com-02.13.2018_10-24-52 -- tempStoragePath
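One caveat in the extraction step above: str.rstrip('.tar.gz') strips a trailing character set, not the literal suffix. The backup names CyberPanel generates (e.g. backup-example.com-02.13.2018_10-24-52.tar.gz, per the comment in submitBackupCreation) end in a digit, so the call happens to work there, but other names over-strip. A quick demonstration plus a suffix-safe alternative:

    # rstrip removes trailing characters from the set {'.', 't', 'a', 'r', 'g', 'z'}:
    print('mydata.tar.gz'.rstrip('.tar.gz'))   # 'myd' -- over-stripped
    print('backup-example.com-02.13.2018_10-24-52.tar.gz'.rstrip('.tar.gz'))
    # 'backup-example.com-02.13.2018_10-24-52' -- safe only because it ends in a digit

    def strip_suffix(name, suffix='.tar.gz'):
        # Remove the literal suffix, and only the suffix.
        return name[:-len(suffix)] if name.endswith(suffix) else name

    print(strip_suffix('mydata.tar.gz'))       # 'mydata'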
@@ -1859,6 +2031,10 @@ def main():
     parser.add_argument('--emails', help='')
     parser.add_argument('--databases', help='')
 
+    ## FOR S3
+
+    parser.add_argument('--planName', help='')
+
     args = parser.parse_args()
 
@@ -1892,6 +2068,14 @@ def main():
         extraArgs['backupFile'] = args.backupFile
         bu = backupUtilities(extraArgs)
         bu.SubmitCloudBackupRestore()
+    elif args.function == 'SubmitS3BackupRestore':
+        extraArgs = {}
+        extraArgs['domain'] = args.backupDomain
+        extraArgs['tempStatusPath'] = args.tempStoragePath
+        extraArgs['backupFile'] = args.backupFile
+        extraArgs['planName'] = args.planName
+        bu = backupUtilities(extraArgs)
+        bu.SubmitS3BackupRestore()
 
 
 if __name__ == "__main__":
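Combined with the execPath string built in CloudManager.SubmitS3BackupRestore above, the new branch is reached by a command of roughly this shape (assuming virtualHostUtilities.cyberPanel resolves to /usr/local/CyberCP; all values are illustrative):

    /usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/backupUtilities.py SubmitS3BackupRestore \
        --backupDomain example.com \
        --backupFile 'myplan/example.com/backup-example.com-02.13.2018_10-24-52.tar.gz' \
        --tempStoragePath /home/cyberpanel/1234 \
        --planName myplan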
@@ -42,12 +42,8 @@ class S3Backups(multi.Thread):
         try:
             if self.function == 'connectAccount':
                 self.connectAccount()
-            elif self.function == 'forceRunAWSBackup':
-                self.forceRunAWSBackup()
             elif self.function == 'forceRunAWSBackupDO':
                 self.forceRunAWSBackupDO()
-            elif self.function == 'runAWSBackups':
-                self.runAWSBackups()
             elif self.function == 'forceRunAWSBackupMINIO':
                 self.forceRunAWSBackupMINIO()
         except BaseException as msg: