Cloud backups to AWS

This commit is contained in:
Usman Nasir
2020-12-08 07:35:30 +05:00
parent e1f1f525d5
commit 43d514c26c
6 changed files with 184 additions and 22 deletions

View File

@@ -961,9 +961,12 @@ class CloudManager:
def forceRunAWSBackup(self, request): def forceRunAWSBackup(self, request):
try: try:
request.session['userID'] = self.admin.pk request.session['userID'] = self.admin.pk
s3 = S3Backups(request, self.data, 'forceRunAWSBackup')
s3.start() execPath = "/usr/local/CyberCP/bin/python /usr/local/CyberCP/IncBackups/IncScheduler.py Daily"
ProcessUtilities.popenExecutioner(execPath)
return self.ajaxPre(1, None) return self.ajaxPre(1, None)
except BaseException as msg: except BaseException as msg:
return self.ajaxPre(0, str(msg)) return self.ajaxPre(0, str(msg))

View File

@@ -18,11 +18,15 @@ from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload from googleapiclient.http import MediaFileUpload
from plogical.backupSchedule import backupSchedule from plogical.backupSchedule import backupSchedule
import requests import requests
from websiteFunctions.models import NormalBackupJobs, NormalBackupSites, NormalBackupDests, NormalBackupJobLogs from websiteFunctions.models import NormalBackupJobs, NormalBackupJobLogs
try: try:
from s3Backups.models import BackupPlan, BackupLogs
import boto3
from plogical.virtualHostUtilities import virtualHostUtilities from plogical.virtualHostUtilities import virtualHostUtilities
from plogical.mailUtilities import mailUtilities from plogical.mailUtilities import mailUtilities
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
from plogical.processUtilities import ProcessUtilities
except: except:
pass pass
@@ -589,6 +593,86 @@ Automatic backup failed for %s on %s.
backupjob.config = json.dumps(jobConfig) backupjob.config = json.dumps(jobConfig)
backupjob.save() backupjob.save()
@staticmethod
def fetchAWSKeys():
path = '/home/cyberpanel/.aws'
credentials = path + '/credentials'
data = open(credentials, 'r').readlines()
aws_access_key_id = data[1].split(' ')[2].strip(' ').strip('\n')
aws_secret_access_key = data[2].split(' ')[2].strip(' ').strip('\n')
region = data[3].split(' ')[2].strip(' ').strip('\n')
return aws_access_key_id, aws_secret_access_key, region
@staticmethod
def forceRunAWSBackup():
try:
plan = BackupPlan.objects.get(name='hi')
bucketName = plan.bucket.strip('\n').strip(' ')
runTime = time.strftime("%d:%m:%Y")
aws_access_key_id, aws_secret_access_key, region = IncScheduler.fetchAWSKeys()
client = boto3.client(
's3',
aws_access_key_id = aws_access_key_id,
aws_secret_access_key = aws_secret_access_key,
#region_name=region
)
##
BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Starting backup process..').save()
PlanConfig = json.loads(plan.config)
for items in plan.websitesinplan_set.all():
from plogical.backupUtilities import backupUtilities
tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))
extraArgs = {}
extraArgs['domain'] = items.domain
extraArgs['tempStatusPath'] = tempStatusPath
extraArgs['data'] = PlanConfig['data']
extraArgs['emails'] = PlanConfig['emails']
extraArgs['databases'] = PlanConfig['databases']
bu = backupUtilities(extraArgs)
result = bu.CloudBackups()
finalResult = open(tempStatusPath, 'r').read()
if result[0] == 1:
key = plan.name + '/' + runTime + '/' + result[1]
client.upload_file(
result[1],
bucketName,
key,
)
command = 'rm -f ' + result[1]
ProcessUtilities.executioner(command)
BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Backup successful for ' + items.domain + '.').save()
else:
BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Backup failed for ' + items.domain + '. Error: ' + finalResult).save()
plan.lastRun = runTime
plan.save()
BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Backup Process Finished.').save()
except BaseException as msg:
logging.writeToFile(str(msg) + ' [S3Backups.runBackupPlan]')
plan = BackupPlan.objects.get(name='hi')
BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR', msg=str(msg)).save()
def main(): def main():
@@ -602,6 +686,7 @@ def main():
IncScheduler.git(args.function) IncScheduler.git(args.function)
IncScheduler.checkDiskUsage() IncScheduler.checkDiskUsage()
IncScheduler.startNormalBackups(args.function) IncScheduler.startNormalBackups(args.function)
IncScheduler.forceRunAWSBackup()
if __name__ == "__main__": if __name__ == "__main__":

View File

@@ -1438,7 +1438,7 @@ class backupUtilities:
if result[0] == 0: if result[0] == 0:
logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
'Failed to generate backups for data. Error: %s. [404], 0' % (result[1] )) 'Failed to generate backups for data. Error: %s. [404], 0' % (result[1] ))
return 0 return 0, self.BackupPath
logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
'Data backup successfully generated,30') 'Data backup successfully generated,30')
@@ -1450,7 +1450,7 @@ class backupUtilities:
if result[0] == 0: if result[0] == 0:
logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
'Failed to generate backups for emails. Error: %s. [404], 0' % (result[1] )) 'Failed to generate backups for emails. Error: %s. [404], 0' % (result[1] ))
return 0 return 0, self.BackupPath
logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
'Emails backup successfully generated,60') 'Emails backup successfully generated,60')
@@ -1462,7 +1462,7 @@ class backupUtilities:
if result[0] == 0: if result[0] == 0:
logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
'Failed to generate backups for databases. Error: %s. [404], 0' % (result[1] )) 'Failed to generate backups for databases. Error: %s. [404], 0' % (result[1] ))
return 0 return 0, self.BackupPath
logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
'Databases backups successfully generated,30') 'Databases backups successfully generated,30')
@@ -1476,7 +1476,17 @@ class backupUtilities:
command = 'rm -rf %s' % (self.BackupPath) command = 'rm -rf %s' % (self.BackupPath)
ProcessUtilities.executioner(command) ProcessUtilities.executioner(command)
logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].') finalPath = '%s.tar.gz' % (self.BackupPath)
command = 'chown cyberpanel:cyberpanel %s' % (finalPath)
ProcessUtilities.executioner(command)
command = 'chmod 600:600 %s' % (finalPath)
ProcessUtilities.executioner(command)
logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].' % (self.BackupPath))
return 1, self.BackupPath + '.tar.gz'
## Restore functions ## Restore functions
@@ -1605,7 +1615,8 @@ class backupUtilities:
mysqlUtilities.mysqlUtilities.restoreDatabaseBackup(db['databaseName'], self.databasesPath, db['password']) mysqlUtilities.mysqlUtilities.restoreDatabaseBackup(db['databaseName'], self.databasesPath, db['password'])
command = 'rm -rf %s' % (self.extractedPath)
ProcessUtilities.executioner(command)
logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].') logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].')

View File

@@ -5,8 +5,8 @@ Babel==2.8.0
backports.ssl-match-hostname==3.7.0.1 backports.ssl-match-hostname==3.7.0.1
bcrypt==3.2.0 bcrypt==3.2.0
beautifulsoup4==4.9.3 beautifulsoup4==4.9.3
boto3==1.16.13 boto3==1.19.30
botocore==1.19.13 botocore==1.19.30
cachetools==4.1.1 cachetools==4.1.1
certifi==2020.11.8 certifi==2020.11.8
cffi==1.14.3 cffi==1.14.3

View File

@@ -13,6 +13,7 @@ class BackupPlan(models.Model):
retention = models.IntegerField() retention = models.IntegerField()
type = models.CharField(max_length=5, default='AWS') type = models.CharField(max_length=5, default='AWS')
lastRun = models.CharField(max_length=50, default='0:0:0') lastRun = models.CharField(max_length=50, default='0:0:0')
config = models.TextField()
class WebsitesInPlan(models.Model): class WebsitesInPlan(models.Model):
owner = models.ForeignKey(BackupPlan,on_delete=models.CASCADE) owner = models.ForeignKey(BackupPlan,on_delete=models.CASCADE)

View File

@@ -176,8 +176,9 @@ class S3Backups(multi.Thread):
aws_access_key_id = data[1].split(' ')[2].strip(' ').strip('\n') aws_access_key_id = data[1].split(' ')[2].strip(' ').strip('\n')
aws_secret_access_key = data[2].split(' ')[2].strip(' ').strip('\n') aws_secret_access_key = data[2].split(' ')[2].strip(' ').strip('\n')
region = data[3].split(' ')[2].strip(' ').strip('\n')
return aws_access_key_id, aws_secret_access_key return aws_access_key_id, aws_secret_access_key, region
def fetchBuckets(self): def fetchBuckets(self):
try: try:
@@ -191,12 +192,13 @@ class S3Backups(multi.Thread):
return proc.ajax(0, 'Only administrators can use AWS S3 Backups.') return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
aws_access_key_id, aws_secret_access_key = self.fetchAWSKeys() aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()
s3 = boto3.resource( s3 = boto3.resource(
's3', 's3',
aws_access_key_id = aws_access_key_id, aws_access_key_id = aws_access_key_id,
aws_secret_access_key = aws_secret_access_key aws_secret_access_key = aws_secret_access_key,
region_name=region
) )
json_data = "[" json_data = "["
@@ -232,8 +234,28 @@ class S3Backups(multi.Thread):
admin = Administrator.objects.get(pk=userID) admin = Administrator.objects.get(pk=userID)
## What to backup
WTB = {}
try:
WTB['data'] = int(self.data['data'])
except:
WTB['data'] = 0
try:
WTB['databases'] = int(self.data['databases'])
except:
WTB['databases'] = 0
try:
WTB['emails'] = int(self.data['emails'])
except:
WTB['emails'] = 0
###
newPlan = BackupPlan(owner=admin, name=self.data['planName'].replace(' ', ''), freq=self.data['frequency'], newPlan = BackupPlan(owner=admin, name=self.data['planName'].replace(' ', ''), freq=self.data['frequency'],
retention=self.data['retenion'], bucket=self.data['bucketName']) retention=self.data['retenion'], bucket=self.data['bucketName'], config=json.dumps(WTB))
newPlan.save() newPlan.save()
for items in self.data['websitesInPlan']: for items in self.data['websitesInPlan']:
@@ -263,12 +285,16 @@ class S3Backups(multi.Thread):
checker = 0 checker = 0
for plan in admin.backupplan_set.all(): for plan in admin.backupplan_set.all():
config = json.loads(plan.config)
dic = { dic = {
'name': plan.name, 'name': plan.name,
'bucket': plan.bucket, 'bucket': plan.bucket,
'freq': plan.freq, 'freq': plan.freq,
'retention': plan.retention, 'retention': plan.retention,
'lastRun': plan.lastRun, 'lastRun': plan.lastRun,
'data': config['data'],
'databases': config['databases'],
'emails': config['emails'],
} }
if checker == 0: if checker == 0:
@@ -374,9 +400,28 @@ class S3Backups(multi.Thread):
changePlan = BackupPlan.objects.get(name=self.data['planName']) changePlan = BackupPlan.objects.get(name=self.data['planName'])
## What to backup
WTB = {}
try:
WTB['data'] = int(self.data['data'])
except:
WTB['data'] = 0
try:
WTB['databases'] = int(self.data['databases'])
except:
WTB['databases'] = 0
try:
WTB['emails'] = int(self.data['emails'])
except:
WTB['emails'] = 0
changePlan.bucket = self.data['bucketName'] changePlan.bucket = self.data['bucketName']
changePlan.freq = self.data['frequency'] changePlan.freq = self.data['frequency']
changePlan.retention = self.data['retention'] changePlan.retention = self.data['retention']
changePlan.config = json.dumps(WTB)
changePlan.save() changePlan.save()
@@ -478,15 +523,17 @@ class S3Backups(multi.Thread):
try: try:
plan = BackupPlan.objects.get(name=self.data['planName']) plan = BackupPlan.objects.get(name=self.data['planName'])
logging.writeToFile(plan.config)
bucketName = plan.bucket.strip('\n').strip(' ') bucketName = plan.bucket.strip('\n').strip(' ')
runTime = time.strftime("%d:%m:%Y") runTime = time.strftime("%d:%m:%Y")
aws_access_key_id, aws_secret_access_key = self.fetchAWSKeys() aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()
client = boto3.client( client = boto3.client(
's3', 's3',
aws_access_key_id = aws_access_key_id, aws_access_key_id = aws_access_key_id,
aws_secret_access_key = aws_secret_access_key aws_secret_access_key = aws_secret_access_key,
#region_name=region
) )
@@ -533,25 +580,40 @@ class S3Backups(multi.Thread):
BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Starting backup process..').save() msg='Starting backup process..').save()
PlanConfig = json.loads(plan.config)
for items in plan.websitesinplan_set.all(): for items in plan.websitesinplan_set.all():
result = self.createBackup(items.domain)
if result[0]: from plogical.backupUtilities import backupUtilities
key = plan.name + '/' + runTime + '/' + result[1].split('/')[-1] + ".tar.gz" tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))
extraArgs = {}
extraArgs['domain'] = items.domain
extraArgs['tempStatusPath'] = tempStatusPath
extraArgs['data'] = PlanConfig['data']
extraArgs['emails'] = PlanConfig['emails']
extraArgs['databases'] = PlanConfig['databases']
bu = backupUtilities(extraArgs)
result = bu.CloudBackups()
finalResult = open(tempStatusPath, 'r').read()
if result[0] == 1:
key = plan.name + '/' + runTime + '/' + result[1]
client.upload_file( client.upload_file(
result[1] + ".tar.gz", result[1],
bucketName, bucketName,
key, key,
Config=config, Config=config,
) )
command = 'rm -f ' + result[1] + ".tar.gz" command = 'rm -f ' + result[1]
ProcessUtilities.executioner(command) ProcessUtilities.executioner(command)
BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Backup successful for ' + items.domain + '.').save() msg='Backup successful for ' + items.domain + '.').save()
else: else:
BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Backup failed for ' + items.domain + '. Error: ' + result[1]).save() msg='Backup failed for ' + items.domain + '. Error: ' + finalResult).save()
plan.lastRun = runTime plan.lastRun = runTime
plan.save() plan.save()