diff --git a/cloudAPI/cloudManager.py b/cloudAPI/cloudManager.py
index 587e63291..d9e587350 100755
--- a/cloudAPI/cloudManager.py
+++ b/cloudAPI/cloudManager.py
@@ -961,9 +961,12 @@ class CloudManager:
     def forceRunAWSBackup(self, request):
         try:
-            s3 = S3Backups(request, self.data, 'forceRunAWSBackup')
-            s3.start()
+            request.session['userID'] = self.admin.pk
+
+            execPath = "/usr/local/CyberCP/bin/python /usr/local/CyberCP/IncBackups/IncScheduler.py Daily"
+            ProcessUtilities.popenExecutioner(execPath)
+
             return self.ajaxPre(1, None)
         except BaseException as msg:
             return self.ajaxPre(0, str(msg))
diff --git a/plogical/IncScheduler.py b/plogical/IncScheduler.py
index 0da947c46..47f3a6035 100644
--- a/plogical/IncScheduler.py
+++ b/plogical/IncScheduler.py
@@ -18,11 +18,15 @@ from googleapiclient.discovery import build
 from googleapiclient.http import MediaFileUpload
 from plogical.backupSchedule import backupSchedule
 import requests
-from websiteFunctions.models import NormalBackupJobs, NormalBackupSites, NormalBackupDests, NormalBackupJobLogs
+from websiteFunctions.models import NormalBackupJobs, NormalBackupJobLogs
+
 try:
+    from s3Backups.models import BackupPlan, BackupLogs
+    import boto3
     from plogical.virtualHostUtilities import virtualHostUtilities
     from plogical.mailUtilities import mailUtilities
     from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
+    from plogical.processUtilities import ProcessUtilities
 except:
     pass
@@ -589,6 +593,86 @@ Automatic backup failed for %s on %s.
             backupjob.config = json.dumps(jobConfig)
             backupjob.save()

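+    ## Assumes /home/cyberpanel/.aws/credentials holds a header line followed by
+    ## 'aws_access_key_id = ...', 'aws_secret_access_key = ...' and
+    ## 'region = ...' entries, in that order.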
%H:%M:%S"), + msg='Backup failed for ' + items.domain + '. Error: ' + finalResult).save() + + plan.lastRun = runTime + plan.save() + + BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), + msg='Backup Process Finished.').save() + except BaseException as msg: + logging.writeToFile(str(msg) + ' [S3Backups.runBackupPlan]') + plan = BackupPlan.objects.get(name='hi') + BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR', msg=str(msg)).save() + def main(): @@ -602,6 +686,7 @@ def main(): IncScheduler.git(args.function) IncScheduler.checkDiskUsage() IncScheduler.startNormalBackups(args.function) + IncScheduler.forceRunAWSBackup() if __name__ == "__main__": diff --git a/plogical/backupUtilities.py b/plogical/backupUtilities.py index b61344ceb..1918fe173 100755 --- a/plogical/backupUtilities.py +++ b/plogical/backupUtilities.py @@ -1438,7 +1438,7 @@ class backupUtilities: if result[0] == 0: logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Failed to generate backups for data. Error: %s. [404], 0' % (result[1] )) - return 0 + return 0, self.BackupPath logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Data backup successfully generated,30') @@ -1450,7 +1450,7 @@ class backupUtilities: if result[0] == 0: logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Failed to generate backups for emails. Error: %s. [404], 0' % (result[1] )) - return 0 + return 0, self.BackupPath logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Emails backup successfully generated,60') @@ -1462,7 +1462,7 @@ class backupUtilities: if result[0] == 0: logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Failed to generate backups for databases. Error: %s. [404], 0' % (result[1] )) - return 0 + return 0, self.BackupPath logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Databases backups successfully generated,30') @@ -1476,7 +1476,17 @@ class backupUtilities: command = 'rm -rf %s' % (self.BackupPath) ProcessUtilities.executioner(command) - logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].') + finalPath = '%s.tar.gz' % (self.BackupPath) + + command = 'chown cyberpanel:cyberpanel %s' % (finalPath) + ProcessUtilities.executioner(command) + + command = 'chmod 600:600 %s' % (finalPath) + ProcessUtilities.executioner(command) + + logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].' 
+        return 1, finalPath


     ## Restore functions
@@ -1605,7 +1615,8 @@
                 mysqlUtilities.mysqlUtilities.restoreDatabaseBackup(db['databaseName'], self.databasesPath, db['password'])

-
+            command = 'rm -rf %s' % (self.extractedPath)
+            ProcessUtilities.executioner(command)

             logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].')
diff --git a/requirments.txt b/requirments.txt
index afd0c78e3..66ee115f6 100755
--- a/requirments.txt
+++ b/requirments.txt
@@ -5,8 +5,8 @@ Babel==2.8.0
 backports.ssl-match-hostname==3.7.0.1
 bcrypt==3.2.0
 beautifulsoup4==4.9.3
-boto3==1.16.13
-botocore==1.19.13
+boto3==1.19.30
+botocore==1.22.30
 cachetools==4.1.1
 certifi==2020.11.8
 cffi==1.14.3
diff --git a/s3Backups/models.py b/s3Backups/models.py
index be1e7cdff..251d35cee 100755
--- a/s3Backups/models.py
+++ b/s3Backups/models.py
@@ -13,6 +13,7 @@ class BackupPlan(models.Model):
     retention = models.IntegerField()
     type = models.CharField(max_length=5, default='AWS')
     lastRun = models.CharField(max_length=50, default='0:0:0')
+    config = models.TextField(default='{}')

 class WebsitesInPlan(models.Model):
     owner = models.ForeignKey(BackupPlan,on_delete=models.CASCADE)
diff --git a/s3Backups/s3Backups.py b/s3Backups/s3Backups.py
index 1031d42f9..d2cb0510d 100755
--- a/s3Backups/s3Backups.py
+++ b/s3Backups/s3Backups.py
@@ -176,8 +176,9 @@ class S3Backups(multi.Thread):

         aws_access_key_id = data[1].split(' ')[2].strip(' ').strip('\n')
         aws_secret_access_key = data[2].split(' ')[2].strip(' ').strip('\n')
+        region = data[3].split(' ')[2].strip(' ').strip('\n')

-        return aws_access_key_id, aws_secret_access_key
+        return aws_access_key_id, aws_secret_access_key, region

     def fetchBuckets(self):
         try:
@@ -191,12 +192,13 @@
                 return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')

-            aws_access_key_id, aws_secret_access_key = self.fetchAWSKeys()
+            aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()

             s3 = boto3.resource(
                 's3',
                 aws_access_key_id = aws_access_key_id,
-                aws_secret_access_key = aws_secret_access_key
+                aws_secret_access_key = aws_secret_access_key,
+                region_name=region
             )

             json_data = "["
@@ -232,8 +234,28 @@
             admin = Administrator.objects.get(pk=userID)

+            ## What to backup
+
+            WTB = {}
+            try:
+                WTB['data'] = int(self.data['data'])
+            except:
+                WTB['data'] = 0
+
+            try:
+                WTB['databases'] = int(self.data['databases'])
+            except:
+                WTB['databases'] = 0
+
+            try:
+                WTB['emails'] = int(self.data['emails'])
+            except:
+                WTB['emails'] = 0
+
+            ###
+
             newPlan = BackupPlan(owner=admin, name=self.data['planName'].replace(' ', ''), freq=self.data['frequency'],
-                                 retention=self.data['retenion'], bucket=self.data['bucketName'])
+                                 retention=self.data['retenion'], bucket=self.data['bucketName'], config=json.dumps(WTB))
             newPlan.save()

             for items in self.data['websitesInPlan']:
@@ -263,12 +285,16 @@
             checker = 0

             for plan in admin.backupplan_set.all():
+                config = json.loads(plan.config)
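+                ## Plans saved before the config field existed fall back to the
+                ## model default of '{}', hence the .get() defaults below.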
                 dic = {
                     'name': plan.name,
                     'bucket': plan.bucket,
                     'freq': plan.freq,
                     'retention': plan.retention,
                     'lastRun': plan.lastRun,
+                    'data': config.get('data', 0),
+                    'databases': config.get('databases', 0),
+                    'emails': config.get('emails', 0),
                 }

                 if checker == 0:
@@ -374,9 +400,28 @@
             changePlan = BackupPlan.objects.get(name=self.data['planName'])

+            ## What to backup
+
+            WTB = {}
+            try:
+                WTB['data'] = int(self.data['data'])
+            except:
+                WTB['data'] = 0
+
+            try:
+                WTB['databases'] = int(self.data['databases'])
+            except:
+                WTB['databases'] = 0
+
+            try:
+                WTB['emails'] = int(self.data['emails'])
+            except:
+                WTB['emails'] = 0
+
             changePlan.bucket = self.data['bucketName']
             changePlan.freq = self.data['frequency']
             changePlan.retention = self.data['retention']
+            changePlan.config = json.dumps(WTB)
             changePlan.save()
@@ -478,15 +523,17 @@
         try:
             plan = BackupPlan.objects.get(name=self.data['planName'])
             bucketName = plan.bucket.strip('\n').strip(' ')
             runTime = time.strftime("%d:%m:%Y")

-            aws_access_key_id, aws_secret_access_key = self.fetchAWSKeys()
+            aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()

             client = boto3.client(
                 's3',
                 aws_access_key_id = aws_access_key_id,
-                aws_secret_access_key = aws_secret_access_key
+                aws_secret_access_key = aws_secret_access_key,
+                #region_name=region
             )
@@ -533,25 +580,40 @@
             BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                        msg='Starting backup process..').save()

+            PlanConfig = json.loads(plan.config)
+
             for items in plan.websitesinplan_set.all():
-                result = self.createBackup(items.domain)
-                if result[0]:
-                    key = plan.name + '/' + runTime + '/' + result[1].split('/')[-1] + ".tar.gz"
+
+                from plogical.backupUtilities import backupUtilities
+                tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))
+                extraArgs = {}
+                extraArgs['domain'] = items.domain
+                extraArgs['tempStatusPath'] = tempStatusPath
+                extraArgs['data'] = PlanConfig['data']
+                extraArgs['emails'] = PlanConfig['emails']
+                extraArgs['databases'] = PlanConfig['databases']
+                bu = backupUtilities(extraArgs)
+                result = bu.CloudBackups()
+
+                finalResult = open(tempStatusPath, 'r').read()
+
+                if result[0] == 1:
+                    ## result[1] is the full local archive path; key by file name only.
+                    key = plan.name + '/' + runTime + '/' + result[1].split('/')[-1]
                     client.upload_file(
-                        result[1] + ".tar.gz",
+                        result[1],
                         bucketName,
                         key,
                         Config=config,
                     )

-                    command = 'rm -f ' + result[1] + ".tar.gz"
+                    command = 'rm -f ' + result[1]
                     ProcessUtilities.executioner(command)

                     BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                                msg='Backup successful for ' + items.domain + '.').save()
                 else:
                     BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
-                               msg='Backup failed for ' + items.domain + '. Error: ' + result[1]).save()
+                               msg='Backup failed for ' + items.domain + '. Error: ' + finalResult).save()

             plan.lastRun = runTime
             plan.save()