fix scheduled cloud backups

Usman Nasir
2020-12-08 15:01:17 +05:00
parent cbbd327fcb
commit b68a570191
4 changed files with 113 additions and 256 deletions

View File

@@ -964,7 +964,7 @@ class CloudManager:
         request.session['userID'] = self.admin.pk
-        execPath = "/usr/local/CyberCP/bin/python /usr/local/CyberCP/IncBackups/IncScheduler.py Daily"
+        execPath = "/usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/IncScheduler.py forceRunAWSBackup --planName %s" % (self.data['planName'])
         ProcessUtilities.popenExecutioner(execPath)
         return self.ajaxPre(1, None)
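Worth noting: the plan name from the request is interpolated straight into a shell command here. A minimal sketch of the same construction, with `shlex.quote` added as a defensive assumption (the commit itself does no quoting):

```python
# Sketch only: rebuilds the execPath string from the diff. shlex.quote is an
# addition (not in the commit) to keep shell metacharacters in a plan name inert.
import shlex

def buildForceRunCommand(planName):
    return ("/usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/IncScheduler.py "
            "forceRunAWSBackup --planName %s" % shlex.quote(planName))

print(buildForceRunCommand("daily-plan"))
# /usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/IncScheduler.py forceRunAWSBackup --planName daily-plan
```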

View File

@@ -19,6 +19,7 @@ from googleapiclient.http import MediaFileUpload
 from plogical.backupSchedule import backupSchedule
 import requests
 from websiteFunctions.models import NormalBackupJobs, NormalBackupJobLogs
+from boto3.s3.transfer import TransferConfig
 
 try:
     from s3Backups.models import BackupPlan, BackupLogs
@@ -607,13 +608,16 @@ Automatic backup failed for %s on %s.
         return aws_access_key_id, aws_secret_access_key, region
 
     @staticmethod
-    def forceRunAWSBackup():
+    def forceRunAWSBackup(planName):
         try:
-            plan = BackupPlan.objects.get(name='hi')
+            plan = BackupPlan.objects.get(name=planName)
             bucketName = plan.bucket.strip('\n').strip(' ')
             runTime = time.strftime("%d:%m:%Y")
 
+            config = TransferConfig(multipart_threshold=1024 * 25, max_concurrency=10,
+                                    multipart_chunksize=1024 * 25, use_threads=True)
+
             aws_access_key_id, aws_secret_access_key, region = IncScheduler.fetchAWSKeys()
 
             client = boto3.client(
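The hunk above wires a boto3 `TransferConfig` into the backup path so large archives upload as multipart. A self-contained sketch of how those settings reach `upload_file`; bucket, key, credentials, and the local path are placeholders:

```python
import boto3
from boto3.s3.transfer import TransferConfig

# Same values as the diff: anything over 25 KB is split into 25 KB parts and
# uploaded by up to 10 threads. (Unusually small; boto3's default threshold is 8 MB.)
config = TransferConfig(multipart_threshold=1024 * 25, max_concurrency=10,
                        multipart_chunksize=1024 * 25, use_threads=True)

client = boto3.client(
    's3',
    aws_access_key_id='AKIA...',        # placeholder
    aws_secret_access_key='...',        # placeholder
    region_name='us-east-1',            # placeholder
)
client.upload_file('/home/backup/site.tar.gz',        # placeholder archive
                   'my-backup-bucket',                # placeholder bucket
                   'myPlan/08:12:2020/site.tar.gz',   # key layout from the diff
                   Config=config)
```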
@@ -638,23 +642,25 @@ Automatic backup failed for %s on %s.
                 extraArgs = {}
                 extraArgs['domain'] = items.domain
                 extraArgs['tempStatusPath'] = tempStatusPath
-                extraArgs['data'] = PlanConfig['data']
-                extraArgs['emails'] = PlanConfig['emails']
-                extraArgs['databases'] = PlanConfig['databases']
+                extraArgs['data'] = int(PlanConfig['data'])
+                extraArgs['emails'] = int(PlanConfig['emails'])
+                extraArgs['databases'] = int(PlanConfig['databases'])
                 bu = backupUtilities(extraArgs)
-                result = bu.CloudBackups()
+                result, fileName = bu.CloudBackups()
 
                 finalResult = open(tempStatusPath, 'r').read()
 
-                if result[0] == 1:
-                    key = plan.name + '/' + runTime + '/' + result[1]
+                if result == 1:
+                    key = plan.name + '/' + runTime + '/' + fileName.split('/')[-1]
                     client.upload_file(
-                        result[1],
+                        fileName,
                         bucketName,
                         key,
+                        Config=config
                     )
 
-                    command = 'rm -f ' + result[1]
+                    command = 'rm -f ' + fileName
                     ProcessUtilities.executioner(command)
 
                     BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
@@ -668,25 +674,118 @@ Automatic backup failed for %s on %s.
             BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                        msg='Backup Process Finished.').save()
 
+            ###
+
+            s3 = boto3.resource(
+                's3',
+                aws_access_key_id=aws_access_key_id,
+                aws_secret_access_key=aws_secret_access_key,
+                region_name=region
+            )
+
+            ts = time.time()
+            for bucket in s3.buckets.all():
+                for file in bucket.objects.all():
+                    result = float(ts - file.last_modified.timestamp())
+                    if result > 100.0:
+                        file.delete()
+                    print(result)
+
         except BaseException as msg:
             logging.writeToFile(str(msg) + ' [S3Backups.runBackupPlan]')
-            plan = BackupPlan.objects.get(name='hi')
+            plan = BackupPlan.objects.get(name=planName)
             BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR', msg=str(msg)).save()
 
+    @staticmethod
+    def runAWSBackups():
+        try:
+            for plan in BackupPlan.objects.all():
+                lastRunDay = plan.lastRun.split(':')[0]
+                lastRunMonth = plan.lastRun.split(':')[1]
+
+                if plan.freq == 'Daily' and lastRunDay != time.strftime("%d"):
+                    IncScheduler.forceRunAWSBackup(plan.name)
+                else:
+                    if lastRunMonth == time.strftime("%m"):
+                        days = int(time.strftime("%d")) - int(lastRunDay)
+                        if days >= 6:
+                            IncScheduler.forceRunAWSBackup(plan.name)
+                    else:
+                        days = 30 - int(lastRunDay)
+                        days = days + int(time.strftime("%d"))
+                        if days >= 6:
+                            IncScheduler.forceRunAWSBackup(plan.name)
+
+            # for plan in BackupPlanDO.objects.all():
+            #     lastRunDay = plan.lastRun.split(':')[0]
+            #     lastRunMonth = plan.lastRun.split(':')[1]
+            #
+            #     if plan.freq == 'Daily' and lastRunDay != time.strftime("%d"):
+            #         self.data = {}
+            #         self.data['planName'] = plan.name
+            #         self.forceRunAWSBackupDO()
+            #     else:
+            #         if lastRunMonth == time.strftime("%m"):
+            #             days = int(time.strftime("%d")) - int(lastRunDay)
+            #             if days >= 6:
+            #                 self.data = {}
+            #                 self.data['planName'] = plan.name
+            #                 self.forceRunAWSBackupDO()
+            #         else:
+            #             days = 30 - int(lastRunDay)
+            #             days = days + int(time.strftime("%d"))
+            #             if days >= 6:
+            #                 self.data = {}
+            #                 self.data['planName'] = plan.name
+            #                 self.forceRunAWSBackupDO()
+            #
+            # for plan in BackupPlanMINIO.objects.all():
+            #     lastRunDay = plan.lastRun.split(':')[0]
+            #     lastRunMonth = plan.lastRun.split(':')[1]
+            #
+            #     if plan.freq == 'Daily' and lastRunDay != time.strftime("%d"):
+            #         self.data = {}
+            #         self.data['planName'] = plan.name
+            #         self.forceRunAWSBackupMINIO()
+            #     else:
+            #         if lastRunMonth == time.strftime("%m"):
+            #             days = int(time.strftime("%d")) - int(lastRunDay)
+            #             if days >= 6:
+            #                 self.data = {}
+            #                 self.data['planName'] = plan.name
+            #                 self.forceRunAWSBackupMINIO()
+            #         else:
+            #             days = 30 - int(lastRunDay)
+            #             days = days + int(time.strftime("%d"))
+            #             if days >= 6:
+            #                 self.data = {}
+            #                 self.data['planName'] = plan.name
+            #                 self.forceRunAWSBackupMINIO()
+
+        except BaseException as msg:
+            logging.writeToFile(str(msg) + ' [S3Backups.runAWSBackups]')
+
 def main():
     parser = argparse.ArgumentParser(description='CyberPanel Installer')
     parser.add_argument('function', help='Specific a function to call!')
+    parser.add_argument('--planName', help='Plan name for AWS!')
     args = parser.parse_args()
 
+    if args.function == 'forceRunAWSBackup':
+        IncScheduler.forceRunAWSBackup(args.planName)
+        return 0
+
     IncScheduler.startBackup(args.function)
     IncScheduler.runGoogleDriveBackups(args.function)
     IncScheduler.git(args.function)
     IncScheduler.checkDiskUsage()
     IncScheduler.startNormalBackups(args.function)
-    IncScheduler.forceRunAWSBackup()
+    IncScheduler.runAWSBackups()
 
 if __name__ == "__main__":
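The new `runAWSBackups` decides whether a weekly plan is due with plain day arithmetic on the stored `lastRun` stamp. A standalone sketch of that check, assuming `lastRun` follows the `"%d:%m:%Y"` format that `runTime` is written in:

```python
import time

def weeklyBackupDue(lastRun):
    """Mirrors the window check in runAWSBackups; lastRun is 'DD:MM:YYYY'."""
    lastRunDay = lastRun.split(':')[0]
    lastRunMonth = lastRun.split(':')[1]
    if lastRunMonth == time.strftime("%m"):
        days = int(time.strftime("%d")) - int(lastRunDay)
    else:
        # Month rolled over: the diff approximates every month as 30 days.
        days = 30 - int(lastRunDay) + int(time.strftime("%d"))
    return days >= 6

# Assuming today is 2020-12-08 (the commit date), a plan last run on
# 01:12:2020 is due, since 7 days have elapsed:
print(weeklyBackupDue('01:12:2020'))  # True
```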

View File

@@ -1484,7 +1484,7 @@ class backupUtilities:
             command = 'chmod 600:600 %s' % (finalPath)
             ProcessUtilities.executioner(command)
 
-            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].' % (self.BackupPath))
+            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].')
 
             return 1, self.BackupPath + '.tar.gz'
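This return is the other half of the `result, fileName = bu.CloudBackups()` change in IncScheduler.py: `CloudBackups` reports a `(status, archivePath)` pair, and the old `'Completed [200].' % (...)` formatting (a string with no placeholder) would have raised a TypeError before the backup path was ever returned. A toy illustration of the new contract; the stub only imitates the return shape:

```python
# Hypothetical stub imitating CloudBackups' (status, archivePath) return.
def CloudBackups():
    BackupPath = '/home/cyberpanel/backups/example.com/backup-example.com-12.08.2020'
    return 1, BackupPath + '.tar.gz'

result, fileName = CloudBackups()
if result == 1:
    # Key layout used by the caller: <plan>/<runTime>/<archive basename>
    key = 'myPlan/08:12:2020/' + fileName.split('/')[-1]
    print(key)  # myPlan/08:12:2020/backup-example.com-12.08.2020.tar.gz
```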
@@ -1518,11 +1518,8 @@ class backupUtilities:
         self.extractedPath = '/home/cyberpanel/backups/%s/%s' % (self.extraArgs['domain'], self.extraArgs['backupFile'].rstrip('.tar.gz'))
         self.dataPath = '%s/data' % (self.extractedPath)
-        logging.CyberCPLogFileWriter.writeToFile('Data path: %s' % (self.dataPath))
         self.databasesPath = '%s/databases' % (self.extractedPath)
-        logging.CyberCPLogFileWriter.writeToFile('Databases path: %s' % (self.databasesPath))
         self.emailsPath = '%s/emails' % (self.extractedPath)
-        logging.CyberCPLogFileWriter.writeToFile('Emails path: %s' % (self.emailsPath))
 
         ## Data

View File

@@ -97,43 +97,6 @@ class S3Backups(multi.Thread):
             json_data = json_data + ']'
         return json_data
 
-    def setupCron(self):
-        try:
-            command = "sudo cat /etc/crontab"
-            crons = ProcessUtilities.outputExecutioner(command).splitlines()
-            cronCheck = 1
-
-            for items in crons:
-                if items.find('s3Backups.py') > -1:
-                    cronCheck = 0
-
-            tempPath = '/home/cyberpanel/' + str(randint(10000, 99999))
-            writeToFile = open(tempPath, "w")
-
-            for items in crons:
-                writeToFile.writelines(items + "\n")
-
-            if cronCheck:
-                writeToFile.writelines("0 0 * * * root /usr/local/CyberCP/bin/python /usr/local/CyberCP/s3Backups/s3Backups.py > /home/cyberpanel/error-logs.txt 2>&1\n")
-
-            writeToFile.close()
-
-            command = 'sudo mv ' + tempPath + " /etc/crontab"
-            ProcessUtilities.executioner(command)
-
-            command = 'chown root:root /etc/crontab'
-            ProcessUtilities.executioner(command)
-
-            try:
-                os.remove(tempPath)
-            except:
-                pass
-
-        except BaseException as msg:
-            logging.writeToFile(str(msg) + " [S3Backups.setupCron]")
-
     def connectAccount(self):
         try:
@@ -160,8 +123,6 @@ class S3Backups(multi.Thread):
             ##
 
-            self.setupCron()
-
             return proc.ajax(1, None)
         except BaseException as msg:
@@ -519,111 +480,6 @@ class S3Backups(multi.Thread):
                     items.delete()
 
         return 0, status
 
-    def forceRunAWSBackup(self):
-        try:
-            plan = BackupPlan.objects.get(name=self.data['planName'])
-            logging.writeToFile(plan.config)
-            bucketName = plan.bucket.strip('\n').strip(' ')
-            runTime = time.strftime("%d:%m:%Y")
-
-            aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()
-
-            client = boto3.client(
-                's3',
-                aws_access_key_id=aws_access_key_id,
-                aws_secret_access_key=aws_secret_access_key,
-                #region_name=region
-            )
-
-            config = TransferConfig(multipart_threshold=1024 * 25, max_concurrency=10,
-                                    multipart_chunksize=1024 * 25, use_threads=True)
-
-            ## Set Expiration for objects
-
-            try:
-                client.put_bucket_lifecycle_configuration(
-                    Bucket='string',
-                    LifecycleConfiguration={
-                        'Rules': [
-                            {
-                                'Expiration': {
-                                    'Days': plan.retention,
-                                    'ExpiredObjectDeleteMarker': True
-                                },
-                                'ID': plan.name,
-                                'Prefix': '',
-                                'Filter': {
-                                    'Prefix': plan.name + '/',
-                                },
-                                'Status': 'Enabled',
-                            },
-                        ]
-                    }
-                )
-            except BaseException as msg:
-                BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR',
-                           msg=str(msg)).save()
-
-            ##
-
-            userID = self.request.session['userID']
-            currentACL = ACLManager.loadedACL(userID)
-
-            if currentACL['admin'] == 0:
-                BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='INFO',
-                           msg='Unauthorised user tried to run AWS Backups.').save()
-                return 0
-
-            BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
-                       msg='Starting backup process..').save()
-
-            PlanConfig = json.loads(plan.config)
-
-            for items in plan.websitesinplan_set.all():
-                from plogical.backupUtilities import backupUtilities
-                tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))
-                extraArgs = {}
-                extraArgs['domain'] = items.domain
-                extraArgs['tempStatusPath'] = tempStatusPath
-                extraArgs['data'] = PlanConfig['data']
-                extraArgs['emails'] = PlanConfig['emails']
-                extraArgs['databases'] = PlanConfig['databases']
-                bu = backupUtilities(extraArgs)
-                result = bu.CloudBackups()
-
-                finalResult = open(tempStatusPath, 'r').read()
-
-                if result[0] == 1:
-                    key = plan.name + '/' + runTime + '/' + result[1]
-                    client.upload_file(
-                        result[1],
-                        bucketName,
-                        key,
-                        Config=config,
-                    )
-
-                    command = 'rm -f ' + result[1]
-                    ProcessUtilities.executioner(command)
-
-                    BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
-                               msg='Backup successful for ' + items.domain + '.').save()
-                else:
-                    BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
-                               msg='Backup failed for ' + items.domain + '. Error: ' + finalResult).save()
-
-            plan.lastRun = runTime
-            plan.save()
-
-            BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
-                       msg='Backup Process Finished.').save()
-
-        except BaseException as msg:
-            logging.writeToFile(str(msg) + ' [S3Backups.runBackupPlan]')
-            plan = BackupPlan.objects.get(name=self.data['planName'])
-            BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR', msg=str(msg)).save()
-
     def connectAccountDO(self):
         try:
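The deleted method above tried to enforce retention with an S3 lifecycle rule, but it kept the boto3 documentation's literal `Bucket='string'`, so the rule never landed on a real bucket; the replacement in IncScheduler.py deletes aged objects manually instead. For reference, a corrected sketch of the lifecycle call: bucket name, rule ID, and retention are placeholders, and the mutually exclusive `'Prefix'`/`'Filter'` and `'Days'`/`'ExpiredObjectDeleteMarker'` fields from the old code are dropped.

```python
import boto3

client = boto3.client('s3',
                      aws_access_key_id='AKIA...',   # placeholder
                      aws_secret_access_key='...')   # placeholder

# Expire everything under the plan's prefix after the given number of days.
client.put_bucket_lifecycle_configuration(
    Bucket='my-backup-bucket',                       # the old code hard-coded 'string'
    LifecycleConfiguration={
        'Rules': [
            {
                'Expiration': {'Days': 30},          # plan.retention in the old code
                'ID': 'myPlan',
                'Filter': {'Prefix': 'myPlan/'},
                'Status': 'Enabled',
            },
        ]
    },
)
```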
@@ -649,9 +505,6 @@ class S3Backups(multi.Thread):
             credFile.write(self.data['credData'])
             credFile.close()
 
-            ##
-
-            self.setupCron()
 
             return proc.ajax(1, None)
@@ -1023,8 +876,6 @@ class S3Backups(multi.Thread):
                               secretKey=self.data['secretKey'])
             newNode.save()
 
-            self.setupCron()
-
             return proc.ajax(1, None)
 
         except BaseException as msg:
@@ -1361,93 +1212,3 @@ class S3Backups(multi.Thread):
         except BaseException as msg:
             proc = httpProc(self.request, None, None)
             return proc.ajax(0, str(msg))
-
-    def runAWSBackups(self):
-        try:
-            admin = Administrator.objects.get(userName='admin')
-            self.request.session['userID'] = admin.pk
-
-            for plan in BackupPlan.objects.all():
-                lastRunDay = plan.lastRun.split(':')[0]
-                lastRunMonth = plan.lastRun.split(':')[1]
-
-                if plan.freq == 'Daily' and lastRunDay != time.strftime("%d"):
-                    self.data = {}
-                    self.data['planName'] = plan.name
-                    self.forceRunAWSBackup()
-                else:
-                    if lastRunMonth == time.strftime("%m"):
-                        days = int(time.strftime("%d")) - int(lastRunDay)
-                        if days >= 6:
-                            self.data = {}
-                            self.data['planName'] = plan.name
-                            self.forceRunAWSBackup()
-                    else:
-                        days = 30 - int(lastRunDay)
-                        days = days + int(time.strftime("%d"))
-                        if days >= 6:
-                            self.data = {}
-                            self.data['planName'] = plan.name
-                            self.forceRunAWSBackup()
-
-            for plan in BackupPlanDO.objects.all():
-                lastRunDay = plan.lastRun.split(':')[0]
-                lastRunMonth = plan.lastRun.split(':')[1]
-
-                if plan.freq == 'Daily' and lastRunDay != time.strftime("%d"):
-                    self.data = {}
-                    self.data['planName'] = plan.name
-                    self.forceRunAWSBackupDO()
-                else:
-                    if lastRunMonth == time.strftime("%m"):
-                        days = int(time.strftime("%d")) - int(lastRunDay)
-                        if days >= 6:
-                            self.data = {}
-                            self.data['planName'] = plan.name
-                            self.forceRunAWSBackupDO()
-                    else:
-                        days = 30 - int(lastRunDay)
-                        days = days + int(time.strftime("%d"))
-                        if days >= 6:
-                            self.data = {}
-                            self.data['planName'] = plan.name
-                            self.forceRunAWSBackupDO()
-
-            for plan in BackupPlanMINIO.objects.all():
-                lastRunDay = plan.lastRun.split(':')[0]
-                lastRunMonth = plan.lastRun.split(':')[1]
-
-                if plan.freq == 'Daily' and lastRunDay != time.strftime("%d"):
-                    self.data = {}
-                    self.data['planName'] = plan.name
-                    self.forceRunAWSBackupMINIO()
-                else:
-                    if lastRunMonth == time.strftime("%m"):
-                        days = int(time.strftime("%d")) - int(lastRunDay)
-                        if days >= 6:
-                            self.data = {}
-                            self.data['planName'] = plan.name
-                            self.forceRunAWSBackupMINIO()
-                    else:
-                        days = 30 - int(lastRunDay)
-                        days = days + int(time.strftime("%d"))
-                        if days >= 6:
-                            self.data = {}
-                            self.data['planName'] = plan.name
-                            self.forceRunAWSBackupMINIO()
-
-        except BaseException as msg:
-            logging.writeToFile(str(msg) + ' [S3Backups.runAWSBackups]')
-
-def main():
-    pathToFile = "/home/cyberpanel/" + str(randint(1000, 9999))
-    file = open(pathToFile, "w")
-    file.close()
-
-    finalData = json.dumps({'randomFile': pathToFile})
-    requests.post("https://localhost:8090/api/runAWSBackups", data=finalData, verify=False)
-
-if __name__ == "__main__":
-    main()
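With the deleted `main()` gone, s3Backups.py no longer triggers runs by POSTing to the panel's own API; scheduling now flows through IncScheduler.py. A sketch of the two entry points as an operator would drive them; the interpreter and script paths are taken from the diff, and the plan name is a placeholder:

```python
# Sketch: driving the new IncScheduler entry points from a wrapper script.
import subprocess

PY = "/usr/local/CyberCP/bin/python"
SCHEDULER = "/usr/local/CyberCP/plogical/IncScheduler.py"

# Scheduled pass (e.g. from cron): runs the regular jobs and, as of this
# commit, IncScheduler.runAWSBackups() at the end.
subprocess.run([PY, SCHEDULER, "Daily"], check=False)

# One-off run of a single plan: main() dispatches straight to
# forceRunAWSBackup(planName) and returns.
subprocess.run([PY, SCHEDULER, "forceRunAWSBackup", "--planName", "myplan"], check=False)
```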