minio block storage backups

usmannasir
2019-01-16 15:08:41 +05:00
parent a2497d11a7
commit 6c4ccc1bff
6 changed files with 575 additions and 77 deletions

View File

@@ -95,6 +95,7 @@
<link rel="stylesheet" type="text/css" <link rel="stylesheet" type="text/css"
href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.css"> href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.css">
<link rel="stylesheet" type="text/css" href="{% static 'baseTemplate/custom-js/pnotify.custom.min.css' %}"> <link rel="stylesheet" type="text/css" href="{% static 'baseTemplate/custom-js/pnotify.custom.min.css' %}">
<link rel="stylesheet" type="text/css" href="{% static 'websiteFunctions/websiteFunctions.css' %}">
<!-- Components theme, component below was above three CSS files. -->

View File

@@ -1168,3 +1168,93 @@ class CloudManager:
            return HttpResponse(finalData)
        except BaseException, msg:
            return self.ajaxPre(0, str(msg))

    def addMINIONode(self, request):
        try:
            request.session['userID'] = self.admin.pk
            s3 = S3Backups(request, self.data, 'addMINIONode')
            return s3.addMINIONode()
        except BaseException, msg:
            return self.ajaxPre(0, str(msg))

    def fetchMINIONodes(self, request):
        try:
            request.session['userID'] = self.admin.pk
            s3 = S3Backups(request, self.data, 'fetchMINIONodes')
            return s3.fetchMINIONodes()
        except BaseException, msg:
            return self.ajaxPre(0, str(msg))

    def deleteMINIONode(self, request):
        try:
            request.session['userID'] = self.admin.pk
            s3 = S3Backups(request, self.data, 'deleteMINIONode')
            return s3.deleteMINIONode()
        except BaseException, msg:
            return self.ajaxPre(0, str(msg))

    def createPlanMINIO(self, request):
        try:
            request.session['userID'] = self.admin.pk
            s3 = S3Backups(request, self.data, 'createPlanMINIO')
            return s3.createPlanMINIO()
        except BaseException, msg:
            return self.ajaxPre(0, str(msg))

    def fetchBackupPlansMINIO(self, request):
        try:
            request.session['userID'] = self.admin.pk
            s3 = S3Backups(request, self.data, 'fetchBackupPlansMINIO')
            return s3.fetchBackupPlansMINIO()
        except BaseException, msg:
            return self.ajaxPre(0, str(msg))

    def deletePlanMINIO(self, request):
        try:
            request.session['userID'] = self.admin.pk
            s3 = S3Backups(request, self.data, 'deletePlanMINIO')
            return s3.deletePlanMINIO()
        except BaseException, msg:
            return self.ajaxPre(0, str(msg))

    def savePlanChangesMINIO(self, request):
        try:
            request.session['userID'] = self.admin.pk
            s3 = S3Backups(request, self.data, 'savePlanChangesMINIO')
            return s3.savePlanChangesMINIO()
        except BaseException, msg:
            return self.ajaxPre(0, str(msg))

    def forceRunAWSBackupMINIO(self, request):
        try:
            request.session['userID'] = self.admin.pk
            s3 = S3Backups(request, self.data, 'forceRunAWSBackupMINIO')
            s3.start()
            return self.ajaxPre(1, None)
        except BaseException, msg:
            return self.ajaxPre(0, str(msg))

    def fetchWebsitesInPlanMINIO(self, request):
        try:
            request.session['userID'] = self.admin.pk
            s3 = S3Backups(request, self.data, 'fetchWebsitesInPlanMINIO')
            return s3.fetchWebsitesInPlanMINIO()
        except BaseException, msg:
            return self.ajaxPre(0, str(msg))

    def fetchBackupLogsMINIO(self, request):
        try:
            request.session['userID'] = self.admin.pk
            s3 = S3Backups(request, self.data, 'fetchBackupLogsMINIO')
            return s3.fetchBackupLogsMINIO()
        except BaseException, msg:
            return self.ajaxPre(0, str(msg))

    def deleteDomainFromPlanMINIO(self, request):
        try:
            request.session['userID'] = self.admin.pk
            s3 = S3Backups(request, self.data, 'deleteDomainFromPlanMINIO')
            return s3.deleteDomainFromPlanMINIO()
        except BaseException, msg:
            return self.ajaxPre(0, str(msg))
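
Note: the eleven handlers above are identical apart from the S3Backups function name they dispatch to (only forceRunAWSBackupMINIO starts a background thread and returns immediately). A minimal table-driven alternative is sketched below; it is not part of this commit, and the helper name dispatchMINIO is hypothetical.

    # Hypothetical sketch only -- not in this commit. One generic handler
    # could replace the repeated try/except blocks above.
    def dispatchMINIO(self, request, function):
        try:
            request.session['userID'] = self.admin.pk
            s3 = S3Backups(request, self.data, function)
            if function == 'forceRunAWSBackupMINIO':
                s3.start()  # the backup run happens in a background thread
                return self.ajaxPre(1, None)
            return getattr(s3, function)()  # e.g. s3.addMINIONode()
        except BaseException, msg:
            return self.ajaxPre(0, str(msg))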

View File

@@ -239,6 +239,28 @@ def router(request):
            return cm.fetchTableData(request)
        elif controller == 'fetchStructure':
            return cm.fetchStructure(request)
        elif controller == 'addMINIONode':
            return cm.addMINIONode(request)
        elif controller == 'fetchMINIONodes':
            return cm.fetchMINIONodes(request)
        elif controller == 'deleteMINIONode':
            return cm.deleteMINIONode(request)
        elif controller == 'createPlanMINIO':
            return cm.createPlanMINIO(request)
        elif controller == 'fetchBackupPlansMINIO':
            return cm.fetchBackupPlansMINIO(request)
        elif controller == 'deletePlanMINIO':
            return cm.deletePlanMINIO(request)
        elif controller == 'savePlanChangesMINIO':
            return cm.savePlanChangesMINIO(request)
        elif controller == 'forceRunAWSBackupMINIO':
            return cm.forceRunAWSBackupMINIO(request)
        elif controller == 'fetchWebsitesInPlanMINIO':
            return cm.fetchWebsitesInPlanMINIO(request)
        elif controller == 'fetchBackupLogsMINIO':
            return cm.fetchBackupLogsMINIO(request)
        elif controller == 'deleteDomainFromPlanMINIO':
            return cm.deleteDomainFromPlanMINIO(request)
        else:
            return cm.ajaxPre(0, 'This function is not available in your version of CyberPanel.')
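
Note: a hedged example of how a remote caller could reach the controllers added above through this router; the /cloudAPI/ path, port, and the serverUserName/token fields are assumptions and are not defined in this diff.

# Hedged caller sketch -- endpoint URL and auth fields are assumptions.
import json
import requests

payload = {
    'serverUserName': 'admin',        # assumed auth field
    'token': 'cloud-api-token',       # assumed auth field
    'controller': 'fetchMINIONodes',  # one of the controllers added above
}
resp = requests.post('https://cyberpanel.example.com:8090/cloudAPI/',  # assumed URL
                     data=json.dumps(payload), verify=False)
print(resp.json())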

View File

@@ -33,7 +33,6 @@ jsonpointer==1.9
kitchen==1.1.1
MarkupSafe==0.11
mock==2.0.0
minio==4.0.9
parsedatetime==2.4
pbr==4.0.4
perf==0.1
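
Note: minio==4.0.9 is dropped from the requirements in this hunk; the MinIO code added below talks to the node through boto3's S3-compatible client instead (the `from minio.error import ResponseError` import added in s3Backups.py is not referenced in the hunks shown). A minimal sketch of that client pattern, with placeholder endpoint and credentials:

# Sketch of the S3-compatible client pattern used for MinIO nodes below;
# the endpoint and keys are placeholders standing in for a MINIONodes record.
import boto3

session = boto3.session.Session()
client = session.client(
    's3',
    endpoint_url='https://minio.example.com:9000',  # MINIONodes.endPointURL
    aws_access_key_id='ACCESS_KEY',                 # MINIONodes.accessKey
    aws_secret_access_key='SECRET_KEY',             # MINIONodes.secretKey
    verify=False,
)
client.create_bucket(Bucket='dailyplan')            # bucket named after the plan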

View File

@@ -45,3 +45,29 @@ class BackupLogsDO(models.Model):
    timeStamp = models.CharField(max_length=200)
    level = models.CharField(max_length=5)
    msg = models.CharField(max_length=500)


class MINIONodes(models.Model):
    owner = models.ForeignKey(Administrator, on_delete=models.CASCADE)
    endPointURL = models.CharField(max_length=200, unique=True)
    accessKey = models.CharField(max_length=200, unique=True)
    secretKey = models.CharField(max_length=200)


class BackupPlanMINIO(models.Model):
    owner = models.ForeignKey(Administrator, on_delete=models.CASCADE)
    minioNode = models.ForeignKey(MINIONodes, on_delete=models.CASCADE)
    name = models.CharField(max_length=50, unique=True)
    freq = models.CharField(max_length=50)
    retention = models.IntegerField()
    lastRun = models.CharField(max_length=50, default='0:0:0')


class WebsitesInPlanMINIO(models.Model):
    owner = models.ForeignKey(BackupPlanMINIO, on_delete=models.CASCADE)
    domain = models.CharField(max_length=100)


class BackupLogsMINIO(models.Model):
    owner = models.ForeignKey(BackupPlanMINIO, on_delete=models.CASCADE)
    timeStamp = models.CharField(max_length=200)
    level = models.CharField(max_length=5)
    msg = models.CharField(max_length=500)
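
Note: the reverse relations used by the views below (minionodes_set, websitesinplanminio_set, backuplogsminio_set) follow from the ForeignKeys above; a schema migration for these models is assumed but not shown in this commit. An illustrative ORM sketch, assuming a configured Django environment and that the models live in the s3Backups app (as the paths in this commit suggest):

# Illustrative only; assumes Django is set up and migrations have been run.
from s3Backups.models import MINIONodes, BackupPlanMINIO, WebsitesInPlanMINIO

node = MINIONodes.objects.get(accessKey='ACCESS_KEY')
plan = BackupPlanMINIO(owner=node.owner, minioNode=node,
                       name='DailyPlan', freq='Daily', retention=5)
plan.save()
WebsitesInPlanMINIO(owner=plan, domain='example.com').save()

nodes = node.owner.minionodes_set.all()                # listed by fetchMINIONodes
domains = plan.websitesinplanminio_set.all()           # iterated by forceRunAWSBackupMINIO
logs = plan.backuplogsminio_set.all().order_by('-id')  # paged by fetchBackupLogsMINIO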

View File

@@ -9,6 +9,7 @@ try:
    import threading as multi
    from plogical.mailUtilities import mailUtilities
    import boto3
    from minio.error import ResponseError
    from boto3.s3.transfer import TransferConfig
    import json
    from .models import *
@@ -25,9 +26,9 @@ except:
import requests
import subprocess, shlex


class S3Backups(multi.Thread):

    def __init__(self, request=None, data=None, function=None):
        multi.Thread.__init__(self)
        self.request = request
        self.data = data
@@ -43,8 +44,10 @@ class S3Backups(multi.Thread):
                self.forceRunAWSBackupDO()
            elif self.function == 'runAWSBackups':
                self.runAWSBackups()
            elif self.function == 'forceRunAWSBackupMINIO':
                self.forceRunAWSBackupMINIO()
        except BaseException, msg:
            logging.writeToFile(str(msg) + ' [S3Backups.run]')

    @staticmethod
    def getPagination(records, toShow):
@@ -79,7 +82,7 @@ class S3Backups(multi.Thread):
        counter = 1
        for items in logs:
            dic = {'id': items.id, 'timeStamp': items.timeStamp, 'level': items.level, 'mesg': items.msg}

            if checker == 0:
                json_data = json_data + json.dumps(dic)
                checker = 1
@@ -133,7 +136,8 @@ class S3Backups(multi.Thread):
            writeToFile = open(pathToFile, 'w')
            for items in output:
                writeToFile.writelines(items + '\n')
            writeToFile.writelines(
                '0 0 * * * cyberpanel /usr/local/CyberCP/bin/python2 /usr/local/CyberCP/s3Backups/s3Backups.py\n')
            writeToFile.close()
            command = 'sudo mv ' + pathToFile + ' /etc/crontab'
            ProcessUtilities.executioner(command)
@@ -155,7 +159,6 @@ class S3Backups(multi.Thread):
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            s3 = boto3.resource('s3')

            json_data = "["
            checker = 0
@@ -190,8 +193,8 @@ class S3Backups(multi.Thread):
            admin = Administrator.objects.get(pk=userID)

            newPlan = BackupPlan(owner=admin, name=self.data['planName'].replace(' ', ''), freq=self.data['frequency'],
                                 retention=self.data['retenion'], bucket=self.data['bucketName'])
            newPlan.save()

            for items in self.data['websitesInPlan']:
@@ -216,7 +219,6 @@ class S3Backups(multi.Thread):
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            admin = Administrator.objects.get(pk=userID)

            json_data = "["
            checker = 0
@@ -275,7 +277,6 @@ class S3Backups(multi.Thread):
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            plan = BackupPlan.objects.get(name=self.data['planName'])

            json_data = "["
            checker = 0
@@ -332,8 +333,6 @@ class S3Backups(multi.Thread):
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            logging.writeToFile('hello world')

            changePlan = BackupPlan.objects.get(name=self.data['planName'])
            changePlan.bucket = self.data['bucketName']
@@ -433,7 +432,8 @@ class S3Backups(multi.Thread):
                    }
                )
            except BaseException, msg:
                BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR',
                           msg=str(msg)).save()

            ##
@@ -441,10 +441,12 @@ class S3Backups(multi.Thread):
            currentACL = ACLManager.loadedACL(userID)
            if currentACL['admin'] == 0:
                BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='INFO',
                           msg='Unauthorised user tried to run AWS Backups.').save()
                return 0
            BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                       msg='Starting backup process..').save()

            for items in plan.websitesinplan_set.all():
                result = self.createBackup(items.domain)
@@ -456,15 +458,17 @@ class S3Backups(multi.Thread):
                        key,
                        Config=config,
                    )
                    BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                               msg='Backup successful for ' + items.domain + '.').save()
                else:
                    BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                               msg='Backup failed for ' + items.domain + '. Error: ' + result[1]).save()

            plan.lastRun = runTime
            plan.save()
            BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                       msg='Backup Process Finished.').save()
        except BaseException, msg:
            logging.writeToFile(str(msg) + ' [S3Backups.runBackupPlan]')
            plan = BackupPlan.objects.get(name=self.data['planName'])
@@ -513,7 +517,8 @@ class S3Backups(multi.Thread):
            writeToFile = open(pathToFile, 'w')
            for items in output:
                writeToFile.writelines(items + '\n')
            writeToFile.writelines(
                '0 0 * * * cyberpanel /usr/local/CyberCP/bin/python2 /usr/local/CyberCP/s3Backups/s3Backups.py\n')
            writeToFile.close()
            command = 'sudo mv ' + pathToFile + ' /etc/crontab'
            ProcessUtilities.executioner(command)
@@ -586,9 +591,11 @@ class S3Backups(multi.Thread):
            admin = Administrator.objects.get(pk=userID)

            newPlan = BackupPlanDO(owner=admin, name=self.data['planName'].replace(' ', ''),
                                   freq=self.data['frequency'],
                                   retention=self.data['retenion'], bucket=self.data['bucketName'],
                                   type=self.data['type'],
                                   region=self.data['region'])
            newPlan.save()

            for items in self.data['websitesInPlan']:
@@ -613,12 +620,11 @@ class S3Backups(multi.Thread):
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            admin = Administrator.objects.get(pk=userID)

            json_data = "["
            checker = 0

            for plan in admin.backupplando_set.filter(type=self.data['type']):
                dic = {
                    'name': plan.name,
                    'bucket': plan.bucket,
@@ -672,7 +678,6 @@ class S3Backups(multi.Thread):
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            plan = BackupPlanDO.objects.get(name=self.data['planName'])

            json_data = "["
            checker = 0
@@ -759,8 +764,6 @@ class S3Backups(multi.Thread):
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            logging.writeToFile('hello world')

            changePlan = BackupPlanDO.objects.get(name=self.data['planName'])
            changePlan.bucket = self.data['bucketName']
@@ -828,7 +831,8 @@ class S3Backups(multi.Thread):
                    }
                )
            except BaseException, msg:
                BackupLogsDO(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR',
                             msg=str(msg)).save()

            ##
@@ -836,10 +840,12 @@ class S3Backups(multi.Thread):
            currentACL = ACLManager.loadedACL(userID)
            if currentACL['admin'] == 0:
                BackupLogsDO(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='INFO',
                             msg='Unauthorised user tried to run AWS Backups.').save()
                return 0
            BackupLogsDO(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                         msg='Starting backup process..').save()

            for items in plan.websitesinplando_set.all():
                result = self.createBackup(items.domain)
@@ -851,15 +857,17 @@ class S3Backups(multi.Thread):
                        key,
                        Config=config,
                    )
                    BackupLogsDO(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                                 msg='Backup successful for ' + items.domain + '.').save()
                else:
                    BackupLogsDO(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                                 msg='Backup failed for ' + items.domain + '. Error: ' + result[1]).save()

            plan.lastRun = runTime
            plan.save()
            BackupLogsDO(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                         msg='Backup Process Finished.').save()
        except BaseException, msg:
            logging.writeToFile(str(msg) + ' [S3Backups.forceRunAWSBackupDO]')
            plan = BackupPlanDO.objects.get(name=self.data['planName'])
@@ -881,14 +889,14 @@ class S3Backups(multi.Thread):
                    else:
                        if lastRunMonth == time.strftime("%m"):
                            days = int(time.strftime("%d")) - int(lastRunDay)
                            if days >= 6:
                                self.data = {}
                                self.data['planName'] = plan.name
                                self.forceRunAWSBackup()
                        else:
                            days = 30 - int(lastRunDay)
                            days = days + int(time.strftime("%d"))
                            if days >= 6:
                                self.data = {}
                                self.data['planName'] = plan.name
                                self.forceRunAWSBackup()
@@ -904,14 +912,14 @@ class S3Backups(multi.Thread):
                    else:
                        if lastRunMonth == time.strftime("%m"):
                            days = int(time.strftime("%d")) - int(lastRunDay)
                            if days >= 6:
                                self.data = {}
                                self.data['planName'] = plan.name
                                self.forceRunAWSBackupDO()
                        else:
                            days = 30 - int(lastRunDay)
                            days = days + int(time.strftime("%d"))
                            if days >= 6:
                                self.data = {}
                                self.data['planName'] = plan.name
                                self.forceRunAWSBackupDO()
@@ -919,15 +927,367 @@ class S3Backups(multi.Thread):
        except BaseException, msg:
            logging.writeToFile(str(msg) + ' [S3Backups.runAWSBackups]')

    def addMINIONode(self):
        try:
            proc = httpProc(self.request, None, None)
            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use MINIO Backups.')
            admin = Administrator.objects.get(pk=userID)
            newNode = MINIONodes(owner=admin, endPointURL=self.data['endPoint'], accessKey=self.data['accessKey'],
                                 secretKey=self.data['secretKey'])
            newNode.save()
            return proc.ajax(1, None)
        except BaseException, msg:
            logging.writeToFile(str(msg) + ' [addMINIONode]')
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def fetchMINIONodes(self):
        try:
            proc = httpProc(self.request, None, None)
            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use MINIO Backups.')
            admin = Administrator.objects.get(pk=userID)

            json_data = "["
            checker = 0

            for node in admin.minionodes_set.all():
                dic = {
                    'accessKey': node.accessKey,
                    'endPoint': node.endPointURL.lstrip('https://').lstrip('http://')
                }

                if checker == 0:
                    json_data = json_data + json.dumps(dic)
                    checker = 1
                else:
                    json_data = json_data + ',' + json.dumps(dic)

            json_data = json_data + ']'
            final_json = json.dumps({'status': 1, 'error_message': "None", "data": json_data})
            return HttpResponse(final_json)
        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def deleteMINIONode(self):
        try:
            proc = httpProc(self.request, None, None)
            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            delNode = MINIONodes.objects.get(accessKey=self.data['accessKey'])
            delNode.delete()
            return proc.ajax(1, None)
        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def createPlanMINIO(self):
        try:
            proc = httpProc(self.request, None, None)
            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            admin = Administrator.objects.get(pk=userID)
            minioNode = MINIONodes.objects.get(accessKey=self.data['minioNode'])
            newPlan = BackupPlanMINIO(owner=admin, name=self.data['planName'].replace(' ', ''),
                                      freq=self.data['frequency'],
                                      retention=self.data['retenion'], minioNode=minioNode)
            newPlan.save()

            for items in self.data['websitesInPlan']:
                wp = WebsitesInPlanMINIO(owner=newPlan, domain=items)
                wp.save()

            return proc.ajax(1, None)
        except BaseException, msg:
            logging.writeToFile(str(msg) + ' [createPlanDO]')
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def fetchBackupPlansMINIO(self):
        try:
            proc = httpProc(self.request, None, None)
            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            admin = Administrator.objects.get(pk=userID)

            json_data = "["
            checker = 0

            for plan in admin.backupplanminio_set.all():
                dic = {
                    'name': plan.name,
                    'minioNode': plan.minioNode.accessKey,
                    'freq': plan.freq,
                    'retention': plan.retention,
                    'lastRun': plan.lastRun,
                }

                if checker == 0:
                    json_data = json_data + json.dumps(dic)
                    checker = 1
                else:
                    json_data = json_data + ',' + json.dumps(dic)

            json_data = json_data + ']'
            final_json = json.dumps({'status': 1, 'error_message': "None", "data": json_data})
            return HttpResponse(final_json)
        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def deletePlanMINIO(self):
        try:
            proc = httpProc(self.request, None, None)
            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            delPlan = BackupPlanMINIO.objects.get(name=self.data['planName'])
            delPlan.delete()
            return proc.ajax(1, None)
        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def savePlanChangesMINIO(self):
        try:
            proc = httpProc(self.request, None, None)
            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            changePlan = BackupPlanMINIO.objects.get(name=self.data['planName'])
            minioNode = MINIONodes.objects.get(accessKey=self.data['minioNode'].strip(' ').strip('\n'))
            changePlan.minioNode = minioNode
            changePlan.freq = self.data['frequency']
            changePlan.retention = self.data['retention']
            changePlan.save()
            return proc.ajax(1, None)
        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def forceRunAWSBackupMINIO(self):
        try:
            plan = BackupPlanMINIO.objects.get(name=self.data['planName'])
            runTime = time.strftime("%d:%m:%Y")

            ## Setup MINIO Client

            endPoint = plan.minioNode.endPointURL
            accessID = plan.minioNode.accessKey
            secret = plan.minioNode.secretKey

            session = boto3.session.Session()
            client = session.client(
                's3',
                endpoint_url= endPoint,
                aws_access_key_id=accessID,
                aws_secret_access_key=secret,
                verify= False
            )

            config = TransferConfig(multipart_threshold=1024 * 25, max_concurrency=10,
                                    multipart_chunksize=1024 * 25, use_threads=True)

            try:
                client.create_bucket(Bucket=plan.name.lower())
            except BaseException, msg:
                BackupLogsMINIO(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                                msg=str(msg)).save()
                return 0

            ##

            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)
            if currentACL['admin'] == 0:
                BackupLogsMINIO(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='INFO',
                                msg='Unauthorised user tried to run AWS Backups.').save()
                return 0
            BackupLogsMINIO(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                            msg='Starting backup process..').save()

            for items in plan.websitesinplanminio_set.all():
                result = self.createBackup(items.domain)
                if result[0]:
                    key = runTime + '/' + result[1].split('/')[-1] + ".tar.gz"
                    client.upload_file(
                        result[1] + ".tar.gz",
                        plan.name.lower(),
                        key,
                        Config=config,
                    )
                    BackupLogsMINIO(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                                    msg='Backup successful for ' + items.domain + '.').save()
                else:
                    BackupLogsMINIO(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                                    msg='Backup failed for ' + items.domain + '. Error: ' + result[1]).save()

            plan.lastRun = runTime
            plan.save()
            BackupLogsMINIO(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                            msg='Backup Process Finished.').save()
        except BaseException, msg:
            logging.writeToFile(str(msg) + ' [S3Backups.forceRunAWSBackupMINIO]')
            plan = BackupPlanMINIO.objects.get(name=self.data['planName'])
            BackupLogsMINIO(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR',
                            msg=str(msg)).save()

    def fetchWebsitesInPlanMINIO(self):
        try:
            proc = httpProc(self.request, None, None)
            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            plan = BackupPlanMINIO.objects.get(name=self.data['planName'])

            json_data = "["
            checker = 0

            for website in plan.websitesinplanminio_set.all():
                dic = {
                    'id': website.id,
                    'domain': website.domain,
                }

                if checker == 0:
                    json_data = json_data + json.dumps(dic)
                    checker = 1
                else:
                    json_data = json_data + ',' + json.dumps(dic)

            json_data = json_data + ']'
            final_json = json.dumps({'status': 1, 'error_message': "None", "data": json_data})
            return HttpResponse(final_json)
        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def fetchBackupLogsMINIO(self):
        try:
            proc = httpProc(self.request, None, None)
            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            recordsToShow = int(self.data['recordsToShow'])
            page = int(self.data['page'])

            backupPlan = BackupPlanMINIO.objects.get(name=self.data['planName'])
            logs = backupPlan.backuplogsminio_set.all().order_by('-id')

            pagination = S3Backups.getPagination(len(logs), recordsToShow)
            endPageNumber, finalPageNumber = S3Backups.recordsPointer(page, recordsToShow)
            jsonData = S3Backups.getLogsInJson(logs[finalPageNumber:endPageNumber])

            data = {}
            data['data'] = jsonData
            data['pagination'] = pagination

            return proc.ajax(1, None, data)
        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajaxPre(0, str(msg))

    def deleteDomainFromPlanMINIO(self):
        try:
            proc = httpProc(self.request, None, None)
            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)
            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
            plan = BackupPlanMINIO.objects.get(name=self.data['planName'])
            web = WebsitesInPlanMINIO.objects.get(owner=plan, domain=self.data['domainName'])
            web.delete()
            return proc.ajax(1, None)
        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))
def main():
pathToFile = "/home/cyberpanel/" + str(randint(1000, 9999)) pathToFile = "/home/cyberpanel/" + str(randint(1000, 9999))
file = open(pathToFile, "w") file = open(pathToFile, "w")
file.close() file.close()
finalData = json.dumps({'randomFile': pathToFile}) finalData = json.dumps({'randomFile': pathToFile})
requests.post("http://localhost:5003/api/runAWSBackups", data=finalData,verify=False) requests.post("http://localhost:5003/api/runAWSBackups", data=finalData, verify=False)
if __name__ == "__main__": if __name__ == "__main__":
main() main()
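
Note: forceRunAWSBackupMINIO uploads each site archive into a bucket named after the plan (lower-cased) under a key prefixed with the run date in %d:%m:%Y form, and the crontab line written above triggers runAWSBackups daily through the local API on port 5003. A hedged sketch for inspecting what a run uploaded; the endpoint and credentials are placeholders standing in for a MINIONodes record.

# Verification sketch only; placeholders stand in for a MINIONodes record.
import boto3

client = boto3.session.Session().client(
    's3',
    endpoint_url='https://minio.example.com:9000',
    aws_access_key_id='ACCESS_KEY',
    aws_secret_access_key='SECRET_KEY',
    verify=False,
)
# Keys look like '16:01:2019/backup.example.com.tar.gz' inside bucket 'dailyplan'.
for obj in client.list_objects_v2(Bucket='dailyplan').get('Contents', []):
    print(obj['Key'] + ' - ' + str(obj['Size']) + ' bytes')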