mirror of https://github.com/usmannasir/cyberpanel.git (synced 2025-11-10 15:26:13 +01:00)
mysql tuning
@@ -39,6 +39,8 @@ class S3Backups(multi.Thread):
                self.connectAccount()
            elif self.function == 'forceRunAWSBackup':
                self.forceRunAWSBackup()
            elif self.function == 'forceRunAWSBackupDO':
                self.forceRunAWSBackupDO()
            elif self.function == 'runAWSBackups':
                self.runAWSBackups()
        except BaseException, msg:

@@ -468,6 +470,401 @@ class S3Backups(multi.Thread):
            plan = BackupPlan.objects.get(name=self.data['planName'])
            BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR', msg=str(msg)).save()

    def connectAccountDO(self):
        try:

            proc = httpProc(self.request, None, None)

            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)

            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')

            mailUtilities.checkHome()

            path = '/home/cyberpanel/.do'

            if not os.path.exists(path):
                os.mkdir(path)

            credentials = path + '/credentials'

            credFile = open(credentials, 'w')
            credFile.write(self.data['credData'])
            credFile.close()

            ##

            cronPath = '/etc/crontab'

            command = 'sudo cat ' + cronPath
            output = subprocess.check_output(shlex.split(command)).split('\n')

            insertCron = 1

            for items in output:
                if items.find('s3backups.py') > -1:
                    insertCron = 0
                    break

            if insertCron:
                pathToFile = "/home/cyberpanel/" + str(randint(1000, 9999))
                writeToFile = open(pathToFile, 'w')
                for items in output:
                    writeToFile.writelines(items + '\n')
                writeToFile.writelines('0 0 * * * cyberpanel /usr/local/CyberCP/bin/python2 /usr/local/CyberCP/s3Backups/s3Backups.py\n')
                writeToFile.close()
                command = 'sudo mv ' + pathToFile + ' /etc/crontab'
                ProcessUtilities.executioner(command)

            return proc.ajax(1, None)

        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def fetchBucketsDO(self):
        try:

            proc = httpProc(self.request, None, None)

            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)

            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')

            filePath = '/home/cyberpanel/.do/credentials'

            data = open(filePath, 'r').readlines()

            accessID = data[1].split('=')[1].strip(' ').strip('\n')
            secret = data[2].split('=')[1].strip(' ').strip('\n')

            session = boto3.session.Session()
            client = session.client(
                's3',
                region_name=self.data['doRegion'],
                endpoint_url='https://' + self.data['doRegion'] + '.digitaloceanspaces.com',
                aws_access_key_id=accessID,
                aws_secret_access_key=secret
            )
            response = client.list_buckets()
            spaces = [space['Name'] for space in response['Buckets']]
            json_data = "["
            checker = 0

            for space in spaces:
                dic = {'name': space}

                if checker == 0:
                    json_data = json_data + json.dumps(dic)
                    checker = 1
                else:
                    json_data = json_data + ',' + json.dumps(dic)

            json_data = json_data + ']'
            final_json = json.dumps({'status': 1, 'error_message': "None", "data": json_data})
            return HttpResponse(final_json)

        except BaseException, msg:
            logging.writeToFile(str(msg))
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def createPlanDO(self):
        try:

            proc = httpProc(self.request, None, None)

            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)

            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')

            admin = Administrator.objects.get(pk=userID)

            newPlan = BackupPlanDO(owner=admin, name=self.data['planName'], freq=self.data['frequency'],
                                   retention=self.data['retenion'], bucket=self.data['bucketName'], type=self.data['type'],
                                   region=self.data['region'])
            newPlan.save()

            for items in self.data['websitesInPlan']:
                wp = WebsitesInPlanDO(owner=newPlan, domain=items)
                wp.save()

            return proc.ajax(1, None)

        except BaseException, msg:
            logging.writeToFile(str(msg) + ' [createPlanDO]')
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def fetchBackupPlansDO(self):
        try:

            proc = httpProc(self.request, None, None)

            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)

            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')

            admin = Administrator.objects.get(pk=userID)
            json_data = "["
            checker = 0

            for plan in admin.backupplando_set.filter(type=self.data['type']):
                dic = {
                    'name': plan.name,
                    'bucket': plan.bucket,
                    'freq': plan.freq,
                    'retention': plan.retention,
                    'lastRun': plan.lastRun,
                }

                if checker == 0:
                    json_data = json_data + json.dumps(dic)
                    checker = 1
                else:
                    json_data = json_data + ',' + json.dumps(dic)

            json_data = json_data + ']'
            final_json = json.dumps({'status': 1, 'error_message': "None", "data": json_data})
            return HttpResponse(final_json)

        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def deletePlanDO(self):
        try:

            proc = httpProc(self.request, None, None)

            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)

            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')

            delPlan = BackupPlanDO.objects.get(name=self.data['planName'])
            delPlan.delete()

            return proc.ajax(1, None)

        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def fetchWebsitesInPlanDO(self):
        try:

            proc = httpProc(self.request, None, None)

            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)

            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')

            plan = BackupPlanDO.objects.get(name=self.data['planName'])
            json_data = "["
            checker = 0

            for website in plan.websitesinplando_set.all():
                dic = {
                    'id': website.id,
                    'domain': website.domain,
                }

                if checker == 0:
                    json_data = json_data + json.dumps(dic)
                    checker = 1
                else:
                    json_data = json_data + ',' + json.dumps(dic)

            json_data = json_data + ']'
            final_json = json.dumps({'status': 1, 'error_message': "None", "data": json_data})
            return HttpResponse(final_json)

        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def fetchBackupLogsDO(self):
        try:
            proc = httpProc(self.request, None, None)

            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)

            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')

            recordsToShow = int(self.data['recordsToShow'])
            page = int(self.data['page'])

            backupPlan = BackupPlanDO.objects.get(name=self.data['planName'])
            logs = backupPlan.backuplogsdo_set.all().order_by('-id')

            pagination = S3Backups.getPagination(len(logs), recordsToShow)
            endPageNumber, finalPageNumber = S3Backups.recordsPointer(page, recordsToShow)
            jsonData = S3Backups.getLogsInJson(logs[finalPageNumber:endPageNumber])

            data = {}
            data['data'] = jsonData
            data['pagination'] = pagination

            return proc.ajax(1, None, data)

        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajaxPre(0, str(msg))

    def deleteDomainFromPlanDO(self):
        try:

            proc = httpProc(self.request, None, None)

            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)

            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')

            plan = BackupPlanDO.objects.get(name=self.data['planName'])
            web = WebsitesInPlanDO.objects.get(owner=plan, domain=self.data['domainName'])
            web.delete()

            return proc.ajax(1, None)

        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def savePlanChangesDO(self):
        try:

            proc = httpProc(self.request, None, None)

            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)

            if currentACL['admin'] == 0:
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')

            logging.writeToFile('hello world')

            changePlan = BackupPlanDO.objects.get(name=self.data['planName'])

            changePlan.bucket = self.data['bucketName']
            changePlan.freq = self.data['frequency']
            changePlan.retention = self.data['retention']
            changePlan.region = self.data['region']

            changePlan.save()

            return proc.ajax(1, None)

        except BaseException, msg:
            proc = httpProc(self.request, None, None)
            return proc.ajax(0, str(msg))

    def forceRunAWSBackupDO(self):
        try:

            plan = BackupPlanDO.objects.get(name=self.data['planName'])
            bucketName = plan.bucket.strip('\n').strip(' ')
            runTime = time.strftime("%d:%m:%Y")

            ## Setup DO Client

            filePath = '/home/cyberpanel/.do/credentials'

            data = open(filePath, 'r').readlines()

            accessID = data[1].split('=')[1].strip(' ').strip('\n')
            secret = data[2].split('=')[1].strip(' ').strip('\n')

            session = boto3.session.Session()
            client = session.client(
                's3',
                region_name=plan.region,
                endpoint_url='https://' + plan.region + '.digitaloceanspaces.com',
                aws_access_key_id=accessID,
                aws_secret_access_key=secret
            )

            config = TransferConfig(multipart_threshold=1024 * 25, max_concurrency=10,
                                    multipart_chunksize=1024 * 25, use_threads=True)

            ## Set Expiration for objects
            try:

                client.put_bucket_lifecycle_configuration(
                    Bucket='string',
                    LifecycleConfiguration={
                        'Rules': [
                            {
                                'Expiration': {
                                    'Days': plan.retention,
                                    'ExpiredObjectDeleteMarker': True
                                },
                                'ID': plan.name,
                                'Prefix': '',
                                'Filter': {
                                    'Prefix': plan.name + '/',
                                },
                                'Status': 'Enabled',
                            },
                        ]
                    }
                )
            except BaseException, msg:
                BackupLogsDO(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR', msg=str(msg)).save()

            ##

            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)

            if currentACL['admin'] == 0:
                BackupLogsDO(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='INFO', msg='Unauthorised user tried to run AWS Backups.').save()
                return 0

            BackupLogsDO(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), msg='Starting backup process..').save()

            for items in plan.websitesinplando_set.all():
                result = self.createBackup(items.domain)
                if result[0]:
                    key = plan.name + '/' + runTime + '/' + result[1].split('/')[-1] + ".tar.gz"
                    client.upload_file(
                        result[1] + ".tar.gz",
                        bucketName,
                        key,
                        Config=config,
                    )
                    BackupLogsDO(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), msg='Backup successful for ' + items.domain + '.').save()
                else:
                    BackupLogsDO(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), msg='Backup failed for ' + items.domain + '. Error: ' + result[1]).save()

            plan.lastRun = runTime
            plan.save()

            BackupLogsDO(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), msg='Backup Process Finished.').save()
        except BaseException, msg:
            logging.writeToFile(str(msg) + ' [S3Backups.forceRunAWSBackupDO]')
            plan = BackupPlanDO.objects.get(name=self.data['planName'])
            BackupLogsDO(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR', msg=str(msg)).save()

    def runAWSBackups(self):
        try:
            admin = Administrator.objects.get(pk=1)

@@ -496,6 +893,29 @@ class S3Backups(multi.Thread):
                            self.data['planName'] = plan.name
                            self.forceRunAWSBackup()

            for plan in BackupPlanDO.objects.all():
                lastRunDay = plan.lastRun.split(':')[0]
                lastRunMonth = plan.lastRun.split(':')[1]

                if plan.freq == 'Daily' and lastRunDay != time.strftime("%d"):
                    self.data = {}
                    self.data['planName'] = plan.name
                    self.forceRunAWSBackupDO()
                else:
                    if lastRunMonth == time.strftime("%m"):
                        days = int(time.strftime("%d")) - int(lastRunDay)
                        if days >= 6:
                            self.data = {}
                            self.data['planName'] = plan.name
                            self.forceRunAWSBackupDO()
                    else:
                        days = 30 - int(lastRunDay)
                        days = days + int(time.strftime("%d"))
                        if days >= 6:
                            self.data = {}
                            self.data['planName'] = plan.name
                            self.forceRunAWSBackupDO()

        except BaseException, msg:
            logging.writeToFile(str(msg) + ' [S3Backups.runAWSBackups]')
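
For reference, below is a minimal standalone sketch (not part of the commit) of the DigitalOcean Spaces flow this diff wires up: a boto3 S3 client pointed at the regional Spaces endpoint, listing Spaces as fetchBucketsDO() does, and uploading an archive with the same TransferConfig settings used by forceRunAWSBackupDO(). It assumes the credentials layout that connectAccountDO() writes and fetchBucketsDO() parses (access key on the second line, secret on the third, each in name=value form). The helper name do_spaces_client, the nyc3 region, and the example archive and Space names are illustrative assumptions, not values taken from the commit.

# Minimal sketch, assuming the credentials layout described above.
import boto3
from boto3.s3.transfer import TransferConfig


def do_spaces_client(region, cred_path='/home/cyberpanel/.do/credentials'):
    # Read the stored key pair (assumed layout: key on line 2, secret on line 3).
    lines = open(cred_path, 'r').readlines()
    access_id = lines[1].split('=')[1].strip()
    secret = lines[2].split('=')[1].strip()

    # Spaces speaks the S3 protocol, so a plain boto3 S3 client works once it
    # is pointed at the regional Spaces endpoint.
    session = boto3.session.Session()
    return session.client(
        's3',
        region_name=region,
        endpoint_url='https://' + region + '.digitaloceanspaces.com',
        aws_access_key_id=access_id,
        aws_secret_access_key=secret,
    )


if __name__ == '__main__':
    client = do_spaces_client('nyc3')  # region name is only an example

    # List Spaces, mirroring fetchBucketsDO().
    for space in client.list_buckets()['Buckets']:
        print(space['Name'])

    # Upload a backup archive, mirroring forceRunAWSBackupDO(); the multipart
    # settings match the TransferConfig used in the diff.
    config = TransferConfig(multipart_threshold=1024 * 25, max_concurrency=10,
                            multipart_chunksize=1024 * 25, use_threads=True)
    client.upload_file('/home/example/backup.tar.gz',   # hypothetical archive path
                       'example-space',                 # hypothetical Space name
                       'myPlan/01:01:2024/backup.tar.gz',
                       Config=config)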