2019-12-10 23:04:24 +05:00
|
|
|
#!/usr/local/CyberCP/bin/python
|
2019-03-13 23:05:22 +05:00
|
|
|
import os
|
|
|
|
|
import os.path
|
|
|
|
|
import sys
|
|
|
|
|
import django
|
2020-06-13 19:43:16 +05:00
|
|
|
|
2019-03-13 23:05:22 +05:00
|
|
|
sys.path.append('/usr/local/CyberCP')
|
|
|
|
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CyberCP.settings")
|
|
|
|
|
django.setup()
|
|
|
|
|
import json
|
|
|
|
|
from plogical.acl import ACLManager
|
|
|
|
|
import plogical.CyberCPLogFileWriter as logging
|
2020-09-20 19:52:02 +05:00
|
|
|
from websiteFunctions.models import Websites, Backups, dest, backupSchedules, BackupJob, GDrive, GDriveSites
|
2019-03-13 23:05:22 +05:00
|
|
|
from plogical.virtualHostUtilities import virtualHostUtilities
|
|
|
|
|
import subprocess
|
|
|
|
|
import shlex
|
|
|
|
|
from django.shortcuts import HttpResponse, render
|
|
|
|
|
from loginSystem.models import Administrator
|
|
|
|
|
from plogical.mailUtilities import mailUtilities
|
|
|
|
|
from random import randint
|
|
|
|
|
import time
|
|
|
|
|
import plogical.backupUtilities as backupUtil
|
2019-03-21 23:26:42 +05:00
|
|
|
from plogical.processUtilities import ProcessUtilities
|
2019-07-16 23:23:16 +05:00
|
|
|
from multiprocessing import Process
|
2020-06-13 19:43:16 +05:00
|
|
|
import requests
|
|
|
|
|
import google.oauth2.credentials
|
|
|
|
|
import googleapiclient.discovery
|
|
|
|
|
from googleapiclient.discovery import build
|
2020-09-20 19:52:02 +05:00
|
|
|
from websiteFunctions.models import NormalBackupDests
|
2020-06-13 19:43:16 +05:00
|
|
|
|
2019-03-13 23:05:22 +05:00
|
|
|
|
|
|
|
|
class BackupManager:
    """View-layer helpers for CyberPanel backup/restore and Google Drive backup pages."""

    # Marker path used by local backup routines on this server.
    localBackupPath = '/home/cyberpanel/localBackupPath'

    def __init__(self, domain=None, childDomain=None):
        """Remember the optional domain / child domain this instance targets."""
        self.domain = domain
        self.childDomain = childDomain
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def loadBackupHome(self, request=None, userID=None, data=None):
|
2019-03-13 23:05:22 +05:00
|
|
|
try:
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
return render(request, 'backup/index.html', currentACL)
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
return HttpResponse(str(msg))
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def backupSite(self, request=None, userID=None, data=None):
|
2019-03-13 23:05:22 +05:00
|
|
|
try:
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
|
|
|
|
|
if ACLManager.currentContextPermission(currentACL, 'createBackup') == 0:
|
|
|
|
|
return ACLManager.loadError()
|
|
|
|
|
|
|
|
|
|
websitesName = ACLManager.findAllSites(currentACL, userID)
|
|
|
|
|
return render(request, 'backup/backup.html', {'websiteList': websitesName})
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
return HttpResponse(str(msg))
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def gDrive(self, request=None, userID=None, data=None):
|
2020-06-13 11:56:09 +05:00
|
|
|
try:
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
|
2020-06-13 14:25:29 +05:00
|
|
|
admin = Administrator.objects.get(pk=userID)
|
|
|
|
|
|
2020-06-20 22:44:55 +05:00
|
|
|
if ACLManager.currentContextPermission(currentACL, 'createBackup') == 0:
|
2020-06-13 11:56:09 +05:00
|
|
|
return ACLManager.loadError()
|
|
|
|
|
|
2020-06-13 14:25:29 +05:00
|
|
|
gDriveAcctsList = []
|
|
|
|
|
|
|
|
|
|
gDriveAccts = admin.gdrive_set.all()
|
|
|
|
|
|
|
|
|
|
for items in gDriveAccts:
|
|
|
|
|
gDriveAcctsList.append(items.name)
|
|
|
|
|
|
2020-06-13 15:30:32 +05:00
|
|
|
websitesName = ACLManager.findAllSites(currentACL, userID)
|
|
|
|
|
|
|
|
|
|
return render(request, 'backup/googleDrive.html', {'accounts': gDriveAcctsList, 'websites': websitesName})
|
2020-06-13 11:56:09 +05:00
|
|
|
except BaseException as msg:
|
|
|
|
|
return HttpResponse(str(msg))
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def gDriveSetup(self, userID=None, request=None):
|
2020-06-13 14:25:29 +05:00
|
|
|
try:
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
admin = Administrator.objects.get(pk=userID)
|
|
|
|
|
|
2020-06-20 22:44:55 +05:00
|
|
|
if ACLManager.currentContextPermission(currentACL, 'createBackup') == 0:
|
2020-06-13 14:25:29 +05:00
|
|
|
return ACLManager.loadError()
|
|
|
|
|
|
|
|
|
|
gDriveData = {}
|
|
|
|
|
gDriveData['token'] = request.GET.get('t')
|
|
|
|
|
gDriveData['refresh_token'] = request.GET.get('r')
|
|
|
|
|
gDriveData['token_uri'] = request.GET.get('to')
|
|
|
|
|
gDriveData['scopes'] = request.GET.get('s')
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
gD = GDrive(owner=admin, name=request.GET.get('n'), auth=json.dumps(gDriveData))
|
|
|
|
|
gD.save()
|
2020-06-13 15:30:32 +05:00
|
|
|
|
2020-06-13 21:56:37 +05:00
|
|
|
return self.gDrive(request, userID)
|
2020-06-13 14:25:29 +05:00
|
|
|
except BaseException as msg:
|
|
|
|
|
final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
|
|
|
|
|
final_json = json.dumps(final_dic)
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def fetchDriveLogs(self, request=None, userID=None, data=None):
|
|
|
|
|
try:
|
|
|
|
|
|
|
|
|
|
userID = request.session['userID']
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
admin = Administrator.objects.get(pk=userID)
|
|
|
|
|
|
|
|
|
|
data = json.loads(request.body)
|
|
|
|
|
|
|
|
|
|
selectedAccount = data['selectedAccount']
|
|
|
|
|
recordsToShow = int(data['recordsToShow'])
|
|
|
|
|
page = int(str(data['page']).strip('\n'))
|
|
|
|
|
|
|
|
|
|
gD = GDrive.objects.get(name=selectedAccount)
|
|
|
|
|
|
2020-06-20 22:44:55 +05:00
|
|
|
if ACLManager.checkGDriveOwnership(gD, admin, currentACL) == 1:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
return ACLManager.loadErrorJson('status', 0)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
logs = gD.gdrivejoblogs_set.all().order_by('-id')
|
|
|
|
|
|
|
|
|
|
from s3Backups.s3Backups import S3Backups
|
|
|
|
|
|
|
|
|
|
pagination = S3Backups.getPagination(len(logs), recordsToShow)
|
|
|
|
|
endPageNumber, finalPageNumber = S3Backups.recordsPointer(page, recordsToShow)
|
|
|
|
|
logs = logs[finalPageNumber:endPageNumber]
|
|
|
|
|
|
|
|
|
|
json_data = "["
|
|
|
|
|
checker = 0
|
|
|
|
|
counter = 0
|
|
|
|
|
|
|
|
|
|
from plogical.backupSchedule import backupSchedule
|
|
|
|
|
|
|
|
|
|
for log in logs:
|
|
|
|
|
|
|
|
|
|
if log.status == backupSchedule.INFO:
|
|
|
|
|
status = 'INFO'
|
|
|
|
|
else:
|
|
|
|
|
status = 'ERROR'
|
|
|
|
|
|
|
|
|
|
dic = {
|
|
|
|
|
'type': status,
|
|
|
|
|
'message': log.message
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if checker == 0:
|
|
|
|
|
json_data = json_data + json.dumps(dic)
|
|
|
|
|
checker = 1
|
|
|
|
|
else:
|
|
|
|
|
json_data = json_data + ',' + json.dumps(dic)
|
|
|
|
|
|
|
|
|
|
counter = counter + 1
|
|
|
|
|
|
|
|
|
|
json_data = json_data + ']'
|
|
|
|
|
|
|
|
|
|
data_ret = {'status': 1, 'logs': json_data, 'pagination': pagination}
|
|
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
except BaseException as msg:
|
|
|
|
|
data_ret = {'status': 0, 'error_message': str(msg)}
|
|
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
|
|
|
|
def fetchgDriveSites(self, request=None, userID=None, data=None):
|
2020-06-13 15:30:32 +05:00
|
|
|
try:
|
|
|
|
|
|
|
|
|
|
userID = request.session['userID']
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
admin = Administrator.objects.get(pk=userID)
|
|
|
|
|
|
|
|
|
|
data = json.loads(request.body)
|
|
|
|
|
|
|
|
|
|
selectedAccount = data['selectedAccount']
|
|
|
|
|
recordsToShow = int(data['recordsToShow'])
|
|
|
|
|
page = int(str(data['page']).strip('\n'))
|
|
|
|
|
|
|
|
|
|
gD = GDrive.objects.get(name=selectedAccount)
|
|
|
|
|
|
2020-06-20 22:44:55 +05:00
|
|
|
if ACLManager.checkGDriveOwnership(gD, admin, currentACL) == 1:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
return ACLManager.loadErrorJson('status', 0)
|
|
|
|
|
|
2020-06-13 15:30:32 +05:00
|
|
|
websites = gD.gdrivesites_set.all()
|
|
|
|
|
|
|
|
|
|
from s3Backups.s3Backups import S3Backups
|
|
|
|
|
|
|
|
|
|
pagination = S3Backups.getPagination(len(websites), recordsToShow)
|
|
|
|
|
endPageNumber, finalPageNumber = S3Backups.recordsPointer(page, recordsToShow)
|
|
|
|
|
finalWebsites = websites[finalPageNumber:endPageNumber]
|
|
|
|
|
|
|
|
|
|
json_data = "["
|
|
|
|
|
checker = 0
|
|
|
|
|
counter = 0
|
|
|
|
|
|
|
|
|
|
from plogical.backupSchedule import backupSchedule
|
|
|
|
|
|
|
|
|
|
for website in finalWebsites:
|
|
|
|
|
|
|
|
|
|
dic = {
|
|
|
|
|
'name': website.domain
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if checker == 0:
|
|
|
|
|
json_data = json_data + json.dumps(dic)
|
|
|
|
|
checker = 1
|
|
|
|
|
else:
|
|
|
|
|
json_data = json_data + ',' + json.dumps(dic)
|
|
|
|
|
|
|
|
|
|
counter = counter + 1
|
|
|
|
|
|
|
|
|
|
json_data = json_data + ']'
|
|
|
|
|
|
|
|
|
|
currently = gD.runTime
|
|
|
|
|
|
|
|
|
|
data_ret = {'status': 1, 'websites': json_data, 'pagination': pagination, 'currently': currently}
|
|
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
except BaseException as msg:
|
|
|
|
|
data_ret = {'status': 0, 'error_message': str(msg)}
|
|
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def addSitegDrive(self, request=None, userID=None, data=None):
|
2020-06-13 15:30:32 +05:00
|
|
|
try:
|
|
|
|
|
|
|
|
|
|
userID = request.session['userID']
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
admin = Administrator.objects.get(pk=userID)
|
|
|
|
|
|
|
|
|
|
data = json.loads(request.body)
|
|
|
|
|
|
|
|
|
|
selectedAccount = data['selectedAccount']
|
|
|
|
|
selectedWebsite = data['selectedWebsite']
|
|
|
|
|
|
|
|
|
|
gD = GDrive.objects.get(name=selectedAccount)
|
|
|
|
|
|
2020-06-20 22:44:55 +05:00
|
|
|
if ACLManager.checkGDriveOwnership(gD, admin, currentACL) == 1 and ACLManager.checkOwnership(selectedWebsite, admin, currentACL) == 1:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
return ACLManager.loadErrorJson('status', 0)
|
|
|
|
|
|
2020-06-13 15:30:32 +05:00
|
|
|
gdSite = GDriveSites(owner=gD, domain=selectedWebsite)
|
|
|
|
|
gdSite.save()
|
|
|
|
|
|
|
|
|
|
data_ret = {'status': 1}
|
|
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
|
|
|
|
except BaseException as msg:
|
|
|
|
|
data_ret = {'status': 0, 'error_message': str(msg)}
|
|
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def deleteAccountgDrive(self, request=None, userID=None, data=None):
|
2020-06-13 15:30:32 +05:00
|
|
|
try:
|
|
|
|
|
|
|
|
|
|
userID = request.session['userID']
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
admin = Administrator.objects.get(pk=userID)
|
|
|
|
|
|
|
|
|
|
data = json.loads(request.body)
|
|
|
|
|
|
|
|
|
|
selectedAccount = data['selectedAccount']
|
|
|
|
|
|
|
|
|
|
gD = GDrive.objects.get(name=selectedAccount)
|
|
|
|
|
|
2020-06-20 22:44:55 +05:00
|
|
|
if ACLManager.checkGDriveOwnership(gD, admin, currentACL):
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
return ACLManager.loadErrorJson('status', 0)
|
|
|
|
|
|
2020-06-13 15:30:32 +05:00
|
|
|
gD.delete()
|
|
|
|
|
|
|
|
|
|
data_ret = {'status': 1}
|
|
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
|
|
|
|
except BaseException as msg:
|
|
|
|
|
data_ret = {'status': 0, 'error_message': str(msg)}
|
|
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def changeAccountFrequencygDrive(self, request=None, userID=None, data=None):
|
2020-06-13 15:30:32 +05:00
|
|
|
try:
|
|
|
|
|
|
|
|
|
|
userID = request.session['userID']
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
admin = Administrator.objects.get(pk=userID)
|
|
|
|
|
|
|
|
|
|
data = json.loads(request.body)
|
|
|
|
|
|
|
|
|
|
selectedAccount = data['selectedAccount']
|
|
|
|
|
backupFrequency = data['backupFrequency']
|
|
|
|
|
|
|
|
|
|
gD = GDrive.objects.get(name=selectedAccount)
|
2020-06-20 22:44:55 +05:00
|
|
|
|
|
|
|
|
if ACLManager.checkGDriveOwnership(gD, admin, currentACL):
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
return ACLManager.loadErrorJson('status', 0)
|
|
|
|
|
|
2020-06-13 15:30:32 +05:00
|
|
|
gD.runTime = backupFrequency
|
|
|
|
|
|
|
|
|
|
gD.save()
|
|
|
|
|
|
|
|
|
|
data_ret = {'status': 1}
|
|
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
|
|
|
|
except BaseException as msg:
|
|
|
|
|
data_ret = {'status': 0, 'error_message': str(msg)}
|
|
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def deleteSitegDrive(self, request=None, userID=None, data=None):
|
2020-06-13 15:30:32 +05:00
|
|
|
try:
|
|
|
|
|
|
|
|
|
|
userID = request.session['userID']
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
admin = Administrator.objects.get(pk=userID)
|
|
|
|
|
|
|
|
|
|
data = json.loads(request.body)
|
|
|
|
|
|
|
|
|
|
selectedAccount = data['selectedAccount']
|
|
|
|
|
website = data['website']
|
|
|
|
|
|
|
|
|
|
gD = GDrive.objects.get(name=selectedAccount)
|
2020-06-20 22:44:55 +05:00
|
|
|
|
|
|
|
|
if ACLManager.checkGDriveOwnership(gD, admin, currentACL) == 1 and ACLManager.checkOwnership(website, admin, currentACL) == 1:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
return ACLManager.loadErrorJson('status', 0)
|
|
|
|
|
|
2020-06-13 21:56:37 +05:00
|
|
|
sites = GDriveSites.objects.filter(owner=gD, domain=website)
|
|
|
|
|
|
|
|
|
|
for items in sites:
|
|
|
|
|
items.delete()
|
2020-06-13 15:30:32 +05:00
|
|
|
|
|
|
|
|
data_ret = {'status': 1}
|
|
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
|
|
|
|
except BaseException as msg:
|
|
|
|
|
data_ret = {'status': 0, 'error_message': str(msg)}
|
|
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def restoreSite(self, request=None, userID=None, data=None):
|
2019-03-13 23:05:22 +05:00
|
|
|
try:
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
|
|
|
|
|
if ACLManager.currentContextPermission(currentACL, 'restoreBackup') == 0:
|
|
|
|
|
return ACLManager.loadError()
|
|
|
|
|
|
|
|
|
|
path = os.path.join("/home", "backup")
|
|
|
|
|
|
|
|
|
|
if not os.path.exists(path):
|
|
|
|
|
return render(request, 'backup/restore.html')
|
|
|
|
|
else:
|
|
|
|
|
all_files = []
|
|
|
|
|
ext = ".tar.gz"
|
|
|
|
|
|
|
|
|
|
command = 'sudo chown -R cyberpanel:cyberpanel ' + path
|
2019-03-26 16:19:03 +05:00
|
|
|
ACLManager.executeCall(command)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
|
|
|
|
files = os.listdir(path)
|
|
|
|
|
for filename in files:
|
|
|
|
|
if filename.endswith(ext):
|
|
|
|
|
all_files.append(filename)
|
|
|
|
|
|
|
|
|
|
return render(request, 'backup/restore.html', {'backups': all_files})
|
|
|
|
|
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
return HttpResponse(str(msg))
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def getCurrentBackups(self, userID=None, data=None):
|
2019-03-13 23:05:22 +05:00
|
|
|
try:
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
admin = Administrator.objects.get(pk=userID)
|
|
|
|
|
backupDomain = data['websiteToBeBacked']
|
|
|
|
|
|
|
|
|
|
if ACLManager.checkOwnership(backupDomain, admin, currentACL) == 1:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
return ACLManager.loadErrorJson('fetchStatus', 0)
|
|
|
|
|
|
2019-07-16 23:23:16 +05:00
|
|
|
if ACLManager.checkOwnership(backupDomain, admin, currentACL) == 1:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
return ACLManager.loadErrorJson()
|
|
|
|
|
|
2019-03-13 23:05:22 +05:00
|
|
|
website = Websites.objects.get(domain=backupDomain)
|
|
|
|
|
|
|
|
|
|
backups = website.backups_set.all()
|
|
|
|
|
|
|
|
|
|
json_data = "["
|
|
|
|
|
checker = 0
|
|
|
|
|
|
|
|
|
|
for items in backups:
|
|
|
|
|
if items.status == 0:
|
|
|
|
|
status = "Pending"
|
|
|
|
|
else:
|
|
|
|
|
status = "Completed"
|
|
|
|
|
dic = {'id': items.id,
|
|
|
|
|
'file': items.fileName,
|
|
|
|
|
'date': items.date,
|
|
|
|
|
'size': items.size,
|
|
|
|
|
'status': status
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if checker == 0:
|
|
|
|
|
json_data = json_data + json.dumps(dic)
|
|
|
|
|
checker = 1
|
|
|
|
|
else:
|
|
|
|
|
json_data = json_data + ',' + json.dumps(dic)
|
|
|
|
|
|
|
|
|
|
json_data = json_data + ']'
|
|
|
|
|
final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None", "data": json_data})
|
|
|
|
|
return HttpResponse(final_json)
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
|
|
|
|
|
final_json = json.dumps(final_dic)
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def submitBackupCreation(self, userID=None, data=None):
|
2019-03-13 23:05:22 +05:00
|
|
|
try:
|
|
|
|
|
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
admin = Administrator.objects.get(pk=userID)
|
|
|
|
|
backupDomain = data['websiteToBeBacked']
|
|
|
|
|
website = Websites.objects.get(domain=backupDomain)
|
|
|
|
|
|
|
|
|
|
if ACLManager.checkOwnership(backupDomain, admin, currentACL) == 1:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
return ACLManager.loadErrorJson('metaStatus', 0)
|
|
|
|
|
|
|
|
|
|
## defining paths
|
|
|
|
|
|
|
|
|
|
## /home/example.com/backup
|
|
|
|
|
backupPath = os.path.join("/home", backupDomain, "backup/")
|
2019-10-08 13:17:33 -04:00
|
|
|
backupDomainName = data['websiteToBeBacked']
|
|
|
|
|
backupName = 'backup-' + backupDomainName + "-" + time.strftime("%m.%d.%Y_%H-%M-%S")
|
2019-03-13 23:05:22 +05:00
|
|
|
|
2019-10-08 13:17:33 -04:00
|
|
|
## /home/example.com/backup/backup-example.com-02.13.2018_10-24-52
|
2019-03-13 23:05:22 +05:00
|
|
|
tempStoragePath = os.path.join(backupPath, backupName)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
p = Process(target=backupUtil.submitBackupCreation,
|
|
|
|
|
args=(tempStoragePath, backupName, backupPath, backupDomain))
|
2019-07-16 23:23:16 +05:00
|
|
|
p.start()
|
2019-03-13 23:05:22 +05:00
|
|
|
|
|
|
|
|
time.sleep(2)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
final_json = json.dumps(
|
|
|
|
|
{'status': 1, 'metaStatus': 1, 'error_message': "None", 'tempStorage': tempStoragePath})
|
2019-03-13 23:05:22 +05:00
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
logging.CyberCPLogFileWriter.writeToFile(str(msg))
|
|
|
|
|
final_dic = {'status': 0, 'metaStatus': 0, 'error_message': str(msg)}
|
|
|
|
|
final_json = json.dumps(final_dic)
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def backupStatus(self, userID=None, data=None):
|
2019-03-13 23:05:22 +05:00
|
|
|
try:
|
|
|
|
|
|
|
|
|
|
backupDomain = data['websiteToBeBacked']
|
|
|
|
|
status = os.path.join("/home", backupDomain, "backup/status")
|
|
|
|
|
backupFileNamePath = os.path.join("/home", backupDomain, "backup/backupFileName")
|
|
|
|
|
pid = os.path.join("/home", backupDomain, "backup/pid")
|
|
|
|
|
|
2019-07-16 23:23:16 +05:00
|
|
|
domain = Websites.objects.get(domain=backupDomain)
|
|
|
|
|
|
2019-03-13 23:05:22 +05:00
|
|
|
## read file name
|
|
|
|
|
|
|
|
|
|
try:
|
2019-03-26 16:19:03 +05:00
|
|
|
command = "sudo cat " + backupFileNamePath
|
2019-07-16 23:23:16 +05:00
|
|
|
fileName = ProcessUtilities.outputExecutioner(command, domain.externalApp)
|
2019-09-11 15:20:32 +05:00
|
|
|
if fileName.find('No such file or directory') > -1:
|
|
|
|
|
final_json = json.dumps({'backupStatus': 0, 'error_message': "None", "status": 0, "abort": 0})
|
|
|
|
|
return HttpResponse(final_json)
|
2019-03-13 23:05:22 +05:00
|
|
|
except:
|
|
|
|
|
fileName = "Fetching.."
|
|
|
|
|
|
|
|
|
|
## file name read ends
|
|
|
|
|
|
|
|
|
|
if os.path.exists(status):
|
2019-03-26 16:19:03 +05:00
|
|
|
command = "sudo cat " + status
|
2019-07-16 23:23:16 +05:00
|
|
|
status = ProcessUtilities.outputExecutioner(command, domain.externalApp)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
|
|
|
|
if status.find("Completed") > -1:
|
|
|
|
|
|
|
|
|
|
### Removing Files
|
|
|
|
|
|
2019-03-26 16:19:03 +05:00
|
|
|
command = 'sudo rm -f ' + status
|
2019-07-16 23:23:16 +05:00
|
|
|
ProcessUtilities.executioner(command, domain.externalApp)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
2019-03-26 16:19:03 +05:00
|
|
|
command = 'sudo rm -f ' + backupFileNamePath
|
2019-07-16 23:23:16 +05:00
|
|
|
ProcessUtilities.executioner(command, domain.externalApp)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
2019-03-26 16:19:03 +05:00
|
|
|
command = 'sudo rm -f ' + pid
|
2019-07-16 23:23:16 +05:00
|
|
|
ProcessUtilities.executioner(command, domain.externalApp)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
|
|
|
|
final_json = json.dumps(
|
|
|
|
|
{'backupStatus': 1, 'error_message': "None", "status": status, "abort": 1,
|
|
|
|
|
'fileName': fileName, })
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
|
|
|
|
elif status.find("[5009]") > -1:
|
|
|
|
|
## removing status file, so that backup can re-run
|
|
|
|
|
try:
|
2019-03-26 16:19:03 +05:00
|
|
|
command = 'sudo rm -f ' + status
|
2019-07-16 23:23:16 +05:00
|
|
|
ProcessUtilities.executioner(command, domain.externalApp)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
2019-03-26 16:19:03 +05:00
|
|
|
command = 'sudo rm -f ' + backupFileNamePath
|
2019-07-16 23:23:16 +05:00
|
|
|
ProcessUtilities.executioner(command, domain.externalApp)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
2019-03-26 16:19:03 +05:00
|
|
|
command = 'sudo rm -f ' + pid
|
2019-07-16 23:23:16 +05:00
|
|
|
ProcessUtilities.executioner(command, domain.externalApp)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
|
|
|
|
backupObs = Backups.objects.filter(fileName=fileName)
|
|
|
|
|
for items in backupObs:
|
|
|
|
|
items.delete()
|
|
|
|
|
|
|
|
|
|
except:
|
|
|
|
|
pass
|
|
|
|
|
|
|
|
|
|
final_json = json.dumps(
|
|
|
|
|
{'backupStatus': 1, 'fileName': fileName, 'error_message': "None", "status": status,
|
|
|
|
|
"abort": 1})
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
else:
|
|
|
|
|
final_json = json.dumps(
|
|
|
|
|
{'backupStatus': 1, 'error_message': "None", 'fileName': fileName, "status": status,
|
|
|
|
|
"abort": 0})
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
else:
|
|
|
|
|
final_json = json.dumps({'backupStatus': 0, 'error_message': "None", "status": 0, "abort": 0})
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
final_dic = {'backupStatus': 0, 'error_message': str(msg)}
|
|
|
|
|
final_json = json.dumps(final_dic)
|
|
|
|
|
logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [backupStatus]")
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def cancelBackupCreation(self, userID=None, data=None):
|
2019-03-13 23:05:22 +05:00
|
|
|
try:
|
|
|
|
|
|
|
|
|
|
backupCancellationDomain = data['backupCancellationDomain']
|
|
|
|
|
fileName = data['fileName']
|
|
|
|
|
|
2019-12-10 23:04:24 +05:00
|
|
|
execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/backupUtilities.py"
|
2019-03-13 23:05:22 +05:00
|
|
|
execPath = execPath + " cancelBackupCreation --backupCancellationDomain " + backupCancellationDomain + " --fileName " + fileName
|
2019-03-26 16:19:03 +05:00
|
|
|
subprocess.call(shlex.split(execPath))
|
2019-03-13 23:05:22 +05:00
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
backupOb = Backups.objects.get(fileName=fileName)
|
|
|
|
|
backupOb.delete()
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [cancelBackupCreation]")
|
|
|
|
|
|
|
|
|
|
final_json = json.dumps({'abortStatus': 1, 'error_message': "None", "status": 0})
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
final_dic = {'abortStatus': 0, 'error_message': str(msg)}
|
|
|
|
|
final_json = json.dumps(final_dic)
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def deleteBackup(self, userID=None, data=None):
|
2019-03-13 23:05:22 +05:00
|
|
|
try:
|
|
|
|
|
backupID = data['backupID']
|
|
|
|
|
backup = Backups.objects.get(id=backupID)
|
|
|
|
|
|
|
|
|
|
domainName = backup.website.domain
|
2019-07-16 23:23:16 +05:00
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
admin = Administrator.objects.get(pk=userID)
|
|
|
|
|
if ACLManager.checkOwnership(domainName, admin, currentACL) == 1:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
return ACLManager.loadErrorJson()
|
2019-03-13 23:05:22 +05:00
|
|
|
|
|
|
|
|
path = "/home/" + domainName + "/backup/" + backup.fileName + ".tar.gz"
|
2019-03-26 16:19:03 +05:00
|
|
|
command = 'sudo rm -f ' + path
|
2019-03-31 02:47:35 +05:00
|
|
|
ProcessUtilities.executioner(command)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
|
|
|
|
backup.delete()
|
|
|
|
|
|
|
|
|
|
final_json = json.dumps({'status': 1, 'deleteStatus': 1, 'error_message': "None"})
|
|
|
|
|
return HttpResponse(final_json)
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
final_dic = {'status': 0, 'deleteStatus': 0, 'error_message': str(msg)}
|
|
|
|
|
final_json = json.dumps(final_dic)
|
|
|
|
|
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def submitRestore(self, data=None, userID=None):
|
2019-03-13 23:05:22 +05:00
|
|
|
try:
|
|
|
|
|
backupFile = data['backupFile']
|
|
|
|
|
originalFile = "/home/backup/" + backupFile
|
|
|
|
|
|
|
|
|
|
if not os.path.exists(originalFile):
|
|
|
|
|
dir = data['dir']
|
|
|
|
|
else:
|
|
|
|
|
dir = "CyberPanelRestore"
|
|
|
|
|
|
2019-07-16 23:23:16 +05:00
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
if currentACL['admin'] == 1:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
return ACLManager.loadErrorJson()
|
|
|
|
|
|
2019-12-10 23:04:24 +05:00
|
|
|
execPath = "sudo nice -n 10 /usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/backupUtilities.py"
|
2019-03-13 23:05:22 +05:00
|
|
|
execPath = execPath + " submitRestore --backupFile " + backupFile + " --dir " + dir
|
2019-03-21 23:26:42 +05:00
|
|
|
ProcessUtilities.popenExecutioner(execPath)
|
2019-03-13 23:05:22 +05:00
|
|
|
time.sleep(4)
|
|
|
|
|
|
|
|
|
|
final_dic = {'restoreStatus': 1, 'error_message': "None"}
|
|
|
|
|
final_json = json.dumps(final_dic)
|
|
|
|
|
return HttpResponse(final_json)
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
final_dic = {'restoreStatus': 0, 'error_message': str(msg)}
|
|
|
|
|
final_json = json.dumps(final_dic)
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def restoreStatus(self, data=None):
|
2019-03-13 23:05:22 +05:00
|
|
|
try:
|
|
|
|
|
backupFile = data['backupFile'].strip(".tar.gz")
|
|
|
|
|
|
|
|
|
|
path = os.path.join("/home", "backup", data['backupFile'])
|
|
|
|
|
|
|
|
|
|
if os.path.exists(path):
|
|
|
|
|
path = os.path.join("/home", "backup", backupFile)
|
|
|
|
|
elif os.path.exists(data['backupFile']):
|
|
|
|
|
path = data['backupFile'].strip(".tar.gz")
|
|
|
|
|
else:
|
|
|
|
|
dir = data['dir']
|
|
|
|
|
path = "/home/backup/transfer-" + str(dir) + "/" + backupFile
|
|
|
|
|
|
|
|
|
|
if os.path.exists(path):
|
|
|
|
|
try:
|
|
|
|
|
execPath = "sudo cat " + path + "/status"
|
2019-03-31 02:47:35 +05:00
|
|
|
status = ProcessUtilities.outputExecutioner(execPath)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
|
|
|
|
if status.find("Done") > -1:
|
|
|
|
|
|
2019-03-26 16:19:03 +05:00
|
|
|
command = "sudo rm -rf " + path
|
2019-03-31 02:47:35 +05:00
|
|
|
ProcessUtilities.executioner(command)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
|
|
|
|
final_json = json.dumps(
|
|
|
|
|
{'restoreStatus': 1, 'error_message': "None", "status": status, 'abort': 1,
|
|
|
|
|
'running': 'Completed'})
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
elif status.find("[5009]") > -1:
|
|
|
|
|
## removing temporarily generated files while restoring
|
2019-03-26 16:19:03 +05:00
|
|
|
command = "sudo rm -rf " + path
|
2019-03-31 02:47:35 +05:00
|
|
|
ProcessUtilities.executioner(command)
|
2019-03-13 23:05:22 +05:00
|
|
|
final_json = json.dumps({'restoreStatus': 1, 'error_message': "None",
|
|
|
|
|
"status": status, 'abort': 1, 'alreadyRunning': 0,
|
|
|
|
|
'running': 'Error'})
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
else:
|
|
|
|
|
final_json = json.dumps(
|
|
|
|
|
{'restoreStatus': 1, 'error_message': "None", "status": status, 'abort': 0,
|
|
|
|
|
'running': 'Running..'})
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
logging.CyberCPLogFileWriter.writeToFile(str(msg))
|
|
|
|
|
status = "Just Started"
|
|
|
|
|
final_json = json.dumps(
|
|
|
|
|
{'restoreStatus': 1, 'error_message': "None", "status": status, 'abort': 0,
|
|
|
|
|
'running': 'Running..'})
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
else:
|
|
|
|
|
final_json = json.dumps(
|
|
|
|
|
{'restoreStatus': 1, 'error_message': "None", "status": "OK To Run", 'running': 'Halted',
|
|
|
|
|
'abort': 1})
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
final_dic = {'restoreStatus': 0, 'error_message': str(msg)}
|
|
|
|
|
final_json = json.dumps(final_dic)
|
|
|
|
|
return HttpResponse(final_json)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def backupDestinations(self, request=None, userID=None, data=None):
|
2019-03-13 23:05:22 +05:00
|
|
|
try:
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
|
|
|
|
|
if ACLManager.currentContextPermission(currentACL, 'addDeleteDestinations') == 0:
|
|
|
|
|
return ACLManager.loadError()
|
|
|
|
|
|
|
|
|
|
return render(request, 'backup/backupDestinations.html', {})
|
|
|
|
|
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
return HttpResponse(str(msg))
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def submitDestinationCreation(self, userID=None, data=None):
    """Create a backup destination.

    For SFTP destinations this first runs the key-exchange helper
    (plogical/backupUtilities.py submitDestinationCreation) and only saves a
    NormalBackupDests record when the helper reports success ('1,' in output).
    Local destinations are saved directly under the fixed name 'local'.

    Returns an HttpResponse whose JSON carries 'status'/'destStatus' flags.
    """
    try:
        currentACL = ACLManager.loadedACL(userID)

        if ACLManager.currentContextPermission(currentACL, 'addDeleteDestinations') == 0:
            return ACLManager.loadErrorJson('destStatus', 0)

        finalDic = {}

        if data['type'] == 'SFTP':

            finalDic['ipAddress'] = data['IPAddress']
            finalDic['password'] = data['password']

            # Optional fields fall back to sensible defaults.
            try:
                finalDic['port'] = data['backupSSHPort']
            except:
                finalDic['port'] = "22"

            try:
                finalDic['user'] = data['user']
            except:
                finalDic['user'] = "root"

            # SECURITY: these values come from the request; quote each one so
            # they cannot inject extra shell words into the helper invocation.
            execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/backupUtilities.py"
            execPath = execPath + " submitDestinationCreation --ipAddress %s --password %s --port %s --user %s" % (
                shlex.quote(str(finalDic['ipAddress'])), shlex.quote(str(finalDic['password'])),
                shlex.quote(str(finalDic['port'])), shlex.quote(str(finalDic['user'])))

            if os.path.exists(ProcessUtilities.debugPath):
                logging.CyberCPLogFileWriter.writeToFile(execPath)

            output = ProcessUtilities.outputExecutioner(execPath)

            if os.path.exists(ProcessUtilities.debugPath):
                logging.CyberCPLogFileWriter.writeToFile(output)

            if output.find('1,') > -1:
                # Use finalDic['port'] so the "22" fallback applies here too,
                # instead of raising KeyError after the remote setup succeeded.
                # NOTE(review): config stores data['userName'] while the helper
                # was invoked with data['user'] — confirm which the frontend sends.
                config = {'type': data['type'], 'ip': data['IPAddress'], 'username': data['userName'],
                          'port': finalDic['port'], 'path': data['path']}

                nd = NormalBackupDests(name=data['name'], config=json.dumps(config))
                nd.save()

                final_dic = {'status': 1, 'destStatus': 1, 'error_message': "None"}
                final_json = json.dumps(final_dic)
                return HttpResponse(final_json)
            else:
                final_dic = {'status': 0, 'destStatus': 0, 'error_message': output}
                final_json = json.dumps(final_dic)
                return HttpResponse(final_json)
        else:
            # Local destination: nothing to provision, just record the path.
            config = {'type': data['type'], 'path': data['path']}
            nd = NormalBackupDests(name='local', config=json.dumps(config))
            nd.save()

            final_dic = {'status': 1, 'destStatus': 1, 'error_message': "None"}
            final_json = json.dumps(final_dic)
            return HttpResponse(final_json)

    except BaseException as msg:
        final_dic = {'status': 0, 'destStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)
2020-06-13 19:43:16 +05:00
|
|
|
def getCurrentBackupDestinations(self, userID=None, data=None):
    """List saved backup destinations as a JSON-encoded array.

    SFTP destinations (when data['type'] == 'SFTP') include connection details;
    every other record is reported with just its name and path.

    The 'data' field of the response is itself a JSON string, matching what the
    frontend expects; previously it was assembled by manual string concatenation,
    which is fragile — build a list and serialize it instead.
    """
    try:
        currentACL = ACLManager.loadedACL(userID)

        if ACLManager.currentContextPermission(currentACL, 'addDeleteDestinations') == 0:
            return ACLManager.loadErrorJson('fetchStatus', 0)

        destinations = NormalBackupDests.objects.all()

        records = []
        for items in destinations:
            config = json.loads(items.config)

            if config['type'] == data['type'] and data['type'] == 'SFTP':
                records.append({
                    'name': items.name,
                    'ip': config['ip'],
                    'username': config['username'],
                    'path': config['path'],
                    'port': config['port'],
                })
            else:
                records.append({
                    'name': items.name,
                    'path': config['path'],
                })

        # Keep the double-encoded shape ('data' is a JSON string) for the caller.
        json_data = json.dumps(records)

        final_json = json.dumps({'status': 1, 'error_message': "None", "data": json_data})
        return HttpResponse(final_json)

    except BaseException as msg:
        final_dic = {'status': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)
2020-06-13 19:43:16 +05:00
|
|
|
def getConnectionStatus(self, userID=None, data=None):
    """Check SSH reachability of a destination via the backupUtilities helper.

    Returns JSON with 'connStatus' 1 on success, 0 on failure.

    Fixes: the exception handler previously returned 'connStatus': 1, reporting
    success on error; the request-supplied IP is now shell-quoted.
    """
    try:
        currentACL = ACLManager.loadedACL(userID)

        if ACLManager.currentContextPermission(currentACL, 'addDeleteDestinations') == 0:
            return ACLManager.loadErrorJson('connStatus', 0)

        ipAddress = data['IPAddress']

        # SECURITY: quote the user-supplied address before embedding it in a
        # shell command line.
        execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/backupUtilities.py"
        execPath = execPath + " getConnectionStatus --ipAddress " + shlex.quote(str(ipAddress))

        output = ProcessUtilities.executioner(execPath)

        # Helper scripts in this codebase signal success with a '1,' prefix.
        if output.find('1,') > -1:
            final_dic = {'connStatus': 1, 'error_message': "None"}
            final_json = json.dumps(final_dic)
            return HttpResponse(final_json)
        else:
            final_dic = {'connStatus': 0, 'error_message': output}
            final_json = json.dumps(final_dic)
            return HttpResponse(final_json)

    except BaseException as msg:
        # Bug fix: this path used to report 'connStatus': 1 despite the failure.
        final_dic = {'connStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)
2020-06-13 19:43:16 +05:00
|
|
|
def deleteDestination(self, userID=None, data=None):
    """Delete a backup destination record.

    SFTP destinations are looked up by name; local destinations are matched by
    path among the records named 'local'.

    Fixes: the exception handler previously returned 'delStatus': 1 alongside
    'status': 0, claiming success on error; the local variable no longer
    shadows the builtin `type`.
    """
    try:
        currentACL = ACLManager.loadedACL(userID)

        if ACLManager.currentContextPermission(currentACL, 'addDeleteDestinations') == 0:
            return ACLManager.loadErrorJson('delStatus', 0)

        nameOrPath = data['nameOrPath']
        destType = data['type']

        if destType == 'SFTP':
            NormalBackupDests.objects.get(name=nameOrPath).delete()
        else:
            # Local destinations all share the name 'local'; identify the one
            # to remove by its configured path.
            dests = NormalBackupDests.objects.filter(name='local')
            for items in dests:
                config = json.loads(items.config)
                if config['path'] == nameOrPath:
                    items.delete()
                    break

        final_dic = {'status': 1, 'delStatus': 1, 'error_message': "None"}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

    except BaseException as msg:
        # Bug fix: used to return 'delStatus': 1 on this error path.
        final_dic = {'status': 0, 'delStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)
2020-06-13 19:43:16 +05:00
|
|
|
def scheduleBackup(self, request, userID=None, data=None):
    """Render the backup-schedule page with the list of known destinations.

    Ensures a default "Home" destination record exists before listing.
    """
    try:
        acl = ACLManager.loadedACL(userID)
        if ACLManager.currentContextPermission(acl, 'scheDuleBackups') == 0:
            return ACLManager.loadError()

        # Seed the default "Home" destination if the table is (nearly) empty;
        # failures here are deliberately ignored (best effort).
        if dest.objects.all().count() <= 1:
            try:
                dest(destLoc="Home").save()
            except:
                pass

        destinations = [record.destLoc for record in dest.objects.all()]

        return render(request, 'backup/backupSchedule.html', {'destinations': destinations})
    except BaseException as msg:
        return HttpResponse(str(msg))
2020-06-13 19:43:16 +05:00
|
|
|
def getCurrentBackupSchedules(self, userID=None, data=None):
    """Return all backup schedules as JSON.

    The 'data' field of the response is itself a JSON string (the frontend
    parses it separately). Previously assembled by manual string concatenation,
    which is fragile — build a list and serialize it instead.
    """
    try:
        currentACL = ACLManager.loadedACL(userID)

        if ACLManager.currentContextPermission(currentACL, 'scheDuleBackups') == 0:
            return ACLManager.loadErrorJson('fetchStatus', 0)

        schedules = [
            {
                'id': items.id,
                'destLoc': items.dest.destLoc,
                'frequency': items.frequency,
            }
            for items in backupSchedules.objects.all()
        ]

        # Keep the double-encoded shape ('data' is a JSON string) for the caller.
        json_data = json.dumps(schedules)

        final_json = json.dumps({'fetchStatus': 1, 'error_message': "None", "data": json_data})
        return HttpResponse(final_json)

    except BaseException as msg:
        final_dic = {'fetchStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)
2020-06-13 19:43:16 +05:00
|
|
|
def submitBackupSchedule(self, userID=None, data=None):
    """Install a scheduled backup: append a cron line to /etc/crontab and
    record the schedule in the database.

    data keys used: 'backupDest', 'backupFreq', and 'localPath' (only read
    when backupDest == "Home").

    Two near-duplicate code paths exist: the inner `try` handles the case
    where a backupSchedules row for this frequency already exists (rejecting
    exact duplicates), and its `except` handles the first schedule for that
    frequency. The duplicated bodies differ subtly (plain `mv`/`systemctl`
    vs `sudo`-prefixed, and ordering of the save vs the local-path write) —
    preserved as-is.

    Returns an HttpResponse with JSON {'scheduleStatus': 0|1, 'error_message'}.
    """
    try:
        backupDest = data['backupDest']
        backupFreq = data['backupFreq']

        currentACL = ACLManager.loadedACL(userID)

        if ACLManager.currentContextPermission(currentACL, 'scheDuleBackups') == 0:
            return ACLManager.loadErrorJson('scheduleStatus', 0)

        path = "/etc/crontab"

        ## check if already exists
        try:
            # Raises DoesNotExist when no schedule with this frequency is
            # recorded yet; that sends control to the except branch below.
            schedule = backupSchedules.objects.get(frequency=backupFreq)

            if schedule.dest.destLoc == backupDest:
                # Exact duplicate (same destination + frequency): reject.
                final_json = json.dumps(
                    {'scheduleStatus': 0, 'error_message': "This schedule already exists"})
                return HttpResponse(final_json)
            else:
                # Pick the cron line: "Home" schedules run backupScheduleLocal.py,
                # remote ones run backupSchedule.py; daily at 03:00, weekly Sun 00:00.
                # NOTE(review): if backupFreq is neither "Daily" nor "Weekly",
                # cronJob is unbound and the outer except reports the NameError.
                if backupDest == "Home" and backupFreq == "Daily":
                    cronJob = "0 3 * * * root /usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/backupScheduleLocal.py"
                elif backupDest == "Home" and backupFreq == "Weekly":
                    cronJob = "0 0 * * 0 root /usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/backupScheduleLocal.py "
                elif backupDest != "Home" and backupFreq == "Daily":
                    cronJob = "0 3 * * * root /usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/backupSchedule.py"
                elif backupDest != "Home" and backupFreq == "Weekly":
                    cronJob = "0 0 * * 0 root /usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/backupSchedule.py "

                # Read the existing crontab, append the new line, and write the
                # result to a temp file that is then moved into place as root.
                command = "cat " + path
                output = ProcessUtilities.outputExecutioner(command)

                finalCronJob = output + cronJob
                tempCronPath = "/home/cyberpanel/" + str(randint(1000, 9999))

                writeToFile = open(tempCronPath, 'a')
                writeToFile.writelines(finalCronJob + "\n")
                writeToFile.close()

                command = "mv " + tempCronPath + " " + path
                ProcessUtilities.executioner(command)

                # Restore root ownership lost by writing via a temp file.
                command = 'chown root:root %s' % (path)
                ProcessUtilities.executioner(command)

                command = "systemctl restart crond"
                ProcessUtilities.executioner(command)

                ## Set local path for backup

                if backupDest == "Home":
                    # Persist where local backups should be stored; read later
                    # by the scheduled backup script.
                    writeToFile = open(BackupManager.localBackupPath, 'w')
                    writeToFile.write(data['localPath'])
                    writeToFile.close()

                destination = dest.objects.get(destLoc=backupDest)
                newSchedule = backupSchedules(dest=destination, frequency=backupFreq)
                newSchedule.save()

                final_json = json.dumps({'scheduleStatus': 1, 'error_message': "None"})
                return HttpResponse(final_json)
        except:
            # First schedule for this frequency — same flow as above, but the
            # original used sudo-prefixed mv/systemctl here; kept unchanged.
            if backupDest == "Home" and backupFreq == "Daily":
                cronJob = "0 3 * * * root /usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/backupScheduleLocal.py"
            elif backupDest == "Home" and backupFreq == "Weekly":
                cronJob = "0 0 * * 0 root /usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/backupScheduleLocal.py "
            elif backupDest != "Home" and backupFreq == "Daily":
                cronJob = "0 3 * * * root /usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/backupSchedule.py"
            elif backupDest != "Home" and backupFreq == "Weekly":
                cronJob = "0 0 * * 0 root /usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/backupSchedule.py "

            command = "cat " + path
            output = ProcessUtilities.outputExecutioner(command)

            finalCronJob = output + cronJob
            tempCronPath = "/home/cyberpanel/" + str(randint(1000, 9999))

            writeToFile = open(tempCronPath, 'a')
            writeToFile.writelines(finalCronJob + "\n")
            writeToFile.close()

            command = "sudo mv " + tempCronPath + " " + path
            ProcessUtilities.executioner(command)

            command = 'chown root:root %s' % (path)
            ProcessUtilities.executioner(command)

            command = "sudo systemctl restart crond"
            ProcessUtilities.executioner(command)

            destination = dest.objects.get(destLoc=backupDest)
            newSchedule = backupSchedules(dest=destination, frequency=backupFreq)
            newSchedule.save()

            ## Set local path for backup

            if backupDest == "Home":
                writeToFile = open(BackupManager.localBackupPath, 'w')
                writeToFile.write(data['localPath'])
                writeToFile.close()

            final_json = json.dumps({'scheduleStatus': 1, 'error_message': "None"})
            return HttpResponse(final_json)

    except BaseException as msg:
        final_json = json.dumps({'scheduleStatus': 0, 'error_message': str(msg)})
        return HttpResponse(final_json)
2020-06-13 19:43:16 +05:00
|
|
|
def scheduleDelete(self, userID=None, data=None):
    """Remove a scheduled backup: strip its cron line from /etc/crontab and
    delete the matching backupSchedules record.

    data keys used: 'destLoc', 'frequency'.

    Fixes: the second filter condition was missing "> -1". str.find() returns
    -1 (which is truthy) when the needle is absent, so any crontab line that
    matched the time prefix was dropped regardless of which script it ran.
    """
    try:
        currentACL = ACLManager.loadedACL(userID)

        if ACLManager.currentContextPermission(currentACL, 'scheDuleBackups') == 0:
            return ACLManager.loadErrorJson('scheduleStatus', 0)

        backupDest = data['destLoc']
        backupFreq = data['frequency']
        findTxt = ""

        # Cron minute/hour prefix used to locate the schedule's line:
        # daily jobs are installed at "0 3", weekly jobs at "0 0".
        if backupDest == "Home" and backupFreq == "Daily":
            findTxt = "0 3"
        elif backupDest == "Home" and backupFreq == "Weekly":
            findTxt = "0 0"
        elif backupDest != "Home" and backupFreq == "Daily":
            findTxt = "0 3"
        elif backupDest != "Home" and backupFreq == "Weekly":
            findTxt = "0 0"

        ###

        logging.CyberCPLogFileWriter.writeToFile(findTxt)
        logging.CyberCPLogFileWriter.writeToFile(backupFreq)

        path = "/etc/crontab"

        # Copy the crontab, omitting the schedule's line, then move the copy
        # back into place as root.
        command = "cat " + path
        output = ProcessUtilities.outputExecutioner(command).split('\n')
        tempCronPath = "/home/cyberpanel/" + str(randint(1000, 9999))

        writeToFile = open(tempCronPath, 'w')

        for items in output:
            # Bug fix: '> -1' added to the backupSchedule.py test (see docstring).
            if (items.find(findTxt) > -1 and items.find("backupScheduleLocal.py") > -1) or (
                    items.find(findTxt) > -1 and items.find('backupSchedule.py') > -1):
                continue
            else:
                writeToFile.writelines(items + '\n')

        writeToFile.close()

        command = "sudo mv " + tempCronPath + " " + path
        ProcessUtilities.executioner(command)

        # Restore root ownership lost by writing via a temp file.
        command = 'chown root:root %s' % (path)
        ProcessUtilities.executioner(command)

        command = "sudo systemctl restart crond"
        ProcessUtilities.executioner(command)

        destination = dest.objects.get(destLoc=backupDest)
        newSchedule = backupSchedules.objects.get(dest=destination, frequency=backupFreq)
        newSchedule.delete()

        final_json = json.dumps({'delStatus': 1, 'error_message': "None"})
        return HttpResponse(final_json)

    except BaseException as msg:
        final_json = json.dumps({'delStatus': 0, 'error_message': str(msg)})
        return HttpResponse(final_json)
2020-06-13 19:43:16 +05:00
|
|
|
def remoteBackups(self, request, userID=None, data=None):
    """Render the Remote Backups page for users with the remoteBackups ACL."""
    try:
        acl = ACLManager.loadedACL(userID)
        if ACLManager.currentContextPermission(acl, 'remoteBackups') == 0:
            return ACLManager.loadError()

        return render(request, 'backup/remoteBackups.html')
    except BaseException as msg:
        # Convention in this module: surface the raw error text to the caller.
        return HttpResponse(str(msg))
2020-06-13 19:43:16 +05:00
|
|
|
def submitRemoteBackups(self, userID=None, data=None):
    """Handshake with a remote CyberPanel server prior to a remote transfer.

    Four stages, each of which can short-circuit with an error response:
      1. Check the remote panel's version via /api/cyberPanelVersion.
      2. Fetch the remote server's SSH public key via /api/fetchSSHkey.
      3. Install that key locally via remoteTransferUtilities.py writeAuthKey.
      4. Fetch the remote account list via /api/fetchAccountsFromRemoteServer.

    data keys used: 'ipAddress', 'password' (remote admin password). Note that
    the local name `data` is deliberately rebound to each API response after
    the request parameters have been read.

    Returns an HttpResponse with JSON carrying 'status' and, on success, the
    remote account list under 'data'.
    """
    try:
        currentACL = ACLManager.loadedACL(userID)

        if ACLManager.currentContextPermission(currentACL, 'remoteBackups') == 0:
            return ACLManager.loadErrorJson()

        ipAddress = data['ipAddress']
        password = data['password']

        ## Ask for Remote version of CyberPanel

        try:
            finalData = json.dumps({'username': "admin", "password": password})

            url = "https://" + ipAddress + ":8090/api/cyberPanelVersion"

            # NOTE(review): verify=False disables TLS certificate checking —
            # presumably because panels use self-signed certs; confirm.
            r = requests.post(url, data=finalData, verify=False)

            # `data` now holds the remote response, not the request args.
            data = json.loads(r.text)

            if data['getVersion'] == 1:

                # Require remote version >= 1.6; otherwise refuse the transfer.
                if float(data['currentVersion']) >= 1.6 and data['build'] >= 0:
                    pass
                else:
                    data_ret = {'status': 0,
                                'error_message': "Your version does not match with version of remote server.",
                                "dir": "Null"}
                    data_ret = json.dumps(data_ret)
                    return HttpResponse(data_ret)
            else:
                data_ret = {'status': 0,
                            'error_message': "Not able to fetch version of remote server. Error Message: " +
                                             data['error_message'], "dir": "Null"}
                data_ret = json.dumps(data_ret)
                return HttpResponse(data_ret)

        except BaseException as msg:
            data_ret = {'status': 0,
                        'error_message': "Not able to fetch version of remote server. Error Message: " + str(
                            msg),
                        "dir": "Null"}
            data_ret = json.dumps(data_ret)
            return HttpResponse(data_ret)

        ## Fetch public key of remote server!

        finalData = json.dumps({'username': "admin", "password": password})

        url = "https://" + ipAddress + ":8090/api/fetchSSHkey"
        r = requests.post(url, data=finalData, verify=False)
        data = json.loads(r.text)

        if data['pubKeyStatus'] == 1:
            pubKey = data["pubKey"].strip("\n")
        else:
            final_json = json.dumps({'status': 0,
                                     'error_message': "I am sorry, I could not fetch key from remote server. Error Message: " +
                                                      data['error_message']
                                     })
            return HttpResponse(final_json)

        ## write key

        ## Writing key to a temporary location, to be read later by backup process.

        mailUtilities.checkHome()

        # Random temp filename under /home/cyberpanel for the fetched key.
        pathToKey = "/home/cyberpanel/" + str(randint(1000, 9999))

        vhost = open(pathToKey, "w")
        vhost.write(pubKey)
        vhost.close()

        ##

        # Helper installs the key into authorized_keys; '1,None' marks success.
        execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/remoteTransferUtilities.py"
        execPath = execPath + " writeAuthKey --pathToKey " + pathToKey
        output = ProcessUtilities.outputExecutioner(execPath)

        if output.find("1,None") > -1:
            pass
        else:
            final_json = json.dumps({'status': 0, 'error_message': output})
            return HttpResponse(final_json)

        ##

        try:
            finalData = json.dumps({'username': "admin", "password": password})

            url = "https://" + ipAddress + ":8090/api/fetchAccountsFromRemoteServer"

            r = requests.post(url, data=finalData, verify=False)

            data = json.loads(r.text)

            if data['fetchStatus'] == 1:
                json_data = data['data']
                data_ret = {'status': 1, 'error_message': "None",
                            "dir": "Null", 'data': json_data}
                data_ret = json.dumps(data_ret)
                return HttpResponse(data_ret)
            else:
                data_ret = {'status': 0,
                            'error_message': "Not able to fetch accounts from remote server. Error Message: " +
                                             data['error_message'], "dir": "Null"}
                data_ret = json.dumps(data_ret)
                return HttpResponse(data_ret)
        except BaseException as msg:
            data_ret = {'status': 0,
                        'error_message': "Not able to fetch accounts from remote server. Error Message: " + str(
                            msg), "dir": "Null"}
            data_ret = json.dumps(data_ret)
            return HttpResponse(data_ret)

    except BaseException as msg:
        final_json = json.dumps({'status': 0, 'error_message': str(msg)})
        return HttpResponse(final_json)
2020-06-13 19:43:16 +05:00
|
|
|
def starRemoteTransfer(self, userID=None, data=None):
    """Ask the remote server to begin transferring the selected accounts to
    this server, and prepare the local directory that will receive them.

    data keys used: 'ipAddress', 'password', 'accountsToTransfer'. The local
    name `data` is rebound to the remote API response after these are read.

    Fixes: the handle for /etc/cyberpanel/machineIP was opened and never
    closed (resource leak) — now uses a context manager.
    """
    try:
        currentACL = ACLManager.loadedACL(userID)

        if ACLManager.currentContextPermission(currentACL, 'remoteBackups') == 0:
            return ACLManager.loadErrorJson('remoteTransferStatus', 0)

        ipAddress = data['ipAddress']
        password = data['password']
        accountsToTransfer = data['accountsToTransfer']

        try:
            # This server's own IP, sent so the remote side knows where to
            # push the backups.
            ipFile = os.path.join("/etc", "cyberpanel", "machineIP")
            with open(ipFile) as f:
                ownIP = f.read()

            finalData = json.dumps({'username': "admin", "password": password, "ipAddress": ownIP,
                                    "accountsToTransfer": accountsToTransfer})

            url = "https://" + ipAddress + ":8090/api/remoteTransfer"

            # NOTE(review): verify=False disables TLS certificate checking —
            # presumably because panels use self-signed certs; confirm.
            r = requests.post(url, data=finalData, verify=False)

            data = json.loads(r.text)

            if data['transferStatus'] == 1:

                ## Create local backup dir

                localBackupDir = os.path.join("/home", "backup")

                if not os.path.exists(localBackupDir):
                    command = "sudo mkdir " + localBackupDir
                    ProcessUtilities.executioner(command)

                ## create local directory that will host backups

                localStoragePath = "/home/backup/transfer-" + str(data['dir'])

                ## making local storage directory for backups

                command = "sudo mkdir " + localStoragePath
                ProcessUtilities.executioner(command)

                final_json = json.dumps(
                    {'remoteTransferStatus': 1, 'error_message': "None", "dir": data['dir']})
                return HttpResponse(final_json)
            else:
                final_json = json.dumps({'remoteTransferStatus': 0,
                                         'error_message': "Can not initiate remote transfer. Error message: " +
                                                          data['error_message']})
                return HttpResponse(final_json)

        except BaseException as msg:
            final_json = json.dumps({'remoteTransferStatus': 0,
                                     'error_message': "Can not initiate remote transfer. Error message: " +
                                                      str(msg)})
            return HttpResponse(final_json)

    except BaseException as msg:
        final_json = json.dumps({'remoteTransferStatus': 0, 'error_message': str(msg)})
        return HttpResponse(final_json)
2020-06-13 19:43:16 +05:00
|
|
|
def getRemoteTransferStatus(self, userID=None, data=None):
    """Poll the remote server for the status of an in-flight transfer.

    Maps the remote /api/FetchRemoteTransferStatus reply onto this module's
    {'remoteTransferStatus', 'error_message', 'status', 'backupsSent'} shape.
    """
    try:
        acl = ACLManager.loadedACL(userID)
        if ACLManager.currentContextPermission(acl, 'remoteBackups') == 0:
            return ACLManager.loadErrorJson('remoteTransferStatus', 0)

        ipAddress = data['ipAddress']
        password = data['password']
        dir = data['dir']
        username = "admin"

        payload = json.dumps({'dir': dir, "username": username, "password": password})
        r = requests.post("https://" + ipAddress + ":8090/api/FetchRemoteTransferStatus",
                          data=payload, verify=False)
        remote = json.loads(r.text)

        if remote['fetchStatus'] == 1:
            status = remote['status']
            if status.find("Backups are successfully generated and received on") > -1:
                # Transfer finished: backups have arrived on this server.
                result = {'remoteTransferStatus': 1, 'error_message': "None", "status": status,
                          'backupsSent': 1}
            elif status.find("[5010]") > -1:
                # Remote side reported a fatal error code.
                result = {'remoteTransferStatus': 0, 'error_message': status,
                          'backupsSent': 0}
            else:
                # Still in progress.
                result = {'remoteTransferStatus': 1, 'error_message': "None", "status": status,
                          'backupsSent': 0}
        else:
            result = {'remoteTransferStatus': 0, 'error_message': remote['error_message'],
                      'backupsSent': 0}

        return HttpResponse(json.dumps(result))
    except BaseException as msg:
        result = {'remoteTransferStatus': 0, 'error_message': str(msg), 'backupsSent': 0}
        return HttpResponse(json.dumps(result))
2020-06-13 19:43:16 +05:00
|
|
|
def remoteBackupRestore(self, userID=None, data=None):
    """Launch restoration of a transferred remote backup in the background.

    Spawns remoteTransferUtilities.py remoteBackupRestore against the
    /home/backup/transfer-<dir> directory; progress is polled separately.
    """
    try:
        acl = ACLManager.loadedACL(userID)
        if ACLManager.currentContextPermission(acl, 'remoteBackups') == 0:
            return ACLManager.loadErrorJson('remoteTransferStatus', 0)

        backupDir = data['backupDir']
        backupDirComplete = "/home/backup/transfer-" + str(backupDir)

        # adminEmail = admin.email

        ##

        execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/remoteTransferUtilities.py"
        execPath = execPath + " remoteBackupRestore --backupDirComplete " + backupDirComplete + " --backupDir " + str(backupDir)

        # Fire-and-forget: the restore runs in its own process.
        ProcessUtilities.popenExecutioner(execPath)

        # Give the child process a moment to start before reporting success.
        time.sleep(3)

        return HttpResponse(json.dumps({'remoteRestoreStatus': 1, 'error_message': 'None'}))
    except BaseException as msg:
        return HttpResponse(json.dumps({'remoteRestoreStatus': 0, 'error_message': str(msg)}))
2020-06-13 19:43:16 +05:00
|
|
|
def localRestoreStatus(self, userID=None, data=None):
|
2019-03-13 23:05:22 +05:00
|
|
|
try:
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
|
|
|
|
|
if ACLManager.currentContextPermission(currentACL, 'remoteBackups') == 0:
|
|
|
|
|
return ACLManager.loadErrorJson('remoteTransferStatus', 0)
|
|
|
|
|
|
|
|
|
|
backupDir = data['backupDir']
|
|
|
|
|
|
|
|
|
|
# admin = Administrator.objects.get(userName=username)
|
|
|
|
|
backupLogPath = "/home/backup/transfer-" + backupDir + "/" + "backup_log"
|
|
|
|
|
|
|
|
|
|
removalPath = "/home/backup/transfer-" + str(backupDir)
|
|
|
|
|
|
|
|
|
|
time.sleep(3)
|
|
|
|
|
|
2020-05-07 17:15:31 +05:00
|
|
|
command = "sudo cat " + backupLogPath
|
|
|
|
|
status = ProcessUtilities.outputExecutioner(command)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
2020-05-07 17:15:31 +05:00
|
|
|
if status.find("completed[success]") > -1:
|
|
|
|
|
command = "rm -rf " + removalPath
|
|
|
|
|
ProcessUtilities.executioner(command)
|
|
|
|
|
data_ret = {'remoteTransferStatus': 1, 'error_message': "None", "status": status, "complete": 1}
|
|
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
elif status.find("[5010]") > -1:
|
|
|
|
|
command = "sudo rm -rf " + removalPath
|
|
|
|
|
ProcessUtilities.executioner(command)
|
|
|
|
|
data = {'remoteTransferStatus': 0, 'error_message': status,
|
|
|
|
|
"status": "None", "complete": 0}
|
|
|
|
|
json_data = json.dumps(data)
|
|
|
|
|
return HttpResponse(json_data)
|
2019-03-13 23:05:22 +05:00
|
|
|
else:
|
2020-05-07 17:15:31 +05:00
|
|
|
data_ret = {'remoteTransferStatus': 1, 'error_message': "None", "status": status, "complete": 0}
|
2019-03-13 23:05:22 +05:00
|
|
|
json_data = json.dumps(data_ret)
|
|
|
|
|
return HttpResponse(json_data)
|
2020-05-07 17:15:31 +05:00
|
|
|
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
data = {'remoteTransferStatus': 0, 'error_message': str(msg), "status": "None", "complete": 0}
|
|
|
|
|
json_data = json.dumps(data)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def cancelRemoteBackup(self, userID=None, data=None):
|
2019-03-13 23:05:22 +05:00
|
|
|
try:
|
|
|
|
|
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
|
|
|
|
|
if ACLManager.currentContextPermission(currentACL, 'remoteBackups') == 0:
|
|
|
|
|
return ACLManager.loadErrorJson('cancelStatus', 0)
|
|
|
|
|
|
|
|
|
|
ipAddress = data['ipAddress']
|
|
|
|
|
password = data['password']
|
|
|
|
|
dir = data['dir']
|
|
|
|
|
username = "admin"
|
|
|
|
|
|
|
|
|
|
finalData = json.dumps({'dir': dir, "username": username, "password": password})
|
|
|
|
|
r = requests.post("https://" + ipAddress + ":8090/api/cancelRemoteTransfer", data=finalData,
|
|
|
|
|
verify=False)
|
|
|
|
|
|
|
|
|
|
data = json.loads(r.text)
|
|
|
|
|
|
|
|
|
|
if data['cancelStatus'] == 1:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
logging.CyberCPLogFileWriter.writeToFile(
|
|
|
|
|
"Some error cancelling at remote server, see the log file for remote server.")
|
|
|
|
|
|
|
|
|
|
path = "/home/backup/transfer-" + str(dir)
|
|
|
|
|
pathpid = path + "/pid"
|
|
|
|
|
|
2019-03-26 16:19:03 +05:00
|
|
|
command = "sudo cat " + pathpid
|
|
|
|
|
pid = ProcessUtilities.outputExecutioner(command)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
2019-03-26 16:19:03 +05:00
|
|
|
command = "sudo kill -KILL " + pid
|
|
|
|
|
ProcessUtilities.executioner(command)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
2019-03-26 16:19:03 +05:00
|
|
|
command = "sudo rm -rf " + path
|
|
|
|
|
ProcessUtilities.executioner(command)
|
2019-03-13 23:05:22 +05:00
|
|
|
|
|
|
|
|
data = {'cancelStatus': 1, 'error_message': "None"}
|
|
|
|
|
json_data = json.dumps(data)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
2019-12-10 15:09:10 +05:00
|
|
|
except BaseException as msg:
|
2019-03-13 23:05:22 +05:00
|
|
|
data = {'cancelStatus': 0, 'error_message': str(msg)}
|
|
|
|
|
json_data = json.dumps(data)
|
|
|
|
|
return HttpResponse(json_data)
|
2020-05-21 23:21:36 +05:00
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def backupLogs(self, request=None, userID=None, data=None):
|
2020-05-21 23:21:36 +05:00
|
|
|
try:
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
|
|
|
|
|
if currentACL['admin'] == 1:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
return ACLManager.loadError()
|
|
|
|
|
|
|
|
|
|
all_files = []
|
|
|
|
|
|
|
|
|
|
logFiles = BackupJob.objects.all().order_by('-id')
|
|
|
|
|
|
|
|
|
|
for logFile in logFiles:
|
2020-06-13 19:43:16 +05:00
|
|
|
all_files.append(logFile.logFile)
|
2020-05-21 23:21:36 +05:00
|
|
|
|
|
|
|
|
return render(request, 'backup/backupLogs.html', {'backups': all_files})
|
|
|
|
|
|
|
|
|
|
except BaseException as msg:
|
|
|
|
|
return HttpResponse(str(msg))
|
|
|
|
|
|
2020-06-13 19:43:16 +05:00
|
|
|
def fetchLogs(self, userID=None, data=None):
|
2020-05-21 23:21:36 +05:00
|
|
|
try:
|
|
|
|
|
currentACL = ACLManager.loadedACL(userID)
|
|
|
|
|
|
|
|
|
|
if currentACL['admin'] == 1:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
return ACLManager.loadError()
|
|
|
|
|
|
|
|
|
|
page = int(str(data['page']).rstrip('\n'))
|
|
|
|
|
recordsToShow = int(data['recordsToShow'])
|
|
|
|
|
logFile = data['logFile']
|
|
|
|
|
|
|
|
|
|
logJob = BackupJob.objects.get(logFile=logFile)
|
|
|
|
|
|
|
|
|
|
logs = logJob.backupjoblogs_set.all()
|
|
|
|
|
|
|
|
|
|
from s3Backups.s3Backups import S3Backups
|
|
|
|
|
from plogical.backupSchedule import backupSchedule
|
|
|
|
|
|
|
|
|
|
pagination = S3Backups.getPagination(len(logs), recordsToShow)
|
|
|
|
|
endPageNumber, finalPageNumber = S3Backups.recordsPointer(page, recordsToShow)
|
|
|
|
|
finalLogs = logs[finalPageNumber:endPageNumber]
|
|
|
|
|
|
|
|
|
|
json_data = "["
|
|
|
|
|
checker = 0
|
|
|
|
|
counter = 0
|
|
|
|
|
|
|
|
|
|
for log in finalLogs:
|
|
|
|
|
|
|
|
|
|
if log.status == backupSchedule.INFO:
|
|
|
|
|
status = 'INFO'
|
|
|
|
|
else:
|
|
|
|
|
status = 'ERROR'
|
|
|
|
|
|
|
|
|
|
dic = {
|
|
|
|
|
'LEVEL': status, "Message": log.message
|
|
|
|
|
}
|
|
|
|
|
if checker == 0:
|
|
|
|
|
json_data = json_data + json.dumps(dic)
|
|
|
|
|
checker = 1
|
|
|
|
|
else:
|
|
|
|
|
json_data = json_data + ',' + json.dumps(dic)
|
|
|
|
|
counter = counter + 1
|
|
|
|
|
|
|
|
|
|
json_data = json_data + ']'
|
|
|
|
|
|
|
|
|
|
if logJob.location == backupSchedule.LOCAL:
|
|
|
|
|
location = 'local'
|
|
|
|
|
else:
|
|
|
|
|
location = 'remote'
|
|
|
|
|
|
|
|
|
|
data = {
|
2020-06-13 19:43:16 +05:00
|
|
|
'status': 1,
|
|
|
|
|
'error_message': 'None',
|
|
|
|
|
'logs': json_data,
|
|
|
|
|
'pagination': pagination,
|
|
|
|
|
'jobSuccessSites': logJob.jobSuccessSites,
|
|
|
|
|
'jobFailedSites': logJob.jobFailedSites,
|
|
|
|
|
'location': location
|
|
|
|
|
}
|
2020-05-21 23:21:36 +05:00
|
|
|
|
|
|
|
|
json_data = json.dumps(data)
|
|
|
|
|
return HttpResponse(json_data)
|
|
|
|
|
|
|
|
|
|
except BaseException as msg:
|
|
|
|
|
data = {'remoteRestoreStatus': 0, 'error_message': str(msg)}
|
|
|
|
|
json_data = json.dumps(data)
|
|
|
|
|
return HttpResponse(json_data)
|