# CyberPanel/IncBackups/views.py
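"""Django views for the IncBackups app.

Two generations of functionality live here: the original incremental-backup flow
(SFTP/S3 destinations, IncJobs-driven backups, restore points and schedules) and
the newer "Backups v2" flow that drives CPBackupsV2 (per-site rclone backends and
snapshot-based backup/restore). Most endpoints return a JSON body carrying at
least a 'status' flag and an 'error_message' string.
"""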

import json
import os
import stat
import time
from pathlib import Path
from random import randint

from django.shortcuts import HttpResponse, redirect

from backup.backupManager import BackupManager
from loginSystem.models import Administrator
from loginSystem.views import loadLoginPage
from plogical.Backupsv2 import CPBackupsV2
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
from plogical.acl import ACLManager
from plogical.httpProc import httpProc
from plogical.processUtilities import ProcessUtilities as pu
from plogical.virtualHostUtilities import virtualHostUtilities as vhu
from websiteFunctions.models import Websites

from .IncBackupProvider import IncBackupProvider
from .IncBackupPath import IncBackupPath
from .IncBackupsControl import IncJobs
from .models import IncJob, BackupJob, JobSites

def def_renderer(request, templateName, args, context=None):
    proc = httpProc(request, templateName, args, context)
    return proc.render()

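# Destination strings handed to the templates are provider-prefixed, e.g.
# (illustrative values): ['local', 'sftp:192.0.2.10', 's3:s3.amazonaws.com/AKIA...'].
# They are derived from the per-provider directories under IncBackupPath.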
def _get_destinations(local: bool = False):
    destinations = []
    if local:
        destinations.append('local')

    path = Path(IncBackupPath.SFTP.value)
    if path.exists():
        for item in path.iterdir():
            destinations.append('sftp:%s' % item.name)

    path = Path(IncBackupPath.AWS.value)
    if path.exists():
        for item in path.iterdir():
            destinations.append('s3:s3.amazonaws.com/%s' % item.name)
    return destinations

def _get_user_acl(request):
    user_id = request.session['userID']
    current_acl = ACLManager.loadedACL(user_id)
    return user_id, current_acl

def create_backup(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'createBackup') == 0:
            return ACLManager.loadError()

        websites = ACLManager.findAllSites(current_acl, user_id)
        destinations = _get_destinations(local=True)
        return def_renderer(request, 'IncBackups/createBackup.html',
                            {'websiteList': websites, 'destinations': destinations}, 'createBackup')
    except BaseException as msg:
        logging.writeToFile(str(msg))
        return redirect(loadLoginPage)

def backup_destinations(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'addDeleteDestinations') == 0:
            return ACLManager.loadError()

        return def_renderer(request, 'IncBackups/incrementalDestinations.html', {}, 'addDeleteDestinations')
    except BaseException as msg:
        logging.writeToFile(str(msg))
        return redirect(loadLoginPage)

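# add_destination expects a JSON body whose 'type' selects the provider. The key
# names below mirror what the handler reads; the values are only illustrative:
#   SFTP: {"type": "SFTP", "IPAddress": "192.0.2.10", "password": "...", "backupSSHPort": "22"}
#   AWS:  {"type": "AWS", "AWS_ACCESS_KEY_ID": "AKIA...", "AWS_SECRET_ACCESS_KEY": "..."}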
def add_destination(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'addDeleteDestinations') == 0:
            return ACLManager.loadErrorJson('destStatus', 0)
        data = json.loads(request.body)

        if data['type'].lower() == IncBackupProvider.SFTP.name.lower():
            path = Path(IncBackupPath.SFTP.value)
            path.mkdir(exist_ok=True)

            ip_address = data['IPAddress']
            password = data['password']

            address_file = path / ip_address
            port = data.get('backupSSHPort', '22')

            if address_file.exists():
                final_dic = {'status': 0, 'error_message': 'This destination already exists.'}
                final_json = json.dumps(final_dic)
                return HttpResponse(final_json)

            python_path = Path('/usr/local/CyberCP/bin/python')
            backup_utils = Path(vhu.cyberPanel) / "plogical/backupUtilities.py"

            exec_args = "submitDestinationCreation --ipAddress %s --password %s --port %s --user %s" % \
                        (ip_address, password, port, 'root')

            exec_cmd = "%s %s %s" % (python_path, backup_utils, exec_args)

            if Path(pu.debugPath).exists():
                logging.writeToFile(exec_cmd)

            output = pu.outputExecutioner(exec_cmd)

            if Path(pu.debugPath).exists():
                logging.writeToFile(output)

            if output.find('1,') > -1:
                content = '%s\n%s' % (ip_address, port)
                with open(address_file, 'w') as outfile:
                    outfile.write(content)

                command = 'cat /root/.ssh/config'
                current_config = pu.outputExecutioner(command)

                tmp_file = '/home/cyberpanel/sshconfig'
                with open(tmp_file, 'w') as outfile:
                    if current_config.find('cat') == -1:
                        outfile.write(current_config)

                    content = "Host %s\n" \
                              " IdentityFile ~/.ssh/cyberpanel\n" \
                              " Port %s\n" % (ip_address, port)
                    if current_config.find(ip_address) == -1:
                        outfile.write(content)

                command = 'mv %s /root/.ssh/config' % tmp_file
                pu.executioner(command)
                command = 'chown root:root /root/.ssh/config'
                pu.executioner(command)
                final_dic = {'status': 1, 'error_message': 'None'}
            else:
                final_dic = {'status': 0, 'error_message': output}

            final_json = json.dumps(final_dic)
            return HttpResponse(final_json)

        if data['type'].lower() == IncBackupProvider.AWS.name.lower():
            path = Path(IncBackupPath.AWS.value)
            path.mkdir(exist_ok=True)

            access_key = data['AWS_ACCESS_KEY_ID']
            secret_key = data['AWS_SECRET_ACCESS_KEY']

            aws_file = path / access_key

            with open(aws_file, 'w') as outfile:
                outfile.write(secret_key)

            aws_file.chmod(stat.S_IRUSR | stat.S_IWUSR)
            final_dic = {'status': 1}
            final_json = json.dumps(final_dic)
            return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

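# populate_current_records lists the destinations previously written by
# add_destination: SFTP entries are read back from the per-IP files (ip/port),
# AWS entries are reported by access-key file name only.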
def populate_current_records(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'addDeleteDestinations') == 0:
            return ACLManager.loadErrorJson('fetchStatus', 0)
        data = json.loads(request.body)

        json_data = []
        if data['type'].lower() == IncBackupProvider.SFTP.name.lower():
            path = Path(IncBackupPath.SFTP.value)
            if path.exists():
                for item in path.iterdir():
                    with open(item, 'r') as infile:
                        _file = infile.readlines()
                        json_data.append({
                            'ip': _file[0].strip('\n'),
                            'port': _file[1],
                        })
            else:
                final_json = json.dumps({'status': 1, 'error_message': "None", "data": ''})
                return HttpResponse(final_json)

        if data['type'].lower() == IncBackupProvider.AWS.name.lower():
            path = Path(IncBackupPath.AWS.value)

            if path.exists():
                for item in path.iterdir():
                    json_data.append({'AWS_ACCESS_KEY_ID': item.name})
            else:
                final_json = json.dumps({'status': 1, 'error_message': "None", "data": ''})
                return HttpResponse(final_json)

        final_json = json.dumps({'status': 1, 'error_message': "None", "data": json_data})
        return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

def remove_destination(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'addDeleteDestinations') == 0:
            return ACLManager.loadErrorJson('destStatus', 0)
        data = json.loads(request.body)

        if 'IPAddress' in data:
            file_name = data['IPAddress']

        if data['type'].lower() == IncBackupProvider.SFTP.name.lower():
            dest_file = Path(IncBackupPath.SFTP.value) / file_name
            dest_file.unlink()

        if data['type'].lower() == IncBackupProvider.AWS.name.lower():
            dest_file = Path(IncBackupPath.AWS.value) / file_name
            dest_file.unlink()

        final_dic = {'status': 1, 'error_message': 'None'}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'destStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

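# fetch_current_backups has two modes: when the request carries
# 'backupDestinations' (plus 'password'), the listing is delegated to
# IncJobs.fetchCurrentBackups for the remote repository; otherwise the locally
# recorded IncJob rows and their snapshots are returned.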
def fetch_current_backups(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        admin = Administrator.objects.get(pk=user_id)
        data = json.loads(request.body)

        backup_domain = data['websiteToBeBacked']

        if ACLManager.checkOwnership(backup_domain, admin, current_acl) == 1:
            pass
        else:
            return ACLManager.loadErrorJson('fetchStatus', 0)

        if 'backupDestinations' in data:
            backup_destinations = data['backupDestinations']
            extra_args = {'website': backup_domain, 'backupDestinations': backup_destinations}

            if 'password' in data:
                extra_args['password'] = data['password']
            else:
                final_json = json.dumps({'status': 0, 'error_message': "Please supply the password."})
                return HttpResponse(final_json)

            start_job = IncJobs('Dummy', extra_args)
            return start_job.fetchCurrentBackups()
        else:
            website = Websites.objects.get(domain=backup_domain)
            backups = website.incjob_set.all()

            json_data = []
            for backup in reversed(backups):
                snapshots = []
                jobs = backup.jobsnapshots_set.all()
                for job in jobs:
                    snapshots.append({'type': job.type, 'snapshotid': job.snapshotid, 'destination': job.destination})
                json_data.append({'id': backup.id,
                                  'date': str(backup.date),
                                  'snapshots': snapshots
                                  })
            final_json = json.dumps({'status': 1, 'error_message': "None", "data": json_data})
            return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

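# submit_backup_creation starts an IncJobs thread and immediately returns the
# random tempPath; the frontend is expected to poll get_backup_status with that
# path. Illustrative request body (keys mirror what is read below):
#   {"websiteToBeBacked": "example.com", "backupDestinations": "local",
#    "websiteData": true, "websiteDatabases": true}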
def submit_backup_creation(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        admin = Administrator.objects.get(pk=user_id)
        data = json.loads(request.body)

        backup_domain = data['websiteToBeBacked']
        backup_destinations = data['backupDestinations']

        if ACLManager.checkOwnership(backup_domain, admin, current_acl) == 1:
            pass
        else:
            return ACLManager.loadErrorJson('metaStatus', 0)

        temp_path = Path("/home/cyberpanel/") / str(randint(1000, 9999))

        extra_args = {}
        extra_args['website'] = backup_domain
        extra_args['tempPath'] = str(temp_path)
        extra_args['backupDestinations'] = backup_destinations
        extra_args['websiteData'] = data['websiteData'] if 'websiteData' in data else False
        extra_args['websiteEmails'] = data['websiteEmails'] if 'websiteEmails' in data else False
        extra_args['websiteSSLs'] = data['websiteSSLs'] if 'websiteSSLs' in data else False
        extra_args['websiteDatabases'] = data['websiteDatabases'] if 'websiteDatabases' in data else False

        start_job = IncJobs('createBackup', extra_args)
        start_job.start()
        time.sleep(2)

        final_json = json.dumps({'status': 1, 'error_message': "None", 'tempPath': str(temp_path)})
        return HttpResponse(final_json)
    except BaseException as msg:
        logging.writeToFile(str(msg))
        final_dic = {'status': 0, 'metaStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

def get_backup_status(request):
    try:
        data = json.loads(request.body)
        status = data['tempPath']

        backup_domain = data['websiteToBeBacked']

        user_id, current_acl = _get_user_acl(request)
        admin = Administrator.objects.get(pk=user_id)
        if ACLManager.checkOwnership(backup_domain, admin, current_acl) == 1:
            pass
        else:
            return ACLManager.loadErrorJson('fetchStatus', 0)

        if ACLManager.CheckStatusFilleLoc(status):
            pass
        else:
            data_ret = {'abort': 1, 'installStatus': 0, 'installationProgress': "100",
                        'currentStatus': 'Invalid status file.'}
            json_data = json.dumps(data_ret)
            return HttpResponse(json_data)

        ## file name read ends
        if os.path.exists(status):
            command = "cat " + status
            result = pu.outputExecutioner(command, 'cyberpanel')

            if result.find("Completed") > -1:
                ### Removing Files
                os.remove(status)
                final_json = json.dumps(
                    {'backupStatus': 1, 'error_message': "None", "status": result, "abort": 1})
                return HttpResponse(final_json)
            elif result.find("[5009]") > -1:
                ## removing status file, so that backup can re-run
                try:
                    os.remove(status)
                except:
                    pass
                final_json = json.dumps(
                    {'backupStatus': 1, 'error_message': "None", "status": result,
                     "abort": 1})
                return HttpResponse(final_json)
            else:
                final_json = json.dumps(
                    {'backupStatus': 1, 'error_message': "None", "status": result,
                     "abort": 0})
                return HttpResponse(final_json)
        else:
            final_json = json.dumps({'backupStatus': 1, 'error_message': "None", "status": 1, "abort": 0})
            return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'backupStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        logging.writeToFile(str(msg) + " [backupStatus]")
        return HttpResponse(final_json)

def delete_backup(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        admin = Administrator.objects.get(pk=user_id)
        data = json.loads(request.body)

        backup_domain = data['websiteToBeBacked']

        if ACLManager.checkOwnership(backup_domain, admin, current_acl) == 1:
            pass
        else:
            return ACLManager.loadErrorJson('fetchStatus', 0)

        backup_id = data['backupID']
        inc_job = IncJob.objects.get(id=backup_id)
        job = IncJobs(None, None)
        job.DeleteSnapShot(inc_job)
        inc_job.delete()

        final_dic = {'status': 1, 'error_message': 'None'}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'destStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

def fetch_restore_points(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        admin = Administrator.objects.get(pk=user_id)

        data = json.loads(request.body)

        backup_domain = data['websiteToBeBacked']

        if ACLManager.checkOwnership(backup_domain, admin, current_acl) == 1:
            pass
        else:
            return ACLManager.loadErrorJson('fetchStatus', 0)

        job_id = data['id']

        inc_job = IncJob.objects.get(id=job_id)
        backups = inc_job.jobsnapshots_set.all()

        json_data = []
        for items in backups:
            json_data.append({'id': items.id,
                              'snapshotid': items.snapshotid,
                              'type': items.type,
                              'destination': items.destination,
                              })
        final_json = json.dumps({'status': 1, 'error_message': "None", "data": json_data})
        return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

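# restore_point starts an IncJobs('restorePoint', ...) thread. When
# data['reconstruct'] == 'remote', the extra arguments must also carry the
# remote destination, repository password and path; otherwise only the job id
# and temp path are needed.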
def restore_point(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        admin = Administrator.objects.get(pk=user_id)

        data = json.loads(request.body)

        backup_domain = data['websiteToBeBacked']
        job_id = data['jobid']

        if ACLManager.checkOwnership(backup_domain, admin, current_acl) == 1:
            pass
        else:
            return ACLManager.loadErrorJson('metaStatus', 0)

        temp_path = Path("/home/cyberpanel/") / str(randint(1000, 9999))

        if data['reconstruct'] == 'remote':
            extraArgs = {}
            extraArgs['website'] = backup_domain
            extraArgs['jobid'] = job_id
            extraArgs['tempPath'] = str(temp_path)
            extraArgs['reconstruct'] = data['reconstruct']
            extraArgs['backupDestinations'] = data['backupDestinations']
            extraArgs['password'] = data['password']
            extraArgs['path'] = data['path']
        else:
            extraArgs = {}
            extraArgs['website'] = backup_domain
            extraArgs['jobid'] = job_id
            extraArgs['tempPath'] = str(temp_path)
            extraArgs['reconstruct'] = data['reconstruct']

        start_job = IncJobs('restorePoint', extraArgs)
        start_job.start()

        time.sleep(2)

        final_json = json.dumps({'status': 1, 'error_message': "None", 'tempPath': str(temp_path)})
        return HttpResponse(final_json)
    except BaseException as msg:
        logging.writeToFile(str(msg))
        final_dic = {'status': 0, 'metaStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

def schedule_backups(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'scheduleBackups') == 0:
            return ACLManager.loadError()

        websites = ACLManager.findAllSites(current_acl, user_id)
        destinations = _get_destinations(local=True)

        return def_renderer(request, 'IncBackups/backupSchedule.html',
                            {'websiteList': websites, 'destinations': destinations}, 'scheduleBackups')
    except BaseException as msg:
        logging.writeToFile(str(msg))
        return redirect(loadLoginPage)

def submit_backup_schedule(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'scheduleBackups') == 0:
            return ACLManager.loadErrorJson('scheduleStatus', 0)
        data = json.loads(request.body)

        backup_dest = data['backupDestinations']
        backup_freq = data['backupFreq']
        backup_retention = data['backupRetention']
        backup_sites = data['websitesToBeBacked']

        backup_data = 1 if 'websiteData' in data else 0
        backup_emails = 1 if 'websiteEmails' in data else 0
        backup_databases = 1 if 'websiteDatabases' in data else 0

        backup_job = BackupJob(websiteData=backup_data, websiteDataEmails=backup_emails,
                               websiteDatabases=backup_databases, destination=backup_dest, frequency=backup_freq,
                               retention=backup_retention)
        backup_job.save()

        for site in backup_sites:
            backup_site_job = JobSites(job=backup_job, website=site)
            backup_site_job.save()

        final_json = json.dumps({'status': 1, 'error_message': "None"})
        return HttpResponse(final_json)
    except BaseException as msg:
        final_json = json.dumps({'status': 0, 'error_message': str(msg)})
        return HttpResponse(final_json)

def get_current_backup_schedules(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'scheduleBackups') == 0:
            return ACLManager.loadErrorJson('fetchStatus', 0)
        records = BackupJob.objects.all()

        json_data = []
        for items in records:
            json_data.append({'id': items.id,
                              'destination': items.destination,
                              'frequency': items.frequency,
                              'retention': items.retention,
                              'numberOfSites': items.jobsites_set.all().count()
                              })

        final_json = json.dumps({'status': 1, 'error_message': "None", "data": json_data})
        return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

def fetch_sites(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'scheduleBackups') == 0:
            return ACLManager.loadErrorJson('fetchStatus', 0)
        data = json.loads(request.body)
        job = BackupJob.objects.get(pk=data['id'])

        json_data = []
        for jobsite in job.jobsites_set.all():
            json_data.append({'id': jobsite.id,
                              'website': jobsite.website,
                              })

        final_json = json.dumps({'status': 1, 'error_message': "None", "data": json_data,
                                 'websiteData': job.websiteData, 'websiteDatabases': job.websiteDatabases,
                                 'websiteEmails': job.websiteDataEmails})
        return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

def schedule_delete(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'scheduleBackups') == 0:
            return ACLManager.loadErrorJson('scheduleStatus', 0)
        data = json.loads(request.body)

        job_id = data['id']

        backup_job = BackupJob.objects.get(id=job_id)
        backup_job.delete()

        final_json = json.dumps({'status': 1, 'error_message': "None"})
        return HttpResponse(final_json)
    except BaseException as msg:
        final_json = json.dumps({'status': 0, 'error_message': str(msg)})
        return HttpResponse(final_json)

def restore_remote_backups(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'createBackup') == 0:
            return ACLManager.loadError()

        websites = ACLManager.findAllSites(current_acl, user_id)
        destinations = _get_destinations()

        return def_renderer(request, 'IncBackups/restoreRemoteBackups.html',
                            {'websiteList': websites, 'destinations': destinations}, 'createBackup')
    except BaseException as msg:
        logging.writeToFile(str(msg))
        return redirect(loadLoginPage)

def save_changes(request):
    try:
        user_id, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'scheduleBackups') == 0:
            return ACLManager.loadErrorJson('scheduleStatus', 0)
        data = json.loads(request.body)

        job_id = data['id']
        backup_data = data['websiteData'] if 'websiteData' in data else 0
        backup_emails = data['websiteEmails'] if 'websiteEmails' in data else 0
        backup_databases = data['websiteDatabases'] if 'websiteDatabases' in data else 0

        job = BackupJob.objects.get(pk=job_id)

        job.websiteData = int(backup_data)
        job.websiteDatabases = int(backup_databases)
        job.websiteDataEmails = int(backup_emails)

        job.save()

        final_json = json.dumps({'status': 1, 'error_message': "None"})
        return HttpResponse(final_json)
    except BaseException as msg:
        final_json = json.dumps({'status': 0, 'error_message': str(msg)})
        return HttpResponse(final_json)

def remove_site(request):
    try:
        _, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'scheduleBackups') == 0:
            return ACLManager.loadErrorJson('scheduleStatus', 0)
        data = json.loads(request.body)

        job_id = data['id']
        website = data['website']

        job = BackupJob.objects.get(pk=job_id)
        site = JobSites.objects.get(job=job, website=website)
        site.delete()

        final_json = json.dumps({'status': 1, 'error_message': "None"})
        return HttpResponse(final_json)
    except BaseException as msg:
        final_json = json.dumps({'status': 0, 'error_message': str(msg)})
        return HttpResponse(final_json)

def add_website(request):
    try:
        _, current_acl = _get_user_acl(request)
        if ACLManager.currentContextPermission(current_acl, 'scheduleBackups') == 0:
            return ACLManager.loadErrorJson('scheduleStatus', 0)
        data = json.loads(request.body)

        job_id = data['id']
        website = data['website']

        job = BackupJob.objects.get(pk=job_id)
        try:
            JobSites.objects.get(job=job, website=website)
        except BaseException:
            site = JobSites(job=job, website=website)
            site.save()

        final_json = json.dumps({'status': 1, 'error_message': "None"})
        return HttpResponse(final_json)
    except BaseException as msg:
        final_json = json.dumps({'status': 0, 'error_message': str(msg)})
        return HttpResponse(final_json)

#### Backups v2
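# The views below back the "Backups v2" pages. Backends are rclone remotes
# configured per site in /home/<domain>/.config/rclone/rclone.conf, and the
# heavy lifting (snapshot creation, listing and restore) is delegated to
# plogical.Backupsv2.CPBackupsV2, which runs as a background thread.
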
def ConfigureV2Backup(request):
    try:
        user_id, current_acl = _get_user_acl(request)

        if ACLManager.currentContextPermission(current_acl, 'createBackup') == 0:
            return ACLManager.loadError()

        if ACLManager.CheckForPremFeature('all'):
            BackupStat = 1
        else:
            BackupStat = 0

        websites = ACLManager.findAllSites(current_acl, user_id)
        # destinations = _get_destinations(local=True)

        proc = httpProc(request, 'IncBackups/ConfigureV2Backup.html',
                        {'websiteList': websites, 'BackupStat': BackupStat})
        return proc.render()
    except BaseException as msg:
        logging.writeToFile(str(msg))
        return redirect(loadLoginPage)

def ConfigureV2BackupSetup(request):
    try:
        userID = request.session['userID']

        req_data = {}
        req_data['name'] = 'GDrive'
        req_data['token'] = request.GET.get('t')
        req_data['refresh_token'] = request.GET.get('r')
        req_data['token_uri'] = request.GET.get('to')
        req_data['scopes'] = request.GET.get('s')
        req_data['accountname'] = request.GET.get('n')
        website = request.GET.get('d')

        currentACL = ACLManager.loadedACL(userID)
        admin = Administrator.objects.get(pk=userID)
        if ACLManager.checkOwnership(website, admin, currentACL) == 1:
            pass
        else:
            return ACLManager.loadError()

        cpbuv2 = CPBackupsV2(
            {'domain': website, 'BasePath': '/home/backup', 'BackupDatabase': 1, 'BackupData': 1,
             'BackupEmails': 1, 'BackendName': 'testremote'})
        cpbuv2.SetupRcloneBackend(CPBackupsV2.GDrive, req_data)

        return ConfigureV2Backup(request)
    except BaseException as msg:
        logging.writeToFile("Error configuring V2 backup: " + str(msg))
        return redirect(loadLoginPage)

def CreateV2Backup(request):
    try:
        userID = request.session['userID']
        bm = BackupManager()
        return bm.CreateV2backupSite(request, userID)
    except KeyError:
        return redirect(loadLoginPage)

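# CreateV2BackupButton kicks off a CPBackupsV2 background run. Illustrative JSON
# body (keys mirror what is read below):
#   {"Selectedwebsite": "example.com", "Selectedrepo": "SFTP",
#    "websiteData": true, "websiteEmails": false, "websiteDatabases": true}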
def CreateV2BackupButton(request):
    try:
        userID = request.session['userID']
        data = json.loads(request.body)
        Selectedwebsite = data['Selectedwebsite']
        Selectedrepo = data['Selectedrepo']

        currentACL = ACLManager.loadedACL(userID)
        admin = Administrator.objects.get(pk=userID)

        if ACLManager.checkOwnership(Selectedwebsite, admin, currentACL) == 1:
            pass
        else:
            return ACLManager.loadError()

        extra_args = {}
        extra_args['function'] = 'InitiateBackup'
        extra_args['website'] = Selectedwebsite
        extra_args['domain'] = Selectedwebsite
        extra_args['BasePath'] = '/home/backup'
        extra_args['BackendName'] = Selectedrepo
        extra_args['BackupData'] = data['websiteData'] if 'websiteData' in data else False
        extra_args['BackupEmails'] = data['websiteEmails'] if 'websiteEmails' in data else False
        extra_args['BackupDatabase'] = data['websiteDatabases'] if 'websiteDatabases' in data else False

        background = CPBackupsV2(extra_args)
        background.start()
        time.sleep(2)

        data_ret = {'status': 1, 'installStatus': 1, 'error_message': 'None'}
        json_data = json.dumps(data_ret)
        return HttpResponse(json_data)
    except BaseException as msg:
        data_ret = {'status': 0, 'installStatus': 0, 'error_message': str(msg)}
        json_data = json.dumps(data_ret)
        return HttpResponse(json_data)

def CreateV2BackupStatus(request):
    try:
        userID = request.session['userID']
        bm = BackupManager()
        return bm.CreateV2BackupStatus(userID, json.loads(request.body))
    except KeyError:
        return redirect(loadLoginPage)

def RestoreV2backupSite(request):
    try:
        userID = request.session['userID']
        bm = BackupManager()
        return bm.RestoreV2backupSite(request, userID)
    except KeyError:
        return redirect(loadLoginPage)

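# RestorePathV2 restores a single path from a chosen snapshot. Illustrative JSON
# body (keys mirror what is read below):
#   {"snapshotid": "abc123", "path": "/public_html",
#    "selwebsite": "example.com", "selectedrepo": "SFTP"}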
def RestorePathV2(request):
    try:
        userID = request.session['userID']
        data = json.loads(request.body)
        SnapShotId = data['snapshotid']
        RestorePath = data['path']  # renamed locally so it does not shadow pathlib.Path
        Selectedwebsite = data['selwebsite']
        Selectedrepo = data['selectedrepo']

        currentACL = ACLManager.loadedACL(userID)
        admin = Administrator.objects.get(pk=userID)
        if ACLManager.checkOwnership(str(Selectedwebsite), admin, currentACL) == 1:
            pass
        else:
            return ACLManager.loadError()

        extra_args = {}
        extra_args['function'] = 'InitiateRestore'
        extra_args['website'] = Selectedwebsite
        extra_args['domain'] = Selectedwebsite
        extra_args['BasePath'] = '/home/backup'
        extra_args['BackendName'] = Selectedrepo
        extra_args['path'] = RestorePath
        extra_args['snapshotid'] = SnapShotId

        background = CPBackupsV2(extra_args)
        background.start()

        data_ret = {'status': 1, 'installStatus': 1, 'error_message': 'None'}
        json_data = json.dumps(data_ret)
        return HttpResponse(json_data)
    except BaseException as msg:
        data_ret = {'status': 0, 'installStatus': 0, 'error_message': str(msg)}
        json_data = json.dumps(data_ret)
        return HttpResponse(json_data)

def selectwebsiteRetorev2(request):
    import re
    try:
        userID = request.session['userID']
        data = json.loads(request.body)
        Selectedwebsite = data['Selectedwebsite']

        currentACL = ACLManager.loadedACL(userID)
        admin = Administrator.objects.get(pk=userID)

        if ACLManager.checkOwnership(str(Selectedwebsite), admin, currentACL) == 1:
            pass
        else:
            return ACLManager.loadError()

        obj = Websites.objects.get(domain=str(Selectedwebsite), admin=admin)
        # e.g. /home/cyberpanel.net/.config/rclone/rclone.conf
        path = '/home/%s/.config/rclone/rclone.conf' % (obj.domain)
        command = 'cat %s' % (path)
        result = pu.outputExecutioner(command)
        if result.find('type') > -1:
            pattern = r'\[(.*?)\]'
            matches = re.findall(pattern, result)
            final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None", "data": matches})
            return HttpResponse(final_json)
        else:
            final_json = json.dumps({'status': 0, 'fetchStatus': 0, 'error_message': 'Could not find any repo.'})
            return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

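# ConfigureSftpV2Backup registers an SFTP rclone backend for the selected site
# via CPBackupsV2.SetupRcloneBackend. The request keys below ('sfptpasswd',
# 'Repo_Name', ...) are read verbatim from the JSON body as the frontend sends them.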
def ConfigureSftpV2Backup(request):
    try:
        userID = request.session['userID']
        data = json.loads(request.body)
        Selectedwebsite = data['Selectedwebsite']
        sfptpasswd = data['sfptpasswd']
        hostName = data['hostName']
        UserName = data['UserName']
        Repo_Name = data['Repo_Name']

        currentACL = ACLManager.loadedACL(userID)
        admin = Administrator.objects.get(pk=userID)

        if ACLManager.checkOwnership(str(Selectedwebsite), admin, currentACL) == 1:
            pass
        else:
            return ACLManager.loadError()

        req_data = {}
        req_data['name'] = 'SFTP'
        req_data['host'] = hostName
        req_data['user'] = UserName
        req_data['password'] = sfptpasswd
        req_data['Repo_Name'] = Repo_Name

        cpbuv2 = CPBackupsV2(
            {'domain': Selectedwebsite, 'BasePath': '/home/backup', 'BackupDatabase': 1, 'BackupData': 1,
             'BackupEmails': 1, 'BackendName': 'SFTP', 'function': None})
        cpbuv2.SetupRcloneBackend(CPBackupsV2.SFTP, req_data)

        final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None", "data": None})
        return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

def selectwebsiteCreatev2(request):
    import re
    try:
        userID = request.session['userID']
        data = json.loads(request.body)
        Selectedwebsite = data['Selectedwebsite']

        currentACL = ACLManager.loadedACL(userID)
        admin = Administrator.objects.get(pk=userID)

        if ACLManager.checkOwnership(str(Selectedwebsite), admin, currentACL) == 1:
            pass
        else:
            return ACLManager.loadError()

        obj = Websites.objects.get(domain=str(Selectedwebsite))
        # e.g. /home/cyberpanel.net/.config/rclone/rclone.conf
        path = '/home/%s/.config/rclone/rclone.conf' % (obj.domain)
        command = 'cat %s' % (path)
        CurrentContent = pu.outputExecutioner(command)

        status, currentSchedules = CPBackupsV2.FetchCurrentSchedules(str(Selectedwebsite))

        if CurrentContent.find('No such file or directory') > -1:
            LocalRclonePath = f'/home/{obj.domain}/.config/rclone'
            command = f'mkdir -p {LocalRclonePath}'
            pu.executioner(command, obj.externalApp)

            # content = '''
            # [local]
            # type = local
            # '''
            # command = f"echo '{content}' > {path}"
            # pu.executioner(command, obj.externalApp, True)

            command = 'cat %s' % (path)
            result = pu.outputExecutioner(command)
            if result.find('type') > -1:
                pattern = r'\[(.*?)\]'
                matches = re.findall(pattern, result)
                final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None", "data": matches,
                                         'currentSchedules': currentSchedules})
                return HttpResponse(final_json)
            else:
                final_json = json.dumps({'status': 0, 'fetchStatus': 0, 'error_message': 'Could not find any repo.'})
                return HttpResponse(final_json)
        else:
            command = 'cat %s' % (path)
            result = pu.outputExecutioner(command)
            if result.find('type') > -1:
                pattern = r'\[(.*?)\]'
                matches = re.findall(pattern, result)
                final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None", "data": matches,
                                         'currentSchedules': currentSchedules})
                return HttpResponse(final_json)
            else:
                final_json = json.dumps({'status': 0, 'fetchStatus': 0, 'error_message': 'Could not find any repo.',
                                         'currentSchedules': currentSchedules})
                return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

def selectreporestorev2(request):
    try:
        userID = request.session['userID']
        data = json.loads(request.body)
        Selectedrepo = data['Selectedrepo']
        Selectedwebsite = data['Selectedwebsite']

        currentACL = ACLManager.loadedACL(userID)
        admin = Administrator.objects.get(pk=userID)

        if ACLManager.checkOwnership(str(Selectedwebsite), admin, currentACL) == 1:
            pass
        else:
            return ACLManager.loadError()

        # underlying listing roughly: rustic -r <repo> snapshots --password "" --json 2>/dev/null
        vm = CPBackupsV2({'domain': Selectedwebsite, 'BackendName': Selectedrepo, "function": ""})
        status, data = vm.FetchSnapShots()
        if status == 1:
            final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None", "data": data})
            return HttpResponse(final_json)
        else:
            final_json = json.dumps({'status': 0, 'fetchStatus': 1, 'error_message': 'Could not fetch snapshots.'})
            return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

def schedulev2Backups(request):
    try:
        userID = request.session['userID']
        bm = BackupManager()
        return bm.schedulev2Backups(request, userID)
    except KeyError:
        return redirect(loadLoginPage)

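# The schedule endpoints below delegate to CPBackupsV2.DeleteSchedule and
# CPBackupsV2.CreateScheduleV2 with the selected repo, frequency and (for
# creation) retention, plus the data/database/email toggles.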
def DeleteScheduleV2(request):
    try:
        userID = request.session['userID']
        data = json.loads(request.body)
        Selectedwebsite = data['Selectedwebsite']
        repo = data['repo']
        frequency = data['frequency']
        websiteData = data['websiteData']
        websiteDatabases = data['websiteDatabases']
        websiteEmails = data['websiteEmails']

        currentACL = ACLManager.loadedACL(userID)
        admin = Administrator.objects.get(pk=userID)
        if ACLManager.checkOwnership(str(Selectedwebsite), admin, currentACL) == 1:
            pass
        else:
            return ACLManager.loadError()

        status, message = CPBackupsV2.DeleteSchedule(Selectedwebsite, repo, frequency, websiteData,
                                                     websiteDatabases, websiteEmails)

        final_dic = {'status': 1, 'error_message': message}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

def CreateScheduleV2(request):
    try:
        userID = request.session['userID']
        data = json.loads(request.body)
        Selectedwebsite = data['Selectedwebsite']
        repo = data['repo']
        frequency = data['frequency']
        retention = data['retention']
        websiteData = data['websiteData'] if 'websiteData' in data else False
        websiteDatabases = data['websiteDatabases'] if 'websiteDatabases' in data else False
        websiteEmails = data['websiteEmails'] if 'websiteEmails' in data else False

        currentACL = ACLManager.loadedACL(userID)
        admin = Administrator.objects.get(pk=userID)
        if ACLManager.checkOwnership(str(Selectedwebsite), admin, currentACL) == 1:
            pass
        else:
            return ACLManager.loadError()

        status, message = CPBackupsV2.CreateScheduleV2(Selectedwebsite, repo, frequency, websiteData,
                                                       websiteDatabases, websiteEmails, retention)

        final_dic = {'status': 1, 'error_message': message}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)