# CyberPanel/plogical/Backupsv2.py
import argparse
import json
import os
import sys
import time
import requests
from django.http import HttpResponse

sys.path.append('/usr/local/CyberCP')
import django
import plogical.CyberCPLogFileWriter as logging
import plogical.mysqlUtilities as mysqlUtilities

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CyberCP.settings")
try:
django.setup()
except:
pass

from plogical.processUtilities import ProcessUtilities
import threading as multi

class CPBackupsV2(multi.Thread):
PENDING_START = 0
RUNNING = 1
COMPLETED = 2
FAILED = 3
### RCLONE BACKEND TYPES
SFTP = 1
LOCAL = 2
GDrive = 3

RUSTIC_PATH = '/usr/bin/rustic'
RCLONE_CONFIG = '/root/.config/rclone/rclone.conf'
command = 'rclone obscure hosting'

def __init__(self, data):
multi.Thread.__init__(self)
self.data = data
self.function = data['function']

### set self.website as it is needed in many functions
from websiteFunctions.models import Websites
self.website = Websites.objects.get(domain=self.data['domain'])

## Set up the repo name to be used
if self.data['BackendName'] != 'local':
self.repo = f"rclone:{self.data['BackendName']}:{self.data['domain']}"
else:
self.repo = f"rclone:{self.data['BackendName']}:/home/{self.data['domain']}/incrementalbackups"

### This will hold the IDs of all snapshots generated during this run; they are merged at the end
self.snapshots = []

## Status log files
self.StatusFile = f'/home/cyberpanel/{self.website.domain}_rustic_backup_log'
self.StatusFile_Restore = f'/home/cyberpanel/{self.website.domain}_rustic_backup_log_Restore'

## restore or backup?
self.restore = 0

if os.path.exists(self.StatusFile):
os.remove(self.StatusFile)
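
# Illustrative sketch only: the 'data' dict this class expects is assumed to look roughly like the
# one used in the __main__ block at the bottom of this file; other keys (e.g. 'ExcludedDatabases')
# are referenced in comments elsewhere and are not guaranteed here.
#
#   data = {
#       'function': 'InitiateBackup',        # or 'InitiateRestore'
#       'domain': 'example.com',             # must match an existing Websites row
#       'BackendName': 'local',              # 'local' or the name of an rclone remote
#       'BasePath': '/home/backup',          # where repository data is housed
#       'BackupData': 1, 'BackupDatabase': 1, 'BackupEmails': 1,
#   }
#   CPBackupsV2(data).start()                # Thread.start() dispatches to run() below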

def run(self):
try:
if self.function == 'InitiateBackup':
self.InitiateBackup()
elif self.function == 'InitiateRestore':
self.InitiateRestore()
except BaseException as msg:
logging.CyberCPLogFileWriter.writeToFile(str(msg) + ' [CPBackupsV2.run]')

def FetchSnapShots(self):
try:
command = f'rustic -r {self.repo} snapshots --password "" --json 2>/dev/null'
# SLSkjoSCczb6wxTMCBPmBMGq/UDSpp28-u cyber5986 rustic -r rclone:None:cyberpanel.net snapshots --password "" --json 2>/dev/null
result = json.loads(
ProcessUtilities.outputExecutioner(command, self.website.externalApp, True).rstrip('\n'))
return 1, result
except BaseException as msg:
return 0, str(msg)

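# Minimal usage sketch (assumes a populated 'data' dict as in the example near __init__):
# FetchSnapShots returns (1, parsed_json) on success or (0, error_string) on failure.
#
#   status, snapshots = CPBackupsV2(data).FetchSnapShots()
#   if status:
#       ...  # 'snapshots' is whatever `rustic snapshots --json` emitted for this repo
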
def SetupRcloneBackend(self, type, config):
self.LocalRclonePath = f'/home/{self.website.domain}/.config/rclone'
self.ConfigFilePath = f'{self.LocalRclonePath}/rclone.conf'
command = f'mkdir -p {self.LocalRclonePath}'
ProcessUtilities.executioner(command, self.website.externalApp)

command = f'cat {self.ConfigFilePath}'
CurrentContent = ProcessUtilities.outputExecutioner(command, self.website.externalApp)

try:
## if the config file does not exist yet, 'cat' returns an error message instead of content, so start from an empty config
if CurrentContent.find('No such file or directory') > -1:
CurrentContent = ''
except:
CurrentContent = ''

if type == CPBackupsV2.SFTP:
## config = {"name":, "host":, "user":, "port":, "path":, "password":,}
command = f'rclone obscure {config["password"]}'
ObscuredPassword = ProcessUtilities.outputExecutioner(command).rstrip('\n')

content = f'''{CurrentContent}
[{config["name"]}]
type = sftp
host = {config["host"]}
user = {config["user"]}
pass = {ObscuredPassword}
'''

command = f"echo '{content}' >> {self.ConfigFilePath}"
ProcessUtilities.executioner(command, self.website.externalApp, True)

command = f"chmod 600 {self.ConfigFilePath}"
ProcessUtilities.executioner(command, self.website.externalApp)
final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None", "data": None})
return HttpResponse(final_json)

elif type == CPBackupsV2.GDrive:
token = """{"access_token":"%s","token_type":"Bearer","refresh_token":"%s"}""" % (
config["token"], config["refresh_token"])

## rclone expects the Google Drive backend type to be 'drive'; keep the token under the same remote section
content = f'''{CurrentContent}
[{config["name"]}]
type = drive
client_id = ""
client_secret = ""
scope = drive
root_folder_id = ""
service_account_file = ""
token = {token}
'''

command = f"echo '{content}' >> {self.ConfigFilePath}"
ProcessUtilities.executioner(command, self.website.externalApp, True)

command = f"chmod 600 {self.ConfigFilePath}"
ProcessUtilities.executioner(command, self.website.externalApp)
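
# Illustrative call (values below are placeholders, not taken from the codebase): the SFTP branch
# reads the keys shown in the comment above and appends a remote to the per-site rclone.conf.
#
#   backup = CPBackupsV2({'function': 'InitiateBackup', 'domain': 'example.com', 'BackendName': 'remote-sftp'})
#   backup.SetupRcloneBackend(CPBackupsV2.SFTP, {
#       'name': 'remote-sftp', 'host': 'backup.example.com', 'user': 'backup',
#       'port': 22, 'path': '/backups', 'password': 'secret',
#   })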
@staticmethod
def FetchCurrentTimeStamp():
import time
return str(time.time())

def UpdateStatus(self, message, status):
if status == CPBackupsV2.FAILED:
self.website.BackupLock = 0
self.website.save()

### delete leftover db dumps if the backup fails (the path may not be set yet on early failures)
if hasattr(self, 'FinalPathRuctic'):
command = f'rm -f {self.FinalPathRuctic}/*.sql'
ProcessUtilities.executioner(command, None, True)

file = open(self.StatusFile, 'a')
file.writelines("[" + time.strftime(
"%m.%d.%Y_%H-%M-%S") + ":FAILED] " + message + "[404]" + "\n")
file.close()
elif status == CPBackupsV2.COMPLETED:
self.website.BackupLock = 0
self.website.save()
file = open(self.StatusFile, 'a')
file.writelines("[" + time.strftime(
"%m.%d.%Y_%H-%M-%S") + ":COMPLETED] " + message + "[200]" + "\n")
file.close()
else:
file = open(self.StatusFile, 'a')
file.writelines("[" + time.strftime(
"%m.%d.%Y_%H-%M-%S") + ":INFO] " + message + "\n")
file.close()

## parent is used to link this snapshot with master snapshot
def BackupConfig(self):
### Backup the config file into the rustic repo
command = f'chown {self.website.externalApp}:{self.website.externalApp} {self.FinalPathRuctic}'
ProcessUtilities.executioner(command)

command = f'rustic init -r {self.repo} --password ""'
ProcessUtilities.executioner(command, self.website.externalApp)

# command = f'chown cyberpanel:cyberpanel {self.FinalPathRuctic}'
# ProcessUtilities.executioner(command)

command = f'chown {self.website.externalApp}:{self.website.externalApp} {self.FinalPathRuctic}/config.json'
ProcessUtilities.executioner(command)

command = f'rustic -r {self.repo} backup {self.FinalPathRuctic}/config.json --json --password "" 2>/dev/null'
result = json.loads(ProcessUtilities.outputExecutioner(command, self.website.externalApp, True).rstrip('\n'))

try:
SnapShotID = result['id']  ## snapshot id that we need to store in db
files_new = result['summary']['files_new']  ## new files in this backup
total_duration = result['summary']['total_duration']  ## time taken
self.snapshots.append(SnapShotID)
except BaseException as msg:
self.UpdateStatus(f'Backup failed as no snapshot id found, error: {str(msg)}', CPBackupsV2.FAILED)
return 0

command = f'chown cyberpanel:cyberpanel {self.FinalPathRuctic}/config.json'
ProcessUtilities.executioner(command)

return 1

def MergeSnapshots(self):
snapshots = ''
for snapshot in self.snapshots:
snapshots = f'{snapshots} {snapshot}'

command = f'rustic -r {self.repo} merge {snapshots} --password "" --json'
result = ProcessUtilities.outputExecutioner(command, self.website.externalApp, True)
if os.path.exists(ProcessUtilities.debugPath):
logging.CyberCPLogFileWriter.writeToFile(result)

command = f'rustic -r {self.repo} forget {snapshots} --password ""'
result = ProcessUtilities.outputExecutioner(command, self.website.externalApp, True)

if os.path.exists(ProcessUtilities.debugPath):
logging.CyberCPLogFileWriter.writeToFile(result)

def InitiateBackup(self):
from websiteFunctions.models import Websites, Backupsv2
from django.forms.models import model_to_dict
from plogical.mysqlUtilities import mysqlUtilities

self.website = Websites.objects.get(domain=self.data['domain'])

## Base path is the path set by the user where all the backups will be housed
if not os.path.exists(self.data['BasePath']):
command = f"mkdir -p {self.data['BasePath']}"
ProcessUtilities.executioner(command)
command = f"chmod 711 {self.data['BasePath']}"
ProcessUtilities.executioner(command)

self.StartingTimeStamp = CPBackupsV2.FetchCurrentTimeStamp()

### Init the rustic repo in the main function so it does not have to be repeated in every helper
while True:
self.website = Websites.objects.get(domain=self.data['domain'])
if self.website.BackupLock == 0:

Disk1 = f"du -sm /home/{self.website.domain}/"
Disk2 = "2>/dev/null | awk '{print $1}'"
self.WebsiteDiskUsage = int(
ProcessUtilities.outputExecutioner(f"{Disk1} {Disk2}", 'root', True).rstrip('\n'))

self.CurrentFreeSpaceOnDisk = int(
ProcessUtilities.outputExecutioner("df -m / | awk 'NR==2 {print $4}'", 'root', True).rstrip('\n'))

if self.WebsiteDiskUsage > self.CurrentFreeSpaceOnDisk:
self.UpdateStatus(f'Not enough disk space on the server to backup this website.',
CPBackupsV2.FAILED)
return 0

### Before doing anything, install rustic
statusRes, message = self.InstallRustic()
if statusRes == 0:
self.UpdateStatus(f'Failed to install Rustic, error: {message}', CPBackupsV2.FAILED)
return 0

# = Backupsv2(website=self.website, fileName='backup-' + self.data['domain'] + "-" + time.strftime("%m.%d.%Y_%H-%M-%S"), status=CPBackupsV2.RUNNING, BasePath=self.data['BasePath'])
# self.buv2.save()
# self.FinalPath = f"{self.data['BasePath']}/{self.buv2.fileName}"

### Rustic backup final path
self.FinalPathRuctic = f"{self.data['BasePath']}/{self.website.domain}"

# command = f"mkdir -p {self.FinalPath}"
# ProcessUtilities.executioner(command)
# command = f"chown {website.externalApp}:{website.externalApp} {self.FinalPath}"
# ProcessUtilities.executioner(command)
# command = f'chown cyberpanel:cyberpanel {self.FinalPath}'
# ProcessUtilities.executioner(command)
# command = f"chmod 711 {self.FinalPath}"
# ProcessUtilities.executioner(command)

command = f"mkdir -p {self.FinalPathRuctic}"
ProcessUtilities.executioner(command)
command = f'chown cyberpanel:cyberpanel {self.FinalPathRuctic}'
ProcessUtilities.executioner(command)
command = f"chmod 711 {self.FinalPathRuctic}"
ProcessUtilities.executioner(command)

try:
self.UpdateStatus('Creating backup config,0', CPBackupsV2.RUNNING)

Config = {'MainWebsite': model_to_dict(self.website,
fields=['domain', 'adminEmail', 'phpSelection', 'state',
'config'])}
Config['admin'] = model_to_dict(self.website.admin,
fields=['userName', 'password', 'firstName', 'lastName',
'email', 'type', 'owner', 'token', 'api', 'securityLevel',
'state', 'initWebsitesLimit', 'twoFA', 'secretKey',
'config'])
Config['acl'] = model_to_dict(self.website.admin.acl)

### Child domains to config
ChildsList = []
for childDomains in self.website.childdomains_set.all():
print(childDomains.domain)
ChildsList.append(model_to_dict(childDomains))

Config['ChildDomains'] = ChildsList

# print(str(Config))

### Databases
connection, cursor = mysqlUtilities.setupConnection()
if connection == 0:
return 0

dataBases = self.website.databases_set.all()
DBSList = []

for db in dataBases:
query = f"SELECT host,user FROM mysql.db WHERE db='{db.dbName}';"
cursor.execute(query)
DBUsers = cursor.fetchall()

UserList = []
for databaseUser in DBUsers:
query = f"SELECT password FROM `mysql`.`user` WHERE `Host`='{databaseUser[0]}' AND `User`='{databaseUser[1]}';"
cursor.execute(query)
resp = cursor.fetchall()
print(resp)
UserList.append({'user': databaseUser[1], 'host': databaseUser[0], 'password': resp[0][0]})

DBSList.append({db.dbName: UserList})

Config['databases'] = DBSList

WPSitesList = []
for wpsite in self.website.wpsites_set.all():
WPSitesList.append(model_to_dict(wpsite, fields=['title', 'path', 'FinalURL', 'AutoUpdates',
'PluginUpdates', 'ThemeUpdates',
'WPLockState']))
Config['WPSites'] = WPSitesList
self.config = Config

### DNS Records
from dns.models import Domains
self.dnsDomain = Domains.objects.get(name=self.website.domain)
DNSRecords = []
for record in self.dnsDomain.records_set.all():
DNSRecords.append(model_to_dict(record))
Config['MainDNSDomain'] = model_to_dict(self.dnsDomain)
Config['DNSRecords'] = DNSRecords

### Email accounts
try:
from mailServer.models import Domains
self.emailDomain = Domains.objects.get(domain=self.website.domain)
EmailAddrList = []
for record in self.emailDomain.eusers_set.all():
EmailAddrList.append(model_to_dict(record))
Config['MainEmailDomain'] = model_to_dict(self.emailDomain)
Config['EmailAddresses'] = EmailAddrList
except:
pass

# command = f"echo '{json.dumps(Config)}' > {self.FinalPath}/config.json"
# ProcessUtilities.executioner(command, self.website.externalApp, True)

command = f'chown cyberpanel:cyberpanel {self.FinalPathRuctic}/config.json'
ProcessUtilities.executioner(command)

WriteToFile = open(f'{self.FinalPathRuctic}/config.json', 'w')
WriteToFile.write(json.dumps(Config))
WriteToFile.close()

command = f"chmod 600 {self.FinalPathRuctic}/config.json"
ProcessUtilities.executioner(command)

if self.BackupConfig() == 0:
return 0

self.UpdateStatus('Backup config created,5', CPBackupsV2.RUNNING)

except BaseException as msg:
self.UpdateStatus(f'Failed during config generation, Error: {str(msg)}', CPBackupsV2.FAILED)
return 0

try:
if self.data['BackupDatabase']:
self.UpdateStatus('Backing up databases..,10', CPBackupsV2.RUNNING)
if self.BackupDataBasesRustic() == 0:
self.UpdateStatus(f'Failed to create backup for databases.', CPBackupsV2.FAILED)
return 0
self.UpdateStatus('Database backups completed successfully..,25', CPBackupsV2.RUNNING)

if self.data['BackupData']:
self.UpdateStatus('Backing up website data..,30', CPBackupsV2.RUNNING)
if self.BackupRustic() == 0:
return 0
self.UpdateStatus('Website data backup completed successfully..,70', CPBackupsV2.RUNNING)

if self.data['BackupEmails']:
self.UpdateStatus('Backing up emails..,75', CPBackupsV2.RUNNING)
if self.BackupEmailsRustic() == 0:
return 0
self.UpdateStatus('Emails backup completed successfully..,85', CPBackupsV2.RUNNING)

### Finally change the backup rustic folder to the website user owner
command = f'chown {self.website.externalApp}:{self.website.externalApp} {self.FinalPathRuctic}'
ProcessUtilities.executioner(command)

self.MergeSnapshots()

self.UpdateStatus('Completed', CPBackupsV2.COMPLETED)

return 1
break
except BaseException as msg:
self.UpdateStatus(f'Failed, Error: {str(msg)}', CPBackupsV2.FAILED)
return 0
else:
time.sleep(5)

### If the website lock has been held for more than 20 minutes, the old backup job is stuck or failed, so release the lock and continue
if float(CPBackupsV2.FetchCurrentTimeStamp()) > (float(self.StartingTimeStamp) + 1200):
self.website = Websites.objects.get(domain=self.data['domain'])
self.website.BackupLock = 0
self.website.save()

def BackupDataBasesRustic(self):
### This function backs up the databases of the website. It also needs to honour the databases that should be excluded;
### the excluded databases are in the list self.data['ExcludedDatabases'], and databases are only backed up when the
### BackupDatabase check is on, i.e. run this function only if self.data['BackupDatabase'] is 1.

# command = f'chown {self.website.externalApp}:{self.website.externalApp} {self.FinalPathRuctic}'
# ProcessUtilities.executioner(command)

command = f'rustic init -r {self.repo} --password ""'
ProcessUtilities.executioner(command, self.website.externalApp)

command = f'chown cyberpanel:cyberpanel {self.FinalPathRuctic}'
ProcessUtilities.executioner(command)

from plogical.mysqlUtilities import mysqlUtilities

for dbs in self.config['databases']:
### Pending: only back up the databases present in the list of databases that need backing up
for key, value in dbs.items():
print(f'DB {key}')
CurrentDBPath = f"{self.FinalPathRuctic}/{key}.sql"

DBResult, SnapID = mysqlUtilities.createDatabaseBackup(key, self.FinalPathRuctic, 1, self.repo,
self.website.externalApp)

if DBResult == 1:
self.snapshots.append(SnapID)

# command = f'chown {self.website.externalApp}:{self.website.externalApp} {CurrentDBPath}'
# ProcessUtilities.executioner(command)
## Now pack config into same thing
# command = f'chown {self.website.externalApp}:{self.website.externalApp} {self.FinalPathRuctic}/config.json'
# ProcessUtilities.executioner(command)
# command = f'rustic -r {self.repo} backup {CurrentDBPath} --password "" --json 2>/dev/null'
# print(f'db command rustic: {command}')
# result = json.loads(
# ProcessUtilities.outputExecutioner(command, self.website.externalApp, True).rstrip('\n'))
#
# try:
# SnapShotID = result['id'] ## snapshot id that we need to store in db
# files_new = result['summary']['files_new'] ## basically new files in backup
# total_duration = result['summary']['total_duration'] ## time taken
#
# self.snapshots.append(SnapShotID)
#
# ### Config is saved with each database, snapshot of config is attached to db snapshot with parent
#
# #self.BackupConfig(SnapShotID)
#
# command = f'chown cyberpanel:cyberpanel {self.FinalPathRuctic}'
# ProcessUtilities.executioner(command)
#
# except BaseException as msg:
# self.UpdateStatus(f'Backup failed as no snapshot id found, error: {str(msg)}',
# CPBackupsV2.FAILED)
# return 0
#
#
# for dbUsers in value:
# print(f'User: {dbUsers["user"]}, Host: {dbUsers["host"]}, Pass: {dbUsers["password"]}')
#
# command = f'rm -f {CurrentDBPath}'
# ProcessUtilities.executioner(command)

else:
command = f'rm -f {CurrentDBPath}'
ProcessUtilities.executioner(command)
self.UpdateStatus(f'Failed to create backup for database {key}.', CPBackupsV2.FAILED)
return 0

return 1

def BackupRustic(self):
### This function backs up the data of the website. It also needs to honour the directories that should be excluded;
### the excluded directories are in the list self.data['ExcludedDirectories'], and data is only backed up when the
### BackupData check is on, i.e. run this function only if self.data['BackupData'] is 1.

# command = f'chown {self.website.externalApp}:{self.website.externalApp} {self.FinalPathRuctic}'
# ProcessUtilities.executioner(command)

command = f'rustic init -r {self.repo} --password ""'
ProcessUtilities.executioner(command, self.website.externalApp)

source = f'/home/{self.website.domain}'

# command = f'chown {self.website.externalApp}:{self.website.externalApp} {self.FinalPathRuctic}/config.json'
# ProcessUtilities.executioner(command)

## Pending: add user provided folders to the exclude list
exclude = f' --exclude-if-present rusticbackup --exclude-if-present logs '

command = f'rustic -r {self.repo} backup {source} --password "" {exclude} --json 2>/dev/null'
result = json.loads(ProcessUtilities.outputExecutioner(command, self.website.externalApp, True).rstrip('\n'))

try:
SnapShotID = result['id']  ## snapshot id that we need to store in db
files_new = result['summary']['files_new']  ## new files in this backup
total_duration = result['summary']['total_duration']  ## time taken

self.snapshots.append(SnapShotID)

### Config is saved with each backup; the config snapshot used to be attached to the data snapshot via parent
# self.BackupConfig(SnapShotID)

except BaseException as msg:
self.UpdateStatus(f'Backup failed as no snapshot id found, error: {str(msg)}', CPBackupsV2.FAILED)
return 0

# self.UpdateStatus(f'Rustic command result id: {SnapShotID}, files new {files_new}, total_duration {total_duration}', CPBackupsV2.RUNNING)

return 1

def BackupEmailsRustic(self):
### This function backs up the emails of the website. It also needs to honour the emails that should be excluded;
### the excluded emails are in the list self.data['ExcludedEmails'], and emails are only backed up when the
### BackupEmails check is on, i.e. run this function only if self.data['BackupEmails'] is 1.

# command = f'chown {self.website.externalApp}:{self.website.externalApp} {self.FinalPathRuctic}'
# ProcessUtilities.executioner(command)

command = f'rustic init -r {self.repo} --password ""'
ProcessUtilities.executioner(command, self.website.externalApp)

# command = f'chown {self.website.externalApp}:{self.website.externalApp} {self.FinalPathRuctic}/config.json'
# ProcessUtilities.executioner(command)

source = f'/home/vmail/{self.website.domain}'

## Pending: add user provided folders to the exclude list
exclude = f' --exclude-if-present rusticbackup --exclude-if-present logs '

command = f'export RCLONE_CONFIG=/home/{self.website.domain}/.config/rclone/rclone.conf && rustic -r {self.repo} backup {source} --password "" {exclude} --json 2>/dev/null'
result = json.loads(ProcessUtilities.outputExecutioner(command, None, True).rstrip('\n'))

try:
SnapShotID = result['id']  ## snapshot id that we need to store in db
files_new = result['summary']['files_new']  ## new files in this backup
total_duration = result['summary']['total_duration']  ## time taken

self.snapshots.append(SnapShotID)

### Config is saved with each email backup; the config snapshot used to be attached to the email snapshot via parent
# self.BackupConfig(SnapShotID)

except BaseException as msg:
self.UpdateStatus(f'Backup failed as no snapshot id found, error: {str(msg)}', CPBackupsV2.FAILED)
return 0

return 1

#### Restore Functions
def InitiateRestore(self):
### If restoring, the status file should be the restore status file
self.restore = 1
# self.StatusFile = self.StatusFile_Restore

from websiteFunctions.models import Websites
from plogical.mysqlUtilities import mysqlUtilities

self.website = Websites.objects.get(domain=self.data['domain'])

self.UpdateStatus('Started restoring,20', CPBackupsV2.RUNNING)

## Base path is the path set by the user where all the backups will be housed
if not os.path.exists(self.data['BasePath']):
command = f"mkdir -p {self.data['BasePath']}"
ProcessUtilities.executioner(command)
command = f"chmod 711 {self.data['BasePath']}"
ProcessUtilities.executioner(command)

self.StartingTimeStamp = CPBackupsV2.FetchCurrentTimeStamp()

### Init the rustic repo in the main function so it does not have to be repeated in every helper
while True:
self.website = Websites.objects.get(domain=self.data['domain'])
if self.website.BackupLock == 0:
Disk1 = f"du -sm /home/{self.website.domain}/"
Disk2 = "2>/dev/null | awk '{print $1}'"
self.WebsiteDiskUsage = int(
ProcessUtilities.outputExecutioner(f"{Disk1} {Disk2}", 'root', True).rstrip('\n'))

self.CurrentFreeSpaceOnDisk = int(
ProcessUtilities.outputExecutioner("df -m / | awk 'NR==2 {print $4}'", 'root', True).rstrip('\n'))

if self.WebsiteDiskUsage > self.CurrentFreeSpaceOnDisk:
self.UpdateStatus(f'Not enough disk space on the server to restore this website.',
CPBackupsV2.FAILED)
return 0

### Rustic backup final path
self.FinalPathRuctic = f"{self.data['BasePath']}/{self.website.domain}"
command = f"mkdir -p {self.FinalPathRuctic}"
ProcessUtilities.executioner(command)
command = f'chown cyberpanel:cyberpanel {self.FinalPathRuctic}'
ProcessUtilities.executioner(command)
command = f"chmod 711 {self.FinalPathRuctic}"
ProcessUtilities.executioner(command)

### Find the restore path first; only if the path is a database dump do we restore it through MySQL
if self.data["path"].find('.sql') > -1:
## strip the trailing '.sql' to get the database name (rstrip would also eat trailing 's', 'q' or 'l' characters)
mysqlUtilities.restoreDatabaseBackup(self.data["path"][:-len('.sql')], None, None, None, None, 1,
self.repo, self.website.externalApp, self.data["snapshotid"])
else:
if self.data["path"].find('/home/vmail') > -1:
externalApp = None
else:
externalApp = self.website.externalApp

command = f'rustic -r {self.repo} restore {self.data["snapshotid"]}:{self.data["path"]} {self.data["path"]} --password "" 2>/dev/null'
result = ProcessUtilities.outputExecutioner(command, externalApp, True)

if os.path.exists(ProcessUtilities.debugPath):
logging.CyberCPLogFileWriter.writeToFile(result)

self.UpdateStatus('Completed', CPBackupsV2.COMPLETED)

return 1

else:
time.sleep(5)

### If the website lock has been held for more than 20 minutes, the old job is stuck or failed, so release the lock and continue
if float(CPBackupsV2.FetchCurrentTimeStamp()) > (float(self.StartingTimeStamp) + 1200):
self.website = Websites.objects.get(domain=self.data['domain'])
self.website.BackupLock = 0
self.website.save()

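# Illustrative restore call (values are placeholders): InitiateRestore additionally reads
# self.data['path'] (what to restore) and self.data['snapshotid'] (which snapshot to restore from).
#
#   CPBackupsV2({'function': 'InitiateRestore', 'domain': 'example.com', 'BackendName': 'local',
#                'BasePath': '/home/backup', 'path': '/home/example.com/public_html',
#                'snapshotid': 'a1b2c3d4'}).start()
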
### Delete Snapshots
def DeleteSnapshots(self, deleteString):
### Remove the given snapshots from the repo and prune the data they referenced
from websiteFunctions.models import Websites
self.website = Websites.objects.get(domain=self.data['domain'])
command = f'rustic -r {self.repo} forget {deleteString} --prune --password "" 2>/dev/null'
result = ProcessUtilities.outputExecutioner(command, self.website.externalApp, True)
if os.path.exists(ProcessUtilities.debugPath):
logging.CyberCPLogFileWriter.writeToFile(result)
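
# Usage sketch (assumption: 'deleteString' is passed straight to `rustic forget`, so it is a
# space separated list of snapshot IDs, exactly as collected in self.snapshots; the IDs below
# are placeholders):
#
#   CPBackupsV2({'function': 'InitiateBackup', 'domain': 'example.com',
#                'BackendName': 'local'}).DeleteSnapshots('a1b2c3d4 e5f6a7b8')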
@staticmethod
def FetchCurrentSchedules(website):
try:
finalConfigPath = f'/home/cyberpanel/v2backups/{website}'
if os.path.exists(finalConfigPath):
command = f'cat {finalConfigPath}'
RetResult = ProcessUtilities.outputExecutioner(command)
print(repr(RetResult))
BackupConfig = json.loads(ProcessUtilities.outputExecutioner(command).rstrip('\n'))
schedules = []
for value in BackupConfig['schedules']:
schedules.append({
'repo': value['repo'],
'frequency': value['frequency'],
'websiteData': value['websiteData'],
'websiteEmails': value['websiteEmails'],
'websiteDatabases': value['websiteDatabases'],
'lastRun': value['lastRun'],
'retention': value['retention'],
'domain': website
})
return 1, schedules
else:
return 1, []
except BaseException as msg:
return 0, str(msg)
@staticmethod
def DeleteSchedule(website, repo, frequency, websiteData, websiteDatabases, websiteEmails):
try:
finalConfigPath = f'/home/cyberpanel/v2backups/{website}'
if os.path.exists(finalConfigPath):
command = f'cat {finalConfigPath}'
RetResult = ProcessUtilities.outputExecutioner(command)
print(repr(RetResult))
BackupConfig = json.loads(ProcessUtilities.outputExecutioner(command).rstrip('\n'))
counter = 0
for value in BackupConfig['schedules']:
if value['repo'] == repo and value['frequency'] == frequency and value['websiteData'] == websiteData and \
value['websiteEmails'] == websiteEmails and value['websiteDatabases'] == websiteDatabases:
del BackupConfig['schedules'][counter]
break
else:
counter = counter + 1
FinalContent = json.dumps(BackupConfig)
WriteToFile = open(finalConfigPath, 'w')
WriteToFile.write(FinalContent)
WriteToFile.close()
return 1, BackupConfig
else:
return 1, []
except BaseException as msg:
return 0, str(msg)
@staticmethod
def CreateScheduleV2(website, repo, frequency, websiteData, websiteDatabases, websiteEmails, retention):
try:
finalConfigPath = f'/home/cyberpanel/v2backups/{website}'
if os.path.exists(finalConfigPath):
logging.CyberCPLogFileWriter.writeToFile('22222')
command = f'cat {finalConfigPath}'
RetResult = ProcessUtilities.outputExecutioner(command)
print(repr(RetResult))
BackupConfig = json.loads(ProcessUtilities.outputExecutioner(command).rstrip('\n'))
try:
BackupConfig['schedules'].append({"repo": repo, "retention": retention, "frequency": frequency, "websiteData": websiteData,
"websiteEmails": websiteEmails, "websiteDatabases": websiteDatabases,
"lastRun": ""})
except:
BackupConfig['schedules'] = [{"repo": repo, "retention": retention, "frequency": frequency, "websiteData": websiteData,
"websiteEmails": websiteEmails, "websiteDatabases": websiteDatabases,
"lastRun": ""}]
# BackupConfig['schedules'] = {"retention": "7", "frequency": frequency, "websiteData": websiteData,
# "websiteEmails": websiteEmails, "websiteDatabases": websiteDatabases,
# "lastRun": ""}
FinalContent = json.dumps(BackupConfig)
WriteToFile = open(finalConfigPath, 'w')
WriteToFile.write(FinalContent)
WriteToFile.close()
return 1, BackupConfig
else:
BackupConfig = {'site': website,
'schedules':
[{"repo": repo, "retention": retention, "frequency": frequency,
"websiteData": websiteData,
"websiteEmails": websiteEmails, "websiteDatabases": websiteDatabases,
"lastRun": ""}]}
FinalContent = json.dumps(BackupConfig)
WriteToFile = open(finalConfigPath, 'w')
WriteToFile.write(FinalContent)
WriteToFile.close()
return 1, BackupConfig
except BaseException as msg:
return 0, str(msg)
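
# Illustrative call (parameter values are placeholders): schedules are stored as JSON in
# /home/cyberpanel/v2backups/<domain>, so creating a schedule might look like:
#
#   status, config = CPBackupsV2.CreateScheduleV2('example.com', 'local', 'Daily',
#                                                 websiteData=1, websiteDatabases=1,
#                                                 websiteEmails=0, retention='7')
#   if status == 0:
#       logging.CyberCPLogFileWriter.writeToFile(config)  # on failure 'config' holds the error string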
# def BackupEmails(self):
#
# ### This function will backup emails of the website, also need to take care of emails that we need to exclude
# ### excluded emails are in a list self.data['ExcludedEmails'] only backup data if backupemail check is on
# ## For example if self.data['BackupEmails'] is one then only run this function otherwise not
#
# destination = f'{self.FinalPath}/emails'
# source = f'/home/vmail/{self.website.domain}'
#
# ## Pending add user provided folders in the exclude list
#
# exclude = f'--exclude=.cache --exclude=.cache --exclude=.cache --exclude=.wp-cli ' \
# f'--exclude=backup --exclude=incbackup --exclude=incbackup --exclude=logs --exclude=lscache'
#
# command = f'mkdir -p {destination}'
# ProcessUtilities.executioner(command, 'cyberpanel')
#
# command = f'chown vmail:vmail {destination}'
# ProcessUtilities.executioner(command)
#
# command = f'rsync -av {source}/ {destination}/'
# ProcessUtilities.executioner(command, 'vmail')
#
# return 1
# def BackupDataBases(self):
#
# ### This function will backup databases of the website, also need to take care of database that we need to exclude
# ### excluded databases are in a list self.data['ExcludedDatabases'] only backup databases if backupdatabase check is on
# ## For example if self.data['BackupDatabase'] is one then only run this function otherwise not
#
# command = f'chown {self.website.externalApp}:{self.website.externalApp} {self.FinalPathRuctic}'
# ProcessUtilities.executioner(command)
#
# command = f'rustic init -r {self.FinalPathRuctic} --password ""'
# ProcessUtilities.executioner(command, self.website.externalApp)
#
# command = f'chown cyberpanel:cyberpanel {self.FinalPathRuctic}'
# ProcessUtilities.executioner(command)
#
# from plogical.mysqlUtilities import mysqlUtilities
#
# for dbs in self.config['databases']:
#
# ### Pending: Need to only backup database present in the list of databases that need backing up
#
# for key, value in dbs.items():
# print(f'DB {key}')
#
# if mysqlUtilities.createDatabaseBackup(key, self.FinalPath) == 0:
# self.UpdateStatus(f'Failed to create backup for database {key}.', CPBackupsV2.RUNNING)
# return 0
#
# for dbUsers in value:
# print(f'User: {dbUsers["user"]}, Host: {dbUsers["host"]}, Pass: {dbUsers["password"]}')
#
#
#
# return 1
# def BackupData(self):
#
# ### This function will backup data of the website, also need to take care of directories that we need to exclude
# ### excluded directories are in a list self.data['ExcludedDirectories'] only backup data if backupdata check is on
# ## For example if self.data['BackupData'] is one then only run this function otherwise not
#
# destination = f'{self.FinalPath}/data'
# source = f'/home/{self.website.domain}'
#
# ## Pending add user provided folders in the exclude list
#
# exclude = f'--exclude=.cache --exclude=.cache --exclude=.cache --exclude=.wp-cli ' \
# f'--exclude=backup --exclude=incbackup --exclude=incbackup --exclude=logs --exclude=lscache'
#
# command = f'mkdir -p {destination}'
# ProcessUtilities.executioner(command, 'cyberpanel')
#
# command = f'chown {self.website.externalApp}:{self.website.externalApp} {destination}'
# ProcessUtilities.executioner(command)
#
# command = f'rsync -av {exclude} {source}/ {destination}/'
# ProcessUtilities.executioner(command, self.website.externalApp)
#
# return 1
def InstallRustic(self):
try:
if not os.path.exists(CPBackupsV2.RUSTIC_PATH):

url = "https://api.github.com/repos/rustic-rs/rustic/releases/latest"
response = requests.get(url)

if response.status_code == 200:
data = response.json()
version = data['tag_name']
name = data['name']
else:
return 0, str(response.content)

# sudo mv filename /usr/bin/
command = 'wget -P /home/rustic https://github.com/rustic-rs/rustic/releases/download/%s/rustic-%s-x86_64-unknown-linux-musl.tar.gz' % (
version, version)
ProcessUtilities.executioner(command)

command = 'tar xzf /home/rustic/rustic-%s-x86_64-unknown-linux-musl.tar.gz -C /home/rustic//' % (
version)
ProcessUtilities.executioner(command)

command = 'sudo mv /home/rustic/rustic /usr/bin/'
ProcessUtilities.executioner(command)

command = 'rm -rf /home/rustic'
ProcessUtilities.executioner(command)

return 1, None

except BaseException as msg:
print('Error: %s' % msg)
return 0, str(msg)
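
# Return convention sketch (mirrors the call in InitiateBackup above): InstallRustic returns a
# (status, message) tuple, so callers check it as:
#
#   statusRes, message = self.InstallRustic()
#   if statusRes == 0:
#       self.UpdateStatus(f'Failed to install Rustic, error: {message}', CPBackupsV2.FAILED)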


if __name__ == "__main__":
try:
parser = argparse.ArgumentParser(description='CyberPanel Backup Generator')
parser.add_argument('function', help='Specify a function to call!')
parser.add_argument('--path', help='')
args = parser.parse_args()

if args.function == "BackupDataBases":
cpbuv2 = CPBackupsV2({'finalPath': args.path})
# cpbuv2.BackupDataBases()
except:
cpbuv2 = CPBackupsV2(
{'function': 'InitiateRestore', 'domain': 'cyberpanel.net', 'BasePath': '/home/backup', 'SnapShotID': 1,
'BackendName': 'usman'})
cpbuv2.InitiateRestore()