import os, sys

sys.path.append('/usr/local/CyberCP')

import django

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CyberCP.settings")

try:
    django.setup()
except:
    pass

import pexpect
from plogical import CyberCPLogFileWriter as logging
import subprocess
import shlex
from shutil import make_archive, rmtree
from plogical import mysqlUtilities
import tarfile
from multiprocessing import Process
import signal
from plogical.installUtilities import installUtilities
import argparse

try:
    from plogical.virtualHostUtilities import virtualHostUtilities
    from plogical.sslUtilities import sslUtilities
    from plogical.mailUtilities import mailUtilities
except:
    pass

from xml.etree.ElementTree import Element, SubElement
from xml.etree import ElementTree
from xml.dom import minidom
import time
from shutil import copy
from distutils.dir_util import copy_tree
from random import randint
from plogical.processUtilities import ProcessUtilities

try:
    from websiteFunctions.models import Websites, ChildDomains, Backups
    from databases.models import Databases
    from loginSystem.models import Administrator
    from plogical.dnsUtilities import DNS
    from mailServer.models import Domains as eDomains
    from backup.models import DBUsers
except:
    pass

VERSION = '2.0'
BUILD = 1

## I am not the monster that you think I am..


class backupUtilities:
    Server_root = "/usr/local/lsws"
    completeKeyPath = "/home/cyberpanel/.ssh"
    destinationsPath = "/home/cyberpanel/destinations"
    licenseKey = '/usr/local/lsws/conf/license.key'

    @staticmethod
    def prepareBackupMeta(backupDomain, backupName, tempStoragePath, backupPath):
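        """
        Build the meta.xml that describes a website backup: owning account
        details, child domains, databases (with credentials), aliases, DNS
        records and email accounts. The XML is written to a random file under
        /tmp, a pending Backups row is created, and the method returns
        (1, 'None', metaPath) on success or (0, errorMessage) on failure.
        """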
        try:

            status = os.path.join(backupPath, 'status')
            logging.CyberCPLogFileWriter.statusWriter(status, 'Setting up meta data..')

            website = Websites.objects.get(domain=backupDomain)

            ######### Generating meta

            ## XML Generation

            metaFileXML = Element('metaFile')

            child = SubElement(metaFileXML, 'VERSION')
            child.text = VERSION

            child = SubElement(metaFileXML, 'BUILD')
            child.text = str(BUILD)

            child = SubElement(metaFileXML, 'masterDomain')
            child.text = backupDomain

            child = SubElement(metaFileXML, 'phpSelection')
            child.text = website.phpSelection

            child = SubElement(metaFileXML, 'externalApp')
            child.text = website.externalApp

            ### Find user of site

            siteUser = website.admin

            child = SubElement(metaFileXML, 'userName')
            child.text = siteUser.userName

            child = SubElement(metaFileXML, 'userPassword')
            child.text = siteUser.password

            child = SubElement(metaFileXML, 'firstName')
            child.text = siteUser.firstName

            child = SubElement(metaFileXML, 'lastName')
            child.text = siteUser.lastName

            child = SubElement(metaFileXML, 'email')
            child.text = siteUser.email

            child = SubElement(metaFileXML, 'type')
            child.text = str(siteUser.type)

            child = SubElement(metaFileXML, 'owner')
            child.text = str(siteUser.owner)

            child = SubElement(metaFileXML, 'token')
            child.text = siteUser.token

            child = SubElement(metaFileXML, 'api')
            child.text = str(siteUser.api)

            child = SubElement(metaFileXML, 'securityLevel')
            child.text = str(siteUser.securityLevel)

            child = SubElement(metaFileXML, 'state')
            child.text = siteUser.state

            child = SubElement(metaFileXML, 'initWebsitesLimit')
            child.text = str(siteUser.initWebsitesLimit)

            child = SubElement(metaFileXML, 'aclName')
            child.text = siteUser.acl.name

            #####################

            childDomains = website.childdomains_set.all()
            databases = website.databases_set.all()

            ## Child domains XML

            childDomainsXML = Element('ChildDomains')

            for items in childDomains:
                childDomainXML = Element('domain')

                child = SubElement(childDomainXML, 'domain')
                child.text = items.domain
                child = SubElement(childDomainXML, 'phpSelection')
                child.text = items.phpSelection
                child = SubElement(childDomainXML, 'path')
                child.text = items.path

                childDomainsXML.append(childDomainXML)

            metaFileXML.append(childDomainsXML)

            ## Databases XML

            databasesXML = Element('Databases')

            for items in databases:
                try:
                    dbuser = DBUsers.objects.get(user=items.dbUser)
                    userToTry = items.dbUser
                except:
                    try:
                        dbusers = DBUsers.objects.all().filter(user=items.dbUser)
                        userToTry = items.dbUser
                        for it in dbusers:
                            dbuser = it
                            break

                        userToTry = mysqlUtilities.mysqlUtilities.fetchuser(items.dbName)

                        if userToTry == 0 or userToTry == 1:
                            continue

                        try:
                            dbuser = DBUsers.objects.get(user=userToTry)
                        except:
                            try:
                                dbusers = DBUsers.objects.all().filter(user=userToTry)
                                for it in dbusers:
                                    dbuser = it
                                    break
                            except BaseException as msg:
                                logging.CyberCPLogFileWriter.writeToFile(
                                    'While creating backup for %s, we failed to backup database %s. Error message: %s' % (
                                        backupDomain, items.dbName, str(msg)))
                                continue
                    except BaseException as msg:
                        logging.CyberCPLogFileWriter.writeToFile(
                            'While creating backup for %s, we failed to backup database %s. Error message: %s' % (
                                backupDomain, items.dbName, str(msg)))
                        continue

                databaseXML = Element('database')

                child = SubElement(databaseXML, 'dbName')
                child.text = str(items.dbName)
                child = SubElement(databaseXML, 'dbUser')
                child.text = str(userToTry)
                child = SubElement(databaseXML, 'password')
                child.text = str(dbuser.password)

                databasesXML.append(databaseXML)

            metaFileXML.append(databasesXML)

            ## Get Aliases

            try:
                aliasesXML = Element('Aliases')

                aliases = backupUtilities.getAliases(backupDomain)

                for items in aliases:
                    child = SubElement(aliasesXML, 'alias')
                    child.text = items

                metaFileXML.append(aliasesXML)

            except BaseException as msg:
                logging.CyberCPLogFileWriter.statusWriter(status, '%s. [167:prepMeta]' % (str(msg)))

            ## Finish Alias

            ## DNS Records XML

            try:

                dnsRecordsXML = Element("dnsrecords")
                dnsRecords = DNS.getDNSRecords(backupDomain)

                for items in dnsRecords:
                    dnsRecordXML = Element('dnsrecord')

                    child = SubElement(dnsRecordXML, 'type')
                    child.text = items.type
                    child = SubElement(dnsRecordXML, 'name')
                    child.text = items.name
                    child = SubElement(dnsRecordXML, 'content')
                    child.text = items.content
                    child = SubElement(dnsRecordXML, 'priority')
                    child.text = str(items.prio)

                    dnsRecordsXML.append(dnsRecordXML)

                metaFileXML.append(dnsRecordsXML)

            except BaseException as msg:
                logging.CyberCPLogFileWriter.statusWriter(status, '%s. [158:prepMeta]' % (str(msg)))

            ## Email accounts XML

            try:
                emailRecordsXML = Element('emails')
                eDomain = eDomains.objects.get(domain=backupDomain)
                emailAccounts = eDomain.eusers_set.all()

                for items in emailAccounts:
                    emailRecordXML = Element('emailAccount')

                    child = SubElement(emailRecordXML, 'email')
                    child.text = items.email
                    child = SubElement(emailRecordXML, 'password')
                    child.text = items.password

                    emailRecordsXML.append(emailRecordXML)

                metaFileXML.append(emailRecordsXML)
            except BaseException as msg:
                logging.CyberCPLogFileWriter.statusWriter(status, '%s. [179:prepMeta]' % (str(msg)))

            ## Email meta generated!

            def prettify(elem):
                """Return a pretty-printed XML string for the Element.
                """
                rough_string = ElementTree.tostring(elem, 'utf-8')
                reparsed = minidom.parseString(rough_string)
                return reparsed.toprettyxml(indent="  ")

            ## /home/example.com/backup/backup-example.com-02.13.2018_10-24-52/meta.xml -- metaPath

            metaPath = '/tmp/%s' % (str(randint(1000, 9999)))

            xmlpretty = prettify(metaFileXML).encode('ascii', 'ignore')
            metaFile = open(metaPath, 'w')
            metaFile.write(xmlpretty.decode())
            metaFile.close()
            os.chmod(metaPath, 0o777)

            ## meta generated

            newBackup = Backups(website=website, fileName=backupName, date=time.strftime("%m.%d.%Y_%H-%M-%S"),
                                size=0, status=1)
            newBackup.save()

            logging.CyberCPLogFileWriter.statusWriter(status, 'Meta data is ready..')

            return 1, 'None', metaPath

        except BaseException as msg:
            logging.CyberCPLogFileWriter.statusWriter(status, "%s [207][5009]" % (str(msg)))
            return 0, str(msg)

    @staticmethod
    def startBackup(tempStoragePath, backupName, backupPath, metaPath=None):
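        """
        First stage of a backup run: record the worker PID and the backup
        file name under backupPath, copy the supplied meta file into the
        temporary staging directory, save the vhost conf when a LiteSpeed
        license key is present, and copy the site's public_html into the
        staging directory. Databases and emails are handled by BackupRoot().
        """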
        try:

            ## /home/example.com/backup/backup-example.com-02.13.2018_10-24-52 -- tempStoragePath
            ## /home/example.com/backup - backupPath

            ##### Writing the name of backup file.

            ## /home/example.com/backup/backupFileName
            pidFile = '%sstartBackup' % (backupPath)
            writeToFile = open(pidFile, 'w')
            writeToFile.writelines(str(os.getpid()))
            writeToFile.close()

            backupFileNamePath = os.path.join(backupPath, "backupFileName")
            logging.CyberCPLogFileWriter.statusWriter(backupFileNamePath, backupName)

            #####

            status = os.path.join(backupPath, 'status')
            logging.CyberCPLogFileWriter.statusWriter(status, "Making archive of home directory.\n")

            ##### Parsing XML Meta file!

            ## /home/example.com/backup/backup-example.com-02.13.2018_10-24-52 -- tempStoragePath

            metaPathInBackup = os.path.join(tempStoragePath, 'meta.xml')

            if metaPath != None:
                writeToFile = open(metaPathInBackup, 'w')
                writeToFile.write(open(metaPath, 'r').read())
                writeToFile.close()

            backupMetaData = ElementTree.parse(metaPathInBackup)

            ##### Making archive of home directory

            domainName = backupMetaData.find('masterDomain').text

            ## Saving original vhost conf file

            completPathToConf = backupUtilities.Server_root + '/conf/vhosts/' + domainName + '/vhost.conf'

            if os.path.exists(backupUtilities.licenseKey):
                copy(completPathToConf, tempStoragePath + '/vhost.conf')

            ## /home/example.com/backup/backup-example.com-02.13.2018_10-24-52 -- tempStoragePath
            ## shutil.make_archive

            ## Stop making archive of document_root and copy instead

            copy_tree('/home/%s/public_html' % domainName, '%s/%s' % (tempStoragePath, 'public_html'))

            # make_archive(os.path.join(tempStoragePath,"public_html"), 'gztar', os.path.join("/home",domainName,"public_html"))

            ##

            logging.CyberCPLogFileWriter.statusWriter(status, "Backing up databases..")
            print('1,None')

        except BaseException as msg:
            try:
                os.remove(os.path.join(backupPath, backupName + ".tar.gz"))
            except:
                pass

            try:
                rmtree(tempStoragePath)
            except:
                pass

            status = os.path.join(backupPath, 'status')
            logging.CyberCPLogFileWriter.statusWriter(status, "Aborted, " + str(msg) + ".[365] [5009]")
            print(("Aborted, " + str(msg) + ".[365] [5009]"))

        os.remove(pidFile)

    @staticmethod
    def BackupRoot(tempStoragePath, backupName, backupPath, metaPath=None):
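        """
        Second stage of a backup run: copy Let's Encrypt certificates and
        saved vhost configs for the master and child domains, copy child
        domain document roots and /home/vmail data into the staging
        directory, refuse to continue if any involved path is a symlink,
        pack everything into <backupName>.tar.gz under backupPath, update
        the Backups record with the final size and remove the PID file.
        """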
        pidFile = '%sBackupRoot' % (backupPath)

        writeToFile = open(pidFile, 'w')
        writeToFile.writelines(str(os.getpid()))
        writeToFile.close()

        status = os.path.join(backupPath, 'status')
        metaPathInBackup = os.path.join(tempStoragePath, 'meta.xml')
        backupMetaData = ElementTree.parse(metaPathInBackup)

        domainName = backupMetaData.find('masterDomain').text

        ##### Saving SSL Certificates if any

        sslStoragePath = '/etc/letsencrypt/live/' + domainName

        if os.path.exists(sslStoragePath):
            try:
                copy(os.path.join(sslStoragePath, "cert.pem"), os.path.join(tempStoragePath, domainName + ".cert.pem"))
                copy(os.path.join(sslStoragePath, "fullchain.pem"),
                     os.path.join(tempStoragePath, domainName + ".fullchain.pem"))
                copy(os.path.join(sslStoragePath, "privkey.pem"),
                     os.path.join(tempStoragePath, domainName + ".privkey.pem"))
            except BaseException as msg:
                logging.CyberCPLogFileWriter.writeToFile('%s. [283:startBackup]' % (str(msg)))

        ## Child Domains SSL.

        childDomains = backupMetaData.findall('ChildDomains/domain')

        try:
            for childDomain in childDomains:

                actualChildDomain = childDomain.find('domain').text
                childPath = childDomain.find('path').text

                if os.path.exists(backupUtilities.licenseKey):
                    completPathToConf = backupUtilities.Server_root + '/conf/vhosts/' + actualChildDomain + '/vhost.conf'
                    copy(completPathToConf, tempStoragePath + '/' + actualChildDomain + '.vhost.conf')

                ### Storing SSL for child domains

                sslStoragePath = '/etc/letsencrypt/live/' + actualChildDomain

                if os.path.exists(sslStoragePath):
                    try:
                        copy(os.path.join(sslStoragePath, "cert.pem"),
                             os.path.join(tempStoragePath, actualChildDomain + ".cert.pem"))
                        copy(os.path.join(sslStoragePath, "fullchain.pem"),
                             os.path.join(tempStoragePath, actualChildDomain + ".fullchain.pem"))
                        copy(os.path.join(sslStoragePath, "privkey.pem"),
                             os.path.join(tempStoragePath, actualChildDomain + ".privkey.pem"))
                        make_archive(os.path.join(tempStoragePath, "sslData-" + domainName), 'gztar',
                                     sslStoragePath)
                    except:
                        pass

                if childPath.find('/home/%s/public_html' % domainName) == -1:
                    copy_tree(childPath, '%s/%s-docroot' % (tempStoragePath, actualChildDomain))

        except BaseException as msg:
            pass

        ## backup emails

        domainName = backupMetaData.find('masterDomain').text

        ## Check each path separately for symlinks (the original condition only islink-checked tempStoragePath's truthiness).
        if os.path.islink(status) or os.path.islink(tempStoragePath) or os.path.islink(backupPath) or os.path.islink(
                metaPath):
            logging.CyberCPLogFileWriter.writeToFile('symlinked.')
            logging.CyberCPLogFileWriter.statusWriter(status, 'Symlink attack. [365][5009]')
            return 0

        ## backup email accounts

        logging.CyberCPLogFileWriter.statusWriter(status, "Backing up email accounts..\n")

        emailPath = '/home/vmail/%s' % (domainName)

        if os.path.exists(emailPath):
            copy_tree(emailPath, '%s/vmail' % (tempStoragePath))

        ## shutil.make_archive. Creating final package.

        make_archive(os.path.join(backupPath, backupName), 'gztar', tempStoragePath)
        rmtree(tempStoragePath)

        ###
        backupFileNamePath = os.path.join(backupPath, "backupFileName")
        fileName = open(backupFileNamePath, 'r').read()

        backupObs = Backups.objects.filter(fileName=fileName)

        ## adding backup data to database.
        try:
            for items in backupObs:
                items.status = 1
                items.size = str(int(float(
                    os.path.getsize(os.path.join(backupPath, backupName + ".tar.gz"))) / (1024.0 * 1024.0))) + "MB"
                items.save()
        except:
            for items in backupObs:
                items.status = 1
                items.size = str(int(float(
                    os.path.getsize(os.path.join(backupPath, backupName + ".tar.gz"))) / (1024.0 * 1024.0))) + "MB"
                items.save()

        command = 'chmod 600 %s' % (os.path.join(backupPath, backupName + ".tar.gz"))
        ProcessUtilities.executioner(command)

        logging.CyberCPLogFileWriter.statusWriter(status, "Completed\n")
        os.remove(pidFile)

    @staticmethod
    def initiateBackup(tempStoragePath, backupName, backupPath):
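        """
        Launch startBackup() in a separate process and write its PID to
        '<backupPath>pid' so the job can be tracked or cancelled.
        """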
        try:
            p = Process(target=backupUtilities.startBackup, args=(tempStoragePath, backupName, backupPath,))
            p.start()
            pid = open(backupPath + 'pid', "w")
            pid.write(str(p.pid))
            pid.close()
        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [initiateBackup]")

    @staticmethod
    def createWebsiteFromBackup(backupFileOrig, dir):
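        """
        Recreate the website described by a backup's meta.xml: locate the
        extracted backup directory, recreate the owning Administrator if it
        no longer exists, run pre-creation checks, then create the virtual
        host, its databases and its DNS zone/records. Returns (1, 'None') on
        success or (0, errorMessage) on failure.
        """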
        try:
            ## Remove the .tar.gz suffix; str.strip() would strip characters, not the suffix.
            if backupFileOrig.endswith(".tar.gz"):
                backupFile = backupFileOrig[:-len(".tar.gz")]
            else:
                backupFile = backupFileOrig
            originalFile = "/home/backup/" + backupFileOrig

            if os.path.exists(backupFileOrig):
                path = backupFile
            elif not os.path.exists(originalFile):
                path = "/home/backup/transfer-" + str(dir) + "/" + backupFile
            else:
                path = "/home/backup/" + backupFile

            admin = Administrator.objects.get(userName='admin')

            ## open meta file to read data

            ## Parsing XML Meta file!

            backupMetaData = ElementTree.parse(os.path.join(path, 'meta.xml'))

            domain = backupMetaData.find('masterDomain').text
            phpSelection = backupMetaData.find('phpSelection').text
            externalApp = backupMetaData.find('externalApp').text

            ### Fetch user details

            try:
                userName = backupMetaData.find('userName').text

                try:
                    siteUser = Administrator.objects.get(userName=userName)
                except:
                    userPassword = backupMetaData.find('userPassword').text
                    firstName = backupMetaData.find('firstName').text
                    lastName = backupMetaData.find('lastName').text
                    email = backupMetaData.find('email').text
                    type = int(backupMetaData.find('type').text)
                    owner = int(backupMetaData.find('owner').text)
                    token = backupMetaData.find('token').text
                    api = int(backupMetaData.find('api').text)
                    securityLevel = int(backupMetaData.find('securityLevel').text)
                    state = backupMetaData.find('state').text
                    initWebsitesLimit = int(backupMetaData.find('initWebsitesLimit').text)
                    from loginSystem.models import ACL
                    acl = ACL.objects.get(name=backupMetaData.find('aclName').text)
                    siteUser = Administrator(userName=userName, password=userPassword, firstName=firstName,
                                             initWebsitesLimit=initWebsitesLimit, acl=acl,
                                             lastName=lastName, email=email, type=type, owner=owner, token=token,
                                             api=api, securityLevel=securityLevel, state=state)
                    siteUser.save()
            except:
                siteUser = Administrator.objects.get(userName='admin')

            ## Pre-creation checks

            if Websites.objects.filter(domain=domain).count() > 0:
                raise BaseException('This website already exists.')

            if ChildDomains.objects.filter(domain=domain).count() > 0:
                raise BaseException("This website already exists as child domain.")

            ####### Pre-creation checks ends

            ## Create Configurations

            result = virtualHostUtilities.createVirtualHost(domain, siteUser.email, phpSelection, externalApp, 0, 1, 0,
                                                            siteUser.userName, 'Default', 0)

            if result[0] == 0:
                raise BaseException(result[1])

            ## Create Configurations ends here

            ## Create databases

            databases = backupMetaData.findall('Databases/database')
            website = Websites.objects.get(domain=domain)

            for database in databases:
                dbName = database.find('dbName').text
                dbUser = database.find('dbUser').text

                if mysqlUtilities.mysqlUtilities.createDatabase(dbName, dbUser, "cyberpanel") == 0:
                    raise BaseException("Failed to create Databases!")

                newDB = Databases(website=website, dbName=dbName, dbUser=dbUser)
                newDB.save()

            ## Create dns zone

            dnsrecords = backupMetaData.findall('dnsrecords/dnsrecord')

            DNS.createDNSZone(domain, admin)

            zone = DNS.getZoneObject(domain)

            for dnsrecord in dnsrecords:
                recordType = dnsrecord.find('type').text
                value = dnsrecord.find('name').text
                content = dnsrecord.find('content').text
                prio = int(dnsrecord.find('priority').text)

                DNS.createDNSRecord(zone, value, recordType, content, prio, 3600)

            return 1, 'None'

        except BaseException as msg:
            return 0, str(msg)

    @staticmethod
    def startRestore(backupName, dir):
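        """
        Restore a full website from <backupName>: extract the archive,
        recreate the account, databases and DNS zone via
        createWebsiteFromBackup(), then restore SSL certificates, child
        domains, aliases, email accounts, database dumps, web home data and
        mailboxes, restart LiteSpeed and fix file ownership. 'dir' selects
        the source location (CyberPanelRestore, CLI, or a transfer-<id>
        directory).
        """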
        try:

            ## Remove the .tar.gz suffix; str.strip() would strip characters, not the suffix.
            if dir == "CyberPanelRestore":
                backupFileName = backupName[:-len(".tar.gz")] if backupName.endswith(".tar.gz") else backupName
                completPath = os.path.join("/home", "backup", backupFileName)  ## without extension
                originalFile = os.path.join("/home", "backup", backupName)  ## with extension
            elif dir == 'CLI':
                completPath = backupName[:-len(".tar.gz")] if backupName.endswith(".tar.gz") else backupName  ## without extension
                originalFile = backupName  ## with extension
            else:
                backupFileName = backupName[:-len(".tar.gz")] if backupName.endswith(".tar.gz") else backupName
                completPath = "/home/backup/transfer-" + str(dir) + "/" + backupFileName  ## without extension
                originalFile = "/home/backup/transfer-" + str(dir) + "/" + backupName  ## with extension

            pathToCompressedHome = os.path.join(completPath, "public_html.tar.gz")

            if not os.path.exists(completPath):
                os.mkdir(completPath)

            ## Writing pid of restore process

            pid = os.path.join(completPath, 'pid')

            logging.CyberCPLogFileWriter.statusWriter(pid, str(os.getpid()))

            status = os.path.join(completPath, 'status')
            logging.CyberCPLogFileWriter.statusWriter(status, "Extracting Main Archive!")

            ## Converting /home/backup/backup-example.com-02.13.2018_10-24-52.tar.gz -> /home/backup/backup-example.com-02.13.2018_10-24-52

            tar = tarfile.open(originalFile)
            tar.extractall(completPath)
            tar.close()

            logging.CyberCPLogFileWriter.statusWriter(status, "Creating Accounts,Databases and DNS records!")

            ########### Creating website and its databases

            ## extracting master domain for later use
            backupMetaData = ElementTree.parse(os.path.join(completPath, "meta.xml"))
            masterDomain = backupMetaData.find('masterDomain').text

            ## Backups produced by v2.0+ carry VERSION/BUILD tags and ship plain copies instead of nested archives.
            twoPointO = 0
            try:
                version = backupMetaData.find('VERSION').text
                build = backupMetaData.find('BUILD').text
                twoPointO = 1
            except:
                twoPointO = 0

            result = backupUtilities.createWebsiteFromBackup(backupName, dir)

            if result[0] == 1:
                ## Let us try to restore SSL.

                sslStoragePath = completPath + "/" + masterDomain + ".cert.pem"

                if os.path.exists(sslStoragePath):
                    sslHome = '/etc/letsencrypt/live/' + masterDomain

                    try:
                        if not os.path.exists(sslHome):
                            os.mkdir(sslHome)

                        copy(completPath + "/" + masterDomain + ".cert.pem", sslHome + "/cert.pem")
                        copy(completPath + "/" + masterDomain + ".privkey.pem", sslHome + "/privkey.pem")
                        copy(completPath + "/" + masterDomain + ".fullchain.pem", sslHome + "/fullchain.pem")

                        sslUtilities.installSSLForDomain(masterDomain)
                    except BaseException as msg:
                        logging.CyberCPLogFileWriter.writeToFile('%s. [555:startRestore]' % (str(msg)))

            else:
                logging.CyberCPLogFileWriter.statusWriter(status, "Error Message: " + result[1] + ". Not able to create Account, Databases and DNS Records, aborting. [575][5009]")
                return 0

            ########### Creating child/sub/addon/parked domains

            logging.CyberCPLogFileWriter.statusWriter(status, "Creating Child Domains!")

            ## Reading meta file to create subdomains

            externalApp = backupMetaData.find('externalApp').text
            websiteHome = os.path.join("/home", masterDomain, "public_html")

            ### Restoring Child Domains if any.

            childDomains = backupMetaData.findall('ChildDomains/domain')

            try:
                for childDomain in childDomains:

                    domain = childDomain.find('domain').text

                    ## mail domain check

                    mailDomain = 'mail.%s' % (masterDomain)

                    if domain == mailDomain:
                        continue

                    ## Mail domain check

                    phpSelection = childDomain.find('phpSelection').text
                    path = childDomain.find('path').text

                    retValues = virtualHostUtilities.createDomain(masterDomain, domain, phpSelection, path, 0, 0, 0,
                                                                  'admin', 0)

                    if retValues[0] == 1:
                        if os.path.exists(websiteHome):
                            rmtree(websiteHome)

                        ## Let us try to restore SSL for Child Domains.

                        try:

                            if os.path.exists(backupUtilities.licenseKey):
                                if os.path.exists(completPath + '/' + domain + '.vhost.conf'):
                                    completPathToConf = backupUtilities.Server_root + '/conf/vhosts/' + domain + '/vhost.conf'
                                    copy(completPath + '/' + domain + '.vhost.conf', completPathToConf)

                            sslStoragePath = completPath + "/" + domain + ".cert.pem"

                            if os.path.exists(sslStoragePath):
                                sslHome = '/etc/letsencrypt/live/' + domain

                                try:
                                    if not os.path.exists(sslHome):
                                        os.mkdir(sslHome)

                                    copy(completPath + "/" + domain + ".cert.pem", sslHome + "/cert.pem")
                                    copy(completPath + "/" + domain + ".privkey.pem", sslHome + "/privkey.pem")
                                    copy(completPath + "/" + domain + ".fullchain.pem",
                                         sslHome + "/fullchain.pem")

                                    sslUtilities.installSSLForDomain(domain)
                                except:
                                    pass
                        except:
                            logging.CyberCPLogFileWriter.writeToFile(
                                'While restoring backup we had minor issues for rebuilding vhost conf for: ' + domain + '. However this will be auto healed.')

                        ## Only v2.0+ backups carry per-child-domain docroot copies, so guard on twoPointO
                        ## (version/build are undefined for older backups).
                        if twoPointO and (float(version) > 2.0 or float(build) > 0):
                            if path.find('/home/%s/public_html' % masterDomain) == -1:
                                copy_tree('%s/%s-docroot' % (completPath, domain), path)

                        continue
                    else:
                        logging.CyberCPLogFileWriter.writeToFile('Error domain %s' % (domain))
                        logging.CyberCPLogFileWriter.statusWriter(status, "Error Message: " + retValues[1] + ". Not able to create child domains, aborting. [635][5009]")
                        return 0
            except BaseException as msg:
                status = open(os.path.join(completPath, 'status'), "w")
                status.write("Error Message: " + str(msg) + ". Not able to create child domains, aborting. [638][5009]")
                status.close()
                logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [startRestore]")
                return 0

            ## Restore Aliases

            logging.CyberCPLogFileWriter.statusWriter(status, "Restoring Domain Aliases!")

            aliases = backupMetaData.findall('Aliases/alias')

            for items in aliases:
                virtualHostUtilities.createAlias(masterDomain, items.text, 0, "", "", "admin")

            ## Restoring email accounts

            logging.CyberCPLogFileWriter.statusWriter(status, "Restoring email accounts!")

            emailAccounts = backupMetaData.findall('emails/emailAccount')

            try:
                for emailAccount in emailAccounts:

                    email = emailAccount.find('email').text
                    username = email.split("@")[0]
                    password = emailAccount.find('password').text

                    result = mailUtilities.createEmailAccount(masterDomain, username, password, 'restore')
                    if result[0] == 0:
                        raise BaseException(result[1])
            except BaseException as msg:
                logging.CyberCPLogFileWriter.statusWriter(status, "Error Message: " + str(msg) + ". Not able to create email accounts, aborting. [671][5009]")
                logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [startRestore]")
                return 0

            ## Emails restored

            ## restoring databases

            logging.CyberCPLogFileWriter.statusWriter(status, "Restoring Databases!")

            databases = backupMetaData.findall('Databases/database')

            for database in databases:
                dbName = database.find('dbName').text
                password = database.find('password').text
                if mysqlUtilities.mysqlUtilities.restoreDatabaseBackup(dbName, completPath, password) == 0:
                    raise BaseException

            ## Databases restored

            logging.CyberCPLogFileWriter.statusWriter(status, "Extracting web home data!")

            # /home/backup/backup-example.com-02.13.2018_10-24-52/public_html.tar.gz
            ## From v2.0.0 onwards the web home is stored as a plain copy, so archive extraction only applies to older backups.

            if not twoPointO:
                tar = tarfile.open(pathToCompressedHome)
                tar.extractall(websiteHome)
                tar.close()
            else:
                if float(version) > 2.0 or float(build) > 0:
                    copy_tree('%s/public_html' % (completPath), websiteHome)

            ## extracting email accounts

            logging.CyberCPLogFileWriter.statusWriter(status, "Extracting email accounts!")

            if not twoPointO:

                try:
                    pathToCompressedEmails = os.path.join(completPath, masterDomain + ".tar.gz")
                    emailHome = os.path.join("/home", "vmail", masterDomain)

                    tar = tarfile.open(pathToCompressedEmails)
                    tar.extractall(emailHome)
                    tar.close()

                    ## Change permissions

                    command = "chown -R vmail:vmail " + emailHome
                    subprocess.call(shlex.split(command))

                except:
                    pass
            else:

                emailsPath = '%s/vmail' % (completPath)

                if os.path.exists(emailsPath):
                    copy_tree(emailsPath, '/home/vmail/%s' % (masterDomain))

                    command = "chown -R vmail:vmail /home/vmail/%s" % (masterDomain)
                    ProcessUtilities.executioner(command)

            ## emails extracted

            if os.path.exists(backupUtilities.licenseKey):
                completPathToConf = backupUtilities.Server_root + '/conf/vhosts/' + masterDomain + '/vhost.conf'
                if os.path.exists(completPath + '/vhost.conf'):
                    copy(completPath + '/vhost.conf', completPathToConf)

            logging.CyberCPLogFileWriter.statusWriter(status, "Done")

            installUtilities.reStartLiteSpeed()

            ## Fix permissions

            from filemanager.filemanager import FileManager

            fm = FileManager(None, None)
            fm.fixPermissions(masterDomain)

        except BaseException as msg:
            status = os.path.join(completPath, 'status')
            logging.CyberCPLogFileWriter.statusWriter(status, str(msg) + " [736][5009]")
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [startRestore]")

    @staticmethod
    def initiateRestore(backupName, dir):
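        """
        Launch startRestore() in a separate background process.
        """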
        try:
            p = Process(target=backupUtilities.startRestore, args=(backupName, dir,))
            p.start()
        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [initiateRestore]")

    @staticmethod
    def sendKey(IPAddress, password, port='22', user='root'):
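        """
        Copy /root/.ssh/cyberpanel.pub to the remote server's
        ~/.ssh/authorized_keys over scp, answering the password prompt via
        pexpect. Returns [1, "None"] on success or [0, errorMessage].
        """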
        try:

            expectation = []
            expectation.append("password:")
            expectation.append("Password:")
            expectation.append("Permission denied")
            expectation.append("100%")

            command = "scp -o StrictHostKeyChecking=no -P " + port + " /root/.ssh/cyberpanel.pub " + user + "@" + IPAddress + ":~/.ssh/authorized_keys"
            setupKeys = pexpect.spawn(command, timeout=3)

            index = setupKeys.expect(expectation)

            ## on first login attempt send password

            if index == 0:
                setupKeys.sendline(password)
                setupKeys.expect("100%")
                setupKeys.wait()
            elif index == 1:
                setupKeys.sendline(password)
                setupKeys.expect("100%")
                setupKeys.wait()
            elif index == 2:
                return [0, 'Please enable password authentication on your remote server.']
            elif index == 3:
                pass
            else:
                raise BaseException

            return [1, "None"]

        except pexpect.TIMEOUT as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [sendKey]")
            return [0, "TIMEOUT [sendKey]"]
        except pexpect.EOF as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [sendKey]")
            return [0, "EOF [sendKey]"]
        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [sendKey]")
            return [0, str(msg) + " [sendKey]"]

    @staticmethod
    def setupSSHKeys(IPAddress, password, port='22', user='root'):
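        """
        Prepare passwordless SSH to a remote backup destination: clear any
        stale host key, back up the remote ~/.ssh/authorized_keys to
        ~/.ssh/temp, then push the CyberPanel public key via sendKey().
        Returns [1, "None"] on success or [0, errorMessage].
        """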
        try:
            ## Checking for host verification

            backupUtilities.host_key_verification(IPAddress)

            if backupUtilities.checkIfHostIsUp(IPAddress) == 1:
                pass
            else:
                logging.CyberCPLogFileWriter.writeToFile("Host is Down.")
                # return [0,"Host is Down."]

            expectation = []
            expectation.append("password:")
            expectation.append("Password:")
            expectation.append("Permission denied")
            expectation.append("File exists")

            command = "ssh -o StrictHostKeyChecking=no -p " + port + ' ' + user + "@" + IPAddress + ' "mkdir ~/.ssh || rm -f ~/.ssh/temp && rm -f ~/.ssh/authorized_temp && cp ~/.ssh/authorized_keys ~/.ssh/temp"'
            setupKeys = pexpect.spawn(command, timeout=3)

            index = setupKeys.expect(expectation)

            ## on first login attempt send password

            if index == 0:
                setupKeys.sendline(password)
            elif index == 1:
                setupKeys.sendline(password)
            elif index == 2:
                return [0, 'Please enable password authentication on your remote server.']
            elif index == 3:
                pass
            else:
                raise BaseException

            ## If it asks for the password again, the provided password is wrong.

            expectation = []
            expectation.append("please try again.")
            expectation.append("Password:")
            expectation.append(pexpect.EOF)

            index = setupKeys.expect(expectation)

            if index == 0:
                return [0, "Wrong Password!"]
            elif index == 1:
                return [0, "Wrong Password!"]
            elif index == 2:
                setupKeys.wait()

                sendKey = backupUtilities.sendKey(IPAddress, password, port, user)

                if sendKey[0] == 1:
                    return [1, "None"]
                else:
                    return [0, sendKey[1]]

        except pexpect.TIMEOUT as msg:
            return [0, str(msg) + " [TIMEOUT setupSSHKeys]"]
        except BaseException as msg:
            return [0, str(msg) + " [setupSSHKeys]"]

    @staticmethod
    def checkIfHostIsUp(IPAddress):
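        """
        Ping the address once and return 1 if no packet loss is reported,
        otherwise 0.
        """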
        try:
            if subprocess.check_output(['ping', IPAddress, '-c 1']).decode("utf-8").find("0% packet loss") > -1:
                return 1
            else:
                return 0
        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + "[checkIfHostIsUp]")

    @staticmethod
    def checkConnection(IPAddress, password, port='22', user='root'):
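        """
        Verify that key-based SSH to the remote backup destination works,
        reading port and user overrides from the destinations file when
        present. Returns [1, "None"] if the login succeeds, otherwise
        [0, errorMessage].
        """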
        try:

            try:
                destinations = backupUtilities.destinationsPath
                data = open(destinations, 'r').readlines()
                port = data[1].strip("\n")
                user = data[2].strip("\n")
            except:
                port = "22"

            expectation = []
            expectation.append("password:")
            expectation.append("Password:")
            expectation.append("Last login")
            expectation.append(pexpect.EOF)
            expectation.append(pexpect.TIMEOUT)

            checkConn = pexpect.spawn(
                "sudo ssh -i /root/.ssh/cyberpanel -o StrictHostKeyChecking=no -p " + port + ' ' + user + "@" + IPAddress,
                timeout=3)
            index = checkConn.expect(expectation)

            if index == 0:
                subprocess.call(['kill', str(checkConn.pid)])
                logging.CyberCPLogFileWriter.writeToFile(
                    "Remote Server is not able to authenticate for transfer to initiate, IP Address:" + IPAddress)
                return [0, "Remote Server is not able to authenticate for transfer to initiate."]
            elif index == 1:
                subprocess.call(['kill', str(checkConn.pid)])
                logging.CyberCPLogFileWriter.writeToFile(
                    "Remote Server is not able to authenticate for transfer to initiate, IP Address:" + IPAddress)
                return [0, "Remote Server is not able to authenticate for transfer to initiate."]
            elif index == 2:
                subprocess.call(['kill', str(checkConn.pid)])
                return [1, "None"]
            elif index == 4:
                subprocess.call(['kill', str(checkConn.pid)])
                return [1, "None"]
            else:
                subprocess.call(['kill', str(checkConn.pid)])
                return [1, "None"]

        except pexpect.TIMEOUT as msg:
            logging.CyberCPLogFileWriter.writeToFile("Timeout " + IPAddress + " [checkConnection]")
            return [0, "371 Timeout while making connection to this server [checkConnection]"]
        except pexpect.EOF as msg:
            logging.CyberCPLogFileWriter.writeToFile("EOF " + IPAddress + "[checkConnection]")
            return [0, "374 Remote Server is not able to authenticate for transfer to initiate. [checkConnection]"]
        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " " + IPAddress + " [checkConnection]")
            return [0, "377 Remote Server is not able to authenticate for transfer to initiate. [checkConnection]"]

    @staticmethod
    def verifyHostKey(IPAddress, port='22', user='root'):
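        """
        Clear any stale known_hosts entry and open a throwaway SSH session so
        the remote host key gets accepted; the dummy password is never meant
        to log in. Returns [1, "None"] once the host key prompt is handled,
        otherwise [0, errorMessage].
        """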
        try:
            backupUtilities.host_key_verification(IPAddress)

            password = "hello"  ## dumb password, not used anywhere.

            expectation = []

            expectation.append("continue connecting (yes/no)?")
            expectation.append("password:")

            setupSSHKeys = pexpect.spawn("ssh -p " + port + ' ' + user + "@" + IPAddress, timeout=3)

            index = setupSSHKeys.expect(expectation)

            if index == 0:
                setupSSHKeys.sendline("yes")

                setupSSHKeys.expect("password:")
                setupSSHKeys.sendline(password)

                expectation = []

                expectation.append("password:")
                expectation.append(pexpect.EOF)

                innerIndex = setupSSHKeys.expect(expectation)

                if innerIndex == 0:
                    setupSSHKeys.kill(signal.SIGTERM)
                    return [1, "None"]
                elif innerIndex == 1:
                    setupSSHKeys.kill(signal.SIGTERM)
                    return [1, "None"]

            elif index == 1:

                setupSSHKeys.expect("password:")
                setupSSHKeys.sendline(password)

                expectation = []

                expectation.append("password:")
                expectation.append(pexpect.EOF)

                innerIndex = setupSSHKeys.expect(expectation)

                if innerIndex == 0:
                    setupSSHKeys.kill(signal.SIGTERM)
                    return [1, "None"]
                elif innerIndex == 1:
                    setupSSHKeys.kill(signal.SIGTERM)
                    return [1, "None"]

        except pexpect.TIMEOUT as msg:
            logging.CyberCPLogFileWriter.writeToFile("Timeout [verifyHostKey]")
            return [0, "Timeout [verifyHostKey]"]
        except pexpect.EOF as msg:
            logging.CyberCPLogFileWriter.writeToFile("EOF [verifyHostKey]")
            return [0, "EOF [verifyHostKey]"]
        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [verifyHostKey]")
            return [0, str(msg) + " [verifyHostKey]"]
|
2017-10-24 19:16:36 +05:00
|
|
|
|
|
|
|
|
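
    ## Note: verifyHostKey() only confirms that the remote host is reachable and
    ## presents a password prompt once its key is accepted; it intentionally uses a
    ## throw-away password and treats a repeated prompt or EOF as success.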

    @staticmethod
    def createBackupDir(IPAddress, port='22', user='root'):
        try:
            ## Create the remote ~/backup directory that will receive transferred archives.
            command = "sudo ssh -o StrictHostKeyChecking=no -p " + port + " -i /root/.ssh/cyberpanel " + user + "@" + IPAddress + " mkdir ~/backup"
            subprocess.call(shlex.split(command))

            ## Merge ~/.ssh/temp (the public key placed there earlier, presumably by
            ## setupSSHKeys) into authorized_keys on the remote side.
            command = "sudo ssh -o StrictHostKeyChecking=no -p " + port + " -i /root/.ssh/cyberpanel " + user + "@" + IPAddress + ' "cat ~/.ssh/authorized_keys ~/.ssh/temp > ~/.ssh/authorized_temp"'
            subprocess.call(shlex.split(command))

            command = "sudo ssh -o StrictHostKeyChecking=no -p " + port + " -i /root/.ssh/cyberpanel " + user + "@" + IPAddress + ' "cat ~/.ssh/authorized_temp > ~/.ssh/authorized_keys"'
            subprocess.call(shlex.split(command))

        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [createBackupDir]")
            return 0
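
    ## host_key_verification below simply drops any cached entry for the target host
    ## from the local known_hosts file (via ssh-keygen -R) so that a fresh host key
    ## can be accepted on the next connection attempt.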

    @staticmethod
    def host_key_verification(IPAddress):
        try:
            command = 'sudo ssh-keygen -R ' + IPAddress
            subprocess.call(shlex.split(command))
            return 1
        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [host_key_verification]")
            return 0

    @staticmethod
    def getAliases(masterDomain):
        try:
            aliases = []
            master = Websites.objects.get(domain=masterDomain)
            aliasDomains = master.aliasdomains_set.all()

            for items in aliasDomains:
                aliases.append(items.aliasDomain)

            return aliases

        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [getAliases]")
            print(0)
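
## The functions below run at module level and are dispatched from main() when this
## file is executed directly (see the argument parser at the bottom of the file).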

def submitBackupCreation(tempStoragePath, backupName, backupPath, backupDomain):
    try:
        ## /home/example.com/backup/backup-example.com-02.13.2018_10-24-52 -- tempStoragePath
        ## backup-example.com-02.13.2018_10-24-52 -- backup name
        ## /home/example.com/backup - backupPath
        ## /home/cyberpanel/1047.xml - metaPath

        status = os.path.join(backupPath, 'status')
        website = Websites.objects.get(domain=backupDomain)

        schedulerPath = '/home/cyberpanel/%s-backup.txt' % (backupDomain)

        ## Each path is only created/chowned when it is not an existing symlink;
        ## if a symlink is found, the job is marked as failed in the scheduler file.

        if not os.path.exists(backupPath) or not os.path.islink(backupPath):
            command = 'mkdir -p %s' % (backupPath)
            ProcessUtilities.executioner(command)
        else:
            writeToFile = open(schedulerPath, 'w')
            writeToFile.writelines('error')
            writeToFile.close()
            return 0

        if not os.path.exists(backupPath) or not os.path.islink(backupPath):
            command = 'chown -R %s:%s %s' % (website.externalApp, website.externalApp, backupPath)
            ProcessUtilities.executioner(command)
        else:
            writeToFile = open(schedulerPath, 'w')
            writeToFile.writelines('error')
            writeToFile.close()
            return 0

        ##

        if not os.path.exists(tempStoragePath) or not os.path.islink(tempStoragePath):
            command = 'mkdir -p %s' % (tempStoragePath)
            ProcessUtilities.executioner(command)
        else:
            writeToFile = open(schedulerPath, 'w')
            writeToFile.writelines('error')
            writeToFile.close()
            return 0

        if not os.path.exists(tempStoragePath) or not os.path.islink(tempStoragePath):
            command = 'chown -R %s:%s %s' % (website.externalApp, website.externalApp, tempStoragePath)
            ProcessUtilities.executioner(command)
        else:
            writeToFile = open(schedulerPath, 'w')
            writeToFile.writelines('error')
            writeToFile.close()
            return 0

        ##

        if not os.path.exists(status) or not os.path.islink(status):
            command = 'touch %s' % (status)
            ProcessUtilities.executioner(command)
        else:
            writeToFile = open(schedulerPath, 'w')
            writeToFile.writelines('error')
            writeToFile.close()
            return 0

        if not os.path.exists(status) or not os.path.islink(status):
            command = 'chown cyberpanel:cyberpanel %s' % (status)
            ProcessUtilities.executioner(command)
        else:
            writeToFile = open(schedulerPath, 'w')
            writeToFile.writelines('error')
            writeToFile.close()
            return 0

        result = backupUtilities.prepareBackupMeta(backupDomain, backupName, tempStoragePath, backupPath)

        if result[0] == 0:
            writeToFile = open(schedulerPath, 'w')
            writeToFile.writelines('error')
            writeToFile.close()
            logging.CyberCPLogFileWriter.statusWriter(status, str(result[1]) + ' [1084][5009]')
            return 0

        command = 'chown %s:%s %s' % (website.externalApp, website.externalApp, status)
        ProcessUtilities.executioner(command)

        ## Run the file-level backup (startBackup) as the site user.
        execPath = "sudo nice -n 10 /usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/backupUtilities.py"
        execPath = execPath + " startBackup --tempStoragePath " + tempStoragePath + " --backupName " \
                   + backupName + " --backupPath " + backupPath + ' --backupDomain ' + backupDomain + ' --metaPath %s' % (result[2])

        output = ProcessUtilities.outputExecutioner(execPath, website.externalApp)

        if output.find('[5009') > -1:
            logging.CyberCPLogFileWriter.writeToFile(output)
            writeToFile = open(schedulerPath, 'w')
            writeToFile.writelines('error')
            writeToFile.close()
            return 0

        ## Backing up databases

        backupMetaData = ElementTree.parse(result[2])
        databases = backupMetaData.findall('Databases/database')

        for database in databases:
            dbName = database.find('dbName').text

            if mysqlUtilities.mysqlUtilities.createDatabaseBackup(dbName, '/home/cyberpanel') == 0:
                writeToFile = open(schedulerPath, 'w')
                writeToFile.writelines('error')
                writeToFile.close()
                return 0

            command = 'mv /home/cyberpanel/%s.sql %s/%s.sql' % (dbName, tempStoragePath, dbName)
            ProcessUtilities.executioner(command, 'root')

        ##

        output = ProcessUtilities.outputExecutioner(execPath, website.externalApp)

        if output.find('1,None') > -1:
            ## Finish the root-owned part of the backup (BackupRoot) as root.
            execPath = "sudo nice -n 10 /usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/backupUtilities.py"
            execPath = execPath + " BackupRoot --tempStoragePath " + tempStoragePath + " --backupName " \
                       + backupName + " --backupPath " + backupPath + ' --backupDomain ' + backupDomain + ' --metaPath %s' % (result[2])

            ProcessUtilities.executioner(execPath, 'root')
        else:
            logging.CyberCPLogFileWriter.writeToFile(output)

        command = 'chown -R %s:%s %s' % (website.externalApp, website.externalApp, backupPath)
        ProcessUtilities.executioner(command)

        command = 'rm -f %s' % (result[2])
        ProcessUtilities.executioner(command, 'cyberpanel')

    except BaseException as msg:
        logging.CyberCPLogFileWriter.writeToFile(
            str(msg) + " [submitBackupCreation]")
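
## cancelBackupCreation is normally reached through main(), e.g. (illustrative values):
##   /usr/local/CyberCP/bin/python backupUtilities.py cancelBackupCreation \
##       --backupCancellationDomain example.com --fileName backup-example.com-02.13.2018_10-24-52
## It kills the recorded backup pid and removes the partially created archive.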

def cancelBackupCreation(backupCancellationDomain, fileName):
    try:
        path = "/home/" + backupCancellationDomain + "/backup/pid"

        pid = open(path, "r").readlines()[0]

        try:
            os.kill(int(pid), signal.SIGKILL)
        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [cancelBackupCreation]")

        backupPath = "/home/" + backupCancellationDomain + "/backup/"
        tempStoragePath = backupPath + fileName

        try:
            os.remove(tempStoragePath + ".tar.gz")
        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [cancelBackupCreation]")

        try:
            rmtree(tempStoragePath)
        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [cancelBackupCreation]")

        status = open(backupPath + 'status', "w")
        status.write("Aborted manually. [1165][5009]")
        status.close()
    except BaseException as msg:
        logging.CyberCPLogFileWriter.writeToFile(
            str(msg) + " [cancelBackupCreation]")
        print("0," + str(msg))
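
## submitRestore only forks the restore into a separate process and reports the
## outcome on stdout ("1,None" when the restore process starts, "0,<error>" otherwise).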

def submitRestore(backupFile, dir):
    try:
        p = Process(target=backupUtilities.startRestore, args=(backupFile, dir,))
        p.start()

        print("1,None")

    except BaseException as msg:
        logging.CyberCPLogFileWriter.writeToFile(
            str(msg) + " [submitRestore]")
        print("0," + str(msg))
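
## submitDestinationCreation wires the SSH helpers together: it exchanges keys with
## the remote server via setupSSHKeys() and, on success, prepares the remote
## ~/backup directory through createBackupDir().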

def submitDestinationCreation(ipAddress, password, port='22', user='root'):
    setupKeys = backupUtilities.setupSSHKeys(ipAddress, password, port, user)

    if setupKeys[0] == 1:
        backupUtilities.createBackupDir(ipAddress, port, user)
        print("1,None")
    else:
        print(setupKeys[1])

def getConnectionStatus(ipAddress):
    try:
        checkCon = backupUtilities.checkConnection(ipAddress)

        if checkCon[0] == 1:
            print("1,None")
        else:
            print(checkCon[1])

    except BaseException as msg:
        print(str(msg))
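
## main() exposes the functions above (plus startBackup/BackupRoot from the class)
## as a small CLI. Illustrative invocations (values are examples only):
##   /usr/local/CyberCP/bin/python backupUtilities.py getConnectionStatus --ipAddress 192.168.1.10
##   /usr/local/CyberCP/bin/python backupUtilities.py submitDestinationCreation \
##       --ipAddress 192.168.1.10 --password examplePassword --port 22 --user root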

def main():
    parser = argparse.ArgumentParser(description='CyberPanel Backup Utilities')
    parser.add_argument('function', help='Specify a function to call!')
    parser.add_argument('--tempStoragePath', help='')
    parser.add_argument('--backupName', help='')
    parser.add_argument('--backupPath', help='')
    parser.add_argument('--backupDomain', help='')
    parser.add_argument('--metaPath', help='')

    ## Destination Creation

    parser.add_argument('--ipAddress', help='')
    parser.add_argument('--password', help='')
    parser.add_argument('--port', help='')
    parser.add_argument('--user', help='')

    ## backup cancellation arguments

    parser.add_argument('--backupCancellationDomain', help='')
    parser.add_argument('--fileName', help='')

    ## backup restore arguments

    parser.add_argument('--backupFile', help='')
    parser.add_argument('--dir', help='')

    args = parser.parse_args()

    if args.function == "submitBackupCreation":
        submitBackupCreation(args.tempStoragePath, args.backupName, args.backupPath, args.backupDomain)
    elif args.function == "cancelBackupCreation":
        cancelBackupCreation(args.backupCancellationDomain, args.fileName)
    elif args.function == "submitRestore":
        submitRestore(args.backupFile, args.dir)
    elif args.function == "submitDestinationCreation":
        submitDestinationCreation(args.ipAddress, args.password, args.port, args.user)
    elif args.function == "getConnectionStatus":
        getConnectionStatus(args.ipAddress)
    elif args.function == "startBackup":
        backupUtilities.startBackup(args.tempStoragePath, args.backupName, args.backupPath, args.metaPath)
    elif args.function == "BackupRoot":
        backupUtilities.BackupRoot(args.tempStoragePath, args.backupName, args.backupPath, args.metaPath)

if __name__ == "__main__":
    main()