s3 completion
@@ -25,7 +25,7 @@ urlpatterns = [
    url(r'^cyberPanelVersion', views.cyberPanelVersion, name='cyberPanelVersion'),
    url(r'^putSSHkey', views.putSSHkey, name='putSSHkey'),
    url(r'^changeAdminPassword', views.changeAdminPassword, name='changeAdminPassword'),
    url(r'^runAWSBackups$', views.runAWSBackups, name='runAWSBackups'),
]
api/views.py
@@ -19,10 +19,11 @@ from plogical.website import WebsiteManager
from loginSystem.models import ACL
from plogical.acl import ACLManager
from firewall.models import FirewallRules
from s3Backups.s3Backups import S3Backups
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
# Create your views here.


def verifyConn(request):
    try:
        if request.method == 'POST':

@@ -641,3 +642,15 @@ def changeAdminPassword(request):

    json_data = json.dumps(data_ret)
    return HttpResponse(json_data)


def runAWSBackups(request):
    try:

        data = json.loads(request.body)
        randomFile = data['randomFile']

        if os.path.exists(randomFile):
            s3 = S3Backups(request, None, 'runAWSBackups')
            s3.start()
    except BaseException, msg:
        logging.writeToFile(str(msg) + ' [API.runAWSBackups]')
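For context, a minimal sketch of how this endpoint is expected to be triggered (mirroring the `main()` helper added to s3Backups.py later in this commit): the caller drops a throwaway gate file under /home/cyberpanel/ and POSTs its path, so the view only starts the backup thread when that file actually exists on disk. The helper name and the cleanup step are illustrative, not part of the commit; the URL, port, and gate-file idea come from the diff itself.

```python
# Illustrative caller for /api/runAWSBackups (function name is an assumption).
import json
import os
from random import randint

import requests


def trigger_aws_backups(base_url='http://localhost:5003'):
    gate_file = '/home/cyberpanel/' + str(randint(1000, 9999))
    open(gate_file, 'w').close()  # the view checks os.path.exists(randomFile)
    payload = json.dumps({'randomFile': gate_file})
    try:
        return requests.post(base_url + '/api/runAWSBackups', data=payload, verify=False)
    finally:
        if os.path.exists(gate_file):
            os.remove(gate_file)  # clean up the gate file after the request returns
```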
@@ -797,8 +797,8 @@ class preFlightsChecks:

        count = 0
        while (1):
            command = "wget http://cyberpanel.net/CyberPanel.1.7.4.tar.gz"
            # command = "wget http://cyberpanel.net/CyberPanelTemp.tar.gz"
            #command = "wget http://cyberpanel.net/CyberPanel.1.7.4.tar.gz"
            command = "wget http://cyberpanel.sh/CyberPanelTemp.tar.gz"
            res = subprocess.call(shlex.split(command))

            if preFlightsChecks.resFailed(self.distro, res):

@@ -818,8 +818,8 @@ class preFlightsChecks:

        count = 0
        while (1):
            command = "tar zxf CyberPanel.1.7.4.tar.gz"
            # command = "tar zxf CyberPanelTemp.tar.gz"
            #command = "tar zxf CyberPanel.1.7.4.tar.gz"
            command = "tar zxf CyberPanelTemp.tar.gz"

            res = subprocess.call(shlex.split(command))
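Both hunks above follow the same pattern: retry the download/extract command in a loop until it succeeds, with resFailed deciding whether to keep going. A minimal, bounded sketch of that pattern, using only the standard library, looks like this; the attempt cap and helper name are illustrative.

```python
# Bounded retry around a shell command, sketching the while(1) loops above.
import shlex
import subprocess


def run_with_retries(command, attempts=5):
    for _ in range(attempts):
        res = subprocess.call(shlex.split(command))
        if res == 0:  # wget/tar exit with 0 on success
            return True
    return False


# e.g. run_with_retries("wget http://cyberpanel.sh/CyberPanelTemp.tar.gz")
```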
@@ -219,12 +219,17 @@ class InstallCyberPanel:
                      'lsphp7? lsphp7?-common lsphp7?-curl lsphp7?-dev lsphp7?-imap lsphp7?-intl lsphp7?-json ' \
                      'lsphp7?-ldap lsphp7?-mysql lsphp7?-opcache lsphp7?-pspell lsphp7?-recode ' \
                      'lsphp7?-sqlite3 lsphp7?-tidy'

            res = os.system(command)
            if res != 0:
                InstallCyberPanel.stdOut("Failed to install PHP on Ubuntu.", 1, 1)

        else:
            command = 'yum -y groupinstall lsphp-all'

            install.preFlightsChecks.call(command, self.distro, '[installAllPHPVersions]',
                                          'Install PHP',
                                          1, 1, os.EX_OSERR)
            install.preFlightsChecks.call(command, self.distro, '[installAllPHPVersions]',
                                          'Install PHP',
                                          1, 1, os.EX_OSERR)

        InstallCyberPanel.stdOut("LiteSpeed PHPs successfully installed!", 1)
@@ -391,7 +396,7 @@ class InstallCyberPanel:
        command = "systemctl enable mysql"

        install.preFlightsChecks.call(command, self.distro, '[installMySQL]',
                                      'Install MySQL',
                                      'Enable MySQL',
                                      1, 1, os.EX_OSERR)

    def fixMariaDB(self):
@@ -2,6 +2,8 @@ acme==0.21.1
asn1crypto==0.24.0
Babel==0.9.6
backports.ssl-match-hostname==3.5.0.1
boto3==1.9.64
botocore==1.12.64
certbot==0.21.1
certifi==2018.4.16
cffi==1.11.5
@@ -11,21 +13,25 @@ configobj==4.7.2
cryptography==2.2.2
decorator==3.4.0
Django==1.11
docutils==0.14
enum34==1.1.6
funcsigs==1.0.2
future==0.16.0
futures==3.2.0
gunicorn==19.8.1
idna==2.6
iniparse==0.4
ipaddress==1.0.16
IPy==0.75
Jinja2==2.7.2
jmespath==0.9.3
josepy==1.1.0
jsonpatch==1.2
jsonpointer==1.9
kitchen==1.1.1
MarkupSafe==0.11
mock==2.0.0
MySQL-python==1.2.5
parsedatetime==2.4
pbr==4.0.4
perf==0.1
@@ -36,20 +42,34 @@ ptyprocess==0.6.0
pycparser==2.18
pycurl==7.19.0
pydns==2.3.6
pygobject==3.22.0
pygpgme==0.3
pyliblzma==0.5.3
pyOpenSSL==17.5.0
pyRFC3339==1.1
pyserial==2.6
python-dateutil==2.7.5
python-linux-procfs==0.4.9
pytz==2018.4
pyudev==0.15
pyxattr==0.5.1
PyYAML==3.10
requests==2.18.4
requests-file==1.4.3
s3transfer==0.1.13
schedutils==0.4
six==1.9.0
slip==0.4.0
slip.dbus==0.4.0
tldextract==2.2.0
urlgrabber==3.10
urllib3==1.22
virtualenv==16.0.0
yum-metadata-parser==1.1.4
zope.component==4.4.1
zope.deferredimport==4.3
zope.deprecation==4.3.0
zope.event==4.3.0
zope.hookable==4.2.0
zope.interface==4.5.0
zope.proxy==4.3.1
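The pins that matter for this commit are the AWS SDK stack: boto3==1.9.64, botocore==1.12.64, s3transfer==0.1.13 and jmespath==0.9.3. A quick sanity check that the pinned SDK can see a backup bucket might look like the following sketch; the bucket name is a placeholder and credentials are assumed to be configured already.

```python
# Minimal smoke test for the pinned boto3 stack; 'my-backup-bucket' is a placeholder.
import boto3


def can_reach_bucket(bucket_name='my-backup-bucket'):
    client = boto3.client('s3')
    try:
        client.head_bucket(Bucket=bucket_name)  # raises if missing or unauthorised
        return True
    except Exception:
        return False
```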
@@ -11,7 +11,8 @@ class BackupPlan(models.Model):
    bucket = models.CharField(max_length=50, default='NONE')
    freq = models.CharField(max_length=50)
    retention = models.IntegerField()
    lastRun = models.CharField(max_length=50, default='NEVER')
    type = models.CharField(max_length=5, default='AWS')
    lastRun = models.CharField(max_length=50, default='0:0:0')

class WebsitesInPlan(models.Model):
    owner = models.ForeignKey(BackupPlan,on_delete=models.CASCADE)

@@ -20,5 +21,6 @@ class WebsitesInPlan(models.Model):

class BackupLogs(models.Model):
    owner = models.ForeignKey(BackupPlan,on_delete=models.CASCADE)
    timeStamp = models.CharField(max_length=200)
    level = models.CharField(max_length=5)
    msg = models.CharField(max_length=500)
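With this change, lastRun stores a day:month:year stamp (the backup code below writes time.strftime("%d:%m:%Y") into it) and every BackupLogs row carries a human-readable timeStamp. A short sketch of how these fields are meant to be written and read, assuming the models above are importable from s3Backups.models:

```python
# Sketch of writing the new fields; model names come from the hunk above,
# the helper function is an assumption.
import time

from s3Backups.models import BackupPlan, BackupLogs


def log_and_stamp(plan_name, message):
    plan = BackupPlan.objects.get(name=plan_name)
    BackupLogs(owner=plan,
               timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
               level='INFO',
               msg=message).save()
    plan.lastRun = time.strftime("%d:%m:%Y")  # e.g. '17:06:2019' -> day:month:year
    plan.save()


# later: day, month, year = plan.lastRun.split(':')
```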
@@ -1,23 +1,29 @@
#!/usr/local/CyberCP/bin/python2
import os.path
import sys
import django
sys.path.append('/usr/local/CyberCP')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CyberCP.settings")
django.setup()
from django.shortcuts import HttpResponse
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
from plogical.httpProc import httpProc
from plogical.acl import ACLManager
import threading as multi
import argparse
from plogical.mailUtilities import mailUtilities
import boto3
import json
from .models import *
from math import ceil
import requests
import time
try:
    import os
    import os.path
    from django.shortcuts import HttpResponse
    from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
    from plogical.httpProc import httpProc
    from plogical.acl import ACLManager
    import threading as multi
    from plogical.mailUtilities import mailUtilities
    import boto3
    import json
    from .models import *
    from math import ceil
    import requests
    import time
    from random import randint
    import subprocess, shlex
except BaseException, msg:
    from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
    logging.writeToFile(str(msg))
    import threading as multi
    from random import randint
    import json
    import requests
    import subprocess, shlex


class S3Backups(multi.Thread):
@@ -33,6 +39,8 @@ class S3Backups(multi.Thread):
                self.connectAccount()
            elif self.function == 'forceRunAWSBackup':
                self.forceRunAWSBackup()
            elif self.function == 'runAWSBackups':
                self.runAWSBackups()
        except BaseException, msg:
            logging.writeToFile( str(msg) + ' [S3Backups.run]')
@@ -69,7 +77,7 @@ class S3Backups(multi.Thread):
            counter = 1

            for items in reversed(logs):
                dic = { 'id': items.id, 'level': items.level, 'mesg': items.msg }
                dic = { 'id': items.id, 'timeStamp': items.timeStamp, 'level': items.level, 'mesg': items.msg }
                if checker == 0:
                    json_data = json_data + json.dumps(dic)
                    checker = 1
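fetchBackupLogs builds its response by concatenating json.dumps() output per row, with the checker flag deciding whether to prepend a comma. An equivalent, arguably clearer way to produce the same payload, shown only as an illustration, is to collect dicts in a list and serialise once:

```python
# Equivalent construction of the fetchBackupLogs payload; BackupLogs rows are
# assumed to expose id/timeStamp/level/msg as in the hunk above.
import json


def logs_to_json(logs):
    rows = [{'id': items.id,
             'timeStamp': items.timeStamp,
             'level': items.level,
             'mesg': items.msg} for items in reversed(list(logs))]
    return json.dumps(rows)
```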
@@ -104,6 +112,24 @@ class S3Backups(multi.Thread):
            credFile.write(self.data['credData'])
            credFile.close()

            ##

            cronPath = '/etc/crontab'

            command = 'sudo cat ' + cronPath
            output = subprocess.check_output(shlex.split(command)).split('\n')

            insertCron = 1

            for items in output:
                if items.find('s3backups.py') > -1:
                    insertCron = 0
                    break

            if insertCron:
                command = 'echo "0 24 * * * root /usr/local/CyberCP/bin/python2 /usr/local/CyberCP/s3Backups/s3Backups.py" >> ' + cronPath
                subprocess.call(command, shell=True)

            return proc.ajax(1, None)

        except BaseException, msg:
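connectAccount now makes itself re-runnable: it only appends a crontab entry if no existing line already mentions the s3Backups script. A compact sketch of that idempotent insertion follows, under the assumption that /etc/crontab is the target and the process can write to it; note that cron's hour field accepts 0-23, so a daily midnight run is normally written as `0 0 * * *`.

```python
# Idempotent crontab insertion, sketched after the hunk above.
# The entry string is an assumption; cron hours run from 0 to 23.
def ensure_cron_entry(cron_path='/etc/crontab',
                      entry='0 0 * * * root /usr/local/CyberCP/bin/python2 '
                            '/usr/local/CyberCP/s3Backups/s3Backups.py'):
    with open(cron_path) as f:
        existing = f.read()
    if 's3Backups.py' in existing:
        return False                       # already scheduled, nothing to do
    with open(cron_path, 'a') as f:
        f.write(entry + '\n')
    return True
```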
@@ -350,7 +376,10 @@ class S3Backups(multi.Thread):
            r = requests.post("http://localhost:5003/backup/submitBackupCreation", data=finalData)

            data = json.loads(r.text)
            backupPath = data['tempStorage']
            try:
                backupPath = data['tempStorage']
            except:
                pass

            while (1):
                r = requests.post("http://localhost:5003/backup/backupStatus", data=finalData)
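Wrapping the tempStorage lookup in try/except lets createBackup tolerate a submitBackupCreation response that omits the key. A dict .get() expresses the same intent without the bare except; shown purely as an illustration:

```python
# Same tolerance for a missing 'tempStorage' key, without a bare except.
import json


def extract_backup_path(response_text):
    data = json.loads(response_text)
    return data.get('tempStorage')  # None when the backend omits the field
```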
@@ -368,39 +397,105 @@
            s3 = boto3.resource('s3')
            plan = BackupPlan.objects.get(name=self.data['planName'])
            bucketName = plan.bucket.strip('\n').strip(' ')
            runTime = time.strftime("%d:%m:%Y")

            ## Set Expiration for objects

            try:
                client = boto3.client('s3')
                client.put_bucket_lifecycle_configuration(
                    Bucket='string',
                    LifecycleConfiguration={
                        'Rules': [
                            {
                                'Expiration': {
                                    'Days': plan.retention,
                                    'ExpiredObjectDeleteMarker': True
                                },
                                'ID': plan.name,
                                'Prefix': '',
                                'Filter': {
                                    'Prefix': plan.name + '/',
                                },
                                'Status': 'Enabled',
                            },
                        ]
                    }
                )
            except BaseException, msg:
                BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR', msg=str(msg)).save()

            ##

            userID = self.request.session['userID']
            currentACL = ACLManager.loadedACL(userID)

            if currentACL['admin'] == 0:
                BackupLogs(owner=plan, level='INFO', msg='Unauthorised user tried to run AWS Backups.').save()
                BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='INFO', msg='Unauthorised user tried to run AWS Backups.').save()
                return 0

            BackupLogs(owner=plan,level='INFO', msg='Starting backup process..').save()
            BackupLogs(owner=plan,level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), msg='Starting backup process..').save()

            for items in plan.websitesinplan_set.all():
                result = self.createBackup(items.domain)
                if result[0]:
                    data = open(result[1] + ".tar.gz", 'rb')
                    s3.Bucket(bucketName).put_object(Key=result[1].split('/')[-1] + ".tar.gz", Body=data)
                    BackupLogs(owner=plan, level='INFO', msg='Backup successful for ' + items.domain + '.').save()
                    s3.Bucket(bucketName).put_object(Key=plan.name + '/' + runTime + '/' + result[1].split('/')[-1] + ".tar.gz", Body=data)
                    BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), msg='Backup successful for ' + items.domain + '.').save()
                else:
                    BackupLogs(owner=plan, level='ERROR', msg='Backup failed for ' + items.domain + '. Error: ' + result[1]).save()
                    BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), msg='Backup failed for ' + items.domain + '. Error: ' + result[1]).save()

            BackupLogs(owner=plan, level='INFO', msg='Backup Process Finished.').save()

            plan.lastRun = runTime
            plan.save()

            BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), msg='Backup Process Finished.').save()

        except BaseException, msg:
            logging.writeToFile(str(msg) + ' [S3Backups.runBackupPlan]')
            plan = BackupPlan.objects.get(name=self.data['planName'])
            BackupLogs(owner=plan, level='ERROR', msg=str(msg)).save()
            BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR', msg=str(msg)).save()
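Two S3 details in this block are worth spelling out: each archive is uploaded under a plan-name/run-date prefix, and a lifecycle rule scoped to that same prefix expires objects after plan.retention days. The following self-contained sketch shows both calls with a placeholder bucket name and made-up plan values; unlike the hunk above, which passes Bucket='string' to the lifecycle call, the sketch points the rule at the plan's own bucket.

```python
# Sketch: expire objects under a plan prefix and upload a dated backup.
# Bucket/plan arguments are placeholders; the boto3 calls are the same
# ones used in the hunk above.
import time

import boto3


def upload_with_retention(bucket_name, plan_name, retention_days, archive_path):
    run_time = time.strftime("%d:%m:%Y")

    # Lifecycle rule: delete anything under '<plan>/' after retention_days days.
    boto3.client('s3').put_bucket_lifecycle_configuration(
        Bucket=bucket_name,
        LifecycleConfiguration={
            'Rules': [{
                'ID': plan_name,
                'Filter': {'Prefix': plan_name + '/'},
                'Status': 'Enabled',
                'Expiration': {'Days': retention_days},
            }]
        })

    # Key layout matches the hunk: <plan>/<dd:mm:YYYY>/<archive>
    key = plan_name + '/' + run_time + '/' + archive_path.split('/')[-1]
    with open(archive_path, 'rb') as body:
        boto3.resource('s3').Bucket(bucket_name).put_object(Key=key, Body=body)
    return key
```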
    def runAWSBackups(self):
        try:
            admin = Administrator.objects.get(pk=1)
            self.request.session['userID'] = admin.pk

            for plan in BackupPlan.objects.all():
                lastRunDay = plan.lastRun.split(':')[0]
                lastRunMonth = plan.lastRun.split(':')[1]

                if plan.freq == 'Daily' and lastRunDay != time.strftime("%d"):
                    self.data = {}
                    self.data['planName'] = plan.name
                    self.forceRunAWSBackup()
                else:
                    if lastRunMonth == time.strftime("%m"):
                        days = int(time.strftime("%d")) - int(lastRunDay)
                        if days >=6:
                            self.data = {}
                            self.data['planName'] = plan.name
                            self.forceRunAWSBackup()
                    else:
                        days = 30 - int(lastRunDay)
                        days = days + int(time.strftime("%d"))
                        if days >=6:
                            self.data = {}
                            self.data['planName'] = plan.name
                            self.forceRunAWSBackup()

        except BaseException, msg:
            logging.writeToFile(str(msg) + ' [S3Backups.runAWSBackups]')
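The weekly branch estimates elapsed days from the day/month fields of lastRun and assumes 30-day months when the month has rolled over. For illustration only, the same decision ("run daily plans once per day, other plans every 6+ days") can be expressed with datetime arithmetic, which sidesteps the month-length assumption; the freq values and the 6-day threshold are taken from the hunk above, the helper itself is not part of the commit.

```python
# Illustrative scheduling check using datetime instead of manual day math.
# last_run is the 'dd:mm:YYYY' string stored on BackupPlan.
from datetime import date, datetime


def is_due(freq, last_run, today=None):
    today = today or date.today()
    if last_run == '0:0:0':                 # never run yet (model default)
        return True
    last = datetime.strptime(last_run, '%d:%m:%Y').date()
    elapsed = (today - last).days
    if freq == 'Daily':
        return elapsed >= 1
    return elapsed >= 6                     # non-daily plans, as in the hunk
```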

def main():

    parser = argparse.ArgumentParser(description='CyberPanel S3 Backups')
    parser.add_argument('function', help='Specify a function to call!')

    pathToFile = "/home/cyberpanel/" + str(randint(1000, 9999))
    file = open(pathToFile, "w")
    file.close()

    args = parser.parse_args()

    finalData = json.dumps({'randomFile': pathToFile})
    requests.post("http://localhost:5003/api/runAWSBackups", data=finalData,verify=False)

if __name__ == "__main__":
    main()
@@ -478,7 +478,6 @@ def topProcessesStatus(request):

        loadAVG = data[0].split(' ')
        loadAVG = filter(lambda a: a != '', loadAVG)
        logging.CyberCPLogFileWriter.writeToFile(str(loadAVG))

        loadNow = data[2].split(' ')
        loadNow = filter(lambda a: a != '', loadNow)

@@ -493,7 +492,6 @@ def topProcessesStatus(request):
        processes = data[1].split(' ')
        processes = filter(lambda a: a != '', processes)

        for items in data:
            counter = counter + 1
            if counter <= 7:

@@ -513,7 +511,6 @@ def topProcessesStatus(request):
            else:
                json_data = json_data + ',' + json.dumps(dic)

        json_data = json_data + ']'

        data = {}
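topProcessesStatus splits raw `top` output lines on single spaces and uses filter() to drop the empty strings produced by runs of spaces. A tiny sketch of that tokenising step, with a made-up header line; str.split() with no argument is the whitespace-collapsing shortcut that gives the same tokens.

```python
# Tokenising a 'top' header line; both forms drop the empty fields.
line = "top - 12:00:01 up 10 days,  3:04,  1 user,  load average: 0.01, 0.05, 0.10"
tokens = filter(lambda a: a != '', line.split(' '))   # pattern used in the hunk
tokens_alt = line.split()                             # whitespace-collapsing shortcut
```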
@@ -999,7 +999,7 @@ app.controller('remoteBackupControl', function($scope, $http, $timeout) {

    $scope.addRemoveWebsite = function (website,websiteStatus) {

        if(websiteStatus==true)
        if(websiteStatus === true)
        {
            var check = 1;
            for(var j = 0; j < websitesToBeBacked.length; j++){

@@ -1028,9 +1028,7 @@ app.controller('remoteBackupControl', function($scope, $http, $timeout) {

    $scope.allChecked = function (webSiteStatus) {

        if(webSiteStatus==true) {
        if(webSiteStatus === true) {

            websitesToBeBacked = websitesToBeBackedTemp;
            $scope.webSiteStatus = true;

@@ -1142,7 +1140,7 @@ app.controller('remoteBackupControl', function($scope, $http, $timeout) {

        if(websitesToBeBacked.length === 0){
            alert("No websites selected for transfer.")
            alert("No websites selected for transfer.");
            return;
        }