Bug fix: upload large files to S3

This commit is contained in:
usmannasir
2018-12-24 20:21:14 +05:00
parent db15dc03ed
commit 22646cdb5c
6 changed files with 79 additions and 20 deletions

View File

@@ -21,6 +21,8 @@ from cliParser import cliParser
 from plogical.vhost import vhost
 from plogical.mailUtilities import mailUtilities
 from plogical.ftpUtilities import FTPUtilities
+from plogical.sslUtilities import sslUtilities
+from plogical.processUtilities import ProcessUtilities
 # All that we see or seem is but a dream within a dream.
@@ -784,6 +786,35 @@ class cyberPanel:
             logger.writeforCLI(str(msg), "Error", stack()[0][3])
             self.printStatus(0, str(msg))

+    def issueSelfSignedSSL(self, virtualHost):
+        try:
+            try:
+                website = ChildDomains.objects.get(domain=virtualHost)
+                adminEmail = website.master.adminEmail
+            except:
+                website = Websites.objects.get(domain=virtualHost)
+                adminEmail = website.adminEmail
+
+            pathToStoreSSL = "/etc/letsencrypt/live/" + virtualHost
+            command = 'mkdir -p ' + pathToStoreSSL
+            ProcessUtilities.executioner(command)
+
+            pathToStoreSSLPrivKey = "/etc/letsencrypt/live/" + virtualHost + "/privkey.pem"
+            pathToStoreSSLFullChain = "/etc/letsencrypt/live/" + virtualHost + "/fullchain.pem"
+
+            command = 'openssl req -newkey rsa:2048 -new -nodes -x509 -days 3650 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout ' + pathToStoreSSLPrivKey + ' -out ' + pathToStoreSSLFullChain
+            ProcessUtilities.executioner(command)
+
+            sslUtilities.installSSLForDomain(virtualHost, adminEmail)
+            ProcessUtilities.restartLitespeed()
+            self.printStatus(1, 'None')
+        except BaseException, msg:
+            logger.writeforCLI(str(msg), "Error", stack()[0][3])
+            self.printStatus(0, str(msg))
+
 def main():
     parser = cliParser()
@@ -1267,6 +1298,15 @@ def main():
         cyberpanel.issueSSLForMailServer(args.domainName)

+    elif args.function == "issueSelfSignedSSL":
+
+        completeCommandExample = 'cyberpanel issueSelfSignedSSL --domainName cyberpanel.net'
+
+        if not args.domainName:
+            print "\n\nPlease enter Domain name. For example:\n\n" + completeCommandExample + "\n\n"
+            return
+
+        cyberpanel.issueSelfSignedSSL(args.domainName)

 if __name__ == "__main__":
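
The new issueSelfSignedSSL CLI function shells out to openssl to mint a ten-year self-signed certificate in the domain's /etc/letsencrypt/live/ directory, then installs it through sslUtilities and restarts LiteSpeed. A minimal standalone sketch of the same openssl invocation (paths and the domain are placeholders; unlike the commit, which hardcodes CN=www.example.com, this sketch puts the domain in the CN):

import subprocess

def issue_self_signed(domain, base='/etc/letsencrypt/live'):
    # Hypothetical helper mirroring the CLI function above.
    path = '%s/%s' % (base, domain)
    subprocess.call(['mkdir', '-p', path])
    subprocess.call([
        'openssl', 'req', '-newkey', 'rsa:2048', '-new', '-nodes', '-x509',
        '-days', '3650',
        '-subj', '/C=US/ST=Denial/L=Springfield/O=Dis/CN=' + domain,
        '-keyout', path + '/privkey.pem',
        '-out', path + '/fullchain.pem',
    ])

issue_self_signed('example.com')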

View File

@@ -22,6 +22,7 @@ from highAvailability.haManager import HAManager
 from plogical.httpProc import httpProc
 from s3Backups.s3Backups import S3Backups
 import os
+from serverStatus.views import topProcessesStatus, killProcess

 class CloudManager:
     def __init__(self, data=None, admin = None):
@@ -911,3 +912,18 @@ class CloudManager:
             return self.ajaxPre(1, None)
         except BaseException, msg:
             return self.ajaxPre(0, str(msg))
+
+    def systemStatus(self, request):
+        try:
+            return topProcessesStatus(request)
+        except BaseException, msg:
+            return self.ajaxPre(0, str(msg))
+
+    def killProcess(self, request):
+        try:
+            request.session['userID'] = self.admin.pk
+            return killProcess(request)
+        except BaseException, msg:
+            return self.ajaxPre(0, str(msg))
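
Both wrappers forward the Django request object to the existing serverStatus views; killProcess first writes the cloud admin's primary key into the session because the wrapped view authenticates via request.session['userID']. A sketch of that delegation pattern (the delegate helper and ajax_error callable are hypothetical names, not from this commit):

def delegate(view_func, request, admin, ajax_error):
    # Prime the session so the session-authenticated view accepts the call,
    # then hand the request through unchanged.
    try:
        request.session['userID'] = admin.pk
        return view_func(request)
    except BaseException as msg:
        return ajax_error(0, str(msg))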

View File

@@ -197,6 +197,10 @@ def router(request):
         return cm.fetchBackupLogs(request)
     elif controller == 'forceRunAWSBackup':
         return cm.forceRunAWSBackup(request)
+    elif controller == 'systemStatus':
+        return cm.systemStatus(request)
+    elif controller == 'killProcess':
+        return cm.killProcess(request)
     else:
         return cm.ajaxPre(0, 'This function is not available in your version of CyberPanel.')
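
With the router entries in place, the new functions are reachable through the cloud API's controller dispatch. A hypothetical client call (the endpoint URL is an assumption, not taken from this commit; only the controller-name dispatch shown above is):

import requests

resp = requests.post(
    'https://panel.example.com:8090/cloudAPI/',  # assumed endpoint path
    json={'controller': 'systemStatus'},         # dispatched by router() above
    verify=False,                                # panels often run self-signed certs
)
print(resp.text)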

View File

@@ -1,8 +0,0 @@
-import boto3
-
-# Let's use Amazon S3
-s3 = boto3.resource('s3')
-
-# Print out bucket names
-for bucket in s3.buckets.all():
-    print(bucket.name)

View File

@@ -681,13 +681,14 @@ WantedBy=multi-user.target"""
         command = 'yum -y install pcre-devel openssl-devel expat-devel geoip-devel zlib-devel udns-devel which curl'
         Upgrade.executioner(command, 'LSCPD Pre-reqs [two]', 0)

-        command = 'wget https://cyberpanel.net/lscp.tar.gz'
-        Upgrade.executioner(command, 'Download LSCPD [two]', 0)

         ##

+        if os.path.exists('/usr/local/lscp.tar.gz'):
+            os.remove('/usr/local/lscp.tar.gz')
+
+        command = 'wget https://cyberpanel.net/lscp.tar.gz'
+        Upgrade.executioner(command, 'Download LSCPD [two]', 0)

         ##

         command = 'tar zxf lscp.tar.gz -C /usr/local/'
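
The re-download fix matters because, with a leftover /usr/local/lscp.tar.gz in place, a plain wget saves the fresh archive as lscp.tar.gz.1, so the following tar keeps extracting the stale copy. The same idea as a standalone sketch (URL and path from the commit; wget's -O flag additionally forces the destination name):

import os
import subprocess

target = '/usr/local/lscp.tar.gz'
if os.path.exists(target):
    os.remove(target)  # drop the stale archive first
subprocess.call(['wget', 'https://cyberpanel.net/lscp.tar.gz', '-O', target])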

View File

@@ -9,6 +9,7 @@ try:
     import threading as multi
     from plogical.mailUtilities import mailUtilities
     import boto3
+    from boto3.s3.transfer import TransferConfig
     import json
     from .models import *
     from math import ceil
@@ -75,7 +76,7 @@ class S3Backups(multi.Thread):
         checker = 0
         counter = 1

-        for items in reversed(logs):
+        for items in logs:
             dic = { 'id': items.id, 'timeStamp': items.timeStamp, 'level': items.level, 'mesg': items.msg }

             if checker == 0:
                 json_data = json_data + json.dumps(dic)
@@ -130,7 +131,7 @@ class S3Backups(multi.Thread):
         writeToFile = open(pathToFile, 'w')
         for items in output:
             writeToFile.writelines(items + '\n')
-        writeToFile.writelines('0 24 * * * cyberpanel /usr/local/CyberCP/bin/python2 /usr/local/CyberCP/s3Backups/s3Backups.py\n')
+        writeToFile.writelines('0 0 * * * cyberpanel /usr/local/CyberCP/bin/python2 /usr/local/CyberCP/s3Backups/s3Backups.py\n')
         writeToFile.close()

         command = 'sudo mv ' + pathToFile + ' /etc/crontab'
         ProcessUtilities.executioner(command)
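
The crontab change is a real bug fix, not a reschedule: cron's hour field only accepts 0-23, so the old 0 24 * * * entry is invalid and that job never fired; 0 0 * * * runs daily at midnight. A toy check of just the hour field (real cron syntax also allows ranges, lists, and steps):

def cron_hour_valid(entry):
    hour = entry.split()[1]  # fields: minute hour day-of-month month day-of-week ...
    return hour == '*' or 0 <= int(hour) <= 23

print(cron_hour_valid('0 24 * * * cyberpanel ...'))  # False: the old, broken entry
print(cron_hour_valid('0 0 * * * cyberpanel ...'))   # True: the fixed entry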
@@ -359,7 +360,7 @@ class S3Backups(multi.Thread):
         page = int(self.data['page'])

         backupPlan = BackupPlan.objects.get(name=self.data['planName'])
-        logs = backupPlan.backuplogs_set.all()
+        logs = backupPlan.backuplogs_set.all().order_by('-id')

         pagination = S3Backups.getPagination(len(logs), recordsToShow)
         endPageNumber, finalPageNumber = S3Backups.recordsPointer(page, recordsToShow)
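
This pairs with the reversed(logs) removal earlier in the file: newest-first ordering now happens in the database instead of being approximated in Python, so the pagination slice sees the most recent log rows first. Roughly equivalent framings in Django (model and field names taken from the diff):

# Before: fetch in default order, reverse the sequence in Python.
logs = reversed(BackupLogs.objects.filter(owner=plan))
# After: let the database return newest rows first.
logs = BackupLogs.objects.filter(owner=plan).order_by('-id')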
@@ -399,14 +400,16 @@ class S3Backups(multi.Thread):
     def forceRunAWSBackup(self):
         try:
-            s3 = boto3.resource('s3')
             plan = BackupPlan.objects.get(name=self.data['planName'])
             bucketName = plan.bucket.strip('\n').strip(' ')
             runTime = time.strftime("%d:%m:%Y")
+            client = boto3.client('s3')
+            config = TransferConfig(multipart_threshold=1024 * 25, max_concurrency=10,
+                                    multipart_chunksize=1024 * 25, use_threads=True)

             ## Set Expiration for objects

             try:
-                client = boto3.client('s3')
                 client.put_bucket_lifecycle_configuration(
                     Bucket='string',
                     LifecycleConfiguration={
@@ -444,8 +447,13 @@ class S3Backups(multi.Thread):
             for items in plan.websitesinplan_set.all():
                 result = self.createBackup(items.domain)
                 if result[0]:
-                    data = open(result[1] + ".tar.gz", 'rb')
-                    s3.Bucket(bucketName).put_object(Key=plan.name + '/' + runTime + '/' + result[1].split('/')[-1] + ".tar.gz", Body=data)
+                    key = plan.name + '/' + runTime + '/' + result[1].split('/')[-1] + ".tar.gz"
+                    client.upload_file(
+                        result[1] + ".tar.gz",
+                        bucketName,
+                        key,
+                        Config=config,
+                    )
                     BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), msg='Backup successful for ' + items.domain + '.').save()
                 else:
                     BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), msg='Backup failed for ' + items.domain + '. Error: ' + result[1]).save()
@@ -455,8 +463,6 @@ class S3Backups(multi.Thread):
             plan.save()
             BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), msg='Backup Process Finished.').save()
         except BaseException, msg:
             logging.writeToFile(str(msg) + ' [S3Backups.runBackupPlan]')
             plan = BackupPlan.objects.get(name=self.data['planName'])
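
The upload change is the heart of the commit: s3.Bucket(...).put_object(Body=data) pushes the whole archive through a single PUT request, which is slow and failure-prone for multi-gigabyte backups, while client.upload_file with a TransferConfig uses boto3's managed transfer, splitting files above multipart_threshold into parts uploaded by up to max_concurrency threads, each part a separate, retryable request. Note that TransferConfig sizes are in bytes, so 1024 * 25 is 25 KB and essentially every backup takes the multipart path (S3 itself enforces a 5 MB minimum part size, and boto3 adjusts undersized chunk sizes upward). A standalone sketch, assuming default AWS credentials and placeholder bucket, key, and file names:

import boto3
from boto3.s3.transfer import TransferConfig

# Same config as the commit; all sizes are in bytes.
config = TransferConfig(multipart_threshold=1024 * 25,  # go multipart above ~25 KB
                        max_concurrency=10,             # parallel uploader threads
                        multipart_chunksize=1024 * 25,  # requested part size
                        use_threads=True)

client = boto3.client('s3')
client.upload_file('/home/backup/example.com.tar.gz',           # placeholder archive
                   'my-backup-bucket',                          # placeholder bucket
                   'planName/24:12:2018/example.com.tar.gz',    # key shaped like plan.name/runTime/file
                   Config=config)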