# -*- coding: utf-8 -*-
from random import randint

from django.shortcuts import render, redirect
from django.http import HttpResponse
from plogical.getSystemInformation import SystemInformation
import json
from loginSystem.views import loadLoginPage
from .models import version
import requests
import subprocess
import shlex
import os
import plogical.CyberCPLogFileWriter as logging
from plogical.acl import ACLManager
from manageServices.models import PDNSStatus
from django.views.decorators.csrf import ensure_csrf_cookie, csrf_exempt
from plogical.processUtilities import ProcessUtilities
from plogical.httpProc import httpProc
from websiteFunctions.models import Websites, WPSites
from databases.models import Databases
from mailServer.models import EUsers
from django.views.decorators.http import require_GET, require_POST

# Create your views here.

VERSION = '2.4'
BUILD = 1


@ensure_csrf_cookie
def renderBase(request):
    template = 'baseTemplate/homePage.html'

    cpuRamDisk = SystemInformation.cpuRamDisk()

    finaData = {'ramUsage': cpuRamDisk['ramUsage'], 'cpuUsage': cpuRamDisk['cpuUsage'],
                'diskUsage': cpuRamDisk['diskUsage']}

    proc = httpProc(request, template, finaData)
    return proc.render()


@ensure_csrf_cookie
def versionManagement(request):
    getVersion = requests.get('https://cyberpanel.net/version.txt')
    latest = getVersion.json()
    latestVersion = latest['version']
    latestBuild = latest['build']

    currentVersion = VERSION
    currentBuild = str(BUILD)

    u = "https://api.github.com/repos/usmannasir/cyberpanel/commits?sha=v%s.%s" % (latestVersion, latestBuild)
    logging.CyberCPLogFileWriter.writeToFile(u)

    r = requests.get(u)
    latestcomit = r.json()[0]['sha']

    command = "git -C /usr/local/CyberCP/ rev-parse HEAD"
    output = ProcessUtilities.outputExecutioner(command)

    Currentcomt = output.rstrip("\n")
    notechk = True

    if Currentcomt == latestcomit:
        notechk = False

    template = 'baseTemplate/versionManagment.html'
    finalData = {'build': currentBuild, 'currentVersion': currentVersion, 'latestVersion': latestVersion,
                 'latestBuild': latestBuild, 'latestcomit': latestcomit, "Currentcomt": Currentcomt,
                 "Notecheck": notechk}

    proc = httpProc(request, template, finalData, 'versionManagement')
    return proc.render()
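
# The release check above assumes CyberPanel tags releases as v<version>.<build>,
# e.g. VERSION '2.4' with BUILD 1 maps to the tag v2.4.1. The first entry of the
# GitHub commits listing for that tag is its tip commit, which is compared against
# the local HEAD of /usr/local/CyberCP to decide whether an upgrade note is shown.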


@ensure_csrf_cookie
def upgrade_cyberpanel(request):
    if request.method == 'POST':
        try:
            upgrade_command = 'sh <(curl https://raw.githubusercontent.com/usmannasir/cyberpanel/stable/preUpgrade.sh || wget -O - https://raw.githubusercontent.com/usmannasir/cyberpanel/stable/preUpgrade.sh)'

            result = subprocess.run(upgrade_command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                    universal_newlines=True)

            if result.returncode == 0:
                response_data = {'success': True, 'message': 'CyberPanel upgrade completed successfully.'}
            else:
                response_data = {'success': False,
                                 'message': 'CyberPanel upgrade failed. Error output: ' + result.stderr}
        except Exception as e:
            response_data = {'success': False, 'message': 'An error occurred during the upgrade: ' + str(e)}

        # Report the outcome to the caller as JSON.
        return HttpResponse(json.dumps(response_data))


def getAdminStatus(request):
    try:
        val = request.session['userID']
        currentACL = ACLManager.loadedACL(val)

        if os.path.exists('/home/cyberpanel/postfix'):
            currentACL['emailAsWhole'] = 1
        else:
            currentACL['emailAsWhole'] = 0

        if os.path.exists('/home/cyberpanel/pureftpd'):
            currentACL['ftpAsWhole'] = 1
        else:
            currentACL['ftpAsWhole'] = 0

        try:
            pdns = PDNSStatus.objects.get(pk=1)
            currentACL['dnsAsWhole'] = pdns.serverStatus
        except:
            if ProcessUtilities.decideDistro() == ProcessUtilities.ubuntu or ProcessUtilities.decideDistro() == ProcessUtilities.ubuntu20:
                pdnsPath = '/etc/powerdns'
            else:
                pdnsPath = '/etc/pdns'

            if os.path.exists(pdnsPath):
                PDNSStatus(serverStatus=1).save()
                currentACL['dnsAsWhole'] = 1
            else:
                currentACL['dnsAsWhole'] = 0

        json_data = json.dumps(currentACL)
        return HttpResponse(json_data)
    except KeyError:
        return HttpResponse("Can not get admin Status")


def getSystemStatus(request):
    try:
        val = request.session['userID']
        currentACL = ACLManager.loadedACL(val)

        HTTPData = SystemInformation.getSystemInformation()
        json_data = json.dumps(HTTPData)
        return HttpResponse(json_data)
    except KeyError:
        return HttpResponse("Can not get admin Status")


def getLoadAverage(request):
    try:
        val = request.session['userID']
        currentACL = ACLManager.loadedACL(val)

        loadAverage = SystemInformation.cpuLoad()
        loadAverage = list(loadAverage)

        one = loadAverage[0]
        two = loadAverage[1]
        three = loadAverage[2]

        loadAvg = {"one": one, "two": two, "three": three}
        json_data = json.dumps(loadAvg)
        return HttpResponse(json_data)
    except KeyError:
        return HttpResponse("Not allowed.")
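
# "one", "two" and "three" above are the 1, 5 and 15 minute load averages,
# assuming SystemInformation.cpuLoad() returns the usual os.getloadavg()-style
# three-value tuple.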


@ensure_csrf_cookie
def versionManagment(request):
    ## Get latest version

    getVersion = requests.get('https://cyberpanel.net/version.txt')
    latest = getVersion.json()
    latestVersion = latest['version']
    latestBuild = latest['build']

    ## Get local version

    currentVersion = VERSION
    currentBuild = str(BUILD)

    u = "https://api.github.com/repos/usmannasir/cyberpanel/commits?sha=v%s.%s" % (latestVersion, latestBuild)
    logging.CyberCPLogFileWriter.writeToFile(u)

    r = requests.get(u)
    latestcomit = r.json()[0]['sha']

    command = "git -C /usr/local/CyberCP/ rev-parse HEAD"
    output = ProcessUtilities.outputExecutioner(command)

    Currentcomt = output.rstrip("\n")
    notechk = True

    if (Currentcomt == latestcomit):
        notechk = False

    template = 'baseTemplate/versionManagment.html'
    finalData = {'build': currentBuild, 'currentVersion': currentVersion, 'latestVersion': latestVersion,
                 'latestBuild': latestBuild, 'latestcomit': latestcomit, "Currentcomt": Currentcomt,
                 "Notecheck": notechk}

    proc = httpProc(request, template, finalData, 'versionManagement')
    return proc.render()


def upgrade(request):
    try:
        admin = request.session['userID']
        currentACL = ACLManager.loadedACL(admin)

        data = json.loads(request.body)

        if currentACL['admin'] == 1:
            pass
        else:
            return ACLManager.loadErrorJson('fetchStatus', 0)

        from plogical.applicationInstaller import ApplicationInstaller

        extraArgs = {}
        extraArgs['branchSelect'] = data["branchSelect"]

        background = ApplicationInstaller('UpgradeCP', extraArgs)
        background.start()

        adminData = {"upgrade": 1}
        json_data = json.dumps(adminData)
        return HttpResponse(json_data)

    except KeyError:
        adminData = {"upgrade": 1, "error_message": "Please login or refresh this page."}
        json_data = json.dumps(adminData)
        return HttpResponse(json_data)


def upgradeStatus(request):
    try:
        val = request.session['userID']
        currentACL = ACLManager.loadedACL(val)

        if currentACL['admin'] == 1:
            pass
        else:
            return ACLManager.loadErrorJson('FilemanagerAdmin', 0)

        try:
            if request.method == 'POST':
                from plogical.upgrade import Upgrade

                path = Upgrade.LogPathNew

                try:
                    upgradeLog = ProcessUtilities.outputExecutioner(f'cat {path}')
                except:
                    final_json = json.dumps({'finished': 0, 'upgradeStatus': 1,
                                             'error_message': "None",
                                             'upgradeLog': "Upgrade Just started.."})
                    return HttpResponse(final_json)

                if upgradeLog.find("Upgrade Completed") > -1:
                    command = f'rm -rf {path}'
                    ProcessUtilities.executioner(command)

                    final_json = json.dumps({'finished': 1, 'upgradeStatus': 1,
                                             'error_message': "None",
                                             'upgradeLog': upgradeLog})
                    return HttpResponse(final_json)
                else:
                    final_json = json.dumps({'finished': 0, 'upgradeStatus': 1,
                                             'error_message': "None",
                                             'upgradeLog': upgradeLog})
                    return HttpResponse(final_json)
        except BaseException as msg:
            final_dic = {'upgradeStatus': 0, 'error_message': str(msg)}
            final_json = json.dumps(final_dic)
            return HttpResponse(final_json)
    except KeyError:
        final_dic = {'upgradeStatus': 0, 'error_message': "Not Logged In, please refresh the page or login again."}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)


def upgradeVersion(request):
    try:
        vers = version.objects.get(pk=1)

        getVersion = requests.get('https://cyberpanel.net/version.txt')
        latest = getVersion.json()

        vers.currentVersion = latest['version']
        vers.build = latest['build']
        vers.save()

        return HttpResponse("Version upgrade OK.")
    except BaseException as msg:
        logging.CyberCPLogFileWriter.writeToFile(str(msg))
        return HttpResponse(str(msg))


@ensure_csrf_cookie
def design(request):
    ### Load Custom CSS
    try:
        from baseTemplate.models import CyberPanelCosmetic
        cosmetic = CyberPanelCosmetic.objects.get(pk=1)
    except:
        from baseTemplate.models import CyberPanelCosmetic
        cosmetic = CyberPanelCosmetic()
        cosmetic.save()

    val = request.session['userID']
    currentACL = ACLManager.loadedACL(val)
    if currentACL['admin'] == 1:
        pass
    else:
        return ACLManager.loadErrorJson('reboot', 0)

    finalData = {}

    if request.method == 'POST':
        MainDashboardCSS = request.POST.get('MainDashboardCSS', '')
        cosmetic.MainDashboardCSS = MainDashboardCSS
        cosmetic.save()
        finalData['saved'] = 1

    ####### Fetch sha...

    sha_url = "https://api.github.com/repos/usmannasir/CyberPanel-Themes/commits"
    sha_res = requests.get(sha_url)
    sha = sha_res.json()[0]['sha']

    l = "https://api.github.com/repos/usmannasir/CyberPanel-Themes/git/trees/%s" % sha
    fres = requests.get(l)
    tott = len(fres.json()['tree'])

    finalData['tree'] = []
    for i in range(tott):
        if (fres.json()['tree'][i]['type'] == "tree"):
            finalData['tree'].append(fres.json()['tree'][i]['path'])

    template = 'baseTemplate/design.html'
    finalData['cosmetic'] = cosmetic

    proc = httpProc(request, template, finalData, 'versionManagement')
    return proc.render()


def getthemedata(request):
    try:
        val = request.session['userID']
        currentACL = ACLManager.loadedACL(val)
        data = json.loads(request.body)

        if currentACL['admin'] == 1:
            pass
        else:
            return ACLManager.loadErrorJson('reboot', 0)

        # logging.CyberCPLogFileWriter.writeToFile(str(data) + " [themedata]")

        url = "https://raw.githubusercontent.com/usmannasir/CyberPanel-Themes/main/%s/design.css" % data['Themename']

        res = requests.get(url)
        rsult = res.text

        final_dic = {'status': 1, 'csscontent': rsult}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)


def onboarding(request):
    template = 'baseTemplate/onboarding.html'

    proc = httpProc(request, template, None, 'admin')
    return proc.render()


def runonboarding(request):
    try:
        userID = request.session['userID']
        currentACL = ACLManager.loadedACL(userID)

        if currentACL['admin'] == 1:
            pass
        else:
            return ACLManager.loadErrorJson()

        data = json.loads(request.body)
        hostname = data['hostname']

        try:
            rDNSCheck = str(int(data['rDNSCheck']))
        except:
            rDNSCheck = 0

        tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))

        WriteToFile = open(tempStatusPath, 'w')
        WriteToFile.write('Starting')
        WriteToFile.close()

        command = f'/usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/virtualHostUtilities.py OnBoardingHostName --virtualHostName {hostname} --path {tempStatusPath} --rdns {rDNSCheck}'
        ProcessUtilities.popenExecutioner(command)

        dic = {'status': 1, 'tempStatusPath': tempStatusPath}
        json_data = json.dumps(dic)
        return HttpResponse(json_data)

    except BaseException as msg:
        dic = {'status': 0, 'error_message': str(msg)}
        json_data = json.dumps(dic)
        return HttpResponse(json_data)


def RestartCyberPanel(request):
    try:
        userID = request.session['userID']
        currentACL = ACLManager.loadedACL(userID)

        if currentACL['admin'] == 1:
            pass
        else:
            return ACLManager.loadErrorJson()

        command = 'systemctl restart lscpd'
        ProcessUtilities.popenExecutioner(command)

        dic = {'status': 1}
        json_data = json.dumps(dic)
        return HttpResponse(json_data)

    except BaseException as msg:
        dic = {'status': 0, 'error_message': str(msg)}
        json_data = json.dumps(dic)
        return HttpResponse(json_data)


def getDashboardStats(request):
    try:
        val = request.session['userID']
        currentACL = ACLManager.loadedACL(val)

        total_sites = Websites.objects.count()
        total_wp_sites = WPSites.objects.count()
        total_dbs = Databases.objects.count()
        total_emails = EUsers.objects.count()

        data = {
            'total_sites': total_sites,
            'total_wp_sites': total_wp_sites,
            'total_dbs': total_dbs,
            'total_emails': total_emails,
            'status': 1
        }
        return HttpResponse(json.dumps(data), content_type='application/json')
    except Exception as e:
        return HttpResponse(json.dumps({'status': 0, 'error_message': str(e)}), content_type='application/json')
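
# Example response shape produced by the counts above (the numbers are
# illustrative values only):
#   {"total_sites": 12, "total_wp_sites": 5, "total_dbs": 9, "total_emails": 30, "status": 1}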


def getTrafficStats(request):
    try:
        val = request.session['userID']
        currentACL = ACLManager.loadedACL(val)

        # Get network stats from /proc/net/dev (Linux)
        rx = tx = 0
        with open('/proc/net/dev', 'r') as f:
            for line in f.readlines():
                if 'lo:' in line:
                    continue
                if ':' in line:
                    parts = line.split()
                    rx += int(parts[1])
                    tx += int(parts[9])

        data = {
            'rx_bytes': rx,
            'tx_bytes': tx,
            'status': 1
        }
        return HttpResponse(json.dumps(data), content_type='application/json')
    except Exception as e:
        return HttpResponse(json.dumps({'status': 0, 'error_message': str(e)}), content_type='application/json')
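
# A typical /proc/net/dev data row looks like:
#   eth0: 123456789 123456 0 0 0 0 0 0 987654321 654321 0 0 0 0 0 0
# After split(), parts[1] is the interface's received-bytes counter and
# parts[9] its transmitted-bytes counter, which is what the loop above sums.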


def getDiskIOStats(request):
    try:
        val = request.session['userID']
        currentACL = ACLManager.loadedACL(val)

        # Parse /proc/diskstats for all disks
        read_sectors = 0
        write_sectors = 0
        sector_size = 512  # Most Linux systems use 512 bytes per sector
        with open('/proc/diskstats', 'r') as f:
            for line in f:
                parts = line.split()
                if len(parts) < 14:
                    continue
                # parts[2] is device name, skip loopback/ram devices
                dev = parts[2]
                if dev.startswith('loop') or dev.startswith('ram'):
                    continue
                # 6th and 10th columns: sectors read/written
                read_sectors += int(parts[5])
                write_sectors += int(parts[9])

        data = {
            'read_bytes': read_sectors * sector_size,
            'write_bytes': write_sectors * sector_size,
            'status': 1
        }
        return HttpResponse(json.dumps(data), content_type='application/json')
    except Exception as e:
        return HttpResponse(json.dumps({'status': 0, 'error_message': str(e)}), content_type='application/json')
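
# /proc/diskstats rows are laid out as:
#   major minor device reads reads_merged sectors_read ms_reading writes writes_merged sectors_written ...
# so parts[5] and parts[9] above are the sectors-read and sectors-written
# counters; the kernel reports these in 512-byte units, hence sector_size = 512.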


def getCPULoadGraph(request):
    try:
        val = request.session['userID']
        currentACL = ACLManager.loadedACL(val)

        # Parse /proc/stat for the 'cpu' line
        with open('/proc/stat', 'r') as f:
            for line in f:
                if line.startswith('cpu '):
                    parts = line.strip().split()
                    # parts[1:] are user, nice, system, idle, iowait, irq, softirq, steal, guest, guest_nice
                    cpu_times = [float(x) for x in parts[1:]]
                    break
            else:
                cpu_times = []

        data = {
            'cpu_times': cpu_times,
            'status': 1
        }
        return HttpResponse(json.dumps(data), content_type='application/json')
    except Exception as e:
        return HttpResponse(json.dumps({'status': 0, 'error_message': str(e)}), content_type='application/json')
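
# The aggregate line parsed above looks like:
#   cpu  4705 356 584 3699176 23060 0 277 0 0 0
# The values are cumulative jiffies; a caller can derive CPU utilization by
# sampling this endpoint twice and computing 1 - (delta idle / delta total),
# assuming the usual ordering user, nice, system, idle, iowait, irq, softirq, steal.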


@csrf_exempt
@require_GET
def getRecentSSHLogins(request):
    try:
        user_id = request.session.get('userID')
        if not user_id:
            return HttpResponse(json.dumps({'error': 'Not logged in'}), content_type='application/json', status=403)
        currentACL = ACLManager.loadedACL(user_id)
        if not currentACL.get('admin', 0):
            return HttpResponse(json.dumps({'error': 'Admin only'}), content_type='application/json', status=403)

        import re, time
        from collections import OrderedDict

        # Run 'last -n 20' to get recent SSH logins
        try:
            output = ProcessUtilities.outputExecutioner('last -n 20')
        except Exception as e:
            return HttpResponse(json.dumps({'error': 'Failed to run last: %s' % str(e)}), content_type='application/json', status=500)

        lines = output.strip().split('\n')
        logins = []
        ip_cache = {}
        for line in lines:
            if not line.strip() or any(x in line for x in ['reboot', 'system boot', 'wtmp begins']):
                continue
            # Example: ubuntu pts/0 206.84.168.7 Sun Jun 1 19:41 still logged in
            # or: ubuntu pts/0 206.84.169.36 Tue May 27 11:34 - 13:47 (02:13)
            parts = re.split(r'\s+', line, maxsplit=5)
            if len(parts) < 5:
                continue
            user, tty, ip, *rest = parts
            # Find date/time and session info
            date_session = rest[-1] if rest else ''
            # Try to extract date/session
            date_match = re.search(r'([A-Za-z]{3} [A-Za-z]{3} +\d+ [\d:]+)', line)
            date_str = date_match.group(1) if date_match else ''
            session_info = ''
            if '-' in line:
                # Session ended
                session_info = line.split('-')[-1].strip()
            elif 'still logged in' in line:
                session_info = 'still logged in'
            # GeoIP lookup (cache per request)
            country = flag = ''
            if re.match(r'\d+\.\d+\.\d+\.\d+', ip) and ip != '127.0.0.1':
                if ip in ip_cache:
                    country, flag = ip_cache[ip]
                else:
                    try:
                        geo = requests.get(f'http://ip-api.com/json/{ip}', timeout=2).json()
                        country = geo.get('countryCode', '')
                        flag = f"https://flagcdn.com/24x18/{country.lower()}.png" if country else ''
                        ip_cache[ip] = (country, flag)
                    except Exception:
                        country, flag = '', ''
            elif ip == '127.0.0.1':
                country, flag = 'Local', ''
            logins.append({
                'user': user,
                'ip': ip,
                'country': country,
                'flag': flag,
                'date': date_str,
                'session': session_info,
                'raw': line
            })
        return HttpResponse(json.dumps({'logins': logins}), content_type='application/json')
    except Exception as e:
        return HttpResponse(json.dumps({'error': str(e)}), content_type='application/json', status=500)


@csrf_exempt
@require_GET
def getRecentSSHLogs(request):
    try:
        user_id = request.session.get('userID')
        if not user_id:
            return HttpResponse(json.dumps({'error': 'Not logged in'}), content_type='application/json', status=403)
        currentACL = ACLManager.loadedACL(user_id)
        if not currentACL.get('admin', 0):
            return HttpResponse(json.dumps({'error': 'Admin only'}), content_type='application/json', status=403)

        from plogical.processUtilities import ProcessUtilities
        distro = ProcessUtilities.decideDistro()
        if distro in [ProcessUtilities.ubuntu, ProcessUtilities.ubuntu20]:
            log_path = '/var/log/auth.log'
        else:
            log_path = '/var/log/secure'

        try:
            output = ProcessUtilities.outputExecutioner(f'tail -n 100 {log_path}')
        except Exception as e:
            return HttpResponse(json.dumps({'error': f'Failed to read log: {str(e)}'}), content_type='application/json', status=500)

        lines = output.split('\n')
        logs = []
        for line in lines:
            if not line.strip():
                continue
            parts = line.split()
            if len(parts) > 4:
                timestamp = ' '.join(parts[:3])
                message = ' '.join(parts[4:])
            else:
                timestamp = ''
                message = line
            logs.append({'timestamp': timestamp, 'message': message, 'raw': line})

        return HttpResponse(json.dumps({'logs': logs}), content_type='application/json')
    except Exception as e:
        return HttpResponse(json.dumps({'error': str(e)}), content_type='application/json', status=500)
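
# The timestamp/message split above assumes the traditional syslog layout, e.g.:
#   Jun  2 16:11:01 server sshd[1234]: Accepted password for root from 203.0.113.5 port 51514 ssh2
# where parts[:3] is the month/day/time prefix and parts[4:] is the message body
# (parts[3], the hostname column, is dropped).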


@csrf_exempt
@require_POST
def getSSHUserActivity(request):
    import json
    from plogical.processUtilities import ProcessUtilities
    try:
        user_id = request.session.get('userID')
        if not user_id:
            return HttpResponse(json.dumps({'error': 'Not logged in'}), content_type='application/json', status=403)
        currentACL = ACLManager.loadedACL(user_id)
        if not currentACL.get('admin', 0):
            return HttpResponse(json.dumps({'error': 'Admin only'}), content_type='application/json', status=403)

        data = json.loads(request.body.decode('utf-8'))
        user = data.get('user')
        tty = data.get('tty')
        if not user:
            return HttpResponse(json.dumps({'error': 'Missing user'}), content_type='application/json', status=400)

        # Get processes for the user
        ps_cmd = f"ps -u {user} -o pid,tty,time,cmd --no-headers"
        try:
            ps_output = ProcessUtilities.outputExecutioner(ps_cmd)
        except Exception as e:
            ps_output = ''

        processes = []
        if ps_output:
            for line in ps_output.strip().split('\n'):
                parts = line.split(None, 3)
                if len(parts) == 4:
                    pid, tty_val, time_val, cmd = parts
                    if tty and tty not in tty_val:
                        continue
                    processes.append({
                        'pid': pid,
                        'tty': tty_val,
                        'time': time_val,
                        'cmd': cmd
                    })

        # Optionally, get 'w' output for more info
        w_cmd = f"w -h {user}"
        try:
            w_output = ProcessUtilities.outputExecutioner(w_cmd)
        except Exception as e:
            w_output = ''

        w_lines = []
        if w_output:
            for line in w_output.strip().split('\n'):
                w_lines.append(line)

        return HttpResponse(json.dumps({'processes': processes, 'w': w_lines}), content_type='application/json')
    except Exception as e:
        return HttpResponse(json.dumps({'error': str(e)}), content_type='application/json', status=500)