mirror of https://github.com/usmannasir/cyberpanel.git (synced 2025-11-14 09:16:11 +01:00)

Commit: fix issue with design on n8n page
@@ -8,6 +8,8 @@ from django.shortcuts import redirect
 from loginSystem.views import loadLoginPage
 from django.views.decorators.csrf import csrf_exempt
 from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
+import datetime
+import requests

 def require_login(view_func):
     def wrapper(request, *args, **kwargs):
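Note: the two new imports back the backup/restore logic added in the next hunk; requests drives the calls against the n8n REST API and datetime stamps the generated backup filename. A minimal illustration of both uses (the base URL and container name are placeholders, not values from this commit):

    import datetime
    import requests

    # Placeholder values for illustration only; the view derives these from the
    # request host and the container's port bindings.
    n8n_base_url = "http://127.0.0.1:5678/api/v1"
    container_name = "n8n-example"

    # The create_backup branch stores the JSON body of this call under backup_data['workflows'].
    workflows = requests.get(f"{n8n_base_url}/workflows").json()

    # Filename format produced by the create_backup branch.
    stamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    filename = f"n8n-backup-{container_name}-{stamp}.json"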
@@ -194,31 +196,211 @@ def n8n_container_operation(request):

         # Handle different operations
         if operation == 'create_backup':
-            # For now, just return mock data to test UI functionality
+            try:
+                # Determine the port where n8n is running
+                container_info = container.attrs
+                port_bindings = container_info.get('HostConfig', {}).get('PortBindings', {})
+                n8n_port = None
+
+                for container_port, host_ports in port_bindings.items():
+                    if container_port.startswith('5678'):
+                        n8n_port = host_ports[0]['HostPort']
+                        break
+
+                if not n8n_port:
+                    return HttpResponse(json.dumps({
+                        'status': 0,
+                        'error_message': 'Could not determine n8n port'
+                    }))
+
+                # Get backup options from request
                 backup_options = data.get('options', {})
                 include_credentials = backup_options.get('includeCredentials', True)
                 include_executions = backup_options.get('includeExecutions', False)

-            # In a real implementation, you would call the n8n API to create a backup
-            # For now, simulate a successful backup
+                # Set up n8n API URL
+                host_ip = request.get_host().split(':')[0]
+                n8n_base_url = f"http://{host_ip}:{n8n_port}/api/v1"
+
+                # Initialize the backup data dictionary
+                backup_data = {}
+
+                # Fetch workflows
+                # Get n8n workflows (no authentication required for basic n8n)
+                workflows_response = requests.get(f"{n8n_base_url}/workflows")
+
+                if workflows_response.status_code == 200:
+                    backup_data['workflows'] = workflows_response.json()
+                else:
+                    logging.writeToFile(f"Failed to fetch n8n workflows: {workflows_response.status_code} - {workflows_response.text}")
+                    return HttpResponse(json.dumps({
+                        'status': 0,
+                        'error_message': f'Failed to fetch workflows: {workflows_response.text}'
+                    }))
+
+                # Get credentials if requested
+                if include_credentials:
+                    credentials_response = requests.get(f"{n8n_base_url}/credentials")
+
+                    if credentials_response.status_code == 200:
+                        backup_data['credentials'] = credentials_response.json()
+                    else:
+                        logging.writeToFile(f"Failed to fetch n8n credentials: {credentials_response.status_code} - {credentials_response.text}")
+                        # Don't fail the whole backup just because credentials failed
+
+                # Get execution data if requested
+                if include_executions:
+                    executions_response = requests.get(f"{n8n_base_url}/executions")
+
+                    if executions_response.status_code == 200:
+                        backup_data['executions'] = executions_response.json()
+                    else:
+                        logging.writeToFile(f"Failed to fetch n8n executions: {executions_response.status_code} - {executions_response.text}")
+                        # Don't fail the whole backup just because executions failed
+
+                # Include metadata
+                backup_data['metadata'] = {
+                    'timestamp': datetime.datetime.now().isoformat(),
+                    'container_id': container_id,
+                    'container_name': container.name,
+                    'include_credentials': include_credentials,
+                    'include_executions': include_executions
+                }
+
+                # Create a response with the backup data
                 return HttpResponse(json.dumps({
                     'status': 1,
-                'message': 'Backup simulation successful. In a production environment, this would download a backup file.',
-                # In real implementation, you would provide a download URL
-                # 'download_url': '/path/to/download/backup.json'
+                    'message': 'Backup created successfully',
+                    'backup': backup_data,
+                    'filename': f'n8n-backup-{container.name}-{datetime.datetime.now().strftime("%Y%m%d-%H%M%S")}.json'
+                }))
+
+            except Exception as e:
+                logging.writeToFile(f"Error creating n8n backup: {str(e)}")
+                return HttpResponse(json.dumps({
+                    'status': 0,
+                    'error_message': f'Error creating backup: {str(e)}'
                 }))

         elif operation == 'restore_backup':
-            # For now, just return mock data to test UI functionality
+            try:
+                # Determine the port where n8n is running
+                container_info = container.attrs
+                port_bindings = container_info.get('HostConfig', {}).get('PortBindings', {})
+                n8n_port = None
+
+                for container_port, host_ports in port_bindings.items():
+                    if container_port.startswith('5678'):
+                        n8n_port = host_ports[0]['HostPort']
+                        break
+
+                if not n8n_port:
+                    return HttpResponse(json.dumps({
+                        'status': 0,
+                        'error_message': 'Could not determine n8n port'
+                    }))
+
+                # Get backup data from request
                 backup_data = data.get('backup_data')

-            # In a real implementation, you would call the n8n API to restore from backup
-            # For now, simulate a successful restore
+                if not backup_data:
+                    return HttpResponse(json.dumps({
+                        'status': 0,
+                        'error_message': 'No backup data provided'
+                    }))
+
+                # Set up n8n API URL
+                host_ip = request.get_host().split(':')[0]
+                n8n_base_url = f"http://{host_ip}:{n8n_port}/api/v1"
+
+                # Restore workflows
+                if 'workflows' in backup_data:
+                    # First, get the list of existing workflows to avoid duplicates
+                    existing_workflows_response = requests.get(f"{n8n_base_url}/workflows")
+
+                    if existing_workflows_response.status_code != 200:
+                        logging.writeToFile(f"Failed to fetch existing workflows: {existing_workflows_response.status_code} - {existing_workflows_response.text}")
+                        return HttpResponse(json.dumps({
+                            'status': 0,
+                            'error_message': f'Failed to fetch existing workflows: {existing_workflows_response.text}'
+                        }))
+
+                    existing_workflows = existing_workflows_response.json()
+                    existing_workflow_names = {wf['name']: wf['id'] for wf in existing_workflows}
+
+                    # Now restore each workflow
+                    for workflow in backup_data['workflows']:
+                        # Remove ID from the backup data to create a new workflow
+                        if 'id' in workflow:
+                            workflow_id = workflow.pop('id')
+
+                        # Check if workflow with the same name already exists
+                        if workflow['name'] in existing_workflow_names:
+                            # Update existing workflow
+                            update_response = requests.put(
+                                f"{n8n_base_url}/workflows/{existing_workflow_names[workflow['name']]}",
+                                json=workflow
+                            )
+
+                            if update_response.status_code not in [200, 201]:
+                                logging.writeToFile(f"Failed to update workflow: {update_response.status_code} - {update_response.text}")
+                        else:
+                            # Create new workflow
+                            create_response = requests.post(
+                                f"{n8n_base_url}/workflows",
+                                json=workflow
+                            )
+
+                            if create_response.status_code not in [200, 201]:
+                                logging.writeToFile(f"Failed to create workflow: {create_response.status_code} - {create_response.text}")
+
+                # Restore credentials if included in backup
+                if 'credentials' in backup_data:
+                    # First, get existing credentials to avoid duplicates
+                    existing_creds_response = requests.get(f"{n8n_base_url}/credentials")
+
+                    if existing_creds_response.status_code == 200:
+                        existing_creds = existing_creds_response.json()
+                        existing_cred_names = {cred['name']: cred['id'] for cred in existing_creds}
+
+                        # Now restore each credential
+                        for credential in backup_data['credentials']:
+                            # Remove ID from the backup data to create a new credential
+                            if 'id' in credential:
+                                credential_id = credential.pop('id')
+
+                            # Check if credential with the same name already exists
+                            if credential['name'] in existing_cred_names:
+                                # Update existing credential
+                                update_response = requests.put(
+                                    f"{n8n_base_url}/credentials/{existing_cred_names[credential['name']]}",
+                                    json=credential
+                                )
+
+                                if update_response.status_code not in [200, 201]:
+                                    logging.writeToFile(f"Failed to update credential: {update_response.status_code} - {update_response.text}")
+                            else:
+                                # Create new credential
+                                create_response = requests.post(
+                                    f"{n8n_base_url}/credentials",
+                                    json=credential
+                                )
+
+                                if create_response.status_code not in [200, 201]:
+                                    logging.writeToFile(f"Failed to create credential: {create_response.status_code} - {create_response.text}")
+                    else:
+                        logging.writeToFile(f"Failed to fetch existing credentials: {existing_creds_response.status_code} - {existing_creds_response.text}")
+
                 return HttpResponse(json.dumps({
                     'status': 1,
-                'message': 'Restore simulation successful.'
+                    'message': 'Backup restored successfully'
+                }))
+
+            except Exception as e:
+                logging.writeToFile(f"Error restoring n8n backup: {str(e)}")
+                return HttpResponse(json.dumps({
+                    'status': 0,
+                    'error_message': f'Error restoring backup: {str(e)}'
                 }))

         else:
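Note: both new branches locate the published n8n port by reading the container's HostConfig.PortBindings before building the API URL. The same lookup can be exercised on its own with the docker Python SDK; the sketch below assumes that SDK, and the helper name and container name are illustrative rather than part of the commit.

    # Standalone sketch of the port lookup used by create_backup and restore_backup.
    # Assumes the docker SDK (pip install docker) and a running n8n container.
    import docker

    def find_n8n_host_port(container_name):
        client = docker.from_env()
        container = client.containers.get(container_name)
        bindings = container.attrs.get('HostConfig', {}).get('PortBindings', {}) or {}
        for container_port, host_ports in bindings.items():
            # n8n listens on 5678 inside the container; Docker records the mapping
            # as e.g. '5678/tcp' -> [{'HostIp': '', 'HostPort': '32768'}].
            if container_port.startswith('5678') and host_ports:
                return host_ports[0]['HostPort']
        return None

If the container was started without publishing port 5678, PortBindings is empty and the view above returns the 'Could not determine n8n port' error.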
@@ -387,15 +387,43 @@ app.controller('ListDockersitecontainer', function ($scope, $http) {
             $('#cyberpanelLoading').hide();

             if (response.data.status === 1) {
-                new PNotify({
-                    title: 'Success!',
-                    text: 'Backup created successfully. ' + (response.data.message || ''),
-                    type: 'success'
-                });
-
-                // Add download link if provided
-                if (response.data.download_url) {
-                    window.location.href = response.data.download_url;
-                }
+                // Check if we have backup data
+                if (response.data.backup) {
+                    // Create a download file from the backup data
+                    var backupData = response.data.backup;
+                    var fileName = response.data.filename || 'n8n-backup.json';
+
+                    // Convert the backup data to a JSON string
+                    var backupJson = JSON.stringify(backupData, null, 2);
+
+                    // Create a blob with the JSON data
+                    var blob = new Blob([backupJson], { type: 'application/json' });
+
+                    // Create a download link
+                    var downloadLink = document.createElement('a');
+                    downloadLink.href = URL.createObjectURL(blob);
+                    downloadLink.download = fileName;
+
+                    // Append to the document, trigger click, then remove
+                    document.body.appendChild(downloadLink);
+                    downloadLink.click();
+                    document.body.removeChild(downloadLink);
+
+                    // Clean up the URL.createObjectURL
+                    URL.revokeObjectURL(downloadLink.href);
+
+                    new PNotify({
+                        title: 'Success!',
+                        text: 'Backup created and downloaded successfully.',
+                        type: 'success'
+                    });
+                } else {
+                    // No backup data but still a success
+                    new PNotify({
+                        title: 'Success!',
+                        text: response.data.message || 'Backup created successfully.',
+                        type: 'success'
+                    });
+                }
             } else {
                 new PNotify({
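Note: the controller change above turns the backup JSON returned by the view into a client-side file download via a Blob. Restoring that file is what the restore_backup branch does server-side; as a rough illustration of the same round trip against the n8n endpoints the view calls, here is a sketch whose base URL and no-authentication assumption mirror this commit and may not hold for every n8n installation.

    # Re-import workflows from a downloaded n8n-backup-*.json file.
    # Base URL and the no-auth assumption mirror this commit, not n8n in general.
    import json
    import requests

    N8N_BASE_URL = "http://127.0.0.1:5678/api/v1"  # assumed for illustration

    def reimport_workflows(backup_path):
        with open(backup_path) as fh:
            backup = json.load(fh)
        for workflow in backup.get('workflows', []):
            workflow.pop('id', None)  # let n8n assign a fresh id, as the view does
            response = requests.post(f"{N8N_BASE_URL}/workflows", json=workflow)
            if response.status_code not in (200, 201):
                print(f"Failed to import {workflow.get('name')}: {response.status_code}")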