Mirror of https://github.com/usmannasir/cyberpanel.git (synced 2025-10-26 07:46:35 +01:00)

Commit: add aiscanner file patcher

aiScanner/api.py (922 changed lines)
@@ -472,10 +472,10 @@ def get_file_content(request):
def scan_callback(request):
    """
    Receive scan completion callbacks from AI Scanner platform

    POST /api/ai-scanner/callback
    Content-Type: application/json

    Expected payload:
    {
        "scan_id": "uuid",
@@ -489,7 +489,7 @@ def scan_callback(request):
        "findings": [
            {
                "file_path": "wp-content/plugins/file.php",
                "severity": "CRITICAL|HIGH|MEDIUM|LOW",
                "title": "Issue title",
                "description": "Detailed description",
                "ai_confidence": 95
@@ -517,15 +517,15 @@ def scan_callback(request):
        from .models import ScanHistory
        from django.utils import timezone
        import datetime

        # Find the scan record
        scan_record = ScanHistory.objects.get(scan_id=scan_id)

        # Update scan record
        scan_record.status = status
        scan_record.issues_found = summary.get('total_findings', 0)
        scan_record.files_scanned = summary.get('files_scanned', 0)

        # Parse and store cost
        cost_str = summary.get('cost', '$0.00')
        try:
@@ -534,10 +534,10 @@ def scan_callback(request):
            scan_record.cost_usd = cost_value
        except (ValueError, AttributeError):
            scan_record.cost_usd = 0.0

        # Store findings and AI analysis
        scan_record.set_findings(findings)

        # Build summary dict
        summary_dict = {
            'threat_level': summary.get('threat_level', 'UNKNOWN'),
@@ -546,7 +546,7 @@ def scan_callback(request):
            'ai_analysis': ai_analysis
        }
        scan_record.set_summary(summary_dict)

        # Set completion time
        if completed_at:
            try:
@@ -557,9 +557,9 @@ def scan_callback(request):
                scan_record.completed_at = timezone.now()
        else:
            scan_record.completed_at = timezone.now()

        scan_record.save()

        # Also update the ScanStatusUpdate record with final statistics
        try:
            from .status_models import ScanStatusUpdate
@@ -586,7 +586,7 @@ def scan_callback(request):
            logging.writeToFile(f"[API] Updated ScanStatusUpdate for completed scan {scan_id}")
        except Exception as e:
            logging.writeToFile(f"[API] Error updating ScanStatusUpdate: {str(e)}")

        # Update user balance if scan cost money
        if scan_record.cost_usd > 0:
            try:
@@ -623,7 +623,7 @@ def scan_callback(request):
                'message': 'Scan record not found',
                'scan_id': scan_id
            }, status=404)

    except Exception as e:
        logging.writeToFile(f"[API] Failed to update scan record: {str(e)}")
        return JsonResponse({
@@ -651,4 +651,898 @@ def scan_callback(request):
        return JsonResponse({
            'status': 'error',
            'message': 'Internal server error'
        }, status=500)


# =============================================================================
# File Operation Helper Functions
# =============================================================================

def log_file_operation(scan_id, operation, file_path, success, error_message=None, backup_path=None, request=None):
    """
    Log file operations to the audit log
    """
    try:
        from .models import ScannerFileOperation

        ip_address = None
        user_agent = None

        if request:
            ip_address = request.META.get('REMOTE_ADDR', '')[:45]
            user_agent = request.META.get('HTTP_USER_AGENT', '')[:255]

        ScannerFileOperation.objects.create(
            scan_id=scan_id,
            operation=operation,
            file_path=file_path,
            backup_path=backup_path,
            success=success,
            error_message=error_message,
            ip_address=ip_address,
            user_agent=user_agent
        )

        logging.writeToFile(f'[API] Logged {operation} operation for {file_path}: {"success" if success else "failed"}')
    except Exception as e:
        logging.writeToFile(f'[API] Failed to log operation: {str(e)}')


def check_rate_limit(scan_id, endpoint, max_requests):
    """
    Check if rate limit is exceeded for a scan/endpoint combination
    Returns (is_allowed, current_count)
    """
    try:
        from .models import ScannerAPIRateLimit

        rate_limit, created = ScannerAPIRateLimit.objects.get_or_create(
            scan_id=scan_id,
            endpoint=endpoint,
            defaults={'request_count': 0}
        )

        if rate_limit.request_count >= max_requests:
            logging.writeToFile(f'[API] Rate limit exceeded for scan {scan_id} on endpoint {endpoint}: {rate_limit.request_count}/{max_requests}')
            return False, rate_limit.request_count

        rate_limit.request_count += 1
        rate_limit.save()

        return True, rate_limit.request_count
    except Exception as e:
        logging.writeToFile(f'[API] Rate limit check error: {str(e)}')
        # On error, allow the request
        return True, 0


def get_website_user(domain):
    """
    Get the system user for a website domain
    """
    try:
        website = Websites.objects.get(domain=domain)
        return website.externalApp
    except Websites.DoesNotExist:
        raise SecurityError(f"Website not found: {domain}")


# =============================================================================
# File Operation API Endpoints
# =============================================================================

@csrf_exempt
@require_http_methods(['POST'])
def scanner_backup_file(request):
    """
    POST /api/scanner/backup-file

    Create a backup copy of a file before modification

    Headers:
        Authorization: Bearer {file_access_token}
        X-Scan-ID: {scan_job_id}

    Request Body:
    {
        "file_path": "wp-content/plugins/example/plugin.php",
        "scan_id": "550e8400-e29b-41d4-a716-446655440000"
    }

    Response:
    {
        "success": true,
        "backup_path": "/home/username/public_html/.ai-scanner-backups/2025-10-25/plugin.php.1730000000.bak",
        "original_path": "wp-content/plugins/example/plugin.php",
        "backup_size": 15420,
        "timestamp": "2025-10-25T20:30:00Z"
    }
    """
    try:
        # Parse request
        data = json.loads(request.body)
        file_path = data.get('file_path', '').strip('/')
        scan_id = data.get('scan_id', '')

        # Validate authorization
        auth_header = request.META.get('HTTP_AUTHORIZATION', '')
        if not auth_header.startswith('Bearer '):
            return JsonResponse({'success': False, 'error': 'Missing or invalid Authorization header'}, status=401)

        access_token = auth_header.replace('Bearer ', '')
        header_scan_id = request.META.get('HTTP_X_SCAN_ID', '')

        if not scan_id or not header_scan_id or scan_id != header_scan_id:
            return JsonResponse({'success': False, 'error': 'Scan ID mismatch'}, status=400)

        # Validate access token
        file_token, error = validate_access_token(access_token, scan_id)
        if error:
            log_file_operation(scan_id, 'backup', file_path, False, error, request=request)
            return JsonResponse({'success': False, 'error': error}, status=401)

        # Rate limiting
        is_allowed, count = check_rate_limit(scan_id, 'backup-file', 100)
        if not is_allowed:
            return JsonResponse({'success': False, 'error': 'Rate limit exceeded (max 100 backups per scan)'}, status=429)

        # Security check and get full path
        try:
            full_path = secure_path_check(file_token.wp_path, file_path)
        except SecurityError as e:
            log_file_operation(scan_id, 'backup', file_path, False, str(e), request=request)
            return JsonResponse({'success': False, 'error': 'Path not allowed'}, status=403)

        # Get website user
        try:
            user = get_website_user(file_token.domain)
        except SecurityError as e:
            log_file_operation(scan_id, 'backup', file_path, False, str(e), request=request)
            return JsonResponse({'success': False, 'error': str(e)}, status=404)

        # Check file exists
        from plogical.processUtilities import ProcessUtilities

        check_cmd = f'test -f "{full_path}" && echo "exists"'
        result = ProcessUtilities.outputExecutioner(check_cmd, user=user, retRequired=True)

        if not result[1] or 'exists' not in result[1]:
            log_file_operation(scan_id, 'backup', file_path, False, 'File not found', request=request)
            return JsonResponse({'success': False, 'error': 'File not found', 'error_code': 'FILE_NOT_FOUND'}, status=404)

        # Create backup directory
        import datetime
        backup_dir_name = f'{file_token.wp_path}/.ai-scanner-backups/{datetime.datetime.now().strftime("%Y-%m-%d")}'
        mkdir_cmd = f'mkdir -p "{backup_dir_name}"'
        ProcessUtilities.executioner(mkdir_cmd, user=user)

        # Create backup filename with timestamp
        timestamp = int(time.time())
        basename = os.path.basename(full_path)
        backup_filename = f'{basename}.{timestamp}.bak'
        backup_path = os.path.join(backup_dir_name, backup_filename)

        # Copy file to backup
        cp_cmd = f'cp "{full_path}" "{backup_path}"'
        cp_result = ProcessUtilities.executioner(cp_cmd, user=user)

        if cp_result != 0:
            log_file_operation(scan_id, 'backup', file_path, False, 'Failed to create backup', request=request)
            return JsonResponse({'success': False, 'error': 'Failed to create backup', 'error_code': 'BACKUP_FAILED'}, status=500)

        # Get file size
        stat_cmd = f'stat -c %s "{backup_path}"'
        stat_result = ProcessUtilities.outputExecutioner(stat_cmd, user=user, retRequired=True)

        backup_size = 0
        if stat_result[1]:
            try:
                backup_size = int(stat_result[1].strip())
            except ValueError:
                pass

        # Log success
        log_file_operation(scan_id, 'backup', file_path, True, backup_path=backup_path, request=request)

        logging.writeToFile(f'[API] Backup created for {file_path}: {backup_path}')

        return JsonResponse({
            'success': True,
            'backup_path': backup_path,
            'original_path': file_path,
            'backup_size': backup_size,
            'timestamp': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ')
        })

    except json.JSONDecodeError:
        return JsonResponse({'success': False, 'error': 'Invalid JSON'}, status=400)
    except Exception as e:
        logging.writeToFile(f'[API] Backup file error: {str(e)}')
        log_file_operation(scan_id if 'scan_id' in locals() else 'unknown', 'backup',
                           file_path if 'file_path' in locals() else 'unknown', False, str(e), request=request)
        return JsonResponse({'success': False, 'error': 'Internal server error'}, status=500)


@csrf_exempt
@require_http_methods(['GET'])
def scanner_get_file(request):
    """
    GET /api/scanner/get-file?file_path=wp-content/plugins/plugin.php

    Read the contents of a file for analysis or verification

    Headers:
        Authorization: Bearer {file_access_token}
        X-Scan-ID: {scan_job_id}

    Response:
    {
        "success": true,
        "file_path": "wp-content/plugins/example/plugin.php",
        "content": "<?php\n/*\nPlugin Name: Example Plugin\n*/\n...",
        "size": 15420,
        "encoding": "utf-8",
        "mime_type": "text/x-php",
        "last_modified": "2025-10-25T20:30:00Z",
        "hash": {
            "md5": "5d41402abc4b2a76b9719d911017c592",
            "sha256": "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae"
        }
    }
    """
    try:
        # Validate authorization
        auth_header = request.META.get('HTTP_AUTHORIZATION', '')
        if not auth_header.startswith('Bearer '):
            return JsonResponse({'success': False, 'error': 'Missing or invalid Authorization header'}, status=401)

        access_token = auth_header.replace('Bearer ', '')
        scan_id = request.META.get('HTTP_X_SCAN_ID', '')

        if not scan_id:
            return JsonResponse({'success': False, 'error': 'X-Scan-ID header required'}, status=400)

        # Get file path
        file_path = request.GET.get('file_path', '').strip('/')
        if not file_path:
            return JsonResponse({'success': False, 'error': 'File path required'}, status=400)

        # Validate access token
        file_token, error = validate_access_token(access_token, scan_id)
        if error:
            log_file_operation(scan_id, 'read', file_path, False, error, request=request)
            return JsonResponse({'success': False, 'error': error}, status=401)

        # Rate limiting
        is_allowed, count = check_rate_limit(scan_id, 'get-file', 500)
        if not is_allowed:
            return JsonResponse({'success': False, 'error': 'Rate limit exceeded (max 500 file reads per scan)'}, status=429)

        # Security check and get full path
        try:
            full_path = secure_path_check(file_token.wp_path, file_path)
        except SecurityError as e:
            log_file_operation(scan_id, 'read', file_path, False, str(e), request=request)
            return JsonResponse({'success': False, 'error': 'Path not allowed'}, status=403)

        # Only allow specific file types for security
        allowed_extensions = {
            '.php', '.js', '.html', '.htm', '.css', '.txt', '.md',
            '.json', '.xml', '.sql', '.log', '.conf', '.ini', '.yml', '.yaml'
        }

        file_ext = os.path.splitext(full_path)[1].lower()
        if file_ext not in allowed_extensions:
            log_file_operation(scan_id, 'read', file_path, False, f'File type not allowed: {file_ext}', request=request)
            return JsonResponse({'success': False, 'error': f'File type not allowed: {file_ext}'}, status=403)

        # Get website user
        try:
            user = get_website_user(file_token.domain)
        except SecurityError as e:
            log_file_operation(scan_id, 'read', file_path, False, str(e), request=request)
            return JsonResponse({'success': False, 'error': str(e)}, status=404)

        # Check file size
        from plogical.processUtilities import ProcessUtilities
        import hashlib

        stat_cmd = f'stat -c "%s %Y" "{full_path}"'
        stat_result = ProcessUtilities.outputExecutioner(stat_cmd, user=user, retRequired=True)

        if not stat_result[1]:
            log_file_operation(scan_id, 'read', file_path, False, 'File not found', request=request)
            return JsonResponse({'success': False, 'error': 'File not found', 'error_code': 'FILE_NOT_FOUND'}, status=404)

        try:
            parts = stat_result[1].strip().split()
            file_size = int(parts[0])
            last_modified_timestamp = int(parts[1])

            if file_size > 10 * 1024 * 1024:  # 10MB limit
                log_file_operation(scan_id, 'read', file_path, False, 'File too large (max 10MB)', request=request)
                return JsonResponse({'success': False, 'error': 'File too large (max 10MB)'}, status=400)
        except (ValueError, IndexError):
            log_file_operation(scan_id, 'read', file_path, False, 'Could not get file size', request=request)
            return JsonResponse({'success': False, 'error': 'Could not get file size'}, status=500)

        # Read file content
        cat_cmd = f'cat "{full_path}"'
        result = ProcessUtilities.outputExecutioner(cat_cmd, user=user, retRequired=True)

        if len(result) < 2:
            log_file_operation(scan_id, 'read', file_path, False, 'Unable to read file', request=request)
            return JsonResponse({'success': False, 'error': 'Unable to read file'}, status=400)

        content = result[1] if result[1] is not None else ''

        # Calculate hashes
        try:
            content_bytes = content.encode('utf-8')
            md5_hash = hashlib.md5(content_bytes).hexdigest()
            sha256_hash = hashlib.sha256(content_bytes).hexdigest()
        except UnicodeEncodeError:
            try:
                content_bytes = content.encode('latin-1')
                md5_hash = hashlib.md5(content_bytes).hexdigest()
                sha256_hash = hashlib.sha256(content_bytes).hexdigest()
            except:
                md5_hash = ''
                sha256_hash = ''

        # Detect MIME type
        mime_type, _ = mimetypes.guess_type(full_path)
        if not mime_type:
            if file_ext == '.php':
                mime_type = 'text/x-php'
            elif file_ext == '.js':
                mime_type = 'application/javascript'
            else:
                mime_type = 'text/plain'

        # Format last modified time
        import datetime
        last_modified = datetime.datetime.fromtimestamp(last_modified_timestamp).strftime('%Y-%m-%dT%H:%M:%SZ')

        # Log success
        log_file_operation(scan_id, 'read', file_path, True, request=request)

        logging.writeToFile(f'[API] File content retrieved: {file_path} ({file_size} bytes)')

        return JsonResponse({
            'success': True,
            'file_path': file_path,
            'content': content,
            'size': file_size,
            'encoding': 'utf-8',
            'mime_type': mime_type,
            'last_modified': last_modified,
            'hash': {
                'md5': md5_hash,
                'sha256': sha256_hash
            }
        })

    except Exception as e:
        logging.writeToFile(f'[API] Get file error: {str(e)}')
        log_file_operation(scan_id if 'scan_id' in locals() else 'unknown', 'read',
                           file_path if 'file_path' in locals() else 'unknown', False, str(e), request=request)
        return JsonResponse({'success': False, 'error': 'Internal server error'}, status=500)


@csrf_exempt
@require_http_methods(['POST'])
def scanner_replace_file(request):
    """
    POST /api/scanner/replace-file

    Overwrite a file with new content (after backup)

    Headers:
        Authorization: Bearer {file_access_token}
        X-Scan-ID: {scan_job_id}

    Request Body:
    {
        "file_path": "wp-content/plugins/example/plugin.php",
        "content": "<?php\n/*\nPlugin Name: Example Plugin (Clean Version)\n*/\n...",
        "backup_before_replace": true,
        "verify_hash": "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae"
    }

    Response:
    {
        "success": true,
        "file_path": "wp-content/plugins/example/plugin.php",
        "backup_path": "/home/username/public_html/.ai-scanner-backups/2025-10-25/plugin.php.1730000000.bak",
        "bytes_written": 14850,
        "new_hash": {
            "md5": "abc123...",
            "sha256": "def456..."
        },
        "timestamp": "2025-10-25T20:35:00Z"
    }
    """
    try:
        # Parse request
        data = json.loads(request.body)
        file_path = data.get('file_path', '').strip('/')
        content = data.get('content', '')
        backup_before_replace = data.get('backup_before_replace', True)
        verify_hash = data.get('verify_hash', '')

        # Validate authorization
        auth_header = request.META.get('HTTP_AUTHORIZATION', '')
        if not auth_header.startswith('Bearer '):
            return JsonResponse({'success': False, 'error': 'Missing or invalid Authorization header'}, status=401)

        access_token = auth_header.replace('Bearer ', '')
        scan_id = request.META.get('HTTP_X_SCAN_ID', '')

        if not scan_id:
            return JsonResponse({'success': False, 'error': 'X-Scan-ID header required'}, status=400)

        # Validate access token
        file_token, error = validate_access_token(access_token, scan_id)
        if error:
            log_file_operation(scan_id, 'replace', file_path, False, error, request=request)
            return JsonResponse({'success': False, 'error': error}, status=401)

        # Rate limiting
        is_allowed, count = check_rate_limit(scan_id, 'replace-file', 100)
        if not is_allowed:
            return JsonResponse({'success': False, 'error': 'Rate limit exceeded (max 100 replacements per scan)'}, status=429)

        # Security check and get full path
        try:
            full_path = secure_path_check(file_token.wp_path, file_path)
        except SecurityError as e:
            log_file_operation(scan_id, 'replace', file_path, False, str(e), request=request)
            return JsonResponse({'success': False, 'error': 'Path not allowed'}, status=403)

        # Get website user
        try:
            user = get_website_user(file_token.domain)
        except SecurityError as e:
            log_file_operation(scan_id, 'replace', file_path, False, str(e), request=request)
            return JsonResponse({'success': False, 'error': str(e)}, status=404)

        # Verify hash if provided
        from plogical.processUtilities import ProcessUtilities
        import hashlib
        import datetime

        if verify_hash:
            cat_cmd = f'cat "{full_path}"'
            result = ProcessUtilities.outputExecutioner(cat_cmd, user=user, retRequired=True)

            if result[1]:
                current_hash = hashlib.sha256(result[1].encode('utf-8')).hexdigest()
                if current_hash != verify_hash:
                    log_file_operation(scan_id, 'replace', file_path, False, 'Hash verification failed - file was modified', request=request)
                    return JsonResponse({
                        'success': False,
                        'error': 'Hash verification failed - file was modified during scan',
                        'error_code': 'HASH_MISMATCH',
                        'expected_hash': verify_hash,
                        'actual_hash': current_hash
                    }, status=400)

        backup_path = None

        # Create backup if requested
        if backup_before_replace:
            backup_dir_name = f'{file_token.wp_path}/.ai-scanner-backups/{datetime.datetime.now().strftime("%Y-%m-%d")}'
            mkdir_cmd = f'mkdir -p "{backup_dir_name}"'
            ProcessUtilities.executioner(mkdir_cmd, user=user)

            timestamp = int(time.time())
            basename = os.path.basename(full_path)
            backup_filename = f'{basename}.{timestamp}.bak'
            backup_path = os.path.join(backup_dir_name, backup_filename)

            cp_cmd = f'cp "{full_path}" "{backup_path}"'
            cp_result = ProcessUtilities.executioner(cp_cmd, user=user)

            if cp_result != 0:
                log_file_operation(scan_id, 'replace', file_path, False, 'Failed to create backup', backup_path=backup_path, request=request)
                return JsonResponse({'success': False, 'error': 'Failed to create backup', 'error_code': 'BACKUP_FAILED'}, status=500)

        # Write new content to temp file first (atomic write)
        temp_path = f'{full_path}.tmp.{int(time.time())}'

        # Write content using a here-document to avoid shell escaping issues
        write_cmd = f'cat > "{temp_path}" << \'EOF_MARKER\'\n{content}\nEOF_MARKER'
        write_result = ProcessUtilities.executioner(write_cmd, user=user)

        if write_result != 0:
            log_file_operation(scan_id, 'replace', file_path, False, 'Failed to write temp file', backup_path=backup_path, request=request)
            return JsonResponse({'success': False, 'error': 'Failed to write file', 'error_code': 'WRITE_FAILED'}, status=500)

        # Get original file permissions
        stat_cmd = f'stat -c %a "{full_path}"'
        stat_result = ProcessUtilities.outputExecutioner(stat_cmd, user=user, retRequired=True)
        permissions = '644'  # Default
        if stat_result[1]:
            permissions = stat_result[1].strip()

        # Set permissions on temp file
        chmod_cmd = f'chmod {permissions} "{temp_path}"'
        ProcessUtilities.executioner(chmod_cmd, user=user)

        # Atomic rename
        mv_cmd = f'mv "{temp_path}" "{full_path}"'
        mv_result = ProcessUtilities.executioner(mv_cmd, user=user)

        if mv_result != 0:
            # Cleanup temp file
            ProcessUtilities.executioner(f'rm -f "{temp_path}"', user=user)
            log_file_operation(scan_id, 'replace', file_path, False, 'Failed to replace file', backup_path=backup_path, request=request)
            return JsonResponse({'success': False, 'error': 'Failed to replace file', 'error_code': 'REPLACE_FAILED'}, status=500)

        # Calculate new hash
        cat_cmd = f'cat "{full_path}"'
        result = ProcessUtilities.outputExecutioner(cat_cmd, user=user, retRequired=True)

        new_md5 = ''
        new_sha256 = ''
        if result[1]:
            try:
                content_bytes = result[1].encode('utf-8')
                new_md5 = hashlib.md5(content_bytes).hexdigest()
                new_sha256 = hashlib.sha256(content_bytes).hexdigest()
            except:
                pass

        bytes_written = len(content.encode('utf-8'))

        # Log success
        log_file_operation(scan_id, 'replace', file_path, True, backup_path=backup_path, request=request)

        logging.writeToFile(f'[API] File replaced: {file_path} ({bytes_written} bytes)')

        return JsonResponse({
            'success': True,
            'file_path': file_path,
            'backup_path': backup_path,
            'bytes_written': bytes_written,
            'new_hash': {
                'md5': new_md5,
                'sha256': new_sha256
            },
            'timestamp': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ')
        })

    except json.JSONDecodeError:
        return JsonResponse({'success': False, 'error': 'Invalid JSON'}, status=400)
    except Exception as e:
        logging.writeToFile(f'[API] Replace file error: {str(e)}')
        log_file_operation(scan_id if 'scan_id' in locals() else 'unknown', 'replace',
                           file_path if 'file_path' in locals() else 'unknown', False, str(e), request=request)
        return JsonResponse({'success': False, 'error': 'Internal server error'}, status=500)


@csrf_exempt
@require_http_methods(['POST'])
def scanner_rename_file(request):
    """
    POST /api/scanner/rename-file

    Rename a file (used for quarantining malicious files)

    Headers:
        Authorization: Bearer {file_access_token}
        X-Scan-ID: {scan_job_id}

    Request Body:
    {
        "old_path": "wp-content/uploads/malicious.php",
        "new_path": "wp-content/uploads/malicious.php.quarantined.1730000000",
        "backup_before_rename": true
    }

    Response:
    {
        "success": true,
        "old_path": "wp-content/uploads/malicious.php",
        "new_path": "wp-content/uploads/malicious.php.quarantined.1730000000",
        "backup_path": "/home/username/public_html/.ai-scanner-backups/2025-10-25/malicious.php.1730000000.bak",
        "timestamp": "2025-10-25T20:40:00Z"
    }
    """
    try:
        # Parse request
        data = json.loads(request.body)
        old_path = data.get('old_path', '').strip('/')
        new_path = data.get('new_path', '').strip('/')
        backup_before_rename = data.get('backup_before_rename', True)

        # Validate authorization
        auth_header = request.META.get('HTTP_AUTHORIZATION', '')
        if not auth_header.startswith('Bearer '):
            return JsonResponse({'success': False, 'error': 'Missing or invalid Authorization header'}, status=401)

        access_token = auth_header.replace('Bearer ', '')
        scan_id = request.META.get('HTTP_X_SCAN_ID', '')

        if not scan_id:
            return JsonResponse({'success': False, 'error': 'X-Scan-ID header required'}, status=400)

        # Validate access token
        file_token, error = validate_access_token(access_token, scan_id)
        if error:
            log_file_operation(scan_id, 'rename', old_path, False, error, request=request)
            return JsonResponse({'success': False, 'error': error}, status=401)

        # Rate limiting
        is_allowed, count = check_rate_limit(scan_id, 'rename-file', 50)
        if not is_allowed:
            return JsonResponse({'success': False, 'error': 'Rate limit exceeded (max 50 renames per scan)'}, status=429)

        # Security check for both paths
        try:
            full_old_path = secure_path_check(file_token.wp_path, old_path)
            full_new_path = secure_path_check(file_token.wp_path, new_path)
        except SecurityError as e:
            log_file_operation(scan_id, 'rename', old_path, False, str(e), request=request)
            return JsonResponse({'success': False, 'error': 'Path not allowed'}, status=403)

        # Get website user
        try:
            user = get_website_user(file_token.domain)
        except SecurityError as e:
            log_file_operation(scan_id, 'rename', old_path, False, str(e), request=request)
            return JsonResponse({'success': False, 'error': str(e)}, status=404)

        # Check source file exists
        from plogical.processUtilities import ProcessUtilities
        import datetime

        check_cmd = f'test -f "{full_old_path}" && echo "exists"'
        result = ProcessUtilities.outputExecutioner(check_cmd, user=user, retRequired=True)

        if not result[1] or 'exists' not in result[1]:
            log_file_operation(scan_id, 'rename', old_path, False, 'Source file not found', request=request)
            return JsonResponse({'success': False, 'error': 'Source file not found', 'error_code': 'FILE_NOT_FOUND'}, status=404)

        # Check destination doesn't exist
        check_cmd = f'test -f "{full_new_path}" && echo "exists"'
        result = ProcessUtilities.outputExecutioner(check_cmd, user=user, retRequired=True)

        if result[1] and 'exists' in result[1]:
            log_file_operation(scan_id, 'rename', old_path, False, 'Destination file already exists', request=request)
            return JsonResponse({'success': False, 'error': 'Destination file already exists', 'error_code': 'FILE_EXISTS'}, status=409)

        backup_path = None

        # Create backup if requested
        if backup_before_rename:
            backup_dir_name = f'{file_token.wp_path}/.ai-scanner-backups/{datetime.datetime.now().strftime("%Y-%m-%d")}'
            mkdir_cmd = f'mkdir -p "{backup_dir_name}"'
            ProcessUtilities.executioner(mkdir_cmd, user=user)

            timestamp = int(time.time())
            basename = os.path.basename(full_old_path)
            backup_filename = f'{basename}.{timestamp}.bak'
            backup_path = os.path.join(backup_dir_name, backup_filename)

            cp_cmd = f'cp "{full_old_path}" "{backup_path}"'
            ProcessUtilities.executioner(cp_cmd, user=user)

        # Perform rename
        mv_cmd = f'mv "{full_old_path}" "{full_new_path}"'
        mv_result = ProcessUtilities.executioner(mv_cmd, user=user)

        if mv_result != 0:
            log_file_operation(scan_id, 'rename', old_path, False, 'Failed to rename file', backup_path=backup_path, request=request)
            return JsonResponse({'success': False, 'error': 'Failed to rename file', 'error_code': 'RENAME_FAILED'}, status=500)

        # Verify rename
        check_cmd = f'test -f "{full_new_path}" && echo "exists"'
        result = ProcessUtilities.outputExecutioner(check_cmd, user=user, retRequired=True)

        if not result[1] or 'exists' not in result[1]:
            log_file_operation(scan_id, 'rename', old_path, False, 'Rename verification failed', backup_path=backup_path, request=request)
            return JsonResponse({'success': False, 'error': 'Rename verification failed'}, status=500)

        # Log success
        log_file_operation(scan_id, 'rename', old_path, True, backup_path=backup_path, request=request)

        logging.writeToFile(f'[API] File renamed: {old_path} -> {new_path}')

        return JsonResponse({
            'success': True,
            'old_path': old_path,
            'new_path': new_path,
            'backup_path': backup_path,
            'timestamp': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ')
        })

    except json.JSONDecodeError:
        return JsonResponse({'success': False, 'error': 'Invalid JSON'}, status=400)
    except Exception as e:
        logging.writeToFile(f'[API] Rename file error: {str(e)}')
        log_file_operation(scan_id if 'scan_id' in locals() else 'unknown', 'rename',
                           old_path if 'old_path' in locals() else 'unknown', False, str(e), request=request)
        return JsonResponse({'success': False, 'error': 'Internal server error'}, status=500)


@csrf_exempt
@require_http_methods(['POST'])
def scanner_delete_file(request):
    """
    POST /api/scanner/delete-file

    Permanently delete a malicious file (after backup)

    Headers:
        Authorization: Bearer {file_access_token}
        X-Scan-ID: {scan_job_id}

    Request Body:
    {
        "file_path": "wp-content/uploads/shell.php",
        "backup_before_delete": true,
        "confirm_deletion": true
    }

    Response:
    {
        "success": true,
        "file_path": "wp-content/uploads/shell.php",
        "backup_path": "/home/username/public_html/.ai-scanner-backups/2025-10-25/shell.php.1730000000.bak",
        "deleted_at": "2025-10-25T20:45:00Z",
        "file_info": {
            "size": 2048,
            "last_modified": "2025-10-20T14:30:00Z",
            "hash": "abc123..."
        }
    }
    """
    try:
        # Parse request
        data = json.loads(request.body)
        file_path = data.get('file_path', '').strip('/')
        backup_before_delete = data.get('backup_before_delete', True)
        confirm_deletion = data.get('confirm_deletion', False)

        # Require explicit confirmation
        if not confirm_deletion:
            return JsonResponse({
                'success': False,
                'error': 'Deletion not confirmed',
                'error_code': 'CONFIRMATION_REQUIRED',
                'message': 'Set confirm_deletion: true to proceed'
            }, status=400)

        # Validate authorization
        auth_header = request.META.get('HTTP_AUTHORIZATION', '')
        if not auth_header.startswith('Bearer '):
            return JsonResponse({'success': False, 'error': 'Missing or invalid Authorization header'}, status=401)

        access_token = auth_header.replace('Bearer ', '')
        scan_id = request.META.get('HTTP_X_SCAN_ID', '')

        if not scan_id:
            return JsonResponse({'success': False, 'error': 'X-Scan-ID header required'}, status=400)

        # Validate access token
        file_token, error = validate_access_token(access_token, scan_id)
        if error:
            log_file_operation(scan_id, 'delete', file_path, False, error, request=request)
            return JsonResponse({'success': False, 'error': error}, status=401)

        # Rate limiting
        is_allowed, count = check_rate_limit(scan_id, 'delete-file', 50)
        if not is_allowed:
            return JsonResponse({'success': False, 'error': 'Rate limit exceeded (max 50 deletions per scan)'}, status=429)

        # Security check and get full path
        try:
            full_path = secure_path_check(file_token.wp_path, file_path)
        except SecurityError as e:
            log_file_operation(scan_id, 'delete', file_path, False, str(e), request=request)
            return JsonResponse({'success': False, 'error': 'Path not allowed'}, status=403)

        # Check for protected files
        protected_files = ['wp-config.php', '.htaccess', 'index.php']
        if os.path.basename(full_path) in protected_files:
            log_file_operation(scan_id, 'delete', file_path, False, 'Cannot delete protected system file', request=request)
            return JsonResponse({'success': False, 'error': 'Cannot delete protected system file', 'error_code': 'PROTECTED_FILE'}, status=403)

        # Get website user
        try:
            user = get_website_user(file_token.domain)
        except SecurityError as e:
            log_file_operation(scan_id, 'delete', file_path, False, str(e), request=request)
            return JsonResponse({'success': False, 'error': str(e)}, status=404)

        # Get file info before deletion
        from plogical.processUtilities import ProcessUtilities
        import hashlib
        import datetime

        stat_cmd = f'stat -c "%s %Y" "{full_path}"'
        stat_result = ProcessUtilities.outputExecutioner(stat_cmd, user=user, retRequired=True)

        if not stat_result[1]:
            log_file_operation(scan_id, 'delete', file_path, False, 'File not found', request=request)
            return JsonResponse({'success': False, 'error': 'File not found', 'error_code': 'FILE_NOT_FOUND'}, status=404)

        file_size = 0
        last_modified = ''
        try:
            parts = stat_result[1].strip().split()
            file_size = int(parts[0])
            last_modified_timestamp = int(parts[1])
            last_modified = datetime.datetime.fromtimestamp(last_modified_timestamp).strftime('%Y-%m-%dT%H:%M:%SZ')
        except (ValueError, IndexError):
            pass

        # Get file hash
        cat_cmd = f'cat "{full_path}"'
        result = ProcessUtilities.outputExecutioner(cat_cmd, user=user, retRequired=True)

        file_hash = ''
        if result[1]:
            try:
                file_hash = hashlib.sha256(result[1].encode('utf-8')).hexdigest()
            except:
                pass

        backup_path = None

        # ALWAYS create backup before deletion
        backup_dir_name = f'{file_token.wp_path}/.ai-scanner-backups/{datetime.datetime.now().strftime("%Y-%m-%d")}'
        mkdir_cmd = f'mkdir -p "{backup_dir_name}"'
        ProcessUtilities.executioner(mkdir_cmd, user=user)

        timestamp = int(time.time())
        basename = os.path.basename(full_path)
        backup_filename = f'{basename}.{timestamp}.bak'
        backup_path = os.path.join(backup_dir_name, backup_filename)

        cp_cmd = f'cp "{full_path}" "{backup_path}"'
        cp_result = ProcessUtilities.executioner(cp_cmd, user=user)

        if cp_result != 0:
            log_file_operation(scan_id, 'delete', file_path, False, 'Backup creation failed - deletion blocked', backup_path=backup_path, request=request)
            return JsonResponse({'success': False, 'error': 'Backup creation failed - deletion blocked', 'error_code': 'BACKUP_FAILED'}, status=500)

        # Delete file
        rm_cmd = f'rm -f "{full_path}"'
        rm_result = ProcessUtilities.executioner(rm_cmd, user=user)

        if rm_result != 0:
            log_file_operation(scan_id, 'delete', file_path, False, 'Failed to delete file', backup_path=backup_path, request=request)
            return JsonResponse({'success': False, 'error': 'Failed to delete file', 'error_code': 'DELETE_FAILED'}, status=500)

        # Verify deletion
        check_cmd = f'test -f "{full_path}" && echo "exists"'
        result = ProcessUtilities.outputExecutioner(check_cmd, user=user, retRequired=True)

        if result[1] and 'exists' in result[1]:
            log_file_operation(scan_id, 'delete', file_path, False, 'Deletion verification failed', backup_path=backup_path, request=request)
            return JsonResponse({'success': False, 'error': 'Deletion verification failed'}, status=500)

        # Log success
        log_file_operation(scan_id, 'delete', file_path, True, backup_path=backup_path, request=request)

        logging.writeToFile(f'[API] File deleted: {file_path} (backup: {backup_path})')

        return JsonResponse({
            'success': True,
            'file_path': file_path,
            'backup_path': backup_path,
            'deleted_at': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ'),
            'file_info': {
                'size': file_size,
                'last_modified': last_modified,
                'hash': file_hash
            }
        })

    except json.JSONDecodeError:
        return JsonResponse({'success': False, 'error': 'Invalid JSON'}, status=400)
    except Exception as e:
        logging.writeToFile(f'[API] Delete file error: {str(e)}')
        log_file_operation(scan_id if 'scan_id' in locals() else 'unknown', 'delete',
                           file_path if 'file_path' in locals() else 'unknown', False, str(e), request=request)
        return JsonResponse({'success': False, 'error': 'Internal server error'}, status=500)

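Illustration (not part of this commit): the five endpoints above share the Bearer-token / X-Scan-ID protocol documented in their docstrings. A minimal scanner-side client sketch might look like the following, assuming the panel API root, the token value, and the `requests` library; BASE_URL, TOKEN and SCAN_ID are placeholders, not values defined anywhere in this patch.

# Hypothetical client sketch: mirrors the request shapes documented above.
import requests

BASE_URL = "https://panel.example.com:8090/api"   # assumed API root
TOKEN = "file-access-token-from-scan-job"          # placeholder
SCAN_ID = "550e8400-e29b-41d4-a716-446655440000"   # placeholder

HEADERS = {"Authorization": f"Bearer {TOKEN}", "X-Scan-ID": SCAN_ID}
TARGET = "wp-content/plugins/example/plugin.php"

# 1. Back up the file before touching it.
backup = requests.post(f"{BASE_URL}/scanner/backup-file", headers=HEADERS,
                       json={"file_path": TARGET, "scan_id": SCAN_ID}, timeout=30).json()

# 2. Read the current content and its SHA-256 hash.
current = requests.get(f"{BASE_URL}/scanner/get-file", headers=HEADERS,
                       params={"file_path": TARGET}, timeout=30).json()

# 3. Replace the file; verify_hash makes the server refuse the write with
#    HASH_MISMATCH if the file changed since it was read.
cleaned_source = current["content"].replace("eval(", "// eval(")  # toy cleanup
replaced = requests.post(f"{BASE_URL}/scanner/replace-file", headers=HEADERS,
                         json={"file_path": TARGET,
                               "content": cleaned_source,
                               "backup_before_replace": True,
                               "verify_hash": current["hash"]["sha256"]},
                         timeout=30).json()

print(backup.get("backup_path"), replaced.get("bytes_written"))
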
aiScanner/migrations/create_file_operation_tables.sql (new file, 43 lines)
@@ -0,0 +1,43 @@
-- AI Scanner File Operations Audit Tables
-- These tables track file operations performed by the scanner for security and auditing

-- Drop tables if they exist (use with caution in production)
-- DROP TABLE IF EXISTS scanner_file_operations;
-- DROP TABLE IF EXISTS scanner_api_rate_limits;

-- Table: scanner_file_operations
-- Tracks all file operations (backup, read, replace, rename, delete)
CREATE TABLE IF NOT EXISTS scanner_file_operations (
    id INT AUTO_INCREMENT PRIMARY KEY,
    scan_id VARCHAR(255) NOT NULL,
    operation VARCHAR(20) NOT NULL,
    file_path VARCHAR(500) NOT NULL,
    backup_path VARCHAR(500) NULL,
    success BOOLEAN NOT NULL DEFAULT FALSE,
    error_message TEXT NULL,
    ip_address VARCHAR(45) NULL,
    user_agent VARCHAR(255) NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    INDEX idx_scan_id (scan_id),
    INDEX idx_created_at (created_at),
    INDEX idx_scan_created (scan_id, created_at)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

-- Table: scanner_api_rate_limits
-- Rate limiting for scanner API endpoints
CREATE TABLE IF NOT EXISTS scanner_api_rate_limits (
    id INT AUTO_INCREMENT PRIMARY KEY,
    scan_id VARCHAR(255) NOT NULL,
    endpoint VARCHAR(100) NOT NULL,
    request_count INT NOT NULL DEFAULT 0,
    last_request_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    UNIQUE KEY unique_scan_endpoint (scan_id, endpoint),
    INDEX idx_scan_endpoint (scan_id, endpoint)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

-- Show created tables
SHOW TABLES LIKE 'scanner_%';

-- Show table structures
DESCRIBE scanner_file_operations;
DESCRIBE scanner_api_rate_limits;

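Illustration (not part of this commit): a minimal sketch of applying this SQL file from a Django shell with the CyberPanel settings loaded. The helper name and the statement-splitting approach are assumptions; comment lines are dropped first because the commented-out DROP statements also end in semicolons and would otherwise produce empty statements.

# Hypothetical helper: executes each statement from the migration file in turn.
from django.db import connection

def apply_file_operation_tables(sql_path='aiScanner/migrations/create_file_operation_tables.sql'):
    with open(sql_path) as f:
        # Strip "--" comment lines, then split the remainder into statements.
        lines = [ln for ln in f.read().splitlines() if not ln.lstrip().startswith('--')]
    statements = [s.strip() for s in '\n'.join(lines).split(';') if s.strip()]
    with connection.cursor() as cursor:
        for statement in statements:
            cursor.execute(statement)
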
@@ -272,7 +272,7 @@ class ScheduledScanExecution(models.Model):
        ('failed', 'Failed'),
        ('cancelled', 'Cancelled'),
    ]

    scheduled_scan = models.ForeignKey(ScheduledScan, on_delete=models.CASCADE, related_name='executions')
    execution_time = models.DateTimeField(auto_now_add=True)
    status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='pending')
@@ -285,14 +285,14 @@ class ScheduledScanExecution(models.Model):
    error_message = models.TextField(blank=True, null=True)
    started_at = models.DateTimeField(null=True, blank=True)
    completed_at = models.DateTimeField(null=True, blank=True)

    class Meta:
        db_table = 'ai_scanner_scheduled_executions'
        ordering = ['-execution_time']

    def __str__(self):
        return f"Execution of {self.scheduled_scan.name} at {self.execution_time}"

    @property
    def scanned_domains(self):
        """Parse domains scanned JSON"""
@@ -302,7 +302,7 @@ class ScheduledScanExecution(models.Model):
        except json.JSONDecodeError:
            return []
        return []

    @property
    def scan_id_list(self):
        """Parse scan IDs JSON"""
@@ -312,11 +312,60 @@ class ScheduledScanExecution(models.Model):
        except json.JSONDecodeError:
            return []
        return []

    def set_scanned_domains(self, domain_list):
        """Set scanned domains from list"""
        self.domains_scanned = json.dumps(domain_list)

    def set_scan_ids(self, scan_id_list):
        """Set scan IDs from list"""
        self.scan_ids = json.dumps(scan_id_list)


class ScannerFileOperation(models.Model):
    """Audit log for file operations performed by scanner"""
    OPERATION_CHOICES = [
        ('backup', 'Backup'),
        ('read', 'Read'),
        ('replace', 'Replace'),
        ('rename', 'Rename'),
        ('delete', 'Delete'),
    ]

    scan_id = models.CharField(max_length=255, db_index=True)
    operation = models.CharField(max_length=20, choices=OPERATION_CHOICES)
    file_path = models.CharField(max_length=500)
    backup_path = models.CharField(max_length=500, blank=True, null=True)
    success = models.BooleanField(default=False)
    error_message = models.TextField(blank=True, null=True)
    ip_address = models.CharField(max_length=45, blank=True, null=True)
    user_agent = models.CharField(max_length=255, blank=True, null=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)

    class Meta:
        db_table = 'scanner_file_operations'
        ordering = ['-created_at']
        indexes = [
            models.Index(fields=['scan_id', 'created_at']),
        ]

    def __str__(self):
        return f"{self.operation} - {self.file_path} ({'success' if self.success else 'failed'})"


class ScannerAPIRateLimit(models.Model):
    """Rate limiting for scanner API endpoints"""
    scan_id = models.CharField(max_length=255)
    endpoint = models.CharField(max_length=100)
    request_count = models.IntegerField(default=0)
    last_request_at = models.DateTimeField(auto_now=True)

    class Meta:
        db_table = 'scanner_api_rate_limits'
        unique_together = ['scan_id', 'endpoint']
        indexes = [
            models.Index(fields=['scan_id', 'endpoint']),
        ]

    def __str__(self):
        return f"{self.scan_id} - {self.endpoint}: {self.request_count} requests"

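Illustration (not part of this commit): these two models back the same tables as the SQL migration above, and log_file_operation() / check_rate_limit() in aiScanner/api.py write to them. A sketch of summarizing one scan's audit trail through the ORM, assuming the models live in aiScanner.models (as the `from .models import` statements in api.py suggest); the reporting helper itself is hypothetical.

# Hypothetical reporting helper built on the models defined above.
from aiScanner.models import ScannerFileOperation, ScannerAPIRateLimit

def scan_audit_summary(scan_id):
    ops = ScannerFileOperation.objects.filter(scan_id=scan_id).order_by('created_at')
    failed = ops.filter(success=False)
    endpoint_usage = {
        rl.endpoint: rl.request_count
        for rl in ScannerAPIRateLimit.objects.filter(scan_id=scan_id)
    }
    return {
        'total_operations': ops.count(),
        'failed_operations': list(failed.values('operation', 'file_path', 'error_message')),
        'endpoint_usage': endpoint_usage,  # e.g. {'backup-file': 3, 'replace-file': 2}
    }
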
@@ -40,4 +40,11 @@ urlpatterns = [
    re_path(r'^ai-scanner/status-webhook$', views.aiScannerStatusWebhook, name='aiScannerStatusWebhookAPI'),
    re_path(r'^ai-scanner/callback/status-webhook$', views.aiScannerStatusWebhook, name='aiScannerStatusWebhookCallbackAPI'),  # Alternative URL for worker compatibility
    re_path(r'^ai-scanner/scan/(?P<scan_id>[^/]+)/live-progress$', views.aiScannerLiveProgress, name='aiScannerLiveProgressAPI'),

    # File operation endpoints for AI Scanner
    re_path(r'^scanner/backup-file$', views.scannerBackupFile, name='scannerBackupFileAPI'),
    re_path(r'^scanner/get-file$', views.scannerGetFile, name='scannerGetFileAPI'),
    re_path(r'^scanner/replace-file$', views.scannerReplaceFile, name='scannerReplaceFileAPI'),
    re_path(r'^scanner/rename-file$', views.scannerRenameFile, name='scannerRenameFileAPI'),
    re_path(r'^scanner/delete-file$', views.scannerDeleteFile, name='scannerDeleteFileAPI'),
]

api/views.py (61 changed lines)
@@ -915,3 +915,64 @@ def aiScannerLiveProgress(request, scan_id):
        logging.writeToFile(f'[API] AI Scanner live progress error: {str(e)}')
        data_ret = {'error': 'Live progress service unavailable'}
        return HttpResponse(json.dumps(data_ret), status=500)


# AI Scanner File Operation endpoints
@csrf_exempt
def scannerBackupFile(request):
    """Scanner backup file endpoint"""
    try:
        from aiScanner.api import scanner_backup_file
        return scanner_backup_file(request)
    except Exception as e:
        logging.writeToFile(f'[API] Scanner backup file error: {str(e)}')
        data_ret = {'error': 'Backup file service unavailable'}
        return HttpResponse(json.dumps(data_ret), status=500)


@csrf_exempt
def scannerGetFile(request):
    """Scanner get file endpoint"""
    try:
        from aiScanner.api import scanner_get_file
        return scanner_get_file(request)
    except Exception as e:
        logging.writeToFile(f'[API] Scanner get file error: {str(e)}')
        data_ret = {'error': 'Get file service unavailable'}
        return HttpResponse(json.dumps(data_ret), status=500)


@csrf_exempt
def scannerReplaceFile(request):
    """Scanner replace file endpoint"""
    try:
        from aiScanner.api import scanner_replace_file
        return scanner_replace_file(request)
    except Exception as e:
        logging.writeToFile(f'[API] Scanner replace file error: {str(e)}')
        data_ret = {'error': 'Replace file service unavailable'}
        return HttpResponse(json.dumps(data_ret), status=500)


@csrf_exempt
def scannerRenameFile(request):
    """Scanner rename file endpoint"""
    try:
        from aiScanner.api import scanner_rename_file
        return scanner_rename_file(request)
    except Exception as e:
        logging.writeToFile(f'[API] Scanner rename file error: {str(e)}')
        data_ret = {'error': 'Rename file service unavailable'}
        return HttpResponse(json.dumps(data_ret), status=500)


@csrf_exempt
def scannerDeleteFile(request):
    """Scanner delete file endpoint"""
    try:
        from aiScanner.api import scanner_delete_file
        return scanner_delete_file(request)
    except Exception as e:
        logging.writeToFile(f'[API] Scanner delete file error: {str(e)}')
        data_ret = {'error': 'Delete file service unavailable'}
        return HttpResponse(json.dumps(data_ret), status=500)

@@ -1249,13 +1249,50 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
                `completed_at` datetime(6) DEFAULT NULL,
                KEY `ai_scanner_scheduled_executions_scheduled_scan_id_idx` (`scheduled_scan_id`),
                KEY `ai_scanner_scheduled_executions_execution_time_idx` (`execution_time` DESC),
                CONSTRAINT `ai_scanner_scheduled_executions_scheduled_scan_id_fk` FOREIGN KEY (`scheduled_scan_id`)
                    REFERENCES `ai_scanner_scheduled_scans` (`id`) ON DELETE CASCADE
                )
            ''')
        except:
            pass

        # AI Scanner File Operation Audit Tables
        try:
            cursor.execute('''
                CREATE TABLE `scanner_file_operations` (
                    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
                    `scan_id` varchar(255) NOT NULL,
                    `operation` varchar(20) NOT NULL,
                    `file_path` varchar(500) NOT NULL,
                    `backup_path` varchar(500) DEFAULT NULL,
                    `success` bool NOT NULL DEFAULT 0,
                    `error_message` longtext DEFAULT NULL,
                    `ip_address` varchar(45) DEFAULT NULL,
                    `user_agent` varchar(255) DEFAULT NULL,
                    `created_at` datetime(6) NOT NULL,
                    KEY `scanner_file_operations_scan_id_idx` (`scan_id`),
                    KEY `scanner_file_operations_created_at_idx` (`created_at`),
                    KEY `scanner_file_operations_scan_created_idx` (`scan_id`, `created_at`)
                )
            ''')
        except:
            pass

        try:
            cursor.execute('''
                CREATE TABLE `scanner_api_rate_limits` (
                    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
                    `scan_id` varchar(255) NOT NULL,
                    `endpoint` varchar(100) NOT NULL,
                    `request_count` integer NOT NULL DEFAULT 0,
                    `last_request_at` datetime(6) NOT NULL,
                    UNIQUE KEY `scanner_api_rate_limits_scan_endpoint_unique` (`scan_id`, `endpoint`),
                    KEY `scanner_api_rate_limits_scan_endpoint_idx` (`scan_id`, `endpoint`)
                )
            ''')
        except:
            pass

        try:
            cursor.execute(
'CREATE TABLE `loginSystem_acl` (`id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, `name` varchar(50) NOT NULL UNIQUE, `adminStatus` integer NOT NULL DEFAULT 0, `versionManagement` integer NOT NULL DEFAULT 0, `createNewUser` integer NOT NULL DEFAULT 0, `deleteUser` integer NOT NULL DEFAULT 0, `resellerCenter` integer NOT NULL DEFAULT 0, `changeUserACL` integer NOT NULL DEFAULT 0, `createWebsite` integer NOT NULL DEFAULT 0, `modifyWebsite` integer NOT NULL DEFAULT 0, `suspendWebsite` integer NOT NULL DEFAULT 0, `deleteWebsite` integer NOT NULL DEFAULT 0, `createPackage` integer NOT NULL DEFAULT 0, `deletePackage` integer NOT NULL DEFAULT 0, `modifyPackage` integer NOT NULL DEFAULT 0, `createDatabase` integer NOT NULL DEFAULT 0, `deleteDatabase` integer NOT NULL DEFAULT 0, `listDatabases` integer NOT NULL DEFAULT 0, `createNameServer` integer NOT NULL DEFAULT 0, `createDNSZone` integer NOT NULL DEFAULT 0, `deleteZone` integer NOT NULL DEFAULT 0, `addDeleteRecords` integer NOT NULL DEFAULT 0, `createEmail` integer NOT NULL DEFAULT 0, `deleteEmail` integer NOT NULL DEFAULT 0, `emailForwarding` integer NOT NULL DEFAULT 0, `changeEmailPassword` integer NOT NULL DEFAULT 0, `dkimManager` integer NOT NULL DEFAULT 0, `createFTPAccount` integer NOT NULL DEFAULT 0, `deleteFTPAccount` integer NOT NULL DEFAULT 0, `listFTPAccounts` integer NOT NULL DEFAULT 0, `createBackup` integer NOT NULL DEFAULT 0, `restoreBackup` integer NOT NULL DEFAULT 0, `addDeleteDestinations` integer NOT NULL DEFAULT 0, `scheduleBackups` integer NOT NULL DEFAULT 0, `remoteBackups` integer NOT NULL DEFAULT 0, `manageSSL` integer NOT NULL DEFAULT 0, `hostnameSSL` integer NOT NULL DEFAULT 0, `mailServerSSL` integer NOT NULL DEFAULT 0)')