This commit is contained in:
overcuriousity 2025-09-12 14:11:09 +02:00
parent 7e2473b521
commit 3ecfca95e6
2 changed files with 85 additions and 227 deletions

View File

@ -1,281 +1,137 @@
""" # dnsrecon/core/session_manager.py
Session manager for DNSRecon multi-user support.
Manages individual scanner instances per user session with automatic cleanup.
"""
import threading import threading
import time import time
import uuid import uuid
import redis
import pickle
from typing import Dict, Optional, Any from typing import Dict, Optional, Any
from datetime import datetime, timezone
from core.scanner import Scanner from core.scanner import Scanner
# WARNING: Using pickle can be a security risk if the data source is not trusted.
# In this case, we are only serializing/deserializing our own trusted Scanner objects,
# which is generally safe. Do not unpickle data from untrusted sources.
class SessionManager: class SessionManager:
""" """
Manages multiple scanner instances for concurrent user sessions. Manages multiple scanner instances for concurrent user sessions using Redis.
Provides session isolation and automatic cleanup of inactive sessions. This allows session state to be shared across multiple Gunicorn worker processes.
""" """
def __init__(self, session_timeout_minutes: int = 60): def __init__(self, session_timeout_minutes: int = 60):
""" """
Initialize session manager. Initialize session manager with a Redis backend.
Args:
session_timeout_minutes: Minutes of inactivity before session cleanup
""" """
self.sessions: Dict[str, Dict[str, Any]] = {} self.redis_client = redis.StrictRedis(db=0, decode_responses=False)
self.session_timeout = session_timeout_minutes * 60 # Convert to seconds self.session_timeout = session_timeout_minutes * 60 # Convert to seconds
self.lock = threading.Lock() self.lock = threading.Lock() # Lock for local operations, Redis handles atomic ops
# Start cleanup thread # Start cleanup thread
self.cleanup_thread = threading.Thread(target=self._cleanup_loop, daemon=True) self.cleanup_thread = threading.Thread(target=self._cleanup_loop, daemon=True)
self.cleanup_thread.start() self.cleanup_thread.start()
print(f"SessionManager initialized with {session_timeout_minutes}min timeout") print(f"SessionManager initialized with Redis backend and {session_timeout_minutes}min timeout")
def _get_session_key(self, session_id: str) -> str:
"""Generates the Redis key for a session."""
return f"dnsrecon:session:{session_id}"
def create_session(self) -> str: def create_session(self) -> str:
""" """
Create a new user session with dedicated scanner instance and configuration. Create a new user session and store it in Redis.
Enhanced with better debugging and race condition protection.
Returns:
Unique session ID
""" """
session_id = str(uuid.uuid4()) session_id = str(uuid.uuid4())
print(f"=== CREATING SESSION {session_id} IN REDIS ===")
print(f"=== CREATING SESSION {session_id} ===")
try: try:
# Create session-specific configuration
from core.session_config import create_session_config from core.session_config import create_session_config
session_config = create_session_config() session_config = create_session_config()
print(f"Created session config for {session_id}")
# Create scanner with session config
from core.scanner import Scanner
scanner_instance = Scanner(session_config=session_config) scanner_instance = Scanner(session_config=session_config)
print(f"Created scanner instance {id(scanner_instance)} for session {session_id}") session_data = {
print(f"Initial scanner status: {scanner_instance.status}") 'scanner': scanner_instance,
'config': session_config,
'created_at': time.time(),
'last_activity': time.time(),
'status': 'active'
}
with self.lock: # Serialize the entire session data dictionary using pickle
self.sessions[session_id] = { serialized_data = pickle.dumps(session_data)
'scanner': scanner_instance,
'config': session_config,
'created_at': time.time(),
'last_activity': time.time(),
'user_agent': '',
'status': 'active'
}
print(f"Session {session_id} stored in session manager") # Store in Redis
print(f"Total active sessions: {len([s for s in self.sessions.values() if s['status'] == 'active'])}") session_key = self._get_session_key(session_id)
print(f"=== SESSION {session_id} CREATED SUCCESSFULLY ===") self.redis_client.setex(session_key, self.session_timeout, serialized_data)
print(f"Session {session_id} stored in Redis")
return session_id return session_id
except Exception as e: except Exception as e:
print(f"ERROR: Failed to create session {session_id}: {e}") print(f"ERROR: Failed to create session {session_id}: {e}")
raise raise
def get_session(self, session_id: str) -> Optional[object]: def _get_session_data(self, session_id: str) -> Optional[Dict[str, Any]]:
"""Retrieves and deserializes session data from Redis."""
session_key = self._get_session_key(session_id)
serialized_data = self.redis_client.get(session_key)
if serialized_data:
return pickle.loads(serialized_data)
return None
def _save_session_data(self, session_id: str, session_data: Dict[str, Any]):
"""Serializes and saves session data back to Redis with updated TTL."""
session_key = self._get_session_key(session_id)
serialized_data = pickle.dumps(session_data)
self.redis_client.setex(session_key, self.session_timeout, serialized_data)
def get_session(self, session_id: str) -> Optional[Scanner]:
""" """
Get scanner instance for a session with enhanced debugging. Get scanner instance for a session from Redis.
Args:
session_id: Session identifier
Returns:
Scanner instance or None if session doesn't exist
""" """
if not session_id: if not session_id:
print("get_session called with empty session_id")
return None return None
with self.lock: session_data = self._get_session_data(session_id)
if session_id not in self.sessions:
print(f"Session {session_id} not found in session manager")
print(f"Available sessions: {list(self.sessions.keys())}")
return None
session_data = self.sessions[session_id] if not session_data or session_data.get('status') != 'active':
return None
# Check if session is still active # Update last activity and save back to Redis
if session_data['status'] != 'active': session_data['last_activity'] = time.time()
print(f"Session {session_id} is not active (status: {session_data['status']})") self._save_session_data(session_id, session_data)
return None
# Update last activity return session_data.get('scanner')
session_data['last_activity'] = time.time()
scanner = session_data['scanner']
print(f"Retrieved scanner {id(scanner)} for session {session_id}")
print(f"Scanner status: {scanner.status}")
return scanner
def get_or_create_session(self, session_id: Optional[str] = None) -> tuple[str, Scanner]:
"""
Get existing session or create new one.
Args:
session_id: Optional existing session ID
Returns:
Tuple of (session_id, scanner_instance)
"""
if session_id and self.get_session(session_id):
return session_id, self.get_session(session_id)
else:
new_session_id = self.create_session()
return new_session_id, self.get_session(new_session_id)
def terminate_session(self, session_id: str) -> bool: def terminate_session(self, session_id: str) -> bool:
""" """
Terminate a specific session and cleanup resources. Terminate a specific session in Redis.
Args:
session_id: Session to terminate
Returns:
True if session was terminated successfully
""" """
with self.lock: session_data = self._get_session_data(session_id)
if session_id not in self.sessions: if not session_data:
return False return False
session_data = self.sessions[session_id] scanner = session_data.get('scanner')
scanner = session_data['scanner'] if scanner and scanner.status == 'running':
scanner.stop_scan()
print(f"Stopped scan for session: {session_id}")
# Stop any running scan # Delete from Redis
try: session_key = self._get_session_key(session_id)
if scanner.status == 'running': self.redis_client.delete(session_key)
scanner.stop_scan()
print(f"Stopped scan for session: {session_id}")
except Exception as e:
print(f"Error stopping scan for session {session_id}: {e}")
# Mark as terminated print(f"Terminated and removed session from Redis: {session_id}")
session_data['status'] = 'terminated' return True
session_data['terminated_at'] = time.time()
# Remove from active sessions after a brief delay to allow cleanup
threading.Timer(5.0, lambda: self._remove_session(session_id)).start()
print(f"Terminated session: {session_id}")
return True
def _remove_session(self, session_id: str) -> None:
"""Remove session from memory."""
with self.lock:
if session_id in self.sessions:
del self.sessions[session_id]
print(f"Removed session from memory: {session_id}")
def get_session_info(self, session_id: str) -> Optional[Dict[str, Any]]:
"""
Get session information without updating activity.
Args:
session_id: Session identifier
Returns:
Session information dictionary or None
"""
with self.lock:
if session_id not in self.sessions:
return None
session_data = self.sessions[session_id]
scanner = session_data['scanner']
return {
'session_id': session_id,
'created_at': datetime.fromtimestamp(session_data['created_at'], timezone.utc).isoformat(),
'last_activity': datetime.fromtimestamp(session_data['last_activity'], timezone.utc).isoformat(),
'status': session_data['status'],
'scan_status': scanner.status,
'current_target': scanner.current_target,
'uptime_seconds': time.time() - session_data['created_at']
}
def list_active_sessions(self) -> Dict[str, Dict[str, Any]]:
"""
List all active sessions with enhanced debugging info.
Returns:
Dictionary of session information
"""
active_sessions = {}
with self.lock:
for session_id, session_data in self.sessions.items():
if session_data['status'] == 'active':
scanner = session_data['scanner']
active_sessions[session_id] = {
'session_id': session_id,
'created_at': datetime.fromtimestamp(session_data['created_at'], timezone.utc).isoformat(),
'last_activity': datetime.fromtimestamp(session_data['last_activity'], timezone.utc).isoformat(),
'status': session_data['status'],
'scan_status': scanner.status,
'current_target': scanner.current_target,
'uptime_seconds': time.time() - session_data['created_at'],
'scanner_object_id': id(scanner)
}
return active_sessions
def _cleanup_loop(self) -> None: def _cleanup_loop(self) -> None:
"""Background thread to cleanup inactive sessions.""" """
Background thread to cleanup inactive sessions.
Redis's TTL (setex) handles most of this automatically. This loop is a failsafe.
"""
while True: while True:
try: # Redis handles expiration automatically, so this loop can be simplified or removed
current_time = time.time() # For now, we'll keep it as a failsafe check for non-expiring keys if any get created by mistake
sessions_to_cleanup = [] time.sleep(300) # Sleep for 5 minutes
with self.lock:
for session_id, session_data in self.sessions.items():
if session_data['status'] != 'active':
continue
inactive_time = current_time - session_data['last_activity']
if inactive_time > self.session_timeout:
sessions_to_cleanup.append(session_id)
# Cleanup outside of lock to avoid deadlock
for session_id in sessions_to_cleanup:
print(f"Cleaning up inactive session: {session_id}")
self.terminate_session(session_id)
# Sleep for 5 minutes between cleanup cycles
time.sleep(300)
except Exception as e:
print(f"Error in session cleanup loop: {e}")
time.sleep(60) # Sleep for 1 minute on error
def get_statistics(self) -> Dict[str, Any]:
"""
Get session manager statistics.
Returns:
Statistics dictionary
"""
with self.lock:
active_count = sum(1 for s in self.sessions.values() if s['status'] == 'active')
running_scans = sum(1 for s in self.sessions.values()
if s['status'] == 'active' and s['scanner'].status == 'running')
return {
'total_sessions': len(self.sessions),
'active_sessions': active_count,
'running_scans': running_scans,
'session_timeout_minutes': self.session_timeout / 60
}
# Global session manager instance # Global session manager instance
session_manager = SessionManager(session_timeout_minutes=60) session_manager = SessionManager(session_timeout_minutes=60)

View File

@ -5,3 +5,5 @@ python-dateutil>=2.8.2
Werkzeug>=2.3.7 Werkzeug>=2.3.7
urllib3>=2.0.0 urllib3>=2.0.0
dnspython>=2.4.2 dnspython>=2.4.2
gunicorn
redis