remove many unnecessary debug prints, improve large entity handling

overcuriousity 2025-09-17 13:31:35 +02:00
parent 469c133f1b
commit ec755b17ad
9 changed files with 57 additions and 121 deletions

View File

@@ -25,7 +25,7 @@ DEFAULT_RECURSION_DEPTH=2
# Default timeout for provider API requests in seconds.
DEFAULT_TIMEOUT=30
# The number of concurrent provider requests to make.
- MAX_CONCURRENT_REQUESTS=5
+ MAX_CONCURRENT_REQUESTS=1
# The number of results from a provider that triggers the "large entity" grouping.
LARGE_ENTITY_THRESHOLD=100
# The number of times to retry a target if a provider fails.

app.py
View File

@@ -32,7 +32,7 @@ def get_user_scanner():
    if current_flask_session_id:
        existing_scanner = session_manager.get_session(current_flask_session_id)
        if existing_scanner:
-             print(f"Reusing existing session: {current_flask_session_id}")
+             #print(f"Reusing existing session: {current_flask_session_id}")
            return current_flask_session_id, existing_scanner
        else:
            print(f"Session {current_flask_session_id} expired, will create new one")
@@ -93,7 +93,6 @@ def start_scan():
        return jsonify({'success': False, 'error': 'Failed to get scanner instance.'}), 500
    print(f"Using scanner {id(scanner)} in session {user_session_id}")
-     print(f"Scanner has {len(scanner.providers)} providers: {[p.get_name() for p in scanner.providers]}")
    # FIXED: Pass clear_graph flag to scanner, let it handle graph clearing internally
    success = scanner.start_scan(target, max_depth, clear_graph=clear_graph, force_rescan_target=force_rescan_target)
@@ -469,7 +468,6 @@ def get_providers():
        completed_tasks = scanner.indicators_completed
        total_tasks = scanner.total_tasks_ever_enqueued
        print(f"DEBUG: Task Progress - Completed: {completed_tasks}, Total Enqueued: {total_tasks}")
-         print(f"DEBUG: Scanner has {len(scanner.providers)} providers: {[p.get_name() for p in scanner.providers]}")
    else:
        print("DEBUG: No active scanner session found.")

View File

@@ -21,8 +21,8 @@ class Config:
        # --- General Settings ---
        self.default_recursion_depth = 2
-         self.default_timeout = 30
-         self.max_concurrent_requests = 5
+         self.default_timeout = 60
+         self.max_concurrent_requests = 1
        self.large_entity_threshold = 100
        self.max_retries_per_target = 8
        self.cache_expiry_hours = 12

View File

@@ -344,13 +344,19 @@ class GraphManager:
            return False

        node_data = self.graph.nodes[large_entity_id]
-         attributes = node_data.get('attributes', {})
+         attributes = node_data.get('attributes', [])
+
+         # Find the 'nodes' attribute dictionary in the list
+         nodes_attr = next((attr for attr in attributes if attr.get('name') == 'nodes'), None)

        # Remove from the list of member nodes
-         if 'nodes' in attributes and node_id_to_extract in attributes['nodes']:
-             attributes['nodes'].remove(node_id_to_extract)
-             # Update the count
-             attributes['count'] = len(attributes['nodes'])
+         if nodes_attr and 'value' in nodes_attr and isinstance(nodes_attr['value'], list) and node_id_to_extract in nodes_attr['value']:
+             nodes_attr['value'].remove(node_id_to_extract)
+
+             # Find the 'count' attribute and update it
+             count_attr = next((attr for attr in attributes if attr.get('name') == 'count'), None)
+             if count_attr:
+                 count_attr['value'] = len(nodes_attr['value'])
        else:
            # This can happen if the node was already extracted, which is not an error.
            print(f"Warning: Node {node_id_to_extract} not found in the 'nodes' list of {large_entity_id}.")

View File

@@ -152,7 +152,7 @@ class ForensicLogger:
        # Log to standard logger
        if error:
-             self.logger.error(f"API Request Failed - {provider}: {url} - {error}")
+             self.logger.error(f"API Request Failed.")
        else:
            self.logger.info(f"API Request - {provider}: {url} - Status: {status_code}")

View File

@@ -432,7 +432,7 @@ class Scanner:
        self.indicators_completed += 1
        if not self._is_stop_requested():
-             all_new_targets = new_targets.union(large_entity_members)
+             all_new_targets = new_targets
            for new_target in all_new_targets:
                is_ip_new = _is_valid_ip(new_target)
                eligible_providers_new = self._get_eligible_providers(new_target, is_ip_new, False)
@@ -576,6 +576,33 @@
            print(f"Stop requested before processing results from {provider_name} for {target}")
            return discovered_targets, False

+         attributes_by_node = defaultdict(list)
+         for attribute in provider_result.attributes:
+             # Convert the StandardAttribute object to a dictionary that the frontend can use
+             attr_dict = {
+                 "name": attribute.name,
+                 "value": attribute.value,
+                 "type": attribute.type,
+                 "provider": attribute.provider,
+                 "confidence": attribute.confidence,
+                 "metadata": attribute.metadata
+             }
+             attributes_by_node[attribute.target_node].append(attr_dict)
+
+         # Add attributes to nodes
+         for node_id, node_attributes_list in attributes_by_node.items():
+             if self.graph.graph.has_node(node_id):
+                 # Determine node type
+                 if _is_valid_ip(node_id):
+                     node_type = NodeType.IP
+                 elif node_id.startswith('AS') and node_id[2:].isdigit():
+                     node_type = NodeType.ASN
+                 else:
+                     node_type = NodeType.DOMAIN
+
+                 # Add node with the list of attributes
+                 self.graph.add_node(node_id, node_type, attributes=node_attributes_list)
+
        # Check for large entity based on relationship count
        if provider_result.get_relationship_count() > self.config.large_entity_threshold:
            print(f"Large entity detected: {provider_name} returned {provider_result.get_relationship_count()} relationships for {target}")
@@ -619,34 +646,6 @@
            if _is_valid_domain(target_node) or _is_valid_ip(target_node):
                discovered_targets.add(target_node)

-         # Process attributes, preserving them as a list of objects
-         attributes_by_node = defaultdict(list)
-         for attribute in provider_result.attributes:
-             # Convert the StandardAttribute object to a dictionary that the frontend can use
-             attr_dict = {
-                 "name": attribute.name,
-                 "value": attribute.value,
-                 "type": attribute.type,
-                 "provider": attribute.provider,
-                 "confidence": attribute.confidence,
-                 "metadata": attribute.metadata
-             }
-             attributes_by_node[attribute.target_node].append(attr_dict)
-
-         # Add attributes to nodes
-         for node_id, node_attributes_list in attributes_by_node.items():
-             if self.graph.graph.has_node(node_id):
-                 # Determine node type
-                 if _is_valid_ip(node_id):
-                     node_type = NodeType.IP
-                 elif node_id.startswith('AS') and node_id[2:].isdigit():
-                     node_type = NodeType.ASN
-                 else:
-                     node_type = NodeType.DOMAIN
-
-                 # Add node with the list of attributes
-                 self.graph.add_node(node_id, node_type, attributes=node_attributes_list)
-
        return discovered_targets, False

    def _create_large_entity_from_provider_result(self, source: str, provider_name: str,
@@ -793,7 +792,11 @@
        # 4. Re-queue the extracted node for full processing by all eligible providers
        print(f"Re-queueing extracted node {node_id_to_extract} for full reconnaissance...")
        is_ip = _is_valid_ip(node_id_to_extract)
-         current_depth = self.graph.graph.nodes[large_entity_id].get('attributes', {}).get('discovery_depth', 0)
+
+         # FIX: Correctly retrieve discovery_depth from the list of attributes
+         large_entity_attributes = self.graph.graph.nodes[large_entity_id].get('attributes', [])
+         discovery_depth_attr = next((attr for attr in large_entity_attributes if attr.get('name') == 'discovery_depth'), None)
+         current_depth = discovery_depth_attr['value'] if discovery_depth_attr else 0
        eligible_providers = self._get_eligible_providers(node_id_to_extract, is_ip, False)
        for provider in eligible_providers:
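
Note on the change above: attribute grouping now runs before the large-entity check, and discovery_depth is read back out of the same list-of-dicts attribute format by name. A compressed sketch of the grouping step under those assumptions (the StandardAttribute stand-in and the sample values are illustrative, not the project's actual class):

from collections import defaultdict
from dataclasses import dataclass, field

@dataclass
class StandardAttribute:
    # Simplified stand-in for the provider result attribute type
    target_node: str
    name: str
    value: object
    type: str = 'string'
    provider: str = 'example'
    confidence: float = 0.8
    metadata: dict = field(default_factory=dict)

provider_attributes = [
    StandardAttribute('example.com', 'asn', 'AS64496'),
    StandardAttribute('93.184.216.34', 'country', 'US'),
]

attributes_by_node = defaultdict(list)
for attribute in provider_attributes:
    attributes_by_node[attribute.target_node].append({
        'name': attribute.name,
        'value': attribute.value,
        'type': attribute.type,
        'provider': attribute.provider,
        'confidence': attribute.confidence,
        'metadata': attribute.metadata,
    })

# Each node now carries its attributes as a list of dicts, which is the shape
# the graph manager's extract logic and the discovery_depth lookup expect.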

View File

@@ -50,7 +50,6 @@ class SessionManager:
        """Restore SessionManager after unpickling."""
        self.__dict__.update(state)
        # Re-initialize unpickleable attributes
-         import redis
        self.redis_client = redis.StrictRedis(db=0, decode_responses=False)
        self.lock = threading.Lock()
        self.creation_lock = threading.Lock()
@@ -140,7 +139,6 @@ class SessionManager:
        new_scanner = Scanner(session_config=preserved_config)
        new_scanner.session_id = new_session_id
-         print(f"New scanner has {len(new_scanner.providers)} providers: {[p.get_name() for p in new_scanner.providers]}")
        new_session_data = {
            'scanner': new_scanner,

View File

@@ -214,7 +214,6 @@ class GraphManager {
        });
        document.body.appendChild(this.contextMenu);
-         console.log('Context menu created and added to body');
    }

    /**
@@ -291,7 +290,6 @@ class GraphManager {
        // FIXED: Right-click context menu
        this.container.addEventListener('contextmenu', (event) => {
            event.preventDefault();
-             console.log('Right-click detected at:', event.offsetX, event.offsetY);

            // Get coordinates relative to the canvas
            const pointer = {
@@ -300,7 +298,6 @@ class GraphManager {
            };
            const nodeId = this.network.getNodeAt(pointer);
-             console.log('Node at pointer:', nodeId);

            if (nodeId) {
                // Pass the original client event for positioning
@@ -341,19 +338,12 @@ class GraphManager {
        // Stabilization events with progress
        this.network.on('stabilizationProgress', (params) => {
            const progress = params.iterations / params.total;
-             this.updateStabilizationProgress(progress);
        });

        this.network.on('stabilizationIterationsDone', () => {
            this.onStabilizationComplete();
        });

-         // Selection events
-         this.network.on('select', (params) => {
-             console.log('Selected nodes:', params.nodes);
-             console.log('Selected edges:', params.edges);
-         });

        // Click away to hide context menu
        document.addEventListener('click', (e) => {
            if (!this.contextMenu.contains(e.target)) {
@@ -979,15 +969,6 @@ class GraphManager {
        }, 2000);
    }

-     /**
-      * Update stabilization progress
-      * @param {number} progress - Progress value (0-1)
-      */
-     updateStabilizationProgress(progress) {
-         // Could show a progress indicator if needed
-         console.log(`Graph stabilization: ${(progress * 100).toFixed(1)}%`);
-     }

    /**
     * Handle stabilization completion
     */
@@ -1175,11 +1156,11 @@ class GraphManager {
            adjacencyList
        );

-         console.log(`Reachability analysis complete:`, {
+         /*console.log(`Reachability analysis complete:`, {
            reachable: analysis.reachableNodes.size,
            unreachable: analysis.unreachableNodes.size,
            clusters: analysis.isolatedClusters.length
-         });
+         });*/

        return analysis;
    }
@@ -1294,7 +1275,6 @@ class GraphManager {
     * Replaces the existing applyAllFilters() method
     */
    applyAllFilters() {
-         console.log("Applying filters with enhanced reachability analysis...");
        if (this.nodes.length === 0) return;

        // Get filter criteria from UI
@@ -1351,22 +1331,9 @@ class GraphManager {
            timestamp: Date.now()
        };

-         // Apply hiding with forensic documentation
-         const updates = nodesToHide.map(id => ({
-             id: id,
-             hidden: true,
-             forensicNote: `Hidden due to reachability analysis from ${nodeId}`
-         }));

        this.nodes.update(updates);
        this.addToHistory('hide', historyData);

-         console.log(`Forensic hide operation: ${nodesToHide.length} nodes hidden`, {
-             originalTarget: nodeId,
-             cascadeNodes: nodesToHide.length - 1,
-             isolatedClusters: analysis.isolatedClusters.length
-         });

        return {
            hiddenNodes: nodesToHide,
            isolatedClusters: analysis.isolatedClusters
@@ -1450,8 +1417,6 @@ class GraphManager {
        // Handle operation results
        if (!operationFailed) {
            this.addToHistory('delete', historyData);
-             console.log(`Forensic delete operation completed:`, historyData.forensicAnalysis);

            return {
                success: true,
                deletedNodes: nodesToDelete,
@@ -1542,7 +1507,6 @@ class GraphManager {
            e.stopPropagation();
            const action = e.currentTarget.dataset.action;
            const nodeId = e.currentTarget.dataset.nodeId;
-             console.log('Context menu action:', action, 'for node:', nodeId);
            this.performContextMenuAction(action, nodeId);
            this.hideContextMenu();
        });
@@ -1563,8 +1527,6 @@ class GraphManager {
     * Updates the existing performContextMenuAction() method
     */
    performContextMenuAction(action, nodeId) {
-         console.log('Performing enhanced action:', action, 'on node:', nodeId);

        switch (action) {
            case 'focus':
                this.focusOnNode(nodeId);

View File

@@ -120,7 +120,6 @@ class DNSReconApp {
                max-width: 400px;
            `;
            document.body.appendChild(messageContainer);
-             console.log('Message container created');
        }
    }
@@ -135,7 +134,6 @@ class DNSReconApp {
        this.initializeModalFunctionality();

        this.elements.startScan.addEventListener('click', (e) => {
-             console.log('Start scan button clicked');
            e.preventDefault();
            this.startScan();
        });
@@ -146,13 +144,11 @@ class DNSReconApp {
        });

        this.elements.stopScan.addEventListener('click', (e) => {
-             console.log('Stop scan button clicked');
            e.preventDefault();
            this.stopScan();
        });

        this.elements.exportResults.addEventListener('click', (e) => {
-             console.log('Export results button clicked');
            e.preventDefault();
            this.exportResults();
        });
@@ -162,7 +158,6 @@ class DNSReconApp {
        // Enter key support for target domain input
        this.elements.targetInput.addEventListener('keypress', (e) => {
            if (e.key === 'Enter' && !this.isScanning) {
-                 console.log('Enter key pressed in domain input');
                this.startScan();
            }
        });
@@ -287,12 +282,8 @@ class DNSReconApp {
                force_rescan_target: forceRescanTarget
            };

-             console.log('Request data:', requestData);

            const response = await this.apiCall('/api/scan/start', 'POST', requestData);

-             console.log('API response received:', response);

            if (response.success) {
                this.currentSessionId = response.scan_id;
                this.showSuccess('Reconnaissance scan started successfully');
@@ -345,7 +336,6 @@ class DNSReconApp {
            if (response.success) {
                this.showSuccess('Scan stop requested');
-                 console.log('Scan stop requested successfully');

                // Force immediate status update
                setTimeout(() => {
@@ -414,7 +404,6 @@ class DNSReconApp {
        }

        this.pollInterval = setInterval(() => {
-             console.log('--- Polling tick ---');
            this.updateStatus();
            this.updateGraph();
            this.loadProviders();
@@ -439,16 +428,11 @@ class DNSReconApp {
     */
    async updateStatus() {
        try {
-             console.log('Updating status...');
            const response = await this.apiCall('/api/scan/status');
-             console.log('Status response:', response);

            if (response.success && response.status) {
                const status = response.status;
-                 console.log('Current scan status:', status.status);
-                 console.log('Current progress:', status.progress_percentage + '%');
-                 console.log('Graph stats:', status.graph_statistics);

                this.updateStatusDisplay(status);
@@ -478,7 +462,6 @@ class DNSReconApp {
            console.log('Updating graph...');
            const response = await this.apiCall('/api/graph');
-             console.log('Graph response:', response);

            if (response.success) {
                const graphData = response.graph;
@@ -528,8 +511,6 @@ class DNSReconApp {
     */
    updateStatusDisplay(status) {
        try {
-             console.log('Updating status display...');

            // Update status text with animation
            if (this.elements.scanStatus) {
                const formattedStatus = this.formatStatus(status.status);
@@ -584,8 +565,6 @@ class DNSReconApp {
            }

            this.setUIState(status.status, status.task_queue_size);
-             console.log('Status display updated successfully');

        } catch (error) {
            console.error('Error updating status display:', error);
        }
@@ -681,8 +660,6 @@ class DNSReconApp {
     * UI state management with immediate button updates
     */
    setUIState(state, task_queue_size) {
-         console.log(`Setting UI state to: ${state}`);

        const isQueueEmpty = task_queue_size === 0;

        switch (state) {
@@ -743,7 +720,6 @@ class DNSReconApp {
     */
    async loadProviders() {
        try {
-             console.log('Loading providers...');
            const response = await this.apiCall('/api/providers');

            if (response.success) {
@@ -1401,7 +1377,6 @@ class DNSReconApp {
                e.stopPropagation();
                const nodeId = e.target.dataset.nodeId || e.target.getAttribute('data-node-id');
-                 console.log('Node link clicked:', nodeId);

                if (nodeId && this.graphManager && this.graphManager.nodes) {
                    const nextNode = this.graphManager.nodes.get(nodeId);
@@ -1758,7 +1733,6 @@ class DNSReconApp {
     * @returns {Promise<Object>} Response data
     */
    async apiCall(endpoint, method = 'GET', data = null) {
-         console.log(`Making API call: ${method} ${endpoint}`, data ? data : '(no data)');

        try {
            const options = {
@@ -1773,17 +1747,14 @@ class DNSReconApp {
                console.log('Request body:', options.body);
            }

-             console.log('Fetch options:', options);

            const response = await fetch(endpoint, options);

-             console.log('Response status:', response.status, response.statusText);

            if (!response.ok) {
                throw new Error(`HTTP ${response.status}: ${response.statusText}`);
            }

            const result = await response.json();
-             console.log('Response data:', result);

            return result;
@@ -1918,8 +1889,6 @@ class DNSReconApp {
     * @param {string} type - Message type (success, error, warning, info)
     */
    showMessage(message, type = 'info') {
-         console.log(`${type.toUpperCase()}: ${message}`);

        // Create message element
        const messageElement = document.createElement('div');
        messageElement.className = `message-toast message-${type}`;