data-model #2

Merged
mstoeck3 merged 20 commits from data-model into main 2025-09-17 21:56:18 +00:00
9 changed files with 57 additions and 121 deletions
Showing only changes of commit ec755b17ad

View File

@@ -25,7 +25,7 @@ DEFAULT_RECURSION_DEPTH=2
# Default timeout for provider API requests in seconds.
DEFAULT_TIMEOUT=30
# The number of concurrent provider requests to make.
MAX_CONCURRENT_REQUESTS=5
MAX_CONCURRENT_REQUESTS=1
# The number of results from a provider that triggers the "large entity" grouping.
LARGE_ENTITY_THRESHOLD=100
# The number of times to retry a target if a provider fails.

app.py
View File

@@ -32,7 +32,7 @@ def get_user_scanner():
if current_flask_session_id:
existing_scanner = session_manager.get_session(current_flask_session_id)
if existing_scanner:
print(f"Reusing existing session: {current_flask_session_id}")
#print(f"Reusing existing session: {current_flask_session_id}")
return current_flask_session_id, existing_scanner
else:
print(f"Session {current_flask_session_id} expired, will create new one")
@@ -93,7 +93,6 @@ def start_scan():
return jsonify({'success': False, 'error': 'Failed to get scanner instance.'}), 500
print(f"Using scanner {id(scanner)} in session {user_session_id}")
print(f"Scanner has {len(scanner.providers)} providers: {[p.get_name() for p in scanner.providers]}")
# FIXED: Pass clear_graph flag to scanner, let it handle graph clearing internally
success = scanner.start_scan(target, max_depth, clear_graph=clear_graph, force_rescan_target=force_rescan_target)
@@ -469,7 +468,6 @@ def get_providers():
completed_tasks = scanner.indicators_completed
total_tasks = scanner.total_tasks_ever_enqueued
print(f"DEBUG: Task Progress - Completed: {completed_tasks}, Total Enqueued: {total_tasks}")
print(f"DEBUG: Scanner has {len(scanner.providers)} providers: {[p.get_name() for p in scanner.providers]}")
else:
print("DEBUG: No active scanner session found.")

View File

@@ -21,8 +21,8 @@ class Config:
# --- General Settings ---
self.default_recursion_depth = 2
self.default_timeout = 30
self.max_concurrent_requests = 5
self.default_timeout = 60
self.max_concurrent_requests = 1
self.large_entity_threshold = 100
self.max_retries_per_target = 8
self.cache_expiry_hours = 12

View File

@@ -344,13 +344,19 @@ class GraphManager:
return False
node_data = self.graph.nodes[large_entity_id]
attributes = node_data.get('attributes', {})
attributes = node_data.get('attributes', [])
# Find the 'nodes' attribute dictionary in the list
nodes_attr = next((attr for attr in attributes if attr.get('name') == 'nodes'), None)
# Remove from the list of member nodes
if 'nodes' in attributes and node_id_to_extract in attributes['nodes']:
attributes['nodes'].remove(node_id_to_extract)
# Update the count
attributes['count'] = len(attributes['nodes'])
if nodes_attr and 'value' in nodes_attr and isinstance(nodes_attr['value'], list) and node_id_to_extract in nodes_attr['value']:
nodes_attr['value'].remove(node_id_to_extract)
# Find the 'count' attribute and update it
count_attr = next((attr for attr in attributes if attr.get('name') == 'count'), None)
if count_attr:
count_attr['value'] = len(nodes_attr['value'])
else:
# This can happen if the node was already extracted, which is not an error.
print(f"Warning: Node {node_id_to_extract} not found in the 'nodes' list of {large_entity_id}.")

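Reviewer note: this hunk switches large-entity attributes from a plain dict to a list of attribute records keyed by 'name'/'value', matching the attr_dict entries built in scanner.py below. A minimal, self-contained sketch of that assumed shape and the lookup pattern used here; the example values and the node_id_to_extract variable are illustrative only, not taken from the codebase:

    # Assumed shape: node attributes are a list of {'name': ..., 'value': ...} records.
    attributes = [
        {'name': 'nodes', 'value': ['a.example.com', 'b.example.com']},
        {'name': 'count', 'value': 2},
    ]

    node_id_to_extract = 'b.example.com'  # hypothetical member being pulled out of the large entity

    # Same lookup pattern as the hunk: find a record by its 'name' key.
    nodes_attr = next((attr for attr in attributes if attr.get('name') == 'nodes'), None)
    if nodes_attr and isinstance(nodes_attr.get('value'), list) and node_id_to_extract in nodes_attr['value']:
        nodes_attr['value'].remove(node_id_to_extract)
        count_attr = next((attr for attr in attributes if attr.get('name') == 'count'), None)
        if count_attr:
            count_attr['value'] = len(nodes_attr['value'])  # keep the cached count in sync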
View File

@@ -152,7 +152,7 @@ class ForensicLogger:
# Log to standard logger
if error:
self.logger.error(f"API Request Failed - {provider}: {url} - {error}")
self.logger.error(f"API Request Failed.")
else:
self.logger.info(f"API Request - {provider}: {url} - Status: {status_code}")

View File

@@ -432,7 +432,7 @@ class Scanner:
self.indicators_completed += 1
if not self._is_stop_requested():
all_new_targets = new_targets.union(large_entity_members)
all_new_targets = new_targets
for new_target in all_new_targets:
is_ip_new = _is_valid_ip(new_target)
eligible_providers_new = self._get_eligible_providers(new_target, is_ip_new, False)
@@ -576,6 +576,33 @@ class Scanner:
print(f"Stop requested before processing results from {provider_name} for {target}")
return discovered_targets, False
attributes_by_node = defaultdict(list)
for attribute in provider_result.attributes:
# Convert the StandardAttribute object to a dictionary that the frontend can use
attr_dict = {
"name": attribute.name,
"value": attribute.value,
"type": attribute.type,
"provider": attribute.provider,
"confidence": attribute.confidence,
"metadata": attribute.metadata
}
attributes_by_node[attribute.target_node].append(attr_dict)
# Add attributes to nodes
for node_id, node_attributes_list in attributes_by_node.items():
if self.graph.graph.has_node(node_id):
# Determine node type
if _is_valid_ip(node_id):
node_type = NodeType.IP
elif node_id.startswith('AS') and node_id[2:].isdigit():
node_type = NodeType.ASN
else:
node_type = NodeType.DOMAIN
# Add node with the list of attributes
self.graph.add_node(node_id, node_type, attributes=node_attributes_list)
# Check for large entity based on relationship count
if provider_result.get_relationship_count() > self.config.large_entity_threshold:
print(f"Large entity detected: {provider_name} returned {provider_result.get_relationship_count()} relationships for {target}")
@@ -619,34 +646,6 @@ class Scanner:
if _is_valid_domain(target_node) or _is_valid_ip(target_node):
discovered_targets.add(target_node)
# Process attributes, preserving them as a list of objects
attributes_by_node = defaultdict(list)
for attribute in provider_result.attributes:
# Convert the StandardAttribute object to a dictionary that the frontend can use
attr_dict = {
"name": attribute.name,
"value": attribute.value,
"type": attribute.type,
"provider": attribute.provider,
"confidence": attribute.confidence,
"metadata": attribute.metadata
}
attributes_by_node[attribute.target_node].append(attr_dict)
# Add attributes to nodes
for node_id, node_attributes_list in attributes_by_node.items():
if self.graph.graph.has_node(node_id):
# Determine node type
if _is_valid_ip(node_id):
node_type = NodeType.IP
elif node_id.startswith('AS') and node_id[2:].isdigit():
node_type = NodeType.ASN
else:
node_type = NodeType.DOMAIN
# Add node with the list of attributes
self.graph.add_node(node_id, node_type, attributes=node_attributes_list)
return discovered_targets, False
def _create_large_entity_from_provider_result(self, source: str, provider_name: str,
@@ -793,7 +792,11 @@ class Scanner:
# 4. Re-queue the extracted node for full processing by all eligible providers
print(f"Re-queueing extracted node {node_id_to_extract} for full reconnaissance...")
is_ip = _is_valid_ip(node_id_to_extract)
current_depth = self.graph.graph.nodes[large_entity_id].get('attributes', {}).get('discovery_depth', 0)
# FIX: Correctly retrieve discovery_depth from the list of attributes
large_entity_attributes = self.graph.graph.nodes[large_entity_id].get('attributes', [])
discovery_depth_attr = next((attr for attr in large_entity_attributes if attr.get('name') == 'discovery_depth'), None)
current_depth = discovery_depth_attr['value'] if discovery_depth_attr else 0
eligible_providers = self._get_eligible_providers(node_id_to_extract, is_ip, False)
for provider in eligible_providers:

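Reviewer note: scanner.py now repeats the same name-based lookup that graph.py uses above (for 'nodes', 'count', and here 'discovery_depth'). A small shared helper could cut that repetition; the sketch below is only a suggestion against the assumed list-of-records model, and get_attribute_value is a hypothetical name, not existing code:

    def get_attribute_value(attributes, name, default=None):
        """Return the 'value' of the first attribute record with the given name (hypothetical helper)."""
        attr = next((a for a in attributes if a.get('name') == name), None)
        return attr['value'] if attr and 'value' in attr else default

    # Example: the discovery_depth retrieval from this hunk, expressed with the helper.
    large_entity_attributes = [{'name': 'discovery_depth', 'value': 1}]
    current_depth = get_attribute_value(large_entity_attributes, 'discovery_depth', 0)
    assert current_depth == 1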
View File

@@ -50,7 +50,6 @@ class SessionManager:
"""Restore SessionManager after unpickling."""
self.__dict__.update(state)
# Re-initialize unpickleable attributes
import redis
self.redis_client = redis.StrictRedis(db=0, decode_responses=False)
self.lock = threading.Lock()
self.creation_lock = threading.Lock()
@@ -140,7 +139,6 @@ class SessionManager:
new_scanner = Scanner(session_config=preserved_config)
new_scanner.session_id = new_session_id
print(f"New scanner has {len(new_scanner.providers)} providers: {[p.get_name() for p in new_scanner.providers]}")
new_session_data = {
'scanner': new_scanner,

View File

@@ -214,7 +214,6 @@ class GraphManager {
});
document.body.appendChild(this.contextMenu);
console.log('Context menu created and added to body');
}
/**
@@ -291,7 +290,6 @@ class GraphManager {
// FIXED: Right-click context menu
this.container.addEventListener('contextmenu', (event) => {
event.preventDefault();
console.log('Right-click detected at:', event.offsetX, event.offsetY);
// Get coordinates relative to the canvas
const pointer = {
@@ -300,7 +298,6 @@ class GraphManager {
};
const nodeId = this.network.getNodeAt(pointer);
console.log('Node at pointer:', nodeId);
if (nodeId) {
// Pass the original client event for positioning
@@ -341,19 +338,12 @@ class GraphManager {
// Stabilization events with progress
this.network.on('stabilizationProgress', (params) => {
const progress = params.iterations / params.total;
this.updateStabilizationProgress(progress);
});
this.network.on('stabilizationIterationsDone', () => {
this.onStabilizationComplete();
});
// Selection events
this.network.on('select', (params) => {
console.log('Selected nodes:', params.nodes);
console.log('Selected edges:', params.edges);
});
// Click away to hide context menu
document.addEventListener('click', (e) => {
if (!this.contextMenu.contains(e.target)) {
@@ -979,15 +969,6 @@ class GraphManager {
}, 2000);
}
/**
* Update stabilization progress
* @param {number} progress - Progress value (0-1)
*/
updateStabilizationProgress(progress) {
// Could show a progress indicator if needed
console.log(`Graph stabilization: ${(progress * 100).toFixed(1)}%`);
}
/**
* Handle stabilization completion
*/
@@ -1175,11 +1156,11 @@ class GraphManager {
adjacencyList
);
console.log(`Reachability analysis complete:`, {
/*console.log(`Reachability analysis complete:`, {
reachable: analysis.reachableNodes.size,
unreachable: analysis.unreachableNodes.size,
clusters: analysis.isolatedClusters.length
});
});*/
return analysis;
}
@@ -1294,7 +1275,6 @@ class GraphManager {
* Replaces the existing applyAllFilters() method
*/
applyAllFilters() {
console.log("Applying filters with enhanced reachability analysis...");
if (this.nodes.length === 0) return;
// Get filter criteria from UI
@@ -1351,22 +1331,9 @@ class GraphManager {
timestamp: Date.now()
};
// Apply hiding with forensic documentation
const updates = nodesToHide.map(id => ({
id: id,
hidden: true,
forensicNote: `Hidden due to reachability analysis from ${nodeId}`
}));
this.nodes.update(updates);
this.addToHistory('hide', historyData);
console.log(`Forensic hide operation: ${nodesToHide.length} nodes hidden`, {
originalTarget: nodeId,
cascadeNodes: nodesToHide.length - 1,
isolatedClusters: analysis.isolatedClusters.length
});
return {
hiddenNodes: nodesToHide,
isolatedClusters: analysis.isolatedClusters
@@ -1450,8 +1417,6 @@ class GraphManager {
// Handle operation results
if (!operationFailed) {
this.addToHistory('delete', historyData);
console.log(`Forensic delete operation completed:`, historyData.forensicAnalysis);
return {
success: true,
deletedNodes: nodesToDelete,
@@ -1542,7 +1507,6 @@ class GraphManager {
e.stopPropagation();
const action = e.currentTarget.dataset.action;
const nodeId = e.currentTarget.dataset.nodeId;
console.log('Context menu action:', action, 'for node:', nodeId);
this.performContextMenuAction(action, nodeId);
this.hideContextMenu();
});
@@ -1563,8 +1527,6 @@ class GraphManager {
* Updates the existing performContextMenuAction() method
*/
performContextMenuAction(action, nodeId) {
console.log('Performing enhanced action:', action, 'on node:', nodeId);
switch (action) {
case 'focus':
this.focusOnNode(nodeId);

View File

@@ -120,7 +120,6 @@ class DNSReconApp {
max-width: 400px;
`;
document.body.appendChild(messageContainer);
console.log('Message container created');
}
}
@@ -135,7 +134,6 @@ class DNSReconApp {
this.initializeModalFunctionality();
this.elements.startScan.addEventListener('click', (e) => {
console.log('Start scan button clicked');
e.preventDefault();
this.startScan();
});
@@ -146,13 +144,11 @@ class DNSReconApp {
});
this.elements.stopScan.addEventListener('click', (e) => {
console.log('Stop scan button clicked');
e.preventDefault();
this.stopScan();
});
this.elements.exportResults.addEventListener('click', (e) => {
console.log('Export results button clicked');
e.preventDefault();
this.exportResults();
});
@@ -162,7 +158,6 @@ class DNSReconApp {
// Enter key support for target domain input
this.elements.targetInput.addEventListener('keypress', (e) => {
if (e.key === 'Enter' && !this.isScanning) {
console.log('Enter key pressed in domain input');
this.startScan();
}
});
@@ -287,12 +282,8 @@ class DNSReconApp {
force_rescan_target: forceRescanTarget
};
console.log('Request data:', requestData);
const response = await this.apiCall('/api/scan/start', 'POST', requestData);
console.log('API response received:', response);
if (response.success) {
this.currentSessionId = response.scan_id;
this.showSuccess('Reconnaissance scan started successfully');
@@ -345,7 +336,6 @@ class DNSReconApp {
if (response.success) {
this.showSuccess('Scan stop requested');
console.log('Scan stop requested successfully');
// Force immediate status update
setTimeout(() => {
@@ -414,7 +404,6 @@ class DNSReconApp {
}
this.pollInterval = setInterval(() => {
console.log('--- Polling tick ---');
this.updateStatus();
this.updateGraph();
this.loadProviders();
@@ -439,16 +428,11 @@ class DNSReconApp {
*/
async updateStatus() {
try {
console.log('Updating status...');
const response = await this.apiCall('/api/scan/status');
console.log('Status response:', response);
if (response.success && response.status) {
const status = response.status;
console.log('Current scan status:', status.status);
console.log('Current progress:', status.progress_percentage + '%');
console.log('Graph stats:', status.graph_statistics);
this.updateStatusDisplay(status);
@@ -478,7 +462,6 @@ class DNSReconApp {
console.log('Updating graph...');
const response = await this.apiCall('/api/graph');
console.log('Graph response:', response);
if (response.success) {
const graphData = response.graph;
@@ -528,8 +511,6 @@ class DNSReconApp {
*/
updateStatusDisplay(status) {
try {
console.log('Updating status display...');
// Update status text with animation
if (this.elements.scanStatus) {
const formattedStatus = this.formatStatus(status.status);
@@ -584,8 +565,6 @@ class DNSReconApp {
}
this.setUIState(status.status, status.task_queue_size);
console.log('Status display updated successfully');
} catch (error) {
console.error('Error updating status display:', error);
}
@@ -681,8 +660,6 @@ class DNSReconApp {
* UI state management with immediate button updates
*/
setUIState(state, task_queue_size) {
console.log(`Setting UI state to: ${state}`);
const isQueueEmpty = task_queue_size === 0;
switch (state) {
@@ -743,7 +720,6 @@ class DNSReconApp {
*/
async loadProviders() {
try {
console.log('Loading providers...');
const response = await this.apiCall('/api/providers');
if (response.success) {
@@ -1401,7 +1377,6 @@ class DNSReconApp {
e.stopPropagation();
const nodeId = e.target.dataset.nodeId || e.target.getAttribute('data-node-id');
console.log('Node link clicked:', nodeId);
if (nodeId && this.graphManager && this.graphManager.nodes) {
const nextNode = this.graphManager.nodes.get(nodeId);
@@ -1758,7 +1733,6 @@ class DNSReconApp {
* @returns {Promise<Object>} Response data
*/
async apiCall(endpoint, method = 'GET', data = null) {
console.log(`Making API call: ${method} ${endpoint}`, data ? data : '(no data)');
try {
const options = {
@@ -1773,17 +1747,14 @@ class DNSReconApp {
console.log('Request body:', options.body);
}
console.log('Fetch options:', options);
const response = await fetch(endpoint, options);
console.log('Response status:', response.status, response.statusText);
if (!response.ok) {
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}
const result = await response.json();
console.log('Response data:', result);
return result;
@@ -1918,8 +1889,6 @@ class DNSReconApp {
* @param {string} type - Message type (success, error, warning, info)
*/
showMessage(message, type = 'info') {
console.log(`${type.toUpperCase()}: ${message}`);
// Create message element
const messageElement = document.createElement('div');
messageElement.className = `message-toast message-${type}`;