large entity recreation

commit 0a6d12de9a
parent 332805709d

 app.py             |  13
 core/scanner.py    | 104
 static/js/graph.js
--- a/app.py
+++ b/app.py
@@ -200,14 +200,13 @@ def extract_from_large_entity():
         if not scanner:
             return jsonify({'success': False, 'error': 'No active session found'}), 404

-        # TODO implement
-        #success = scanner.extract_node_from_large_entity(large_entity_id, node_id)
+        success = scanner.extract_node_from_large_entity(large_entity_id, node_id)

-        #if success:
-        #    session_manager.update_session_scanner(user_session_id, scanner)
-        #    return jsonify({'success': True, 'message': f'Node {node_id} extracted successfully.'})
-        #else:
-        #    return jsonify({'success': False, 'error': f'Failed to extract node {node_id}.'}), 500
+        if success:
+            session_manager.update_session_scanner(user_session_id, scanner)
+            return jsonify({'success': True, 'message': f'Node {node_id} extracted successfully.'})
+        else:
+            return jsonify({'success': False, 'error': f'Failed to extract node {node_id}.'}), 500

     except Exception as e:
         traceback.print_exc()
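Note: the hunk above swaps the TODO stub for the real extraction call. A minimal sketch of how the re-enabled block plausibly sits inside the full route handler; the route path, payload fields, and the stub session manager are assumptions, only `extract_node_from_large_entity` and `update_session_scanner` appear in the diff:

```python
# Hypothetical reconstruction of the surrounding endpoint. The route path,
# payload fields, and _StubSessionManager are assumptions; only the calls to
# extract_node_from_large_entity and update_session_scanner come from the diff.
import traceback
from flask import Flask, jsonify, request

app = Flask(__name__)

class _StubSessionManager:
    """Stand-in for the app's real session manager (shape assumed)."""
    def get_session(self, session_id):
        return None  # a real implementation returns the session's Scanner
    def update_session_scanner(self, session_id, scanner):
        pass

session_manager = _StubSessionManager()

@app.route('/api/large-entity/extract', methods=['POST'])  # path assumed
def extract_from_large_entity():
    try:
        data = request.get_json()
        large_entity_id = data['large_entity_id']
        node_id = data['node_id']
        user_session_id = data.get('session_id')  # field name assumed

        scanner = session_manager.get_session(user_session_id)
        if not scanner:
            return jsonify({'success': False, 'error': 'No active session found'}), 404

        # The block the commit re-enables:
        success = scanner.extract_node_from_large_entity(large_entity_id, node_id)
        if success:
            session_manager.update_session_scanner(user_session_id, scanner)
            return jsonify({'success': True, 'message': f'Node {node_id} extracted successfully.'})
        else:
            return jsonify({'success': False, 'error': f'Failed to extract node {node_id}.'}), 500
    except Exception as e:
        traceback.print_exc()
        return jsonify({'success': False, 'error': str(e)}), 500
```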
--- a/core/scanner.py
+++ b/core/scanner.py
@@ -821,6 +821,88 @@ class Scanner:
             self._update_provider_state(target, provider_name, 'failed', 0, str(e), start_time)
             return None

+    def _create_large_entity_from_result(self, source_node: str, provider_name: str,
+                                         provider_result: ProviderResult, depth: int) -> Set[str]:
+        """
+        Creates a large entity node and tags all member nodes.
+        """
+        members = {rel.target_node for rel in provider_result.relationships
+                   if _is_valid_domain(rel.target_node) or _is_valid_ip(rel.target_node)}
+
+        if not members:
+            return set()
+
+        large_entity_id = f"le_{provider_name}_{source_node}"
+
+        # Add the large entity node to the graph
+        self.graph.add_node(
+            node_id=large_entity_id,
+            node_type=NodeType.LARGE_ENTITY,
+            attributes=[
+                {"name": "count", "value": len(members), "type": "statistic"},
+                {"name": "source_provider", "value": provider_name, "type": "metadata"},
+                {"name": "discovery_depth", "value": depth, "type": "metadata"},
+                {"name": "nodes", "value": list(members), "type": "metadata"}
+            ],
+            description=f"A collection of {len(members)} nodes discovered from {source_node} via {provider_name}."
+        )
+
+        # Create a single edge from the source to the large entity
+        self.graph.add_edge(
+            source_node, large_entity_id,
+            relationship_type=f"{provider_name}_collection",
+            confidence_score=0.95,
+            source_provider=provider_name,
+            raw_data={'description': 'Represents a large collection of nodes.'}
+        )
+
+        # Tag each member node with the large entity ID
+        for member_id in members:
+            node_type = NodeType.IP if _is_valid_ip(member_id) else NodeType.DOMAIN
+            self.graph.add_node(
+                node_id=member_id,
+                node_type=node_type,
+                metadata={'large_entity_id': large_entity_id}
+            )
+
+        return members
+
+    def extract_node_from_large_entity(self, large_entity_id: str, node_id: str) -> bool:
+        """
+        Removes a node from a large entity, allowing it to be processed normally.
+        """
+        if not self.graph.graph.has_node(node_id):
+            return False
+
+        node_data = self.graph.graph.nodes[node_id]
+        metadata = node_data.get('metadata', {})
+
+        if metadata.get('large_entity_id') == large_entity_id:
+            # Remove the large entity tag
+            del metadata['large_entity_id']
+            self.graph.add_node(node_id, NodeType(node_data['type']), metadata=metadata)
+
+            # Re-enqueue the node for full processing
+            is_ip = _is_valid_ip(node_id)
+            eligible_providers = self._get_eligible_providers(node_id, is_ip, False)
+            for provider in eligible_providers:
+                provider_name = provider.get_name()
+                priority = self._get_priority(provider_name)
+                # Use current depth of the large entity if available, else 0
+                depth = 0
+                if self.graph.graph.has_node(large_entity_id):
+                    le_attrs = self.graph.graph.nodes[large_entity_id].get('attributes', [])
+                    depth_attr = next((a for a in le_attrs if a['name'] == 'discovery_depth'), None)
+                    if depth_attr:
+                        depth = depth_attr['value']
+
+                self.task_queue.put((time.time(), priority, (provider_name, node_id, depth)))
+                self.total_tasks_ever_enqueued += 1
+
+            return True
+
+        return False
+
     def _process_provider_result_unified(self, target: str, provider: BaseProvider,
                                          provider_result: ProviderResult, current_depth: int) -> Tuple[Set[str], bool]:
         """
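Note: the two methods added above form a round trip. `_create_large_entity_from_result` tags every member node with its container ID, and `extract_node_from_large_entity` strips that tag and re-enqueues the node for normal processing. A minimal sketch of that invariant against a bare networkx graph (the real `self.graph` wrapper is not shown in the diff; all names in the stub are assumed):

```python
# Minimal sketch of the tag/untag round trip the two new methods implement.
# The Scanner appears to wrap a networkx graph (self.graph.graph); this stub
# mimics just enough of that shape to show the invariant.
import networkx as nx

graph = nx.DiGraph()
large_entity_id = "le_exampleprovider_example.com"  # provider name assumed

# _create_large_entity_from_result tags each member with its container:
for member in ["a.example.com", "b.example.com"]:
    graph.add_node(member, type="domain", metadata={"large_entity_id": large_entity_id})

# extract_node_from_large_entity reverses the tag for one member...
node = "a.example.com"
metadata = graph.nodes[node].get("metadata", {})
if metadata.get("large_entity_id") == large_entity_id:
    del metadata["large_entity_id"]  # node is an ordinary domain again
    # ...and the scanner then re-enqueues (provider_name, node, depth) tasks.

assert "large_entity_id" not in graph.nodes[node]["metadata"]
assert graph.nodes["b.example.com"]["metadata"]["large_entity_id"] == large_entity_id
```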
@@ -830,7 +912,7 @@ class Scanner:
         """
         provider_name = provider.get_name()
         discovered_targets = set()
-        #large_entity_members = set()
+        large_entity_members = set()

         if self._is_stop_requested():
             return discovered_targets, False
@@ -842,11 +924,11 @@ class Scanner:

         is_large_entity = eligible_relationship_count > self.config.large_entity_threshold

-        #if is_large_entity:
+        if is_large_entity:
             # Create the large entity node and get the set of its members
-            #large_entity_members = self._create_large_entity_from_result(
-            #    target, provider_name, provider_result, current_depth
-            #)
+            large_entity_members = self._create_large_entity_from_result(
+                target, provider_name, provider_result, current_depth
+            )

         # Process ALL relationships to build the complete underlying data model
         for i, relationship in enumerate(provider_result.relationships):
@@ -885,6 +967,7 @@ class Scanner:

             # Add all discovered domains/IPs to be considered for further processing
             if (_is_valid_domain(target_node) or _is_valid_ip(target_node)) and not max_depth_reached:
+                if target_node not in large_entity_members:
                     discovered_targets.add(target_node)

             # Process all attributes and add them to the corresponding nodes
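Note: the hunks above thread `large_entity_members` through unified result processing. Collapsed members are still written to the underlying data model, but they are withheld from `discovered_targets`, so they are not re-enqueued for recursive scanning. A toy illustration of that filter (sample names invented):

```python
# Sketch of the discovered-target filtering the hunks above implement.
large_entity_members = {"sub1.example.com", "sub2.example.com"}
candidates = ["sub1.example.com", "other.example.com"]

discovered_targets = set()
for target_node in candidates:
    if target_node not in large_entity_members:
        discovered_targets.add(target_node)

print(discovered_targets)  # {'other.example.com'} -- members stay parked
```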
@@ -1004,8 +1087,19 @@ class Scanner:
         eligible = []
         target_key = 'ips' if is_ip else 'domains'

+        # Check if the target is part of a large entity
+        is_in_large_entity = False
+        if self.graph.graph.has_node(target):
+            metadata = self.graph.graph.nodes[target].get('metadata', {})
+            if 'large_entity_id' in metadata:
+                is_in_large_entity = True
+
         for provider in self.providers:
             try:
+                # If in large entity, only allow dns and correlation providers
+                if is_in_large_entity and provider.get_name() not in ['dns', 'correlation']:
+                    continue
+
                 # Check if provider supports this target type
                 if not provider.get_eligibility().get(target_key, False):
                     continue
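Note: the hunk above keeps large-entity members in the scan loop but restricts them to cheap lookups until they are explicitly extracted. The same gating rule as a standalone predicate, for illustration (`crtsh` below is a hypothetical provider name, not taken from the diff):

```python
# Sketch of the new provider-gating rule as a standalone predicate.
ALLOWED_IN_LARGE_ENTITY = {'dns', 'correlation'}

def provider_allowed(provider_name: str, is_in_large_entity: bool) -> bool:
    # Members of a large entity only get dns/correlation lookups;
    # everything else waits until the node is extracted.
    if is_in_large_entity and provider_name not in ALLOWED_IN_LARGE_ENTITY:
        return False
    return True

assert provider_allowed('dns', True)
assert not provider_allowed('crtsh', True)   # hypothetical provider name
assert provider_allowed('crtsh', False)
```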
--- a/static/js/graph.js
+++ b/static/js/graph.js
@@ -1,3 +1,4 @@
+// dnsrecon-reduced/static/js/graph.js
 /**
  * Graph visualization module for DNSRecon
  * Handles network graph rendering using vis.js with proper large entity node hiding
@@ -362,77 +363,60 @@ class GraphManager {
         }

         try {
-            // Initialize if not already done
             if (!this.isInitialized) {
                 this.initialize();
             }

             this.initialTargetIds = new Set(graphData.initial_targets || []);
-            // Check if we have actual data to display
             const hasData = graphData.nodes.length > 0 || graphData.edges.length > 0;

-            // Handle placeholder visibility
             const placeholder = this.container.querySelector('.graph-placeholder');
             if (placeholder) {
-                if (hasData) {
-                    placeholder.style.display = 'none';
-                } else {
-                    placeholder.style.display = 'flex';
-                    // Early return if no data to process
-                    return;
+                placeholder.style.display = hasData ? 'none' : 'flex';
+            }
+            if (!hasData) {
+                this.nodes.clear();
+                this.edges.clear();
+                return;
             }
-            }

-            // Process nodes with proper certificate coloring
-            const processedNodes = filteredNodes.map(node => {
+            const nodeMap = new Map(graphData.nodes.map(node => [node.id, node]));
+
+            // Filter out hidden nodes before processing for rendering
+            const filteredNodes = graphData.nodes.filter(node =>
+                !(node.metadata && node.metadata.large_entity_id)
+            );
+
+            const processedNodes = graphData.nodes.map(node => {
                 const processed = this.processNode(node);
-                // Apply certificate-based coloring here in frontend
-                if (node.type === 'domain' && Array.isArray(node.attributes)) {
-                    const certInfo = this.analyzeCertificateInfo(node.attributes);
-
-                    if (certInfo.hasExpiredOnly) {
-                        // Red for domains with only expired/invalid certificates
-                        processed.color = { background: '#ff6b6b', border: '#cc5555' };
-                    } else if (!certInfo.hasCertificates) {
-                        // Grey for domains with no certificates
-                        processed.color = { background: '#c7c7c7', border: '#999999' };
-                    }
-                    // Valid certificates use default green (handled by processNode)
-                }
-
-                return processed;
-            });
-
-            const mergedEdges = {};
-            graphData.edges.forEach(edge => {
-                const mergeKey = `${fromNode}-${toNode}-${edge.label}`;
-
-                if (!mergedEdges[mergeKey]) {
-                    mergedEdges[mergeKey] = {
-                        ...edge,
-                        from: fromNode,
-                        to: toNode,
-                        count: 0,
-                        confidence_score: 0
-                    };
-                }
-
-                mergedEdges[mergeKey].count++;
-                if (edge.confidence_score > mergedEdges[mergeKey].confidence_score) {
-                    mergedEdges[mergeKey].confidence_score = edge.confidence_score;
-                }
-            });
-
-            const processedEdges = Object.values(mergedEdges).map(edge => {
-                const processed = this.processEdge(edge);
-                if (edge.count > 1) {
-                    processed.label = `${edge.label} (${edge.count})`;
+                if (node.metadata && node.metadata.large_entity_id) {
+                    processed.hidden = true;
                 }
                 return processed;
             });

-            // Update datasets with animation
+            const processedEdges = graphData.edges.map(edge => {
+                let fromNode = nodeMap.get(edge.from);
+                let toNode = nodeMap.get(edge.to);
+                let fromId = edge.from;
+                let toId = edge.to;
+
+                if (fromNode && fromNode.metadata && fromNode.metadata.large_entity_id) {
+                    fromId = fromNode.metadata.large_entity_id;
+                }
+                if (toNode && toNode.metadata && toNode.metadata.large_entity_id) {
+                    toId = toNode.metadata.large_entity_id;
+                }
+
+                // Avoid self-referencing edges from re-routing
+                if (fromId === toId) {
+                    return null;
+                }
+
+                const reRoutedEdge = { ...edge, from: fromId, to: toId };
+                return this.processEdge(reRoutedEdge);
+            }).filter(Boolean); // Remove nulls from self-referencing edges
+
             const existingNodeIds = this.nodes.getIds();
             const existingEdgeIds = this.edges.getIds();
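Note: the hunk above replaces frontend edge merging with re-routing. Member nodes of a large entity are kept in the dataset but hidden, and any edge touching a hidden member is redirected to the member's container node; self-loops produced by the redirect are dropped. The same logic sketched in Python for brevity (the diff implements it in JavaScript inside GraphManager):

```python
# Python analogue of the new frontend edge re-routing, for illustration only.
def reroute_edges(nodes: list[dict], edges: list[dict]) -> list[dict]:
    node_map = {n['id']: n for n in nodes}

    def container(node_id: str) -> str:
        # A hidden member resolves to its large-entity container; others map to themselves.
        node = node_map.get(node_id, {})
        return (node.get('metadata') or {}).get('large_entity_id', node_id)

    rerouted = []
    for edge in edges:
        src, dst = container(edge['from']), container(edge['to'])
        if src == dst:  # skip self-referencing edges created by re-routing
            continue
        rerouted.append({**edge, 'from': src, 'to': dst})
    return rerouted
```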
@@ -449,12 +433,10 @@ class GraphManager {
                 setTimeout(() => this.highlightNewElements(newNodes, newEdges), 100);
             }

-            if (processedNodes.length <= 10 || existingNodeIds.length === 0) {
+            if (this.nodes.length <= 10 || existingNodeIds.length === 0) {
                 setTimeout(() => this.fitView(), 800);
             }

-            console.log(`Graph updated: ${processedNodes.length} nodes, ${processedEdges.length} edges (${newNodes.length} new nodes, ${newEdges.length} new edges)`);
-
         } catch (error) {
             console.error('Failed to update graph:', error);
             this.showError('Failed to update visualization');
@@ -582,7 +564,7 @@ class GraphManager {
     processEdge(edge) {
         const confidence = edge.confidence_score || 0;
         const processedEdge = {
-            id: `${edge.from}-${edge.to}`,
+            id: `${edge.from}-${edge.to}-${edge.label}`,
             from: edge.from,
             to: edge.to,
             label: this.formatEdgeLabel(edge.label, confidence),
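Note: the id change above matters once edge merging is gone. Two edges between the same pair of nodes with different labels previously produced the same DataSet id and would collide; including the label keeps them distinct. A quick check of the two schemes (sample values invented):

```python
# Why the edge id now includes the label: distinct relationships between the
# same pair of nodes must not collapse into one id.
edges = [
    {'from': 'a.example.com', 'to': 'b.example.com', 'label': 'dns_a_record'},
    {'from': 'a.example.com', 'to': 'b.example.com', 'label': 'other_relation'},
]
old_ids = {f"{e['from']}-{e['to']}" for e in edges}
new_ids = {f"{e['from']}-{e['to']}-{e['label']}" for e in edges}
print(len(old_ids), len(new_ids))  # 1 2 -- the old scheme would collide
```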