Export manager modularized.

This commit is contained in:
overcuriousity
2025-09-18 17:42:39 +02:00
parent 15227b392d
commit d4081e1a32
6 changed files with 406 additions and 243 deletions

View File

@@ -5,6 +5,7 @@ Graph data model for DNSRecon using NetworkX.
Manages in-memory graph storage with confidence scoring and forensic metadata.
Now fully compatible with the unified ProviderResult data model.
UPDATED: Fixed correlation exclusion keys to match actual attribute names.
UPDATED: Removed export_json() method - now handled by ExportManager.
"""
import re
from datetime import datetime, timezone
@@ -212,7 +213,7 @@ class GraphManager:
def _has_direct_edge_bidirectional(self, node_a: str, node_b: str) -> bool:
"""
Check if there's a direct edge between two nodes in either direction.
Returns True if node_aâ†'node_b OR node_bâ†'node_a exists.
Returns True if node_aâ†'node_b OR node_bâ†'node_a exists.
"""
return (self.graph.has_edge(node_a, node_b) or
self.graph.has_edge(node_b, node_a))
@@ -503,22 +504,6 @@ class GraphManager:
'statistics': self.get_statistics()['basic_metrics']
}
def export_json(self) -> Dict[str, Any]:
    """
    Serialize the complete graph as a JSON-compatible dictionary.

    The payload bundles forensic export metadata (timestamps, node/edge
    counts, format tag), the node-link representation of the graph, and
    the full statistics snapshot.
    """
    serialized_graph = nx.node_link_data(self.graph, edges="edges")
    export_metadata = {
        'export_timestamp': datetime.now(timezone.utc).isoformat(),
        'graph_creation_time': self.creation_time,
        'last_modified': self.last_modified,
        'total_nodes': self.get_node_count(),
        'total_edges': self.get_edge_count(),
        'graph_format': 'dnsrecon_v1_unified_model',
    }
    return {
        'export_metadata': export_metadata,
        'graph': serialized_graph,
        'statistics': self.get_statistics(),
    }
def _get_confidence_distribution(self) -> Dict[str, int]:
"""Get distribution of edge confidence scores with empty graph handling."""
distribution = {'high': 0, 'medium': 0, 'low': 0}

View File

@@ -17,6 +17,7 @@ from core.graph_manager import GraphManager, NodeType
from core.logger import get_forensic_logger, new_session
from core.provider_result import ProviderResult
from utils.helpers import _is_valid_ip, _is_valid_domain
from utils.export_manager import export_manager
from providers.base_provider import BaseProvider
from core.rate_limiter import GlobalRateLimiter
@@ -868,114 +869,6 @@ class Scanner:
graph_data['initial_targets'] = list(self.initial_targets)
return graph_data
def export_results(self) -> Dict[str, Any]:
    """
    Assemble the complete scan result package for export.

    Combines scan metadata, the serialized graph, the forensic audit
    trail, per-provider statistics, and the forensic summary into one
    JSON-serializable dictionary.
    """
    # Preserve the original collection order: graph first, then audit,
    # then provider statistics.
    graph_payload = self.graph.export_json()
    audit = self.logger.export_audit_trail()
    stats_by_provider = {p.get_name(): p.get_statistics() for p in self.providers}
    scan_metadata = {
        'target_domain': self.current_target,
        'max_depth': self.max_depth,
        'final_status': self.status,
        'total_indicators_processed': self.indicators_processed,
        'enabled_providers': list(stats_by_provider.keys()),
        'session_id': self.session_id,
    }
    return {
        'scan_metadata': scan_metadata,
        'graph_data': graph_payload,
        'forensic_audit': audit,
        'provider_statistics': stats_by_provider,
        'scan_summary': self.logger.get_forensic_summary(),
    }
def export_targets_txt(self) -> str:
    """Export every discovered domain and IP address as newline-separated text."""
    graph_nodes = self.graph.get_graph_data().get('nodes', [])
    # De-duplicate with a set, keep only valid domains/IPs, emit sorted.
    discovered = {
        node['id']
        for node in graph_nodes
        if _is_valid_domain(node['id']) or _is_valid_ip(node['id'])
    }
    return "\n".join(sorted(discovered))
def generate_executive_summary(self) -> str:
    """Generate a natural-language executive summary of the scan results."""
    summary = []
    # Report timestamp in UTC for forensic consistency.
    now = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S %Z')
    # NOTE(review): scan_metadata is assigned but never used below — confirm
    # whether get_scan_status() has needed side effects or can be removed.
    scan_metadata = self.get_scan_status()
    graph_data = self.graph.get_graph_data()
    nodes = graph_data.get('nodes', [])
    edges = graph_data.get('edges', [])
    summary.append(f"DNSRecon Executive Summary")
    summary.append(f"Report Generated: {now}")
    summary.append("="*40)
    # Scan Overview
    summary.append("\n## Scan Overview")
    summary.append(f"- Initial Target: {self.current_target}")
    summary.append(f"- Scan Status: {self.status.capitalize()}")
    summary.append(f"- Analysis Depth: {self.max_depth}")
    summary.append(f"- Total Indicators Found: {len(nodes)}")
    summary.append(f"- Total Relationships Discovered: {len(edges)}")
    # Key Findings
    summary.append("\n## Key Findings")
    # Bucket nodes by type; assumes each node dict carries a 'type' key.
    domains = [n for n in nodes if n['type'] == 'domain']
    ips = [n for n in nodes if n['type'] == 'ip']
    isps = [n for n in nodes if n['type'] == 'isp']
    cas = [n for n in nodes if n['type'] == 'ca']
    summary.append(f"- Discovered {len(domains)} unique domain(s).")
    summary.append(f"- Identified {len(ips)} unique IP address(es).")
    if isps:
        summary.append(f"- Infrastructure is hosted across {len(isps)} unique ISP(s).")
    if cas:
        summary.append(f"- Found certificates issued by {len(cas)} unique Certificate Authorit(y/ies).")
    # Detailed Findings
    summary.append("\n## Detailed Findings")
    # Domain Analysis
    if domains:
        summary.append("\n### Domain Analysis")
        for domain in domains[:5]: # report on first 5
            summary.append(f"\n- Domain: {domain['id']}")
            # Associated IPs: outgoing edges from this domain whose target is an IP.
            associated_ips = [edge['to'] for edge in edges if edge['from'] == domain['id'] and _is_valid_ip(edge['to'])]
            if associated_ips:
                summary.append(f" - Associated IPs: {', '.join(associated_ips)}")
            # Certificate info: attributes whose names use the 'cert_' prefix.
            cert_attributes = [attr for attr in domain.get('attributes', []) if attr.get('name', '').startswith('cert_')]
            if cert_attributes:
                issuer = next((attr['value'] for attr in cert_attributes if attr['name'] == 'cert_issuer_name'), 'N/A')
                valid_until = next((attr['value'] for attr in cert_attributes if attr['name'] == 'cert_not_after'), 'N/A')
                summary.append(f" - Certificate Issuer: {issuer}")
                summary.append(f" - Certificate Valid Until: {valid_until}")
    # IP Address Analysis
    if ips:
        summary.append("\n### IP Address Analysis")
        for ip in ips[:5]: # report on first 5
            summary.append(f"\n- IP Address: {ip['id']}")
            # Hostnames: outgoing edges from this IP whose target is a domain.
            hostnames = [edge['to'] for edge in edges if edge['from'] == ip['id'] and _is_valid_domain(edge['to'])]
            if hostnames:
                summary.append(f" - Associated Hostnames: {', '.join(hostnames)}")
            # ISP: first outgoing edge whose target node is typed 'isp' in the graph.
            isp_edge = next((edge for edge in edges if edge['from'] == ip['id'] and self.graph.graph.nodes[edge['to']]['type'] == 'isp'), None)
            if isp_edge:
                summary.append(f" - ISP: {isp_edge['to']}")
    # Data Sources
    summary.append("\n## Data Sources")
    provider_stats = self.logger.get_forensic_summary().get('provider_statistics', {})
    for provider, stats in provider_stats.items():
        summary.append(f"- {provider.capitalize()}: {stats.get('relationships_discovered', 0)} relationships from {stats.get('successful_requests', 0)} requests.")
    summary.append("\n" + "="*40)
    summary.append("End of Report")
    return "\n".join(summary)
def get_provider_info(self) -> Dict[str, Dict[str, Any]]:
info = {}