overcuriousity 2025-09-09 14:54:02 +02:00
parent 8263f5cfa9
commit 0c9cf00a3b
12 changed files with 1475 additions and 252 deletions

View File

@@ -4,11 +4,15 @@
 import requests
 import json
 import time
+import logging
 from datetime import datetime
 from typing import List, Optional, Set
 from .data_structures import Certificate
 from .config import Config
 
+# Module logger
+logger = logging.getLogger(__name__)
+
 class CertificateChecker:
     """Check certificates using crt.sh."""
@@ -17,6 +21,9 @@ class CertificateChecker:
     def __init__(self, config: Config):
        self.config = config
        self.last_request = 0
+        self.query_count = 0
+        logger.info("🔐 Certificate checker initialized")
 
    def _rate_limit(self):
        """Apply rate limiting for crt.sh."""
@@ -25,29 +32,45 @@ class CertificateChecker:
        min_interval = 1.0 / self.config.CRT_SH_RATE_LIMIT
        if time_since_last < min_interval:
-            time.sleep(min_interval - time_since_last)
+            sleep_time = min_interval - time_since_last
+            logger.debug(f"⏸️ crt.sh rate limiting: sleeping for {sleep_time:.2f}s")
+            time.sleep(sleep_time)
        self.last_request = time.time()
+        self.query_count += 1
 
    def get_certificates(self, domain: str) -> List[Certificate]:
        """Get certificates for a domain from crt.sh."""
+        logger.debug(f"🔐 Getting certificates for domain: {domain}")
        certificates = []
 
        # Query for the domain
-        certificates.extend(self._query_crt_sh(domain))
+        domain_certs = self._query_crt_sh(domain)
+        certificates.extend(domain_certs)
 
        # Also query for wildcard certificates
-        certificates.extend(self._query_crt_sh(f"%.{domain}"))
+        wildcard_certs = self._query_crt_sh(f"%.{domain}")
+        certificates.extend(wildcard_certs)
 
        # Remove duplicates based on certificate ID
        unique_certs = {cert.id: cert for cert in certificates}
-        return list(unique_certs.values())
+        final_certs = list(unique_certs.values())
+
+        if final_certs:
+            logger.info(f"📜 Found {len(final_certs)} unique certificates for {domain}")
+        else:
+            logger.debug(f"❌ No certificates found for {domain}")
+
+        return final_certs
 
    def _query_crt_sh(self, query: str) -> List[Certificate]:
        """Query crt.sh API with retry logic."""
        certificates = []
        self._rate_limit()
+        logger.debug(f"📡 Querying crt.sh for: {query}")
 
        max_retries = 3
        for attempt in range(max_retries):
            try:
@@ -59,52 +82,141 @@ class CertificateChecker:
                response = requests.get(
                    self.CRT_SH_URL,
                    params=params,
-                    timeout=self.config.HTTP_TIMEOUT
+                    timeout=self.config.HTTP_TIMEOUT,
+                    headers={'User-Agent': 'DNS-Recon-Tool/1.0'}
                )
 
+                logger.debug(f"📡 crt.sh API response for {query}: {response.status_code}")
+
                if response.status_code == 200:
-                    data = response.json()
-                    for cert_data in data:
-                        try:
-                            certificate = Certificate(
-                                id=cert_data.get('id'),
-                                issuer=cert_data.get('issuer_name', ''),
-                                subject=cert_data.get('name_value', ''),
-                                not_before=datetime.fromisoformat(
-                                    cert_data.get('not_before', '').replace('Z', '+00:00')
-                                ),
-                                not_after=datetime.fromisoformat(
-                                    cert_data.get('not_after', '').replace('Z', '+00:00')
-                                ),
-                                is_wildcard='*.' in cert_data.get('name_value', '')
-                            )
-                            certificates.append(certificate)
-                        except (ValueError, TypeError):
-                            continue  # Skip malformed certificate data
-                    return certificates  # Success, exit retry loop
+                    try:
+                        data = response.json()
+                        logger.debug(f"📊 crt.sh returned {len(data)} certificate entries for {query}")
+
+                        for cert_data in data:
+                            try:
+                                # Parse dates with better error handling
+                                not_before = self._parse_date(cert_data.get('not_before'))
+                                not_after = self._parse_date(cert_data.get('not_after'))
+
+                                if not_before and not_after:
+                                    certificate = Certificate(
+                                        id=cert_data.get('id'),
+                                        issuer=cert_data.get('issuer_name', ''),
+                                        subject=cert_data.get('name_value', ''),
+                                        not_before=not_before,
+                                        not_after=not_after,
+                                        is_wildcard='*.' in cert_data.get('name_value', '')
+                                    )
+                                    certificates.append(certificate)
+                                    logger.debug(f"✅ Parsed certificate ID {certificate.id} for {query}")
+                                else:
+                                    logger.debug(f"⚠️ Skipped certificate with invalid dates: {cert_data.get('id')}")
+                            except (ValueError, TypeError, KeyError) as e:
+                                logger.debug(f"⚠️ Error parsing certificate data: {e}")
+                                continue  # Skip malformed certificate data
+
+                        logger.info(f"✅ Successfully processed {len(certificates)} certificates from crt.sh for {query}")
+                        return certificates  # Success, exit retry loop
+
+                    except json.JSONDecodeError as e:
+                        logger.warning(f"❌ Invalid JSON response from crt.sh for {query}: {e}")
+                        if attempt < max_retries - 1:
+                            time.sleep(2 ** attempt)  # Exponential backoff
+                            continue
+                        return certificates
+
+                elif response.status_code == 429:
+                    logger.warning(f"⚠️ crt.sh rate limit exceeded for {query}")
+                    if attempt < max_retries - 1:
+                        time.sleep(5)  # Wait longer for rate limits
+                        continue
+                    return certificates
+
+                else:
+                    logger.warning(f"⚠️ crt.sh HTTP error for {query}: {response.status_code}")
+                    if attempt < max_retries - 1:
+                        time.sleep(2)
+                        continue
+                    return certificates
+
+            except requests.exceptions.Timeout:
+                logger.warning(f"⏱️ crt.sh query timeout for {query} (attempt {attempt+1}/{max_retries})")
+                if attempt < max_retries - 1:
+                    time.sleep(2)
+                    continue
            except requests.exceptions.RequestException as e:
-                print(f"Error querying crt.sh for {query} (attempt {attempt+1}/{max_retries}): {e}")
+                logger.warning(f"🌐 crt.sh network error for {query} (attempt {attempt+1}/{max_retries}): {e}")
                if attempt < max_retries - 1:
-                    time.sleep(2)  # Wait 2 seconds before retrying
+                    time.sleep(2)
+                    continue
+            except Exception as e:
+                logger.error(f"❌ Unexpected error querying crt.sh for {query}: {e}")
+                if attempt < max_retries - 1:
+                    time.sleep(2)
                    continue
-        return certificates  # Return what we have after all retries
+
+        # If we get here, all retries failed
+        logger.warning(f"❌ All {max_retries} attempts failed for crt.sh query: {query}")
+        return certificates
+
+    def _parse_date(self, date_str: str) -> Optional[datetime]:
+        """Parse date string with multiple format support."""
+        if not date_str:
+            return None
+
+        # Common date formats from crt.sh
+        date_formats = [
+            '%Y-%m-%dT%H:%M:%S',      # ISO format without timezone
+            '%Y-%m-%dT%H:%M:%SZ',     # ISO format with Z
+            '%Y-%m-%d %H:%M:%S',      # Space separated
+            '%Y-%m-%dT%H:%M:%S.%f',   # With microseconds
+            '%Y-%m-%dT%H:%M:%S.%fZ',  # With microseconds and Z
+        ]
+
+        for fmt in date_formats:
+            try:
+                return datetime.strptime(date_str, fmt)
+            except ValueError:
+                continue
+
+        # Try with timezone info
+        try:
+            return datetime.fromisoformat(date_str.replace('Z', '+00:00'))
+        except ValueError:
+            pass
+
+        logger.debug(f"⚠️ Could not parse date: {date_str}")
+        return None
 
    def extract_subdomains_from_certificates(self, certificates: List[Certificate]) -> Set[str]:
        """Extract subdomains from certificate subjects."""
        subdomains = set()
+        logger.debug(f"🌿 Extracting subdomains from {len(certificates)} certificates")
 
        for cert in certificates:
            # Parse subject field for domain names
-            subjects = cert.subject.split('\n')
-            for subject in subjects:
-                subject = subject.strip()
+            # Certificate subjects can be multi-line with multiple domains
+            subject_lines = cert.subject.split('\n')
+
+            for line in subject_lines:
+                line = line.strip()
 
-                # Skip wildcard domains for recursion
-                if not subject.startswith('*.'):
-                    if self._is_valid_domain(subject):
-                        subdomains.add(subject.lower())
+                # Skip wildcard domains for recursion (they don't resolve directly)
+                if line.startswith('*.'):
+                    logger.debug(f"🌿 Skipping wildcard domain: {line}")
+                    continue
+
+                if self._is_valid_domain(line):
+                    subdomains.add(line.lower())
+                    logger.debug(f"🌿 Found subdomain from certificate: {line}")
+
+        if subdomains:
+            logger.info(f"🌿 Extracted {len(subdomains)} subdomains from certificates")
+        else:
+            logger.debug("❌ No subdomains extracted from certificates")
 
        return subdomains
@@ -114,9 +226,32 @@ class CertificateChecker:
            return False
 
        # Remove common prefixes
-        domain = domain.lower()
+        domain = domain.lower().strip()
        if domain.startswith('www.'):
            domain = domain[4:]
 
        # Basic validation
-        return len(domain) > 0 and len(domain) < 255
+        if len(domain) < 3 or len(domain) > 255:
+            return False
+
+        # Must not be an IP address
+        try:
+            import socket
+            socket.inet_aton(domain)
+            return False  # It's an IPv4 address
+        except socket.error:
+            pass
+
+        # Check for reasonable domain structure
+        parts = domain.split('.')
+        if len(parts) < 2:
+            return False
+
+        # Each part should be reasonable
+        for part in parts:
+            if len(part) < 1 or len(part) > 63:
+                return False
+            if not part.replace('-', '').replace('_', '').isalnum():
+                return False
+
+        return True

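Note: the new retry and date-parsing paths above are easy to exercise in isolation. A minimal sketch, assuming the package imports as `recon` (the diff does not show file paths, so the module names here are guesses):

    from recon.config import Config
    from recon.cert_checker import CertificateChecker

    checker = CertificateChecker(Config.from_args())

    # _parse_date tries several strptime formats, then fromisoformat, else None
    print(checker._parse_date('2024-01-01T00:00:00Z'))  # datetime(2024, 1, 1, 0, 0)
    print(checker._parse_date('garbage'))               # None (logged at DEBUG)

    certs = checker.get_certificates('example.com')     # hits crt.sh, rate limited
    print(checker.extract_subdomains_from_certificates(certs))
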
View File

@@ -2,6 +2,7 @@
 """Configuration settings for the reconnaissance tool."""
 
 import os
+import logging
 from dataclasses import dataclass
 from typing import List, Optional
@@ -17,10 +18,11 @@ class Config:
    virustotal_key: Optional[str] = None
 
    # Rate limiting (requests per second)
-    DNS_RATE_LIMIT: float = 10.0
+    # DNS servers are generally quite robust, increased from 10 to 50/s
+    DNS_RATE_LIMIT: float = 50.0
    CRT_SH_RATE_LIMIT: float = 2.0
-    SHODAN_RATE_LIMIT: float = 0.5
-    VIRUSTOTAL_RATE_LIMIT: float = 0.25
+    SHODAN_RATE_LIMIT: float = 0.5       # Shodan is more restrictive
+    VIRUSTOTAL_RATE_LIMIT: float = 0.25  # VirusTotal is very restrictive
 
    # Recursive depth
    max_depth: int = 2
@@ -29,17 +31,50 @@ class Config:
    DNS_TIMEOUT: int = 5
    HTTP_TIMEOUT: int = 20
 
+    # Logging level
+    log_level: str = "INFO"
+
    def __post_init__(self):
        if self.DNS_SERVERS is None:
-            self.DNS_SERVERS = ['1.1.1.1', '8.8.8.8', '9.9.9.9']
+            # Use multiple reliable DNS servers
+            self.DNS_SERVERS = [
+                '1.1.1.1',  # Cloudflare
+                '8.8.8.8',  # Google
+                '9.9.9.9'   # Quad9
+            ]
 
    @classmethod
    def from_args(cls, shodan_key: Optional[str] = None,
                  virustotal_key: Optional[str] = None,
-                  max_depth: int = 2) -> 'Config':
+                  max_depth: int = 2,
+                  log_level: str = "INFO") -> 'Config':
        """Create config from command line arguments."""
        return cls(
            shodan_key=shodan_key,
            virustotal_key=virustotal_key,
-            max_depth=max_depth
+            max_depth=max_depth,
+            log_level=log_level.upper()
        )
+
+    def setup_logging(self, cli_mode: bool = True):
+        """Set up logging configuration."""
+        log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+
+        if cli_mode:
+            # For CLI, use a more readable format
+            log_format = '%(asctime)s [%(levelname)s] %(message)s'
+
+        logging.basicConfig(
+            level=getattr(logging, self.log_level, logging.INFO),
+            format=log_format,
+            datefmt='%H:%M:%S'
+        )
+
+        # Set specific loggers
+        logging.getLogger('urllib3').setLevel(logging.WARNING)   # Reduce HTTP noise
+        logging.getLogger('requests').setLevel(logging.WARNING)  # Reduce HTTP noise
+
+        if self.log_level == "DEBUG":
+            logging.getLogger(__name__.split('.')[0]).setLevel(logging.DEBUG)
+
+        return logging.getLogger(__name__)

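Note: setup_logging is now the single entry point for log configuration. A minimal usage sketch (the `recon` package name is an assumption):

    from recon.config import Config

    config = Config.from_args(max_depth=3, log_level="debug")  # upper-cased to "DEBUG"
    logger = config.setup_logging(cli_mode=True)  # emits "HH:MM:SS [LEVEL] message"
    logger.debug("now visible at DEBUG level")
    # urllib3/requests loggers stay at WARNING, keeping HTTP chatter out of the output
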
View File

@@ -5,6 +5,10 @@ from dataclasses import dataclass, field
 from typing import Dict, List, Set, Optional, Any
 from datetime import datetime
 import json
+import logging
+
+# Set up logging for this module
+logger = logging.getLogger(__name__)
 
 @dataclass
 class DNSRecord:
@@ -12,6 +16,13 @@ class DNSRecord:
    record_type: str
    value: str
    ttl: Optional[int] = None
+
+    def to_dict(self) -> dict:
+        return {
+            'record_type': self.record_type,
+            'value': self.value,
+            'ttl': self.ttl
+        }
 
 @dataclass
 class Certificate:
@@ -22,6 +33,16 @@ class Certificate:
    not_before: datetime
    not_after: datetime
    is_wildcard: bool = False
+
+    def to_dict(self) -> dict:
+        return {
+            'id': self.id,
+            'issuer': self.issuer,
+            'subject': self.subject,
+            'not_before': self.not_before.isoformat() if self.not_before else None,
+            'not_after': self.not_after.isoformat() if self.not_after else None,
+            'is_wildcard': self.is_wildcard
+        }
 
 @dataclass
 class ShodanResult:
@@ -31,6 +52,15 @@ class ShodanResult:
    services: Dict[str, Any]
    organization: Optional[str] = None
    country: Optional[str] = None
+
+    def to_dict(self) -> dict:
+        return {
+            'ip': self.ip,
+            'ports': self.ports,
+            'services': self.services,
+            'organization': self.organization,
+            'country': self.country
+        }
 
 @dataclass
 class VirusTotalResult:
@@ -40,6 +70,15 @@ class VirusTotalResult:
    total: int
    scan_date: datetime
    permalink: str
+
+    def to_dict(self) -> dict:
+        return {
+            'resource': self.resource,
+            'positives': self.positives,
+            'total': self.total,
+            'scan_date': self.scan_date.isoformat() if self.scan_date else None,
+            'permalink': self.permalink
+        }
 
 @dataclass
 class ReconData:
@@ -67,12 +106,15 @@ class ReconData:
    def add_hostname(self, hostname: str, depth: int = 0) -> None:
        """Add a hostname to the dataset."""
-        self.hostnames.add(hostname.lower())
-        self.depth_map[hostname.lower()] = depth
+        hostname = hostname.lower()
+        self.hostnames.add(hostname)
+        self.depth_map[hostname] = depth
+        logger.info(f"Added hostname: {hostname} (depth: {depth})")
 
    def add_ip_address(self, ip: str) -> None:
        """Add an IP address to the dataset."""
        self.ip_addresses.add(ip)
+        logger.info(f"Added IP address: {ip}")
 
    def add_dns_record(self, hostname: str, record: DNSRecord) -> None:
        """Add a DNS record for a hostname."""
@@ -80,6 +122,17 @@ class ReconData:
        if hostname not in self.dns_records:
            self.dns_records[hostname] = []
        self.dns_records[hostname].append(record)
+        logger.debug(f"Added DNS record for {hostname}: {record.record_type} -> {record.value}")
+
+    def add_shodan_result(self, ip: str, result: ShodanResult) -> None:
+        """Add Shodan result."""
+        self.shodan_results[ip] = result
+        logger.info(f"Added Shodan result for {ip}: {len(result.ports)} ports, org: {result.organization}")
+
+    def add_virustotal_result(self, resource: str, result: VirusTotalResult) -> None:
+        """Add VirusTotal result."""
+        self.virustotal_results[resource] = result
+        logger.info(f"Added VirusTotal result for {resource}: {result.positives}/{result.total} detections")
 
    def get_new_subdomains(self, max_depth: int) -> Set[str]:
        """Get subdomains that haven't been processed yet and are within depth limit."""
@@ -90,53 +143,62 @@ class ReconData:
                new_domains.add(hostname)
        return new_domains
 
+    def get_stats(self) -> Dict[str, int]:
+        """Get current statistics."""
+        return {
+            'hostnames': len(self.hostnames),
+            'ip_addresses': len(self.ip_addresses),
+            'dns_records': sum(len(records) for records in self.dns_records.values()),
+            'certificates': sum(len(certs) for certs in self.certificates.values()),
+            'shodan_results': len(self.shodan_results),
+            'virustotal_results': len(self.virustotal_results)
+        }
+
    def to_dict(self) -> dict:
        """Export data as a serializable dictionary."""
-        return {
-            'hostnames': list(self.hostnames),
-            'ip_addresses': list(self.ip_addresses),
+        logger.debug(f"Serializing ReconData with stats: {self.get_stats()}")
+
+        result = {
+            'hostnames': sorted(list(self.hostnames)),
+            'ip_addresses': sorted(list(self.ip_addresses)),
            'dns_records': {
-                host: [{'type': r.record_type, 'value': r.value, 'ttl': r.ttl}
-                       for r in records]
+                host: [record.to_dict() for record in records]
                for host, records in self.dns_records.items()
            },
-            'reverse_dns': self.reverse_dns,
+            'reverse_dns': dict(self.reverse_dns),
            'certificates': {
-                host: [{
-                    'id': cert.id,
-                    'issuer': cert.issuer,
-                    'subject': cert.subject,
-                    'not_before': cert.not_before.isoformat(),
-                    'not_after': cert.not_after.isoformat(),
-                    'is_wildcard': cert.is_wildcard
-                } for cert in certs]
+                host: [cert.to_dict() for cert in certs]
                for host, certs in self.certificates.items()
            },
            'shodan_results': {
-                ip: {
-                    'ports': result.ports,
-                    'services': result.services,
-                    'organization': result.organization,
-                    'country': result.country
-                } for ip, result in self.shodan_results.items()
+                ip: result.to_dict() for ip, result in self.shodan_results.items()
            },
            'virustotal_results': {
-                resource: {
-                    'positives': result.positives,
-                    'total': result.total,
-                    'scan_date': result.scan_date.isoformat(),
-                    'permalink': result.permalink
-                } for resource, result in self.virustotal_results.items()
+                resource: result.to_dict() for resource, result in self.virustotal_results.items()
            },
+            'depth_map': dict(self.depth_map),
            'metadata': {
-                'start_time': self.start_time.isoformat(),
+                'start_time': self.start_time.isoformat() if self.start_time else None,
                'end_time': self.end_time.isoformat() if self.end_time else None,
-                'total_hostnames': len(self.hostnames),
-                'total_ips': len(self.ip_addresses)
+                'stats': self.get_stats()
            }
        }
+
+        logger.info(f"Serialized data contains: {len(result['hostnames'])} hostnames, "
+                    f"{len(result['ip_addresses'])} IPs, {len(result['shodan_results'])} Shodan results, "
+                    f"{len(result['virustotal_results'])} VirusTotal results")
+        return result
 
    def to_json(self) -> str:
        """Export data as JSON."""
-        # Now uses the to_dict method
-        return json.dumps(self.to_dict(), indent=2, default=str)
+        try:
+            return json.dumps(self.to_dict(), indent=2, ensure_ascii=False)
+        except Exception as e:
+            logger.error(f"Failed to serialize to JSON: {e}")
+            # Return minimal JSON in case of error
+            return json.dumps({
+                'error': str(e),
+                'stats': self.get_stats(),
+                'timestamp': datetime.now().isoformat()
+            }, indent=2)

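Note: with the per-dataclass to_dict helpers, serialization round-trips cleanly. A small sketch, assuming ReconData's collection fields default to empty and the package is importable as `recon` (both assumptions):

    import json
    from datetime import datetime
    from recon.data_structures import ReconData, DNSRecord

    data = ReconData()
    data.start_time = datetime.now()
    data.add_hostname('example.com')
    data.add_dns_record('example.com', DNSRecord('A', '93.184.216.34', ttl=300))

    print(data.get_stats())              # counts per category
    parsed = json.loads(data.to_json())  # sets become sorted lists, datetimes ISO strings
    print(parsed['metadata']['stats'])
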
View File

@@ -8,9 +8,13 @@ import dns.zone
 from typing import List, Dict, Optional, Set
 import socket
 import time
+import logging
 from .data_structures import DNSRecord, ReconData
 from .config import Config
 
+# Module logger
+logger = logging.getLogger(__name__)
+
 class DNSResolver:
     """DNS resolution and record lookup."""
@@ -23,22 +27,33 @@ class DNSResolver:
    def __init__(self, config: Config):
        self.config = config
        self.last_request = 0
+        self.query_count = 0
+        logger.info(f"🌐 DNS resolver initialized with {len(config.DNS_SERVERS)} servers: {config.DNS_SERVERS}")
+        logger.info(f"⚡ DNS rate limit: {config.DNS_RATE_LIMIT}/s, timeout: {config.DNS_TIMEOUT}s")
 
    def _rate_limit(self):
-        """Apply rate limiting."""
+        """Apply rate limiting - more graceful for DNS servers."""
        now = time.time()
        time_since_last = now - self.last_request
        min_interval = 1.0 / self.config.DNS_RATE_LIMIT
        if time_since_last < min_interval:
-            time.sleep(min_interval - time_since_last)
+            sleep_time = min_interval - time_since_last
+            # Only log if sleep is significant to reduce spam
+            if sleep_time > 0.1:
+                logger.debug(f"⏸️ DNS rate limiting: sleeping for {sleep_time:.2f}s")
+            time.sleep(sleep_time)
        self.last_request = time.time()
+        self.query_count += 1
 
    def resolve_hostname(self, hostname: str) -> List[str]:
        """Resolve hostname to IP addresses."""
        ips = []
+        logger.debug(f"🔍 Resolving hostname: {hostname}")
 
        for dns_server in self.config.DNS_SERVERS:
            self._rate_limit()
            resolver = dns.resolver.Resolver()
@@ -50,24 +65,45 @@ class DNSResolver:
                answers = resolver.resolve(hostname, 'A')
                for answer in answers:
                    ips.append(str(answer))
-            except Exception:
-                pass
+                    logger.debug(f"✅ A record for {hostname}: {answer}")
+            except dns.resolver.NXDOMAIN:
+                logger.debug(f"❌ NXDOMAIN for {hostname} A record on {dns_server}")
+            except dns.resolver.NoAnswer:
+                logger.debug(f"⚠️ No A record for {hostname} on {dns_server}")
+            except Exception as e:
+                logger.debug(f"⚠️ Error resolving A record for {hostname} on {dns_server}: {e}")
 
            try:
-                # Try AAAA records
+                # Try AAAA records (IPv6)
                answers = resolver.resolve(hostname, 'AAAA')
                for answer in answers:
                    ips.append(str(answer))
-            except Exception:
-                pass
+                    logger.debug(f"✅ AAAA record for {hostname}: {answer}")
+            except dns.resolver.NXDOMAIN:
+                logger.debug(f"❌ NXDOMAIN for {hostname} AAAA record on {dns_server}")
+            except dns.resolver.NoAnswer:
+                logger.debug(f"⚠️ No AAAA record for {hostname} on {dns_server}")
+            except Exception as e:
+                logger.debug(f"⚠️ Error resolving AAAA record for {hostname} on {dns_server}: {e}")
 
-        return list(set(ips))  # Remove duplicates
+        unique_ips = list(set(ips))
+        if unique_ips:
+            logger.info(f"✅ Resolved {hostname} to {len(unique_ips)} unique IPs: {unique_ips}")
+        else:
+            logger.debug(f"❌ No IPs found for {hostname}")
+
+        return unique_ips
 
    def get_all_dns_records(self, hostname: str) -> List[DNSRecord]:
        """Get all DNS records for a hostname."""
        records = []
+        successful_queries = 0
+        logger.debug(f"📋 Getting all DNS records for: {hostname}")
 
        for record_type in self.RECORD_TYPES:
+            type_found = False
            for dns_server in self.config.DNS_SERVERS:
                self._rate_limit()
                resolver = dns.resolver.Resolver()
@@ -82,50 +118,114 @@ class DNSResolver:
                            value=str(answer),
                            ttl=answers.ttl
                        ))
-            except Exception:
-                continue
+                        if not type_found:
+                            logger.debug(f"✅ Found {record_type} record for {hostname}: {answer}")
+                            type_found = True
+
+                    if type_found:
+                        successful_queries += 1
+                        break  # Found records, no need to query other DNS servers for this type
+
+                except dns.resolver.NXDOMAIN:
+                    logger.debug(f"❌ NXDOMAIN for {hostname} {record_type} on {dns_server}")
+                    break  # Domain doesn't exist, no point checking other servers
+                except dns.resolver.NoAnswer:
+                    logger.debug(f"⚠️ No {record_type} record for {hostname} on {dns_server}")
+                    continue  # Try next DNS server
+                except dns.resolver.Timeout:
+                    logger.debug(f"⏱️ Timeout for {hostname} {record_type} on {dns_server}")
+                    continue  # Try next DNS server
+                except Exception as e:
+                    logger.debug(f"⚠️ Error querying {record_type} for {hostname} on {dns_server}: {e}")
+                    continue  # Try next DNS server
+
+        logger.info(f"📋 Found {len(records)} DNS records for {hostname} across {len(set(r.record_type for r in records))} record types")
+
+        # Log query statistics every 100 queries
+        if self.query_count % 100 == 0:
+            logger.info(f"📊 DNS query statistics: {self.query_count} total queries performed")
+
        return records
 
    def reverse_dns_lookup(self, ip: str) -> Optional[str]:
        """Perform reverse DNS lookup."""
+        logger.debug(f"🔍 Reverse DNS lookup for: {ip}")
        try:
            self._rate_limit()
-            return socket.gethostbyaddr(ip)[0]
-        except Exception:
+            hostname = socket.gethostbyaddr(ip)[0]
+            logger.info(f"✅ Reverse DNS for {ip}: {hostname}")
+            return hostname
+        except socket.herror:
+            logger.debug(f"❌ No reverse DNS for {ip}")
+            return None
+        except Exception as e:
+            logger.debug(f"⚠️ Error in reverse DNS for {ip}: {e}")
            return None
 
    def extract_subdomains_from_dns(self, records: List[DNSRecord]) -> Set[str]:
        """Extract potential subdomains from DNS records."""
        subdomains = set()
+        logger.debug(f"🌿 Extracting subdomains from {len(records)} DNS records")
 
        for record in records:
            value = record.value.lower()
 
-            # Extract from CNAME, NS, and correctly from MX records
-            if record.record_type == 'MX':
-                # MX record values are like: "10 mail.example.com."
-                # We need to extract the hostname part.
-                parts = value.split()
-                if len(parts) == 2:
-                    hostname = parts[1].rstrip('.')
-                    if self._is_valid_hostname(hostname):
-                        subdomains.add(hostname)
-            elif record.record_type in ['CNAME', 'NS']:
-                # These records are just the hostname
-                hostname = value.rstrip('.')
-                if self._is_valid_hostname(hostname):
-                    subdomains.add(hostname)
-
-            # Extract from TXT records (sometimes contain domain references)
-            elif record.record_type == 'TXT':
-                # Look for domain-like strings in TXT records
-                parts = value.split()
-                for part in parts:
-                    if '.' in part and not part.startswith('http'):
-                        clean_part = part.strip('",\'()[]{}')
-                        if self._is_valid_hostname(clean_part):
-                            subdomains.add(clean_part)
+            # Extract from different record types
+            try:
+                if record.record_type == 'MX':
+                    # MX record format: "priority hostname"
+                    parts = value.split()
+                    if len(parts) >= 2:
+                        hostname = parts[-1].rstrip('.')  # Take the last part (hostname)
+                        if self._is_valid_hostname(hostname):
+                            subdomains.add(hostname)
+                            logger.debug(f"🌿 Found subdomain from MX: {hostname}")
+
+                elif record.record_type in ['CNAME', 'NS']:
+                    # Direct hostname records
+                    hostname = value.rstrip('.')
+                    if self._is_valid_hostname(hostname):
+                        subdomains.add(hostname)
+                        logger.debug(f"🌿 Found subdomain from {record.record_type}: {hostname}")
+
+                elif record.record_type == 'TXT':
+                    # Search for domain-like strings in TXT records
+                    # Common patterns: include:example.com, v=spf1 include:_spf.google.com
+                    words = value.replace(',', ' ').replace(';', ' ').split()
+                    for word in words:
+                        # Look for include: patterns
+                        if word.startswith('include:'):
+                            hostname = word[8:].rstrip('.')
+                            if self._is_valid_hostname(hostname):
+                                subdomains.add(hostname)
+                                logger.debug(f"🌿 Found subdomain from TXT include: {hostname}")
+                        # Look for other domain patterns
+                        elif '.' in word and not word.startswith('http'):
+                            clean_word = word.strip('",\'()[]{}').rstrip('.')
+                            if self._is_valid_hostname(clean_word):
+                                subdomains.add(clean_word)
+                                logger.debug(f"🌿 Found subdomain from TXT: {clean_word}")
+
+                elif record.record_type == 'SRV':
+                    # SRV record format: "priority weight port target"
+                    parts = value.split()
+                    if len(parts) >= 4:
+                        hostname = parts[-1].rstrip('.')  # Target hostname
+                        if self._is_valid_hostname(hostname):
+                            subdomains.add(hostname)
+                            logger.debug(f"🌿 Found subdomain from SRV: {hostname}")
+
+            except Exception as e:
+                logger.debug(f"⚠️ Error extracting subdomain from {record.record_type} record '{value}': {e}")
+                continue
+
+        if subdomains:
+            logger.info(f"🌿 Extracted {len(subdomains)} potential subdomains")
+        else:
+            logger.debug("❌ No subdomains extracted from DNS records")
 
        return subdomains
@@ -138,6 +238,43 @@ class DNSResolver:
        if '.' not in hostname:
            return False
 
-        # Basic character check
-        allowed_chars = set('abcdefghijklmnopqrstuvwxyz0123456789.-')
-        return all(c in allowed_chars for c in hostname.lower())
+        # Must not be an IP address
+        if self._looks_like_ip(hostname):
+            return False
+
+        # Basic character check - allow international domains
+        # Remove overly restrictive character filtering
+        if not hostname.replace('-', '').replace('.', '').replace('_', '').isalnum():
+            # Allow some special cases for internationalized domains
+            try:
+                hostname.encode('ascii')
+            except UnicodeEncodeError:
+                return False  # Skip non-ASCII for now
+
+        # Must have reasonable length parts
+        parts = hostname.split('.')
+        if len(parts) < 2:
+            return False
+
+        # Each part should be reasonable length
+        for part in parts:
+            if len(part) < 1 or len(part) > 63:
+                return False
+
+        return True
+
+    def _looks_like_ip(self, text: str) -> bool:
+        """Check if text looks like an IP address."""
+        try:
+            socket.inet_aton(text)
+            return True
+        except socket.error:
+            pass
+
+        try:
+            socket.inet_pton(socket.AF_INET6, text)
+            return True
+        except socket.error:
+            pass
+
+        return False

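Note: the relaxed validator now accepts underscores (common in service names such as _dmarc) and rejects bare IPs. A quick check, under the same `recon` package-name assumption:

    from recon.config import Config
    from recon.dns_resolver import DNSResolver

    resolver = DNSResolver(Config.from_args())
    print(resolver._looks_like_ip('192.0.2.1'))               # True (IPv4)
    print(resolver._looks_like_ip('2001:db8::1'))             # True (IPv6)
    print(resolver._is_valid_hostname('_dmarc.example.com'))  # True, underscores allowed
    print(resolver._is_valid_hostname('192.0.2.1'))           # False, IPs excluded
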
View File

@@ -4,12 +4,16 @@
 import click
 import json
 import sys
+import logging
 from pathlib import Path
 
 from .config import Config
 from .reconnaissance import ReconnaissanceEngine
 from .report_generator import ReportGenerator
 from .web_app import create_app
 
+# Module logger
+logger = logging.getLogger(__name__)
+
 @click.command()
 @click.argument('target', required=False)
 @click.option('--web', is_flag=True, help='Start web interface instead of CLI')
@@ -20,87 +24,171 @@ from .web_app import create_app
 @click.option('--json-only', is_flag=True, help='Only output JSON')
 @click.option('--text-only', is_flag=True, help='Only output text report')
 @click.option('--port', default=5000, help='Port for web interface (default: 5000)')
-def main(target, web, shodan_key, virustotal_key, max_depth, output, json_only, text_only, port):
+@click.option('--verbose', '-v', is_flag=True, help='Enable verbose logging (DEBUG level)')
+@click.option('--quiet', '-q', is_flag=True, help='Quiet mode (WARNING level only)')
+def main(target, web, shodan_key, virustotal_key, max_depth, output, json_only, text_only, port, verbose, quiet):
    """DNS Reconnaissance Tool
 
    Examples:
        recon example.com                # Scan example.com
        recon example                    # Try example.* for all TLDs
        recon example.com --max-depth 3  # Deeper recursion
+        recon example.com -v             # Verbose logging
        recon --web                      # Start web interface
    """
+    # Determine log level
+    if verbose:
+        log_level = "DEBUG"
+    elif quiet:
+        log_level = "WARNING"
+    else:
+        log_level = "INFO"
+
+    # Create configuration and setup logging
+    config = Config.from_args(shodan_key, virustotal_key, max_depth, log_level)
+    config.setup_logging(cli_mode=True)
+
    if web:
        # Start web interface
-        app = create_app(Config.from_args(shodan_key, virustotal_key, max_depth))
-        app.run(host='0.0.0.0', port=port, debug=True)
+        logger.info("🌐 Starting web interface...")
+        app = create_app(config)
+        logger.info(f"🚀 Web interface starting on http://0.0.0.0:{port}")
+        app.run(host='0.0.0.0', port=port, debug=False)  # Changed debug to False to reduce noise
        return
 
    if not target:
        click.echo("Error: TARGET is required for CLI mode. Use --web for web interface.")
        sys.exit(1)
 
-    # Create configuration
-    config = Config.from_args(shodan_key, virustotal_key, max_depth)
-
    # Initialize reconnaissance engine
+    logger.info("🔧 Initializing reconnaissance engine...")
    engine = ReconnaissanceEngine(config)
 
-    # Set up progress callback
+    # Set up progress callback for CLI
    def progress_callback(message, percentage=None):
-        if percentage:
+        if percentage is not None:
            click.echo(f"[{percentage:3d}%] {message}")
        else:
            click.echo(f"  {message}")
 
    engine.set_progress_callback(progress_callback)
 
-    # Run reconnaissance
-    click.echo(f"Starting reconnaissance for: {target}")
-    click.echo(f"Max recursion depth: {max_depth}")
+    # Display startup information
+    click.echo("=" * 60)
+    click.echo("🔍 DNS RECONNAISSANCE TOOL")
+    click.echo("=" * 60)
+    click.echo(f"🎯 Target: {target}")
+    click.echo(f"📊 Max recursion depth: {max_depth}")
+    click.echo(f"🌐 DNS servers: {', '.join(config.DNS_SERVERS[:3])}{'...' if len(config.DNS_SERVERS) > 3 else ''}")
+    click.echo(f"⚡ DNS rate limit: {config.DNS_RATE_LIMIT}/s")
 
    if shodan_key:
-        click.echo("✓ Shodan integration enabled")
+        click.echo("✅ Shodan integration enabled")
+        logger.info(f"🕵️ Shodan API key provided (ends with: ...{shodan_key[-4:] if len(shodan_key) > 4 else shodan_key})")
+    else:
+        click.echo("⚠️ Shodan integration disabled (no API key)")
+
    if virustotal_key:
-        click.echo("✓ VirusTotal integration enabled")
+        click.echo("✅ VirusTotal integration enabled")
+        logger.info(f"🛡️ VirusTotal API key provided (ends with: ...{virustotal_key[-4:] if len(virustotal_key) > 4 else virustotal_key})")
+    else:
+        click.echo("⚠️ VirusTotal integration disabled (no API key)")
 
    click.echo("")
 
+    # Run reconnaissance
    try:
+        logger.info(f"🚀 Starting reconnaissance for target: {target}")
        data = engine.run_reconnaissance(target)
 
+        # Display final statistics
+        stats = data.get_stats()
+        click.echo("")
+        click.echo("=" * 60)
+        click.echo("📊 RECONNAISSANCE COMPLETE")
+        click.echo("=" * 60)
+        click.echo(f"🏠 Hostnames discovered: {stats['hostnames']}")
+        click.echo(f"🌐 IP addresses found: {stats['ip_addresses']}")
+        click.echo(f"📋 DNS records collected: {stats['dns_records']}")
+        click.echo(f"📜 Certificates found: {stats['certificates']}")
+        click.echo(f"🕵️ Shodan results: {stats['shodan_results']}")
+        click.echo(f"🛡️ VirusTotal results: {stats['virustotal_results']}")
+
+        # Calculate and display timing
+        if data.end_time and data.start_time:
+            duration = data.end_time - data.start_time
+            click.echo(f"⏱️ Total time: {duration}")
+
+        click.echo("")
+
        # Generate reports
+        logger.info("📄 Generating reports...")
        report_gen = ReportGenerator(data)
 
        if output:
            # Save to files
+            saved_files = []
+
            if not text_only:
                json_file = f"{output}.json"
-                with open(json_file, 'w') as f:
-                    f.write(data.to_json())
-                click.echo(f"JSON report saved to: {json_file}")
+                try:
+                    json_content = data.to_json()
+                    with open(json_file, 'w', encoding='utf-8') as f:
+                        f.write(json_content)
+                    saved_files.append(json_file)
+                    logger.info(f"💾 JSON report saved: {json_file}")
+                except Exception as e:
+                    logger.error(f"❌ Failed to save JSON report: {e}")
 
            if not json_only:
                text_file = f"{output}.txt"
-                with open(text_file, 'w') as f:
-                    f.write(report_gen.generate_text_report())
-                click.echo(f"Text report saved to: {text_file}")
+                try:
+                    with open(text_file, 'w', encoding='utf-8') as f:
+                        f.write(report_gen.generate_text_report())
+                    saved_files.append(text_file)
+                    logger.info(f"💾 Text report saved: {text_file}")
+                except Exception as e:
+                    logger.error(f"❌ Failed to save text report: {e}")
+
+            if saved_files:
+                click.echo(f"💾 Reports saved:")
+                for file in saved_files:
+                    click.echo(f"  📄 {file}")
        else:
            # Output to stdout
            if json_only:
-                click.echo(data.to_json())
+                try:
+                    click.echo(data.to_json())
+                except Exception as e:
+                    logger.error(f"❌ Failed to generate JSON output: {e}")
+                    click.echo(f"Error generating JSON: {e}")
            elif text_only:
-                click.echo(report_gen.generate_text_report())
+                try:
+                    click.echo(report_gen.generate_text_report())
+                except Exception as e:
+                    logger.error(f"❌ Failed to generate text report: {e}")
+                    click.echo(f"Error generating text report: {e}")
            else:
                # Default: show text report
-                click.echo(report_gen.generate_text_report())
-                click.echo(f"\nTo get JSON output, use: --json-only")
-                click.echo(f"To save reports, use: --output filename")
+                try:
+                    click.echo(report_gen.generate_text_report())
+                    click.echo(f"\n💡 To get JSON output, use: --json-only")
+                    click.echo(f"💡 To save reports, use: --output filename")
+                except Exception as e:
+                    logger.error(f"❌ Failed to generate report: {e}")
+                    click.echo(f"Error generating report: {e}")
 
    except KeyboardInterrupt:
-        click.echo("\nReconnaissance interrupted by user.")
+        logger.warning("⚠️ Reconnaissance interrupted by user")
+        click.echo("\n⚠️ Reconnaissance interrupted by user.")
        sys.exit(1)
    except Exception as e:
-        click.echo(f"Error during reconnaissance: {e}")
+        logger.error(f"❌ Error during reconnaissance: {e}", exc_info=True)
+        click.echo(f"❌ Error during reconnaissance: {e}")
+        if verbose:
+            raise  # Re-raise in verbose mode to show full traceback
        sys.exit(1)
 
 if __name__ == '__main__':

View File

@@ -3,6 +3,7 @@
 
 import threading
 import concurrent.futures
+import logging
 from datetime import datetime
 from typing import Set, List, Optional
 from .data_structures import ReconData
@@ -13,12 +14,14 @@ from .shodan_client import ShodanClient
 from .virustotal_client import VirusTotalClient
 from .tld_fetcher import TLDFetcher
 
+# Set up logging for this module
+logger = logging.getLogger(__name__)
+
 class ReconnaissanceEngine:
     """Main reconnaissance engine."""
 
     def __init__(self, config: Config):
         self.config = config
-        # self.data = ReconData()  # <-- REMOVED FROM HERE
 
         # Initialize clients
         self.dns_resolver = DNSResolver(config)
@@ -29,10 +32,16 @@ class ReconnaissanceEngine:
        self.shodan_client = None
        if config.shodan_key:
            self.shodan_client = ShodanClient(config.shodan_key, config)
+            logger.info("✅ Shodan client initialized")
+        else:
+            logger.info("⚠️ Shodan API key not provided, skipping Shodan integration")
 
        self.virustotal_client = None
        if config.virustotal_key:
            self.virustotal_client = VirusTotalClient(config.virustotal_key, config)
+            logger.info("✅ VirusTotal client initialized")
+        else:
+            logger.info("⚠️ VirusTotal API key not provided, skipping VirusTotal integration")
 
        # Progress tracking
        self.progress_callback = None
@@ -44,6 +53,7 @@ class ReconnaissanceEngine:
    def _update_progress(self, message: str, percentage: int = None):
        """Update progress if callback is set."""
+        logger.info(f"Progress: {message} ({percentage}%)" if percentage else f"Progress: {message}")
        if self.progress_callback:
            self.progress_callback(message, percentage)
@@ -52,15 +62,22 @@ class ReconnaissanceEngine:
        self.data = ReconData()
        self.data.start_time = datetime.now()
 
+        logger.info(f"🚀 Starting reconnaissance for target: {target}")
+        logger.info(f"📊 Configuration: max_depth={self.config.max_depth}, "
+                    f"DNS_rate={self.config.DNS_RATE_LIMIT}/s")
+
        try:
            # Determine if target is hostname.tld or just hostname
            if '.' in target:
+                logger.info(f"🎯 Target '{target}' appears to be a full domain name")
                self._update_progress(f"Starting reconnaissance for {target}", 0)
                self.data.add_hostname(target, 0)
                initial_targets = {target}
            else:
+                logger.info(f"🔍 Target '{target}' appears to be a hostname, expanding to all TLDs")
                self._update_progress(f"Expanding {target} to all TLDs", 5)
                initial_targets = self._expand_hostname_to_tlds(target)
+                logger.info(f"📋 Found {len(initial_targets)} valid domains after TLD expansion")
 
            self._update_progress("Resolving initial targets", 10)
@@ -71,24 +88,40 @@ class ReconnaissanceEngine:
            self._update_progress("Performing external service lookups", 90)
            self._perform_external_lookups()
 
+            # Log final statistics
+            stats = self.data.get_stats()
+            logger.info(f"📈 Final statistics: {stats}")
+
            self._update_progress("Reconnaissance complete", 100)
 
+        except Exception as e:
+            logger.error(f"❌ Error during reconnaissance: {e}", exc_info=True)
+            raise
        finally:
            self.data.end_time = datetime.now()
+            duration = self.data.end_time - self.data.start_time
+            logger.info(f"⏱️ Total reconnaissance time: {duration}")
 
        return self.data
 
    def _expand_hostname_to_tlds(self, hostname: str) -> Set[str]:
        """Expand hostname to all possible TLDs."""
+        logger.info(f"🌐 Fetching TLD list for hostname expansion")
        tlds = self.tld_fetcher.get_tlds()
+        logger.info(f"📝 Testing against {len(tlds)} TLDs")
        targets = set()
+        tested_count = 0
 
        for i, tld in enumerate(tlds):
            full_hostname = f"{hostname}.{tld}"
 
            # Quick check if domain resolves
            ips = self.dns_resolver.resolve_hostname(full_hostname)
+            tested_count += 1
+
            if ips:
+                logger.info(f"✅ Found valid domain: {full_hostname} -> {ips}")
                self.data.add_hostname(full_hostname, 0)
                targets.add(full_hostname)
                for ip in ips:
@@ -99,6 +132,7 @@ class ReconnaissanceEngine:
                progress = 5 + int((i / len(tlds)) * 5)  # 5-10% range
                self._update_progress(f"Checked {i}/{len(tlds)} TLDs, found {len(targets)} valid domains", progress)
 
+        logger.info(f"🎯 TLD expansion complete: tested {tested_count} TLDs, found {len(targets)} valid domains")
        return targets
 
    def _process_targets_recursively(self, targets: Set[str]):
@@ -106,28 +140,40 @@ class ReconnaissanceEngine:
        current_depth = 0
 
        while current_depth <= self.config.max_depth and targets:
-            self._update_progress(f"Processing depth {current_depth}", 15 + (current_depth * 25))
+            logger.info(f"🔄 Processing depth {current_depth} with {len(targets)} targets")
+            self._update_progress(f"Processing depth {current_depth} ({len(targets)} targets)", 15 + (current_depth * 25))
 
            new_targets = set()
 
            for target in targets:
+                logger.debug(f"🔍 Processing target: {target}")
+
                # DNS resolution and record gathering
                self._process_single_target(target, current_depth)
 
                # Extract new subdomains
                if current_depth < self.config.max_depth:
                    new_subdomains = self._extract_new_subdomains(target)
+                    logger.debug(f"🌿 Found {len(new_subdomains)} new subdomains from {target}")
+
                    for subdomain in new_subdomains:
                        self.data.add_hostname(subdomain, current_depth + 1)
                        new_targets.add(subdomain)
 
+            logger.info(f"📊 Depth {current_depth} complete. Found {len(new_targets)} new targets for next depth")
            targets = new_targets
            current_depth += 1
 
+        logger.info(f"🏁 Recursive processing complete after {current_depth} levels")
+
    def _process_single_target(self, hostname: str, depth: int):
        """Process a single target hostname."""
+        logger.debug(f"🎯 Processing single target: {hostname} at depth {depth}")
 
        # Get all DNS records
        dns_records = self.dns_resolver.get_all_dns_records(hostname)
+        logger.debug(f"📋 Found {len(dns_records)} DNS records for {hostname}")
 
        for record in dns_records:
            self.data.add_dns_record(hostname, record)
@@ -136,9 +182,13 @@ class ReconnaissanceEngine:
                self.data.add_ip_address(record.value)
 
        # Get certificates
+        logger.debug(f"🔐 Checking certificates for {hostname}")
        certificates = self.cert_checker.get_certificates(hostname)
        if certificates:
            self.data.certificates[hostname] = certificates
+            logger.info(f"📜 Found {len(certificates)} certificates for {hostname}")
+        else:
+            logger.debug(f"❌ No certificates found for {hostname}")
 
    def _extract_new_subdomains(self, hostname: str) -> Set[str]:
        """Extract new subdomains from DNS records and certificates."""
@@ -150,6 +200,7 @@ class ReconnaissanceEngine:
                self.data.dns_records[hostname]
            )
            new_subdomains.update(dns_subdomains)
+            logger.debug(f"🌐 Extracted {len(dns_subdomains)} subdomains from DNS records of {hostname}")
 
        # From certificates
        if hostname in self.data.certificates:
@@ -157,35 +208,94 @@ class ReconnaissanceEngine:
                self.data.certificates[hostname]
            )
            new_subdomains.update(cert_subdomains)
+            logger.debug(f"🔐 Extracted {len(cert_subdomains)} subdomains from certificates of {hostname}")
 
        # Filter out already known hostnames
-        return new_subdomains - self.data.hostnames
+        filtered_subdomains = new_subdomains - self.data.hostnames
+        logger.debug(f"🆕 {len(filtered_subdomains)} new subdomains after filtering")
+        return filtered_subdomains
 
    def _perform_external_lookups(self):
        """Perform Shodan and VirusTotal lookups."""
+        logger.info(f"🔍 Starting external lookups for {len(self.data.ip_addresses)} IPs and {len(self.data.hostnames)} hostnames")
 
        # Reverse DNS for all IPs
+        logger.info("🔄 Performing reverse DNS lookups")
+        reverse_dns_count = 0
        for ip in self.data.ip_addresses:
            reverse = self.dns_resolver.reverse_dns_lookup(ip)
            if reverse:
                self.data.reverse_dns[ip] = reverse
+                reverse_dns_count += 1
+                logger.debug(f"🔙 Reverse DNS for {ip}: {reverse}")
+
+        logger.info(f"✅ Completed reverse DNS: {reverse_dns_count}/{len(self.data.ip_addresses)} successful")
 
        # Shodan lookups
        if self.shodan_client:
+            logger.info(f"🕵️ Starting Shodan lookups for {len(self.data.ip_addresses)} IPs")
+            shodan_success_count = 0
            for ip in self.data.ip_addresses:
-                result = self.shodan_client.lookup_ip(ip)
-                if result:
-                    self.data.shodan_results[ip] = result
+                try:
+                    logger.debug(f"🔍 Querying Shodan for IP: {ip}")
+                    result = self.shodan_client.lookup_ip(ip)
+                    if result:
+                        self.data.add_shodan_result(ip, result)
+                        shodan_success_count += 1
+                        logger.info(f"✅ Shodan result for {ip}: {len(result.ports)} ports")
+                    else:
+                        logger.debug(f"❌ No Shodan data for {ip}")
+                except Exception as e:
+                    logger.warning(f"⚠️ Error querying Shodan for {ip}: {e}")
+
+            logger.info(f"✅ Shodan lookups complete: {shodan_success_count}/{len(self.data.ip_addresses)} successful")
+        else:
+            logger.info("⚠️ Skipping Shodan lookups (no API key)")
 
        # VirusTotal lookups
        if self.virustotal_client:
+            total_resources = len(self.data.ip_addresses) + len(self.data.hostnames)
+            logger.info(f"🛡️ Starting VirusTotal lookups for {total_resources} resources")
+            vt_success_count = 0
+
            # Check IPs
            for ip in self.data.ip_addresses:
-                result = self.virustotal_client.lookup_ip(ip)
-                if result:
-                    self.data.virustotal_results[ip] = result
+                try:
+                    logger.debug(f"🔍 Querying VirusTotal for IP: {ip}")
+                    result = self.virustotal_client.lookup_ip(ip)
+                    if result:
+                        self.data.add_virustotal_result(ip, result)
+                        vt_success_count += 1
+                        logger.info(f"🛡️ VirusTotal result for {ip}: {result.positives}/{result.total} detections")
+                    else:
+                        logger.debug(f"❌ No VirusTotal data for {ip}")
+                except Exception as e:
+                    logger.warning(f"⚠️ Error querying VirusTotal for IP {ip}: {e}")
 
            # Check domains
            for hostname in self.data.hostnames:
-                result = self.virustotal_client.lookup_domain(hostname)
-                if result:
-                    self.data.virustotal_results[hostname] = result
+                try:
+                    logger.debug(f"🔍 Querying VirusTotal for domain: {hostname}")
+                    result = self.virustotal_client.lookup_domain(hostname)
+                    if result:
+                        self.data.add_virustotal_result(hostname, result)
+                        vt_success_count += 1
+                        logger.info(f"🛡️ VirusTotal result for {hostname}: {result.positives}/{result.total} detections")
+                    else:
+                        logger.debug(f"❌ No VirusTotal data for {hostname}")
+                except Exception as e:
+                    logger.warning(f"⚠️ Error querying VirusTotal for domain {hostname}: {e}")
 
+            logger.info(f"✅ VirusTotal lookups complete: {vt_success_count}/{total_resources} successful")
+        else:
+            logger.info("⚠️ Skipping VirusTotal lookups (no API key)")
+
+        # Final external lookup summary
+        ext_stats = {
+            'reverse_dns': len(self.data.reverse_dns),
+            'shodan_results': len(self.data.shodan_results),
+            'virustotal_results': len(self.data.virustotal_results)
+        }
+        logger.info(f"📊 External lookups summary: {ext_stats}")

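Note: the engine can be driven directly, mirroring what the CLI wires up. A minimal sketch (the `recon` package name is assumed; network access required):

    from recon.config import Config
    from recon.reconnaissance import ReconnaissanceEngine

    config = Config.from_args(max_depth=1)
    config.setup_logging(cli_mode=True)

    engine = ReconnaissanceEngine(config)
    engine.set_progress_callback(
        lambda msg, pct=None: print(f"[{pct:3d}%] {msg}" if pct is not None else f"  {msg}")
    )

    data = engine.run_reconnaissance('example.com')
    print(data.get_stats())
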
View File

@ -3,10 +3,14 @@
import requests import requests
import time import time
import logging
from typing import Optional, Dict, Any, List from typing import Optional, Dict, Any, List
from .data_structures import ShodanResult from .data_structures import ShodanResult
from .config import Config from .config import Config
# Module logger
logger = logging.getLogger(__name__)
class ShodanClient: class ShodanClient:
"""Shodan API client.""" """Shodan API client."""
@ -16,6 +20,8 @@ class ShodanClient:
self.api_key = api_key self.api_key = api_key
self.config = config self.config = config
self.last_request = 0 self.last_request = 0
logger.info(f"🕵️ Shodan client initialized with API key ending in: ...{api_key[-4:] if len(api_key) > 4 else api_key}")
def _rate_limit(self): def _rate_limit(self):
"""Apply rate limiting for Shodan.""" """Apply rate limiting for Shodan."""
@ -24,7 +30,9 @@ class ShodanClient:
min_interval = 1.0 / self.config.SHODAN_RATE_LIMIT min_interval = 1.0 / self.config.SHODAN_RATE_LIMIT
if time_since_last < min_interval: if time_since_last < min_interval:
time.sleep(min_interval - time_since_last) sleep_time = min_interval - time_since_last
logger.debug(f"⏸️ Shodan rate limiting: sleeping for {sleep_time:.2f}s")
time.sleep(sleep_time)
self.last_request = time.time() self.last_request = time.time()
@ -32,11 +40,20 @@ class ShodanClient:
"""Lookup IP address information.""" """Lookup IP address information."""
self._rate_limit() self._rate_limit()
logger.debug(f"🔍 Querying Shodan for IP: {ip}")
try: try:
url = f"{self.BASE_URL}/shodan/host/{ip}" url = f"{self.BASE_URL}/shodan/host/{ip}"
params = {'key': self.api_key} params = {'key': self.api_key}
response = requests.get(url, params=params, timeout=self.config.HTTP_TIMEOUT) response = requests.get(
url,
params=params,
timeout=self.config.HTTP_TIMEOUT,
headers={'User-Agent': 'DNS-Recon-Tool/1.0'}
)
logger.debug(f"📡 Shodan API response for {ip}: {response.status_code}")
if response.status_code == 200: if response.status_code == 200:
data = response.json() data = response.json()
@ -51,10 +68,10 @@ class ShodanClient:
services[str(port)] = { services[str(port)] = {
'product': service.get('product', ''), 'product': service.get('product', ''),
'version': service.get('version', ''), 'version': service.get('version', ''),
'banner': service.get('data', '').strip()[:200] # Limit banner size 'banner': service.get('data', '').strip()[:200] if service.get('data') else ''
} }
return ShodanResult( result = ShodanResult(
ip=ip, ip=ip,
ports=sorted(list(set(ports))), ports=sorted(list(set(ports))),
services=services, services=services,
@ -62,20 +79,43 @@ class ShodanClient:
country=data.get('country_name') country=data.get('country_name')
) )
logger.info(f"✅ Shodan result for {ip}: {len(result.ports)} ports, org: {result.organization}")
return result
elif response.status_code == 404: elif response.status_code == 404:
return None # IP not found in Shodan logger.debug(f" IP {ip} not found in Shodan database")
return None
elif response.status_code == 401:
logger.error("❌ Shodan API key is invalid or expired")
return None
elif response.status_code == 429:
logger.warning("⚠️ Shodan API rate limit exceeded")
return None
else: else:
print(f"Shodan API error for {ip}: {response.status_code}") logger.warning(f"⚠️ Shodan API error for {ip}: HTTP {response.status_code}")
try:
error_data = response.json()
logger.debug(f"Shodan error details: {error_data}")
except:
pass
return None return None
except requests.exceptions.Timeout:
logger.warning(f"⏱️ Shodan query timeout for {ip}")
return None
except requests.exceptions.RequestException as e:
logger.error(f"🌐 Shodan network error for {ip}: {e}")
return None
except Exception as e: except Exception as e:
print(f"Error querying Shodan for {ip}: {e}") logger.error(f"❌ Unexpected error querying Shodan for {ip}: {e}")
return None return None
     def search_domain(self, domain: str) -> List[str]:
         """Search for IPs associated with a domain."""
         self._rate_limit()

+        logger.debug(f"🔍 Searching Shodan for domain: {domain}")
+
         try:
             url = f"{self.BASE_URL}/shodan/host/search"
             params = {
@@ -84,7 +124,14 @@ class ShodanClient:
                 'limit': 100
             }

-            response = requests.get(url, params=params, timeout=self.config.HTTP_TIMEOUT)
+            response = requests.get(
+                url,
+                params=params,
+                timeout=self.config.HTTP_TIMEOUT,
+                headers={'User-Agent': 'DNS-Recon-Tool/1.0'}
+            )
+
+            logger.debug(f"📡 Shodan search response for {domain}: {response.status_code}")

             if response.status_code == 200:
                 data = response.json()
@@ -95,11 +142,25 @@ class ShodanClient:
                     if ip:
                         ips.append(ip)

-                return list(set(ips))
+                unique_ips = list(set(ips))
+                logger.info(f"🔍 Shodan search for {domain} found {len(unique_ips)} unique IPs")
+                return unique_ips
+            elif response.status_code == 401:
+                logger.error("❌ Shodan API key is invalid for search")
+                return []
+            elif response.status_code == 429:
+                logger.warning("⚠️ Shodan search rate limit exceeded")
+                return []
             else:
-                print(f"Shodan search error for {domain}: {response.status_code}")
+                logger.warning(f"⚠️ Shodan search error for {domain}: HTTP {response.status_code}")
                 return []
+        except requests.exceptions.Timeout:
+            logger.warning(f"⏱️ Shodan search timeout for {domain}")
+            return []
+        except requests.exceptions.RequestException as e:
+            logger.error(f"🌐 Shodan search network error for {domain}: {e}")
+            return []
         except Exception as e:
-            print(f"Error searching Shodan for {domain}: {e}")
+            logger.error(f"❌ Unexpected error searching Shodan for {domain}: {e}")
             return []
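For reference, a hedged usage sketch of the client above. The import paths are hypothetical, and the Config attributes the client relies on (SHODAN_RATE_LIMIT, HTTP_TIMEOUT) are assumptions inferred from this diff, not verified against the rest of the repository:

# Hypothetical import paths; Config.from_args mirrors its use in web_app.py below.
from config import Config
from shodan_client import ShodanClient

config = Config.from_args(shodan_key="YOUR_SHODAN_KEY")
client = ShodanClient(api_key="YOUR_SHODAN_KEY", config=config)

# Find candidate IPs for the domain, then enrich each one.
for ip in client.search_domain("example.com"):
    result = client.lookup_ip(ip)
    if result:
        print(ip, result.ports, result.organization)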
View File
@ -2,10 +2,14 @@
"""Fetch and cache IANA TLD list.""" """Fetch and cache IANA TLD list."""
import requests import requests
from typing import List, Set import logging
from typing import List, Set, Optional
import os import os
import time import time
# Module logger
logger = logging.getLogger(__name__)
class TLDFetcher: class TLDFetcher:
"""Fetches and caches IANA TLD list.""" """Fetches and caches IANA TLD list."""
@ -15,54 +19,124 @@ class TLDFetcher:
def __init__(self): def __init__(self):
self._tlds: Optional[Set[str]] = None self._tlds: Optional[Set[str]] = None
logger.info("🌐 TLD fetcher initialized")
def get_tlds(self) -> Set[str]: def get_tlds(self) -> Set[str]:
"""Get list of TLDs, using cache if available.""" """Get list of TLDs, using cache if available."""
if self._tlds is None: if self._tlds is None:
logger.debug("🔍 Loading TLD list...")
self._tlds = self._load_tlds() self._tlds = self._load_tlds()
logger.info(f"✅ Loaded {len(self._tlds)} TLDs")
return self._tlds return self._tlds
def _load_tlds(self) -> Set[str]: def _load_tlds(self) -> Set[str]:
"""Load TLDs from cache or fetch from IANA.""" """Load TLDs from cache or fetch from IANA."""
if self._is_cache_valid(): if self._is_cache_valid():
logger.debug("📂 Loading TLDs from cache")
return self._load_from_cache() return self._load_from_cache()
return self._fetch_and_cache() else:
logger.info("🌐 Fetching fresh TLD list from IANA")
return self._fetch_and_cache()
def _is_cache_valid(self) -> bool: def _is_cache_valid(self) -> bool:
"""Check if cache file exists and is recent.""" """Check if cache file exists and is recent."""
if not os.path.exists(self.CACHE_FILE): if not os.path.exists(self.CACHE_FILE):
logger.debug("❌ TLD cache file does not exist")
return False return False
cache_age = time.time() - os.path.getmtime(self.CACHE_FILE) cache_age = time.time() - os.path.getmtime(self.CACHE_FILE)
return cache_age < self.CACHE_DURATION is_valid = cache_age < self.CACHE_DURATION
if is_valid:
logger.debug(f"✅ TLD cache is valid (age: {cache_age/3600:.1f} hours)")
else:
logger.debug(f"❌ TLD cache is expired (age: {cache_age/3600:.1f} hours)")
return is_valid
def _load_from_cache(self) -> Set[str]: def _load_from_cache(self) -> Set[str]:
"""Load TLDs from cache file.""" """Load TLDs from cache file."""
with open(self.CACHE_FILE, 'r') as f: try:
return set(line.strip().lower() for line in f if not line.startswith('#')) with open(self.CACHE_FILE, 'r', encoding='utf-8') as f:
tlds = set()
for line in f:
line = line.strip().lower()
if line and not line.startswith('#'):
tlds.add(line)
logger.info(f"📂 Loaded {len(tlds)} TLDs from cache")
return tlds
except Exception as e:
logger.error(f"❌ Error loading TLD cache: {e}")
# Fall back to fetching fresh data
return self._fetch_and_cache()
def _fetch_and_cache(self) -> Set[str]: def _fetch_and_cache(self) -> Set[str]:
"""Fetch TLDs from IANA and cache them.""" """Fetch TLDs from IANA and cache them."""
try: try:
response = requests.get(self.IANA_TLD_URL, timeout=10) logger.info(f"📡 Fetching TLD list from: {self.IANA_TLD_URL}")
response = requests.get(
self.IANA_TLD_URL,
timeout=30,
headers={'User-Agent': 'DNS-Recon-Tool/1.0'}
)
response.raise_for_status() response.raise_for_status()
tlds = set() tlds = set()
lines_processed = 0
for line in response.text.split('\n'): for line in response.text.split('\n'):
line = line.strip().lower() line = line.strip().lower()
if line and not line.startswith('#'): if line and not line.startswith('#'):
tlds.add(line) tlds.add(line)
lines_processed += 1
logger.info(f"✅ Fetched {len(tlds)} TLDs from IANA (processed {lines_processed} lines)")
# Cache the results # Cache the results
with open(self.CACHE_FILE, 'w') as f: try:
f.write(response.text) with open(self.CACHE_FILE, 'w', encoding='utf-8') as f:
f.write(response.text)
logger.info(f"💾 TLD list cached to {self.CACHE_FILE}")
except Exception as cache_error:
logger.warning(f"⚠️ Could not cache TLD list: {cache_error}")
return tlds return tlds
except requests.exceptions.Timeout:
logger.error("⏱️ Timeout fetching TLD list from IANA")
return self._get_fallback_tlds()
except requests.exceptions.RequestException as e:
logger.error(f"🌐 Network error fetching TLD list: {e}")
return self._get_fallback_tlds()
except Exception as e: except Exception as e:
print(f"Failed to fetch TLD list: {e}") logger.error(f"❌ Unexpected error fetching TLD list: {e}")
# Return a minimal set if fetch fails return self._get_fallback_tlds()
return {
'com', 'org', 'net', 'edu', 'gov', 'mil', 'int', def _get_fallback_tlds(self) -> Set[str]:
'co.uk', 'org.uk', 'ac.uk', 'de', 'fr', 'it', 'nl', 'be' """Return a minimal set of common TLDs if fetch fails."""
} logger.warning("⚠️ Using fallback TLD list")
fallback_tlds = {
# Generic top-level domains
'com', 'org', 'net', 'edu', 'gov', 'mil', 'int', 'info', 'biz', 'name',
# Country code top-level domains (major ones)
'us', 'uk', 'de', 'fr', 'it', 'es', 'nl', 'be', 'ch', 'at', 'se', 'no',
'dk', 'fi', 'pl', 'cz', 'hu', 'ro', 'bg', 'hr', 'si', 'sk', 'lt', 'lv',
'ee', 'ie', 'pt', 'gr', 'cy', 'mt', 'lu', 'is', 'li', 'ad', 'mc', 'sm',
'va', 'by', 'ua', 'md', 'ru', 'kz', 'kg', 'tj', 'tm', 'uz', 'am', 'az',
'ge', 'tr', 'il', 'jo', 'lb', 'sy', 'iq', 'ir', 'af', 'pk', 'in', 'lk',
'mv', 'bt', 'bd', 'np', 'mm', 'th', 'la', 'kh', 'vn', 'my', 'sg', 'bn',
'id', 'tl', 'ph', 'tw', 'hk', 'mo', 'cn', 'kp', 'kr', 'jp', 'mn',
# Common compound TLDs
'co.uk', 'org.uk', 'ac.uk', 'gov.uk', 'com.au', 'org.au', 'net.au',
'gov.au', 'edu.au', 'co.za', 'org.za', 'net.za', 'gov.za', 'ac.za',
'co.nz', 'org.nz', 'net.nz', 'govt.nz', 'ac.nz', 'co.jp', 'or.jp',
'ne.jp', 'go.jp', 'ac.jp', 'ad.jp', 'ed.jp', 'gr.jp', 'lg.jp'
}
logger.info(f"📋 Using {len(fallback_tlds)} fallback TLDs")
return fallback_tlds
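A short usage sketch of the fetcher: the first call downloads from IANA and writes the cache file, later calls within CACHE_DURATION read from disk, and any network failure degrades to the fallback set. The module name and logging setup are illustrative:

import logging
from tld_fetcher import TLDFetcher  # hypothetical module name

logging.basicConfig(level=logging.DEBUG)

fetcher = TLDFetcher()
tlds = fetcher.get_tlds()        # fetches from IANA, or loads the cache if fresh
print(len(tlds), 'com' in tlds)  # e.g. 1400+ entries, True

tlds_again = fetcher.get_tlds()  # served from the in-memory set, no I/O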
View File
@@ -3,11 +3,15 @@
 import requests
 import time
+import logging
 from datetime import datetime
 from typing import Optional

 from .data_structures import VirusTotalResult
 from .config import Config

+# Module logger
+logger = logging.getLogger(__name__)
+
 class VirusTotalClient:
     """VirusTotal API client."""
@@ -17,6 +21,8 @@ class VirusTotalClient:
         self.api_key = api_key
         self.config = config
         self.last_request = 0
+        logger.info(f"🛡️ VirusTotal client initialized with API key ending in: ...{api_key[-4:] if len(api_key) > 4 else api_key}")

     def _rate_limit(self):
         """Apply rate limiting for VirusTotal."""
@@ -25,7 +31,9 @@ class VirusTotalClient:
         min_interval = 1.0 / self.config.VIRUSTOTAL_RATE_LIMIT

         if time_since_last < min_interval:
-            time.sleep(min_interval - time_since_last)
+            sleep_time = min_interval - time_since_last
+            logger.debug(f"⏸️ VirusTotal rate limiting: sleeping for {sleep_time:.2f}s")
+            time.sleep(sleep_time)

         self.last_request = time.time()
@@ -33,6 +41,8 @@ class VirusTotalClient:
         """Lookup IP address reputation."""
         self._rate_limit()

+        logger.debug(f"🔍 Querying VirusTotal for IP: {ip}")
+
         try:
             url = f"{self.BASE_URL}/ip-address/report"
             params = {
@@ -40,34 +50,84 @@ class VirusTotalClient:
                 'ip': ip
             }

-            response = requests.get(url, params=params, timeout=self.config.HTTP_TIMEOUT)
+            response = requests.get(
+                url,
+                params=params,
+                timeout=self.config.HTTP_TIMEOUT,
+                headers={'User-Agent': 'DNS-Recon-Tool/1.0'}
+            )
+
+            logger.debug(f"📡 VirusTotal API response for IP {ip}: {response.status_code}")

             if response.status_code == 200:
                 data = response.json()
+                logger.debug(f"VirusTotal IP response data keys: {data.keys()}")

                 if data.get('response_code') == 1:
-                    return VirusTotalResult(
-                        resource=ip,
-                        positives=data.get('detected_urls', []) and len([
-                            url for url in data.get('detected_urls', [])
-                            if url.get('positives', 0) > 0
-                        ]) or 0,
-                        total=len(data.get('detected_urls', [])),
-                        scan_date=datetime.fromisoformat(
-                            data.get('scan_date', datetime.now().isoformat())
-                        ) if data.get('scan_date') else datetime.now(),
-                        permalink=data.get('permalink', '')
-                    )
+                    # Count detected URLs
+                    detected_urls = data.get('detected_urls', [])
+                    positives = sum(1 for url in detected_urls if url.get('positives', 0) > 0)
+                    total = len(detected_urls)
+
+                    # Parse scan date
+                    scan_date = datetime.now()
+                    if data.get('scan_date'):
+                        try:
+                            scan_date = datetime.fromisoformat(data['scan_date'].replace('Z', '+00:00'))
+                        except ValueError:
+                            try:
+                                scan_date = datetime.strptime(data['scan_date'], '%Y-%m-%d %H:%M:%S')
+                            except ValueError:
+                                logger.debug(f"Could not parse scan_date: {data.get('scan_date')}")
+
+                    result = VirusTotalResult(
+                        resource=ip,
+                        positives=positives,
+                        total=total,
+                        scan_date=scan_date,
+                        permalink=data.get('permalink', f'https://www.virustotal.com/gui/ip-address/{ip}')
+                    )
+                    logger.info(f"✅ VirusTotal result for IP {ip}: {result.positives}/{result.total} detections")
+                    return result
+                elif data.get('response_code') == 0:
+                    logger.debug(f"IP {ip} not found in VirusTotal database")
+                    return None
+                else:
+                    logger.debug(f"VirusTotal returned response_code: {data.get('response_code')}")
+                    return None
+            elif response.status_code == 204:
+                logger.warning("⚠️ VirusTotal API rate limit exceeded")
+                return None
+            elif response.status_code == 403:
+                logger.error("❌ VirusTotal API key is invalid or lacks permissions")
+                return None
+            else:
+                logger.warning(f"⚠️ VirusTotal API error for IP {ip}: HTTP {response.status_code}")
+                try:
+                    error_data = response.json()
+                    logger.debug(f"VirusTotal error details: {error_data}")
+                except Exception:
+                    pass
+                return None
+        except requests.exceptions.Timeout:
+            logger.warning(f"⏱️ VirusTotal query timeout for IP {ip}")
+            return None
+        except requests.exceptions.RequestException as e:
+            logger.error(f"🌐 VirusTotal network error for IP {ip}: {e}")
+            return None
         except Exception as e:
-            print(f"Error querying VirusTotal for {ip}: {e}")
-
-        return None
+            logger.error(f"❌ Unexpected error querying VirusTotal for IP {ip}: {e}")
+            return None
     def lookup_domain(self, domain: str) -> Optional[VirusTotalResult]:
         """Lookup domain reputation."""
         self._rate_limit()

+        logger.debug(f"🔍 Querying VirusTotal for domain: {domain}")
+
         try:
             url = f"{self.BASE_URL}/domain/report"
             params = {
@@ -75,26 +135,80 @@ class VirusTotalClient:
                 'domain': domain
             }

-            response = requests.get(url, params=params, timeout=self.config.HTTP_TIMEOUT)
+            response = requests.get(
+                url,
+                params=params,
+                timeout=self.config.HTTP_TIMEOUT,
+                headers={'User-Agent': 'DNS-Recon-Tool/1.0'}
+            )
+
+            logger.debug(f"📡 VirusTotal API response for domain {domain}: {response.status_code}")

             if response.status_code == 200:
                 data = response.json()
+                logger.debug(f"VirusTotal domain response data keys: {data.keys()}")

                 if data.get('response_code') == 1:
-                    return VirusTotalResult(
-                        resource=domain,
-                        positives=data.get('detected_urls', []) and len([
-                            url for url in data.get('detected_urls', [])
-                            if url.get('positives', 0) > 0
-                        ]) or 0,
-                        total=len(data.get('detected_urls', [])),
-                        scan_date=datetime.fromisoformat(
-                            data.get('scan_date', datetime.now().isoformat())
-                        ) if data.get('scan_date') else datetime.now(),
-                        permalink=data.get('permalink', '')
-                    )
+                    # Count detected URLs
+                    detected_urls = data.get('detected_urls', [])
+                    positives = sum(1 for url in detected_urls if url.get('positives', 0) > 0)
+                    total = len(detected_urls)
+
+                    # Also check for malicious/suspicious categories
+                    categories = data.get('categories', [])
+                    if any(cat in ['malicious', 'suspicious', 'phishing', 'malware']
+                           for cat in categories):
+                        positives += 1
+
+                    # Parse scan date
+                    scan_date = datetime.now()
+                    if data.get('scan_date'):
+                        try:
+                            scan_date = datetime.fromisoformat(data['scan_date'].replace('Z', '+00:00'))
+                        except ValueError:
+                            try:
+                                scan_date = datetime.strptime(data['scan_date'], '%Y-%m-%d %H:%M:%S')
+                            except ValueError:
+                                logger.debug(f"Could not parse scan_date: {data.get('scan_date')}")
+
+                    result = VirusTotalResult(
+                        resource=domain,
+                        positives=positives,
+                        total=max(total, 1),  # Ensure total is at least 1
+                        scan_date=scan_date,
+                        permalink=data.get('permalink', f'https://www.virustotal.com/gui/domain/{domain}')
+                    )
+                    logger.info(f"✅ VirusTotal result for domain {domain}: {result.positives}/{result.total} detections")
+                    return result
+                elif data.get('response_code') == 0:
+                    logger.debug(f"Domain {domain} not found in VirusTotal database")
+                    return None
+                else:
+                    logger.debug(f"VirusTotal returned response_code: {data.get('response_code')}")
+                    return None
+            elif response.status_code == 204:
+                logger.warning("⚠️ VirusTotal API rate limit exceeded")
+                return None
+            elif response.status_code == 403:
+                logger.error("❌ VirusTotal API key is invalid or lacks permissions")
+                return None
+            else:
+                logger.warning(f"⚠️ VirusTotal API error for domain {domain}: HTTP {response.status_code}")
+                try:
+                    error_data = response.json()
+                    logger.debug(f"VirusTotal error details: {error_data}")
+                except Exception:
+                    pass
+                return None
+        except requests.exceptions.Timeout:
+            logger.warning(f"⏱️ VirusTotal query timeout for domain {domain}")
+            return None
+        except requests.exceptions.RequestException as e:
+            logger.error(f"🌐 VirusTotal network error for domain {domain}: {e}")
+            return None
         except Exception as e:
-            print(f"Error querying VirusTotal for {domain}: {e}")
-
-        return None
+            logger.error(f"❌ Unexpected error querying VirusTotal for domain {domain}: {e}")
+            return None
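The scan-date fallback chain (ISO 8601 with a trailing 'Z', then 'YYYY-MM-DD HH:MM:SS', then now()) is duplicated verbatim in lookup_ip and lookup_domain. A possible helper that extracts it, offered as a sketch rather than part of the commit:

from datetime import datetime
from typing import Optional

def parse_scan_date(raw: Optional[str]) -> datetime:
    """Parse a VirusTotal scan_date string, falling back to now() (hypothetical helper)."""
    if not raw:
        return datetime.now()
    try:
        # ISO 8601, tolerating a trailing 'Z'
        return datetime.fromisoformat(raw.replace('Z', '+00:00'))
    except ValueError:
        try:
            return datetime.strptime(raw, '%Y-%m-%d %H:%M:%S')
        except ValueError:
            return datetime.now()

print(parse_scan_date('2024-01-15 10:30:00'))
print(parse_scan_date('2024-01-15T10:30:00Z'))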
View File
@@ -4,10 +4,14 @@
 from flask import Flask, render_template, request, jsonify, send_from_directory
 import threading
 import time
+import logging

 from .config import Config
 from .reconnaissance import ReconnaissanceEngine
 from .report_generator import ReportGenerator

+# Set up logging for this module
+logger = logging.getLogger(__name__)
+
 # Global variables for tracking ongoing scans
 active_scans = {}
 scan_lock = threading.Lock()
@@ -20,6 +24,10 @@ def create_app(config: Config):
     app.config['SECRET_KEY'] = 'recon-tool-secret-key'

+    # Set up logging for web app
+    config.setup_logging(cli_mode=False)
+    logger.info("🌐 Web application initialized")
+
     @app.route('/')
     def index():
         """Main page."""
@@ -28,52 +36,68 @@ def create_app(config: Config):
     @app.route('/api/scan', methods=['POST'])
     def start_scan():
         """Start a new reconnaissance scan."""
-        data = request.get_json()
-        target = data.get('target')
-        scan_config = Config.from_args(
-            shodan_key=data.get('shodan_key'),
-            virustotal_key=data.get('virustotal_key'),
-            max_depth=data.get('max_depth', 2)
-        )
+        try:
+            data = request.get_json()
+            target = data.get('target')
+            scan_config = Config.from_args(
+                shodan_key=data.get('shodan_key'),
+                virustotal_key=data.get('virustotal_key'),
+                max_depth=data.get('max_depth', 2)
+            )

-        if not target:
-            return jsonify({'error': 'Target is required'}), 400
+            if not target:
+                logger.warning("⚠️ Scan request missing target")
+                return jsonify({'error': 'Target is required'}), 400

-        # Generate scan ID
-        scan_id = f"{target}_{int(time.time())}"
+            # Generate scan ID
+            scan_id = f"{target}_{int(time.time())}"
+            logger.info(f"🚀 Starting new scan: {scan_id} for target: {target}")

-        # Initialize scan data
-        with scan_lock:
-            active_scans[scan_id] = {
-                'status': 'starting',
-                'progress': 0,
-                'message': 'Initializing...',
-                'data': None,
-                'error': None
-            }
+            # Initialize scan data
+            with scan_lock:
+                active_scans[scan_id] = {
+                    'status': 'starting',
+                    'progress': 0,
+                    'message': 'Initializing...',
+                    'data': None,
+                    'error': None,
+                    'live_stats': {
+                        'hostnames': 0,
+                        'ip_addresses': 0,
+                        'dns_records': 0,
+                        'certificates': 0,
+                        'shodan_results': 0,
+                        'virustotal_results': 0
+                    },
+                    'latest_discoveries': []
+                }

-        # Start reconnaissance in background thread
-        thread = threading.Thread(
-            target=run_reconnaissance_background,
-            args=(scan_id, target, scan_config)
-        )
-        thread.daemon = True
-        thread.start()
+            # Start reconnaissance in background thread
+            thread = threading.Thread(
+                target=run_reconnaissance_background,
+                args=(scan_id, target, scan_config)
+            )
+            thread.daemon = True
+            thread.start()

-        return jsonify({'scan_id': scan_id})
+            return jsonify({'scan_id': scan_id})
+        except Exception as e:
+            logger.error(f"❌ Error starting scan: {e}", exc_info=True)
+            return jsonify({'error': str(e)}), 500
     @app.route('/api/scan/<scan_id>/status')
     def get_scan_status(scan_id):
-        """Get scan status and progress."""
+        """Get scan status and progress with live discoveries."""
         with scan_lock:
             if scan_id not in active_scans:
                 return jsonify({'error': 'Scan not found'}), 404

             scan_data = active_scans[scan_id].copy()

-            # Convert ReconData object to a dict to make it JSON serializable
-            if scan_data.get('data'):
-                scan_data['data'] = scan_data['data'].to_dict()
+            # Don't include the full data object in status (too large)
+            if 'data' in scan_data:
+                del scan_data['data']

         return jsonify(scan_data)
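These two routes can be exercised end to end with a small script: POST the target, then poll the status endpoint the way the frontend does. The base URL assumes a default Flask dev server on localhost:5000:

import time
import requests

BASE = "http://localhost:5000"  # assumed dev-server address

resp = requests.post(f"{BASE}/api/scan", json={"target": "example.com", "max_depth": 2})
scan_id = resp.json()["scan_id"]

while True:
    status = requests.get(f"{BASE}/api/scan/{scan_id}/status").json()
    print(status.get("progress"), status.get("message"))
    if status.get("status") in ("completed", "error"):
        break
    time.sleep(2)

if status.get("status") == "completed":
    report = requests.get(f"{BASE}/api/scan/{scan_id}/report").json()
    print(report["text_report"][:500])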
@@ -88,14 +112,42 @@ def create_app(config: Config):
         if scan_data['status'] != 'completed' or not scan_data['data']:
             return jsonify({'error': 'Scan not completed'}), 400

-        # Generate report
-        report_gen = ReportGenerator(scan_data['data'])
-
-        return jsonify({
-            'json_report': scan_data['data'].to_dict(),  # Use to_dict for a clean JSON object
-            'text_report': report_gen.generate_text_report()
-        })
+        try:
+            # Generate report
+            report_gen = ReportGenerator(scan_data['data'])
+
+            return jsonify({
+                'json_report': scan_data['data'].to_json(),
+                'text_report': report_gen.generate_text_report()
+            })
+        except Exception as e:
+            logger.error(f"❌ Error generating report for {scan_id}: {e}", exc_info=True)
+            return jsonify({'error': f'Failed to generate report: {str(e)}'}), 500
+    @app.route('/api/scan/<scan_id>/live-data')
+    def get_live_scan_data(scan_id):
+        """Get current reconnaissance data (for real-time updates)."""
+        with scan_lock:
+            if scan_id not in active_scans:
+                return jsonify({'error': 'Scan not found'}), 404
+
+            scan_data = active_scans[scan_id]
+
+            if not scan_data['data']:
+                return jsonify({
+                    'hostnames': [],
+                    'ip_addresses': [],
+                    'stats': scan_data['live_stats']
+                })
+
+            # Return current discoveries
+            return jsonify({
+                'hostnames': sorted(list(scan_data['data'].hostnames)),
+                'ip_addresses': sorted(list(scan_data['data'].ip_addresses)),
+                'stats': scan_data['data'].get_stats(),
+                'latest_discoveries': scan_data.get('latest_discoveries', [])
+            })

     return app
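The live-data endpoint returns only the lightweight fields the frontend renders. A polling sketch under the same assumed base URL; the scan_id value here is a placeholder for whatever POST /api/scan returned:

import requests

BASE = "http://localhost:5000"               # assumed dev-server address
scan_id = "example.com_1700000000"           # placeholder; returned by POST /api/scan

live = requests.get(f"{BASE}/api/scan/{scan_id}/live-data").json()
print(live["stats"])                         # counters: hostnames, ip_addresses, ...
print(live["hostnames"][-10:])               # most recent hostnames, as the UI shows
print(live.get("latest_discoveries", []))    # rolling log of progress messages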
@@ -109,8 +161,29 @@ def run_reconnaissance_background(scan_id: str, target: str, config: Config):
             active_scans[scan_id]['message'] = message
             if percentage is not None:
                 active_scans[scan_id]['progress'] = percentage

+            # Update live stats if we have data
+            if active_scans[scan_id]['data']:
+                active_scans[scan_id]['live_stats'] = active_scans[scan_id]['data'].get_stats()
+
+            # Add to latest discoveries (keep last 10)
+            if 'latest_discoveries' not in active_scans[scan_id]:
+                active_scans[scan_id]['latest_discoveries'] = []
+
+            active_scans[scan_id]['latest_discoveries'].append({
+                'timestamp': time.time(),
+                'message': message
+            })
+
+            # Keep only last 10 discoveries
+            active_scans[scan_id]['latest_discoveries'] = \
+                active_scans[scan_id]['latest_discoveries'][-10:]
+
+        logger.info(f"[{scan_id}] {message} ({percentage}%)" if percentage else f"[{scan_id}] {message}")
+
     try:
+        logger.info(f"🔧 Initializing reconnaissance engine for scan: {scan_id}")
+
         # Initialize engine
         engine = ReconnaissanceEngine(config)
         engine.set_progress_callback(update_progress)
@@ -119,21 +192,29 @@ def run_reconnaissance_background(scan_id: str, target: str, config: Config):
         with scan_lock:
             active_scans[scan_id]['status'] = 'running'

+        logger.info(f"🚀 Starting reconnaissance for: {target}")
+
         # Run reconnaissance
         data = engine.run_reconnaissance(target)

+        logger.info(f"✅ Reconnaissance completed for scan: {scan_id}")
+
         # Update with results
         with scan_lock:
             active_scans[scan_id]['status'] = 'completed'
             active_scans[scan_id]['progress'] = 100
             active_scans[scan_id]['message'] = 'Reconnaissance completed'
             active_scans[scan_id]['data'] = data
+            active_scans[scan_id]['live_stats'] = data.get_stats()
+
+        # Log final statistics
+        final_stats = data.get_stats()
+        logger.info(f"📊 Final stats for {scan_id}: {final_stats}")

     except Exception as e:
+        logger.error(f"❌ Error in reconnaissance for {scan_id}: {e}", exc_info=True)
+
         # Handle errors
         with scan_lock:
             active_scans[scan_id]['status'] = 'error'
             active_scans[scan_id]['error'] = str(e)
             active_scans[scan_id]['message'] = f'Error: {str(e)}'
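The engine reports progress through the callback registered via set_progress_callback. Judging from update_progress above, the assumed contract is a message string plus an optional percentage; a minimal stand-in that satisfies it:

from typing import Optional

def print_progress(message: str, percentage: Optional[int] = None) -> None:
    """Signature assumed from run_reconnaissance_background above."""
    if percentage is not None:
        print(f"[{percentage:3d}%] {message}")
    else:
        print(f"[ ...] {message}")

# engine.set_progress_callback(print_progress)  # as the background runner does
print_progress("Resolving DNS records", 40)
print_progress("Querying crt.sh")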
View File
@@ -1,15 +1,73 @@
-// DNS Reconnaissance Tool - Frontend JavaScript
+// DNS Reconnaissance Tool - Enhanced Frontend JavaScript with Real-time Updates

 class ReconTool {
     constructor() {
         this.currentScanId = null;
         this.pollInterval = null;
+        this.liveDataInterval = null;
         this.currentReport = null;
         this.init();
     }

     init() {
         this.bindEvents();
+        this.setupRealtimeElements();
+    }
+
+    setupRealtimeElements() {
+        // Create live discovery container if it doesn't exist
+        if (!document.getElementById('liveDiscoveries')) {
+            const progressSection = document.getElementById('progressSection');
+            const liveDiv = document.createElement('div');
+            liveDiv.id = 'liveDiscoveries';
+            liveDiv.innerHTML = `
+                <div class="live-discoveries" style="display: none;">
+                    <h3>🔍 Live Discoveries</h3>
+                    <div class="stats-grid">
+                        <div class="stat-item">
+                            <span class="stat-label">Hostnames:</span>
+                            <span id="liveHostnames" class="stat-value">0</span>
+                        </div>
+                        <div class="stat-item">
+                            <span class="stat-label">IP Addresses:</span>
+                            <span id="liveIPs" class="stat-value">0</span>
+                        </div>
+                        <div class="stat-item">
+                            <span class="stat-label">DNS Records:</span>
+                            <span id="liveDNS" class="stat-value">0</span>
+                        </div>
+                        <div class="stat-item">
+                            <span class="stat-label">Certificates:</span>
+                            <span id="liveCerts" class="stat-value">0</span>
+                        </div>
+                        <div class="stat-item">
+                            <span class="stat-label">Shodan Results:</span>
+                            <span id="liveShodan" class="stat-value">0</span>
+                        </div>
+                        <div class="stat-item">
+                            <span class="stat-label">VirusTotal:</span>
+                            <span id="liveVT" class="stat-value">0</span>
+                        </div>
+                    </div>
+                    <div class="discoveries-list">
+                        <h4>📋 Recent Discoveries</h4>
+                        <div id="recentHostnames" class="discovery-section">
+                            <strong>Hostnames:</strong>
+                            <div class="hostname-list"></div>
+                        </div>
+                        <div id="recentIPs" class="discovery-section">
+                            <strong>IP Addresses:</strong>
+                            <div class="ip-list"></div>
+                        </div>
+                        <div id="activityLog" class="discovery-section">
+                            <strong>Activity Log:</strong>
+                            <div class="activity-list"></div>
+                        </div>
+                    </div>
+                </div>
+            `;
+            progressSection.appendChild(liveDiv);
+        }
     }
     bindEvents() {
@@ -69,6 +127,8 @@ class ReconTool {
             this.showProgressSection();
             this.updateProgress(0, 'Starting scan...');

+            console.log('🚀 Starting scan with data:', scanData);
+
             const response = await fetch('/api/scan', {
                 method: 'POST',
                 headers: {
@@ -88,15 +148,19 @@ class ReconTool {
             }

             this.currentScanId = result.scan_id;
+            console.log('✅ Scan started with ID:', this.currentScanId);
+
             this.startPolling();
+            this.startLiveDataPolling();

         } catch (error) {
+            console.error('❌ Failed to start scan:', error);
             this.showError(`Failed to start scan: ${error.message}`);
         }
     }

     startPolling() {
-        // Poll every 2 seconds for updates
+        // Poll every 2 seconds for status updates
         this.pollInterval = setInterval(() => {
             this.checkScanStatus();
         }, 2000);
@@ -104,6 +168,22 @@ class ReconTool {
         // Also check immediately
         this.checkScanStatus();
     }

+    startLiveDataPolling() {
+        // Poll every 3 seconds for live data updates
+        this.liveDataInterval = setInterval(() => {
+            this.updateLiveData();
+        }, 3000);
+
+        // Show the live discoveries section
+        const liveSection = document.querySelector('.live-discoveries');
+        if (liveSection) {
+            liveSection.style.display = 'block';
+        }
+
+        // Also update immediately
+        this.updateLiveData();
+    }
+
     async checkScanStatus() {
         if (!this.currentScanId) {
@@ -125,9 +205,15 @@ class ReconTool {
             // Update progress
             this.updateProgress(status.progress, status.message);

+            // Update live stats
+            if (status.live_stats) {
+                this.updateLiveStats(status.live_stats);
+            }
+
             // Check if completed
             if (status.status === 'completed') {
+                console.log('✅ Scan completed');
                 this.stopPolling();
                 await this.loadScanReport();
             } else if (status.status === 'error') {
@@ -136,13 +222,101 @@ class ReconTool {
             }
         } catch (error) {
+            console.error('❌ Error checking scan status:', error);
             this.stopPolling();
             this.showError(`Error checking scan status: ${error.message}`);
         }
     }

+    async updateLiveData() {
+        if (!this.currentScanId) {
+            return;
+        }
+
+        try {
+            const response = await fetch(`/api/scan/${this.currentScanId}/live-data`);
+            if (!response.ok) {
+                return; // Silently fail for live data
+            }
+
+            const data = await response.json();
+            if (data.error) {
+                return; // Silently fail for live data
+            }
+
+            // Update live discoveries
+            this.updateLiveDiscoveries(data);
+        } catch (error) {
+            // Silently fail for live data updates
+            console.debug('Live data update failed:', error);
+        }
+    }
+
+    updateLiveStats(stats) {
+        // Update the live statistics counters
+        const statElements = {
+            'liveHostnames': stats.hostnames || 0,
+            'liveIPs': stats.ip_addresses || 0,
+            'liveDNS': stats.dns_records || 0,
+            'liveCerts': stats.certificates || 0,
+            'liveShodan': stats.shodan_results || 0,
+            'liveVT': stats.virustotal_results || 0
+        };
+
+        Object.entries(statElements).forEach(([elementId, value]) => {
+            const element = document.getElementById(elementId);
+            if (element) {
+                // Compare before assigning, so the highlight fires only on change
+                if (element.textContent !== value.toString()) {
+                    element.textContent = value;
+                    element.style.backgroundColor = '#ff9900';
+                    setTimeout(() => {
+                        element.style.backgroundColor = '';
+                    }, 1000);
+                }
+            }
+        });
+    }
+
+    updateLiveDiscoveries(data) {
+        // Update hostnames list
+        const hostnameList = document.querySelector('#recentHostnames .hostname-list');
+        if (hostnameList && data.hostnames) {
+            // Show last 10 hostnames
+            const recentHostnames = data.hostnames.slice(-10);
+            hostnameList.innerHTML = recentHostnames.map(hostname =>
+                `<span class="discovery-item">${hostname}</span>`
+            ).join('');
+        }
+
+        // Update IP addresses list
+        const ipList = document.querySelector('#recentIPs .ip-list');
+        if (ipList && data.ip_addresses) {
+            // Show last 10 IPs
+            const recentIPs = data.ip_addresses.slice(-10);
+            ipList.innerHTML = recentIPs.map(ip =>
+                `<span class="discovery-item">${ip}</span>`
+            ).join('');
+        }
+
+        // Update activity log
+        const activityList = document.querySelector('#activityLog .activity-list');
+        if (activityList && data.latest_discoveries) {
+            const activities = data.latest_discoveries.slice(-5); // Last 5 activities
+            activityList.innerHTML = activities.map(activity => {
+                const time = new Date(activity.timestamp * 1000).toLocaleTimeString();
+                return `<div class="activity-item">[${time}] ${activity.message}</div>`;
+            }).join('');
+        }
+    }
     async loadScanReport() {
         try {
+            console.log('📄 Loading scan report...');
             const response = await fetch(`/api/scan/${this.currentScanId}/report`);

             if (!response.ok) {
@@ -156,10 +330,12 @@ class ReconTool {
             }

             this.currentReport = report;
+            console.log('✅ Report loaded successfully');
             this.showResultsSection();
             this.showReport('text'); // Default to text view

         } catch (error) {
+            console.error('❌ Error loading report:', error);
             this.showError(`Error loading report: ${error.message}`);
         }
     }
@@ -169,6 +345,10 @@ class ReconTool {
             clearInterval(this.pollInterval);
             this.pollInterval = null;
         }
+
+        if (this.liveDataInterval) {
+            clearInterval(this.liveDataInterval);
+            this.liveDataInterval = null;
+        }
     }

     showProgressSection() {
@@ -181,6 +361,12 @@ class ReconTool {
         document.getElementById('scanForm').style.display = 'none';
         document.getElementById('progressSection').style.display = 'none';
         document.getElementById('resultsSection').style.display = 'block';
+
+        // Hide live discoveries in results section
+        const liveSection = document.querySelector('.live-discoveries');
+        if (liveSection) {
+            liveSection.style.display = 'none';
+        }
     }

     resetToForm() {
@@ -192,6 +378,12 @@ class ReconTool {
         document.getElementById('progressSection').style.display = 'none';
         document.getElementById('resultsSection').style.display = 'none';

+        // Hide live discoveries
+        const liveSection = document.querySelector('.live-discoveries');
+        if (liveSection) {
+            liveSection.style.display = 'none';
+        }
+
         // Clear form
         document.getElementById('target').value = '';
         document.getElementById('shodanKey').value = '';
@@ -227,9 +419,16 @@ class ReconTool {
         if (type === 'json') {
             // Show JSON report
             try {
-                const jsonData = JSON.parse(this.currentReport.json_report);
+                // The json_report may arrive as a string or an already-parsed object
+                let jsonData;
+                if (typeof this.currentReport.json_report === 'string') {
+                    jsonData = JSON.parse(this.currentReport.json_report);
+                } else {
+                    jsonData = this.currentReport.json_report;
+                }
                 reportContent.textContent = JSON.stringify(jsonData, null, 2);
             } catch (e) {
+                console.error('Error parsing JSON report:', e);
                 reportContent.textContent = this.currentReport.json_report;
             }
@@ -252,7 +451,9 @@ class ReconTool {
         let content, filename, mimeType;

         if (type === 'json') {
-            content = this.currentReport.json_report;
+            content = typeof this.currentReport.json_report === 'string'
+                ? this.currentReport.json_report
+                : JSON.stringify(this.currentReport.json_report, null, 2);
             filename = `recon-report-${this.currentScanId}.json`;
             mimeType = 'application/json';
         } else {
@@ -276,5 +477,6 @@ class ReconTool {
 // Initialize the application when DOM is loaded
 document.addEventListener('DOMContentLoaded', () => {
+    console.log('🌐 DNS Reconnaissance Tool initialized');
     new ReconTool();
 });
View File
@@ -211,6 +211,40 @@ header p {
     word-wrap: break-word;
 }

+.hostname-list, .ip-list {
+    display: flex;
+    flex-wrap: wrap;
+    gap: 5px;
+}
+
+.discovery-item {
+    background: #2a2a2a;
+    color: #00ff41;
+    padding: 2px 6px;
+    border-radius: 2px;
+    font-family: 'Courier New', monospace;
+    font-size: 0.8rem;
+    border: 1px solid #444;
+}
+
+.activity-list {
+    max-height: 150px;
+    overflow-y: auto;
+}
+
+.activity-item {
+    color: #a0a0a0;
+    font-family: 'Courier New', monospace;
+    font-size: 0.8rem;
+    padding: 2px 0;
+    border-bottom: 1px solid #333;
+}
+
+.activity-item:last-child {
+    border-bottom: none;
+}
+
 /* Responsive design adjustments */
 @media (max-width: 768px) {
     .container {
@@ -240,6 +274,23 @@ header p {
         flex: 1;
         min-width: 120px;
     }

+    .stats-grid {
+        grid-template-columns: repeat(2, 1fr);
+        gap: 10px;
+    }
+
+    .stat-item {
+        padding: 6px 8px;
+    }
+
+    .stat-label, .stat-value {
+        font-size: 0.8rem;
+    }
+
+    .hostname-list, .ip-list {
+        flex-direction: column;
+        align-items: flex-start;
+    }
 }
 /* Tactical loading spinner */
@@ -253,6 +304,79 @@ header p {
     animation: spin 1s linear infinite;
 }

+.live-discoveries {
+    background: rgba(0, 20, 0, 0.6);
+    border: 1px solid #00ff41;
+    border-radius: 4px;
+    padding: 20px;
+    margin-top: 20px;
+}
+
+.live-discoveries h3 {
+    color: #00ff41;
+    margin-bottom: 15px;
+    text-transform: uppercase;
+    letter-spacing: 1px;
+}
+
+.stats-grid {
+    display: grid;
+    grid-template-columns: repeat(auto-fit, minmax(150px, 1fr));
+    gap: 15px;
+    margin-bottom: 20px;
+}
+
+.stat-item {
+    display: flex;
+    justify-content: space-between;
+    align-items: center;
+    padding: 8px 12px;
+    background: rgba(0, 0, 0, 0.5);
+    border: 1px solid #333;
+    border-radius: 2px;
+}
+
+.stat-label {
+    color: #a0a0a0;
+    font-size: 0.9rem;
+}
+
+.stat-value {
+    color: #00ff41;
+    font-weight: bold;
+    font-family: 'Courier New', monospace;
+    transition: background-color 0.3s ease;
+}
+
+.discoveries-list {
+    margin-top: 20px;
+}
+
+.discoveries-list h4 {
+    color: #ff9900;
+    margin-bottom: 15px;
+    border-bottom: 1px solid #444;
+    padding-bottom: 5px;
+}
+
+.discovery-section {
+    margin-bottom: 15px;
+    padding: 10px;
+    background: rgba(0, 0, 0, 0.3);
+    border: 1px solid #333;
+    border-radius: 2px;
+}
+
+.discovery-section strong {
+    color: #c7c7c7;
+    display: block;
+    margin-bottom: 8px;
+    font-size: 0.9rem;
+}
+
 @keyframes spin {
     to { transform: rotate(360deg); }
 }