# DNScope/providers/crtsh_provider.py

import json
import re
from pathlib import Path
from typing import List, Dict, Any, Set, Optional
from urllib.parse import quote
from datetime import datetime, timezone

import requests

from .base_provider import BaseProvider
from core.provider_result import ProviderResult
from utils.helpers import _is_valid_domain
from core.logger import get_forensic_logger


class CrtShProvider(BaseProvider):
    """
    Provider for querying the crt.sh certificate transparency database.

    FIXED: Improved caching logic and error handling to prevent infinite retry loops.
    Returns standardized ProviderResult objects with caching support.
    UPDATED: Enhanced with certificate timestamps for time-based edge coloring.
    """

    def __init__(self, name=None, session_config=None):
        """Initialize the crt.sh provider with session-specific configuration."""
        super().__init__(
            name="crtsh",
            rate_limit=60,
            timeout=15,
            session_config=session_config
        )
        self.base_url = "https://crt.sh/"
        self._stop_event = None

        # Initialize the on-disk cache directory.
        self.domain_cache_dir = Path('cache') / 'crtsh'
        self.domain_cache_dir.mkdir(parents=True, exist_ok=True)

        # Precompile the date-filtering regex for efficiency.
        self.date_pattern = re.compile(r'^\d{4}-\d{2}-\d{2}[ T]\d{2}:\d{2}:\d{2}')

    def get_name(self) -> str:
        """Return the provider name."""
        return "crtsh"

    def get_display_name(self) -> str:
        """Return the provider display name for the UI."""
        return "crt.sh"

    def requires_api_key(self) -> bool:
        """Return True if the provider requires an API key."""
        return False

    def get_eligibility(self) -> Dict[str, bool]:
        """Return a dictionary indicating if the provider can query domains and/or IPs."""
        return {'domains': True, 'ips': False}

    def is_available(self) -> bool:
        """Check if the provider is configured to be used."""
        return True

    def _get_cache_file_path(self, domain: str) -> Path:
        """Generate the cache file path for a domain."""
        safe_domain = domain.replace('.', '_').replace('/', '_').replace('\\', '_')
        return self.domain_cache_dir / f"{safe_domain}.json"
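
    # For example, "sub.example.com" maps to cache/crtsh/sub_example_com.json.
    # Note the substitution is lossy: "a.b-c.com" and "a_b-c.com" both map to
    # "a_b-c_com.json", since '.' collapses to the same '_' character.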

    def _get_cache_status(self, cache_file_path: Path) -> str:
        """
        FIXED: More robust cache status checking with better error handling.

        Returns one of: 'not_found', 'fresh', or 'stale'.
        """
        if not cache_file_path.exists():
            return "not_found"

        try:
            # Check that the file is readable and not corrupted.
            if cache_file_path.stat().st_size == 0:
                self.logger.logger.warning(f"Empty cache file: {cache_file_path}")
                return "stale"

            with open(cache_file_path, 'r', encoding='utf-8') as f:
                cache_data = json.load(f)

            # Validate the cache structure.
            if not isinstance(cache_data, dict):
                self.logger.logger.warning(f"Invalid cache structure: {cache_file_path}")
                return "stale"

            last_query_str = cache_data.get("last_upstream_query")
            if not last_query_str or not isinstance(last_query_str, str):
                self.logger.logger.warning(f"Missing or invalid last_upstream_query: {cache_file_path}")
                return "stale"

            try:
                # More robust datetime parsing across the timestamp formats we may encounter.
                if last_query_str.endswith('Z'):
                    last_query = datetime.fromisoformat(last_query_str.replace('Z', '+00:00'))
                elif '+' in last_query_str or last_query_str.endswith('UTC'):
                    # Handle various timezone formats.
                    clean_time = last_query_str.replace('UTC', '').strip()
                    if '+' in clean_time:
                        clean_time = clean_time.split('+')[0]
                    last_query = datetime.fromisoformat(clean_time).replace(tzinfo=timezone.utc)
                else:
                    last_query = datetime.fromisoformat(last_query_str).replace(tzinfo=timezone.utc)
            except (ValueError, AttributeError) as e:
                self.logger.logger.warning(f"Failed to parse timestamp in cache {cache_file_path}: {e}")
                return "stale"

            hours_since_query = (datetime.now(timezone.utc) - last_query).total_seconds() / 3600
            return "fresh" if hours_since_query < self.config.cache_timeout_hours else "stale"

        except (json.JSONDecodeError, OSError, PermissionError) as e:
            self.logger.logger.warning(f"Cache file error for {cache_file_path}: {e}")
            # FIXED: Try to remove the corrupted cache file.
            try:
                cache_file_path.unlink()
                self.logger.logger.info(f"Removed corrupted cache file: {cache_file_path}")
            except Exception:
                pass
            return "not_found"
        except Exception as e:
            self.logger.logger.error(f"Unexpected error checking cache status for {cache_file_path}: {e}")
            return "stale"

    def query_domain(self, domain: str) -> ProviderResult:
        """
        FIXED: Simplified and more robust domain querying with better error handling.
        UPDATED: Enhanced with certificate timestamps for time-based edge coloring.
        """
        if not _is_valid_domain(domain):
            return ProviderResult()

        if self._stop_event and self._stop_event.is_set():
            return ProviderResult()

        cache_file = self._get_cache_file_path(domain)
        result = ProviderResult()
        cache_status = "not_found"  # Defensive default; the except blocks below read this.

        try:
            cache_status = self._get_cache_status(cache_file)

            if cache_status == "fresh":
                # Load from cache.
                result = self._load_from_cache(cache_file)
                if result and (result.relationships or result.attributes):
                    self.logger.logger.debug(f"Using fresh cached crt.sh data for {domain}")
                    return result
                else:
                    # The cache exists but is empty; treat it as stale.
                    cache_status = "stale"

            # Query the API (no cache, stale cache, or empty cache).
            self.logger.logger.debug(f"Querying crt.sh API for {domain} (cache status: {cache_status})")
            new_raw_certs = self._query_crtsh_api(domain)

            if self._stop_event and self._stop_event.is_set():
                return ProviderResult()

            # FIXED: Simplified processing - just process the new data.
            # Don't try to merge with a stale cache, as that can cause corruption.
            raw_certificates_to_process = new_raw_certs

            if cache_status == "stale":
                self.logger.logger.info(f"Refreshed stale cache for {domain} with {len(raw_certificates_to_process)} certs")
            else:
                self.logger.logger.info(f"Created fresh cache for {domain} with {len(raw_certificates_to_process)} certs")

            result = self._process_certificates_to_result_fixed(domain, raw_certificates_to_process)

            # Save the result to the cache.
            self._save_result_to_cache(cache_file, result, raw_certificates_to_process, domain)

            return result

        except requests.exceptions.RequestException as e:
            # FIXED: Don't re-raise network errors after long idle periods.
            # Instead, return an empty result and log the issue.
            self.logger.logger.warning(f"Network error querying crt.sh for {domain}: {e}")

            # Try to use the stale cache if available.
            if cache_status == "stale":
                try:
                    stale_result = self._load_from_cache(cache_file)
                    if stale_result and (stale_result.relationships or stale_result.attributes):
                        self.logger.logger.info(f"Using stale cache for {domain} due to network error")
                        return stale_result
                except Exception as cache_error:
                    self.logger.logger.warning(f"Failed to load stale cache for {domain}: {cache_error}")

            # Return an empty result instead of raising - this prevents infinite retries.
            return ProviderResult()

        except Exception as e:
            # FIXED: Handle any other exceptions gracefully.
            self.logger.logger.error(f"Unexpected error querying crt.sh for {domain}: {e}")

            # Try the stale cache as a fallback.
            try:
                if cache_file.exists():
                    fallback_result = self._load_from_cache(cache_file)
                    if fallback_result and (fallback_result.relationships or fallback_result.attributes):
                        self.logger.logger.info(f"Using cached data for {domain} due to processing error")
                        return fallback_result
            except Exception:
                pass

            # Return an empty result to prevent retries.
            return ProviderResult()
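
    # Cache flow for query_domain(): a fresh cache is served from disk; a stale,
    # empty, or missing cache triggers a fresh API query and a cache rewrite; a
    # network error falls back to the stale cache when one exists, and otherwise
    # an empty result is returned so callers do not retry indefinitely.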

    def query_ip(self, ip: str) -> ProviderResult:
        """crt.sh does not effectively support IP-based certificate queries via its API."""
        return ProviderResult()

    def _load_from_cache(self, cache_file_path: Path) -> ProviderResult:
        """FIXED: More robust cache loading with better validation."""
        try:
            if not cache_file_path.exists() or cache_file_path.stat().st_size == 0:
                return ProviderResult()

            with open(cache_file_path, 'r', encoding='utf-8') as f:
                cache_content = json.load(f)

            if not isinstance(cache_content, dict):
                self.logger.logger.warning(f"Invalid cache format in {cache_file_path}")
                return ProviderResult()

            result = ProviderResult()

            # Reconstruct relationships, validating each record.
            relationships = cache_content.get("relationships", [])
            if isinstance(relationships, list):
                for rel_data in relationships:
                    if not isinstance(rel_data, dict):
                        continue
                    try:
                        result.add_relationship(
                            source_node=rel_data.get("source_node", ""),
                            target_node=rel_data.get("target_node", ""),
                            relationship_type=rel_data.get("relationship_type", ""),
                            provider=rel_data.get("provider", self.name),
                            raw_data=rel_data.get("raw_data", {})
                        )
                    except (ValueError, TypeError) as e:
                        self.logger.logger.warning(f"Skipping invalid relationship in cache: {e}")
                        continue

            # Reconstruct attributes, validating each record.
            attributes = cache_content.get("attributes", [])
            if isinstance(attributes, list):
                for attr_data in attributes:
                    if not isinstance(attr_data, dict):
                        continue
                    try:
                        result.add_attribute(
                            target_node=attr_data.get("target_node", ""),
                            name=attr_data.get("name", ""),
                            value=attr_data.get("value"),
                            attr_type=attr_data.get("type", "unknown"),
                            provider=attr_data.get("provider", self.name),
                            metadata=attr_data.get("metadata", {})
                        )
                    except (ValueError, TypeError) as e:
                        self.logger.logger.warning(f"Skipping invalid attribute in cache: {e}")
                        continue

            return result

        except (json.JSONDecodeError, OSError, PermissionError) as e:
            self.logger.logger.warning(f"Failed to load cache from {cache_file_path}: {e}")
            return ProviderResult()
        except Exception as e:
            self.logger.logger.error(f"Unexpected error loading cache from {cache_file_path}: {e}")
            return ProviderResult()

    def _save_result_to_cache(self, cache_file_path: Path, result: ProviderResult, raw_certificates: List[Dict[str, Any]], domain: str) -> None:
        """FIXED: More robust cache saving with atomic writes."""
        try:
            cache_data = {
                "domain": domain,
                "last_upstream_query": datetime.now(timezone.utc).isoformat(),
                "raw_certificates": raw_certificates,
                "relationships": [
                    {
                        "source_node": rel.source_node,
                        "target_node": rel.target_node,
                        "relationship_type": rel.relationship_type,
                        "provider": rel.provider,
                        "raw_data": rel.raw_data
                    } for rel in result.relationships
                ],
                "attributes": [
                    {
                        "target_node": attr.target_node,
                        "name": attr.name,
                        "value": attr.value,
                        "type": attr.type,
                        "provider": attr.provider,
                        "metadata": attr.metadata
                    } for attr in result.attributes
                ]
            }

            cache_file_path.parent.mkdir(parents=True, exist_ok=True)

            # FIXED: Atomic write via a temporary file followed by an atomic rename.
            temp_file = cache_file_path.with_suffix('.tmp')
            try:
                with open(temp_file, 'w', encoding='utf-8') as f:
                    json.dump(cache_data, f, separators=(',', ':'), default=str, ensure_ascii=False)

                temp_file.replace(cache_file_path)
                self.logger.logger.debug(f"Saved cache for {domain} ({len(result.relationships)} relationships)")

            except Exception:
                # Clean up the temporary file on error.
                if temp_file.exists():
                    try:
                        temp_file.unlink()
                    except Exception:
                        pass
                raise

        except Exception as e:
            self.logger.logger.warning(f"Failed to save cache file for {domain}: {e}")
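
    # The cache file written above is a single JSON object of the form:
    #   {
    #     "domain": "<queried domain>",
    #     "last_upstream_query": "<UTC ISO timestamp>",
    #     "raw_certificates": [...],
    #     "relationships": [{"source_node": ..., "target_node": ..., ...}],
    #     "attributes": [{"target_node": ..., "name": ..., "value": ..., ...}]
    #   }
    # _get_cache_status() reads last_upstream_query to judge freshness, and
    # _load_from_cache() rebuilds a ProviderResult from the two lists.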

    def _query_crtsh_api(self, domain: str) -> List[Dict[str, Any]]:
        """FIXED: More robust API querying with better error handling."""
        url = f"{self.base_url}?q={quote(domain)}&output=json"

        try:
            response = self.make_request(url, target_indicator=domain)

            if not response:
                self.logger.logger.warning(f"No response from crt.sh for {domain}")
                return []

            if response.status_code != 200:
                self.logger.logger.warning(f"crt.sh returned status {response.status_code} for {domain}")
                return []

            # FIXED: Better JSON parsing with error handling.
            try:
                certificates = response.json()
            except json.JSONDecodeError as e:
                self.logger.logger.error(f"crt.sh returned invalid JSON for {domain}: {e}")
                return []

            if not certificates or not isinstance(certificates, list):
                self.logger.logger.debug(f"crt.sh returned no certificates for {domain}")
                return []

            self.logger.logger.debug(f"crt.sh returned {len(certificates)} certificates for {domain}")
            return certificates

        except Exception as e:
            self.logger.logger.error(f"Error querying crt.sh API for {domain}: {e}")
            raise

    def _process_certificates_to_result_fixed(self, query_domain: str, certificates: List[Dict[str, Any]]) -> ProviderResult:
        """
        Process certificates to create proper domain and CA nodes.

        FIXED: Better error handling and progress tracking.
        UPDATED: Enhanced with certificate timestamps for time-based edge coloring.
        """
        result = ProviderResult()

        if self._stop_event and self._stop_event.is_set():
            self.logger.logger.info(f"CrtSh processing cancelled before processing for domain: {query_domain}")
            return result

        if not certificates:
            self.logger.logger.debug(f"No certificates to process for {query_domain}")
            return result

        # Check for an incomplete-data warning.
        incompleteness_warning = self._check_for_incomplete_data(query_domain, certificates)
        if incompleteness_warning:
            result.add_attribute(
                target_node=query_domain,
                name="crtsh_data_warning",
                value=incompleteness_warning,
                attr_type='metadata',
                provider=self.name
            )

        all_discovered_domains = set()
        processed_issuers = set()
        processed_certs = 0

        for i, cert_data in enumerate(certificates):
            # FIXED: More frequent stop checks and progress logging.
            if i % 5 == 0:
                if self._stop_event and self._stop_event.is_set():
                    self.logger.logger.info(f"CrtSh processing cancelled at certificate {i}/{len(certificates)} for domain: {query_domain}")
                    break

            if i > 0 and i % 100 == 0:
                self.logger.logger.debug(f"Processed {i}/{len(certificates)} certificates for {query_domain}")

            try:
                # Extract all domains from this certificate.
                cert_domains = self._extract_domains_from_certificate(cert_data)
                if cert_domains:
                    all_discovered_domains.update(cert_domains)

                # Create CA nodes for certificate issuers, with a timestamp.
                issuer_name = self._parse_issuer_organization(cert_data.get('issuer_name', ''))
                if issuer_name and issuer_name not in processed_issuers:
                    # Enhanced raw_data with the certificate timestamp for time-based edge coloring.
                    issuer_raw_data = {'issuer_dn': cert_data.get('issuer_name', '')}

                    # Add the certificate issue date (not_before) as the relevance timestamp.
                    not_before = cert_data.get('not_before')
                    if not_before:
                        try:
                            not_before_date = self._parse_certificate_date(not_before)
                            issuer_raw_data['cert_not_before'] = not_before_date.isoformat()
                            issuer_raw_data['relevance_timestamp'] = not_before_date.isoformat()  # Standardized field
                        except Exception as e:
                            self.logger.logger.debug(f"Failed to parse not_before date for issuer: {e}")

                    result.add_relationship(
                        source_node=query_domain,
                        target_node=issuer_name,
                        relationship_type='crtsh_cert_issuer',
                        provider=self.name,
                        raw_data=issuer_raw_data
                    )
                    processed_issuers.add(issuer_name)

                # Add certificate metadata to each domain in this certificate.
                cert_metadata = self._extract_certificate_metadata(cert_data)
                for cert_domain in cert_domains:
                    if not _is_valid_domain(cert_domain):
                        continue

                    for key, value in cert_metadata.items():
                        if value is not None:
                            result.add_attribute(
                                target_node=cert_domain,
                                name=f"cert_{key}",
                                value=value,
                                attr_type='certificate_data',
                                provider=self.name,
                                metadata={'certificate_id': cert_data.get('id')}
                            )

                processed_certs += 1

            except Exception as e:
                self.logger.logger.warning(f"Error processing certificate {i} for {query_domain}: {e}")
                continue

        # Check the stop event before creating the final relationships.
        if self._stop_event and self._stop_event.is_set():
            self.logger.logger.info(f"CrtSh query cancelled before relationship creation for domain: {query_domain}")
            return result

        # Create selective relationships (with enhanced timestamps) to avoid oversized entities.
        relationships_created = 0
        for discovered_domain in all_discovered_domains:
            if discovered_domain == query_domain:
                continue

            if not _is_valid_domain(discovered_domain):
                continue

            if self._should_create_relationship(query_domain, discovered_domain):
                # Enhanced raw_data with the certificate timestamp for domain relationships.
                domain_raw_data = {'relationship_type': 'certificate_discovery'}

                # Use the most recent certificate covering this domain pair as the timestamp source.
                most_recent_cert = self._find_most_recent_cert_for_domains(
                    certificates, query_domain, discovered_domain
                )
                if most_recent_cert:
                    not_before = most_recent_cert.get('not_before')
                    if not_before:
                        try:
                            not_before_date = self._parse_certificate_date(not_before)
                            domain_raw_data['cert_not_before'] = not_before_date.isoformat()
                            domain_raw_data['relevance_timestamp'] = not_before_date.isoformat()
                        except Exception as e:
                            self.logger.logger.debug(f"Failed to parse not_before date for domain relationship: {e}")

                result.add_relationship(
                    source_node=query_domain,
                    target_node=discovered_domain,
                    relationship_type='crtsh_san_certificate',
                    provider=self.name,
                    raw_data=domain_raw_data
                )

                self.log_relationship_discovery(
                    source_node=query_domain,
                    target_node=discovered_domain,
                    relationship_type='crtsh_san_certificate',
                    raw_data=domain_raw_data,
                    discovery_method="certificate_transparency_analysis"
                )
                relationships_created += 1

        self.logger.logger.info(
            f"CrtSh processing completed for {query_domain}: processed {processed_certs}/{len(certificates)} certificates, "
            f"{len(all_discovered_domains)} domains, {relationships_created} relationships"
        )
        return result

    def _find_most_recent_cert_for_domains(self, certificates: List[Dict[str, Any]],
                                           domain1: str, domain2: str) -> Optional[Dict[str, Any]]:
        """
        Find the most recent certificate that contains both domains.

        Used to determine the relevance timestamp for domain relationships.
        """
        most_recent_cert = None
        most_recent_date = None

        for cert in certificates:
            # Check whether this certificate contains both domains.
            cert_domains = self._extract_domains_from_certificate(cert)
            if domain1 in cert_domains and domain2 in cert_domains:
                not_before = cert.get('not_before')
                if not_before:
                    try:
                        cert_date = self._parse_certificate_date(not_before)
                        if most_recent_date is None or cert_date > most_recent_date:
                            most_recent_date = cert_date
                            most_recent_cert = cert
                    except Exception:
                        continue

        return most_recent_cert

    def _should_create_relationship(self, source_domain: str, target_domain: str) -> bool:
        """Determine if a relationship should be created between two domains."""
        if target_domain.endswith(f'.{source_domain}') or source_domain.endswith(f'.{target_domain}'):
            return True

        source_parts = source_domain.split('.')
        target_parts = target_domain.split('.')

        if len(source_parts) >= 2 and len(target_parts) >= 2:
            source_root = '.'.join(source_parts[-2:])
            target_root = '.'.join(target_parts[-2:])
            return source_root == target_root

        return False
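
    # Examples: ("example.com", "api.example.com") -> True (subdomain);
    # ("a.example.com", "b.example.com") -> True (shared root "example.com");
    # ("example.com", "example.org") -> False. Note the two-label root
    # heuristic misfires on multi-part public suffixes: "foo.co.uk" and
    # "bar.co.uk" share the root "co.uk" and are treated as related. A public
    # suffix list would be needed to handle that case precisely.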

    def _extract_certificate_metadata(self, cert_data: Dict[str, Any]) -> Dict[str, Any]:
        """Extract comprehensive metadata from certificate data."""
        raw_issuer_name = cert_data.get('issuer_name', '')
        parsed_issuer_name = self._parse_issuer_organization(raw_issuer_name)

        metadata = {
            'certificate_id': cert_data.get('id'),
            'serial_number': cert_data.get('serial_number'),
            'issuer_name': parsed_issuer_name,
            'issuer_ca_id': cert_data.get('issuer_ca_id'),
            'common_name': cert_data.get('common_name'),
            'not_before': cert_data.get('not_before'),
            'not_after': cert_data.get('not_after'),
            'entry_timestamp': cert_data.get('entry_timestamp'),
            'source': 'crtsh'
        }

        try:
            if metadata['not_before'] and metadata['not_after']:
                not_before = self._parse_certificate_date(metadata['not_before'])
                not_after = self._parse_certificate_date(metadata['not_after'])

                metadata['validity_period_days'] = (not_after - not_before).days
                metadata['is_currently_valid'] = self._is_cert_valid(cert_data)
                metadata['expires_soon'] = (not_after - datetime.now(timezone.utc)).days <= 30

                metadata['not_before'] = not_before.isoformat()
                metadata['not_after'] = not_after.isoformat()

        except Exception as e:
            self.logger.logger.debug(f"Error computing certificate metadata: {e}")
            metadata['is_currently_valid'] = False
            metadata['expires_soon'] = False

        return metadata

    def _parse_issuer_organization(self, issuer_dn: str) -> str:
        """Parse the issuer Distinguished Name to extract just the organization name."""
        if not issuer_dn:
            return issuer_dn

        try:
            components = [comp.strip() for comp in issuer_dn.split(',')]

            for component in components:
                if component.startswith('O='):
                    org_name = component[2:].strip()
                    if org_name.startswith('"') and org_name.endswith('"'):
                        org_name = org_name[1:-1]
                    return org_name

            return issuer_dn

        except Exception as e:
            self.logger.logger.debug(f"Failed to parse issuer DN '{issuer_dn}': {e}")
            return issuer_dn
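
    # Example: 'C=US, O="DigiCert Inc", CN=DigiCert TLS RSA SHA256 2020 CA1'
    # yields 'DigiCert Inc'. If no O= component is present, the full DN is
    # returned unchanged. Note the naive comma split will truncate an
    # organization name that itself contains a comma.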

    def _parse_certificate_date(self, date_string: str) -> datetime:
        """Parse a certificate date from the crt.sh format into an aware UTC datetime."""
        if not date_string:
            raise ValueError("Empty date string")

        try:
            if isinstance(date_string, datetime):
                return date_string.replace(tzinfo=timezone.utc)
            if date_string.endswith('Z'):
                return datetime.fromisoformat(date_string[:-1]).replace(tzinfo=timezone.utc)
            elif '+' in date_string or date_string.endswith('UTC'):
                date_string = date_string.replace('UTC', '').strip()
                if '+' in date_string:
                    date_string = date_string.split('+')[0]
                return datetime.fromisoformat(date_string).replace(tzinfo=timezone.utc)
            else:
                return datetime.fromisoformat(date_string).replace(tzinfo=timezone.utc)
        except Exception as e:
            # Fall back to a fixed-width parse of the leading "YYYY-MM-DDTHH:MM:SS" portion.
            try:
                return datetime.strptime(date_string[:19], "%Y-%m-%dT%H:%M:%S").replace(tzinfo=timezone.utc)
            except Exception:
                raise ValueError(f"Unable to parse date: {date_string}") from e
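
    # Examples: "2024-01-15T08:30:00" and "2024-01-15T08:30:00Z" both parse to
    # 2024-01-15 08:30:00+00:00. Offsets are stripped rather than converted, so
    # "2024-01-15T08:30:00+02:00" is also read as 08:30 UTC; crt.sh timestamps
    # are UTC in practice, so this simplification is normally harmless.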

    def _is_cert_valid(self, cert_data: Dict[str, Any]) -> bool:
        """Check whether a certificate is currently valid based on its validity window."""
        try:
            not_after_str = cert_data.get('not_after')
            if not not_after_str:
                return False

            not_after_date = self._parse_certificate_date(not_after_str)
            not_before_str = cert_data.get('not_before')

            now = datetime.now(timezone.utc)
            is_not_expired = not_after_date > now

            if not_before_str:
                not_before_date = self._parse_certificate_date(not_before_str)
                is_not_before_valid = not_before_date <= now
                return is_not_expired and is_not_before_valid

            return is_not_expired

        except Exception:
            return False

    def _extract_domains_from_certificate(self, cert_data: Dict[str, Any]) -> Set[str]:
        """Extract all domains from certificate data (common name plus SAN entries)."""
        domains = set()

        common_name = cert_data.get('common_name', '')
        if common_name:
            cleaned_cn = self._clean_domain_name(common_name)
            if cleaned_cn:
                domains.update(cleaned_cn)

        # name_value holds the SAN entries, one per line.
        name_value = cert_data.get('name_value', '')
        if name_value:
            for line in name_value.split('\n'):
                cleaned_domains = self._clean_domain_name(line.strip())
                if cleaned_domains:
                    domains.update(cleaned_domains)

        return domains

    def _clean_domain_name(self, domain_name: str) -> List[str]:
        """Clean and normalize a domain name taken from certificate data."""
        if not domain_name:
            return []

        domain = domain_name.strip().lower()

        # Strip any URL scheme.
        if domain.startswith(('http://', 'https://')):
            domain = domain.split('://', 1)[1]

        # Strip any path component.
        if '/' in domain:
            domain = domain.split('/', 1)[0]

        # Strip a port, but leave strings with multiple colons untouched.
        if domain.count(':') == 1:
            domain = domain.split(':', 1)[0]

        cleaned_domains = []
        if domain.startswith('*.'):
            cleaned_domains.append(domain)
            cleaned_domains.append(domain[2:])
        else:
            cleaned_domains.append(domain)

        final_domains = []
        for d in cleaned_domains:
            d = re.sub(r'[^\w\-\.]', '', d)
            if d and not d.startswith(('.', '-')) and not d.endswith(('.', '-')):
                final_domains.append(d)

        return [d for d in final_domains if _is_valid_domain(d)]
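
    # Example: "*.Example.COM" is lowercased and expanded to the pair
    # ["*.example.com", "example.com"], but the sanitizing regex then strips
    # the "*" and leaves ".example.com", which the leading-dot check rejects,
    # so only "example.com" survives. URL-shaped entries such as
    # "https://www.example.com:443/path" reduce to "www.example.com".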

    def _determine_relationship_context(self, cert_domain: str, query_domain: str) -> str:
        """Determine the context of the relationship between certificate domain and query domain."""
        if cert_domain == query_domain:
            return 'exact_match'
        elif cert_domain.endswith(f'.{query_domain}'):
            return 'subdomain'
        elif query_domain.endswith(f'.{cert_domain}'):
            return 'parent_domain'
        else:
            return 'related_domain'

    def _check_for_incomplete_data(self, domain: str, certificates: List[Dict[str, Any]]) -> Optional[str]:
        """Heuristically analyze the certificate list to detect whether the data from crt.sh is incomplete."""
        cert_count = len(certificates)

        if cert_count >= 10000:
            return f"Result likely truncated; received {cert_count} certificates, which may be the maximum limit."

        if cert_count > 1000:
            latest_expiry = None
            for cert in certificates:
                try:
                    not_after = self._parse_certificate_date(cert.get('not_after'))
                    if latest_expiry is None or not_after > latest_expiry:
                        latest_expiry = not_after
                except (ValueError, TypeError):
                    continue

            if latest_expiry and (datetime.now(timezone.utc) - latest_expiry).days > 365:
                return f"Incomplete data suspected: The latest certificate expired more than a year ago ({latest_expiry.strftime('%Y-%m-%d')})."

        return None
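

if __name__ == "__main__":
    # Minimal usage sketch, not part of the provider itself. It assumes the
    # package context resolves (e.g. run as
    # `python -m DNScope.providers.crtsh_provider`) and that BaseProvider
    # accepts a default session_config of None. Expect a live network call.
    provider = CrtShProvider()
    demo = provider.query_domain("example.com")
    print(f"example.com: {len(demo.relationships)} relationships, "
          f"{len(demo.attributes)} attributes")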