640 lines
26 KiB
Python
640 lines
26 KiB
Python
# dnsrecon/providers/crtsh_provider.py
|
|
|
|
import json
|
|
import re
|
|
from pathlib import Path
|
|
from typing import List, Dict, Any, Set
|
|
from urllib.parse import quote
|
|
from datetime import datetime, timezone
|
|
import requests
|
|
|
|
from .base_provider import BaseProvider
|
|
from core.provider_result import ProviderResult
|
|
from utils.helpers import _is_valid_domain
|
|
|
|
|
|
class CrtShProvider(BaseProvider):
    """
    Provider for querying crt.sh certificate transparency database.
    Now returns standardized ProviderResult objects with caching support.
    """

    def __init__(self, name=None, session_config=None):
        """
        Initialize CrtSh provider with session-specific configuration.

        Args:
            name: Ignored; the provider always registers itself as "crtsh".
            session_config: Optional per-session configuration forwarded to
                BaseProvider.
        """
        super().__init__(
            name="crtsh",
            rate_limit=60,  # requests per minute against crt.sh
            timeout=15,     # per-request timeout in seconds
            session_config=session_config
        )
        # Base endpoint of the crt.sh JSON API.
        self.base_url = "https://crt.sh/"
        # Cooperative-cancellation event; set externally to abort long scans.
        self._stop_event = None

        # Initialize cache directory (one JSON file per queried domain).
        self.cache_dir = Path('cache') / 'crtsh'
        self.cache_dir.mkdir(parents=True, exist_ok=True)

        # Compile regex for date filtering for efficiency.
        # NOTE(review): compiled here but not referenced in this file —
        # presumably consumed by callers; confirm before removing.
        self.date_pattern = re.compile(r'^\d{4}-\d{2}-\d{2}[ T]\d{2}:\d{2}:\d{2}')
|
def get_name(self) -> str:
|
|
"""Return the provider name."""
|
|
return "crtsh"
|
|
|
|
def get_display_name(self) -> str:
|
|
"""Return the provider display name for the UI."""
|
|
return "crt.sh"
|
|
|
|
def requires_api_key(self) -> bool:
|
|
"""Return True if the provider requires an API key."""
|
|
return False
|
|
|
|
def get_eligibility(self) -> Dict[str, bool]:
|
|
"""Return a dictionary indicating if the provider can query domains and/or IPs."""
|
|
return {'domains': True, 'ips': False}
|
|
|
|
def is_available(self) -> bool:
|
|
"""Check if the provider is configured to be used."""
|
|
return True
|
|
|
|
def _get_cache_file_path(self, domain: str) -> Path:
|
|
"""Generate cache file path for a domain."""
|
|
safe_domain = domain.replace('.', '_').replace('/', '_').replace('\\', '_')
|
|
return self.cache_dir / f"{safe_domain}.json"
|
|
|
|
def _get_cache_status(self, cache_file_path: Path) -> str:
|
|
"""
|
|
Check cache status for a domain.
|
|
Returns: 'not_found', 'fresh', or 'stale'
|
|
"""
|
|
if not cache_file_path.exists():
|
|
return "not_found"
|
|
|
|
try:
|
|
with open(cache_file_path, 'r') as f:
|
|
cache_data = json.load(f)
|
|
|
|
last_query_str = cache_data.get("last_upstream_query")
|
|
if not last_query_str:
|
|
return "stale"
|
|
|
|
last_query = datetime.fromisoformat(last_query_str.replace('Z', '+00:00'))
|
|
hours_since_query = (datetime.now(timezone.utc) - last_query).total_seconds() / 3600
|
|
|
|
cache_timeout = self.config.cache_timeout_hours
|
|
if hours_since_query < cache_timeout:
|
|
return "fresh"
|
|
else:
|
|
return "stale"
|
|
|
|
except (json.JSONDecodeError, ValueError, KeyError) as e:
|
|
self.logger.logger.warning(f"Invalid cache file format for {cache_file_path}: {e}")
|
|
return "stale"
|
|
|
|
def query_domain(self, domain: str) -> ProviderResult:
|
|
"""
|
|
Query crt.sh for certificates containing the domain with caching support.
|
|
|
|
Args:
|
|
domain: Domain to investigate
|
|
|
|
Returns:
|
|
ProviderResult containing discovered relationships and attributes
|
|
"""
|
|
if not _is_valid_domain(domain):
|
|
return ProviderResult()
|
|
|
|
if self._stop_event and self._stop_event.is_set():
|
|
return ProviderResult()
|
|
|
|
cache_file = self._get_cache_file_path(domain)
|
|
cache_status = self._get_cache_status(cache_file)
|
|
|
|
processed_certificates = []
|
|
result = ProviderResult()
|
|
|
|
try:
|
|
if cache_status == "fresh":
|
|
result = self._load_from_cache(cache_file)
|
|
self.logger.logger.info(f"Using cached crt.sh data for {domain}")
|
|
|
|
else: # "stale" or "not_found"
|
|
raw_certificates = self._query_crtsh_api(domain)
|
|
|
|
if self._stop_event and self._stop_event.is_set():
|
|
return ProviderResult()
|
|
|
|
# Process raw data into the application's expected format
|
|
current_processed_certs = [self._extract_certificate_metadata(cert) for cert in raw_certificates]
|
|
|
|
if cache_status == "stale":
|
|
# Load existing and append new processed certs
|
|
existing_result = self._load_from_cache(cache_file)
|
|
result = self._merge_results(existing_result, current_processed_certs, domain)
|
|
self.logger.logger.info(f"Refreshed and merged cache for {domain}")
|
|
else: # "not_found"
|
|
# Create new result from processed certs
|
|
result = self._process_certificates_to_result(domain, raw_certificates)
|
|
self.logger.logger.info(f"Created fresh result for {domain} ({result.get_relationship_count()} relationships)")
|
|
|
|
# Save the result to cache
|
|
self._save_result_to_cache(cache_file, result, domain)
|
|
|
|
except requests.exceptions.RequestException as e:
|
|
self.logger.logger.error(f"API query failed for {domain}: {e}")
|
|
if cache_status != "not_found":
|
|
result = self._load_from_cache(cache_file)
|
|
self.logger.logger.warning(f"Using stale cache for {domain} due to API failure.")
|
|
else:
|
|
raise e # Re-raise if there's no cache to fall back on
|
|
|
|
return result
|
|
|
|
def query_ip(self, ip: str) -> ProviderResult:
|
|
"""
|
|
Query crt.sh for certificates containing the IP address.
|
|
Note: crt.sh doesn't typically index by IP, so this returns empty results.
|
|
|
|
Args:
|
|
ip: IP address to investigate
|
|
|
|
Returns:
|
|
Empty ProviderResult (crt.sh doesn't support IP-based certificate queries effectively)
|
|
"""
|
|
return ProviderResult()
|
|
|
|
def _load_from_cache(self, cache_file_path: Path) -> ProviderResult:
|
|
"""Load processed crt.sh data from a cache file."""
|
|
try:
|
|
with open(cache_file_path, 'r') as f:
|
|
cache_content = json.load(f)
|
|
|
|
result = ProviderResult()
|
|
|
|
# Reconstruct relationships
|
|
for rel_data in cache_content.get("relationships", []):
|
|
result.add_relationship(
|
|
source_node=rel_data["source_node"],
|
|
target_node=rel_data["target_node"],
|
|
relationship_type=rel_data["relationship_type"],
|
|
provider=rel_data["provider"],
|
|
confidence=rel_data["confidence"],
|
|
raw_data=rel_data.get("raw_data", {})
|
|
)
|
|
|
|
# Reconstruct attributes
|
|
for attr_data in cache_content.get("attributes", []):
|
|
result.add_attribute(
|
|
target_node=attr_data["target_node"],
|
|
name=attr_data["name"],
|
|
value=attr_data["value"],
|
|
attr_type=attr_data["type"],
|
|
provider=attr_data["provider"],
|
|
confidence=attr_data["confidence"],
|
|
metadata=attr_data.get("metadata", {})
|
|
)
|
|
|
|
return result
|
|
|
|
except (json.JSONDecodeError, FileNotFoundError, KeyError) as e:
|
|
self.logger.logger.error(f"Failed to load cached certificates from {cache_file_path}: {e}")
|
|
return ProviderResult()
|
|
|
|
def _save_result_to_cache(self, cache_file_path: Path, result: ProviderResult, domain: str) -> None:
|
|
"""Save processed crt.sh result to a cache file."""
|
|
try:
|
|
cache_data = {
|
|
"domain": domain,
|
|
"last_upstream_query": datetime.now(timezone.utc).isoformat(),
|
|
"relationships": [
|
|
{
|
|
"source_node": rel.source_node,
|
|
"target_node": rel.target_node,
|
|
"relationship_type": rel.relationship_type,
|
|
"confidence": rel.confidence,
|
|
"provider": rel.provider,
|
|
"raw_data": rel.raw_data
|
|
} for rel in result.relationships
|
|
],
|
|
"attributes": [
|
|
{
|
|
"target_node": attr.target_node,
|
|
"name": attr.name,
|
|
"value": attr.value,
|
|
"type": attr.type,
|
|
"provider": attr.provider,
|
|
"confidence": attr.confidence,
|
|
"metadata": attr.metadata
|
|
} for attr in result.attributes
|
|
]
|
|
}
|
|
cache_file_path.parent.mkdir(parents=True, exist_ok=True)
|
|
with open(cache_file_path, 'w') as f:
|
|
json.dump(cache_data, f, separators=(',', ':'), default=str)
|
|
except Exception as e:
|
|
self.logger.logger.warning(f"Failed to save cache file for {domain}: {e}")
|
|
|
|
def _merge_results(self, existing_result: ProviderResult, new_certificates: List[Dict[str, Any]], domain: str) -> ProviderResult:
|
|
"""Merge new certificate data with existing cached result."""
|
|
# Create a fresh result from the new certificates
|
|
new_result = self._process_certificates_to_result(domain, new_certificates)
|
|
|
|
# Simple merge strategy: combine all relationships and attributes
|
|
# In practice, you might want more sophisticated deduplication
|
|
merged_result = ProviderResult()
|
|
|
|
# Add existing relationships and attributes
|
|
merged_result.relationships.extend(existing_result.relationships)
|
|
merged_result.attributes.extend(existing_result.attributes)
|
|
|
|
# Add new relationships and attributes
|
|
merged_result.relationships.extend(new_result.relationships)
|
|
merged_result.attributes.extend(new_result.attributes)
|
|
|
|
return merged_result
|
|
|
|
def _query_crtsh_api(self, domain: str) -> List[Dict[str, Any]]:
|
|
"""Query crt.sh API for raw certificate data."""
|
|
url = f"{self.base_url}?q={quote(domain)}&output=json"
|
|
response = self.make_request(url, target_indicator=domain)
|
|
|
|
if not response or response.status_code != 200:
|
|
raise requests.exceptions.RequestException(f"crt.sh API returned status {response.status_code if response else 'None'}")
|
|
|
|
certificates = response.json()
|
|
if not certificates:
|
|
return []
|
|
|
|
return certificates
|
|
|
|
def _process_certificates_to_result(self, domain: str, certificates: List[Dict[str, Any]]) -> ProviderResult:
|
|
"""
|
|
Process certificates to create ProviderResult with relationships and attributes.
|
|
"""
|
|
result = ProviderResult()
|
|
|
|
if self._stop_event and self._stop_event.is_set():
|
|
print(f"CrtSh processing cancelled before processing for domain: {domain}")
|
|
return result
|
|
|
|
all_discovered_domains = set()
|
|
|
|
for i, cert_data in enumerate(certificates):
|
|
if i % 5 == 0 and self._stop_event and self._stop_event.is_set():
|
|
print(f"CrtSh processing cancelled at certificate {i} for domain: {domain}")
|
|
break
|
|
|
|
cert_domains = self._extract_domains_from_certificate(cert_data)
|
|
all_discovered_domains.update(cert_domains)
|
|
|
|
for cert_domain in cert_domains:
|
|
if not _is_valid_domain(cert_domain):
|
|
continue
|
|
|
|
for key, value in self._extract_certificate_metadata(cert_data).items():
|
|
if value is not None:
|
|
result.add_attribute(
|
|
target_node=cert_domain,
|
|
name=f"cert_{key}",
|
|
value=value,
|
|
attr_type='certificate_data',
|
|
provider=self.name,
|
|
confidence=0.9
|
|
)
|
|
|
|
if self._stop_event and self._stop_event.is_set():
|
|
print(f"CrtSh query cancelled before relationship creation for domain: {domain}")
|
|
return result
|
|
|
|
for i, discovered_domain in enumerate(all_discovered_domains):
|
|
if discovered_domain == domain:
|
|
continue
|
|
|
|
if i % 10 == 0 and self._stop_event and self._stop_event.is_set():
|
|
print(f"CrtSh relationship creation cancelled for domain: {domain}")
|
|
break
|
|
|
|
if not _is_valid_domain(discovered_domain):
|
|
continue
|
|
|
|
confidence = self._calculate_domain_relationship_confidence(
|
|
domain, discovered_domain, [], all_discovered_domains
|
|
)
|
|
|
|
result.add_relationship(
|
|
source_node=domain,
|
|
target_node=discovered_domain,
|
|
relationship_type='san_certificate',
|
|
provider=self.name,
|
|
confidence=confidence,
|
|
raw_data={'relationship_type': 'certificate_discovery'}
|
|
)
|
|
|
|
self.log_relationship_discovery(
|
|
source_node=domain,
|
|
target_node=discovered_domain,
|
|
relationship_type='san_certificate',
|
|
confidence_score=confidence,
|
|
raw_data={'relationship_type': 'certificate_discovery'},
|
|
discovery_method="certificate_transparency_analysis"
|
|
)
|
|
|
|
return result
|
|
|
|
def _extract_certificate_metadata(self, cert_data: Dict[str, Any]) -> Dict[str, Any]:
|
|
"""Extract comprehensive metadata from certificate data."""
|
|
raw_issuer_name = cert_data.get('issuer_name', '')
|
|
parsed_issuer_name = self._parse_issuer_organization(raw_issuer_name)
|
|
|
|
metadata = {
|
|
'certificate_id': cert_data.get('id'),
|
|
'serial_number': cert_data.get('serial_number'),
|
|
'issuer_name': parsed_issuer_name,
|
|
'issuer_ca_id': cert_data.get('issuer_ca_id'),
|
|
'common_name': cert_data.get('common_name'),
|
|
'not_before': cert_data.get('not_before'),
|
|
'not_after': cert_data.get('not_after'),
|
|
'entry_timestamp': cert_data.get('entry_timestamp'),
|
|
'source': 'crt.sh'
|
|
}
|
|
|
|
try:
|
|
if metadata['not_before'] and metadata['not_after']:
|
|
not_before = self._parse_certificate_date(metadata['not_before'])
|
|
not_after = self._parse_certificate_date(metadata['not_after'])
|
|
|
|
metadata['validity_period_days'] = (not_after - not_before).days
|
|
metadata['is_currently_valid'] = self._is_cert_valid(cert_data)
|
|
metadata['expires_soon'] = (not_after - datetime.now(timezone.utc)).days <= 30
|
|
|
|
metadata['not_before'] = not_before.strftime('%Y-%m-%d %H:%M:%S UTC')
|
|
metadata['not_after'] = not_after.strftime('%Y-%m-%d %H:%M:%S UTC')
|
|
|
|
except Exception as e:
|
|
self.logger.logger.debug(f"Error computing certificate metadata: {e}")
|
|
metadata['is_currently_valid'] = False
|
|
metadata['expires_soon'] = False
|
|
|
|
return metadata
|
|
|
|
def _parse_issuer_organization(self, issuer_dn: str) -> str:
|
|
"""Parse the issuer Distinguished Name to extract just the organization name."""
|
|
if not issuer_dn:
|
|
return issuer_dn
|
|
|
|
try:
|
|
components = [comp.strip() for comp in issuer_dn.split(',')]
|
|
|
|
for component in components:
|
|
if component.startswith('O='):
|
|
org_name = component[2:].strip()
|
|
if org_name.startswith('"') and org_name.endswith('"'):
|
|
org_name = org_name[1:-1]
|
|
return org_name
|
|
|
|
return issuer_dn
|
|
|
|
except Exception as e:
|
|
self.logger.logger.debug(f"Failed to parse issuer DN '{issuer_dn}': {e}")
|
|
return issuer_dn
|
|
|
|
def _parse_certificate_date(self, date_string: str) -> datetime:
|
|
"""Parse certificate date from crt.sh format."""
|
|
if not date_string:
|
|
raise ValueError("Empty date string")
|
|
|
|
try:
|
|
if date_string.endswith('Z'):
|
|
return datetime.fromisoformat(date_string[:-1]).replace(tzinfo=timezone.utc)
|
|
elif '+' in date_string or date_string.endswith('UTC'):
|
|
date_string = date_string.replace('UTC', '').strip()
|
|
if '+' in date_string:
|
|
date_string = date_string.split('+')[0]
|
|
return datetime.fromisoformat(date_string).replace(tzinfo=timezone.utc)
|
|
else:
|
|
return datetime.fromisoformat(date_string).replace(tzinfo=timezone.utc)
|
|
except Exception as e:
|
|
try:
|
|
return datetime.strptime(date_string[:19], "%Y-%m-%dT%H:%M:%S").replace(tzinfo=timezone.utc)
|
|
except Exception:
|
|
raise ValueError(f"Unable to parse date: {date_string}") from e
|
|
|
|
def _is_cert_valid(self, cert_data: Dict[str, Any]) -> bool:
|
|
"""Check if a certificate is currently valid based on its expiry date."""
|
|
try:
|
|
not_after_str = cert_data.get('not_after')
|
|
if not not_after_str:
|
|
return False
|
|
|
|
not_after_date = self._parse_certificate_date(not_after_str)
|
|
not_before_str = cert_data.get('not_before')
|
|
|
|
now = datetime.now(timezone.utc)
|
|
is_not_expired = not_after_date > now
|
|
|
|
if not_before_str:
|
|
not_before_date = self._parse_certificate_date(not_before_str)
|
|
is_not_before_valid = not_before_date <= now
|
|
return is_not_expired and is_not_before_valid
|
|
|
|
return is_not_expired
|
|
|
|
except Exception as e:
|
|
self.logger.logger.debug(f"Certificate validity check failed: {e}")
|
|
return False
|
|
|
|
def _extract_domains_from_certificate(self, cert_data: Dict[str, Any]) -> Set[str]:
|
|
"""Extract all domains from certificate data."""
|
|
domains = set()
|
|
|
|
# Extract from common name
|
|
common_name = cert_data.get('common_name', '')
|
|
if common_name:
|
|
cleaned_cn = self._clean_domain_name(common_name)
|
|
if cleaned_cn:
|
|
domains.update(cleaned_cn)
|
|
|
|
# Extract from name_value field (contains SANs)
|
|
name_value = cert_data.get('name_value', '')
|
|
if name_value:
|
|
for line in name_value.split('\n'):
|
|
cleaned_domains = self._clean_domain_name(line.strip())
|
|
if cleaned_domains:
|
|
domains.update(cleaned_domains)
|
|
|
|
return domains
|
|
|
|
def _clean_domain_name(self, domain_name: str) -> List[str]:
|
|
"""Clean and normalize domain name from certificate data."""
|
|
if not domain_name:
|
|
return []
|
|
|
|
domain = domain_name.strip().lower()
|
|
|
|
if domain.startswith(('http://', 'https://')):
|
|
domain = domain.split('://', 1)[1]
|
|
|
|
if '/' in domain:
|
|
domain = domain.split('/', 1)[0]
|
|
|
|
if ':' in domain and not domain.count(':') > 1:
|
|
domain = domain.split(':', 1)[0]
|
|
|
|
cleaned_domains = []
|
|
if domain.startswith('*.'):
|
|
cleaned_domains.append(domain)
|
|
cleaned_domains.append(domain[2:])
|
|
else:
|
|
cleaned_domains.append(domain)
|
|
|
|
final_domains = []
|
|
for d in cleaned_domains:
|
|
d = re.sub(r'[^\w\-\.]', '', d)
|
|
if d and not d.startswith(('.', '-')) and not d.endswith(('.', '-')):
|
|
final_domains.append(d)
|
|
|
|
return [d for d in final_domains if _is_valid_domain(d)]
|
|
|
|
def _find_shared_certificates(self, certs1: List[Dict[str, Any]], certs2: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
|
"""Find certificates that are shared between two domain certificate lists."""
|
|
shared = []
|
|
|
|
cert1_ids = set()
|
|
for cert in certs1:
|
|
cert_id = cert.get('certificate_id')
|
|
if cert_id and isinstance(cert_id, (int, str, float, bool, tuple)):
|
|
cert1_ids.add(cert_id)
|
|
|
|
for cert in certs2:
|
|
cert_id = cert.get('certificate_id')
|
|
if cert_id and isinstance(cert_id, (int, str, float, bool, tuple)):
|
|
if cert_id in cert1_ids:
|
|
shared.append(cert)
|
|
|
|
return shared
|
|
|
|
def _summarize_certificates(self, certificates: List[Dict[str, Any]]) -> Dict[str, Any]:
|
|
"""Create a summary of certificates for a domain."""
|
|
if not certificates:
|
|
return {
|
|
'total_certificates': 0,
|
|
'valid_certificates': 0,
|
|
'expired_certificates': 0,
|
|
'expires_soon_count': 0,
|
|
'unique_issuers': [],
|
|
'latest_certificate': None,
|
|
'has_valid_cert': False,
|
|
'certificate_details': []
|
|
}
|
|
|
|
valid_count = sum(1 for cert in certificates if cert.get('is_currently_valid'))
|
|
expired_count = len(certificates) - valid_count
|
|
expires_soon_count = sum(1 for cert in certificates if cert.get('expires_soon'))
|
|
|
|
unique_issuers = list(set(cert.get('issuer_name') for cert in certificates if cert.get('issuer_name')))
|
|
|
|
# Find the most recent certificate
|
|
latest_cert = None
|
|
latest_date = None
|
|
|
|
for cert in certificates:
|
|
try:
|
|
if cert.get('not_before'):
|
|
cert_date = self._parse_certificate_date(cert['not_before'])
|
|
if latest_date is None or cert_date > latest_date:
|
|
latest_date = cert_date
|
|
latest_cert = cert
|
|
except Exception:
|
|
continue
|
|
|
|
# Sort certificates by date for better display (newest first)
|
|
sorted_certificates = sorted(
|
|
certificates,
|
|
key=lambda c: self._get_certificate_sort_date(c),
|
|
reverse=True
|
|
)
|
|
|
|
return {
|
|
'total_certificates': len(certificates),
|
|
'valid_certificates': valid_count,
|
|
'expired_certificates': expired_count,
|
|
'expires_soon_count': expires_soon_count,
|
|
'unique_issuers': unique_issuers,
|
|
'latest_certificate': latest_cert,
|
|
'has_valid_cert': valid_count > 0,
|
|
'certificate_details': sorted_certificates
|
|
}
|
|
|
|
def _get_certificate_sort_date(self, cert: Dict[str, Any]) -> datetime:
|
|
"""Get a sortable date from certificate data for chronological ordering."""
|
|
try:
|
|
if cert.get('not_before'):
|
|
return self._parse_certificate_date(cert['not_before'])
|
|
|
|
if cert.get('entry_timestamp'):
|
|
return self._parse_certificate_date(cert['entry_timestamp'])
|
|
|
|
return datetime(1970, 1, 1, tzinfo=timezone.utc)
|
|
|
|
except Exception:
|
|
return datetime(1970, 1, 1, tzinfo=timezone.utc)
|
|
|
|
def _calculate_domain_relationship_confidence(self, domain1: str, domain2: str,
|
|
shared_certificates: List[Dict[str, Any]],
|
|
all_discovered_domains: Set[str]) -> float:
|
|
"""Calculate confidence score for domain relationship based on various factors."""
|
|
base_confidence = 0.9
|
|
|
|
# Adjust confidence based on domain relationship context
|
|
relationship_context = self._determine_relationship_context(domain2, domain1)
|
|
|
|
if relationship_context == 'exact_match':
|
|
context_bonus = 0.0
|
|
elif relationship_context == 'subdomain':
|
|
context_bonus = 0.1
|
|
elif relationship_context == 'parent_domain':
|
|
context_bonus = 0.05
|
|
else:
|
|
context_bonus = 0.0
|
|
|
|
# Adjust confidence based on shared certificates
|
|
if shared_certificates:
|
|
shared_count = len(shared_certificates)
|
|
if shared_count >= 3:
|
|
shared_bonus = 0.1
|
|
elif shared_count >= 2:
|
|
shared_bonus = 0.05
|
|
else:
|
|
shared_bonus = 0.02
|
|
|
|
valid_shared = sum(1 for cert in shared_certificates if cert.get('is_currently_valid'))
|
|
if valid_shared > 0:
|
|
validity_bonus = 0.05
|
|
else:
|
|
validity_bonus = 0.0
|
|
else:
|
|
shared_bonus = 0.0
|
|
validity_bonus = 0.0
|
|
|
|
# Adjust confidence based on certificate issuer reputation
|
|
issuer_bonus = 0.0
|
|
if shared_certificates:
|
|
for cert in shared_certificates:
|
|
issuer = cert.get('issuer_name', '').lower()
|
|
if any(trusted_ca in issuer for trusted_ca in ['let\'s encrypt', 'digicert', 'sectigo', 'globalsign']):
|
|
issuer_bonus = max(issuer_bonus, 0.03)
|
|
break
|
|
|
|
final_confidence = base_confidence + context_bonus + shared_bonus + validity_bonus + issuer_bonus
|
|
return max(0.1, min(1.0, final_confidence))
|
|
|
|
def _determine_relationship_context(self, cert_domain: str, query_domain: str) -> str:
|
|
"""Determine the context of the relationship between certificate domain and query domain."""
|
|
if cert_domain == query_domain:
|
|
return 'exact_match'
|
|
elif cert_domain.endswith(f'.{query_domain}'):
|
|
return 'subdomain'
|
|
elif query_domain.endswith(f'.{cert_domain}'):
|
|
return 'parent_domain'
|
|
else:
|
|
return 'related_domain' |