dnscope/providers/shodan_provider.py
2025-09-18 21:32:26 +02:00

420 lines
19 KiB
Python

# dnsrecon/providers/shodan_provider.py
import json
from pathlib import Path
from typing import Dict, Any
from datetime import datetime, timezone
import requests
from .base_provider import BaseProvider
from core.provider_result import ProviderResult
from utils.helpers import _is_valid_ip, _is_valid_domain, get_ip_version, normalize_ip
class ShodanProvider(BaseProvider):
"""
Provider for querying Shodan API for IP address information.
Now returns standardized ProviderResult objects with caching support for IPv4 and IPv6.
"""
def __init__(self, name=None, session_config=None):
    """Set up the Shodan provider for the current session.

    Registers with the base provider (fixed name/rate limit/timeout),
    resolves the session API key, prepares the on-disk cache directory,
    and probes the API once so later availability checks are cheap.
    """
    super().__init__(
        name="shodan",
        rate_limit=60,
        timeout=30,
        session_config=session_config
    )
    self.base_url = "https://api.shodan.io"
    self.api_key = self.config.get_api_key('shodan')
    # Processed results are cached on disk under cache/shodan/.
    self.cache_dir = Path('cache') / 'shodan'
    self.cache_dir.mkdir(parents=True, exist_ok=True)
    # One-time reachability probe; is_available() reuses this flag.
    self._is_active = self._check_api_connection()
def _check_api_connection(self) -> bool:
    """Return True if the Shodan /api-info endpoint answers with HTTP 200.

    Fixes over the previous version:
    - the API key is sent via ``params`` instead of being interpolated
      into the URL, so it does not leak into logged request URLs;
    - "reachable" is only logged after the status code is confirmed 200
      (it used to be logged unconditionally, and with a typo).

    Any network-level failure is treated as "unreachable", never raised.
    """
    if not self.api_key:
        # No key configured: the provider cannot be used at all.
        return False
    try:
        response = self.session.get(
            f"{self.base_url}/api-info",
            params={'key': self.api_key},
            timeout=5,
        )
    except requests.exceptions.RequestException:
        return False
    reachable = response.status_code == 200
    if reachable:
        self.logger.logger.debug("Shodan is reachable")
    return reachable
def is_available(self) -> bool:
    """True when the startup probe succeeded and a non-blank API key is set."""
    if not self._is_active:
        return False
    key = self.api_key
    return key is not None and len(key.strip()) > 0
def get_name(self) -> str:
    """Internal identifier under which this provider is registered."""
    return "shodan"
def get_display_name(self) -> str:
    """Human-readable label shown in the UI."""
    return "Shodan"
def requires_api_key(self) -> bool:
    """Shodan cannot be queried anonymously, so a key is mandatory."""
    return True
def get_eligibility(self) -> Dict[str, bool]:
    """Target eligibility map: this provider handles IPs only, not domains."""
    return {'domains': False, 'ips': True}
def _get_cache_file_path(self, ip: str) -> Path:
    """
    Build the on-disk cache path for an IPv4 or IPv6 address.

    The address is normalized first so equivalent spellings share one
    cache entry; dots and colons are then mapped to underscores so the
    result is filesystem-safe on every platform.
    """
    canonical = normalize_ip(ip)
    # Fall back to the raw input when normalization rejects the address.
    basis = canonical if canonical else ip
    safe_name = basis.translate(str.maketrans('.:', '__'))
    return self.cache_dir / f"{safe_name}.json"
def _get_cache_status(self, cache_file_path: Path) -> str:
    """
    Classify the cache entry for an IP.

    Returns:
        'not_found' when no cache file exists, 'fresh' when the last
        upstream query is within the configured timeout window, and
        'stale' otherwise (including corrupt or unreadable files).
    """
    if not cache_file_path.exists():
        return "not_found"
    try:
        with open(cache_file_path, 'r') as f:
            cache_data = json.load(f)
        last_query_str = cache_data.get("last_upstream_query")
        if not last_query_str:
            return "stale"
        # Timestamps are stored as ISO-8601 by _save_to_cache; tolerate
        # a trailing 'Z' suffix for UTC.
        last_query = datetime.fromisoformat(last_query_str.replace('Z', '+00:00'))
        hours_since_query = (datetime.now(timezone.utc) - last_query).total_seconds() / 3600
        cache_timeout = self.config.cache_timeout_hours
        return "fresh" if hours_since_query < cache_timeout else "stale"
    except (json.JSONDecodeError, ValueError, KeyError, OSError):
        # OSError covers the race where the file vanishes (or becomes
        # unreadable) between the exists() check above and open() —
        # previously that would have propagated and crashed the query.
        return "stale"
def query_domain(self, domain: str) -> ProviderResult:
    """Domains are out of scope for Shodan; always return an empty result."""
    return ProviderResult()
def query_ip(self, ip: str) -> ProviderResult:
    """
    Query Shodan for information about an IP address (IPv4 or IPv6), with caching of processed data.

    Flow: validate/normalize the IP, serve a fresh cache entry when one
    exists, otherwise hit the API. On any retryable failure (timeouts,
    connection errors, 429/5xx, unexpected statuses) a stale cache entry
    is served if present; without one, an exception is raised so the
    caller's retry machinery can re-attempt the query.

    Args:
        ip: IP address to investigate (IPv4 or IPv6)
    Returns:
        ProviderResult containing discovered relationships and attributes
    Raises:
        Exception: For temporary failures that should be retried (timeouts, 502/503 errors, connection issues)
    """
    # Invalid targets or an unusable provider yield an empty result, not an error.
    if not _is_valid_ip(ip) or not self.is_available():
        return ProviderResult()
    # Normalize IP address for consistent processing (and consistent cache keys).
    normalized_ip = normalize_ip(ip)
    if not normalized_ip:
        return ProviderResult()
    cache_file = self._get_cache_file_path(normalized_ip)
    # cache_status is reused below: every failure path falls back to a
    # stale entry when one exists instead of raising.
    cache_status = self._get_cache_status(cache_file)
    if cache_status == "fresh":
        self.logger.logger.debug(f"Using fresh cache for Shodan query: {normalized_ip}")
        return self._load_from_cache(cache_file)
    # Need to query API
    self.logger.logger.debug(f"Querying Shodan API for: {normalized_ip}")
    url = f"{self.base_url}/shodan/host/{normalized_ip}"
    params = {'key': self.api_key}
    try:
        response = self.make_request(url, method="GET", params=params, target_indicator=normalized_ip)
        if not response:
            # Connection failed - use stale cache if available, otherwise retry
            if cache_status == "stale":
                self.logger.logger.info(f"Using stale cache for {normalized_ip} due to connection failure")
                return self._load_from_cache(cache_file)
            else:
                raise requests.exceptions.RequestException("No response from Shodan API - should retry")
        if response.status_code == 200:
            # Success: process the payload and persist both processed and raw data.
            self.logger.logger.debug(f"Shodan returned data for {normalized_ip}")
            data = response.json()
            result = self._process_shodan_data(normalized_ip, data)
            self._save_to_cache(cache_file, result, data)
            return result
        elif response.status_code == 404:
            # 404 = "no information available" - successful but empty result, don't retry
            self.logger.logger.debug(f"Shodan has no information for {normalized_ip} (404)")
            result = ProviderResult()  # Empty but successful result
            # Cache the empty result to avoid repeated queries
            self._save_to_cache(cache_file, result, {'shodan_status': 'no_information', 'status_code': 404})
            return result
        elif response.status_code in [401, 403]:
            # Authentication/authorization errors - permanent failures, don't retry
            self.logger.logger.error(f"Shodan API authentication failed for {normalized_ip} (HTTP {response.status_code})")
            return ProviderResult()  # Empty result, don't retry
        elif response.status_code in [429]:
            # Rate limiting - should be handled by rate limiter, but if we get here, retry
            self.logger.logger.warning(f"Shodan API rate limited for {normalized_ip} (HTTP {response.status_code})")
            if cache_status == "stale":
                self.logger.logger.info(f"Using stale cache for {normalized_ip} due to rate limiting")
                return self._load_from_cache(cache_file)
            else:
                raise requests.exceptions.RequestException(f"Shodan API rate limited (HTTP {response.status_code}) - should retry")
        elif response.status_code in [500, 502, 503, 504]:
            # Server errors - temporary failures that should be retried
            self.logger.logger.warning(f"Shodan API server error for {normalized_ip} (HTTP {response.status_code})")
            if cache_status == "stale":
                self.logger.logger.info(f"Using stale cache for {normalized_ip} due to server error")
                return self._load_from_cache(cache_file)
            else:
                raise requests.exceptions.RequestException(f"Shodan API server error (HTTP {response.status_code}) - should retry")
        else:
            # Other HTTP error codes - treat as temporary failures
            self.logger.logger.warning(f"Shodan API returned unexpected status {response.status_code} for {normalized_ip}")
            if cache_status == "stale":
                self.logger.logger.info(f"Using stale cache for {normalized_ip} due to unexpected API error")
                return self._load_from_cache(cache_file)
            else:
                raise requests.exceptions.RequestException(f"Shodan API error (HTTP {response.status_code}) - should retry")
    # NOTE: the order of these handlers matters — the narrower requests
    # exceptions must precede requests.exceptions.RequestException.
    except requests.exceptions.Timeout:
        # Timeout errors - should be retried
        self.logger.logger.warning(f"Shodan API timeout for {normalized_ip}")
        if cache_status == "stale":
            self.logger.logger.info(f"Using stale cache for {normalized_ip} due to timeout")
            return self._load_from_cache(cache_file)
        else:
            raise  # Re-raise timeout for retry
    except requests.exceptions.ConnectionError:
        # Connection errors - should be retried
        self.logger.logger.warning(f"Shodan API connection error for {normalized_ip}")
        if cache_status == "stale":
            self.logger.logger.info(f"Using stale cache for {normalized_ip} due to connection error")
            return self._load_from_cache(cache_file)
        else:
            raise  # Re-raise connection error for retry
    except requests.exceptions.RequestException:
        # Other request exceptions - should be retried
        self.logger.logger.warning(f"Shodan API request exception for {normalized_ip}")
        if cache_status == "stale":
            self.logger.logger.info(f"Using stale cache for {normalized_ip} due to request exception")
            return self._load_from_cache(cache_file)
        else:
            raise  # Re-raise request exception for retry
    except json.JSONDecodeError:
        # JSON parsing error on 200 response - treat as temporary failure
        # NOTE(review): in requests>=2.27, response.json() raises
        # requests.exceptions.JSONDecodeError, which also subclasses
        # RequestException — the handler above may catch it first, so this
        # branch might be unreachable for response.json() errors; confirm
        # against the pinned requests version.
        self.logger.logger.error(f"Invalid JSON response from Shodan for {normalized_ip}")
        if cache_status == "stale":
            self.logger.logger.info(f"Using stale cache for {normalized_ip} due to JSON parsing error")
            return self._load_from_cache(cache_file)
        else:
            raise requests.exceptions.RequestException("Invalid JSON response from Shodan - should retry")
    except Exception as e:
        # Unexpected exceptions - log and treat as temporary failures
        self.logger.logger.error(f"Unexpected exception in Shodan query for {normalized_ip}: {e}")
        if cache_status == "stale":
            self.logger.logger.info(f"Using stale cache for {normalized_ip} due to unexpected exception")
            return self._load_from_cache(cache_file)
        else:
            raise requests.exceptions.RequestException(f"Unexpected error in Shodan query: {e}") from e
def _load_from_cache(self, cache_file_path: Path) -> ProviderResult:
    """Rebuild a ProviderResult from a previously written cache file.

    A missing, corrupt, or structurally incomplete cache file yields an
    empty result rather than an exception.
    """
    result = ProviderResult()
    try:
        with open(cache_file_path, 'r') as f:
            payload = json.load(f)
        # Replay the stored relationships back into the result object.
        for rel in payload.get("relationships", []):
            result.add_relationship(
                source_node=rel["source_node"],
                target_node=rel["target_node"],
                relationship_type=rel["relationship_type"],
                provider=rel["provider"],
                confidence=rel["confidence"],
                raw_data=rel.get("raw_data", {})
            )
        # Then the stored attributes.
        for attr in payload.get("attributes", []):
            result.add_attribute(
                target_node=attr["target_node"],
                name=attr["name"],
                value=attr["value"],
                attr_type=attr["type"],
                provider=attr["provider"],
                confidence=attr["confidence"],
                metadata=attr.get("metadata", {})
            )
    except (json.JSONDecodeError, FileNotFoundError, KeyError):
        return ProviderResult()
    return result
def _save_to_cache(self, cache_file_path: Path, result: ProviderResult, raw_data: Dict[str, Any]) -> None:
    """Persist a processed ProviderResult (plus the raw API payload) to disk.

    The raw payload is stored alongside the processed data for forensic
    review. Failures are logged and swallowed: caching is best-effort and
    must never break the query itself.
    """
    try:
        serialized_rels = [
            {
                "source_node": rel.source_node,
                "target_node": rel.target_node,
                "relationship_type": rel.relationship_type,
                "confidence": rel.confidence,
                "provider": rel.provider,
                "raw_data": rel.raw_data,
            }
            for rel in result.relationships
        ]
        serialized_attrs = [
            {
                "target_node": attr.target_node,
                "name": attr.name,
                "value": attr.value,
                "type": attr.type,
                "provider": attr.provider,
                "confidence": attr.confidence,
                "metadata": attr.metadata,
            }
            for attr in result.attributes
        ]
        payload = {
            # Freshness timestamp consumed by _get_cache_status.
            "last_upstream_query": datetime.now(timezone.utc).isoformat(),
            "raw_data": raw_data,  # Preserve original for forensic purposes
            "relationships": serialized_rels,
            "attributes": serialized_attrs,
        }
        with open(cache_file_path, 'w') as f:
            # default=str stringifies non-JSON-native values rather than failing.
            json.dump(payload, f, separators=(',', ':'), default=str)
    except Exception as e:
        self.logger.logger.warning(f"Failed to save Shodan cache for {cache_file_path.name}: {e}")
def _process_shodan_data(self, ip: str, data: Dict[str, Any]) -> ProviderResult:
    """
    Translate a raw Shodan host record into relationships and attributes.

    Produces: an ISP node (carrying its ASN) linked to the IP when both
    'org' and 'asn' are present, hostname relationships typed per IP
    version, one attribute per open port, and a pass-through attribute
    for every other scalar field in the record.
    """
    result = ProviderResult()
    ip_version = get_ip_version(ip)
    # Hostname links are labelled by address family up front.
    hostname_rel = 'shodan_aaaa_record' if ip_version == 6 else 'shodan_a_record'

    org = data.get('org')
    asn = data.get('asn')
    if org and asn:
        # IP -> ISP relationship, with the ASN recorded on the ISP node.
        result.add_relationship(
            source_node=ip,
            target_node=org,
            relationship_type='shodan_isp',
            provider=self.name,
            confidence=0.9,
            raw_data={'asn': asn, 'shodan_org': org, 'ip_version': ip_version}
        )
        result.add_attribute(
            target_node=org,
            name='asn',
            value=asn,
            attr_type='isp_info',
            provider=self.name,
            confidence=0.9,
            metadata={'description': 'Autonomous System Number from Shodan', 'ip_version': ip_version}
        )
        # Organization name is duplicated onto the ISP node for completeness.
        result.add_attribute(
            target_node=org,
            name='organization_name',
            value=org,
            attr_type='isp_info',
            provider=self.name,
            confidence=0.9,
            metadata={'description': 'Organization name from Shodan', 'ip_version': ip_version}
        )

    # Walk the record in its native order so output ordering is stable.
    for field, field_value in data.items():
        if field == 'hostnames':
            # Reverse-DNS names become IP -> hostname relationships.
            for hostname in field_value:
                if not _is_valid_domain(hostname):
                    continue
                result.add_relationship(
                    source_node=ip,
                    target_node=hostname,
                    relationship_type=hostname_rel,
                    provider=self.name,
                    confidence=0.8,
                    raw_data={**data, 'ip_version': ip_version}
                )
                self.log_relationship_discovery(
                    source_node=ip,
                    target_node=hostname,
                    relationship_type=hostname_rel,
                    confidence_score=0.8,
                    raw_data={**data, 'ip_version': ip_version},
                    discovery_method=f"shodan_host_lookup_ipv{ip_version}"
                )
        elif field == 'ports':
            # Each open port becomes its own attribute on the IP node.
            for port in field_value:
                result.add_attribute(
                    target_node=ip,
                    name='shodan_open_port',
                    value=port,
                    attr_type='shodan_network_info',
                    provider=self.name,
                    confidence=0.9,
                    metadata={'ip_version': ip_version}
                )
        elif isinstance(field_value, (str, int, float, bool)) and field_value is not None:
            # Remaining scalar fields pass through under their raw API names.
            result.add_attribute(
                target_node=ip,
                name=field,
                value=field_value,
                attr_type='shodan_info',
                provider=self.name,
                confidence=0.9,
                metadata={'ip_version': ip_version}
            )
    return result