# DNScope/providers/shodan_provider.py import json from pathlib import Path from typing import Dict, Any from datetime import datetime, timezone import requests from .base_provider import BaseProvider from core.provider_result import ProviderResult from utils.helpers import _is_valid_ip, _is_valid_domain, get_ip_version, normalize_ip class ShodanProvider(BaseProvider): """ Provider for querying Shodan API for IP address information. Now returns standardized ProviderResult objects with caching support for IPv4 and IPv6. UPDATED: Enhanced with last_seen timestamp for time-based edge coloring. """ def __init__(self, name=None, session_config=None): """Initialize Shodan provider with session-specific configuration.""" super().__init__( name="shodan", rate_limit=60, timeout=30, session_config=session_config ) self.base_url = "https://api.shodan.io" self.api_key = self.config.get_api_key('shodan') # FIXED: Don't fail initialization on connection issues - defer to actual usage self._connection_tested = False self._connection_works = False # Initialize cache directory self.cache_dir = Path('cache') / 'shodan' self.cache_dir.mkdir(parents=True, exist_ok=True) def _check_api_connection(self) -> bool: """ FIXED: Lazy connection checking - only test when actually needed. Don't block provider initialization on network issues. """ if self._connection_tested: return self._connection_works if not self.api_key: self._connection_tested = True self._connection_works = False return False try: print(f"Testing Shodan API connection with key: {self.api_key[:8]}...") response = self.session.get(f"{self.base_url}/api-info?key={self.api_key}", timeout=5) self._connection_works = response.status_code == 200 print(f"Shodan API test result: {response.status_code} - {'Success' if self._connection_works else 'Failed'}") except requests.exceptions.RequestException as e: print(f"Shodan API connection test failed: {e}") self._connection_works = False finally: self._connection_tested = True return self._connection_works def is_available(self) -> bool: """ FIXED: Check if Shodan provider is available based on API key presence. Don't require successful connection test during initialization. """ has_api_key = self.api_key is not None and len(self.api_key.strip()) > 0 if not has_api_key: return False # FIXED: Only test connection on first actual usage, not during initialization return True def get_name(self) -> str: """Return the provider name.""" return "shodan" def get_display_name(self) -> str: """Return the provider display name for the UI.""" return "Shodan" def requires_api_key(self) -> bool: """Return True if the provider requires an API key.""" return True def get_eligibility(self) -> Dict[str, bool]: """Return a dictionary indicating if the provider can query domains and/or IPs.""" return {'domains': False, 'ips': True} def _get_cache_file_path(self, ip: str) -> Path: """ Generate cache file path for an IP address (IPv4 or IPv6). IPv6 addresses contain colons which are replaced with underscores for filesystem safety. """ # Normalize the IP address first to ensure consistent caching normalized_ip = normalize_ip(ip) if not normalized_ip: # Fallback for invalid IPs safe_ip = ip.replace('.', '_').replace(':', '_') else: # Replace problematic characters for both IPv4 and IPv6 safe_ip = normalized_ip.replace('.', '_').replace(':', '_') return self.cache_dir / f"{safe_ip}.json" def _get_cache_status(self, cache_file_path: Path) -> str: """ Check cache status for an IP. Returns: 'not_found', 'fresh', or 'stale' """ if not cache_file_path.exists(): return "not_found" try: with open(cache_file_path, 'r') as f: cache_data = json.load(f) last_query_str = cache_data.get("last_upstream_query") if not last_query_str: return "stale" last_query = datetime.fromisoformat(last_query_str.replace('Z', '+00:00')) hours_since_query = (datetime.now(timezone.utc) - last_query).total_seconds() / 3600 cache_timeout = self.config.cache_timeout_hours if hours_since_query < cache_timeout: return "fresh" else: return "stale" except (json.JSONDecodeError, ValueError, KeyError): return "stale" def query_domain(self, domain: str) -> ProviderResult: """ Shodan does not support domain queries. This method returns an empty result. """ return ProviderResult() def query_ip(self, ip: str) -> ProviderResult: """ Query Shodan for information about an IP address (IPv4 or IPv6), with caching of processed data. FIXED: Proper 404 handling to prevent unnecessary retries. UPDATED: Enhanced with last_seen timestamp extraction for time-based edge coloring. Args: ip: IP address to investigate (IPv4 or IPv6) Returns: ProviderResult containing discovered relationships and attributes Raises: Exception: For temporary failures that should be retried (timeouts, 502/503 errors, connection issues) """ if not _is_valid_ip(ip): return ProviderResult() # Test connection only when actually making requests if not self._check_api_connection(): print(f"Shodan API not available for {ip} - API key: {'present' if self.api_key else 'missing'}") return ProviderResult() # Normalize IP address for consistent processing normalized_ip = normalize_ip(ip) if not normalized_ip: return ProviderResult() cache_file = self._get_cache_file_path(normalized_ip) cache_status = self._get_cache_status(cache_file) if cache_status == "fresh": self.logger.logger.debug(f"Using fresh cache for Shodan query: {normalized_ip}") return self._load_from_cache(cache_file) # Need to query API self.logger.logger.debug(f"Querying Shodan API for: {normalized_ip}") url = f"{self.base_url}/shodan/host/{normalized_ip}" params = {'key': self.api_key} try: response = self.make_request(url, method="GET", params=params, target_indicator=normalized_ip) if not response: self.logger.logger.warning(f"Shodan API unreachable for {normalized_ip} - network failure") if cache_status == "stale": self.logger.logger.info(f"Using stale cache for {normalized_ip} due to network failure") return self._load_from_cache(cache_file) else: # FIXED: Treat network failures as "no information" rather than retryable errors self.logger.logger.info(f"No Shodan data available for {normalized_ip} due to network failure") result = ProviderResult() # Empty result network_failure_data = {'shodan_status': 'network_unreachable', 'error': 'API unreachable'} self._save_to_cache(cache_file, result, network_failure_data) return result # FIXED: Handle different status codes more precisely if response.status_code == 200: self.logger.logger.debug(f"Shodan returned data for {normalized_ip}") try: data = response.json() result = self._process_shodan_data(normalized_ip, data) self._save_to_cache(cache_file, result, data) return result except json.JSONDecodeError as e: self.logger.logger.error(f"Invalid JSON response from Shodan for {normalized_ip}: {e}") if cache_status == "stale": return self._load_from_cache(cache_file) else: raise requests.exceptions.RequestException("Invalid JSON response from Shodan - should retry") elif response.status_code == 404: # FIXED: 404 = "no information available" - successful but empty result, don't retry self.logger.logger.debug(f"Shodan has no information for {normalized_ip} (404)") result = ProviderResult() # Empty but successful result # Cache the empty result to avoid repeated queries empty_data = {'shodan_status': 'no_information', 'status_code': 404} self._save_to_cache(cache_file, result, empty_data) return result elif response.status_code in [401, 403]: # Authentication/authorization errors - permanent failures, don't retry self.logger.logger.error(f"Shodan API authentication failed for {normalized_ip} (HTTP {response.status_code})") return ProviderResult() # Empty result, don't retry elif response.status_code == 429: # Rate limiting - should be handled by rate limiter, but if we get here, retry self.logger.logger.warning(f"Shodan API rate limited for {normalized_ip} (HTTP {response.status_code})") if cache_status == "stale": self.logger.logger.info(f"Using stale cache for {normalized_ip} due to rate limiting") return self._load_from_cache(cache_file) else: raise requests.exceptions.RequestException(f"Shodan API rate limited (HTTP {response.status_code}) - should retry") elif response.status_code in [500, 502, 503, 504]: # Server errors - temporary failures that should be retried self.logger.logger.warning(f"Shodan API server error for {normalized_ip} (HTTP {response.status_code})") if cache_status == "stale": self.logger.logger.info(f"Using stale cache for {normalized_ip} due to server error") return self._load_from_cache(cache_file) else: raise requests.exceptions.RequestException(f"Shodan API server error (HTTP {response.status_code}) - should retry") else: # FIXED: Other HTTP status codes - treat as no information available, don't retry self.logger.logger.info(f"Shodan returned status {response.status_code} for {normalized_ip} - treating as no information") result = ProviderResult() # Empty result no_info_data = {'shodan_status': 'no_information', 'status_code': response.status_code} self._save_to_cache(cache_file, result, no_info_data) return result except requests.exceptions.Timeout: # Timeout errors - should be retried self.logger.logger.warning(f"Shodan API timeout for {normalized_ip}") if cache_status == "stale": self.logger.logger.info(f"Using stale cache for {normalized_ip} due to timeout") return self._load_from_cache(cache_file) else: raise # Re-raise timeout for retry except requests.exceptions.ConnectionError: # Connection errors - should be retried self.logger.logger.warning(f"Shodan API connection error for {normalized_ip}") if cache_status == "stale": self.logger.logger.info(f"Using stale cache for {normalized_ip} due to connection error") return self._load_from_cache(cache_file) else: raise # Re-raise connection error for retry except json.JSONDecodeError: # JSON parsing error - treat as temporary failure self.logger.logger.error(f"Invalid JSON response from Shodan for {normalized_ip}") if cache_status == "stale": self.logger.logger.info(f"Using stale cache for {normalized_ip} due to JSON parsing error") return self._load_from_cache(cache_file) else: raise requests.exceptions.RequestException("Invalid JSON response from Shodan - should retry") # FIXED: Remove the generic RequestException handler that was causing 404s to retry # Now only specific exceptions that should be retried are re-raised except Exception as e: # FIXED: Unexpected exceptions - log but treat as no information available, don't retry self.logger.logger.warning(f"Unexpected exception in Shodan query for {normalized_ip}: {e}") result = ProviderResult() # Empty result error_data = {'shodan_status': 'error', 'error': str(e)} self._save_to_cache(cache_file, result, error_data) return result def _load_from_cache(self, cache_file_path: Path) -> ProviderResult: """Load processed Shodan data from a cache file.""" try: with open(cache_file_path, 'r') as f: cache_content = json.load(f) result = ProviderResult() # Reconstruct relationships for rel_data in cache_content.get("relationships", []): result.add_relationship( source_node=rel_data["source_node"], target_node=rel_data["target_node"], relationship_type=rel_data["relationship_type"], provider=rel_data["provider"], raw_data=rel_data.get("raw_data", {}) ) # Reconstruct attributes for attr_data in cache_content.get("attributes", []): result.add_attribute( target_node=attr_data["target_node"], name=attr_data["name"], value=attr_data["value"], attr_type=attr_data["type"], provider=attr_data["provider"], metadata=attr_data.get("metadata", {}) ) return result except (json.JSONDecodeError, FileNotFoundError, KeyError): return ProviderResult() def _save_to_cache(self, cache_file_path: Path, result: ProviderResult, raw_data: Dict[str, Any]) -> None: """Save processed Shodan data to a cache file.""" try: cache_data = { "last_upstream_query": datetime.now(timezone.utc).isoformat(), "raw_data": raw_data, # Preserve original for forensic purposes "relationships": [ { "source_node": rel.source_node, "target_node": rel.target_node, "relationship_type": rel.relationship_type, "provider": rel.provider, "raw_data": rel.raw_data } for rel in result.relationships ], "attributes": [ { "target_node": attr.target_node, "name": attr.name, "value": attr.value, "type": attr.type, "provider": attr.provider, "metadata": attr.metadata } for attr in result.attributes ] } with open(cache_file_path, 'w') as f: json.dump(cache_data, f, separators=(',', ':'), default=str) except Exception as e: self.logger.logger.warning(f"Failed to save Shodan cache for {cache_file_path.name}: {e}") def _process_shodan_data(self, ip: str, data: Dict[str, Any]) -> ProviderResult: """ VERIFIED: Process Shodan data creating ISP nodes with ASN attributes and proper relationships. Enhanced to include IP version information for IPv6 addresses. UPDATED: Enhanced with last_seen timestamp for time-based edge coloring. """ result = ProviderResult() # Determine IP version for metadata ip_version = get_ip_version(ip) # Extract last_seen timestamp for time-based edge coloring last_seen = data.get('last_seen') # VERIFIED: Extract ISP information and create proper ISP node with ASN isp_name = data.get('org') asn_value = data.get('asn') if isp_name and asn_value: # Enhanced raw_data with last_seen timestamp raw_data = { 'asn': asn_value, 'shodan_org': isp_name, 'ip_version': ip_version } # Add last_seen timestamp if available if last_seen: raw_data['last_seen'] = last_seen raw_data['relevance_timestamp'] = last_seen # Standardized field for time-based coloring # Create relationship from IP to ISP result.add_relationship( source_node=ip, target_node=isp_name, relationship_type='shodan_isp', provider=self.name, raw_data=raw_data ) # Add ASN as attribute to the ISP node result.add_attribute( target_node=isp_name, name='asn', value=asn_value, attr_type='isp_info', provider=self.name, metadata={'description': 'Autonomous System Number from Shodan', 'ip_version': ip_version} ) # Also add organization name as attribute to ISP node for completeness result.add_attribute( target_node=isp_name, name='organization_name', value=isp_name, attr_type='isp_info', provider=self.name, metadata={'description': 'Organization name from Shodan', 'ip_version': ip_version} ) # Process hostnames (reverse DNS) for key, value in data.items(): if key == 'hostnames': for hostname in value: if _is_valid_domain(hostname): # Use appropriate relationship type based on IP version if ip_version == 6: relationship_type = 'shodan_aaaa_record' else: relationship_type = 'shodan_a_record' # Enhanced raw_data with last_seen timestamp hostname_raw_data = {**data, 'ip_version': ip_version} if last_seen: hostname_raw_data['last_seen'] = last_seen hostname_raw_data['relevance_timestamp'] = last_seen result.add_relationship( source_node=ip, target_node=hostname, relationship_type=relationship_type, provider=self.name, raw_data=hostname_raw_data ) self.log_relationship_discovery( source_node=ip, target_node=hostname, relationship_type=relationship_type, raw_data=hostname_raw_data, discovery_method=f"shodan_host_lookup_ipv{ip_version}" ) elif key == 'ports': # Add open ports as attributes to the IP for port in value: result.add_attribute( target_node=ip, name='shodan_open_port', value=port, attr_type='shodan_network_info', provider=self.name, metadata={'ip_version': ip_version} ) elif isinstance(value, (str, int, float, bool)) and value is not None: # Add other Shodan fields as IP attributes (keep raw field names) result.add_attribute( target_node=ip, name=key, # Raw field name from Shodan API value=value, attr_type='shodan_info', provider=self.name, metadata={'ip_version': ip_version} ) return result