misp_analyzer.py updated
This commit is contained in:
parent ce289041ba
commit bff763e4bb
misp_analyzer.py (155 changed lines)
@@ -32,8 +32,7 @@ class MispAnalyzer(interface.BaseAnalyzer):
         self._attr = kwargs.get("attr")
         self._timesketch_attr = kwargs.get("timesketch_attr")

-        # Simple configuration for reliability
-        self.include_community = kwargs.get("include_community", False)  # Default to false for reliability
+        self.include_community = kwargs.get("include_community", False)
         self.chunk_size = kwargs.get("chunk_size", 1000)  # Process in chunks
         self.max_retries = kwargs.get("max_retries", 2)  # Minimal retries
         self.request_delay = kwargs.get("request_delay", 0.5)  # Small delay between requests
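Note (illustrative, not part of the commit): the four tunables read above via kwargs.get() are summarised below with the defaults from this diff. How the analyzer is actually instantiated is handled by the Timesketch analyzer framework and is not shown here.

# Illustrative defaults for the analyzer's tunables (values taken from the diff above).
analyzer_kwargs = {
    "include_community": False,  # off by default: search own-org events only
    "chunk_size": 1000,          # process events in chunks of this size
    "max_retries": 2,            # minimal retry count for timed-out MISP calls
    "request_delay": 0.5,        # seconds to sleep between MISP API requests
}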
@@ -50,12 +49,13 @@ class MispAnalyzer(interface.BaseAnalyzer):
             'indicators_found': 0,
             'api_calls': 0,
             'api_timeouts': 0,
-            'events_marked': 0
+            'events_marked': 0,
+            'community_hits': 0,
+            'own_org_hits': 0
         }

     @staticmethod
     def get_kwargs():
-        """Get kwargs for the analyzer - keeping original working structure."""
         to_query = [
             {
                 "query_string": "md5_hash:*",
@@ -91,7 +91,6 @@ class MispAnalyzer(interface.BaseAnalyzer):
         return to_query

     def _is_valid_ip(self, ip_str):
-        """Simple IP validation - keeping original working version."""
         try:
             import ipaddress
             ip_str = ip_str.strip()
@@ -119,17 +118,34 @@ class MispAnalyzer(interface.BaseAnalyzer):
             return False

     def query_misp_single(self, value, attr, retry_count=0):
-        """Query MISP for a single value - enhanced with minimal retry logic."""
+        """Query MISP for a single value - enhanced with community search."""
         if value in self.failed_indicators:
             return []

         try:
-            # Build basic payload
-            payload = {"returnFormat": "json", "value": value, "type": attr}
+            # Build enhanced payload for community search
+            payload = {
+                "returnFormat": "json",
+                "value": value,
+                "type": attr,
+                "enforceWarninglist": False,  # Don't filter known-good indicators
+                "includeEventTags": True,  # Include event tags for context
+                "includeContext": True,  # Include context information
+            }

-            # Add community search if enabled
+            # community search - include ALL distribution levels
             if self.include_community:
-                payload["distribution"] = [0, 1, 2]  # Own, community, connected
+                payload.update({
+                    "distribution": [0, 1, 2, 3, 5],  # Own, community, connected, all, inherit
+                    "includeEventUuid": True,  # Include event UUIDs
+                    "includeCorrelations": True,  # Include correlations
+                    "includeDecayScore": False,  # Skip decay for speed
+                    "includeFullModel": False,  # Skip full model for speed
+                })
+                logger.debug(f"Community search enabled for {value} ({attr})")
+            else:
+                payload["distribution"] = [0]  # Own org only
+                logger.debug(f"Own org search only for {value} ({attr})")

             self.stats['api_calls'] += 1
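Note (illustrative, not part of the commit): the numeric values in the "distribution" field follow MISP's standard distribution levels. The sketch below maps the codes this analyzer requests; level 4 (sharing group) is the only one the community search does not ask for.

# MISP distribution levels, as referenced by the payload above.
DISTRIBUTION_LEVELS = {
    0: "Your organisation only",
    1: "This community only",
    2: "Connected communities",
    3: "All communities",
    4: "Sharing group (not requested by this analyzer)",
    5: "Inherit event",
}

community_search = [0, 1, 2, 3, 5]  # used when include_community=True
own_org_search = [0]                # used when include_community=False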
@@ -138,7 +154,7 @@ class MispAnalyzer(interface.BaseAnalyzer):
                 json=payload,
                 headers={"Authorization": self.misp_api_key},
                 verify=False,
-                timeout=45,  # Slightly increased from original 30s
+                timeout=45,
             )

             if response.status_code != 200:
@@ -146,13 +162,24 @@ class MispAnalyzer(interface.BaseAnalyzer):
                 return []

             data = response.json()
-            return data.get("response", {}).get("Attribute", [])
+            attributes = data.get("response", {}).get("Attribute", [])
+
+            # Log community sources for debugging
+            if attributes and self.include_community:
+                orgs = set()
+                for attr_data in attributes:
+                    org = attr_data.get("Event", {}).get("Orgc", {}).get("name", "Unknown")
+                    orgs.add(org)
+                if len(orgs) > 1 or (orgs and list(orgs)[0] not in ["Unknown", "Your Org"]):
+                    logger.info(f"Community hit for {value}: {len(attributes)} matches from {', '.join(list(orgs)[:3])}")
+
+            return attributes

         except (requests.exceptions.Timeout, requests.exceptions.ConnectionError) as e:
             self.stats['api_timeouts'] += 1

             if retry_count < self.max_retries:
-                wait_time = (retry_count + 1) * 2  # Simple backoff: 2s, 4s
+                wait_time = (retry_count + 1) * 2  # backoff: 2s, 4s
                 logger.warning(f"Timeout for {value}, retrying in {wait_time}s (attempt {retry_count + 1})")
                 time.sleep(wait_time)
                 return self.query_misp_single(value, attr, retry_count + 1)
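Note (illustrative, not part of the commit): the logging block above walks Attribute -> Event -> Orgc -> name in the restSearch JSON. A trimmed sketch of that shape is shown below; real responses carry many more fields, and the values here are invented.

# Trimmed example of one entry in response["response"]["Attribute"],
# limited to the fields the code above actually reads. Values are made up.
example_attribute = {
    "value": "198.51.100.7",
    "type": "ip-dst",
    "Event": {
        "info": "Example event title",
        "Orgc": {"name": "Some Sharing Partner"},  # creator organisation of the event
    },
}

# The exact lookup path used for community logging:
org = example_attribute.get("Event", {}).get("Orgc", {}).get("name", "Unknown")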
@@ -166,21 +193,44 @@ class MispAnalyzer(interface.BaseAnalyzer):
             return []

     def mark_event(self, event, result, attr):
-        """Mark event with MISP intelligence - keeping original working version."""
+        """Mark event with MISP intelligence - enhanced with community info."""
         try:
             if attr.startswith("ip-"):
-                msg = "MISP: Malicious IP - "
+                msg = "MISP: Malicious IP"
             else:
-                msg = "MISP: Known indicator - "
+                msg = "MISP: Known indicator"

+            # Extract event and organization information
             event_info = result[0].get("Event", {}).get("info", "Unknown")
-            msg += event_info
+            org_info = result[0].get("Event", {}).get("Orgc", {}).get("name", "Unknown")
+
+            msg += f" - {event_info}"

             if len(result) > 1:
                 msg += f" (+{len(result)-1} more)"

+            # Add organization information for community awareness
+            if self.include_community and org_info != "Unknown":
+                # Collect unique organizations
+                orgs = set()
+                for res in result[:3]:
+                    org = res.get("Event", {}).get("Orgc", {}).get("name", "Unknown")
+                    if org != "Unknown":
+                        orgs.add(org)
+
+                if len(orgs) > 1:
+                    msg += f" | Sources: {', '.join(list(orgs)[:2])}"
+                    if len(orgs) > 2:
+                        msg += f" +{len(orgs)-2} more"
+                elif orgs:
+                    msg += f" | Source: {list(orgs)[0]}"
+
+            tags = [f"MISP-{attr}", "threat-intel"]
+            if self.include_community:
+                tags.append("community-intel")
+
             event.add_comment(msg)
-            event.add_tags([f"MISP-{attr}", "threat-intel"])
+            event.add_tags(tags)
             event.commit()

             self.stats['events_marked'] += 1
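Note (illustrative, not part of the commit): to make the annotation format concrete, here is a hypothetical two-match result and roughly what mark_event would build for attr="ip-dst" with community search enabled. The order of sources may vary because a set is used.

# Hypothetical result with two attributes from two different organisations.
result = [
    {"Event": {"info": "Phishing campaign wave 3", "Orgc": {"name": "CERT-A"}}},
    {"Event": {"info": "Phishing campaign wave 3", "Orgc": {"name": "CERT-B"}}},
]

# Roughly what the code above produces for attr="ip-dst", include_community=True:
#   msg  -> "MISP: Malicious IP - Phishing campaign wave 3 (+1 more) | Sources: CERT-A, CERT-B"
#   tags -> ["MISP-ip-dst", "threat-intel", "community-intel"]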
@@ -202,7 +252,7 @@ class MispAnalyzer(interface.BaseAnalyzer):

         indicators = []

-        # Extract based on attribute type - SAME AS ORIGINAL
+        # Extract based on attribute type
         if attr.startswith("ip-") and timesketch_attr == "message":
             ip_matches = self.ip_pattern.findall(str(loc))
             indicators = [ip for ip in ip_matches if self._is_valid_ip(ip)]
@@ -256,16 +306,65 @@ class MispAnalyzer(interface.BaseAnalyzer):
                 result = self.query_misp_single(indicator, attr)
                 if result:
                     self.result_dict[f"{attr}:{indicator}"] = result
-                    logger.info(f"MISP hit: {indicator} ({len(result)} matches)")
+
+                    # Track community vs own org hits
+                    orgs = set()
+                    for res in result:
+                        org = res.get("Event", {}).get("Orgc", {}).get("name", "Unknown")
+                        orgs.add(org)
+
+                    if len(orgs) > 1 or any(org not in ["Unknown", "Your Organization"] for org in orgs):
+                        self.stats['community_hits'] += 1
+                        logger.info(f"Community MISP hit: {indicator} ({len(result)} matches from {', '.join(list(orgs)[:3])})")
+                    else:
+                        self.stats['own_org_hits'] += 1
+                        logger.info(f"Own org MISP hit: {indicator} ({len(result)} matches)")

                 self.processed_indicators.add(indicator)

-                # Small delay to be nice to MISP server
                 time.sleep(self.request_delay)

         # Mark events that have matches
         self.check_existing_matches(events_with_indicators, attr)

+    def test_community_connectivity(self):
+        """Test if community feeds are accessible."""
+        if not self.include_community:
+            return "Community search disabled"
+
+        try:
+            test_payload = {
+                "returnFormat": "json",
+                "distribution": [1, 2, 3],
+                "limit": 1,
+                "enforceWarninglist": False,
+            }
+
+            response = requests.post(
+                f"{self.misp_url}/attributes/restSearch/",
+                json=test_payload,
+                headers={"Authorization": self.misp_api_key},
+                verify=False,
+                timeout=30,
+            )
+
+            if response.status_code == 200:
+                data = response.json()
+                attributes = data.get("response", {}).get("Attribute", [])
+                if attributes:
+                    orgs = set()
+                    for attr in attributes[:5]:
+                        org = attr.get("Event", {}).get("Orgc", {}).get("name", "Unknown")
+                        orgs.add(org)
+                    return f"Community access OK - {len(attributes)} indicators from {len(orgs)} orgs: {', '.join(list(orgs)[:3])}"
+                else:
+                    return "Community access OK but no community indicators found"
+            else:
+                return f"Community test failed: HTTP {response.status_code}"
+
+        except Exception as e:
+            return f"Community test error: {e}"
+
     def check_existing_matches(self, events_with_indicators, attr):
         """Check events against existing MISP results."""
         for event, indicators in events_with_indicators:
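Note (illustrative, not part of the commit): the connectivity probe added above can also be exercised outside Timesketch. Below is a minimal standalone sketch, assuming misp_url and misp_key point at your instance; verify=False mirrors the analyzer, but TLS verification should be enabled where possible.

import requests

def probe_community(misp_url: str, misp_key: str) -> str:
    """Standalone version of the connectivity probe above (illustrative only)."""
    payload = {
        "returnFormat": "json",
        "distribution": [1, 2, 3],  # community-level results only
        "limit": 1,
        "enforceWarninglist": False,
    }
    resp = requests.post(
        f"{misp_url}/attributes/restSearch/",
        json=payload,
        headers={"Authorization": misp_key},
        verify=False,
        timeout=30,
    )
    if resp.status_code != 200:
        return f"Community test failed: HTTP {resp.status_code}"
    attributes = resp.json().get("response", {}).get("Attribute", [])
    return "Community access OK" if attributes else "No community indicators visible"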
@@ -332,6 +431,11 @@ class MispAnalyzer(interface.BaseAnalyzer):

         start_time = time.time()

+        # Test community connectivity if enabled
+        if self.include_community:
+            community_status = self.test_community_connectivity()
+            logger.info(f"Community connectivity test: {community_status}")
+
         try:
             self.query_misp(self._query_string, self._attr, self._timesketch_attr)

@@ -339,10 +443,15 @@ class MispAnalyzer(interface.BaseAnalyzer):
             success_rate = ((self.stats['api_calls'] - self.stats['api_timeouts']) /
                             max(1, self.stats['api_calls']) * 100)

+            # Enhanced results with community statistics
             result = (f"[{self._timesketch_attr}] MISP Analysis Complete: "
                       f"{self.stats['events_marked']}/{self.stats['events_processed']} events marked | "
-                      f"{self.stats['api_calls']} API calls ({success_rate:.1f}% success) | "
-                      f"{elapsed:.0f}s")
+                      f"{self.stats['api_calls']} API calls ({success_rate:.1f}% success) | ")
+
+            if self.include_community:
+                result += f"Community hits: {self.stats['community_hits']}, Own org: {self.stats['own_org_hits']} | "
+
+            result += f"{elapsed:.0f}s"

             logger.info(result)
             return result
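Note (illustrative, not part of the commit): a sketch of the summary line the run now emits, computed from made-up statistics purely to show the format.

# Hypothetical stats, only to illustrate the summary string format.
stats = {"events_processed": 3400, "events_marked": 12, "api_calls": 45,
         "api_timeouts": 2, "community_hits": 8, "own_org_hits": 4}
elapsed = 128.4
success_rate = (stats["api_calls"] - stats["api_timeouts"]) / max(1, stats["api_calls"]) * 100

result = (f"[message] MISP Analysis Complete: "
          f"{stats['events_marked']}/{stats['events_processed']} events marked | "
          f"{stats['api_calls']} API calls ({success_rate:.1f}% success) | ")
result += f"Community hits: {stats['community_hits']}, Own org: {stats['own_org_hits']} | "
result += f"{elapsed:.0f}s"
# -> "[message] MISP Analysis Complete: 12/3400 events marked | 45 API calls
#     (95.6% success) | Community hits: 8, Own org: 4 | 128s"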