commit 1feea19679
parent afa0454f74

collector.sh (250 changed lines)
@@ -1,8 +1,8 @@
 #!/bin/bash
 
-# Forensic Collector
-# Version 0.1
-# Chain of custody and integrity verification
+# Forensic Collector - Enhanced
+# Version 0.2
+# Chain of custody and integrity verification with enhanced technical documentation - this was largely vibe-coded
 
 #Copyright [yyyy] [name of copyright owner]
 
@@ -25,9 +25,8 @@ NC='\033[0m' # No Color
 
 # header
 echo -e "${GREEN}=========================================="
-echo " FORENSIC COLLECTOR"
-echo
-echo
+echo " FORENSIC COLLECTOR - Enhanced"
+echo " Version 0.2"
 echo
 echo "\"Experten, die keine KI verwenden, werden aussterben."
 echo " Ja, und Experten wie meine Frau, die KI verwenden,"
@@ -38,16 +37,32 @@ echo
 
 START_TIME=$(date -u +%Y-%m-%dT%H:%M:%SZ)
 HOSTNAME=$(hostname)
-OS_INFO=$(uname -a)
-WGET_VERSION=$(wget --version | head -n1)
 USERNAME=$(whoami)
 
+# Enhanced system information collection
+echo -e "${YELLOW}Erfasse detaillierte Systeminformationen...${NC}"
+
+# Tool versions
+WGET_FULL=$(wget --version)
+OPENSSL_VERSION=$(openssl version)
+DIG_VERSION=$(dig -v 2>&1 | head -n1)
+CURL_VERSION=$(curl --version | head -n1)
+BASH_VERSION=$(bash --version | head -n1)
+
+# System configuration
+UNAME_A=$(uname -a)
+KERNEL_VERSION=$(uname -r)
+DISTRO=$(lsb_release -d 2>/dev/null | cut -f2- || cat /etc/os-release | grep PRETTY_NAME | cut -d'"' -f2)
+GLIBC_VERSION=$(ldd --version | head -n1)
+TIMEZONE=$(timedatectl show -p Timezone --value 2>/dev/null || cat /etc/timezone)
+SYSTEM_LOCALE=$(locale | grep LANG=)
+
 echo -e "${YELLOW}Öffentliche IP wird abgerufen...${NC}"
 EXTERNAL_IP=$(curl -s https://api.ipify.org)
 if [ -z "$EXTERNAL_IP" ]; then
 EXTERNAL_IP="Fehler beim Abruf der öffentlichen IP-Adresse."
 echo -e "${RED}WARNUNG: Die öffentliche IP-Adresse konnte nicht abgerufen werden.${NC}"
-echo -e "&{RED}Prüfen Sie die Netzwerkverbindung.${NC}"
+echo -e "${RED}Prüfen Sie die Netzwerkverbindung.${NC}"
 else
 echo -e "${GREEN}Öffentliche IP: $EXTERNAL_IP${NC}"
 echo -e "${RED}Hinweis:${NC}"
@@ -78,8 +93,10 @@ else
 DEPTH_PARAM="--level=$MAX_DEPTH"
 fi
 
+# Extract hostname for DNS resolution and certificate capture
+HOSTNAME_FROM_URL=$(echo "$TARGET_URL" | sed -E 's/^https?:\/\///' | sed 's/\/.*$//' | sed 's/:.*$//')
 
 # Create output directory structure
 DATE_STR=$(date +%Y%m%d_%H%M%S)
 if [ -z "$CASE_NUMBER" ]; then
 OUTPUT_DIR="${DATE_STR}_${HOSTNAME_FROM_URL}"
@@ -90,16 +107,101 @@ fi
 mkdir -p "$OUTPUT_DIR"
 cd "$OUTPUT_DIR" || exit 1
 
 # Create subdirectories for organized data collection
 WEBSITE_DIR="website"
-mkdir -p "$WEBSITE_DIR"
+TECH_DIR="technical_data"
+CERTIFICATE_DIR="$TECH_DIR/certificates"
+DNS_DIR="$TECH_DIR/dns"
+CONFIG_DIR="$TECH_DIR/system_config"
+
+mkdir -p "$WEBSITE_DIR" "$CERTIFICATE_DIR" "$DNS_DIR" "$CONFIG_DIR"
+
 # Log file for wget output
 WGET_LOG="wget.log"
 
-# Initialize report
+# Enhanced DNS information capture
+echo -e "\n${YELLOW}Erfasse DNS-Informationen...${NC}"
+echo "DNS Resolution Documentation - $HOSTNAME_FROM_URL" > "$DNS_DIR/dns_records.txt"
+echo "Timestamp (UTC): $START_TIME" >> "$DNS_DIR/dns_records.txt"
+echo "----------------------------------------" >> "$DNS_DIR/dns_records.txt"
+
+# Capture various DNS record types
+for record_type in A AAAA MX TXT CNAME NS PTR SOA; do
+echo -e "\n### $record_type Record ###" >> "$DNS_DIR/dns_records.txt"
+dig +short $record_type $HOSTNAME_FROM_URL >> "$DNS_DIR/dns_records.txt" 2>&1
+done
+
+# Detailed DNS trace
+echo -e "\n### DNS Trace ###" >> "$DNS_DIR/dns_records.txt"
+dig +trace $HOSTNAME_FROM_URL >> "$DNS_DIR/dns_records.txt" 2>&1
+
+# SSL Certificate capture (for HTTPS sites)
+if [[ $TARGET_URL =~ ^https:// ]]; then
+echo -e "\n${YELLOW}Erfasse SSL-Zertifikat...${NC}"
+PORT=443
+CERT_HOSTNAME=$HOSTNAME_FROM_URL
+if [[ $CERT_HOSTNAME =~ :([0-9]+)$ ]]; then
+PORT=${BASH_REMATCH[1]}
+CERT_HOSTNAME=${CERT_HOSTNAME%%:*}
+fi
+
+# Capture certificate
+echo | openssl s_client -connect $CERT_HOSTNAME:$PORT -showcerts 2>/dev/null > "$CERTIFICATE_DIR/cert_chain.pem"
+
+# Parse certificate details
+echo "SSL Certificate Analysis" > "$CERTIFICATE_DIR/cert_details.txt"
+echo "Target: $CERT_HOSTNAME:$PORT" >> "$CERTIFICATE_DIR/cert_details.txt"
+echo "Timestamp (UTC): $START_TIME" >> "$CERTIFICATE_DIR/cert_details.txt"
+echo "----------------------------------------" >> "$CERTIFICATE_DIR/cert_details.txt"
+
+# Extract individual certificates
+awk 'BEGIN {cert=0} /-----BEGIN CERTIFICATE-----/ {cert++; filename=sprintf("cert_%d.pem", cert)} cert>0 {print > "'$CERTIFICATE_DIR'/"filename} /-----END CERTIFICATE-----/ {cert=0}' "$CERTIFICATE_DIR/cert_chain.pem"
+
+# Analyze each certificate
+for cert_file in "$CERTIFICATE_DIR"/cert_*.pem; do
+if [ -f "$cert_file" ]; then
+cert_num=$(basename "$cert_file" .pem | sed 's/cert_//')
+echo -e "\n=== Certificate $cert_num ===" >> "$CERTIFICATE_DIR/cert_details.txt"
+openssl x509 -in "$cert_file" -text -noout >> "$CERTIFICATE_DIR/cert_details.txt" 2>/dev/null
+fi
+done
+
+# SSL connection information
+echo -e "\n=== SSL Connection Details ===" >> "$CERTIFICATE_DIR/cert_details.txt"
+echo | openssl s_client -connect $CERT_HOSTNAME:$PORT 2>/dev/null | grep -E "(SSL|TLS|Protocol|Cipher)" >> "$CERTIFICATE_DIR/cert_details.txt"
+fi
+
+# System configuration documentation
+echo -e "\n${YELLOW}Dokumentiere Systemkonfiguration...${NC}"
+echo "System Configuration Documentation" > "$CONFIG_DIR/system_config.txt"
+echo "Timestamp (UTC): $START_TIME" >> "$CONFIG_DIR/system_config.txt"
+echo "----------------------------------------" >> "$CONFIG_DIR/system_config.txt"
+echo "Hostname: $HOSTNAME" >> "$CONFIG_DIR/system_config.txt"
+echo "Operating System: $UNAME_A" >> "$CONFIG_DIR/system_config.txt"
+echo "Distribution: $DISTRO" >> "$CONFIG_DIR/system_config.txt"
+echo "Kernel Version: $KERNEL_VERSION" >> "$CONFIG_DIR/system_config.txt"
+echo "GLIBC Version: $GLIBC_VERSION" >> "$CONFIG_DIR/system_config.txt"
+echo "Timezone: $TIMEZONE" >> "$CONFIG_DIR/system_config.txt"
+echo "Locale: $SYSTEM_LOCALE" >> "$CONFIG_DIR/system_config.txt"
+echo "Public IP: $EXTERNAL_IP" >> "$CONFIG_DIR/system_config.txt"
+
+# Tool versions documentation
+echo -e "\n=== Tool Versions ===" >> "$CONFIG_DIR/system_config.txt"
+echo "wget: $WGET_FULL" >> "$CONFIG_DIR/system_config.txt"
+echo "OpenSSL: $OPENSSL_VERSION" >> "$CONFIG_DIR/system_config.txt"
+echo "dig: $DIG_VERSION" >> "$CONFIG_DIR/system_config.txt"
+echo "curl: $CURL_VERSION" >> "$CONFIG_DIR/system_config.txt"
+echo "bash: $BASH_VERSION" >> "$CONFIG_DIR/system_config.txt"
+
+# Network configuration
+echo -e "\n=== Network Configuration ===" >> "$CONFIG_DIR/system_config.txt"
+ip addr show >> "$CONFIG_DIR/system_config.txt" 2>/dev/null
+netstat -rn >> "$CONFIG_DIR/system_config.txt" 2>/dev/null
+
+# Initialize forensic report
 REPORT_FILE="forensic_report.txt"
 cat > "$REPORT_FILE" << EOF
-FORENSIC COLLECTOR
+FORENSIC COLLECTOR - Enhanced
 ===================================
 
 CASE INFORMATION
@@ -108,28 +210,56 @@ CASE NUMBER: ${CASE_NUMBER:-N/A}
 PRESERVATION TARGET: $TARGET_URL
 PRESERVATION TIMESTAMP (UTC): $START_TIME
 OUTPUT FOLDER: $OUTPUT_DIR
+SCRIPT VERSION: 0.2
 
 HOST SYSTEM INFORMATION
 -------------------
 USERNAME: $USERNAME
 HOSTNAME: $HOSTNAME
-OPERATING SYSTEM: $OS_INFO
-wget-version: $WGET_VERSION
+OPERATING SYSTEM: $UNAME_A
+DISTRIBUTION: $DISTRO
+KERNEL VERSION: $KERNEL_VERSION
+TIMEZONE: $TIMEZONE
 PUBLIC IP ADDRESS: $EXTERNAL_IP
 
+TOOL VERSIONS
+-----------------
+wget: $(echo "$WGET_FULL" | head -n1)
+OpenSSL: $OPENSSL_VERSION
+dig: $DIG_VERSION
+curl: $CURL_VERSION
+bash: $BASH_VERSION
+
 PARAMETERS
 -----------------
 MAX RECURSION: $MAX_DEPTH
+EXTRACTED TARGET DOMAIN: $HOSTNAME_FROM_URL
 
+TECHNICAL DATA COLLECTED
+------------------------
+- DNS Records: $DNS_DIR/dns_records.txt
+- SSL Certificates: $CERTIFICATE_DIR/
+- System Configuration: $CONFIG_DIR/system_config.txt
+
 EOF
 
+# Enhanced wget command for better dynamic content capture
+COOKIE_JAR="cookies.txt"
 WGET_CMD="wget --recursive --page-requisites --html-extension --convert-links \
 --restrict-file-names=windows --domains=$HOSTNAME_FROM_URL \
 --user-agent='Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36' \
 --wait=1 --random-wait --timeout=30 --tries=3 \
 --no-parent --timestamping --backup-converted \
-$DEPTH_PARAM --directory-prefix='$WEBSITE_DIR' '$TARGET_URL' 2>&1 | tee '$WGET_LOG'"
+--load-cookies=\"$COOKIE_JAR\" --save-cookies=\"$COOKIE_JAR\" --keep-session-cookies \
+--execute robots=off --reject-regex='(logout|signout|sign-out)' \
+--header='Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' \
+--header='Accept-Language: en-US,en;q=0.5' \
+--header='Accept-Encoding: gzip, deflate, br' \
+--header='Cache-Control: max-age=0' \
+$DEPTH_PARAM --directory-prefix=\"$WEBSITE_DIR\" \"$TARGET_URL\" 2>&1 | tee \"$WGET_LOG\""
 
+# Create empty cookie jar
+touch "$COOKIE_JAR"
 
 # wget-command documentation
 echo "wget-method:" >> "$REPORT_FILE"
@@ -142,7 +272,7 @@ echo -e "\n${YELLOW}Beginne Sicherungsmaßnahme...${NC}"
 echo -e "${GREEN}Methode:${NC} $WGET_CMD"
 echo
 
-# Execute wget (eval is used to properly handle the command with variables)
+# Execute wget
 eval "$WGET_CMD"
 WGET_EXIT_CODE=$?
 if [ $WGET_EXIT_CODE -ne 0 ]; then
@@ -151,15 +281,33 @@ if [ $WGET_EXIT_CODE -ne 0 ]; then
 echo "wget.log contains runtime logs." >> "$REPORT_FILE"
 fi
 
+# Save cookie data if any exists
+if [ -s "$COOKIE_JAR" ]; then
+cp "$COOKIE_JAR" "$TECH_DIR/cookies.txt"
+echo -e "\n${GREEN}Cookies gespeichert:${NC} $TECH_DIR/cookies.txt"
+echo "Cookies and Session Data: $TECH_DIR/cookies.txt" >> "$REPORT_FILE"
+else
+echo "No cookies were captured during the session." >> "$REPORT_FILE"
+fi
+
+# Enhanced statistics collection
 echo -e "\n${YELLOW}Kalkulation der Sicherungsstatistiken...${NC}"
 TOTAL_FILES=$(find "$WEBSITE_DIR" -type f | wc -l)
 TOTAL_SIZE=$(du -sh "$WEBSITE_DIR" | cut -f1)
 FILE_TYPES=$(find "$WEBSITE_DIR" -type f -name "*.*" | sed 's/.*\.//' | sort | uniq -c | sort -rn)
 
+# Analyze captured JavaScript files
+JS_FILES=$(find "$WEBSITE_DIR" -name "*.js" | wc -l)
+CSS_FILES=$(find "$WEBSITE_DIR" -name "*.css" | wc -l)
+IMG_FILES=$(find "$WEBSITE_DIR" \( -name "*.jpg" -o -name "*.jpeg" -o -name "*.png" -o -name "*.gif" -o -name "*.svg" \) | wc -l)
+
 echo "Preservation Statistics:" >> "$REPORT_FILE"
 echo "-------------------" >> "$REPORT_FILE"
 echo "Total File Count: $TOTAL_FILES" >> "$REPORT_FILE"
 echo "Total Filesize: $TOTAL_SIZE" >> "$REPORT_FILE"
+echo "JavaScript Files: $JS_FILES" >> "$REPORT_FILE"
+echo "CSS Files: $CSS_FILES" >> "$REPORT_FILE"
+echo "Image Files: $IMG_FILES" >> "$REPORT_FILE"
 echo "Filetype-Distribution:" >> "$REPORT_FILE"
 echo "$FILE_TYPES" >> "$REPORT_FILE"
 echo >> "$REPORT_FILE"
@@ -171,7 +319,7 @@ if [ -s "$WGET_LOG" ]; then
 echo >> "$REPORT_FILE"
 fi
 
-# Generate hash list
+# Generate preliminary hash list
 echo -e "\n${YELLOW}Generiere Hashwerte...${NC}"
 HASH_FILE="file_hashes.sha256"
 echo "File Hash List (SHA-256)" > "$HASH_FILE"
@@ -179,19 +327,24 @@ echo "========================" >> "$HASH_FILE"
 echo "Generated on: $(date -u +%Y-%m-%dT%H:%M:%SZ)" >> "$HASH_FILE"
 echo >> "$HASH_FILE"
 
 # Hash all files
 find "$WEBSITE_DIR" -type f -print0 | while IFS= read -r -d '' file; do
 sha256sum "$file" >> "$HASH_FILE"
 done
+find "$TECH_DIR" -type f -print0 | while IFS= read -r -d '' file; do
+sha256sum "$file" >> "$HASH_FILE"
+done
 
-# Hash the report file itself
-sha256sum "$REPORT_FILE" >> "$HASH_FILE"
-sha256sum "$HASH_FILE" >> "$HASH_FILE"
+# Calculate preliminary report hash (report is not yet complete)
+REPORT_PRELIM_HASH=$(sha256sum "$REPORT_FILE" | cut -d' ' -f1)
+
+# Hash verification documentation (before finalizing)
 echo "Hash Verification:" >> "$REPORT_FILE"
 echo "------------------" >> "$REPORT_FILE"
 echo "Hash Algorithm: SHA-256" >> "$REPORT_FILE"
-echo "Hash File: $HASH_FILE" >> "$HASH_FILE"
-echo "Report File Hash: $(sha256sum "$REPORT_FILE" | cut -d' ' -f1)" >> "$REPORT_FILE"
+echo "Hash File: $HASH_FILE" >> "$REPORT_FILE"
+echo "Report File Hash (preliminary): $REPORT_PRELIM_HASH" >> "$REPORT_FILE"
+echo "Note: Final report hash will be calculated after completion" >> "$REPORT_FILE"
 echo >> "$REPORT_FILE"
 
 # Create final archive
@@ -201,20 +354,35 @@ tar -czf "$ARCHIVE_NAME" .
 
 ARCHIVE_HASH=$(sha256sum "$ARCHIVE_NAME" | cut -d' ' -f1)
 
+# Document archive hash in report (this will require re-archiving)
+echo "Archive Hash (SHA-256): $ARCHIVE_HASH" >> "$REPORT_FILE"
+
+# Re-create archive with final report
+tar -czf "$ARCHIVE_NAME" .
+FINAL_ARCHIVE_HASH=$(sha256sum "$ARCHIVE_NAME" | cut -d' ' -f1)
+
 END_TIME=$(date -u +%Y-%m-%dT%H:%M:%SZ)
 echo "Preservation Completion:" >> "$REPORT_FILE"
 echo "------------------------" >> "$REPORT_FILE"
 echo "End Time (UTC): $END_TIME" >> "$REPORT_FILE"
-echo "Archive Name: ${OUTPUT_DIR}.tar.gz" >> "$REPORT_FILE"
-echo "Archive Hash (SHA-256): $ARCHIVE_HASH" >> "$REPORT_FILE"
 
+# Finalize hashes
+FINAL_REPORT_HASH=$(sha256sum "$REPORT_FILE" | cut -d' ' -f1)
+sha256sum "$REPORT_FILE" >> "$HASH_FILE"
+FINAL_HASH_FILE_HASH=$(sha256sum "$HASH_FILE" | cut -d' ' -f1)
+
+echo "Archive Name: ${OUTPUT_DIR}.tar.gz" >> "$REPORT_FILE"
+echo "Final Report Hash (SHA-256): $FINAL_REPORT_HASH" >> "$REPORT_FILE"
+echo "Hash File Final Hash (SHA-256): $FINAL_HASH_FILE_HASH" >> "$REPORT_FILE"
+
+# Enhanced summary file
 SUMMARY_FILE="../${OUTPUT_DIR}_summary.txt"
 cat > "$SUMMARY_FILE" << EOF
 FORENSISCHE SICHERUNG - ZUSAMMENFASSUNG
 =============================
 
 Archiv: ${OUTPUT_DIR}.tar.gz
-Archivhash (SHA-256): $ARCHIVE_HASH
+Archivhash (SHA-256): $FINAL_ARCHIVE_HASH
 Geschäftszeichen: ${CASE_NUMBER:-N/A}
 Ziel-URL: $TARGET_URL
 Beginn der Sicherung: $START_TIME
@@ -222,14 +390,28 @@ Ende der Sicherung: $END_TIME
 Anzahl der Dateien: $TOTAL_FILES
 Gesamtvolumen: $TOTAL_SIZE
 
+ERHOBENE TECHNISCHE DATEN:
+- DNS-Auflösungen: $DNS_DIR/dns_records.txt
+- SSL-Zertifikate: $CERTIFICATE_DIR/
+- Systemkonfiguration: $CONFIG_DIR/system_config.txt
+- Cookies/Session-Daten: $TECH_DIR/cookies.txt (falls vorhanden)
+- Komplette Werkzeug-Versionierung: $CONFIG_DIR/system_config.txt
+
 Das Archiv enthält:
 1. Komplettsicherung im Ordner 'website/'
-2. Dokumentation der technischen Sicherung (forensic_report.txt)
-3. SHA-256-Hashwerte aller Dateien
-4. wget-Log
+2. Technische Daten im Ordner 'technical_data/'
+3. Dokumentation der technischen Sicherung (forensic_report.txt)
+4. SHA-256-Hashwerte aller Dateien
+5. wget-Log mit detaillierter Ausführungsdokumentation
 
 Zur Verifizierung der Integrität: sha256sum ${OUTPUT_DIR}.tar.gz
-Hashwert: $ARCHIVE_HASH
+Hashwert: $FINAL_ARCHIVE_HASH
+
+TECHNISCHE DETAILS:
+- wget Version: $(echo "$WGET_FULL" | head -n1)
+- OpenSSL Version: $OPENSSL_VERSION
+- Sammelsystem: $DISTRO ($KERNEL_VERSION)
+- Zeitzone: $TIMEZONE
 EOF
 
 cd ..
@@ -241,9 +423,15 @@ echo " SICHERUNG ABGESCHLOSSEN"
 echo -e "==========================================${NC}"
 echo
 echo -e "${GREEN}Archiv erstellt:${NC} ${OUTPUT_DIR}.tar.gz"
-echo -e "${GREEN}Archivhash:${NC} $ARCHIVE_HASH"
+echo -e "${GREEN}Archivhash:${NC} $FINAL_ARCHIVE_HASH"
 echo -e "${GREEN}Zusammenfassung:${NC} ${OUTPUT_DIR}_summary.txt"
 echo
+echo -e "${YELLOW}ZUSÄTZLICH ERFASSTE DATEN:${NC}"
+echo "- DNS-Auflösungsdaten für alle Eintragstypen"
+echo "- SSL-Zertifikatkette mit vollständiger Analyse"
+echo "- Cookies und Session-Informationen"
+echo "- Detaillierte Systemkonfiguration und Werkzeugversionen"
+echo
 echo -e "${YELLOW}Zugriff auf die Webseitensicherung:${NC}"
 echo "1. Extraktion des Archivs: tar -xzf ${OUTPUT_DIR}.tar.gz"
 echo "2. Navigieren Sie zu: ${OUTPUT_DIR}/website/"
@@ -255,5 +443,5 @@ echo -e "${RED}Es wird dringend empfohlen, dies nur in einem Airgapped-System zu
 echo
 echo -e "${YELLOW}Verifizierung der Integrität:${NC}"
 echo "sha256sum ${OUTPUT_DIR}.tar.gz"
-echo "Erwartungswert: $ARCHIVE_HASH"
+echo "Erwartungswert: $FINAL_ARCHIVE_HASH"
 echo