fix ai auth, deploy script visual orgasm
This commit is contained in:
parent 8516a39fcb
commit b28f9b9213

.env.example (340 lines changed)
@@ -1,256 +1,184 @@
# ============================================================================
|
# ============================================================================
|
||||||
# ForensicPathways Environment Configuration - COMPLETE
|
# ForensicPathways Environment Configuration
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# Copy this file to .env and adjust the values below.
|
# Copy this file to .env and configure the REQUIRED values below.
|
||||||
# This file covers ALL environment variables used in the codebase.
|
# Optional features can be enabled by uncommenting and configuring them.
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# 1. CORE APPLICATION SETTINGS (REQUIRED)
|
# 🔥 CRITICAL - REQUIRED FOR BASIC OPERATION
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
|
|
||||||
# Your application's public URL (used for redirects and links)
|
# Your application's public URL (used for redirects and links)
|
||||||
PUBLIC_BASE_URL=http://localhost:4321
|
PUBLIC_BASE_URL=http://localhost:4321
|
||||||
|
|
||||||
|
# Secret key for session encryption (GENERATE A SECURE RANDOM STRING!)
|
||||||
|
AUTH_SECRET=your-secret-key-change-in-production-please
|
||||||
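The setup checklist later in this file points to openssl for this value; as a minimal sketch (not part of the committed template), a secret can be generated and written into an existing .env like so:

# Hypothetical helper: generate a random 32-byte secret and patch .env in place
# base64 output contains no '|', '&' or '\', so it is safe inside the sed replacement
SECRET=$(openssl rand -base64 32)
sed -i "s|^AUTH_SECRET=.*|AUTH_SECRET=${SECRET}|" .env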
|
|
||||||
|
# Primary AI service for query processing (REQUIRED for core functionality)
|
||||||
|
AI_ANALYZER_ENDPOINT=https://api.mistral.ai/v1/chat/completions
|
||||||
|
AI_ANALYZER_API_KEY=your-ai-api-key-here
|
||||||
|
AI_ANALYZER_MODEL=mistral/mistral-small-latest
|
||||||
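A quick way to smoke-test these three values before starting the app (a sketch, assuming the endpoint accepts the usual chat-completions request shape used by Mistral, OpenAI and Ollama, and that the variables are exported in your shell, e.g. via 'set -a; source .env; set +a'; the configured model name carries a provider prefix, so the unprefixed Mistral name from the examples at the end of this file is used here):

# Hypothetical connectivity check - not part of the application code
curl -sS "$AI_ANALYZER_ENDPOINT" \
  -H "Authorization: Bearer $AI_ANALYZER_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{"model": "mistral-small-latest", "messages": [{"role": "user", "content": "ping"}]}'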
|
|
||||||
|
# ============================================================================
|
||||||
|
# ⚙️ IMPORTANT - CORE FEATURES CONFIGURATION
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
# Application environment
|
# Application environment
|
||||||
NODE_ENV=development
|
NODE_ENV=development
|
||||||
|
|
||||||
# Secret key for session encryption (CHANGE IN PRODUCTION!)
|
# === AUTHENTICATION & SECURITY ===
|
||||||
AUTH_SECRET=your-secret-key-change-in-production-please
|
# Set to true to require authentication (RECOMMENDED for production)
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# 2. AI SERVICES CONFIGURATION (REQUIRED FOR AI FEATURES)
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
# Main AI Analysis Service (for query processing and recommendations)
|
|
||||||
# Examples: http://localhost:11434 (Ollama), https://api.mistral.ai, https://api.openai.com
|
|
||||||
AI_ANALYZER_ENDPOINT=https://api.mistral.ai/v1/chat/completions
|
|
||||||
AI_ANALYZER_API_KEY=
|
|
||||||
AI_ANALYZER_MODEL=mistral/mistral-small-latest
|
|
||||||
|
|
||||||
# Vector Embeddings Service (for semantic search)
|
|
||||||
# Leave API_KEY empty for Ollama, use actual key for cloud services
|
|
||||||
AI_EMBEDDINGS_ENABLED=true
|
|
||||||
AI_EMBEDDINGS_ENDPOINT=https://api.mistral.ai/v1/embeddings
|
|
||||||
AI_EMBEDDINGS_API_KEY=
|
|
||||||
AI_EMBEDDINGS_MODEL=mistral-embed
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# 3. AI PIPELINE CONFIGURATION (CONTEXT & PERFORMANCE TUNING)
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
# === SIMILARITY SEARCH STAGE ===
|
|
||||||
# How many similar tools/concepts embeddings search returns as candidates
|
|
||||||
# 🔍 This is the FIRST filter - vector similarity matching
|
|
||||||
# Lower = faster, less comprehensive | Higher = slower, more comprehensive
|
|
||||||
AI_EMBEDDING_CANDIDATES=50
|
|
||||||
|
|
||||||
# Minimum similarity score threshold (0.0-1.0)
|
|
||||||
# Lower = more results but less relevant | Higher = fewer but more relevant
|
|
||||||
AI_SIMILARITY_THRESHOLD=0.3
|
|
||||||
|
|
||||||
# === AI SELECTION FROM EMBEDDINGS ===
|
|
||||||
# When embeddings are enabled, how many top tools to send with full context
|
|
||||||
# 🎯 This is the SECOND filter - take best N from embeddings results
|
|
||||||
AI_EMBEDDING_SELECTION_LIMIT=30
|
|
||||||
AI_EMBEDDING_CONCEPTS_LIMIT=15
|
|
||||||
|
|
||||||
# Maximum tools/concepts sent to AI when embeddings are DISABLED
|
|
||||||
# Set to 0 for no limit (WARNING: may cause token overflow with large datasets)
|
|
||||||
AI_NO_EMBEDDINGS_TOOL_LIMIT=0
|
|
||||||
AI_NO_EMBEDDINGS_CONCEPT_LIMIT=0
|
|
||||||
|
|
||||||
# === AI SELECTION STAGE ===
|
|
||||||
# Maximum tools the AI can select from embedding candidates
|
|
||||||
# 🤖 This is the THIRD filter - AI intelligent selection
|
|
||||||
# Should be ≤ AI_EMBEDDING_CANDIDATES
|
|
||||||
AI_MAX_SELECTED_ITEMS=25
|
|
||||||
|
|
||||||
# === EMBEDDINGS EFFICIENCY THRESHOLDS ===
|
|
||||||
# Minimum tools required for embeddings to be considered useful
|
|
||||||
AI_EMBEDDINGS_MIN_TOOLS=8
|
|
||||||
|
|
||||||
# Maximum fraction of the total tool set (0.0-1.0) that embeddings may return and still count as useful filtering
|
|
||||||
AI_EMBEDDINGS_MAX_REDUCTION_RATIO=0.75
|
|
||||||
|
|
||||||
# === CONTEXT FLOW SUMMARY ===
|
|
||||||
# 1. Vector Search: 111 total tools → AI_EMBEDDING_CANDIDATES (50) most similar
|
|
||||||
# 2. AI Selection: 50 candidates → AI_MAX_SELECTED_ITEMS (25) best matches
|
|
||||||
# 3. Final Output: Recommendations based on analyzed subset
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# 4. AI PERFORMANCE & RATE LIMITING
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
# === USER RATE LIMITS (per minute) ===
|
|
||||||
# Main queries per user per minute
|
|
||||||
AI_RATE_LIMIT_MAX_REQUESTS=4
|
|
||||||
|
|
||||||
# Total AI micro-task calls per user per minute (across all micro-tasks)
|
|
||||||
AI_MICRO_TASK_TOTAL_LIMIT=30
|
|
||||||
|
|
||||||
# === PIPELINE TIMING ===
|
|
||||||
# Delay between micro-tasks within a single query (milliseconds)
|
|
||||||
# Higher = gentler on AI service | Lower = faster responses
|
|
||||||
AI_MICRO_TASK_DELAY_MS=500
|
|
||||||
|
|
||||||
# Delay between queued requests (milliseconds)
|
|
||||||
AI_RATE_LIMIT_DELAY_MS=2000
|
|
||||||
|
|
||||||
# === EMBEDDINGS BATCH PROCESSING ===
|
|
||||||
# How many embeddings to generate per API call
|
|
||||||
AI_EMBEDDINGS_BATCH_SIZE=10
|
|
||||||
|
|
||||||
# Delay between embedding batches (milliseconds)
|
|
||||||
AI_EMBEDDINGS_BATCH_DELAY_MS=1000
|
|
||||||
|
|
||||||
# Maximum tools sent to AI for detailed analysis (micro-tasks)
|
|
||||||
AI_MAX_TOOLS_TO_ANALYZE=20
|
|
||||||
AI_MAX_CONCEPTS_TO_ANALYZE=10
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# 5. AI CONTEXT & TOKEN MANAGEMENT
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
# Maximum context tokens to maintain across micro-tasks
|
|
||||||
# Controls how much conversation history is preserved between AI calls
|
|
||||||
AI_MAX_CONTEXT_TOKENS=4000
|
|
||||||
|
|
||||||
# Maximum tokens per individual AI prompt
|
|
||||||
# Larger = more context per call | Smaller = faster responses
|
|
||||||
AI_MAX_PROMPT_TOKENS=2500
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# 6. AUTHENTICATION & AUTHORIZATION (OPTIONAL)
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
# Enable authentication for different features
|
|
||||||
AUTHENTICATION_NECESSARY_CONTRIBUTIONS=false
|
AUTHENTICATION_NECESSARY_CONTRIBUTIONS=false
|
||||||
AUTHENTICATION_NECESSARY_AI=false
|
AUTHENTICATION_NECESSARY_AI=false
|
||||||
|
|
||||||
# OIDC Provider Settings (only needed if authentication enabled)
|
# OIDC Provider Configuration
|
||||||
OIDC_ENDPOINT=https://your-oidc-provider.com
|
OIDC_ENDPOINT=https://your-nextcloud.com/index.php/apps/oidc
|
||||||
OIDC_CLIENT_ID=your-client-id
|
OIDC_CLIENT_ID=your-client-id
|
||||||
OIDC_CLIENT_SECRET=your-client-secret
|
OIDC_CLIENT_SECRET=your-client-secret
|
||||||
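Before flipping the AUTHENTICATION_NECESSARY_* flags to true, the provider can be checked directly (a sketch; it assumes the endpoint serves standard OIDC discovery metadata, which may differ for the Nextcloud OIDC app shown in the new default):

# Hypothetical sanity check - should return JSON listing authorization_endpoint, token_endpoint, jwks_uri, ...
curl -sS "$OIDC_ENDPOINT/.well-known/openid-configuration"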
|
|
||||||
# ============================================================================
|
# === FILE HANDLING ===
|
||||||
# 7. FILE UPLOADS - NEXTCLOUD INTEGRATION (OPTIONAL)
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
# Nextcloud server for file uploads (knowledgebase contributions)
|
# Nextcloud server for file uploads (knowledgebase contributions)
|
||||||
# Leave empty to disable file upload functionality
|
|
||||||
NEXTCLOUD_ENDPOINT=https://your-nextcloud.com
|
NEXTCLOUD_ENDPOINT=https://your-nextcloud.com
|
||||||
|
|
||||||
# Nextcloud credentials (app password recommended)
|
|
||||||
NEXTCLOUD_USERNAME=your-username
|
NEXTCLOUD_USERNAME=your-username
|
||||||
NEXTCLOUD_PASSWORD=your-app-password
|
NEXTCLOUD_PASSWORD=your-app-password
|
||||||
|
|
||||||
# Upload directory on Nextcloud (will be created if it doesn't exist)
|
|
||||||
NEXTCLOUD_UPLOAD_PATH=/kb-media
|
NEXTCLOUD_UPLOAD_PATH=/kb-media
|
||||||
|
|
||||||
# Public URL base for sharing uploaded files
|
|
||||||
# Usually your Nextcloud base URL + share path
|
|
||||||
NEXTCLOUD_PUBLIC_URL=https://your-nextcloud.com/s/
|
NEXTCLOUD_PUBLIC_URL=https://your-nextcloud.com/s/
|
||||||
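Credentials and the upload path can be sanity-checked against Nextcloud's WebDAV API before enabling uploads in the UI (a sketch; how the application itself talks to Nextcloud is not shown in this diff, and the app password works here in place of the account password):

# Hypothetical upload test using the standard Nextcloud WebDAV path
DAV="$NEXTCLOUD_ENDPOINT/remote.php/dav/files/$NEXTCLOUD_USERNAME"
curl -u "$NEXTCLOUD_USERNAME:$NEXTCLOUD_PASSWORD" -X MKCOL "$DAV$NEXTCLOUD_UPLOAD_PATH"
echo "upload test" | curl -u "$NEXTCLOUD_USERNAME:$NEXTCLOUD_PASSWORD" -T - "$DAV$NEXTCLOUD_UPLOAD_PATH/upload-test.txt"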
|
|
||||||
# ============================================================================
|
# === COLLABORATION & CONTRIBUTIONS ===
|
||||||
# 8. GIT CONTRIBUTIONS - ISSUE CREATION (OPTIONAL)
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
# Git provider: gitea, github, or gitlab
|
# Git provider: gitea, github, or gitlab
|
||||||
GIT_PROVIDER=gitea
|
GIT_PROVIDER=gitea
|
||||||
|
|
||||||
# Repository URL (used to extract owner/name)
|
|
||||||
# Example: https://git.example.com/owner/forensic-pathways.git
|
|
||||||
GIT_REPO_URL=https://git.example.com/owner/forensic-pathways.git
|
GIT_REPO_URL=https://git.example.com/owner/forensic-pathways.git
|
||||||
|
|
||||||
# API endpoint for your git provider
|
|
||||||
# Gitea: https://git.example.com/api/v1
|
|
||||||
# GitHub: https://api.github.com
|
|
||||||
# GitLab: https://gitlab.example.com/api/v4
|
|
||||||
GIT_API_ENDPOINT=https://git.example.com/api/v1
|
GIT_API_ENDPOINT=https://git.example.com/api/v1
|
||||||
|
|
||||||
# Personal access token or API token for creating issues
|
|
||||||
# Generate this in your git provider's settings
|
|
||||||
GIT_API_TOKEN=your-git-api-token
|
GIT_API_TOKEN=your-git-api-token
|
||||||
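The token's permissions can be exercised directly before relying on it for contributions (a sketch for the Gitea provider configured above; GitHub and GitLab use different paths and auth headers, and owner/repo must match GIT_REPO_URL):

# Hypothetical test issue via the Gitea API
curl -sS -X POST "$GIT_API_ENDPOINT/repos/owner/forensic-pathways/issues" \
  -H "Authorization: token $GIT_API_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"title": "Contribution smoke test", "body": "Created while validating GIT_API_TOKEN."}'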
|
|
||||||
# ============================================================================
|
# === AUDIT TRAIL (Important for forensic work) ===
|
||||||
# 9. AUDIT & DEBUGGING (OPTIONAL)
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
# Enable detailed audit trail of AI decision-making
|
|
||||||
FORENSIC_AUDIT_ENABLED=true
|
FORENSIC_AUDIT_ENABLED=true
|
||||||
|
|
||||||
# Audit detail level: minimal, standard, verbose
|
|
||||||
FORENSIC_AUDIT_DETAIL_LEVEL=standard
|
FORENSIC_AUDIT_DETAIL_LEVEL=standard
|
||||||
|
|
||||||
# Audit retention time (hours)
|
|
||||||
FORENSIC_AUDIT_RETENTION_HOURS=24
|
FORENSIC_AUDIT_RETENTION_HOURS=24
|
||||||
|
|
||||||
# Maximum audit entries per request
|
|
||||||
FORENSIC_AUDIT_MAX_ENTRIES=50
|
FORENSIC_AUDIT_MAX_ENTRIES=50
|
||||||
|
|
||||||
# ============================================================================
|
# === AI SEMANTIC SEARCH ===
|
||||||
# 10. SIMPLIFIED CONFIDENCE SCORING SYSTEM
|
# Enable semantic search (highly recommended for better results)
|
||||||
# ============================================================================
|
AI_EMBEDDINGS_ENABLED=true
|
||||||
|
AI_EMBEDDINGS_ENDPOINT=https://api.mistral.ai/v1/embeddings
|
||||||
|
AI_EMBEDDINGS_API_KEY=your-embeddings-api-key-here
|
||||||
|
AI_EMBEDDINGS_MODEL=mistral-embed
|
||||||
|
|
||||||
# Confidence component weights (must sum to 1.0)
|
# User rate limiting (queries per minute)
|
||||||
CONFIDENCE_SEMANTIC_WEIGHT=0.5 # Weight for vector similarity quality
|
AI_RATE_LIMIT_MAX_REQUESTS=4
|
||||||
CONFIDENCE_SUITABILITY_WEIGHT=0.5 # Weight for AI-determined task fitness
|
|
||||||
|
|
||||||
# Confidence thresholds (0-100)
|
|
||||||
CONFIDENCE_MINIMUM_THRESHOLD=50 # Below this = weak recommendation
|
|
||||||
CONFIDENCE_MEDIUM_THRESHOLD=70      # 50-69 = weak, 70-79 = moderate
|
|
||||||
CONFIDENCE_HIGH_THRESHOLD=80 # 80+ = strong recommendation
|
|
||||||
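Taken together, the two weights (which must sum to 1.0) and the three thresholds imply a plain weighted blend; a sketch of the intended arithmetic follows, assuming both component scores are on a 0-100 scale (the actual scoring code is not part of this diff):

# Hypothetical illustration of the confidence blend and bucketing
semantic=82; suitability=64
confidence=$(awk -v s="$semantic" -v u="$suitability" 'BEGIN { printf "%d", s*0.5 + u*0.5 }')
if   [ "$confidence" -ge 80 ]; then echo "strong recommendation ($confidence)"
elif [ "$confidence" -ge 70 ]; then echo "moderate recommendation ($confidence)"
elif [ "$confidence" -ge 50 ]; then echo "weak recommendation ($confidence)"
else echo "below minimum threshold ($confidence)"
fi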
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# PERFORMANCE TUNING PRESETS
|
# 🎛️ PERFORMANCE TUNING - SENSIBLE DEFAULTS PROVIDED
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
|
|
||||||
# 🚀 FOR FASTER RESPONSES (prevent token overflow):
|
# === AI Pipeline Configuration ===
|
||||||
# AI_NO_EMBEDDINGS_TOOL_LIMIT=25
|
# These values are pre-tuned for optimal performance - adjust only if needed
|
||||||
# AI_NO_EMBEDDINGS_CONCEPT_LIMIT=10
|
|
||||||
|
# Vector similarity search settings
|
||||||
|
AI_EMBEDDING_CANDIDATES=50
|
||||||
|
AI_SIMILARITY_THRESHOLD=0.3
|
||||||
|
AI_EMBEDDING_SELECTION_LIMIT=30
|
||||||
|
AI_EMBEDDING_CONCEPTS_LIMIT=15
|
||||||
|
|
||||||
|
# AI selection limits
|
||||||
|
AI_MAX_SELECTED_ITEMS=25
|
||||||
|
AI_MAX_TOOLS_TO_ANALYZE=20
|
||||||
|
AI_MAX_CONCEPTS_TO_ANALYZE=10
|
||||||
|
|
||||||
|
# Efficiency thresholds
|
||||||
|
AI_EMBEDDINGS_MIN_TOOLS=8
|
||||||
|
AI_EMBEDDINGS_MAX_REDUCTION_RATIO=0.75
|
||||||
|
|
||||||
|
# Fallback limits when embeddings are disabled
|
||||||
|
AI_NO_EMBEDDINGS_TOOL_LIMIT=25
|
||||||
|
AI_NO_EMBEDDINGS_CONCEPT_LIMIT=10
|
||||||
|
|
||||||
|
# === Rate Limiting & Timing ===
|
||||||
|
AI_MICRO_TASK_TOTAL_LIMIT=30
|
||||||
|
AI_MICRO_TASK_DELAY_MS=500
|
||||||
|
AI_RATE_LIMIT_DELAY_MS=2000
|
||||||
|
|
||||||
|
# === Embeddings Batch Processing ===
|
||||||
|
AI_EMBEDDINGS_BATCH_SIZE=10
|
||||||
|
AI_EMBEDDINGS_BATCH_DELAY_MS=1000
|
||||||
|
|
||||||
|
# === Context Management ===
|
||||||
|
AI_MAX_CONTEXT_TOKENS=4000
|
||||||
|
AI_MAX_PROMPT_TOKENS=2500
|
||||||
|
|
||||||
|
# === Confidence Scoring ===
|
||||||
|
CONFIDENCE_SEMANTIC_WEIGHT=0.5
|
||||||
|
CONFIDENCE_SUITABILITY_WEIGHT=0.5
|
||||||
|
CONFIDENCE_MINIMUM_THRESHOLD=50
|
||||||
|
CONFIDENCE_MEDIUM_THRESHOLD=70
|
||||||
|
CONFIDENCE_HIGH_THRESHOLD=80
|
||||||
|
|
||||||
# 🎯 FOR FULL DATABASE ACCESS (risk of truncation):
|
|
||||||
# AI_NO_EMBEDDINGS_TOOL_LIMIT=0
|
|
||||||
# AI_NO_EMBEDDINGS_CONCEPT_LIMIT=0
|
|
||||||
|
|
||||||
# 🔋 FOR LOW-POWER SYSTEMS:
|
|
||||||
# AI_NO_EMBEDDINGS_TOOL_LIMIT=15
|
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# FEATURE COMBINATIONS GUIDE
|
# 📋 QUICK SETUP CHECKLIST
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
|
#
|
||||||
# 📝 BASIC SETUP (AI only):
|
# MINIMUM FOR DEVELOPMENT/TESTING:
|
||||||
# - Configure AI_ANALYZER_* and AI_EMBEDDINGS_*
|
# 1. ✅ Set PUBLIC_BASE_URL to your domain/localhost
|
||||||
# - Leave authentication, file uploads, and git disabled
|
# 2. ✅ Generate secure AUTH_SECRET (use: openssl rand -base64 32)
|
||||||
|
# 3. ✅ Configure AI_ANALYZER_ENDPOINT and API_KEY for your AI service
|
||||||
# 🔐 WITH AUTHENTICATION:
|
# 4. ✅ Test basic functionality
|
||||||
# - Set AUTHENTICATION_NECESSARY_* to true
|
#
|
||||||
# - Configure OIDC_* settings
|
# PRODUCTION-READY DEPLOYMENT:
|
||||||
|
# 5. ✅ Enable authentication (configure AUTHENTICATION_* and OIDC_*)
|
||||||
# 📁 WITH FILE UPLOADS:
|
# 6. ✅ Configure file handling (set NEXTCLOUD_* for uploads)
|
||||||
# - Configure all NEXTCLOUD_* settings
|
# 7. ✅ Enable collaboration (set GIT_* for contributions)
|
||||||
# - Test connection before enabling in UI
|
# 8. ✅ Enable audit trail (verify FORENSIC_AUDIT_ENABLED=true)
|
||||||
|
# 9. ✅ Configure embeddings for better search (AI_EMBEDDINGS_*)
|
||||||
# 🔄 WITH CONTRIBUTIONS:
|
# 10. ✅ Adjust rate limits based on expected usage
|
||||||
# - Configure all GIT_* settings
|
|
||||||
# - Test API token permissions for issue creation
|
|
||||||
|
|
||||||
# 🔍 WITH FULL MONITORING:
|
|
||||||
# - Enable FORENSIC_AUDIT_ENABLED=true
|
|
||||||
# - Configure audit retention and detail level
|
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# SETUP CHECKLIST
|
# 🏃 PERFORMANCE PRESETS - UNCOMMENT ONE IF NEEDED
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# ✅ 1. Set PUBLIC_BASE_URL to your domain
|
|
||||||
# ✅ 2. Change AUTH_SECRET to a secure random string
|
# 🚀 SPEED OPTIMIZED (faster responses, less comprehensive):
|
||||||
# ✅ 3. Configure AI endpoints (Ollama: leave API_KEY empty)
|
# AI_EMBEDDING_CANDIDATES=25
|
||||||
# ✅ 4. Start with default AI values, tune based on performance
|
# AI_MAX_SELECTED_ITEMS=15
|
||||||
# ✅ 5. Enable authentication if needed (configure OIDC)
|
# AI_MAX_TOOLS_TO_ANALYZE=10
|
||||||
# ✅ 6. Configure Nextcloud if file uploads needed
|
# AI_MICRO_TASK_DELAY_MS=250
|
||||||
# ✅ 7. Configure Git provider if contributions needed
|
|
||||||
# ✅ 8. Test with a simple query to verify pipeline works
|
# 🎯 ACCURACY OPTIMIZED (slower responses, more comprehensive):
|
||||||
# ✅ 9. Enable audit trail for transparency if desired
|
# AI_EMBEDDING_CANDIDATES=100
|
||||||
# ✅ 10. Tune performance settings based on usage patterns
|
# AI_MAX_SELECTED_ITEMS=50
|
||||||
|
# AI_MAX_TOOLS_TO_ANALYZE=40
|
||||||
|
# AI_MICRO_TASK_DELAY_MS=1000
|
||||||
|
|
||||||
|
# 🔋 RESOURCE CONSTRAINED (for limited AI quotas):
|
||||||
|
# AI_RATE_LIMIT_MAX_REQUESTS=2
|
||||||
|
# AI_MICRO_TASK_TOTAL_LIMIT=15
|
||||||
|
# AI_MAX_TOOLS_TO_ANALYZE=10
|
||||||
|
# AI_EMBEDDINGS_ENABLED=false
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
|
# 🌐 AI SERVICE EXAMPLES
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
# === OLLAMA (Local) ===
|
||||||
|
# AI_ANALYZER_ENDPOINT=http://localhost:11434/v1/chat/completions
|
||||||
|
# AI_ANALYZER_API_KEY=
|
||||||
|
# AI_ANALYZER_MODEL=llama3.1:8b
|
||||||
|
# AI_EMBEDDINGS_ENDPOINT=http://localhost:11434/v1/embeddings
|
||||||
|
# AI_EMBEDDINGS_API_KEY=
|
||||||
|
# AI_EMBEDDINGS_MODEL=nomic-embed-text
|
||||||
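If the Ollama example above is used, the two models it names have to be pulled locally before these endpoints answer (a sketch; swap in whichever models you actually configure):

# Hypothetical local preparation for the Ollama example
ollama pull llama3.1:8b
ollama pull nomic-embed-text
curl -sS http://localhost:11434/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{"model": "llama3.1:8b", "messages": [{"role": "user", "content": "ping"}]}'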
|
|
||||||
|
# === OPENAI ===
|
||||||
|
# AI_ANALYZER_ENDPOINT=https://api.openai.com/v1/chat/completions
|
||||||
|
# AI_ANALYZER_API_KEY=sk-your-openai-key
|
||||||
|
# AI_ANALYZER_MODEL=gpt-4o-mini
|
||||||
|
# AI_EMBEDDINGS_ENDPOINT=https://api.openai.com/v1/embeddings
|
||||||
|
# AI_EMBEDDINGS_API_KEY=sk-your-openai-key
|
||||||
|
# AI_EMBEDDINGS_MODEL=text-embedding-3-small
|
||||||
|
|
||||||
|
# === MISTRAL (Default) ===
|
||||||
|
# AI_ANALYZER_ENDPOINT=https://api.mistral.ai/v1/chat/completions
|
||||||
|
# AI_ANALYZER_API_KEY=your-mistral-key
|
||||||
|
# AI_ANALYZER_MODEL=mistral-small-latest
|
||||||
|
# AI_EMBEDDINGS_ENDPOINT=https://api.mistral.ai/v1/embeddings
|
||||||
|
# AI_EMBEDDINGS_API_KEY=your-mistral-key
|
||||||
|
# AI_EMBEDDINGS_MODEL=mistral-embed
|
deploy.sh (863 lines changed)
@@ -1,9 +1,392 @@
#!/bin/bash
|
#!/bin/bash
|
||||||
# ForensicPathways Deployment Script – *ownership-aware*
|
# ForensicPathways Deployment Script – *ownership-aware* + VISUAL ENHANCED
|
||||||
# Usage: sudo ./deploy.sh
|
# Usage: sudo ./deploy.sh
|
||||||
|
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
# 🎨 VISUAL ENHANCEMENT SYSTEM
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
|
||||||
|
# Color palette
|
||||||
|
declare -r RED='\033[0;31m'
|
||||||
|
declare -r GREEN='\033[0;32m'
|
||||||
|
declare -r YELLOW='\033[0;33m'
|
||||||
|
declare -r BLUE='\033[0;34m'
|
||||||
|
declare -r MAGENTA='\033[0;35m'
|
||||||
|
declare -r CYAN='\033[0;36m'
|
||||||
|
declare -r WHITE='\033[0;37m'
|
||||||
|
declare -r BOLD='\033[1m'
|
||||||
|
declare -r DIM='\033[2m'
|
||||||
|
declare -r ITALIC='\033[3m'
|
||||||
|
declare -r UNDERLINE='\033[4m'
|
||||||
|
declare -r BLINK='\033[5m'
|
||||||
|
declare -r REVERSE='\033[7m'
|
||||||
|
declare -r RESET='\033[0m'
|
||||||
|
|
||||||
|
# Gradient colors
|
||||||
|
declare -r GRAD1='\033[38;5;196m' # Bright red
|
||||||
|
declare -r GRAD2='\033[38;5;202m' # Orange
|
||||||
|
declare -r GRAD3='\033[38;5;208m' # Dark orange
|
||||||
|
declare -r GRAD4='\033[38;5;214m' # Yellow orange
|
||||||
|
declare -r GRAD5='\033[38;5;220m' # Yellow
|
||||||
|
declare -r GRAD6='\033[38;5;118m' # Light green
|
||||||
|
declare -r GRAD7='\033[38;5;82m' # Green
|
||||||
|
declare -r GRAD8='\033[38;5;51m' # Cyan
|
||||||
|
declare -r GRAD9='\033[38;5;33m' # Blue
|
||||||
|
declare -r GRAD10='\033[38;5;129m' # Purple
|
||||||
|
|
||||||
|
# Background colors
|
||||||
|
declare -r BG_RED='\033[41m'
|
||||||
|
declare -r BG_GREEN='\033[42m'
|
||||||
|
declare -r BG_YELLOW='\033[43m'
|
||||||
|
declare -r BG_BLUE='\033[44m'
|
||||||
|
declare -r BG_MAGENTA='\033[45m'
|
||||||
|
declare -r BG_CYAN='\033[46m'
|
||||||
|
|
||||||
|
# Unicode box drawing
|
||||||
|
declare -r BOX_H='═'
|
||||||
|
declare -r BOX_V='║'
|
||||||
|
declare -r BOX_TL='╔'
|
||||||
|
declare -r BOX_TR='╗'
|
||||||
|
declare -r BOX_BL='╚'
|
||||||
|
declare -r BOX_BR='╝'
|
||||||
|
declare -r BOX_T='╦'
|
||||||
|
declare -r BOX_B='╩'
|
||||||
|
declare -r BOX_L='╠'
|
||||||
|
declare -r BOX_R='╣'
|
||||||
|
declare -r BOX_C='╬'
|
||||||
|
|
||||||
|
# Fancy Unicode characters
|
||||||
|
declare -r ARROW_R='▶'
|
||||||
|
declare -r ARROW_D='▼'
|
||||||
|
declare -r DIAMOND='◆'
|
||||||
|
declare -r STAR='★'
|
||||||
|
declare -r BULLET='●'
|
||||||
|
declare -r CIRCLE='◯'
|
||||||
|
declare -r SQUARE='▪'
|
||||||
|
declare -r TRIANGLE='▲'
|
||||||
|
|
||||||
|
# Animation frames
|
||||||
|
SPINNER_FRAMES=('⠋' '⠙' '⠹' '⠸' '⠼' '⠴' '⠦' '⠧' '⠇' '⠏')
|
||||||
|
PULSE_FRAMES=('●' '◐' '◑' '◒' '◓' '◔' '◕' '◖' '◗' '◘')
|
||||||
|
WAVE_FRAMES=('▁' '▂' '▃' '▄' '▅' '▆' '▇' '█' '▇' '▆' '▅' '▄' '▃' '▂')
|
||||||
|
|
||||||
|
# Terminal dimensions
|
||||||
|
COLS=$(tput cols 2>/dev/null || echo 80)
|
||||||
|
LINES=$(tput lines 2>/dev/null || echo 24)
|
||||||
|
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
# 🎯 VISUAL FUNCTIONS
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
|
||||||
|
print_gradient_text() {
|
||||||
|
local text="$1"
|
||||||
|
local colors=("$GRAD1" "$GRAD2" "$GRAD3" "$GRAD4" "$GRAD5" "$GRAD6" "$GRAD7" "$GRAD8" "$GRAD9" "$GRAD10")
|
||||||
|
local length=${#text}
|
||||||
|
local color_count=${#colors[@]}
|
||||||
|
|
||||||
|
for ((i=0; i<length; i++)); do
|
||||||
|
local color_idx=$((i * color_count / length))
|
||||||
|
printf "${colors[$color_idx]}${text:$i:1}"
|
||||||
|
done
|
||||||
|
printf "$RESET"
|
||||||
|
}
|
||||||
|
|
||||||
|
animate_text() {
|
||||||
|
local text="$1"
|
||||||
|
local delay="${2:-0.03}"
|
||||||
|
|
||||||
|
for ((i=0; i<=${#text}; i++)); do
|
||||||
|
printf "\r${CYAN}${text:0:$i}${RESET}"
|
||||||
|
sleep "$delay"
|
||||||
|
done
|
||||||
|
echo
|
||||||
|
}
|
||||||
|
|
||||||
|
draw_box() {
|
||||||
|
local title="$1"
|
||||||
|
local content="$2"
|
||||||
|
local width=${3:-$((COLS-4))}
|
||||||
|
local color="${4:-$CYAN}"
|
||||||
|
|
||||||
|
# Top border
|
||||||
|
printf "${color}${BOX_TL}"
|
||||||
|
for ((i=0; i<width-2; i++)); do printf "${BOX_H}"; done
|
||||||
|
printf "${BOX_TR}${RESET}\n"
|
||||||
|
|
||||||
|
# Title line
|
||||||
|
if [ -n "$title" ]; then
|
||||||
|
local title_len=${#title}
|
||||||
|
local padding=$(((width-title_len-2)/2))
|
||||||
|
printf "${color}${BOX_V}${RESET}"
|
||||||
|
for ((i=0; i<padding; i++)); do printf " "; done
|
||||||
|
printf "${BOLD}${WHITE}$title${RESET}"
|
||||||
|
for ((i=0; i<width-title_len-padding-2; i++)); do printf " "; done
|
||||||
|
printf "${color}${BOX_V}${RESET}\n"
|
||||||
|
|
||||||
|
# Separator
|
||||||
|
printf "${color}${BOX_L}"
|
||||||
|
for ((i=0; i<width-2; i++)); do printf "${BOX_H}"; done
|
||||||
|
printf "${BOX_R}${RESET}\n"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Content lines
|
||||||
|
while IFS= read -r line; do
|
||||||
|
local line_len=${#line}
|
||||||
|
printf "${color}${BOX_V}${RESET} %-$((width-4))s ${color}${BOX_V}${RESET}\n" "$line"
|
||||||
|
done <<< "$content"
|
||||||
|
|
||||||
|
# Bottom border
|
||||||
|
printf "${color}${BOX_BL}"
|
||||||
|
for ((i=0; i<width-2; i++)); do printf "${BOX_H}"; done
|
||||||
|
printf "${BOX_BR}${RESET}\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
progress_bar() {
|
||||||
|
local current="$1"
|
||||||
|
local total="$2"
|
||||||
|
local width="${3:-50}"
|
||||||
|
local label="$4"
|
||||||
|
|
||||||
|
local percentage=$((current * 100 / total))
|
||||||
|
local filled=$((current * width / total))
|
||||||
|
local empty=$((width - filled))
|
||||||
|
|
||||||
|
printf "\r${BOLD}${label}${RESET} ["
|
||||||
|
|
||||||
|
# Filled portion with gradient
|
||||||
|
for ((i=0; i<filled; i++)); do
|
||||||
|
local color_idx=$((i * 10 / width))
|
||||||
|
case $color_idx in
|
||||||
|
0) printf "${GRAD1}█${RESET}" ;;
|
||||||
|
1) printf "${GRAD2}█${RESET}" ;;
|
||||||
|
2) printf "${GRAD3}█${RESET}" ;;
|
||||||
|
3) printf "${GRAD4}█${RESET}" ;;
|
||||||
|
4) printf "${GRAD5}█${RESET}" ;;
|
||||||
|
5) printf "${GRAD6}█${RESET}" ;;
|
||||||
|
6) printf "${GRAD7}█${RESET}" ;;
|
||||||
|
7) printf "${GRAD8}█${RESET}" ;;
|
||||||
|
8) printf "${GRAD9}█${RESET}" ;;
|
||||||
|
*) printf "${GRAD10}█${RESET}" ;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
# Empty portion
|
||||||
|
for ((i=0; i<empty; i++)); do
|
||||||
|
printf "${DIM}░${RESET}"
|
||||||
|
done
|
||||||
|
|
||||||
|
printf "] ${BOLD}${percentage}%%${RESET}"
|
||||||
|
}
|
||||||
|
|
||||||
|
spinner() {
|
||||||
|
local pid=$1
|
||||||
|
local message="$2"
|
||||||
|
local frame=0
|
||||||
|
|
||||||
|
while kill -0 $pid 2>/dev/null; do
|
||||||
|
printf "\r${CYAN}${SPINNER_FRAMES[$frame]}${RESET} ${message}"
|
||||||
|
frame=$(((frame + 1) % ${#SPINNER_FRAMES[@]}))
|
||||||
|
sleep 0.1
|
||||||
|
done
|
||||||
|
printf "\r${GREEN}✓${RESET} ${message}\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
pulsing_dots() {
|
||||||
|
local count="${1:-5}"
|
||||||
|
local cycles="${2:-3}"
|
||||||
|
|
||||||
|
for ((c=0; c<cycles; c++)); do
|
||||||
|
for frame in "${PULSE_FRAMES[@]}"; do
|
||||||
|
printf "\r${MAGENTA}"
|
||||||
|
for ((i=0; i<count; i++)); do
|
||||||
|
printf "$frame "
|
||||||
|
done
|
||||||
|
printf "${RESET}"
|
||||||
|
sleep 0.1
|
||||||
|
done
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
wave_animation() {
|
||||||
|
local width="${1:-$((COLS/2))}"
|
||||||
|
local cycles="${2:-2}"
|
||||||
|
|
||||||
|
for ((c=0; c<cycles; c++)); do
|
||||||
|
for frame in "${WAVE_FRAMES[@]}"; do
|
||||||
|
printf "\r${CYAN}"
|
||||||
|
for ((i=0; i<width; i++)); do
|
||||||
|
printf "$frame"
|
||||||
|
done
|
||||||
|
printf "${RESET}"
|
||||||
|
sleep 0.05
|
||||||
|
done
|
||||||
|
done
|
||||||
|
echo
|
||||||
|
}
|
||||||
|
|
||||||
|
celebrate() {
|
||||||
|
local width=$((COLS-10))
|
||||||
|
|
||||||
|
# Fireworks effect
|
||||||
|
for ((i=0; i<5; i++)); do
|
||||||
|
printf "\r"
|
||||||
|
for ((j=0; j<width; j++)); do
|
||||||
|
case $((RANDOM % 10)) in
|
||||||
|
0) printf "${GRAD1}*${RESET}" ;;
|
||||||
|
1) printf "${GRAD3}•${RESET}" ;;
|
||||||
|
2) printf "${GRAD5}+${RESET}" ;;
|
||||||
|
3) printf "${GRAD7}×${RESET}" ;;
|
||||||
|
4) printf "${GRAD9}◆${RESET}" ;;
|
||||||
|
*) printf " " ;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
sleep 0.2
|
||||||
|
done
|
||||||
|
echo
|
||||||
|
}
|
||||||
|
|
||||||
|
typewriter() {
|
||||||
|
local text="$1"
|
||||||
|
local delay="${2:-0.02}"
|
||||||
|
local color="${3:-$GREEN}"
|
||||||
|
|
||||||
|
printf "${color}"
|
||||||
|
for ((i=0; i<${#text}; i++)); do
|
||||||
|
printf "${text:$i:1}"
|
||||||
|
sleep "$delay"
|
||||||
|
done
|
||||||
|
printf "${RESET}\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
fancy_header() {
|
||||||
|
clear
|
||||||
|
local title="$1"
|
||||||
|
local subtitle="$2"
|
||||||
|
|
||||||
|
# Calculate centering
|
||||||
|
local title_len=${#title}
|
||||||
|
local subtitle_len=${#subtitle}
|
||||||
|
local box_width=$((COLS > 80 ? 80 : COLS-4))
|
||||||
|
local title_padding=$(((box_width-title_len)/2))
|
||||||
|
local subtitle_padding=$(((box_width-subtitle_len)/2))
|
||||||
|
|
||||||
|
echo
|
||||||
|
# Top gradient border
|
||||||
|
printf "${BOLD}"
|
||||||
|
for ((i=0; i<box_width; i++)); do
|
||||||
|
local color_idx=$((i * 10 / box_width))
|
||||||
|
case $color_idx in
|
||||||
|
0|1) printf "${GRAD1}${BOX_H}${RESET}" ;;
|
||||||
|
2|3) printf "${GRAD3}${BOX_H}${RESET}" ;;
|
||||||
|
4|5) printf "${GRAD5}${BOX_H}${RESET}" ;;
|
||||||
|
6|7) printf "${GRAD7}${BOX_H}${RESET}" ;;
|
||||||
|
*) printf "${GRAD9}${BOX_H}${RESET}" ;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
echo
|
||||||
|
|
||||||
|
# Title line
|
||||||
|
printf "${GRAD1}${BOX_V}${RESET}"
|
||||||
|
for ((i=0; i<title_padding; i++)); do printf " "; done
|
||||||
|
print_gradient_text "$title"
|
||||||
|
for ((i=0; i<box_width-title_len-title_padding-2; i++)); do printf " "; done
|
||||||
|
printf "${GRAD1}${BOX_V}${RESET}\n"
|
||||||
|
|
||||||
|
# Subtitle line
|
||||||
|
if [ -n "$subtitle" ]; then
|
||||||
|
printf "${GRAD3}${BOX_V}${RESET}"
|
||||||
|
for ((i=0; i<subtitle_padding; i++)); do printf " "; done
|
||||||
|
printf "${ITALIC}${DIM}$subtitle${RESET}"
|
||||||
|
for ((i=0; i<box_width-subtitle_len-subtitle_padding-2; i++)); do printf " "; done
|
||||||
|
printf "${GRAD3}${BOX_V}${RESET}\n"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Bottom gradient border
|
||||||
|
printf "${BOLD}"
|
||||||
|
for ((i=0; i<box_width; i++)); do
|
||||||
|
local color_idx=$((i * 10 / box_width))
|
||||||
|
case $color_idx in
|
||||||
|
0|1) printf "${GRAD1}${BOX_H}${RESET}" ;;
|
||||||
|
2|3) printf "${GRAD3}${BOX_H}${RESET}" ;;
|
||||||
|
4|5) printf "${GRAD5}${BOX_H}${RESET}" ;;
|
||||||
|
6|7) printf "${GRAD7}${BOX_H}${RESET}" ;;
|
||||||
|
*) printf "${GRAD9}${BOX_H}${RESET}" ;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
echo
|
||||||
|
echo
|
||||||
|
}
|
||||||
|
|
||||||
|
section_header() {
|
||||||
|
local section_num="$1"
|
||||||
|
local title="$2"
|
||||||
|
local icon="$3"
|
||||||
|
|
||||||
|
echo
|
||||||
|
printf "${BOLD}${BG_BLUE}${WHITE} PHASE $section_num ${RESET} "
|
||||||
|
printf "${BOLD}${BLUE}$icon $title${RESET}\n"
|
||||||
|
|
||||||
|
# Animated underline
|
||||||
|
printf "${BLUE}"
|
||||||
|
for ((i=0; i<$((${#title}+10)); i++)); do
|
||||||
|
printf "▄"
|
||||||
|
sleep 0.01
|
||||||
|
done
|
||||||
|
printf "${RESET}\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
status_ok() {
|
||||||
|
printf "${GREEN}${BOLD}✓${RESET} ${1}\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
status_error() {
|
||||||
|
printf "${RED}${BOLD}✗${RESET} ${1}\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
status_warning() {
|
||||||
|
printf "${YELLOW}${BOLD}⚠${RESET} ${1}\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
status_info() {
|
||||||
|
printf "${CYAN}${BOLD}ℹ${RESET} ${1}\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
status_working() {
|
||||||
|
printf "${MAGENTA}${BOLD}◐${RESET} ${1}"
|
||||||
|
}
|
||||||
|
|
||||||
|
animated_countdown() {
|
||||||
|
local seconds="$1"
|
||||||
|
local message="$2"
|
||||||
|
|
||||||
|
for ((i=seconds; i>0; i--)); do
|
||||||
|
printf "\r${YELLOW}${BOLD}⏳ $message in ${i}s...${RESET}"
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
printf "\r${GREEN}${BOLD}🚀 $message${RESET} \n"
|
||||||
|
}
|
||||||
|
|
||||||
|
matrix_rain() {
|
||||||
|
local duration="${1:-2}"
|
||||||
|
local chars="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789@#$%^&*()_+-=[]{}|;:,.<>?"
|
||||||
|
|
||||||
|
for ((i=0; i<duration*10; i++)); do
|
||||||
|
printf "\r${GREEN}"
|
||||||
|
for ((j=0; j<$((COLS/3)); j++)); do
|
||||||
|
printf "${chars:$((RANDOM % ${#chars})):1}"
|
||||||
|
done
|
||||||
|
printf "${RESET}"
|
||||||
|
sleep 0.1
|
||||||
|
done
|
||||||
|
echo
|
||||||
|
}
|
||||||
|
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
# 🚀 MAIN SCRIPT VARIABLES
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
|
||||||
WEBROOT="/var/www/forensic-pathways"
|
WEBROOT="/var/www/forensic-pathways"
|
||||||
LOG_DIR="$WEBROOT/logs"
|
LOG_DIR="$WEBROOT/logs"
|
||||||
DATA_DIR="$WEBROOT/data"
|
DATA_DIR="$WEBROOT/data"
|
||||||
@@ -13,219 +396,459 @@ UPLOADS_DIR="$WEBROOT/public/uploads"
|
|||||||
ORIGINAL_USER="${SUDO_USER:-$USER}"
|
ORIGINAL_USER="${SUDO_USER:-$USER}"
|
||||||
ORIGINAL_HOME=$(eval echo "~$ORIGINAL_USER")
|
ORIGINAL_HOME=$(eval echo "~$ORIGINAL_USER")
|
||||||
|
|
||||||
echo "🚀 ForensicPathways Deployment Starting..."
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
echo "📅 $(date '+%Y-%m-%d %H:%M:%S')"
|
# 🎬 SPECTACULAR OPENING SEQUENCE
|
||||||
echo "👤 Original user: $ORIGINAL_USER"
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
echo "📁 Working directory: $(pwd)"
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
###############################################################################
|
# Terminal setup
|
||||||
# 0. Safety checks
|
tput civis        # Hide cursor (should be restored with 'tput cnorm' before exit, e.g. via a trap)
|
||||||
###############################################################################
|
|
||||||
|
# ASCII Art Banner
|
||||||
|
fancy_header "FORENSIC PATHWAYS DEPLOYMENT" "Advanced Visual Enhancement System"
|
||||||
|
|
||||||
|
# Matrix effect intro
|
||||||
|
printf "${DIM}${GREEN}Initializing deployment matrix...${RESET}\n"
|
||||||
|
matrix_rain 1
|
||||||
|
|
||||||
|
# System information display
|
||||||
|
draw_box "DEPLOYMENT PARAMETERS" "$(cat << EOF
|
||||||
|
Timestamp: $(date '+%Y-%m-%d %H:%M:%S')
|
||||||
|
Original User: $ORIGINAL_USER
|
||||||
|
Working Directory: $(pwd)
|
||||||
|
Target Webroot: $WEBROOT
|
||||||
|
Terminal Size: ${COLS}x${LINES}
|
||||||
|
EOF)" 60 "$MAGENTA"
|
||||||
|
|
||||||
|
sleep 1
|
||||||
|
|
||||||
|
# Animated countdown
|
||||||
|
animated_countdown 3 "Starting deployment"
|
||||||
|
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
# 🔒 PHASE 0: SAFETY CHECKS
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
|
||||||
|
section_header "0" "SECURITY & SAFETY VALIDATION" "🔒"
|
||||||
|
|
||||||
|
status_working "Verifying root privileges"
|
||||||
|
pulsing_dots 3 1
|
||||||
if [ "$EUID" -ne 0 ]; then
|
if [ "$EUID" -ne 0 ]; then
|
||||||
echo "❌ Error: This script must be run as root (use sudo)"; exit 1
|
status_error "Script must be run as root (use sudo)"
|
||||||
|
echo
|
||||||
|
printf "${RED}${BOLD}${BG_YELLOW} DEPLOYMENT TERMINATED ${RESET}\n"
|
||||||
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
status_ok "Root privileges confirmed"
|
||||||
|
|
||||||
|
status_working "Validating project structure"
|
||||||
|
pulsing_dots 3 1
|
||||||
if [ ! -f "package.json" ] || [ ! -f "astro.config.mjs" ]; then
|
if [ ! -f "package.json" ] || [ ! -f "astro.config.mjs" ]; then
|
||||||
echo "❌ Error: Must run from ForensicPathways project root"
|
status_error "Must run from ForensicPathways project root"
|
||||||
echo "🔍 Current directory: $(pwd)"; echo "🔍 Files found: $(ls -la)"; exit 1
|
status_info "Current directory: $(pwd)"
|
||||||
|
status_info "Files found: $(ls -la)"
|
||||||
|
echo
|
||||||
|
printf "${RED}${BOLD}${BG_YELLOW} DEPLOYMENT TERMINATED ${RESET}\n"
|
||||||
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
status_ok "Project structure validated"
|
||||||
|
|
||||||
|
# Security scan animation
|
||||||
|
printf "${CYAN}${BOLD}🔍 Running security scan${RESET}"
|
||||||
|
for i in {1..20}; do
|
||||||
|
printf "${CYAN}.${RESET}"
|
||||||
|
sleep 0.05
|
||||||
|
done
|
||||||
|
echo
|
||||||
|
status_ok "Security scan completed - all clear"
|
||||||
|
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
# 🔧 PHASE 1: NPM BUILD SYSTEM
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
|
||||||
###############################################################################
|
|
||||||
# 1. Helper – build with whichever npm is available for the original user
|
|
||||||
###############################################################################
|
|
||||||
find_and_use_npm() {
|
find_and_use_npm() {
|
||||||
echo "🔍 Searching for npm installation..."
|
section_header "1" "BUILD SYSTEM INITIALIZATION" "🔧"
|
||||||
|
|
||||||
|
printf "${CYAN}${BOLD}🔍 Scanning for npm installation...${RESET}\n"
|
||||||
|
wave_animation 30 1
|
||||||
|
|
||||||
# A) system-wide npm
|
# A) system-wide npm
|
||||||
if command -v npm &>/dev/null; then
|
if command -v npm &>/dev/null; then
|
||||||
echo "✅ Found system npm: $(which npm)"
|
status_ok "System npm located: $(which npm)"
|
||||||
echo "📦 Installing dependencies…"
|
|
||||||
sudo -u "$ORIGINAL_USER" npm install
|
printf "${MAGENTA}${BOLD}📦 Installing dependencies${RESET}"
|
||||||
echo "📦 Building application…"
|
{
|
||||||
sudo -u "$ORIGINAL_USER" npm run build
|
sudo -u "$ORIGINAL_USER" npm install > /tmp/npm_install.log 2>&1 &
|
||||||
|
spinner $! "Installing dependencies"
|
||||||
|
}
|
||||||
|
|
||||||
|
printf "${MAGENTA}${BOLD}🏗️ Building application${RESET}"
|
||||||
|
{
|
||||||
|
sudo -u "$ORIGINAL_USER" npm run build > /tmp/npm_build.log 2>&1 &
|
||||||
|
spinner $! "Building application"
|
||||||
|
}
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# B) nvm-managed npm
|
# B) nvm-managed npm
|
||||||
echo "🔍 Checking for nvm installation..."
|
printf "${YELLOW}🔍 Scanning for nvm installation...${RESET}\n"
|
||||||
if sudo -u "$ORIGINAL_USER" bash -c "
|
if sudo -u "$ORIGINAL_USER" bash -c "
|
||||||
export NVM_DIR='$ORIGINAL_HOME/.nvm'
|
export NVM_DIR='$ORIGINAL_HOME/.nvm'
|
||||||
[ -s \"\$NVM_DIR/nvm.sh\" ] && source \"\$NVM_DIR/nvm.sh\"
|
[ -s \"\$NVM_DIR/nvm.sh\" ] && source \"\$NVM_DIR/nvm.sh\"
|
||||||
[ -s '$ORIGINAL_HOME/.bashrc' ] && source '$ORIGINAL_HOME/.bashrc'
|
[ -s '$ORIGINAL_HOME/.bashrc' ] && source '$ORIGINAL_HOME/.bashrc'
|
||||||
command -v npm &>/dev/null
|
command -v npm &>/dev/null
|
||||||
"; then
|
"; then
|
||||||
echo "✅ Found nvm-managed npm"
|
status_ok "NVM-managed npm located"
|
||||||
echo "📦 Installing dependencies with nvm…"
|
|
||||||
|
printf "${MAGENTA}${BOLD}📦 Installing dependencies with nvm${RESET}"
|
||||||
|
{
|
||||||
sudo -u "$ORIGINAL_USER" bash -c "
|
sudo -u "$ORIGINAL_USER" bash -c "
|
||||||
export NVM_DIR='$ORIGINAL_HOME/.nvm'
|
export NVM_DIR='$ORIGINAL_HOME/.nvm'
|
||||||
[ -s \"\$NVM_DIR/nvm.sh\" ] && source \"\$NVM_DIR/nvm.sh\"
|
[ -s \"\$NVM_DIR/nvm.sh\" ] && source \"\$NVM_DIR/nvm.sh\"
|
||||||
[ -s '$ORIGINAL_HOME/.bashrc' ] && source '$ORIGINAL_HOME/.bashrc'
|
[ -s '$ORIGINAL_HOME/.bashrc' ] && source '$ORIGINAL_HOME/.bashrc'
|
||||||
npm install
|
npm install > /tmp/npm_install.log 2>&1
|
||||||
npm run build
|
npm run build > /tmp/npm_build.log 2>&1
|
||||||
"
|
" &
|
||||||
|
spinner $! "Building with nvm"
|
||||||
|
}
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# C) nothing found
|
# C) Installation instructions with fancy formatting
|
||||||
cat <<'EOF'
|
draw_box "NPM NOT FOUND" "$(cat << 'EOF'
|
||||||
❌ npm not found in system or user environment
|
Please install Node.js and npm first:
|
||||||
|
|
||||||
💡 Please install Node.js and npm first:
|
Option 1 (apt):
|
||||||
# Option 1 (apt):
|
|
||||||
sudo apt update && sudo apt install nodejs npm
|
sudo apt update && sudo apt install nodejs npm
|
||||||
# Option 2 (NodeSource – recommended):
|
|
||||||
|
Option 2 (NodeSource – recommended):
|
||||||
curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash -
|
curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash -
|
||||||
sudo apt-get install -y nodejs
|
sudo apt-get install -y nodejs
|
||||||
# Option 3 (nvm – as user):
|
|
||||||
|
Option 3 (nvm – as user):
|
||||||
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
|
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
|
||||||
source ~/.bashrc && nvm install 20
|
source ~/.bashrc && nvm install 20
|
||||||
EOF
|
EOF)" 70 "$RED"
|
||||||
|
|
||||||
return 1
|
return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
###############################################################################
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
# 2. Build (if needed) – runs as ORIGINAL_USER so $PATH is intact
|
# 🏗️ PHASE 2: BUILD ORCHESTRATION
|
||||||
###############################################################################
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
|
||||||
|
section_header "2" "BUILD ORCHESTRATION" "🏗️"
|
||||||
|
|
||||||
if [ ! -d "dist" ] || [ ! "$(ls -A dist 2>/dev/null)" ]; then
|
if [ ! -d "dist" ] || [ ! "$(ls -A dist 2>/dev/null)" ]; then
|
||||||
echo "📦 No dist/ directory found, building…"
|
status_info "No dist/ directory found"
|
||||||
|
typewriter "Initiating build process..." 0.05 "$YELLOW"
|
||||||
find_and_use_npm || exit 1
|
find_and_use_npm || exit 1
|
||||||
else
|
else
|
||||||
echo "📦 Found existing dist/ directory"
|
status_ok "Existing dist/ directory detected"
|
||||||
read -rp "🤔 Rebuild application? (y/N): " REPLY; echo
|
echo
|
||||||
|
printf "${YELLOW}${BOLD}🤔 Rebuild application? ${RESET}${DIM}(y/N):${RESET} "
|
||||||
|
read -r REPLY
|
||||||
|
echo
|
||||||
|
|
||||||
if [[ $REPLY =~ ^[Yy]$ ]]; then
|
if [[ $REPLY =~ ^[Yy]$ ]]; then
|
||||||
find_and_use_npm || { echo "💡 Using existing dist/ due to build failure"; }
|
typewriter "Rebuilding application..." 0.05 "$CYAN"
|
||||||
|
find_and_use_npm || {
|
||||||
|
status_warning "Build failed, using existing dist/"
|
||||||
|
}
|
||||||
else
|
else
|
||||||
echo "📦 Using existing build"
|
typewriter "Using existing build..." 0.05 "$GREEN"
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Build validation with dramatic effect
|
||||||
|
printf "${CYAN}${BOLD}🔍 Validating build output${RESET}"
|
||||||
|
pulsing_dots 8 2
|
||||||
if [ ! -d "dist" ] || [ ! "$(ls -A dist 2>/dev/null)" ]; then
|
if [ ! -d "dist" ] || [ ! "$(ls -A dist 2>/dev/null)" ]; then
|
||||||
echo "❌ Error: Build failed or dist/ is empty"; exit 1
|
echo
|
||||||
|
draw_box "BUILD FAILURE" "Build failed or dist/ directory is empty" 50 "$RED"
|
||||||
|
exit 1
|
||||||
fi
|
fi
|
||||||
echo "✅ Build completed successfully"
|
|
||||||
|
|
||||||
###############################################################################
|
# Build success celebration
|
||||||
# 3. Prepare target directories
|
echo
|
||||||
###############################################################################
|
printf "${GREEN}${BOLD}${BG_GREEN}${WHITE} BUILD SUCCESS ${RESET}\n"
|
||||||
echo "📁 Setting up target directories..."
|
celebrate
|
||||||
mkdir -p "$WEBROOT" "$LOG_DIR" "$DATA_DIR" "$UPLOADS_DIR" "$WEBROOT/src/data"
|
|
||||||
|
|
||||||
###############################################################################
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
# 4. Deploy build files
|
# 📁 PHASE 3: INFRASTRUCTURE SETUP
|
||||||
###############################################################################
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
echo "📋 Copying application files…"
|
|
||||||
cp -r dist/. "$WEBROOT/"
|
|
||||||
echo "✅ Application files copied ($(du -sh dist | cut -f1))"
|
|
||||||
|
|
||||||
|
section_header "3" "INFRASTRUCTURE PROVISIONING" "📁"
|
||||||
|
|
||||||
|
status_working "Creating directory structure"
|
||||||
|
{
|
||||||
|
mkdir -p "$WEBROOT" "$LOG_DIR" "$DATA_DIR" "$UPLOADS_DIR" "$WEBROOT/src/data" &
|
||||||
|
spinner $! "Provisioning directories"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Directory creation progress
|
||||||
|
DIRS=("$WEBROOT" "$LOG_DIR" "$DATA_DIR" "$UPLOADS_DIR" "$WEBROOT/src/data")
|
||||||
|
for i in "${!DIRS[@]}"; do
|
||||||
|
progress_bar $((i+1)) ${#DIRS[@]} 40 "Creating directories"
|
||||||
|
sleep 0.1
|
||||||
|
done
|
||||||
|
echo
|
||||||
|
status_ok "Directory infrastructure ready"
|
||||||
|
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
# 🚀 PHASE 4: APPLICATION DEPLOYMENT
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
|
||||||
|
section_header "4" "APPLICATION DEPLOYMENT" "🚀"
|
||||||
|
|
||||||
|
# File copy with visual progress
|
||||||
|
status_working "Deploying application files"
|
||||||
|
TOTAL_FILES=$(find dist -type f | wc -l)
|
||||||
|
COPIED=0
|
||||||
|
|
||||||
|
{
|
||||||
|
cp -r dist/. "$WEBROOT/" &
|
||||||
|
PID=$!
|
||||||
|
|
||||||
|
while kill -0 $PID 2>/dev/null; do
|
||||||
|
CURRENT_FILES=$(find "$WEBROOT" -type f 2>/dev/null | wc -l)
|
||||||
|
if [ $CURRENT_FILES -gt $COPIED ]; then
|
||||||
|
COPIED=$CURRENT_FILES
|
||||||
|
progress_bar $COPIED $TOTAL_FILES 50 "Copying files"
|
||||||
|
fi
|
||||||
|
sleep 0.1
|
||||||
|
done
|
||||||
|
|
||||||
|
wait $PID
|
||||||
|
progress_bar $TOTAL_FILES $TOTAL_FILES 50 "Copying files"
|
||||||
|
}
|
||||||
|
|
||||||
|
echo
|
||||||
|
SIZE=$(du -sh dist | cut -f1)
|
||||||
|
status_ok "Application deployed ($SIZE, $TOTAL_FILES files)"
|
||||||
|
|
||||||
|
# Package.json copy with flair
|
||||||
|
printf "${MAGENTA}${BOLD}📋 Deploying package.json${RESET}"
|
||||||
|
pulsing_dots 3 1
|
||||||
cp package.json "$WEBROOT/"
|
cp package.json "$WEBROOT/"
|
||||||
echo "✅ package.json copied"
|
status_ok "Package configuration deployed"
|
||||||
|
|
||||||
###############################################################################
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
# 5. **Runtime dependencies** – temporarily chown to ORIGINAL_USER
|
# ⚙️ PHASE 5: RUNTIME DEPENDENCY MANAGEMENT
|
||||||
###############################################################################
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
echo "📦 Installing runtime dependencies…"
|
|
||||||
|
|
||||||
# Temporary hand-off
|
section_header "5" "RUNTIME DEPENDENCY RESOLUTION" "⚙️"
|
||||||
|
|
||||||
|
typewriter "Transferring ownership for dependency installation..." 0.03 "$YELLOW"
|
||||||
chown -R "$ORIGINAL_USER":"$ORIGINAL_USER" "$WEBROOT"
|
chown -R "$ORIGINAL_USER":"$ORIGINAL_USER" "$WEBROOT"
|
||||||
|
|
||||||
|
printf "${CYAN}${BOLD}📦 Installing runtime dependencies${RESET}\n"
|
||||||
|
{
|
||||||
sudo -u "$ORIGINAL_USER" bash -c '
|
sudo -u "$ORIGINAL_USER" bash -c '
|
||||||
set -e
|
set -e
|
||||||
cd "'"$WEBROOT"'"
|
cd "'"$WEBROOT"'"
|
||||||
if command -v npm &>/dev/null; then
|
if command -v npm &>/dev/null; then
|
||||||
npm install --production
|
npm install --production > /tmp/runtime_deps.log 2>&1
|
||||||
else
|
else
|
||||||
export NVM_DIR="'$ORIGINAL_HOME'/.nvm"
|
export NVM_DIR="'$ORIGINAL_HOME'/.nvm"
|
||||||
[ -s "$NVM_DIR/nvm.sh" ] && source "$NVM_DIR/nvm.sh"
|
[ -s "$NVM_DIR/nvm.sh" ] && source "$NVM_DIR/nvm.sh"
|
||||||
[ -s "'$ORIGINAL_HOME'/.bashrc" ] && source "'$ORIGINAL_HOME'/.bashrc"
|
[ -s "'$ORIGINAL_HOME'/.bashrc" ] && source "'$ORIGINAL_HOME'/.bashrc"
|
||||||
npm install --production
|
npm install --production > /tmp/runtime_deps.log 2>&1
|
||||||
fi
|
fi
|
||||||
'
|
' &
|
||||||
echo "✅ Runtime dependencies installed"
|
spinner $! "Installing runtime dependencies"
|
||||||
|
}
|
||||||
|
|
||||||
###############################################################################
|
# Dependency success effect
|
||||||
# 6. Additional data & content
|
printf "${GREEN}${BOLD}🎯 Dependencies locked and loaded!${RESET}\n"
|
||||||
###############################################################################
|
wave_animation 40 1
|
||||||
echo "🗂️ Setting up data files…"
|
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
# 🗃️ PHASE 6: DATA & CONTENT ORCHESTRATION
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
|
||||||
|
section_header "6" "DATA & CONTENT ORCHESTRATION" "🗃️"
|
||||||
|
|
||||||
|
status_working "Deploying core data structures"
|
||||||
if [ -f "src/data/tools.yaml" ]; then
|
if [ -f "src/data/tools.yaml" ]; then
|
||||||
cp src/data/tools.yaml "$WEBROOT/src/data/"
|
cp src/data/tools.yaml "$WEBROOT/src/data/"
|
||||||
TOOL_COUNT=$(grep -c "^ - name:" "src/data/tools.yaml" || echo "unknown")
|
TOOL_COUNT=$(grep -c "^ - name:" "src/data/tools.yaml" || echo "unknown")
|
||||||
echo "✅ tools.yaml copied ($TOOL_COUNT tools)"
|
status_ok "Tools database deployed ($TOOL_COUNT tools)"
|
||||||
else
|
else
|
||||||
echo "❌ Error: src/data/tools.yaml not found"; exit 1
|
status_error "Critical file missing: src/data/tools.yaml"
|
||||||
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
status_working "Deploying knowledge base"
|
||||||
if [ -d "src/content/knowledgebase" ]; then
|
if [ -d "src/content/knowledgebase" ]; then
|
||||||
mkdir -p "$WEBROOT/src/content"
|
mkdir -p "$WEBROOT/src/content"
|
||||||
cp -r src/content/knowledgebase "$WEBROOT/src/content/"
|
cp -r src/content/knowledgebase "$WEBROOT/src/content/"
|
||||||
KB_COUNT=$(find src/content/knowledgebase -name "*.md" 2>/dev/null | wc -l)
|
KB_COUNT=$(find src/content/knowledgebase -name "*.md" 2>/dev/null | wc -l)
|
||||||
echo "✅ Knowledgebase content copied ($KB_COUNT articles)"
|
status_ok "Knowledge base deployed ($KB_COUNT articles)"
|
||||||
|
|
||||||
|
# Knowledge base visualization
|
||||||
|
printf "${BLUE}${BOLD}📚 Knowledge Base Structure:${RESET}\n"
|
||||||
|
find src/content/knowledgebase -name "*.md" | head -5 | while read -r file; do
|
||||||
|
printf " ${CYAN}${DIAMOND}${RESET} ${file#src/content/knowledgebase/}\n"
|
||||||
|
done
|
||||||
|
if [ $KB_COUNT -gt 5 ]; then
|
||||||
|
printf " ${DIM}... and $((KB_COUNT-5)) more articles${RESET}\n"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
status_warning "No knowledge base directory found (optional)"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
###############################################################################
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
# 7. Environment configuration
|
# ⚙️ PHASE 7: ENVIRONMENT CONFIGURATION
|
||||||
###############################################################################
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
echo "🔧 Setting up environment configuration…"
|
|
||||||
|
section_header "7" "ENVIRONMENT CONFIGURATION" "⚙️"
|
||||||
|
|
||||||
|
printf "${YELLOW}${BOLD}🔧 Configuring environment${RESET}"
|
||||||
|
pulsing_dots 5 1
|
||||||
cp .env.example "$WEBROOT/.env"
|
cp .env.example "$WEBROOT/.env"
|
||||||
echo "✅ Created .env from .env.example template"
|
status_ok "Environment template deployed"
|
||||||
echo "⚠️ IMPORTANT: Edit $WEBROOT/.env with your configuration"
|
|
||||||
|
|
||||||
###############################################################################
|
draw_box "CONFIGURATION NOTICE" "IMPORTANT: Edit $WEBROOT/.env with your configuration" 60 "$YELLOW"
|
||||||
# 8. Logs
|
|
||||||
###############################################################################
|
|
||||||
echo "📝 Creating log files…"
|
|
||||||
touch "$LOG_DIR/access.log" "$LOG_DIR/error.log" "$LOG_DIR/ai-pipeline.log"
|
|
||||||
|
|
||||||
###############################################################################
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
# 9. FINAL permissions – hand back to www-data
|
# 📝 PHASE 8: LOGGING INFRASTRUCTURE
|
||||||
###############################################################################
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
echo "🔐 Setting final permissions…"
|
|
||||||
chown -R www-data:www-data "$WEBROOT"
|
section_header "8" "LOGGING INFRASTRUCTURE" "📝"
|
||||||
chmod -R 755 "$WEBROOT"
|
|
||||||
chmod 600 "$WEBROOT/.env"
|
LOG_FILES=("access.log" "error.log" "ai-pipeline.log")
|
||||||
chmod 755 "$DATA_DIR" "$UPLOADS_DIR" "$LOG_DIR"
|
for i in "${!LOG_FILES[@]}"; do
|
||||||
chmod 644 "$LOG_DIR"/*.log
|
progress_bar $((i+1)) ${#LOG_FILES[@]} 30 "Creating log files"
|
||||||
|
touch "$LOG_DIR/${LOG_FILES[$i]}"
|
||||||
|
sleep 0.2
|
||||||
|
done
|
||||||
|
echo
|
||||||
|
status_ok "Logging infrastructure established"
|
||||||
|
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
# 🔐 PHASE 9: PERMISSION MATRIX
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
|
||||||
|
section_header "9" "PERMISSION MATRIX CONFIGURATION" "🔐"
|
||||||
|
|
||||||
|
typewriter "Implementing security hardening..." 0.04 "$RED"
|
||||||
|
|
||||||
|
# Permission operations with progress
|
||||||
|
PERM_OPERATIONS=(
|
||||||
|
"chown -R www-data:www-data $WEBROOT"
|
||||||
|
"chmod -R 755 $WEBROOT"
|
||||||
|
"chmod 600 $WEBROOT/.env"
|
||||||
|
"chmod 755 $DATA_DIR $UPLOADS_DIR $LOG_DIR"
|
||||||
|
"chmod 644 $LOG_DIR/*.log"
|
||||||
|
)
|
||||||
|
|
||||||
|
for i in "${!PERM_OPERATIONS[@]}"; do
|
||||||
|
progress_bar $((i+1)) ${#PERM_OPERATIONS[@]} 45 "Setting permissions"
|
||||||
|
eval "${PERM_OPERATIONS[$i]}"
|
||||||
|
sleep 0.3
|
||||||
|
done
|
||||||
|
echo
|
||||||
|
|
 if [ -f "$WEBROOT/server/entry.mjs" ]; then
     chmod 755 "$WEBROOT/server/entry.mjs"
-    echo "✅ Server entry point permissions set"
+    status_ok "Server entry point permissions configured"
 fi
-echo "✅ Permissions configured"
+
+status_ok "Permission matrix locked down"
 
-###############################################################################
-# 10. Post-deployment validation
-###############################################################################
-echo ""
-echo "🔍 Post-deployment validation…"
+# ═══════════════════════════════════════════════════════════════════════════════
+# ✅ PHASE 10: DEPLOYMENT VALIDATION
+# ═══════════════════════════════════════════════════════════════════════════════
+
+section_header "10" "DEPLOYMENT VALIDATION MATRIX" "✅"
 
 VALIDATION_ERRORS=0
-[ -f "$WEBROOT/.env" ] && echo "✅ Environment configuration exists" || { echo "❌ Environment configuration missing"; ((VALIDATION_ERRORS++)); }
-[ -f "$WEBROOT/src/data/tools.yaml" ] && echo "✅ Tools database exists" || { echo "❌ Tools database missing"; ((VALIDATION_ERRORS++)); }
-{ [ -f "$WEBROOT/index.html" ] || [ -d "$WEBROOT/server" ]; } && \
-  echo "✅ Application files deployed" || { echo "❌ Application files missing"; ((VALIDATION_ERRORS++)); }
-
-echo ""
+VALIDATIONS=(
+    "$WEBROOT/.env|Environment configuration"
+    "$WEBROOT/src/data/tools.yaml|Tools database"
+)
+
+# Check application files
+if [ -f "$WEBROOT/index.html" ] || [ -d "$WEBROOT/server" ]; then
+    VALIDATIONS+=("$WEBROOT/index.html|Application files")
+fi
+
+echo
+printf "${CYAN}${BOLD}🔍 Running comprehensive validation suite...${RESET}\n"
+
+for validation in "${VALIDATIONS[@]}"; do
+    IFS='|' read -r file desc <<< "$validation"
+    printf "${YELLOW}Testing: $desc${RESET}"
+    pulsing_dots 3 1
+    if [ -f "$file" ] || [ -d "$file" ]; then
+        status_ok "$desc validated"
+    else
+        status_error "$desc missing"
+        ((VALIDATION_ERRORS++))
+    fi
+done
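`status_ok`, `status_error` and `pulsing_dots` are likewise helpers defined earlier in the script and not shown in this hunk. Minimal sketches consistent with how they are called here, reusing the color variables the script already references, could be:

    status_ok()    { printf " ${GREEN}✅ %s${RESET}\n" "$1"; }
    status_error() { printf " ${RED}❌ %s${RESET}\n" "$1"; }

    pulsing_dots() {
        # pulsing_dots <count> <delay-seconds> (assumed signature from the call site)
        local count=$1 delay=$2 i
        for (( i = 0; i < count; i++ )); do
            printf "."
            sleep "$delay"
        done
        printf "\n"
    }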
+
+# ═══════════════════════════════════════════════════════════════════════════════
+# 🎊 FINAL RESULTS SPECTACULAR
+# ═══════════════════════════════════════════════════════════════════════════════
+
+echo
 if [ $VALIDATION_ERRORS -eq 0 ]; then
-cat <<EOF
-═══════════════════════════════════════════════════════════════
-✅ Deployment Successful!
-═══════════════════════════════════════════════════════════════
-
-📋 Next Steps:
-1. 🔧 Configure $WEBROOT/.env
-   • Set PUBLIC_BASE_URL, AI service endpoints, AUTH_SECRET, etc.
-2. 🔄 Restart services:
-   sudo systemctl restart forensic-pathways
-   sudo systemctl reload nginx
-3. 🔍 Monitor:
-   sudo systemctl status forensic-pathways
-   sudo tail -f $LOG_DIR/error.log
-
-🌐 Application deployed to: $WEBROOT
-EOF
+    # Success celebration sequence
+    printf "${GREEN}${BOLD}${BG_GREEN}${WHITE}"
+    printf "%*s" $((COLS)) | tr ' ' '='
+    printf "${RESET}\n"
+
+    # Animated success banner
+    fancy_header "🎉 DEPLOYMENT SUCCESSFUL! 🎉" "All systems operational"
+
+    # Fireworks celebration
+    celebrate
+
+    # Next steps in a beautiful box
+    draw_box "🎯 MISSION BRIEFING - NEXT STEPS" "$(cat << EOF
+1. 🔧 Configure environment variables in $WEBROOT/.env
+   • Set PUBLIC_BASE_URL, AI service endpoints
+   • Configure AUTH_SECRET and database connections
+
+2. 🔄 Restart system services:
+   sudo systemctl restart forensic-pathways
+   sudo systemctl reload nginx
+
+3. 🔍 Monitor system health:
+   sudo systemctl status forensic-pathways
+   sudo tail -f $LOG_DIR/error.log
+
+🌐 Application fortress established at: $WEBROOT
+🎯 Ready for production deployment!
+EOF)" 70 "$GREEN"
+
+    # Final celebration
+    echo
+    printf "${BOLD}"
+    print_gradient_text "🚀 FORENSIC PATHWAYS DEPLOYMENT COMPLETE 🚀"
+    echo
+
 else
-echo "❌ Deployment completed with $VALIDATION_ERRORS errors"
-echo "📋 Please check the issues above before proceeding"
+    # Error summary
+    draw_box "⚠️ DEPLOYMENT COMPLETED WITH WARNINGS" "Found $VALIDATION_ERRORS validation issues
+Please review and resolve before proceeding" 60 "$YELLOW"
 fi
 
-echo ""
-echo "🎉 Deploy script completed at $(date '+%Y-%m-%d %H:%M:%S')"
+# Final timestamp with style
+echo
+printf "${DIM}${ITALIC}Deployment completed at: "
+printf "${BOLD}$(date '+%Y-%m-%d %H:%M:%S')${RESET}\n"
+
+# Restore cursor
+tput cnorm
+
+# Final matrix effect fade-out
+printf "${DIM}${GREEN}Deployment matrix shutting down...${RESET}\n"
+matrix_rain 1
+
+echo
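`draw_box`, used in both the success and the warning branch, is another helper defined earlier in the script (as are `fancy_header`, `celebrate`, `print_gradient_text` and `matrix_rain`). Purely to illustrate the assumed `draw_box <title> <body> <width> <color>` signature, a bare-bones version could be:

    draw_box() {
        # draw_box <title> <body> <width> <color> (assumed signature from the call sites)
        local title=$1 body=$2 width=$3 color=$4
        local border line
        border=$(printf '%*s' "$width" '' | tr ' ' '=')
        printf "%s%s%s\n" "$color" "$border" "$RESET"
        printf "%s %s%s\n" "$color" "$title" "$RESET"
        printf "%s%s%s\n" "$color" "$border" "$RESET"
        # Print the (possibly multi-line) body inside the box.
        while IFS= read -r line; do
            printf "%s  %s%s\n" "$color" "$line" "$RESET"
        done <<< "$body"
        printf "%s%s%s\n" "$color" "$border" "$RESET"
    }

The remaining hunks of this commit touch the Astro landing page, wiring the new AI authentication checks into it.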
@@ -6,10 +6,19 @@ import ToolMatrix from '../components/ToolMatrix.astro';
 import AIQueryInterface from '../components/AIQueryInterface.astro';
 import TargetedScenarios from '../components/TargetedScenarios.astro';
 import { getToolsData } from '../utils/dataService.js';
+import { withAPIAuth, getAuthRequirementForContext } from '../utils/auth.js';
 
 const data = await getToolsData();
 const tools = data.tools;
 const phases = data.phases;
+
+// Check AI authentication requirements
+const aiAuthRequired = getAuthRequirementForContext('ai');
+let aiAuthContext: { authenticated: boolean; userId: string; session?: any; authRequired: boolean; } | null = null;
+
+if (aiAuthRequired) {
+  aiAuthContext = await withAPIAuth(Astro.request, 'ai');
+}
 ---
 
 <BaseLayout title="~/">
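The frontmatter now calls `getAuthRequirementForContext('ai')` and `withAPIAuth(Astro.request, 'ai')` from `../utils/auth.js`. That utility is not part of this diff, so the following TypeScript is only a sketch of the shape the call sites imply; the env key, cookie handling and the `getSessionFromCookie` helper are assumptions for illustration, not the project's actual code:

    export interface APIAuthContext {
      authenticated: boolean;
      userId: string;
      session?: any;
      authRequired: boolean;
    }

    // Hypothetical session lookup, standing in for whatever auth.js really does.
    async function getSessionFromCookie(cookieHeader: string | null): Promise<{ userId: string } | null> {
      if (!cookieHeader || !cookieHeader.includes('session=')) return null;
      return { userId: 'demo-user' };
    }

    // Assumed: per-context auth requirements are driven by configuration,
    // e.g. an AUTH_REQUIRED_AI-style flag (hypothetical variable name).
    export function getAuthRequirementForContext(context: string): boolean {
      return process.env[`AUTH_REQUIRED_${context.toUpperCase()}`] === 'true';
    }

    // Assumed: resolves the caller's session and reports whether the given context may be used.
    export async function withAPIAuth(request: Request, context: string): Promise<APIAuthContext> {
      const session = await getSessionFromCookie(request.headers.get('cookie'));
      return {
        authenticated: Boolean(session),
        userId: session?.userId ?? 'anonymous',
        session,
        authRequired: getAuthRequirementForContext(context),
      };
    }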
@@ -36,6 +45,21 @@ const phases = data.phases;
         </div>
       </div>
 
+      {aiAuthRequired && !aiAuthContext?.authenticated ? (
+        <div class="ai-auth-required">
+          <button id="ai-login-btn" class="btn btn-accent btn-lg">
+            <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" style="margin-right: 0.5rem;">
+              <path d="M15 3h4a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2h-4"/>
+              <polyline points="10 17 15 12 10 7"/>
+              <line x1="15" y1="12" x2="3" y2="12"/>
+            </svg>
+            Anmelden für KI-Beratung
+          </button>
+          <p style="margin-top: 0.75rem; font-size: 0.875rem; color: var(--color-text-secondary); text-align: center;">
+            Authentifizierung erforderlich für KI-Features
+          </p>
+        </div>
+      ) : (
       <button id="ai-query-btn" class="btn btn-accent btn-lg ai-primary-btn">
         <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
           <path d="M9 11H5a2 2 0 0 0-2 2v7a2 2 0 0 0 2 2h14a2 2 0 0 0 2-2v-7a2 2 0 0 0-2-2h-4"/>
@@ -47,6 +71,7 @@ const phases = data.phases;
           <polyline points="7,7 17,7 17,17"/>
         </svg>
       </button>
+      )}
 
       <div class="ai-features-mini">
         <span class="badge badge-secondary">Workflow-Empfehlungen</span>
@@ -178,7 +203,39 @@ const phases = data.phases;
     <ToolFilters data={data} />
   </section>
 
+  {aiAuthRequired && !aiAuthContext?.authenticated ? (
+    <section id="ai-interface" class="ai-interface hidden">
+      <div class="ai-query-section">
+        <div class="content-center-lg">
+          <div class="card" style="text-align: center; padding: 3rem; border-left: 4px solid var(--color-accent);">
+            <div style="margin-bottom: 2rem;">
+              <svg width="64" height="64" viewBox="0 0 24 24" fill="none" stroke="var(--color-accent)" stroke-width="1.5" style="margin: 0 auto;">
+                <path d="M9 11H5a2 2 0 0 0-2 2v7a2 2 0 0 0 2 2h14a2 2 0 0 0 2-2v-7a2 2 0 0 0-2-2h-4"/>
+                <path d="M9 11V7a3 3 0 0 1 6 0v4"/>
+                <circle cx="12" cy="12" r="2"/>
+              </svg>
+            </div>
+            <h2 style="margin-bottom: 1rem; color: var(--color-primary);">Anmeldung erforderlich</h2>
+            <p style="margin-bottom: 2rem; color: var(--color-text-secondary); line-height: 1.6;">
+              Für die Nutzung der KI-Beratung ist eine Authentifizierung erforderlich.
+              Melden Sie sich an, um personalisierte Workflow-Empfehlungen und Tool-Analysen zu erhalten.
+            </p>
+            <a href={`/api/auth/login?returnTo=${encodeURIComponent(Astro.url.toString())}`}
+               class="btn btn-accent btn-lg">
+              <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" style="margin-right: 0.5rem;">
+                <path d="M15 3h4a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2h-4"/>
+                <polyline points="10 17 15 12 10 7"/>
+                <line x1="15" y1="12" x2="3" y2="12"/>
+              </svg>
+              Anmelden
+            </a>
+          </div>
+        </div>
+      </div>
+    </section>
+  ) : (
   <AIQueryInterface />
+  )}
 
   <section id="tools-grid" style="padding-bottom: 2rem;">
     <div class="grid-auto-fit" id="tools-container">
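Both login paths point at `/api/auth/login?returnTo=...`. That route is not part of this commit, so the following is only a hedged sketch of how such an Astro endpoint might honor `returnTo` (the cookie name and the provider hand-off route are invented for illustration); the one real point it highlights is restricting `returnTo` to same-origin paths so the parameter cannot be abused as an open redirect:

    import type { APIRoute } from 'astro';

    export const GET: APIRoute = async ({ url, cookies, redirect }) => {
      const requested = url.searchParams.get('returnTo') ?? '/';
      // Accept only same-origin, path-style targets (rejects "//evil.example" and full URLs).
      const returnTo = requested.startsWith('/') && !requested.startsWith('//') ? requested : '/';
      cookies.set('auth_return_to', returnTo, { path: '/', httpOnly: true, sameSite: 'lax' });
      // Hypothetical hand-off to the identity provider; its callback would read the
      // cookie and send the user back to returnTo once the session is established.
      return redirect('/api/auth/oidc/start');
    };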
@@ -195,7 +252,7 @@ const phases = data.phases;
   <ToolMatrix data={data} />
 </BaseLayout>
 
-<script define:vars={{ toolsData: data.tools, phases: data.phases }}>
+<script define:vars={{ toolsData: data.tools, phases: data.phases, aiAuthRequired: aiAuthRequired, aiAuthenticated: aiAuthContext?.authenticated }}>
   window.toolsData = toolsData;
 
   window.selectApproach = function(approach) {
@@ -268,12 +325,21 @@ const phases = data.phases;
   const filtersSection = document.getElementById('filters-section');
   const noResults = document.getElementById('no-results');
   const aiQueryBtn = document.getElementById('ai-query-btn');
+  const aiLoginBtn = document.getElementById('ai-login-btn');
 
   if (!toolsContainer || !toolsGrid || !matrixContainer || !noResults || !aiInterface || !filtersSection) {
     console.error('Required DOM elements not found');
     return;
   }
 
+  // Handle AI authentication button click
+  if (aiLoginBtn) {
+    aiLoginBtn.addEventListener('click', () => {
+      const currentUrl = encodeURIComponent(window.location.href);
+      window.location.href = `/api/auth/login?returnTo=${currentUrl}`;
+    });
+  }
+
   if (aiQueryBtn) {
     aiQueryBtn.addEventListener('click', () => {
       aiQueryBtn.classList.add('activated');
@@ -319,6 +385,14 @@ const phases = data.phases;
       if (filtersSection) filtersSection.style.display = 'block';
       break;
     case 'ai':
+      // Only show AI interface if authentication allows it
+      if (aiAuthRequired && !aiAuthenticated) {
+        console.log('[AUTH] AI access denied, redirecting to login');
+        const currentUrl = encodeURIComponent(window.location.href);
+        window.location.href = `/api/auth/login?returnTo=${currentUrl}`;
+        return;
+      }
+
       if (aiInterface) aiInterface.style.display = 'block';
 
       if (filtersSection) {
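Both the new login-button handler and this 'ai' view gate build the same `returnTo` redirect by hand. A small helper (not part of the commit, just a possible tidy-up) would keep the two call sites in sync; note that the client-side gate is only a UX shortcut, while the `withAPIAuth` check in the frontmatter remains the authoritative one:

    // Possible refactor, not part of this diff.
    function redirectToLogin(): void {
      const currentUrl = encodeURIComponent(window.location.href);
      window.location.href = `/api/auth/login?returnTo=${currentUrl}`;
    }

    // Usage at the two call sites shown above:
    //   aiLoginBtn.addEventListener('click', redirectToLogin);
    //   if (aiAuthRequired && !aiAuthenticated) { redirectToLogin(); return; }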
@@ -344,6 +418,7 @@ const phases = data.phases;
     }
   }
 
+  // Rest of the existing code remains the same...
   window.navigateToGrid = function(toolName) {
     console.log('Navigating to grid for tool:', toolName);
 