diff --git a/config/ai-providers.json b/config/ai-providers.json new file mode 100644 index 0000000..4eea709 --- /dev/null +++ b/config/ai-providers.json @@ -0,0 +1,58 @@ +{ + "default_provider": "ollama", + "load_balancing": "round-robin", + "networked": { + "endpoints": [ + { + "url": "http://192.168.1.50:11434", + "enabled": true, + "ip": "192.168.1.50", + "network_interface": "high-speed", + "alt_url": null, + "name": "GB1", + "models": [ + "qwen2.5-coder:32b", + "llama3.1:70b", + "deepseek-coder:33b", + "deepseek-coder:6.7b", + "llama3:latest", + "llama3.1:70b-instruct-q5_K_M", + "llama3.1:70b-instruct-q4_K_M", + "mixtral:8x22b-instruct", + "qwen2:72b-instruct" + ], + "port": 11434, + "alt_ip": "", + "priority": 1 + } + ], + "enabled": true + }, + "local": { + "url": "http://localhost:11434", + "models": [ + "dolphin-mixtral:8x7b", + "qwen2.5:14b", + "deepseek-coder-v2:latest", + "codellama:13b", + "llama3.1:8b", + "llama3.2:latest", + "deepseek-coder:latest", + "mistral:latest", + "llama3:latest" + ], + "enabled": true + }, + "default_model": "llama3.2", + "cloud": { + "openai": { + "api_key": "", + "enabled": false + }, + "anthropic": { + "api_key": "", + "enabled": false + }, + "enabled": false + } +} diff --git a/scripts/install.ps1 b/scripts/install.ps1 index 1877770..c19138d 100644 --- a/scripts/install.ps1 +++ b/scripts/install.ps1 @@ -17,9 +17,9 @@ $ErrorActionPreference = "Stop" function Write-Header { param([string]$Text) Write-Host "" - Write-Host "═══════════════════════════════════════════════════════════════" -ForegroundColor Red + Write-Host "======================================================================" -ForegroundColor Red Write-Host " $Text" -ForegroundColor White - Write-Host "═══════════════════════════════════════════════════════════════" -ForegroundColor Red + Write-Host "======================================================================" -ForegroundColor Red Write-Host "" } @@ -30,24 +30,23 @@ function Write-Step { function 
Write-Success { param([string]$Text) - Write-Host " [✓] $Text" -ForegroundColor Green + Write-Host " [+] $Text" -ForegroundColor Green } -function Write-Warning { +function Write-Warn { param([string]$Text) Write-Host " [!] $Text" -ForegroundColor Yellow } -function Write-Error { +function Write-Err { param([string]$Text) - Write-Host " [✗] $Text" -ForegroundColor Red + Write-Host " [-] $Text" -ForegroundColor Red } function Get-UserChoice { param( [string]$Prompt, - [string[]]$Options, - [bool]$MultiSelect = $false + [string[]]$Options ) Write-Host "" @@ -58,21 +57,9 @@ function Get-UserChoice { Write-Host " [$($i + 1)] $($Options[$i])" -ForegroundColor Gray } - if ($MultiSelect) { - Write-Host "" - Write-Host " Enter numbers separated by commas (e.g., 1,2,3) or 'all'" -ForegroundColor DarkGray - $input = Read-Host " Selection" - - if ($input -eq "all") { - return (1..$Options.Count) - } - - return $input.Split(",") | ForEach-Object { [int]$_.Trim() } - } else { - Write-Host "" - $choice = Read-Host " Selection" - return [int]$choice - } + Write-Host "" + $choice = Read-Host " Selection" + return [int]$choice } function Test-OllamaEndpoint { @@ -97,48 +84,36 @@ function Get-OllamaModels { } } -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== # MAIN INSTALLATION FLOW -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== Clear-Host Write-Host @" - ╔═══════════════════════════════════════════════════════════════╗ - ║ ║ - ║ ██████╗ ██████╗ ██████╗ ███████╗███████╗ ║ - ║ ██╔════╝ ██╔═══██╗██╔═══██╗██╔════╝██╔════╝ ║ - ║ ██║ ███╗██║ ██║██║ ██║███████╗█████╗ ║ - ║ ██║ ██║██║ ██║██║ ██║╚════██║██╔══╝ ║ - ║ ╚██████╔╝╚██████╔╝╚██████╔╝███████║███████╗ ║ - ║ ╚═════╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ ║ - ║ ║ - ║ ███████╗████████╗██████╗ ██╗██╗ ██╗███████╗ ║ - ║ ██╔════╝╚══██╔══╝██╔══██╗██║██║ 
██╔╝██╔════╝ ║ - ║ ███████╗ ██║ ██████╔╝██║█████╔╝ █████╗ ║ - ║ ╚════██║ ██║ ██╔══██╗██║██╔═██╗ ██╔══╝ ║ - ║ ███████║ ██║ ██║ ██║██║██║ ██╗███████╗ ║ - ║ ╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚══════╝ ║ - ║ ║ - ║ AI-Powered Penetration Testing Platform ║ - ║ Installation Wizard ║ - ╚═══════════════════════════════════════════════════════════════╝ + ================================================================ + + GOOSE STRIKE + AI-Powered Penetration Testing Platform + Installation Wizard + + ================================================================ "@ -ForegroundColor Red Write-Host " Welcome to GooseStrike! This wizard will configure your AI backends." -ForegroundColor White Write-Host "" -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== # STEP 1: AI PROVIDER SELECTION -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== Write-Header "STEP 1: AI Provider Selection" $providerOptions = @( "Local Only (Ollama on this machine)", "Networked Only (Ollama on remote machines)", - "Cloud Only (OpenAI, Anthropic, etc.)", + "Cloud Only (OpenAI or Anthropic)", "Hybrid - Local + Networked", "Hybrid - Local + Cloud", "Hybrid - Networked + Cloud", @@ -160,9 +135,8 @@ $config = @{ } cloud = @{ enabled = $false - openai = @{ enabled = $false; api_key = ""; models = @("gpt-4", "gpt-4-turbo", "gpt-3.5-turbo") } - anthropic = @{ enabled = $false; api_key = ""; models = @("claude-3-opus-20240229", "claude-3-sonnet-20240229") } - groq = @{ enabled = $false; api_key = ""; models = @("llama-3.1-70b-versatile", "mixtral-8x7b-32768") } + openai = @{ enabled = $false; api_key = "" } + anthropic = @{ enabled = $false; api_key = "" } } default_provider = "ollama" default_model = "llama3.2" @@ -174,9 +148,9 @@ $configureLocal = $providerChoice -in @(1, 4, 5, 7) $configureNetworked = $providerChoice -in @(2, 4, 6, 
7) $configureCloud = $providerChoice -in @(3, 5, 6, 7) -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== # STEP 2: LOCAL OLLAMA CONFIGURATION -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== if ($configureLocal) { Write-Header "STEP 2: Local Ollama Configuration" @@ -191,16 +165,17 @@ if ($configureLocal) { $models = Get-OllamaModels -Url $localUrl if ($models.Count -gt 0) { - Write-Success "Found $($models.Count) model(s): $($models -join ', ')" + $modelList = $models -join ", " + Write-Success "Found $($models.Count) model(s): $modelList" $config.local.models = $models } else { - Write-Warning "No models found. You may need to pull models with: ollama pull llama3.2" + Write-Warn "No models found. You may need to pull models with: ollama pull llama3.2" } $config.local.enabled = $true $config.local.url = $localUrl } else { - Write-Warning "Ollama not detected at $localUrl" + Write-Warn "Ollama not detected at $localUrl" $installChoice = Read-Host " Would you like to install Ollama? (y/n)" if ($installChoice -eq "y") { @@ -209,16 +184,16 @@ if ($configureLocal) { Write-Host "" Write-Host " Please install Ollama and run: ollama pull llama3.2" -ForegroundColor Yellow Write-Host " Then re-run this installer." 
-ForegroundColor Yellow - Read-Host " Press Enter to continue anyway, or Ctrl+C to exit" + Read-Host " Press Enter to continue anyway or Ctrl+C to exit" } $config.local.enabled = $false } } -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== # STEP 3: NETWORKED OLLAMA CONFIGURATION -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== if ($configureNetworked) { Write-Header "STEP 3: Networked Ollama Configuration" @@ -230,14 +205,14 @@ if ($configureNetworked) { $endpointIndex = 1 while ($addMore) { - Write-Host " ── Endpoint #$endpointIndex ──" -ForegroundColor Cyan + Write-Host " -- Endpoint #$endpointIndex --" -ForegroundColor Cyan Write-Host "" # Get endpoint details - $name = Read-Host " Friendly name (e.g., 'Dell Pro Max GB10')" - $ip = Read-Host " IP Address (e.g., 192.168.1.50)" - $port = Read-Host " Port (default: 11434)" - if ([string]::IsNullOrEmpty($port)) { $port = "11434" } + $epName = Read-Host " Friendly name (e.g. Dell Pro Max GB10)" + $epIp = Read-Host " IP Address (e.g. 192.168.1.50)" + $epPort = Read-Host " Port (default: 11434)" + if ([string]::IsNullOrEmpty($epPort)) { $epPort = "11434" } # Network interface selection Write-Host "" @@ -251,33 +226,34 @@ if ($configureNetworked) { if ($nicChoice -eq "2") { $networkInterface = "high-speed" - $altIp = Read-Host " High-speed interface IP (e.g., 10.0.0.50)" + $altIp = Read-Host " High-speed interface IP (e.g. 10.0.0.50)" } # Build endpoint URL - $endpointUrl = "http://${ip}:${port}" + $endpointUrl = "http://${epIp}:${epPort}" Write-Step "Testing connection to $endpointUrl..." 
$endpoint = @{ - name = $name + name = $epName url = $endpointUrl - ip = $ip - port = [int]$port + ip = $epIp + port = [int]$epPort network_interface = $networkInterface alt_ip = $altIp - alt_url = if ($altIp) { "http://${altIp}:${port}" } else { $null } + alt_url = if ($altIp) { "http://${altIp}:${epPort}" } else { $null } enabled = $false models = @() priority = $endpointIndex } if (Test-OllamaEndpoint -Url $endpointUrl) { - Write-Success "Connected to $name at $endpointUrl" + Write-Success "Connected to $epName at $endpointUrl" $models = Get-OllamaModels -Url $endpointUrl if ($models.Count -gt 0) { - Write-Success "Available models: $($models -join ', ')" + $modelList = $models -join ", " + Write-Success "Available models: $modelList" $endpoint.models = $models } @@ -294,11 +270,11 @@ if ($configureNetworked) { $endpoint.prefer_high_speed = $true } } else { - Write-Warning "High-speed interface not reachable (will use primary)" + Write-Warn "High-speed interface not reachable (will use primary)" } } } else { - Write-Warning "Could not connect to $endpointUrl" + Write-Warn "Could not connect to $endpointUrl" $keepEndpoint = Read-Host " Add anyway? 
(y/n)" if ($keepEndpoint -eq "y") { $endpoint.enabled = $false @@ -328,17 +304,15 @@ if ($configureNetworked) { $lbOptions = @( "Round-robin (distribute evenly)", - "Priority-based (use highest priority first)", - "Fastest-response (route to quickest endpoint)", - "Model-based (route by model availability)" + "Failover (use highest priority first)", + "Random (random selection)" ) $lbChoice = Get-UserChoice -Prompt "Load balancing strategy:" -Options $lbOptions $config.load_balancing = switch ($lbChoice) { 1 { "round-robin" } - 2 { "priority" } - 3 { "fastest" } - 4 { "model-based" } + 2 { "failover" } + 3 { "random" } default { "round-robin" } } @@ -347,9 +321,9 @@ if ($configureNetworked) { } } -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== # STEP 4: CLOUD PROVIDER CONFIGURATION -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== if ($configureCloud) { Write-Header "STEP 4: Cloud Provider Configuration" @@ -358,59 +332,43 @@ if ($configureCloud) { Write-Host "" # OpenAI - Write-Host " ── OpenAI ──" -ForegroundColor Cyan + Write-Host " -- OpenAI --" -ForegroundColor Cyan $useOpenAI = Read-Host " Enable OpenAI?
(y/n)" if ($useOpenAI -eq "y") { - $openaiKey = Read-Host " OpenAI API Key" -AsSecureString - $openaiKeyPlain = [Runtime.InteropServices.Marshal]::PtrToStringAuto([Runtime.InteropServices.Marshal]::SecureStringToBSTR($openaiKey)) + $openaiKey = Read-Host " OpenAI API Key" - if ($openaiKeyPlain -match "^sk-") { + if ($openaiKey -match "^sk-") { $config.cloud.openai.enabled = $true - $config.cloud.openai.api_key = $openaiKeyPlain + $config.cloud.openai.api_key = $openaiKey Write-Success "OpenAI configured" } else { - Write-Warning "Invalid API key format (should start with 'sk-')" + Write-Warn "Invalid API key format (should start with sk-)" } } Write-Host "" # Anthropic - Write-Host " ── Anthropic (Claude) ──" -ForegroundColor Cyan + Write-Host " -- Anthropic (Claude) --" -ForegroundColor Cyan $useAnthropic = Read-Host " Enable Anthropic? (y/n)" if ($useAnthropic -eq "y") { - $anthropicKey = Read-Host " Anthropic API Key" -AsSecureString - $anthropicKeyPlain = [Runtime.InteropServices.Marshal]::PtrToStringAuto([Runtime.InteropServices.Marshal]::SecureStringToBSTR($anthropicKey)) + $anthropicKey = Read-Host " Anthropic API Key" - if ($anthropicKeyPlain -match "^sk-ant-") { + if ($anthropicKey -match "^sk-ant-") { $config.cloud.anthropic.enabled = $true - $config.cloud.anthropic.api_key = $anthropicKeyPlain + $config.cloud.anthropic.api_key = $anthropicKey Write-Success "Anthropic configured" } else { - Write-Warning "Invalid API key format (should start with 'sk-ant-')" + Write-Warn "Invalid API key format (should start with sk-ant-)" } } - Write-Host "" - - # Groq - Write-Host " ── Groq (Fast inference) ──" -ForegroundColor Cyan - $useGroq = Read-Host " Enable Groq? 
(y/n)" - if ($useGroq -eq "y") { - $groqKey = Read-Host " Groq API Key" -AsSecureString - $groqKeyPlain = [Runtime.InteropServices.Marshal]::PtrToStringAuto([Runtime.InteropServices.Marshal]::SecureStringToBSTR($groqKey)) - - $config.cloud.groq.enabled = $true - $config.cloud.groq.api_key = $groqKeyPlain - Write-Success "Groq configured" - } - - $config.cloud.enabled = $config.cloud.openai.enabled -or $config.cloud.anthropic.enabled -or $config.cloud.groq.enabled + $config.cloud.enabled = $config.cloud.openai.enabled -or $config.cloud.anthropic.enabled } -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== # STEP 5: DEFAULT PROVIDER SELECTION -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== Write-Header "STEP 5: Default Provider Selection" @@ -422,10 +380,12 @@ if ($config.local.enabled) { $providerMap["Local Ollama (localhost)"] = @{ provider = "ollama"; url = $config.local.url } } -foreach ($endpoint in $config.networked.endpoints | Where-Object { $_.enabled }) { - $label = "Networked: $($endpoint.name)" - $availableProviders += $label - $providerMap[$label] = @{ provider = "ollama"; url = $endpoint.url; name = $endpoint.name } +foreach ($ep in $config.networked.endpoints) { + if ($ep.enabled) { + $label = "Networked: $($ep.name)" + $availableProviders += $label + $providerMap[$label] = @{ provider = "ollama"; url = $ep.url; name = $ep.name } + } } if ($config.cloud.openai.enabled) { @@ -438,11 +398,6 @@ if ($config.cloud.anthropic.enabled) { $providerMap["Anthropic (Claude)"] = @{ provider = "anthropic" } } -if ($config.cloud.groq.enabled) { - $availableProviders += "Groq (Fast)" - $providerMap["Groq (Fast)"] = @{ provider = "groq" } -} - if ($availableProviders.Count -gt 0) { $defaultChoice = Get-UserChoice -Prompt "Select your default AI provider:" -Options $availableProviders 
$selectedProvider = $availableProviders[$defaultChoice - 1] @@ -450,94 +405,81 @@ if ($availableProviders.Count -gt 0) { Write-Success "Default provider: $selectedProvider" } else { - Write-Error "No providers configured! At least one provider is required." + Write-Err "No providers configured! At least one provider is required." exit 1 } -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== # STEP 6: GENERATE CONFIGURATION FILES -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== Write-Header "STEP 6: Generating Configuration" $scriptRoot = Split-Path -Parent $PSScriptRoot $envFile = Join-Path $scriptRoot ".env" -$configJsonFile = Join-Path $scriptRoot "config" "ai-providers.json" +$configDir = Join-Path $scriptRoot "config" +$configJsonFile = Join-Path $configDir "ai-providers.json" # Create config directory if needed -$configDir = Join-Path $scriptRoot "config" if (-not (Test-Path $configDir)) { New-Item -ItemType Directory -Path $configDir -Force | Out-Null } +# Build OLLAMA_ENDPOINTS string +$ollamaEndpoints = @() +if ($config.local.enabled) { + $ollamaEndpoints += $config.local.url +} +foreach ($ep in $config.networked.endpoints) { + if ($ep.enabled) { + if ($ep.prefer_high_speed -and $ep.alt_url) { + $ollamaEndpoints += $ep.alt_url + } else { + $ollamaEndpoints += $ep.url + } + } +} +$ollamaEndpointsStr = $ollamaEndpoints -join "," +if ([string]::IsNullOrEmpty($ollamaEndpointsStr)) { + $ollamaEndpointsStr = "http://localhost:11434" +} + # Generate .env file Write-Step "Generating .env file..." 
-$envContent = @" -# ═══════════════════════════════════════════════════════════════ -# GooseStrike Configuration -# Generated by installer on $(Get-Date -Format "yyyy-MM-dd HH:mm:ss") -# ═══════════════════════════════════════════════════════════════ +$timestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss" +$envLines = @( + "# ======================================================================", + "# GooseStrike Configuration", + "# Generated by installer on $timestamp", + "# ======================================================================", + "", + "# Ollama Endpoints (comma-separated for load balancing)", + "OLLAMA_ENDPOINTS=$ollamaEndpointsStr", + "", + "# Load Balancing Strategy: round-robin, failover, random", + "LOAD_BALANCE_STRATEGY=$($config.load_balancing)", + "", + "# Cloud Providers", + "OPENAI_API_KEY=$($config.cloud.openai.api_key)", + "ANTHROPIC_API_KEY=$($config.cloud.anthropic.api_key)", + "", + "# Default Settings", + "DEFAULT_PROVIDER=$($config.default_provider)", + "DEFAULT_MODEL=$($config.default_model)" +) -# Default AI Provider -DEFAULT_PROVIDER=$($config.default_provider) -DEFAULT_MODEL=$($config.default_model) - -# Local Ollama -LOCAL_OLLAMA_ENABLED=$($config.local.enabled.ToString().ToLower()) -LOCAL_OLLAMA_URL=$($config.local.url) - -# Networked Ollama Endpoints -NETWORKED_OLLAMA_ENABLED=$($config.networked.enabled.ToString().ToLower()) -LOAD_BALANCING_STRATEGY=$($config.load_balancing) - -"@ - -# Add networked endpoints -$endpointNum = 1 -foreach ($endpoint in $config.networked.endpoints) { - $envContent += @" -# Networked Endpoint $endpointNum - $($endpoint.name) -OLLAMA_ENDPOINT_${endpointNum}_NAME=$($endpoint.name) -OLLAMA_ENDPOINT_${endpointNum}_URL=$($endpoint.url) -OLLAMA_ENDPOINT_${endpointNum}_ENABLED=$($endpoint.enabled.ToString().ToLower()) -OLLAMA_ENDPOINT_${endpointNum}_PRIORITY=$($endpoint.priority) - -"@ - if ($endpoint.alt_url) { - $envContent += "OLLAMA_ENDPOINT_${endpointNum}_ALT_URL=$($endpoint.alt_url)`n" - 
$envContent += "OLLAMA_ENDPOINT_${endpointNum}_PREFER_ALT=$($endpoint.prefer_high_speed.ToString().ToLower())`n`n" - } - $endpointNum++ -} - -$envContent += "OLLAMA_ENDPOINT_COUNT=$($config.networked.endpoints.Count)`n`n" - -# Add cloud providers -$envContent += @" -# Cloud Providers -OPENAI_ENABLED=$($config.cloud.openai.enabled.ToString().ToLower()) -OPENAI_API_KEY=$($config.cloud.openai.api_key) - -ANTHROPIC_ENABLED=$($config.cloud.anthropic.enabled.ToString().ToLower()) -ANTHROPIC_API_KEY=$($config.cloud.anthropic.api_key) - -GROQ_ENABLED=$($config.cloud.groq.enabled.ToString().ToLower()) -GROQ_API_KEY=$($config.cloud.groq.api_key) -"@ - -$envContent | Out-File -FilePath $envFile -Encoding UTF8 -Force +$envLines -join "`n" | Out-File -FilePath $envFile -Encoding UTF8 -Force Write-Success "Created $envFile" # Generate JSON config Write-Step "Generating AI providers config..." - $config | ConvertTo-Json -Depth 10 | Out-File -FilePath $configJsonFile -Encoding UTF8 -Force Write-Success "Created $configJsonFile" -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== # STEP 7: DOCKER SETUP -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== Write-Header "STEP 7: Docker Setup" @@ -551,7 +493,7 @@ try { $dockerRunning = $true } } catch { - Write-Warning "Docker is not running" + Write-Warn "Docker is not running" } if ($dockerRunning) { @@ -570,12 +512,12 @@ if ($dockerRunning) { Write-Host "" } } else { - Write-Warning "Please start Docker and run: docker-compose up -d --build" + Write-Warn "Please start Docker and run: docker-compose up -d --build" } -# ═══════════════════════════════════════════════════════════════ +# ====================================================================== # COMPLETE -# ═══════════════════════════════════════════════════════════════ +# 
====================================================================== Write-Header "Installation Complete!" @@ -583,24 +525,23 @@ Write-Host " Configuration Summary:" -ForegroundColor White Write-Host "" if ($config.local.enabled) { - Write-Host " ✓ Local Ollama: $($config.local.url)" -ForegroundColor Green + Write-Host " + Local Ollama: $($config.local.url)" -ForegroundColor Green } -foreach ($endpoint in $config.networked.endpoints | Where-Object { $_.enabled }) { - Write-Host " ✓ Networked: $($endpoint.name) @ $($endpoint.url)" -ForegroundColor Green - if ($endpoint.alt_url) { - Write-Host " └─ High-speed: $($endpoint.alt_url)" -ForegroundColor DarkGreen +foreach ($ep in $config.networked.endpoints) { + if ($ep.enabled) { + Write-Host " + Networked: $($ep.name) @ $($ep.url)" -ForegroundColor Green + if ($ep.alt_url) { + Write-Host " High-speed: $($ep.alt_url)" -ForegroundColor DarkGreen + } } } if ($config.cloud.openai.enabled) { - Write-Host " ✓ OpenAI: Enabled" -ForegroundColor Green + Write-Host " + OpenAI: Enabled" -ForegroundColor Green } if ($config.cloud.anthropic.enabled) { - Write-Host " ✓ Anthropic: Enabled" -ForegroundColor Green -} -if ($config.cloud.groq.enabled) { - Write-Host " ✓ Groq: Enabled" -ForegroundColor Green + Write-Host " + Anthropic: Enabled" -ForegroundColor Green } Write-Host ""