mirror of
https://github.com/mblanke/StrikePackageGPT.git
synced 2026-03-01 22:30:22 -05:00
Add Vite React component bundling, SSE process streaming, preferences persistence, WebSocket terminal proxy, local Ollama integration
- Enable local Ollama service in compose with llm-router dependency
- Add SSE /stream/processes endpoint in kali-executor for live process updates
- Add WebSocket /ws/execute for real-time terminal command streaming
- Implement preferences persistence (provider/model) via dashboard backend
- Create Vite build pipeline for React components (VoiceControls, NetworkMap, GuidedWizard)
- Update dashboard Dockerfile with Node builder stage for component bundling
- Wire dashboard template to mount components and subscribe to SSE/WebSocket streams
- Add preferences load/save hooks in UI to persist LLM provider/model selection
This commit is contained in:
@@ -67,13 +67,13 @@ services:
     environment:
       - OPENAI_API_KEY=${OPENAI_API_KEY:-}
       - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:-}
-      # Local Ollama on host machine (use host.docker.internal on Windows/Mac)
-      - OLLAMA_LOCAL_URL=${OLLAMA_LOCAL_URL:-http://host.docker.internal:11434}
+      # Prefer local Ollama container for self-contained setup
+      - OLLAMA_LOCAL_URL=${OLLAMA_LOCAL_URL:-http://strikepackage-ollama:11434}
       # Network Ollama instances (Dell LLM box with larger models)
       - OLLAMA_NETWORK_URLS=${OLLAMA_NETWORK_URLS:-http://192.168.1.50:11434}
       # Legacy single endpoint (fallback)
-      - OLLAMA_ENDPOINTS=${OLLAMA_ENDPOINTS:-http://host.docker.internal:11434}
-      - OLLAMA_BASE_URL=${OLLAMA_BASE_URL:-http://host.docker.internal:11434}
+      - OLLAMA_ENDPOINTS=${OLLAMA_ENDPOINTS:-http://strikepackage-ollama:11434}
+      - OLLAMA_BASE_URL=${OLLAMA_BASE_URL:-http://strikepackage-ollama:11434}
       # Load balancing: round-robin, random, failover
       - LOAD_BALANCE_STRATEGY=${LOAD_BALANCE_STRATEGY:-failover}
     extra_hosts:
@@ -81,6 +81,8 @@ services:
     networks:
       - strikepackage-net
     restart: unless-stopped
+    depends_on:
+      - ollama

   # Kali Linux - Security tools container
   kali:
@@ -100,26 +102,25 @@ services:
       - NET_RAW
     restart: unless-stopped

-  # Ollama - Local LLM (disabled - using Dell LLM box at 192.168.1.50)
-  # Uncomment to use local Ollama instead
-  # ollama:
-  #   image: ollama/ollama:latest
-  #   container_name: strikepackage-ollama
-  #   ports:
-  #     - "11434:11434"
-  #   volumes:
-  #     - ollama-models:/root/.ollama
-  #   networks:
-  #     - strikepackage-net
-  #   restart: unless-stopped
-  #   # Uncomment for GPU support:
-  #   # deploy:
-  #   #   resources:
-  #   #     reservations:
-  #   #       devices:
-  #   #         - driver: nvidia
-  #   #           count: all
-  #   #           capabilities: [gpu]
+  # Ollama - Local LLM
+  ollama:
+    image: ollama/ollama:latest
+    container_name: strikepackage-ollama
+    ports:
+      - "11434:11434"
+    volumes:
+      - ollama-models:/root/.ollama
+    networks:
+      - strikepackage-net
+    restart: unless-stopped
+    # GPU support (optional): uncomment if using NVIDIA GPU
+    # deploy:
+    #   resources:
+    #     reservations:
+    #       devices:
+    #         - driver: nvidia
+    #           count: all
+    #           capabilities: [gpu]

 networks:
   strikepackage-net:
Reference in New Issue
Block a user