# Environment Configuration for StrikePackageGPT
# Copy this file to .env and fill in your values

# LLM API Keys (optional - Ollama works without API keys)
OPENAI_API_KEY=
ANTHROPIC_API_KEY=

# Ollama Configuration
OLLAMA_BASE_URL=http://ollama:11434

# Default LLM Provider and Model
# These are used when no explicit provider/model is specified in API requests
# Can be changed via API: POST /api/llm/preferences
DEFAULT_LLM_PROVIDER=ollama
DEFAULT_LLM_MODEL=llama3.2
# Available providers: ollama, ollama-local, ollama-network, openai, anthropic