Add integration test endpoints for n8n and Ollama

This commit is contained in:
2025-11-13 22:35:43 -05:00
parent e21301cffb
commit 97bb575cbd
37 changed files with 4731 additions and 1 deletion

38
app/agents/base_agent.py Normal file
View File

@@ -0,0 +1,38 @@
"""Base LLM agent scaffolding for GooseStrike."""
from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Dict
from .llm_router import LLMProviderError, call_llm_with_fallback
def llm_call(prompt: str) -> str:
    """Invoke the configured LLM provider chain for *prompt*.

    Degrades gracefully: when every provider fails (signalled by
    ``LLMProviderError``), a human-readable error string is returned
    instead of raising, so callers always receive text.
    """
    try:
        response = call_llm_with_fallback(prompt)
    except LLMProviderError as exc:
        return f"LLM providers unavailable: {exc}"
    return response
@dataclass
class AgentResult:
    """Outcome of one agent invocation.

    Bundles the prompt that was sent, the raw LLM reply, and the
    structured recommendations derived from that reply.
    """

    # Prompt text sent to the LLM provider.
    prompt: str
    # Unprocessed text returned by the provider.
    raw_response: str
    # Structured output extracted from the raw response.
    recommendations: Dict[str, Any]
class BaseAgent:
    """Template for concrete agents: build a prompt, call the LLM, parse.

    Subclasses override :meth:`build_prompt` (required) and optionally
    :meth:`parse`; :meth:`run` wires the pipeline together.
    """

    # Identifier for this agent type; subclasses override.
    name = "base"

    def run(self, context: Dict[str, Any]) -> AgentResult:
        """Execute the build → call → parse pipeline for *context*."""
        rendered_prompt = self.build_prompt(context)
        response_text = llm_call(rendered_prompt)
        return AgentResult(
            prompt=rendered_prompt,
            raw_response=response_text,
            recommendations=self.parse(response_text),
        )

    def build_prompt(self, context: Dict[str, Any]) -> str:
        """Render the provider prompt from *context*; subclasses must override."""
        raise NotImplementedError

    def parse(self, raw: str) -> Dict[str, Any]:
        """Default parser: wrap the stripped raw response in a notes dict."""
        return {"notes": raw.strip()}