mirror of
https://github.com/mblanke/GooseStrike.git
synced 2026-03-01 14:00:21 -05:00
Add integration test endpoints for n8n and Ollama
This commit is contained in:
38
app/agents/base_agent.py
Normal file
38
app/agents/base_agent.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""Base LLM agent scaffolding for GooseStrike."""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict
|
||||
|
||||
from .llm_router import LLMProviderError, call_llm_with_fallback
|
||||
|
||||
|
||||
def llm_call(prompt: str) -> str:
    """Call the configured LLM providers with fallback behavior.

    Delegates to the router's fallback chain; if every provider fails,
    the error is folded into a plain string rather than propagated, so
    callers always receive text.
    """
    try:
        response = call_llm_with_fallback(prompt)
    except LLMProviderError as exc:
        # Degrade gracefully: surface the failure as the "reply" text
        # instead of raising, so agent runs never crash on provider outages.
        return f"LLM providers unavailable: {exc}"
    return response
|
||||
|
||||
|
||||
@dataclass
class AgentResult:
    """Outcome of one agent run: the prompt sent, the raw reply, and parsed output."""

    # Prompt string that was sent to the LLM provider chain.
    prompt: str
    # Unprocessed text returned by llm_call (may be the fallback error message).
    raw_response: str
    # Structured output derived from raw_response by the agent's parse().
    recommendations: Dict[str, Any]
|
||||
|
||||
|
||||
class BaseAgent:
    """Template for LLM-backed agents: build a prompt, call the LLM, parse the reply.

    Subclasses override build_prompt (required) and optionally parse.
    """

    # Identifier for this agent type; subclasses are expected to override it.
    name = "base"

    def run(self, context: Dict[str, Any]) -> AgentResult:
        """Execute the full pipeline over *context* and package the result."""
        rendered_prompt = self.build_prompt(context)
        reply = llm_call(rendered_prompt)
        return AgentResult(
            prompt=rendered_prompt,
            raw_response=reply,
            recommendations=self.parse(reply),
        )

    def build_prompt(self, context: Dict[str, Any]) -> str:
        """Render the provider prompt for *context*; subclasses must override."""
        raise NotImplementedError

    def parse(self, raw: str) -> Dict[str, Any]:
        """Default parsing: wrap the stripped raw reply under a 'notes' key."""
        return {"notes": raw.strip()}
|
||||
Reference in New Issue
Block a user