from .base import BaseAgent
from typing import Any, Dict, List, Optional

import httpx

from services.config import config_service
class LocalAgent(BaseAgent):
    """
    Agent implementation for Local LLMs (Ollama).

    Sends generation requests to an Ollama server's ``/api/generate``
    endpoint over HTTP. Connection settings (base URL, temperature,
    timeout) come from the dynamic "ollama" provider config.
    """

    def __init__(self, name: str, role: str, model: str = "llama3.1:8b", system_prompt: Optional[str] = None):
        """
        Initialize the agent and load the Ollama provider config.

        Args:
            name: Agent display name (forwarded to BaseAgent).
            role: Agent role description (forwarded to BaseAgent).
            model: Ollama model tag to use for generation.
            system_prompt: Optional system instructions prepended to
                every prompt; forwarded to BaseAgent.
        """
        super().__init__(name, role, model, system_prompt)
        # Load dynamic config for the "ollama" provider; fall back to
        # sensible local defaults when keys are absent.
        self.provider_config = config_service.get_provider_config("ollama")
        self.base_url = self.provider_config.get("base_url", "http://localhost:11434")
        self.temperature = self.provider_config.get("temperature", 0.7)
        # Timeout is now configurable; 60.0 preserves the previously
        # hard-coded request timeout for existing deployments.
        self.timeout = float(self.provider_config.get("timeout", 60.0))

    async def run(self, task_description: str, context: List[Dict[str, Any]], use_tools: bool = False, extra_context: str = "") -> Dict[str, Any]:
        """
        Execute one generation round-trip against the Ollama server.

        Args:
            task_description: The task the agent should perform.
            context: Prior results/messages passed to the prompt builder.
            use_tools: Accepted for interface parity; not used by the
                local provider (Ollama /api/generate has no tool calls).
            extra_context: Extra free-form text appended to the prompt.

        Returns:
            On success, the dict produced by ``self._result`` from the
            model's JSON response text. On any failure (connection,
            HTTP status, malformed response), a structured error dict
            with ``status == "error"`` — this method never raises.
        """
        full_prompt = f"""
System Instructions: {self.system_prompt}
{self._build_json_prompt(task_description, context, extra_context)}
"""
        async with httpx.AsyncClient(timeout=self.timeout) as client:
            try:
                response = await client.post(
                    f"{self.base_url}/api/generate",
                    json={
                        "model": self.model,
                        "prompt": full_prompt,
                        # Single-shot response; "json" asks Ollama to
                        # constrain output to valid JSON.
                        "stream": False,
                        "format": "json",
                        "options": {
                            "temperature": self.temperature
                        },
                    },
                )
                response.raise_for_status()
                result = response.json()
                return self._result("local", result.get("response", "{}"))
            except Exception as e:
                # Deliberately broad: this is the error-reporting
                # boundary — connection errors, HTTP errors, and
                # response-decoding errors are all surfaced to the
                # caller as a structured error dict, never raised.
                return {
                    "agent_name": self.name,
                    "provider": "local",
                    "status": "error",
                    "error": f"Ollama connection failed: {str(e)}"
                }