import os
from pydantic_settings import BaseSettings
from typing import Optional, Dict, Any
from supabase import create_client, Client
class Settings(BaseSettings):
    """Environment-driven application settings (pydantic-settings).

    Values are read from process environment variables and the local ``.env``
    file; unknown env vars are ignored (``extra: "ignore"``).
    """

    # Supabase — both must be set for ConfigService to reach the database.
    SUPABASE_URL: str = ""
    SUPABASE_SERVICE_ROLE_KEY: str = ""

    # AI Providers — all optional; a provider is simply unavailable without its key.
    OPENAI_API_KEY: Optional[str] = None
    GROQ_API_KEY: Optional[str] = None
    GEMINI_API_KEY: Optional[str] = None
    ANTHROPIC_API_KEY: Optional[str] = None
    AMD_API_KEY: Optional[str] = None
    TAVILY_API_KEY: Optional[str] = None

    # Infrastructure (DigitalOcean)
    DO_API_TOKEN: Optional[str] = None
    DO_INFERENCE_KEY: Optional[str] = None
    DO_AGENT_ACCESS_KEY: Optional[str] = None
    DO_AGENT_ENDPOINT: Optional[str] = None
    DO_REGION: str = "nyc3"

    # App Config
    TASK_QUEUE_EMBEDDED_WORKER: bool = True
    TASK_QUEUE_HEARTBEAT_ENABLED: bool = True
    TASK_EXECUTION_MODE: str = "queue"  # direct | queue
    OUTPUT_LANGUAGE: str = "en"
    PORT: int = 8000
    SENTRY_DSN: Optional[str] = None

    # pydantic-settings model configuration: load .env, ignore unknown keys.
    model_config = {
        "env_file": ".env",
        "extra": "ignore"
    }
# Module-level singleton; import this rather than instantiating Settings again.
settings = Settings()
class ConfigService:
    """
    Manages application-wide settings stored in Supabase with local fallback defaults.
    Borrowed from AgentCollab for enhanced flexibility.

    Lookup order for every query: in-process cache -> Supabase ``app_config``
    table -> hard-coded ``_DEFAULTS`` (providers) or the caller-supplied
    default (globals). DB failures are swallowed deliberately so the app
    keeps running on local defaults.
    """

    # Process-wide cache shared by every lookup; entries never expire
    # (restart the process to pick up changed DB values once cached).
    _cache: Dict[str, Any] = {}
    # Lazily created Supabase client; stays None until credentials are available.
    # (Fix: was annotated as `Client` while initialized to None.)
    _supabase: Optional[Client] = None

    # Defaults used when DB has no config entry for a provider
    _DEFAULTS: Dict[str, Any] = {
        "groq": {"enabled": True, "default_model": "llama-3.3-70b-versatile", "temperature": 0.7, "max_tokens": 4096},
        "openai": {"enabled": True, "default_model": "gpt-4o", "temperature": 0.7, "max_tokens": 4096},
        "openrouter": {"enabled": True, "default_model": "google/gemini-2.0-flash", "temperature": 0.7, "max_tokens": 8192},
        "gemini": {"enabled": True, "default_model": "gemini-2.0-flash", "temperature": 0.7, "max_tokens": 8192},
        "amd": {"enabled": True, "default_model": "llama-3.3-70b-instruct", "temperature": 0.7, "max_tokens": 4096, "base_url": "https://inference.do-ai.run/v1"},
        "ollama": {"enabled": True, "default_model": "llama3.1:8b", "temperature": 0.7, "base_url": "http://localhost:11434"},
    }

    @classmethod
    def _get_supabase(cls) -> Optional[Client]:
        """Return the shared Supabase client, creating it on first use.

        Returns None when Supabase credentials are not configured.
        """
        # `is None` (not truthiness) so a falsy-but-valid client is never rebuilt.
        if cls._supabase is None:
            if not settings.SUPABASE_URL or not settings.SUPABASE_SERVICE_ROLE_KEY:
                return None
            cls._supabase = create_client(settings.SUPABASE_URL, settings.SUPABASE_SERVICE_ROLE_KEY)
        return cls._supabase

    @classmethod
    def _fetch_db_value(cls, key: str) -> Optional[Any]:
        """Fetch the ``value`` column for *key* from ``app_config``.

        Returns None when the DB is unreachable, unconfigured, errors out,
        or has no row for *key* — callers treat None as "use the fallback".
        """
        db = cls._get_supabase()
        if not db:
            return None
        try:
            resp = db.table("app_config").select("*").eq("key", key).execute()
            if resp.data and len(resp.data) > 0:
                return resp.data[0]["value"]
        except Exception:
            # Best-effort by design: any DB/network failure falls through
            # to the local defaults rather than breaking the caller.
            pass
        return None

    @classmethod
    def get_provider_config(cls, provider: str) -> Dict[str, Any]:
        """Returns config for a provider from cache, DB, then defaults."""
        cache_key = f"provider:{provider}"
        if cache_key in cls._cache:
            return cls._cache[cache_key]
        value = cls._fetch_db_value(provider)
        if value is None:
            # Unknown provider with no DB row yields {} — callers get a dict either way.
            value = cls._DEFAULTS.get(provider, {})
        cls._cache[cache_key] = value
        return value

    @classmethod
    def get_global_setting(cls, key: str, default: Any = None) -> Any:
        """Return a global setting from cache or DB, else *default*.

        The fallback *default* is intentionally not cached, so a value
        written to the DB later is picked up on a subsequent call.
        """
        cache_key = f"global:{key}"
        if cache_key in cls._cache:
            return cls._cache[cache_key]
        value = cls._fetch_db_value(key)
        if value is None:
            return default
        cls._cache[cache_key] = value
        return value
# Module-level singleton; all ConfigService state lives on the class itself
# (classmethods + class attributes), so every instance shares it.
config_service = ConfigService()