# ═══════════════════════════════════════════════════════════
# GraphRAG Inference Hackathon — Environment Configuration
# Supports 12 LLM providers + TigerGraph + Ollama
# ═══════════════════════════════════════════════════════════

# ──── LLM Providers (set any/all — system auto-detects) ─────
OPENAI_API_KEY=sk-...
ANTHROPIC_API_KEY=sk-ant-api03-...
GEMINI_API_KEY=AIza...
MISTRAL_API_KEY=...
COHERE_API_KEY=...
OPENROUTER_API_KEY=sk-or-v1-...
GROQ_API_KEY=gsk_...
XAI_API_KEY=xai-...
TOGETHER_API_KEY=...
HF_TOKEN=hf_...
DEEPSEEK_API_KEY=sk-...

# ──── Ollama (Local — no API key needed) ────────────────────
# Install: https://ollama.ai
# Pull models: ollama pull llama3.2
# Ollama runs on http://localhost:11434 by default
OLLAMA_BASE_URL=http://localhost:11434

# ──── Default Provider ──────────────────────────────────────
LLM_PROVIDER=anthropic
LLM_MODEL=claude-sonnet-4-20250514

# ──── TigerGraph Cloud ──────────────────────────────────────
TG_HOST=https://YOUR_SUBDOMAIN.tgcloud.io
TG_GRAPH=GraphRAG
TG_USERNAME=tigergraph
TG_PASSWORD=

# ──── Dashboard ─────────────────────────────────────────────
DASHBOARD_PORT=7860
DEBUG=false