"""NeuroBridge FastAPI entrypoint.
Exposes /health for liveness. Pipeline routes are mounted in Task 8.
"""
from __future__ import annotations
from fastapi import FastAPI
from src.api.routes import (
router as pipeline_router,
predict_router,
explain_router,
experiments_router,
agent_router,
fusion_router,
research_router,
)
from src.api.schemas import HealthResponse
app = FastAPI(
    title="NeuroBridge Enterprise",
    description="Three-modality clinical-ML pipeline surface (BBB / EEG / MRI).",
    version="0.4.0",
)

# Mount every feature router on the shared application instance.
# Order matches the original explicit include_router sequence.
for _router in (
    pipeline_router,
    predict_router,
    explain_router,
    experiments_router,
    agent_router,
    fusion_router,
    research_router,
):
    app.include_router(_router)
@app.get("/health", response_model=HealthResponse)
def health() -> HealthResponse:
    """Liveness probe for docker-compose health checks and the Streamlit UI."""
    return HealthResponse(
        status="ok",
        pipelines=["bbb", "eeg", "mri"],
    )
def _probe_openrouter_model(key: str, model: str) -> dict | str:
    """Fire one 8-token chat completion at *model* via OpenRouter.

    Returns a status dict describing the outcome, or a plain string when
    the openai SDK is not importable in this environment.
    """
    try:
        from openai import (
            OpenAI,
            APIStatusError,
            APIConnectionError,
            APITimeoutError,
            RateLimitError,
        )
    except ImportError as e:
        return f"openai SDK not importable: {e}"
    try:
        client = OpenAI(
            base_url="https://openrouter.ai/api/v1",
            api_key=key,
            timeout=8.0,
        )
        c = client.chat.completions.create(
            model=model,
            messages=[{"role": "user", "content": "Reply with the single word OK."}],
            max_tokens=8,
            temperature=0,
        )
        text = (c.choices[0].message.content or "").strip()
        return {"status": "OK", "preview": text[:60]}
    except RateLimitError:
        # Must precede APIStatusError — RateLimitError is its subclass.
        return {"status": "429", "note": "rate-limited"}
    except APIStatusError as e:
        return {"status": str(getattr(e, "status_code", "?")), "message": str(e)[:200]}
    except (APIConnectionError, APITimeoutError) as e:
        # APITimeoutError subclasses APIConnectionError; both mean "no HTTP reply".
        return {"status": "CONN", "exception": type(e).__name__}
    except Exception as e:
        return {"status": "ERR", "exception": type(e).__name__, "message": str(e)[:200]}


@app.get("/diag/openrouter")
def diag_openrouter() -> dict:
    """One-shot OpenRouter reachability probe — diagnostic only.

    Reports whether the explainer can reach OpenRouter from this container.
    Returns key presence (length + first 12 chars only — never the full
    secret), kill-switch state, the first model in the chain, and the
    HTTP/error status of an 8-token probe call against that model. Used
    to diagnose why /explain/* falls back to template in production.
    """
    import os as _os
    from src.llm import explainer as _ex

    key = _os.environ.get("OPENROUTER_API_KEY") or ""
    chain = _ex._free_model_chain()
    first_model = chain[0] if chain else None

    # Environment/configuration snapshot; "probe" is filled in below.
    out: dict = {
        "has_key": bool(key),
        "key_len": len(key),
        "key_prefix": key[:12] if key else None,  # prefix only — never the full secret
        "kill_switch_on": _os.environ.get("NEUROBRIDGE_DISABLE_LLM") == "1",
        "should_use_llm": _ex._should_use_llm(),
        "chain_len": len(chain),
        "first_model": first_model,
        "probe": None,
    }
    if not key or not first_model:
        out["probe"] = "skipped (no key or empty chain)"
        return out

    out["probe"] = _probe_openrouter_model(key, first_model)
    return out
@app.get("/diag/agent")
def diag_agent() -> dict:
    """Reachability probe for the orchestrator agent surface.

    Reports key presence (length + 12-char prefix only — never the full
    secret), the configured agent model, knowledge-base index status,
    and the registered tool names.
    """
    import os as _os
    from pathlib import Path as _Path
    from src.agents.tools import build_default_tools

    key = _os.environ.get("OPENROUTER_API_KEY") or ""
    model = _os.environ.get("NEUROBRIDGE_AGENT_MODEL", "google/gemini-2.0-flash-exp:free")

    # The RAG index counts as present only when BOTH artefacts exist.
    rag_dir = _Path("data/processed/faiss_index")
    rag_status: dict = {"index_dir": str(rag_dir), "exists": False, "chunk_count": 0}
    if (rag_dir / "index.bin").exists() and (rag_dir / "chunks.json").exists():
        rag_status["exists"] = True
        try:
            import json as _json
            # Fix: explicit encoding so the chunk count does not depend on
            # the container's locale default.
            raw = (rag_dir / "chunks.json").read_text(encoding="utf-8")
            rag_status["chunk_count"] = len(_json.loads(raw))
        except Exception as e:
            # Best-effort diagnostics: report the failure, don't crash the probe.
            rag_status["error"] = f"chunks.json unreadable: {e}"

    tools = build_default_tools(rag_index_dir=rag_dir if rag_status["exists"] else None)
    return {
        "has_key": bool(key),
        "key_len": len(key),
        "key_prefix": key[:12] if key else None,  # prefix only — never the full secret
        "agent_model": model,
        "rag": rag_status,
        "tool_names": [t.name for t in tools],
    }