seriffic committed
Commit 9e70214 · 1 Parent(s): bebe2d4

entrypoint: tee ollama logs to stdout for runtime visibility


HF Spaces only surfaces stdout/stderr in the runtime log panel — a
$HOME/ollama.log we own is invisible during incident triage. tee
it through to stdout while still keeping the file for grep/tail
during long sessions.
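
A side effect of the piped form: when a pipeline is backgrounded, $! expands to the PID of its last command, so OLLAMA_PID now holds tee's PID rather than the daemon's. If the daemon's own PID is ever needed for signalling, a minimal alternative sketch, assuming the entrypoint runs under bash (the shebang isn't shown in this diff):

    # Variant sketch, not what this commit ships: bash process substitution
    # keeps $! bound to ollama itself while still teeing to stdout + file.
    LOG_FILE="$HOME/ollama.log"
    ollama serve > >(tee "$LOG_FILE") 2>&1 &
    OLLAMA_PID=$!   # PID of `ollama serve`, not tee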

Files changed (1): entrypoint.sh (+16 -1)
entrypoint.sh CHANGED
@@ -6,8 +6,11 @@
 # $HOME (which we own) instead.
 set -e
 
+# Stream Ollama's stdout+stderr to BOTH stdout (so it shows up in HF
+# Spaces runtime logs — needed to see GPU discovery output from
+# OLLAMA_DEBUG=1) AND a file (for the readiness fail-fast tail below).
 LOG_FILE="$HOME/ollama.log"
-ollama serve > "$LOG_FILE" 2>&1 &
+ollama serve 2>&1 | tee "$LOG_FILE" &
 OLLAMA_PID=$!
 
 # Wait for Ollama to be reachable (up to 60 s — first start can be slow
@@ -39,4 +42,16 @@ fi
 
 ollama list
 
+# Log GPU visibility + Ollama lib layout so we can confirm CUDA dispatch
+# from the runtime logs (paired with OLLAMA_DEBUG=1 in the daemon).
+if command -v nvidia-smi > /dev/null 2>&1; then
+  echo "[entrypoint] nvidia-smi present:"
+  nvidia-smi -L || true
+else
+  echo "[entrypoint] nvidia-smi NOT present — Ollama will run on CPU"
+fi
+echo "[entrypoint] ollama lib dirs:"
+ls -d /usr/lib/ollama 2>/dev/null && ls /usr/lib/ollama 2>/dev/null | head -20 || echo "  /usr/lib/ollama missing"
+ls -d /usr/local/lib/ollama 2>/dev/null && ls /usr/local/lib/ollama 2>/dev/null | head -20 || echo "  /usr/local/lib/ollama missing"
+
 exec uvicorn web.main:app --host 0.0.0.0 --port 7860 --log-level info
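
The fail-fast tail that the new comment mentions lives in the readiness wait this diff only shows as context. For reference, a minimal sketch of such a loop, assuming Ollama's default API port 11434 and curl in the image (the actual loop in entrypoint.sh may differ):

    # Sketch of the readiness wait referenced above; port and endpoint are
    # assumptions (Ollama serves GET /api/version on 11434 by default).
    for i in $(seq 1 60); do
      curl -sf http://127.0.0.1:11434/api/version > /dev/null && break
      if [ "$i" -eq 60 ]; then
        echo "[entrypoint] Ollama not reachable after 60 s; last log lines:"
        tail -n 50 "$LOG_FILE"   # still works: tee keeps writing the file
        exit 1
      fi
      sleep 1
    done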