# paperhawk / docker-compose.yml
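# ---------------------------------------------------------------------------
# Quick start (default vLLM profile; assumes a vLLM server is already
# serving on the host at port 8000):
#   docker compose up -d --build
#   docker compose logs -f langgraph-app
# The app is served on http://localhost:8501 once the container is up.
# ---------------------------------------------------------------------------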
services:
  # ---------------------------------------------------------------------------
  # Streamlit + LangGraph backend
  # ---------------------------------------------------------------------------
  langgraph-app:
    build: .
    image: paperhawk:latest
    container_name: document-intelligence-amd
    ports:
      - "8501:8501"
    env_file:
      - .env
    environment:
      # Default is the vLLM profile; override via .env or shell export.
      - LLM_PROFILE=${LLM_PROFILE:-vllm}
      # NOTE: inside the container, "localhost" would resolve to the container
      # itself, not the host. host.docker.internal reaches a vLLM server on the
      # Docker host (mapped via extra_hosts below so it also resolves on Linux).
      - VLLM_BASE_URL=${VLLM_BASE_URL:-http://host.docker.internal:8000/v1}
      - VLLM_MODEL=${VLLM_MODEL:-Qwen/Qwen2.5-14B-Instruct}
      - OLLAMA_BASE_URL=http://ollama:11434
    extra_hosts:
      - "host.docker.internal:host-gateway"
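    # Example .env (illustrative values; any of the variables above can be
    # overridden the same way):
    #   LLM_PROFILE=ollama
    #   VLLM_BASE_URL=http://host.docker.internal:8000/v1
    #   VLLM_MODEL=Qwen/Qwen2.5-14B-Instruct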
    volumes:
      # AsyncSqliteSaver checkpointer persists across restarts
      - ./data:/app/data
      # ChromaDB persistent vector store
      - ./chroma_db:/app/chroma_db
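    # Soft dependency: `required: false` (Compose v2.20+) lets this service
    # start even when the ollama profile is inactive; when the profile is
    # active, startup waits for the health check below to pass.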
    depends_on:
      ollama:
        condition: service_healthy
        required: false
    restart: unless-stopped
  # ---------------------------------------------------------------------------
  # Ollama LLM server (optional profile, local dev fallback)
  # ---------------------------------------------------------------------------
  # Start: docker compose --profile ollama up -d
  # Model: docker compose exec ollama ollama pull qwen2.5:7b-instruct
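  # To point the app at this service, set LLM_PROFILE=ollama in .env (value
  # assumed from the profile name); OLLAMA_BASE_URL=http://ollama:11434 is
  # already passed to the app container above.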
  ollama:
    image: ollama/ollama:latest
    container_name: document-intelligence-amd-ollama
    profiles: ["ollama"]
    ports:
      - "11434:11434"
    volumes:
      - ollama_models:/root/.ollama
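    # `ollama list` talks to the local API server, so it only succeeds once
    # the server is accepting requests; that makes it a usable readiness probe.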
    healthcheck:
      test: ["CMD", "ollama", "list"]
      interval: 10s
      timeout: 5s
      retries: 10
      start_period: 30s
    restart: unless-stopped
volumes:
  ollama_models: