fix: update Jetson Dockerfile to use correct L4T r36.4.0 base image
- Use dustynv/l4t-pytorch:r36.4.0 for JetPack 6.x compatibility
- Simplify to single-stage build (no multi-stage needed)
- Add Jetson usage hint to run.sh help text
- Dockerfile.jetson +18 -37
- run-jetson.sh +6 -5
- run.sh +4 -3
Dockerfile.jetson
CHANGED
|
@@ -1,8 +1,13 @@
|
|
| 1 |
# Dockerfile for NVIDIA Jetson (ARM64) - Jetson Orin Nano, Xavier, etc.
|
| 2 |
# Uses NVIDIA's L4T PyTorch container as base for proper GPU support
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 3 |
|
| 4 |
-
|
| 5 |
-
FROM nvcr.io/nvidia/l4t-pytorch:r36.2.0-pth2.1-py3 AS builder
|
| 6 |
|
| 7 |
WORKDIR /app
|
| 8 |
|
|
@@ -14,63 +19,39 @@ RUN apt-get update && apt-get install -y \
|
|
| 14 |
&& apt-get install -y nodejs \
|
| 15 |
&& rm -rf /var/lib/apt/lists/*
|
| 16 |
|
| 17 |
-
# Copy
|
| 18 |
COPY crossword-app/frontend/package*.json ./frontend/
|
| 19 |
RUN cd frontend && npm ci
|
| 20 |
|
| 21 |
-
|
|
|
|
|
|
|
|
|
|
| 22 |
COPY crossword-app/backend-py/requirements.txt ./backend-py/
|
| 23 |
-
RUN
|
| 24 |
|
| 25 |
-
# Copy
|
| 26 |
-
COPY crossword-app/frontend/ ./frontend/
|
| 27 |
COPY crossword-app/backend-py/ ./backend-py/
|
| 28 |
COPY crossword-app/words/ ./backend-py/words/
|
| 29 |
|
| 30 |
# Copy cache directory with pre-built models and NLTK data
|
| 31 |
-
COPY cache-dir/ ./
|
| 32 |
-
RUN chmod -R 755 ./
|
| 33 |
|
| 34 |
-
#
|
| 35 |
-
RUN cd frontend && npm run build
|
| 36 |
-
|
| 37 |
-
# Copy built frontend files to Python backend public directory
|
| 38 |
RUN mkdir -p backend-py/public && cp -r frontend/dist/* backend-py/public/
|
| 39 |
|
| 40 |
-
# Stage 2: Runtime
|
| 41 |
-
FROM nvcr.io/nvidia/l4t-pytorch:r36.2.0-pth2.1-py3 AS runtime
|
| 42 |
-
|
| 43 |
-
# Install minimal runtime dependencies
|
| 44 |
-
RUN apt-get update && apt-get install -y \
|
| 45 |
-
curl \
|
| 46 |
-
&& rm -rf /var/lib/apt/lists/*
|
| 47 |
-
|
| 48 |
-
# Create non-root user
|
| 49 |
-
RUN useradd -m -u 1000 appuser
|
| 50 |
-
|
| 51 |
WORKDIR /app/backend-py
|
| 52 |
|
| 53 |
-
# Copy Python packages from builder (sentence-transformers, etc.)
|
| 54 |
-
COPY --from=builder /usr/local/lib/python3.10/dist-packages /usr/local/lib/python3.10/dist-packages
|
| 55 |
-
|
| 56 |
-
# Copy built application files
|
| 57 |
-
COPY --from=builder --chown=appuser:appuser /app/backend-py ./
|
| 58 |
-
|
| 59 |
-
# Copy cache directory
|
| 60 |
-
COPY --from=builder --chown=appuser:appuser /app/cache-dir /app/backend-py/cache
|
| 61 |
-
|
| 62 |
-
USER appuser
|
| 63 |
-
|
| 64 |
EXPOSE 7860
|
| 65 |
|
| 66 |
ENV NODE_ENV=production
|
| 67 |
ENV PORT=7860
|
| 68 |
ENV PYTHONPATH=/app/backend-py
|
| 69 |
ENV PYTHONUNBUFFERED=1
|
| 70 |
-
ENV PIP_NO_CACHE_DIR=1
|
| 71 |
ENV CACHE_DIR=/app/backend-py/cache
|
| 72 |
ENV NLTK_DATA=/app/backend-py/cache/nltk_data
|
| 73 |
ENV VOCAB_SOURCE=norvig
|
| 74 |
ENV NORVIG_VOCAB_PATH=/app/backend-py/words/norvig/count_1w100k.txt
|
| 75 |
|
| 76 |
-
CMD ["
|
|
|
|
| 1 |
# Dockerfile for NVIDIA Jetson (ARM64) - Jetson Orin Nano, Xavier, etc.
|
| 2 |
# Uses NVIDIA's L4T PyTorch container as base for proper GPU support
|
| 3 |
+
#
|
| 4 |
+
# Check your JetPack/L4T version: cat /etc/nv_tegra_release
|
| 5 |
+
#
|
| 6 |
+
# Available base images:
|
| 7 |
+
# JetPack 6.x (L4T R36.x): dustynv/l4t-pytorch:r36.4.0
|
| 8 |
+
# JetPack 5.1.x (L4T R35.x): nvcr.io/nvidia/l4t-pytorch:r35.2.1-pth2.0-py3
|
| 9 |
|
| 10 |
+
FROM dustynv/l4t-pytorch:r36.4.0
|
|
|
|
| 11 |
|
| 12 |
WORKDIR /app
|
| 13 |
|
|
|
|
| 19 |
&& apt-get install -y nodejs \
|
| 20 |
&& rm -rf /var/lib/apt/lists/*
|
| 21 |
|
| 22 |
+
# Copy and build frontend
|
| 23 |
COPY crossword-app/frontend/package*.json ./frontend/
|
| 24 |
RUN cd frontend && npm ci
|
| 25 |
|
| 26 |
+
COPY crossword-app/frontend/ ./frontend/
|
| 27 |
+
RUN cd frontend && npm run build
|
| 28 |
+
|
| 29 |
+
# Install Python dependencies (PyTorch already in base image)
|
| 30 |
COPY crossword-app/backend-py/requirements.txt ./backend-py/
|
| 31 |
+
RUN pip3 install --no-cache-dir -r backend-py/requirements.txt
|
| 32 |
|
| 33 |
+
# Copy backend code
|
|
|
|
| 34 |
COPY crossword-app/backend-py/ ./backend-py/
|
| 35 |
COPY crossword-app/words/ ./backend-py/words/
|
| 36 |
|
| 37 |
# Copy cache directory with pre-built models and NLTK data
|
| 38 |
+
COPY cache-dir/ ./backend-py/cache/
|
| 39 |
+
RUN chmod -R 755 ./backend-py/cache/ || true
|
| 40 |
|
| 41 |
+
# Copy built frontend to backend public directory
|
|
|
|
|
|
|
|
|
|
| 42 |
RUN mkdir -p backend-py/public && cp -r frontend/dist/* backend-py/public/
|
| 43 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 44 |
WORKDIR /app/backend-py
|
| 45 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 46 |
EXPOSE 7860
|
| 47 |
|
| 48 |
ENV NODE_ENV=production
|
| 49 |
ENV PORT=7860
|
| 50 |
ENV PYTHONPATH=/app/backend-py
|
| 51 |
ENV PYTHONUNBUFFERED=1
|
|
|
|
| 52 |
ENV CACHE_DIR=/app/backend-py/cache
|
| 53 |
ENV NLTK_DATA=/app/backend-py/cache/nltk_data
|
| 54 |
ENV VOCAB_SOURCE=norvig
|
| 55 |
ENV NORVIG_VOCAB_PATH=/app/backend-py/words/norvig/count_1w100k.txt
|
| 56 |
|
| 57 |
+
CMD ["python3", "-m", "uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860", "--workers", "1"]
|
run-jetson.sh
CHANGED
|
@@ -2,19 +2,21 @@
|
|
| 2 |
set -e
|
| 3 |
|
| 4 |
# Build and run script for NVIDIA Jetson devices (Orin Nano, Xavier, etc.)
|
|
|
|
| 5 |
|
| 6 |
show_usage() {
|
| 7 |
echo "Usage: $0 [COMMAND]"
|
| 8 |
echo ""
|
| 9 |
echo "Commands:"
|
| 10 |
echo " build - Build the Jetson Docker image"
|
| 11 |
-
echo " run - Run the container with GPU support"
|
| 12 |
-
echo " both - Build and run
|
| 13 |
echo " shell - Run with bash shell for debugging"
|
| 14 |
echo ""
|
| 15 |
}
|
| 16 |
|
| 17 |
-
IMAGE_NAME="crossword-
|
|
|
|
| 18 |
DOCKER_ARGS="--rm -p 7860:7860 --runtime nvidia \
|
| 19 |
-e ENABLE_DEBUG_TAB=true \
|
| 20 |
-e VOCAB_SOURCE=norvig \
|
|
@@ -36,7 +38,7 @@ run_shell() {
|
|
| 36 |
}
|
| 37 |
|
| 38 |
# Parse command
|
| 39 |
-
COMMAND="${1:-
|
| 40 |
|
| 41 |
case "$COMMAND" in
|
| 42 |
build)
|
|
@@ -50,7 +52,6 @@ case "$COMMAND" in
|
|
| 50 |
run_container
|
| 51 |
;;
|
| 52 |
shell)
|
| 53 |
-
build_image
|
| 54 |
run_shell
|
| 55 |
;;
|
| 56 |
-h|--help|help)
|
|
|
|
| 2 |
set -e
|
| 3 |
|
| 4 |
# Build and run script for NVIDIA Jetson devices (Orin Nano, Xavier, etc.)
|
| 5 |
+
# Uses Dockerfile.jetson with L4T PyTorch base image
|
| 6 |
|
| 7 |
show_usage() {
|
| 8 |
echo "Usage: $0 [COMMAND]"
|
| 9 |
echo ""
|
| 10 |
echo "Commands:"
|
| 11 |
echo " build - Build the Jetson Docker image"
|
| 12 |
+
echo " run - Run the container with GPU support (default)"
|
| 13 |
+
echo " both - Build and run"
|
| 14 |
echo " shell - Run with bash shell for debugging"
|
| 15 |
echo ""
|
| 16 |
}
|
| 17 |
|
| 18 |
+
IMAGE_NAME="crossword-app:jetson"
|
| 19 |
+
|
| 20 |
DOCKER_ARGS="--rm -p 7860:7860 --runtime nvidia \
|
| 21 |
-e ENABLE_DEBUG_TAB=true \
|
| 22 |
-e VOCAB_SOURCE=norvig \
|
|
|
|
| 38 |
}
|
| 39 |
|
| 40 |
# Parse command
|
| 41 |
+
COMMAND="${1:-run}"
|
| 42 |
|
| 43 |
case "$COMMAND" in
|
| 44 |
build)
|
|
|
|
| 52 |
run_container
|
| 53 |
;;
|
| 54 |
shell)
|
|
|
|
| 55 |
run_shell
|
| 56 |
;;
|
| 57 |
-h|--help|help)
|
run.sh
CHANGED
|
@@ -15,6 +15,7 @@ show_usage() {
|
|
| 15 |
echo " $0 gpu # Force GPU mode"
|
| 16 |
echo " $0 cpu # Force CPU-only mode"
|
| 17 |
echo ""
|
|
|
|
| 18 |
}
|
| 19 |
|
| 20 |
# Parse command line arguments
|
|
@@ -68,11 +69,11 @@ check_gpu_available() {
|
|
| 68 |
if ! command -v nvidia-smi &> /dev/null; then
|
| 69 |
return 1
|
| 70 |
fi
|
| 71 |
-
|
| 72 |
if ! docker run --rm --gpus all nvidia/cuda:12.1.0-base-ubuntu22.04 nvidia-smi &> /dev/null; then
|
| 73 |
return 1
|
| 74 |
fi
|
| 75 |
-
|
| 76 |
return 0
|
| 77 |
}
|
| 78 |
|
|
@@ -108,4 +109,4 @@ case "$MODE" in
|
|
| 108 |
run_cpu
|
| 109 |
fi
|
| 110 |
;;
|
| 111 |
-
esac
|
|
|
|
| 15 |
echo " $0 gpu # Force GPU mode"
|
| 16 |
echo " $0 cpu # Force CPU-only mode"
|
| 17 |
echo ""
|
| 18 |
+
echo "For Jetson devices, use: ./run-jetson.sh"
|
| 19 |
}
|
| 20 |
|
| 21 |
# Parse command line arguments
|
|
|
|
| 69 |
if ! command -v nvidia-smi &> /dev/null; then
|
| 70 |
return 1
|
| 71 |
fi
|
| 72 |
+
|
| 73 |
if ! docker run --rm --gpus all nvidia/cuda:12.1.0-base-ubuntu22.04 nvidia-smi &> /dev/null; then
|
| 74 |
return 1
|
| 75 |
fi
|
| 76 |
+
|
| 77 |
return 0
|
| 78 |
}
|
| 79 |
|
|
|
|
| 109 |
run_cpu
|
| 110 |
fi
|
| 111 |
;;
|
| 112 |
+
esac
|