Create start.sh
Browse files
start.sh
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
# Container entrypoint: start the Ollama backend, wait until its HTTP API
# answers, pre-pull default models (best-effort), then hand this process
# over to Open WebUI on port 7860.
set -euo pipefail

echo "🚀 Starting Ollama backend..."
ollama serve &
OLLAMA_PID=$!

# Wait for Ollama to be ready. Also verify the server process is still
# alive each iteration so we fail fast instead of polling a dead backend
# forever.
echo "⏳ Waiting for Ollama to initialize..."
until curl -s http://localhost:11434/api/tags > /dev/null 2>&1; do
  if ! kill -0 "$OLLAMA_PID" 2>/dev/null; then
    echo "❌ Ollama server exited during startup" >&2
    exit 1
  fi
  sleep 2
  echo " still waiting for Ollama..."
done
echo "✅ Ollama is up!"

# Auto-pull a lightweight default model (CPU-friendly). Pull failures are
# deliberately non-fatal: models can still be pulled from the UI later.
echo "📥 Pulling default model: phi3:mini (~2.2GB)..."
ollama pull phi3:mini || echo "⚠️ Pull failed, continuing anyway (pull from UI)"

# Also pull mistral as a general purpose model
echo "📥 Pulling mistral:latest (~4.4GB)..."
ollama pull mistral:latest || echo "⚠️ mistral pull failed, continuing"

echo "🌐 Starting Open WebUI on port 7860..."
# exec replaces this shell so Open WebUI becomes the foreground process
# and receives container signals directly; Ollama keeps running in the
# background of the same container.
exec open-webui serve \
  --host 0.0.0.0 \
  --port 7860 \
  --data-dir /data/webui \
  --ollama-base-url http://localhost:11434
|