#!/usr/bin/env bash
# Strict mode: abort on command failure, unset variables, and pipeline errors.
set -euo pipefail
# Resolve the repository root (one directory above this script) and work from
# there so every relative path below (scripts/, outputs/, checkpoints/) is stable
# regardless of where the script is invoked from.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "$ROOT_DIR"
# Expected to provide the venv helpers used below (ensure_polyguard_venv,
# sync_polyguard_requirements, venv_has_required_imports, activate_polyguard_path)
# and POLYGUARD_PYTHON_BIN — confirm against scripts/venv_utils.sh.
source "$ROOT_DIR/scripts/venv_utils.sh"
# --- CLI flags (defaults) ---------------------------------------------------
PROFILE="full"          # dataset / episode sizing: "full" (default) or "quick"
SKIP_UI="false"         # --skip-ui: do not launch the npm dev UI
SKIP_TRAIN="false"      # --skip-train: skip the training/evaluation stage
FRONTIER_READY="false"  # --frontier-ready: emit the frontier_ready.json manifest
for arg in "$@"; do
  case "$arg" in
    --full) PROFILE="full" ;;
    --quick) PROFILE="quick" ;;
    --skip-ui) SKIP_UI="true" ;;
    --skip-train) SKIP_TRAIN="true" ;;
    --frontier-ready) FRONTIER_READY="true" ;;
    *)
      # Diagnostics belong on stderr, not stdout (fix: was unredirected).
      echo "Unknown flag: $arg" >&2
      echo "Usage: $0 [--full|--quick] [--skip-ui] [--skip-train] [--frontier-ready]" >&2
      exit 1
      ;;
  esac
done
# Artifact locations: service logs, generated reports, and model checkpoints.
LOG_DIR="outputs/logs"
ENV_LOG="$LOG_DIR/env.log"      # env service stdout/stderr
API_LOG="$LOG_DIR/api.log"      # API service stdout/stderr
UI_LOG="$LOG_DIR/ui.log"        # npm dev-server output
PIDS_FILE="$LOG_DIR/pids.txt"   # background PIDs, consumed by cleanup()
mkdir -p "$LOG_DIR" outputs/reports checkpoints
# Start from a clean PID ledger each run.
rm -f -- "$PIDS_FILE"
# Terminate every background service whose PID was recorded in $PIDS_FILE.
# Runs on every exit path via the EXIT trap; strictly best-effort — it never
# propagates a failure.
cleanup() {
  local pid
  [[ -f "$PIDS_FILE" ]] || return 0
  while IFS= read -r pid; do
    # Signal only processes that still exist; tolerate races where the
    # process dies between the liveness probe and the kill.
    if [[ -n "$pid" ]] && kill -0 "$pid" 2>/dev/null; then
      kill "$pid" 2>/dev/null || true
    fi
  done < "$PIDS_FILE"
}
trap cleanup EXIT
echo "[run_all_local] Installing python deps"
# Create/refresh the project virtualenv (helper from scripts/venv_utils.sh).
ensure_polyguard_venv
# Requirements sync is best-effort: failure is tolerated so a pre-populated
# venv still works in offline or network-restricted environments.
if ! sync_polyguard_requirements >/dev/null 2>&1; then
echo "[run_all_local] venv requirements sync skipped (offline or restricted environment)"
fi
# Hard requirement: the venv must already be able to import what the pipeline
# needs; otherwise bail out with a remediation hint.
if ! venv_has_required_imports >/dev/null 2>&1; then
echo "[run_all_local] Missing required Python dependencies in .venv. Run: bash scripts/bootstrap_venv.sh"
exit 1
fi
activate_polyguard_path
# Give matplotlib a writable config dir (useful in sandboxed/home-less
# environments); respect a caller-provided MPLCONFIGDIR.
export MPLCONFIGDIR="${MPLCONFIGDIR:-$ROOT_DIR/outputs/.mplconfig}"
mkdir -p "$MPLCONFIGDIR"
# Size the datasets and RL episode counts for the selected profile.
# "quick" keeps everything small for a fast smoke run; any other value
# (the default "full") uses the larger settings.
case "$PROFILE" in
  quick)
    export POLYGUARD_RISK_DATASET_SIZE=180
    export POLYGUARD_DOSE_DATASET_SIZE=120
    export POLYGUARD_SUPERVISOR_EPISODES=3
    export POLYGUARD_PLANNER_EPISODES=4
    export POLYGUARD_DOSING_EPISODES=3
    ;;
  *)
    export POLYGUARD_RISK_DATASET_SIZE=500
    export POLYGUARD_DOSE_DATASET_SIZE=300
    export POLYGUARD_SUPERVISOR_EPISODES=6
    export POLYGUARD_PLANNER_EPISODES=8
    export POLYGUARD_DOSING_EPISODES=6
    ;;
esac
# Optionally pre-pull a local Ollama model. This is strictly opt-in
# (POLYGUARD_ENABLE_OLLAMA=true) and requires the ollama CLI on PATH;
# in every other case the HF Transformers path is announced instead.
use_ollama="false"
if [[ "${POLYGUARD_ENABLE_OLLAMA:-false}" == "true" ]]; then
  if command -v ollama >/dev/null 2>&1; then
    use_ollama="true"
  fi
fi
if [[ "$use_ollama" == "true" ]]; then
  MODEL_NAME="${POLYGUARD_OLLAMA_MODEL:-qwen2.5:3b-instruct}"
  echo "[run_all_local] Ensuring Ollama model is available: $MODEL_NAME"
  # The pull is best-effort: an already-cached model keeps the run going.
  ollama pull "$MODEL_NAME" >/dev/null 2>&1 || echo "[run_all_local] ollama pull skipped/failed; continuing"
else
  echo "[run_all_local] Ollama disabled/unavailable; using HF Transformers path"
fi
# ---------------------------------------------------------------------------
# Stage 1: build all data assets using the venv's Python interpreter.
# ---------------------------------------------------------------------------
echo "[run_all_local] Building data assets"
"$POLYGUARD_PYTHON_BIN" scripts/build_synthetic_patients.py
"$POLYGUARD_PYTHON_BIN" scripts/ingest_open_drug_sources.py
"$POLYGUARD_PYTHON_BIN" scripts/build_drug_knowledge.py
"$POLYGUARD_PYTHON_BIN" scripts/build_retrieval_index.py
"$POLYGUARD_PYTHON_BIN" scripts/build_scenarios.py
"$POLYGUARD_PYTHON_BIN" scripts/bootstrap_data.py
# Corpus size follows the profile: "small" for --quick, "massive" otherwise.
"$POLYGUARD_PYTHON_BIN" scripts/build_training_corpus.py --profile "$([[ "$PROFILE" == "quick" ]] && echo small || echo massive)" --with-local --with-synthetic
# Web crawling is opt-in (POLYGUARD_ALLOW_WEB_FETCH=true); when enabled, the
# corpus is rebuilt with the web fallback allowed.
if [[ "${POLYGUARD_ALLOW_WEB_FETCH:-false}" == "true" ]]; then
"$POLYGUARD_PYTHON_BIN" scripts/crawl_labels_and_guidelines.py
"$POLYGUARD_PYTHON_BIN" scripts/build_training_corpus.py --profile small --with-local --with-synthetic --enable-web-fallback
fi
# ---------------------------------------------------------------------------
# Stage 2: train the predictive models (always), then run the full SFT/GRPO
# training + evaluation pipeline unless --skip-train was given. Under
# `set -e`, any failing script here aborts the whole run.
# ---------------------------------------------------------------------------
echo "[run_all_local] Training predictive models"
"$POLYGUARD_PYTHON_BIN" scripts/train_graph_model.py
"$POLYGUARD_PYTHON_BIN" scripts/train_risk_model.py
"$POLYGUARD_PYTHON_BIN" scripts/train_dose_model.py
if [[ "$SKIP_TRAIN" != "true" ]]; then
echo "[run_all_local] Running training and evaluation"
"$POLYGUARD_PYTHON_BIN" scripts/generate_sft_data.py
"$POLYGUARD_PYTHON_BIN" scripts/train_sft.py
"$POLYGUARD_PYTHON_BIN" scripts/train_grpo_supervisor.py
"$POLYGUARD_PYTHON_BIN" scripts/train_grpo_planner.py
"$POLYGUARD_PYTHON_BIN" scripts/train_grpo_dosing.py
# Step/sample/episode counts below scale with the profile (quick vs full).
"$POLYGUARD_PYTHON_BIN" scripts/train_grpo_trl.py --max-steps "$([[ "$PROFILE" == "quick" ]] && echo 3 || echo 12)"
# Merge the SFT adapter only if training actually produced one; otherwise the
# downstream checks use the fallback path.
if [[ -d "checkpoints/sft_adapter" ]]; then
"$POLYGUARD_PYTHON_BIN" scripts/merge_adapters_safe.py --adapter-dir checkpoints/sft_adapter --output-dir checkpoints/merged
else
echo "[run_all_local] sft_adapter not found; skipping adapter merge and using inference fallback checks"
fi
"$POLYGUARD_PYTHON_BIN" scripts/test_inference_postsave.py --samples "$([[ "$PROFILE" == "quick" ]] && echo 1 || echo 3)"
"$POLYGUARD_PYTHON_BIN" scripts/evaluate_policy_ablations.py --episodes "$([[ "$PROFILE" == "quick" ]] && echo 3 || echo 8)"
"$POLYGUARD_PYTHON_BIN" scripts/evaluate_baselines.py
"$POLYGUARD_PYTHON_BIN" scripts/evaluate_all.py
# Compare the candidate benchmark report against the baselines report.
"$POLYGUARD_PYTHON_BIN" scripts/evaluate_compare_runs.py --baseline outputs/reports/baselines.json --candidate outputs/reports/benchmark_report.json --output outputs/reports/improvement_report.json
"$POLYGUARD_PYTHON_BIN" scripts/benchmark_inference.py --provider transformers --model "${POLYGUARD_HF_MODEL:-Qwen/Qwen2.5-0.5B-Instruct}" --runs "$([[ "$PROFILE" == "quick" ]] && echo 2 || echo 5)"
"$POLYGUARD_PYTHON_BIN" scripts/run_robustness_suite.py
# Acceptance gate: a non-zero exit here fails the entire run (set -e).
"$POLYGUARD_PYTHON_BIN" scripts/acceptance_gate.py
fi
# Optionally write a machine-readable manifest declaring readiness for a
# larger ("frontier") model sweep. The heredoc delimiter is quoted ('PY') so
# the embedded Python is passed to the interpreter without shell expansion.
if [[ "$FRONTIER_READY" == "true" ]]; then
"$POLYGUARD_PYTHON_BIN" - <<'PY'
import json
from pathlib import Path
payload = {
"frontier_models": ["qwen2.5:7b-instruct", "qwen2.5:14b-instruct"],
"deployment_mode": "hf_or_vllm_ready",
"notes": "Baseline complete; ready for larger model sweep."
}
out = Path("outputs/reports/frontier_ready.json")
out.parent.mkdir(parents=True, exist_ok=True)
out.write_text(json.dumps(payload, ensure_ascii=True, indent=2), encoding="utf-8")
print("frontier_ready_manifest_written")
PY
fi
# ---------------------------------------------------------------------------
# Stage 3: start the env and API services in the background, record their PIDs
# for the EXIT-trap cleanup, then verify they answer health checks.
# ---------------------------------------------------------------------------
echo "[run_all_local] Starting env service"
"$POLYGUARD_PYTHON_BIN" -m app.env.fastapi_app >"$ENV_LOG" 2>&1 &
echo "$!" >> "$PIDS_FILE"
sleep 2
echo "[run_all_local] Starting API service"
"$POLYGUARD_PYTHON_BIN" -m app.api >"$API_LOG" 2>&1 &
echo "$!" >> "$PIDS_FILE"
sleep 2
echo "[run_all_local] Health checks"
SERVICES_UP="false"
# NOTE(review): assumes the two services listen on 127.0.0.1:8100 and :8200 —
# confirm against app.env.fastapi_app / app.api configuration.
if curl -fsS http://127.0.0.1:8100/health >/dev/null 2>&1 && curl -fsS http://127.0.0.1:8200/health >/dev/null 2>&1; then
SERVICES_UP="true"
# Metadata probe is informational only; its failure is ignored.
curl -fsS http://127.0.0.1:8100/metadata >/dev/null 2>&1 || true
echo "[run_all_local] service health checks passed"
else
# No network binding available (e.g. sandbox): fall back to in-process tests,
# and fail the run if even those do not pass.
echo "[run_all_local] network health checks unavailable, using in-process API/env smoke fallback"
if ! "$POLYGUARD_PYTHON_BIN" -m pytest tests/test_api.py tests/test_remote_env.py >/dev/null 2>&1; then
echo "[run_all_local] fallback smoke failed"
exit 1
fi
fi
# Launch the frontend dev server unless --skip-ui was given; requires npm.
if [[ "$SKIP_UI" != "true" ]]; then
if command -v npm >/dev/null 2>&1; then
echo "[run_all_local] Starting UI"
# NOTE(review): "$!" records the subshell's PID; killing that PID in cleanup()
# may leave the `npm run dev` child process running — consider process groups.
(
cd app/ui/frontend
npm install >/dev/null
npm run dev
) >"$UI_LOG" 2>&1 &
echo "$!" >> "$PIDS_FILE"
sleep 3
else
echo "[run_all_local] npm not found, UI skipped."
fi
fi
# Final smoke test and wrap-up. The smoke suite only runs when the network
# services actually came up.
if [[ "$SERVICES_UP" == "true" ]]; then
  # Fix: previously a smoke-test failure aborted the script via `set -e` with
  # all output suppressed, leaving no indication of what went wrong.
  if ! bash scripts/smoke_test_all.sh >/dev/null 2>&1; then
    echo "[run_all_local] smoke_test_all.sh failed; see $LOG_DIR for details" >&2
    exit 1
  fi
fi
echo "[run_all_local] Completed profile=$PROFILE skip_ui=$SKIP_UI skip_train=$SKIP_TRAIN frontier_ready=$FRONTIER_READY"
echo "[run_all_local] Logs in $LOG_DIR"
if [[ "$SERVICES_UP" == "true" ]]; then
  echo "[run_all_local] Services are running. Press Ctrl-C to stop."
  # Block on the background services so the EXIT trap handles Ctrl-C cleanup.
  wait
else
  echo "[run_all_local] Completed in fallback mode (services could not bind in this environment)."
fi