# SignVerse-2M / reproduce_independently.sh
# Author: Sen Fang
# Update pipeline orchestration and optimized processing (commit fa3502a)
# NOTE(review): these header lines were raw scrape residue placed before the
# shebang; commented out so bash does not try to execute them.
#!/usr/bin/env bash
# reproduce_independently.sh -- orchestrates the SignVerse-2M pipeline:
#   pipeline01: download videos / captions / metadata
#   pipeline02: extract DWPose keypoints from raw videos
#   pipeline03: archive completed folders and upload to Hugging Face
# Stages can run individually (--stage download|process|upload) or all
# three concurrently with backpressure between them (--stage all).
set -euo pipefail
# Filesystem layout; ROOT_DIR may be overridden from the environment.
ROOT_DIR="${ROOT_DIR:-/home/sf895/SignVerse-2M}"
CONDA_SH="/research/cbim/vast/sf895/miniforge3/etc/profile.d/conda.sh"
CONDA_ENV="dwpose"
SOURCE_METADATA_CSV="$ROOT_DIR/SignVerse-2M-metadata_ori.csv"
OUTPUT_METADATA_CSV="$ROOT_DIR/SignVerse-2M-metadata_processed.csv"
RAW_VIDEO_DIR="$ROOT_DIR/raw_video"
RAW_CAPTION_DIR="$ROOT_DIR/raw_caption"
RAW_METADATA_DIR="$ROOT_DIR/raw_metadata"
DATASET_DIR="$ROOT_DIR/dataset"
ARCHIVE_DIR="$ROOT_DIR/archives"
STATS_NPZ="$ROOT_DIR/stats.npz"
PROGRESS_JSON="$ROOT_DIR/archive_upload_progress.json"
PIPELINE01="$ROOT_DIR/scripts/pipeline01_download_video_fix_caption.py"
PIPELINE02="$ROOT_DIR/scripts/pipeline02_extract_dwpose_from_video.py"
PIPELINE03="$ROOT_DIR/scripts/pipeline03_upload_to_huggingface.py"
# Defaults for the CLI-tunable knobs (see print_usage).
STAGE="all"
LIMIT=""
VIDEO_IDS=()
FPS="24"
WORKERS=""
# Upload trigger thresholds: flush when pending bytes OR folder count
# reach these values. TARGET_FOLDERS is also env-overridable.
TARGET_BYTES="$((10 * 1024 * 1024 * 1024))"
TARGET_FOLDERS="${TARGET_FOLDERS:-40}"
DOWNLOAD_BATCH_SIZE="1"
PROCESS_BATCH_SIZE=""
# Backpressure limits used by the concurrent "all" mode (0 = disabled).
RAW_BACKLOG_LIMIT="340"
MAX_RAW_VIDEO_BYTES="0"
MAX_ITERATIONS="0"
IDLE_SLEEP_SECONDS="5"
REPO_ID="SignerX/SignVerse-2M"
# yt-dlp authentication / extractor passthrough options.
COOKIES_FILE=""
COOKIES_FROM_BROWSER=""
EXTRACTOR_ARGS=""
# Boolean flags (0/1) toggled by the CLI.
FORCE_METADATA=0
FORCE_SUBTITLES=0
FORCE_DOWNLOAD=0
FORCE_PROCESS=0
SKIP_VIDEO_DOWNLOAD=0
SKIP_SUBTITLES=0
DRY_RUN_UPLOAD=0
print_usage() {
  # Emit the CLI help text on stdout. The quoted delimiter keeps the
  # here-doc literal (no parameter expansion can occur in it).
  cat <<'USAGE'
Usage:
bash reproduce_independently.sh [options]
Options:
--stage {all,download,process,upload}
--limit N
--video-id ID
--video-ids "ID1 ID2 ..."
--fps N
--workers N
--target-bytes N
--target-folders N
--download-batch-size N
--process-batch-size N
--raw-backlog-limit N
--max-raw-video-bytes N
--max-iterations N
--idle-sleep-seconds N
--repo-id REPO
--cookies FILE
--cookies-from-browser BROWSER
--extractor-args VALUE
--force-metadata
--force-subtitles
--force-download
--force-process
--skip-video-download
--skip-subtitles
--dry-run-upload
--help
Examples:
bash reproduce_independently.sh --stage download --limit 10 --skip-video-download
bash reproduce_independently.sh --stage process --video-id Bdj5MUf_3Hc --workers 1
bash reproduce_independently.sh --stage upload --target-bytes 500000000
bash reproduce_independently.sh --stage all --workers 8 --download-batch-size 1 --raw-backlog-limit 340
USAGE
}
# Parse command-line options. Value options consume two words (flag +
# value, shift 2); boolean flags consume one (shift 1). Unknown
# arguments print usage and exit non-zero.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --stage)
      STAGE="$2"
      shift 2
      ;;
    --limit)
      LIMIT="$2"
      shift 2
      ;;
    --video-id)
      VIDEO_IDS+=("$2")
      shift 2
      ;;
    --video-ids)
      # Split the quoted, space-separated list into individual IDs.
      IFS=' ' read -r -a EXTRA_IDS <<< "$2"
      VIDEO_IDS+=("${EXTRA_IDS[@]}")
      shift 2
      ;;
    --fps)
      FPS="$2"
      shift 2
      ;;
    --workers)
      WORKERS="$2"
      shift 2
      ;;
    --target-bytes)
      TARGET_BYTES="$2"
      shift 2
      ;;
    --target-folders)
      # BUGFIX: this option is advertised in --help and consumed by the
      # upload stage, but previously had no case here, so it always fell
      # through to the "Unknown argument" error.
      TARGET_FOLDERS="$2"
      shift 2
      ;;
    --download-batch-size)
      DOWNLOAD_BATCH_SIZE="$2"
      shift 2
      ;;
    --process-batch-size)
      PROCESS_BATCH_SIZE="$2"
      shift 2
      ;;
    --raw-backlog-limit)
      RAW_BACKLOG_LIMIT="$2"
      shift 2
      ;;
    --max-raw-video-bytes)
      MAX_RAW_VIDEO_BYTES="$2"
      shift 2
      ;;
    --max-iterations)
      MAX_ITERATIONS="$2"
      shift 2
      ;;
    --idle-sleep-seconds)
      IDLE_SLEEP_SECONDS="$2"
      shift 2
      ;;
    --repo-id)
      REPO_ID="$2"
      shift 2
      ;;
    --cookies)
      COOKIES_FILE="$2"
      shift 2
      ;;
    --cookies-from-browser)
      COOKIES_FROM_BROWSER="$2"
      shift 2
      ;;
    --extractor-args)
      EXTRACTOR_ARGS="$2"
      shift 2
      ;;
    --force-metadata)
      FORCE_METADATA=1
      shift
      ;;
    --force-subtitles)
      FORCE_SUBTITLES=1
      shift
      ;;
    --force-download)
      FORCE_DOWNLOAD=1
      shift
      ;;
    --force-process)
      FORCE_PROCESS=1
      shift
      ;;
    --skip-video-download)
      SKIP_VIDEO_DOWNLOAD=1
      shift
      ;;
    --skip-subtitles)
      SKIP_SUBTITLES=1
      shift
      ;;
    --dry-run-upload)
      DRY_RUN_UPLOAD=1
      shift
      ;;
    -h|--help)
      print_usage
      exit 0
      ;;
    *)
      echo "Unknown argument: $1" >&2
      print_usage
      exit 1
      ;;
  esac
done
# Fail fast when the conda bootstrap script is missing -- every stage
# runs inside the conda environment via run_in_dwpose.
if [[ ! -f "$CONDA_SH" ]]; then
  echo "Missing conda init script: $CONDA_SH" >&2
  exit 1
fi
# Reject unknown stage names before doing any work.
if [[ "$STAGE" != "all" && "$STAGE" != "download" && "$STAGE" != "process" && "$STAGE" != "upload" ]]; then
  echo "Invalid --stage: $STAGE" >&2
  exit 1
fi
# Ensure working directories exist. DATASET_DIR in particular must exist:
# the pending-upload helpers iterate it unconditionally.
mkdir -p "$RAW_VIDEO_DIR" "$RAW_CAPTION_DIR" "$RAW_METADATA_DIR" "$DATASET_DIR"
run_in_dwpose() {
  # Execute the given command inside the CONDA_ENV conda environment.
  # CONDA_NO_PLUGINS=true sidesteps conda plugin startup failures.
  # shellcheck disable=SC1090
  source "$CONDA_SH"
  CONDA_NO_PLUGINS=true conda run -n "$CONDA_ENV" "$@"
}
append_video_ids() {
  # Append "--video-ids ID..." to the caller's command array (passed by
  # name via nameref) when explicit video IDs were requested; otherwise
  # leave the array untouched.
  local -n out_args=$1
  if (( ${#VIDEO_IDS[@]} > 0 )); then
    out_args+=(--video-ids "${VIDEO_IDS[@]}")
  fi
}
run_download_stage() {
  # Run pipeline01 (metadata / caption / video download) exactly once.
  # $1 (optional) overrides the global LIMIT for this invocation only;
  # every other option comes from the global configuration.
  local batch_limit="${1:-$LIMIT}"
  local cmd=(
    python "$PIPELINE01"
    --source-metadata-csv "$SOURCE_METADATA_CSV"
    --output-metadata-csv "$OUTPUT_METADATA_CSV"
    --raw-video-dir "$RAW_VIDEO_DIR"
    --raw-caption-dir "$RAW_CAPTION_DIR"
    --raw-metadata-dir "$RAW_METADATA_DIR"
    --dataset-dir "$DATASET_DIR"
    --stats-npz "$STATS_NPZ"
  )
  if [[ -n "$batch_limit" ]]; then cmd+=(--limit "$batch_limit"); fi
  append_video_ids cmd
  # Boolean feature flags.
  if [[ $FORCE_METADATA -eq 1 ]]; then cmd+=(--force-metadata); fi
  if [[ $FORCE_SUBTITLES -eq 1 ]]; then cmd+=(--force-subtitles); fi
  if [[ $FORCE_DOWNLOAD -eq 1 ]]; then cmd+=(--force-download); fi
  if [[ $SKIP_VIDEO_DOWNLOAD -eq 1 ]]; then cmd+=(--skip-video-download); fi
  if [[ $SKIP_SUBTITLES -eq 1 ]]; then cmd+=(--skip-subtitles); fi
  # yt-dlp authentication / extractor passthrough.
  if [[ -n "$COOKIES_FROM_BROWSER" ]]; then cmd+=(--cookies-from-browser "$COOKIES_FROM_BROWSER"); fi
  if [[ -n "$COOKIES_FILE" ]]; then cmd+=(--cookies "$COOKIES_FILE"); fi
  if [[ -n "$EXTRACTOR_ARGS" ]]; then cmd+=(--extractor-args "$EXTRACTOR_ARGS"); fi
  run_in_dwpose "${cmd[@]}"
}
run_process_stage() {
  # Run pipeline02 (DWPose extraction) exactly once. $1 (optional)
  # overrides the global LIMIT. Source videos are deleted by the
  # pipeline itself after a successful extraction.
  local batch_limit="${1:-$LIMIT}"
  local cmd=(
    python "$PIPELINE02"
    --raw-video-dir "$RAW_VIDEO_DIR"
    --dataset-dir "$DATASET_DIR"
    --stats-npz "$STATS_NPZ"
    --fps "$FPS"
    --delete-source-on-success
  )
  if [[ -n "$batch_limit" ]]; then cmd+=(--limit "$batch_limit"); fi
  append_video_ids cmd
  if [[ -n "$WORKERS" ]]; then cmd+=(--workers "$WORKERS"); fi
  if [[ $FORCE_PROCESS -eq 1 ]]; then cmd+=(--force); fi
  run_in_dwpose "${cmd[@]}"
}
run_upload_stage() {
  # Run pipeline03 (archive + upload to Hugging Face) exactly once.
  # $1 == "1" makes pipeline03 insist on reaching the byte target
  # before uploading; anything else allows a partial flush.
  local require_target="${1:-0}"
  local cmd=(
    python "$PIPELINE03"
    --dataset-dir "$DATASET_DIR"
    --raw-video-dir "$RAW_VIDEO_DIR"
    --raw-caption-dir "$RAW_CAPTION_DIR"
    --raw-metadata-dir "$RAW_METADATA_DIR"
    --archive-dir "$ARCHIVE_DIR"
    --progress-path "$PROGRESS_JSON"
    --stats-npz "$STATS_NPZ"
    --repo-id "$REPO_ID"
    --target-bytes "$TARGET_BYTES"
    --target-folders "$TARGET_FOLDERS"
  )
  if [[ "$require_target" == "1" ]]; then cmd+=(--require-target-bytes); fi
  if [[ $DRY_RUN_UPLOAD -eq 1 ]]; then cmd+=(--dry-run); fi
  run_in_dwpose "${cmd[@]}"
}
prune_processed_raw_videos() {
  # Delete every raw video whose extraction marker
  # dataset/<video id>/npz/.complete exists (pipeline02 finished it).
  # Prints the number of files deleted to stdout.
  # NOTE(review): shell paths are interpolated into the unquoted here-doc,
  # so a path containing a double quote would break the embedded Python --
  # acceptable here because ROOT_DIR is a plain absolute path.
  python - <<PY
from pathlib import Path
raw_dir = Path("$RAW_VIDEO_DIR")
dataset_dir = Path("$DATASET_DIR")
deleted = 0
if raw_dir.exists():
    for video_path in raw_dir.iterdir():
        if not video_path.is_file():
            continue
        # Marker written by pipeline02 on successful extraction.
        marker = dataset_dir / video_path.stem / "npz" / ".complete"
        if marker.exists():
            # unlink(missing_ok=True) requires Python >= 3.8.
            video_path.unlink(missing_ok=True)
            deleted += 1
print(deleted)
PY
}
dir_size_bytes() {
  # Print the total size in bytes of all regular files under $1,
  # recursively. Prints 0 for a missing directory or an empty tree.
  local target="$1"
  if [[ ! -d "$target" ]]; then
    echo 0
    return
  fi
  # GNU find emits one size per line; awk sums them ("total + 0" forces
  # a numeric 0 when no files matched).
  find "$target" -type f -printf '%s\n' | awk '{ total += $1 } END { print total + 0 }'
}
count_pending_downloads() {
  # Count metadata rows whose download_status is not "ok", i.e. videos
  # still waiting to be downloaded. Prefers the processed CSV and falls
  # back to the original source CSV before pipeline01 has ever run.
  python - <<PY
import csv, sys
from pathlib import Path
# Some rows carry very large fields (captions); raise the csv limit,
# capped at sys.maxsize for platforms where the C long is small.
csv.field_size_limit(min(sys.maxsize, 10 * 1024 * 1024))
path = Path("$OUTPUT_METADATA_CSV")
if not path.exists():
    path = Path("$SOURCE_METADATA_CSV")
pending = 0
# utf-8-sig tolerates a BOM written by spreadsheet tools.
with path.open("r", encoding="utf-8-sig", newline="") as handle:
    reader = csv.DictReader(handle)
    for row in reader:
        if (row.get("download_status") or "").strip() == "ok":
            continue
        pending += 1
print(pending)
PY
}
count_pending_process() {
  # Count raw video files still awaiting DWPose extraction: every
  # regular file directly inside the raw video directory.
  python - <<PY
from pathlib import Path
raw_dir = Path("$RAW_VIDEO_DIR")
pending = 0
if raw_dir.exists():
    for video_path in raw_dir.iterdir():
        if video_path.is_file():
            pending += 1
print(pending)
PY
}
count_complete_pending_upload() {
  # Count dataset folders that finished extraction (npz/.complete marker
  # present) but are not yet recorded in the upload progress JSON.
  # NOTE(review): assumes DATASET_DIR exists (created by the top-level
  # mkdir -p) -- iterdir() would raise FileNotFoundError otherwise.
  python - <<PY
import json
from pathlib import Path
dataset_dir = Path("$DATASET_DIR")
progress_path = Path("$PROGRESS_JSON")
uploaded = set()
if progress_path.exists():
    # Progress file maps folder name -> upload record; keys are enough.
    uploaded = set(json.loads(progress_path.read_text()).get("uploaded_folders", {}).keys())
count = 0
for folder_path in dataset_dir.iterdir():
    if not folder_path.is_dir():
        continue
    if folder_path.name in uploaded:
        continue
    if (folder_path / "npz" / ".complete").exists():
        count += 1
print(count)
PY
}
bytes_complete_pending_upload() {
  # Sum the on-disk size (bytes) of all dataset folders that are
  # complete but not yet uploaded; the upload loop compares this total
  # against TARGET_BYTES to decide when to flush an archive batch.
  python - <<PY
import json
from pathlib import Path
dataset_dir = Path("$DATASET_DIR")
progress_path = Path("$PROGRESS_JSON")
uploaded = set()
if progress_path.exists():
    uploaded = set(json.loads(progress_path.read_text()).get("uploaded_folders", {}).keys())
total = 0
for folder_path in dataset_dir.iterdir():
    if not folder_path.is_dir():
        continue
    if folder_path.name in uploaded:
        continue
    # Only completed folders count toward the upload budget.
    if not (folder_path / "npz" / ".complete").exists():
        continue
    for path in folder_path.rglob("*"):
        if path.is_file():
            total += path.stat().st_size
print(total)
PY
}
download_loop() {
  # Repeatedly run pipeline01 in small batches until nothing remains to
  # download. Applies backpressure: pauses while the raw backlog (files
  # awaiting extraction) or the raw-video disk budget exceeds its limit.
  local iteration=0
  while true; do
    iteration=$((iteration + 1))
    local pruned
    pruned="$(prune_processed_raw_videos)"
    local pending_download pending_process raw_video_bytes
    pending_download="$(count_pending_downloads)"
    pending_process="$(count_pending_process)"
    raw_video_bytes="$(dir_size_bytes "$RAW_VIDEO_DIR")"
    echo "[download] iteration=$iteration pending_download=$pending_download raw_backlog=$pending_process raw_video_bytes=$raw_video_bytes pruned_raw_videos=$pruned"
    # MAX_ITERATIONS=0 means unlimited.
    if [[ "$MAX_ITERATIONS" -gt 0 && "$iteration" -gt "$MAX_ITERATIONS" ]]; then
      echo "[download] reached max iterations: $MAX_ITERATIONS"
      break
    fi
    if [[ "$pending_download" -eq 0 ]]; then
      echo "[download] nothing left to download"
      break
    fi
    # Backpressure on the number of unprocessed raw videos.
    if [[ "$pending_process" -ge "$RAW_BACKLOG_LIMIT" ]]; then
      echo "[download] backpressure: raw backlog $pending_process >= limit $RAW_BACKLOG_LIMIT"
      sleep "$IDLE_SLEEP_SECONDS"
      continue
    fi
    # Backpressure on raw-video disk usage (0 disables this check).
    if [[ "$MAX_RAW_VIDEO_BYTES" -gt 0 && "$raw_video_bytes" -ge "$MAX_RAW_VIDEO_BYTES" ]]; then
      echo "[download] backpressure: raw_video_bytes $raw_video_bytes >= limit $MAX_RAW_VIDEO_BYTES"
      sleep "$IDLE_SLEEP_SECONDS"
      continue
    fi
    # Tolerate transient pipeline failures: log, sleep, retry.
    if ! run_download_stage "$DOWNLOAD_BATCH_SIZE"; then
      echo "[download] pipeline01 failed; retry after sleep"
      sleep "$IDLE_SLEEP_SECONDS"
    fi
  done
}
process_loop() {
  # Keep running pipeline02 until the raw backlog is empty AND no
  # downloads are pending (so no new raw videos can appear).
  local round=0
  while :; do
    round=$((round + 1))
    local removed
    removed="$(prune_processed_raw_videos)"
    local todo_download todo_process
    todo_download="$(count_pending_downloads)"
    todo_process="$(count_pending_process)"
    echo "[process] iteration=$round pending_download=$todo_download raw_backlog=$todo_process pruned_raw_videos=$removed"
    # MAX_ITERATIONS=0 means unlimited.
    if [[ "$MAX_ITERATIONS" -gt 0 && "$round" -gt "$MAX_ITERATIONS" ]]; then
      echo "[process] reached max iterations: $MAX_ITERATIONS"
      break
    fi
    if [[ "$todo_process" -eq 0 ]]; then
      # Nothing on disk: either fully done, or the downloader is still
      # producing -- wait and re-check.
      if [[ "$todo_download" -eq 0 ]]; then
        echo "[process] nothing left to process"
        break
      fi
      sleep "$IDLE_SLEEP_SECONDS"
      continue
    fi
    # Tolerate transient pipeline failures: log, sleep, retry.
    run_process_stage "$PROCESS_BATCH_SIZE" || {
      echo "[process] pipeline02 failed; retry after sleep"
      sleep "$IDLE_SLEEP_SECONDS"
    }
  done
}
upload_loop() {
  # Repeatedly run pipeline03. While upstream stages still produce data,
  # waits for a full batch (TARGET_BYTES or TARGET_FOLDERS of complete,
  # unuploaded folders); once download+process drain, flushes the rest.
  local iteration=0
  while true; do
    iteration=$((iteration + 1))
    local pruned
    pruned="$(prune_processed_raw_videos)"
    local pending_download pending_process complete_pending_upload complete_pending_upload_bytes
    pending_download="$(count_pending_downloads)"
    pending_process="$(count_pending_process)"
    complete_pending_upload="$(count_complete_pending_upload)"
    complete_pending_upload_bytes="$(bytes_complete_pending_upload)"
    echo "[upload] iteration=$iteration pending_download=$pending_download raw_backlog=$pending_process complete_pending_upload=$complete_pending_upload complete_pending_upload_bytes=$complete_pending_upload_bytes pruned_raw_videos=$pruned"
    # MAX_ITERATIONS=0 means unlimited.
    if [[ "$MAX_ITERATIONS" -gt 0 && "$iteration" -gt "$MAX_ITERATIONS" ]]; then
      echo "[upload] reached max iterations: $MAX_ITERATIONS"
      break
    fi
    if [[ "$complete_pending_upload" -eq 0 ]]; then
      # Nothing uploadable; stop only when upstream stages are drained.
      if [[ "$pending_download" -eq 0 && "$pending_process" -eq 0 ]]; then
        echo "[upload] nothing left to upload"
        break
      fi
      sleep "$IDLE_SLEEP_SECONDS"
      continue
    fi
    # Below BOTH thresholds while upstream still produces: wait for a
    # fuller batch rather than uploading many tiny archives.
    if [[ "$complete_pending_upload_bytes" -lt "$TARGET_BYTES" && "$complete_pending_upload" -lt "$TARGET_FOLDERS" && ( "$pending_download" -gt 0 || "$pending_process" -gt 0 ) ]]; then
      sleep "$IDLE_SLEEP_SECONDS"
      continue
    fi
    # Final flush (no upstream work left) may go below the byte target.
    local require_target=1
    if [[ "$pending_download" -eq 0 && "$pending_process" -eq 0 ]]; then
      require_target=0
    fi
    # Tolerate transient pipeline failures: log, sleep, retry.
    if ! run_upload_stage "$require_target"; then
      echo "[upload] pipeline03 failed; retry after sleep"
      sleep "$IDLE_SLEEP_SECONDS"
    fi
  done
}
cleanup_background_jobs() {
  # Best-effort SIGTERM for every still-running PID passed as an
  # argument; empty strings and already-dead PIDs are skipped silently.
  local pid
  for pid in "$@"; do
    if [[ -n "$pid" ]] && kill -0 "$pid" 2>/dev/null; then
      kill "$pid" 2>/dev/null || true
    fi
  done
}
run_all_loop() {
  # Run the three stage loops concurrently. PIDs are kept in globals so
  # the single-quoted trap expands them at signal time; the trap tears
  # everything down on INT/TERM and on early exit. Under "set -e", a
  # loop exiting non-zero makes its wait fail, which aborts this
  # function and lets the EXIT trap kill the surviving loops.
  DOWNLOAD_LOOP_PID=""
  PROCESS_LOOP_PID=""
  UPLOAD_LOOP_PID=""
  download_loop &
  DOWNLOAD_LOOP_PID=$!
  process_loop &
  PROCESS_LOOP_PID=$!
  upload_loop &
  UPLOAD_LOOP_PID=$!
  trap 'cleanup_background_jobs "$DOWNLOAD_LOOP_PID" "$PROCESS_LOOP_PID" "$UPLOAD_LOOP_PID"' INT TERM EXIT
  # Wait for each loop; order does not matter, all must finish.
  wait "$DOWNLOAD_LOOP_PID"
  wait "$PROCESS_LOOP_PID"
  wait "$UPLOAD_LOOP_PID"
  trap - INT TERM EXIT
}
# Dispatch on the requested stage; "all" runs the three loops in
# parallel with backpressure between them. STAGE was validated above,
# so no default arm is needed.
case "$STAGE" in
  download)
    run_download_stage
    ;;
  process)
    run_process_stage
    ;;
  upload)
    run_upload_stage
    ;;
  all)
    run_all_loop
    ;;
esac