# code-202604_v2 / hf_upload/setup_data.sh
# Rakancorle11 — snapshot 20260424-2142 (commit 0a4deb9, verified)
#!/usr/bin/env bash
# Download all evaluation data to /opt/dlami/nvme (or WORK_ROOT).
#
# Downloads:
# 1) Video-MME benchmark videos
# 2) LVBench benchmark videos
# 3) Sync eval: original oops videos, random-shift videos, extracted audio
# 4) kto_training_data_v2_test.jsonl (sync eval test set, also in repo data/)
#
# Requires: conda env already set up (bash setup_env.sh first).
#
# Usage:
# bash setup_data.sh
# WORK_ROOT=/my/fast/disk bash setup_data.sh
#
set -euo pipefail
# All knobs are overridable from the environment (VAR="${VAR:-default}").
WORK_ROOT="${WORK_ROOT:-/opt/dlami/nvme}"  # fast local disk receiving all data
CONDA_ENV="${CONDA_ENV:-video}"  # conda env created by setup_env.sh
INSTALL_DIR="${INSTALL_DIR:-${HOME}/anaconda3}"  # conda install prefix
HF_CACHE="${HF_CACHE:-${HOME}/hf_datasets}"  # raw HF downloads (tarballs) live here
# Directory containing this script; used to locate scripts/ and data/ below.
REPO_ROOT="$(cd "$(dirname "$0")" && pwd)"
# Sync eval paths (align with eval_dpo_sync.py --data-root default)
DATA_ROOT="${WORK_ROOT}/video_source"
ORIGINAL_ROOT="${DATA_ROOT}/original"
ORIGINAL_OOPS_DIR="${ORIGINAL_ROOT}/uag_oops"  # unmodified source videos
RANDOM_SHIFT_DIR="${DATA_ROOT}/random_shift_video"  # holds delay/ and early/ variants
EXTRACTED_AUDIO_DIR="${DATA_ROOT}/extracted_audio"  # per-video .wav tracks
# Print a blank line, then the arguments as a bold-cyan ">>>" banner.
# %b keeps echo -e semantics: backslash escapes in the arguments expand.
log() { printf '\n\033[1;36m>>> %b\033[0m\n' "$*"; }
# ── Activate conda ──────────────────────────────────────────────────────────
# Prefer sourcing conda.sh from INSTALL_DIR (works even when `conda` is not
# yet on PATH); otherwise fall back to the bash hook of whatever `conda`
# binary is found. Fail hard when neither is available.
if [[ -f "${INSTALL_DIR}/etc/profile.d/conda.sh" ]]; then
source "${INSTALL_DIR}/etc/profile.d/conda.sh"
elif command -v conda &>/dev/null; then
eval "$(conda shell.bash hook)"
else
echo "Error: conda not found. Run setup_env.sh first."
exit 1
fi
conda activate "${CONDA_ENV}"
# ── Detect HF CLI ───────────────────────────────────────────────────────────
# Prefer the modern `hf` entry point, falling back to the legacy
# `huggingface-cli`; abort when neither is on PATH.
HF_CLI=""
for candidate in hf huggingface-cli; do
  if command -v "${candidate}" &>/dev/null; then
    HF_CLI="${candidate}"
    break
  fi
done
if [[ -z "${HF_CLI}" ]]; then
  echo "Error: neither 'hf' nor 'huggingface-cli' found. Run setup_env.sh first."
  exit 1
fi
echo "Using HF CLI: ${HF_CLI}"
# ── Helpers ─────────────────────────────────────────────────────────────────
# True when $1 contains at least one *.mp4 directly (non-recursive glob).
has_mp4_files() {
  compgen -G "${1}/*.mp4" > /dev/null 2>&1
}
# True when at least one *.wav exists anywhere under $1 (recursive search;
# -quit stops find at the first match).
has_wav_files() {
  find "$1" -name '*.wav' -print -quit 2>/dev/null | grep -q .
}
# Move every *.mp4 found in subdirectories of $1 up into $1 itself.
# The body runs in a subshell so the globstar/nullglob toggles cannot leak
# into — or clobber pre-existing settings of — the caller's shell, and the
# loop variable stays local instead of leaking as a global (both were bugs
# in the previous version, which unconditionally `shopt -u`'d on exit).
# `mv -n` never overwrites a file already present at the top level.
flatten_mp4s() (
  local dir="$1" src
  shopt -s globstar nullglob
  for src in "${dir}"/**/*.mp4; do
    if [[ "$(dirname "$src")" != "${dir}" ]]; then
      mv -n "$src" "${dir}/"
    fi
  done
)
###############################################################################
# 1) Video-MME
###############################################################################
log "[1/5] Video-MME"
VIDEOMME_DIR="${WORK_ROOT}/videomme"
# Download unless .mp4 files already exist in the extracted data/data layout.
if ! { [[ -d "${VIDEOMME_DIR}/data/data" ]] && has_mp4_files "${VIDEOMME_DIR}/data/data"; }; then
  python "${REPO_ROOT}/scripts/download_videomme.py" --output-dir "${VIDEOMME_DIR}"
else
  echo " Already present at ${VIDEOMME_DIR}/data/data; skipping."
fi
###############################################################################
# 2) LVBench
###############################################################################
log "[2/5] LVBench"
LVBENCH_DIR="${WORK_ROOT}/lvbench"
# Download unless the directory exists and already holds .mp4 files.
if [[ ! -d "${LVBENCH_DIR}" ]] || ! has_mp4_files "${LVBENCH_DIR}"; then
  python "${REPO_ROOT}/scripts/download_lvbench.py" --output-dir "${LVBENCH_DIR}"
else
  echo " Already present at ${LVBENCH_DIR}; skipping."
fi
###############################################################################
# 3) Original oops videos (hasnat79/ual_bench → original/uag_oops/)
###############################################################################
log "[3/5] Original oops videos (sync eval)"
mkdir -p "${ORIGINAL_ROOT}" "${HF_CACHE}/ual_bench"
if ! has_mp4_files "${ORIGINAL_OOPS_DIR}"; then
  # Fetch the tarball into the HF cache, then unpack into original/.
  "${HF_CLI}" download hasnat79/ual_bench \
    --repo-type dataset \
    --include "uag_oops.tar" \
    --local-dir "${HF_CACHE}/ual_bench"
  tar -xf "${HF_CACHE}/ual_bench/uag_oops.tar" -C "${ORIGINAL_ROOT}"
else
  echo " Already present at ${ORIGINAL_OOPS_DIR}; skipping."
fi
###############################################################################
# 4) Random-shift videos (Rakancorle11/random_shift_video → random_shift_video/{delay,early}/)
###############################################################################
log "[4/5] Random-shift videos (sync eval)"
mkdir -p "${RANDOM_SHIFT_DIR}" "${HF_CACHE}/random_shift_video"
if has_mp4_files "${RANDOM_SHIFT_DIR}/delay" && has_mp4_files "${RANDOM_SHIFT_DIR}/early"; then
  echo " Already present; skipping."
else
  "${HF_CLI}" download Rakancorle11/random_shift_video \
    --repo-type dataset \
    --local-dir "${HF_CACHE}/random_shift_video"
  # Unpack each shift direction into its own subdirectory, then pull any
  # nested .mp4s up to the top level of that subdirectory.
  for shift_dir in delay early; do
    mkdir -p "${RANDOM_SHIFT_DIR}/${shift_dir}"
    tar -xzf "${HF_CACHE}/random_shift_video/${shift_dir}.tar.gz" -C "${RANDOM_SHIFT_DIR}/${shift_dir}"
    flatten_mp4s "${RANDOM_SHIFT_DIR}/${shift_dir}"
  done
  # Keep the shift metadata next to the videos when the repo provides it.
  if [[ -f "${HF_CACHE}/random_shift_video/metadata/shift_metadata.jsonl" ]]; then
    cp "${HF_CACHE}/random_shift_video/metadata/shift_metadata.jsonl" "${RANDOM_SHIFT_DIR}/"
  fi
fi
###############################################################################
# 5) Extracted audio WAVs (Rakancorle11/extracted_audio → extracted_audio/)
###############################################################################
log "[5/5] Extracted audio (sync eval)"
mkdir -p "${EXTRACTED_AUDIO_DIR}" "${HF_CACHE}/extracted_audio"
if ! has_wav_files "${EXTRACTED_AUDIO_DIR}"; then
  "${HF_CLI}" download Rakancorle11/extracted_audio \
    --repo-type dataset \
    --include "extracted_audio.tar.gz" \
    --local-dir "${HF_CACHE}/extracted_audio"
  # Extracted at DATA_ROOT (one level above EXTRACTED_AUDIO_DIR) —
  # presumably the tarball carries the extracted_audio/ prefix; verify if changed.
  tar -xzf "${HF_CACHE}/extracted_audio/extracted_audio.tar.gz" -C "${DATA_ROOT}"
else
  echo " Already present; skipping."
fi
###############################################################################
# Copy test JSONL into DATA_ROOT (so --test-jsonl can default)
###############################################################################
TEST_JSONL_SRC="${REPO_ROOT}/data/kto_training_data_v2_test.jsonl"
TEST_JSONL_DST="${DATA_ROOT}/kto_training_data_v2_test.jsonl"
# Copy only when the repo ships the file and the destination is not already
# populated (never clobber an existing copy).
if [[ -f "${TEST_JSONL_SRC}" && ! -f "${TEST_JSONL_DST}" ]]; then
  cp "${TEST_JSONL_SRC}" "${TEST_JSONL_DST}"
  # Fixed mis-encoded arrow character in the user-facing message.
  echo "Copied test JSONL → ${TEST_JSONL_DST}"
fi
###############################################################################
# Summary
###############################################################################
# Echo every destination path plus quick .mp4/.wav counts so the user can
# sanity-check that each download step actually produced files.
echo ""
echo "========================================"
echo " Data setup complete"
echo "========================================"
echo " Video-MME videos: ${VIDEOMME_DIR}/data/data/"
echo " LVBench videos: ${LVBENCH_DIR}/"
echo " Sync originals: ${ORIGINAL_OOPS_DIR}/"
echo " Sync random_shift: ${RANDOM_SHIFT_DIR}/{delay,early}/"
echo " Sync extracted_audio: ${EXTRACTED_AUDIO_DIR}/"
echo " Sync test JSONL: ${TEST_JSONL_DST}"
echo ""
echo " original mp4: $(find "${ORIGINAL_OOPS_DIR}" -name '*.mp4' 2>/dev/null | wc -l)"
echo " random_shift delay: $(find "${RANDOM_SHIFT_DIR}/delay" -name '*.mp4' 2>/dev/null | wc -l)"
echo " random_shift early: $(find "${RANDOM_SHIFT_DIR}/early" -name '*.mp4' 2>/dev/null | wc -l)"
echo " extracted_audio wav: $(find "${EXTRACTED_AUDIO_DIR}" -name '*.wav' 2>/dev/null | wc -l)"
echo ""
# FineVideo data is intentionally not fetched by this script: its pipeline
# requires Gemini API keys, so only the manual commands are printed.
echo "FineVideo pipeline: run these manually (requires Gemini API keys):"
echo " python code-202604/finevideo/filter_finevideo.py --target-count 1000 --early-stop"
echo " python code-202604/finevideo/download_finevideo_subset.py --max-videos 500"
echo " python code-202604/finevideo/extract_audio_finevideo.py"
echo " python code-202604/finevideo/generate_finevideo_sft.py --api-key YOUR_KEY"
echo " python code-202604/finevideo/generate_finevideo_dpo.py"
echo "========================================"