from __future__ import annotations

import argparse
import json
import os
import random
import subprocess
import threading
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
from typing import Dict, List, Set, Tuple

from tqdm import tqdm

VIDEO_EXTS = {".mp4", ".mov", ".mkv", ".avi", ".webm", ".m4v"}
|
|
|
|
def parse_args() -> argparse.Namespace:
    p = argparse.ArgumentParser(
        description=(
            "Generate delayed/early random-shifted videos with ffmpeg "
            "(video stream-copied, audio filtered and re-encoded to AAC)."
        )
    )
    p.add_argument(
        "--input-dir",
        type=Path,
        default=Path("/home/ubuntu/video_source/original"),
        help="Root directory containing original videos.",
    )
    p.add_argument(
        "--output-dir",
        type=Path,
        default=Path("/home/ubuntu/video_source/random_shift_video"),
        help="Output root. Script writes to delay/ and early/ subfolders.",
    )
    p.add_argument(
        "--metadata-jsonl",
        type=Path,
        default=Path("/home/ubuntu/video_source/random_shift_video/shift_metadata.jsonl"),
        help="Append-only metadata JSONL file.",
    )
    p.add_argument(
        "--min-shift",
        type=float,
        default=1.80,
        help="Minimum random shift in seconds.",
    )
    p.add_argument(
        "--max-shift",
        type=float,
        default=2.50,
        help="Maximum random shift in seconds.",
    )
    p.add_argument(
        "--seed",
        type=int,
        default=42,
        help="Random seed for reproducibility.",
    )
    p.add_argument(
        "--workers",
        type=int,
        default=os.cpu_count() or 4,
        help="Max worker threads. Defaults to os.cpu_count().",
    )
    return p.parse_args()
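
# Illustrative invocation using the argparse defaults above (the script filename
# here is only an assumption; adjust paths and worker count to the actual setup):
#
#   python random_shift_videos.py \
#       --input-dir /home/ubuntu/video_source/original \
#       --output-dir /home/ubuntu/video_source/random_shift_video \
#       --min-shift 1.80 --max-shift 2.50 --seed 42 --workers 8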
|
|
|
|
def run_cmd(cmd: List[str]) -> subprocess.CompletedProcess:
    # check=True raises CalledProcessError on a non-zero ffmpeg/ffprobe exit status.
    return subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
|
|
|
|
def discover_videos(root: Path) -> List[Path]:
    if not root.exists():
        raise FileNotFoundError(f"Input directory not found: {root}")
    out: List[Path] = []
    for p in root.rglob("*"):
        if p.is_file() and p.suffix.lower() in VIDEO_EXTS:
            out.append(p)
    out.sort()
    return out
|
|
|
|
def has_audio_stream(video_path: Path) -> bool:
    """Return True if ffprobe reports at least one audio stream in the file."""
    cmd = [
        "ffprobe",
        "-v",
        "error",
        "-select_streams",
        "a:0",
        "-show_entries",
        "stream=index",
        "-of",
        "csv=p=0",
        str(video_path),
    ]
    out = run_cmd(cmd).stdout.strip()
    return bool(out)
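
# The check above is roughly equivalent to running:
#   ffprobe -v error -select_streams a:0 -show_entries stream=index -of csv=p=0 INPUT
# which prints the index of the first audio stream; empty output means no audio.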
|
|
|
|
def get_duration_seconds(video_path: Path) -> float:
    """Return the container duration in seconds as reported by ffprobe."""
    cmd = [
        "ffprobe",
        "-v",
        "error",
        "-show_entries",
        "format=duration",
        "-of",
        "default=noprint_wrappers=1:nokey=1",
        str(video_path),
    ]
    out = run_cmd(cmd).stdout.strip()
    return float(out)
|
|
|
|
def load_existing_records(path: Path) -> Set[Tuple[str, str]]:
    """
    Return the set of (original_video, shift_direction) pairs that already have
    metadata records, so completed work can be skipped on re-runs.
    """
    out: Set[Tuple[str, str]] = set()
    if not path.exists():
        return out
    with path.open("r", encoding="utf-8") as f:
        for line in f:
            s = line.strip()
            if not s:
                continue
            try:
                row = json.loads(s)
            except json.JSONDecodeError:
                continue
            original_video = str(row.get("original_video", "")).strip()
            direction = str(row.get("shift_direction", "")).strip()
            if original_video and direction in {"delayed", "early"}:
                out.add((original_video, direction))
    return out
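
# Each metadata line is a standalone JSON object. An illustrative record (values
# are made up, keys match what process_one_video writes) looks like:
#   {"original_video": "clips/demo.mp4",
#    "shifted_video_name": "demo_delay_2.13s.mp4",
#    "shifted_video_path": "/home/ubuntu/video_source/random_shift_video/delay/clips/demo_delay_2.13s.mp4",
#    "shift_direction": "delayed",
#    "exact_shift_seconds": 2.134567}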
|
|
|
|
def append_metadata_threadsafe(path: Path, record: Dict, lock: threading.Lock) -> None:
    """Append one JSON record per line; the lock serializes writes across worker threads."""
    path.parent.mkdir(parents=True, exist_ok=True)
    with lock:
        with path.open("a", encoding="utf-8") as f:
            f.write(json.dumps(record, ensure_ascii=False) + "\n")
|
|
|
|
def make_delayed_video(src: Path, dst: Path, shift_sec: float, duration: float) -> None:
    """Delay the audio by shift_sec: prepend silence, then trim back to the original duration."""
    delay_ms = int(round(shift_sec * 1000))
    # adelay pads the start of every channel (all=1) with delay_ms of silence;
    # atrim clips the padded audio back to the original container duration.
    af = f"adelay={delay_ms}:all=1,atrim=0:{duration:.6f}"
    cmd = [
        "ffmpeg",
        "-y",
        "-i",
        str(src),
        "-map",
        "0:v:0",
        "-map",
        "0:a:0",
        "-c:v",
        "copy",
        "-af",
        af,
        "-c:a",
        "aac",
        str(dst),
    ]
    run_cmd(cmd)
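
# For example, with a 2.0 s shift on a 120 s clip the command built above is roughly:
#   ffmpeg -y -i IN.mp4 -map 0:v:0 -map 0:a:0 -c:v copy \
#          -af "adelay=2000:all=1,atrim=0:120.000000" -c:a aac OUT.mp4
# Video packets are stream-copied; only the audio is filtered and re-encoded.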
|
|
|
|
def make_early_video(src: Path, dst: Path, shift_sec: float, duration: float) -> None:
    """Advance the audio by shift_sec: drop its first shift_sec, then pad the tail with silence."""
    # atrim drops the first shift_sec of audio, asetpts rebases timestamps to zero,
    # apad appends shift_sec of silence, and the final atrim keeps the original duration.
    af = (
        f"atrim=start={shift_sec:.6f},"
        "asetpts=PTS-STARTPTS,"
        f"apad=pad_dur={shift_sec:.6f},"
        f"atrim=0:{duration:.6f}"
    )
    cmd = [
        "ffmpeg",
        "-y",
        "-i",
        str(src),
        "-map",
        "0:v:0",
        "-map",
        "0:a:0",
        "-c:v",
        "copy",
        "-af",
        af,
        "-c:a",
        "aac",
        str(dst),
    ]
    run_cmd(cmd)
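
# For the same illustrative 2.0 s shift on a 120 s clip, the early variant builds roughly:
#   ffmpeg -y -i IN.mp4 -map 0:v:0 -map 0:a:0 -c:v copy \
#          -af "atrim=start=2.000000,asetpts=PTS-STARTPTS,apad=pad_dur=2.000000,atrim=0:120.000000" \
#          -c:a aac OUT.mp4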
|
|
|
|
def build_output_path(
    output_root: Path,
    direction_folder: str,
    rel_video: Path,
    shift_tag: str,
) -> Path:
    """Mirror the input's relative layout under delay/ or early/ and tag the filename with the shift."""
    base_dir = output_root / direction_folder / rel_video.parent
    base_dir.mkdir(parents=True, exist_ok=True)
    stem = rel_video.stem
    suffix = rel_video.suffix if rel_video.suffix else ".mp4"
    if direction_folder == "delay":
        name = f"{stem}_delay_{shift_tag}s{suffix}"
    else:
        name = f"{stem}_early_{shift_tag}s{suffix}"
    return base_dir / name
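
# Illustrative mapping: a source video at clips/demo.mp4 with a 2.13 s delayed shift
# is written to <output-dir>/delay/clips/demo_delay_2.13s.mp4; its early counterpart
# gets an independently drawn shift under <output-dir>/early/clips/.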
|
|
|
|
def process_one_video(
    src: Path,
    input_root: Path,
    output_root: Path,
    metadata_path: Path,
    existing_records: Set[Tuple[str, str]],
    meta_lock: threading.Lock,
    min_shift: float,
    max_shift: float,
    rng_seed: int,
) -> Dict[str, int]:
    """Generate the delayed and early variants for one source video and record their metadata."""
    rel = src.relative_to(input_root)
    original_key = str(rel)

    counters = {"generated": 0, "skipped": 0, "failed": 0, "no_audio": 0}
    # Seed a per-file RNG from the global seed and the relative path so shift
    # values are reproducible regardless of thread scheduling.
    local_rng = random.Random(f"{rng_seed}:{original_key}")

    try:
        if not has_audio_stream(src):
            counters["no_audio"] += 1
            return counters
        duration = get_duration_seconds(src)
    except Exception:
        counters["failed"] += 1
        return counters
|
|
    # Draw both shifts up front so each (video, direction) pair gets a stable value
    # even when one direction is skipped on a resumed run.
    shifts = {
        "delayed": local_rng.uniform(min_shift, max_shift),
        "early": local_rng.uniform(min_shift, max_shift),
    }

    for direction, folder in [("delayed", "delay"), ("early", "early")]:
        if (original_key, direction) in existing_records:
            counters["skipped"] += 1
            continue

        shift_val = shifts[direction]
        shift_tag = f"{shift_val:.2f}"
        out_path = build_output_path(output_root, folder, rel, shift_tag)

        if out_path.exists():
            counters["skipped"] += 1
            continue
|
|
        try:
            if direction == "delayed":
                make_delayed_video(src, out_path, shift_val, duration)
            else:
                make_early_video(src, out_path, shift_val, duration)

            record = {
                "original_video": original_key,
                "shifted_video_name": out_path.name,
                "shifted_video_path": str(out_path),
                "shift_direction": direction,
                "exact_shift_seconds": round(float(shift_val), 6),
            }
            append_metadata_threadsafe(metadata_path, record, meta_lock)
            counters["generated"] += 1
        except Exception:
            counters["failed"] += 1

    return counters
|
|
|
|
def main() -> None:
    args = parse_args()
    if args.min_shift <= 0 or args.max_shift <= 0 or args.min_shift > args.max_shift:
        raise ValueError("Invalid shift range: --min-shift and --max-shift must be positive and min <= max.")

    args.output_dir.mkdir(parents=True, exist_ok=True)
    (args.output_dir / "delay").mkdir(parents=True, exist_ok=True)
    (args.output_dir / "early").mkdir(parents=True, exist_ok=True)

    videos = discover_videos(args.input_dir)
    if not videos:
        print(f"No videos found under: {args.input_dir}")
        return
|
|
    existing_records = load_existing_records(args.metadata_jsonl)
    meta_lock = threading.Lock()

    totals = {"generated": 0, "skipped": 0, "failed": 0, "no_audio": 0}
    futures = []
    with ThreadPoolExecutor(max_workers=max(1, int(args.workers))) as ex:
        for src in videos:
            futures.append(
                ex.submit(
                    process_one_video,
                    src,
                    args.input_dir,
                    args.output_dir,
                    args.metadata_jsonl,
                    existing_records,
                    meta_lock,
                    args.min_shift,
                    args.max_shift,
                    args.seed,
                )
            )

        for fut in tqdm(as_completed(futures), total=len(futures), desc="Shifting videos", unit="video"):
            res = fut.result()
            for k in totals:
                totals[k] += res.get(k, 0)

    print("========== Shift Generation Summary ==========")
    print(f"Input videos             : {len(videos)}")
    print(f"Generated shifted videos : {totals['generated']}")
    print(f"Skipped                  : {totals['skipped']}")
    print(f"No-audio skipped         : {totals['no_audio']}")
    print(f"Failed                   : {totals['failed']}")
    print(f"Output root              : {args.output_dir}")
    print(f"Metadata JSONL           : {args.metadata_jsonl}")
    print("==============================================")
|
|
|
|
if __name__ == "__main__":
    main()
|
|
|
|