Datasets: the script below validates the FalseMemBench cases file (`data/cases.jsonl`).
| #!/usr/bin/env python3 | |
| import argparse | |
| import json | |
| from pathlib import Path | |
def main() -> int:
    """Validate the FalseMemBench JSONL dataset.

    For each non-blank line of the JSONL file, checks that:
      * the line parses as JSON,
      * the case ``id`` is unique across the whole file,
      * every id listed in ``relevant_ids`` refers to an entry in ``entries``.

    Returns:
        0 on success (all cases valid).

    Raises:
        SystemExit: with a diagnostic message on the first validation
            failure, including the offending line number or case id.
    """
    parser = argparse.ArgumentParser(description="Validate FalseMemBench JSONL data")
    # Default path: <repo root>/data/cases.jsonl, resolved relative to this script.
    parser.add_argument(
        "--data",
        default=str(Path(__file__).resolve().parents[1] / "data" / "cases.jsonl"),
    )
    args = parser.parse_args()
    dataset = Path(args.data)

    seen_ids = set()  # case ids observed so far, for duplicate detection
    # Explicit encoding so validation does not depend on the platform default.
    with dataset.open(encoding="utf-8") as f:
        for line_no, line in enumerate(f, 1):
            line = line.strip()
            if not line:
                # Tolerate blank lines between records.
                continue
            try:
                record = json.loads(line)
            except json.JSONDecodeError as err:
                # Report the offending line number, consistent with the
                # other diagnostics, instead of a bare traceback.
                raise SystemExit(f"invalid JSON at line {line_no}: {err}") from err
            case_id = record["id"]
            if case_id in seen_ids:
                raise SystemExit(f"duplicate case id at line {line_no}: {case_id}")
            seen_ids.add(case_id)
            # Every relevant id must name an entry present in this case.
            entry_ids = {entry["id"] for entry in record["entries"]}
            missing = [rid for rid in record["relevant_ids"] if rid not in entry_ids]
            if missing:
                raise SystemExit(f"case {case_id} has missing relevant ids: {missing}")
    print(f"validated {len(seen_ids)} cases from {dataset}")
    return 0
| if __name__ == "__main__": | |
| raise SystemExit(main()) | |