Hoyant-Su committed on
Commit
d51e5a3
·
1 Parent(s): ac9f14b

Update ShellOps dataset

Browse files
Files changed (1) hide show
  1. shellops_pro/verify.py +0 -90
shellops_pro/verify.py DELETED
@@ -1,90 +0,0 @@
1
- """Sanity-check the generated shellops_pro dataset.
2
- For every row:
3
- * re-run `gt_bash` against a fresh copy of init_dir
4
- * confirm stdout matches `expected_text` exactly (for string/hybrid)
5
- * confirm resulting file tree byte-matches post_files (for files/hybrid)
6
- * verify file count, file-type diversity, and total char volume
7
- """
8
-
9
- import shutil
10
- import subprocess
11
- import tempfile
12
- from collections import Counter
13
- from pathlib import Path
14
- import pandas as pd
15
-
16
# Presumably the repository root (three levels above this file) — ROOT is
# not referenced anywhere in the visible code; TODO confirm before relying on it.
ROOT = Path(__file__).resolve().parents[3]
# Directory containing this script; test.parquet is loaded from here in main().
DATA_DIR = Path(__file__).resolve().parent
-
19
def write_tree(dst: Path, files: list) -> None:
    """Materialise a file tree under *dst*.

    Args:
        dst: root directory; created (with parents) if missing.
        files: sequence of ``{"path": str, "content": str}`` entries; each
            path is interpreted relative to *dst* and any intermediate
            directories are created.
    """
    dst.mkdir(parents=True, exist_ok=True)
    for entry in files:
        p = dst / entry["path"]
        p.parent.mkdir(parents=True, exist_ok=True)
        # Pin the encoding: the original relied on the locale default, which
        # makes the written bytes machine-dependent.
        p.write_text(entry["content"], encoding="utf-8")
-
26
def read_tree(src: Path) -> dict:
    """Return ``{relative_posix_path: content}`` for every file under *src*.

    Directories are skipped; keys use forward slashes regardless of platform
    so trees compare equal across operating systems.
    """
    result = {}
    for p in sorted(src.rglob("*")):
        if p.is_file():
            # Pin the encoding to match write_tree's fixed utf-8 output
            # instead of relying on the locale default.
            result[p.relative_to(src).as_posix()] = p.read_text(encoding="utf-8")
    return result
-
33
def verify_row(row: dict) -> None:
    """Re-execute one task's gold command and check its recorded outcomes.

    Runs ``gt_bash`` inside a throwaway copy of the task's initial file tree,
    then asserts the exit status, the captured stdout (string/hybrid tasks),
    and the resulting file tree (files/hybrid tasks) against the row.
    """
    with tempfile.TemporaryDirectory() as sandbox:
        workdir = Path(sandbox) / "work"
        write_tree(workdir, list(row["pre_files"]))

        proc = subprocess.run(
            ["bash", "-c", row["gt_bash"]],
            cwd=workdir,
            capture_output=True,
            text=True,
        )
        assert proc.returncode == 0, (
            f"{row['id']} gt_bash failed: rc={proc.returncode} stderr={proc.stderr!r}"
        )

        task_type = row["task_type"]
        if task_type in ("string", "hybrid"):
            # Trailing newlines are not significant for the string comparison.
            stdout = proc.stdout.rstrip("\n")
            assert stdout == row["expected_text"], (
                f"{row['id']} stdout mismatch:\nexpected={row['expected_text']!r}\n"
                f"got={stdout!r}"
            )
        if task_type in ("files", "hybrid"):
            observed = read_tree(workdir)
            wanted = {e["path"]: e["content"] for e in row["post_files"]}
            # Break the mismatch down three ways so the assertion message
            # points at the exact divergence.
            extra = set(observed) - set(wanted)
            missing = set(wanted) - set(observed)
            changed = [
                key
                for key, text in observed.items()
                if key in wanted and wanted[key] != text
            ]
            assert observed == wanted, (
                f"{row['id']} file tree mismatch:\n"
                f"only_in_actual={extra}\n"
                f"only_in_expected={missing}\n"
                f"content_diffs={changed}"
            )
-
69
def main() -> None:
    """Verify every row of test.parquet and print a per-task summary table."""
    df = pd.read_parquet(DATA_DIR / "test.parquet")
    assert 1 <= len(df) <= 150, f"unexpected row count: {len(df)}"

    print(f"{'id':<24} {'type':<7} {'nfiles':>6} {'nbytes':>9} {'exts':<40}")
    grand_total = 0
    for _, row in df.iterrows():
        entries = list(row["pre_files"])
        file_count = len(entries)
        char_count = sum(len(e["content"]) for e in entries)
        grand_total += char_count
        # Suffix histogram gives a quick view of file-type diversity per task.
        suffix_counts = Counter(
            Path(e["path"]).suffix or "<noext>" for e in entries
        )
        ext_list = ",".join(sorted(suffix_counts))
        assert file_count >= 30, f"{row['id']}: n_files={file_count}"
        verify_row(row.to_dict())
        print(
            f"{row['id']:<24} {row['task_type']:<7} {file_count:>6} {char_count:>9} {ext_list:<40}"
        )

    print(f"\nall {len(df)} tasks verified. total pre_files bytes: {grand_total:,}")
-
89
# Script entry point: run the full dataset verification when executed directly.
if __name__ == "__main__":
    main()