rmems committed on
Commit
86619ce
·
unverified ·
1 Parent(s): 0101ce4

Remove manual dataset config - let HF auto-detect

Browse files
dataset_info.json DELETED
@@ -1,19 +0,0 @@
1
- {
2
- "description": "Metis OLMoE Latent Telemetry: SNN routing research dataset with raw telemetry logs",
3
- "builder_name": "metis_ol_moe_latent_telemetry",
4
- "config_name": "default",
5
- "version": "1.0.0",
6
- "splits": {
7
- "test": {
8
- "name": "test",
9
- "num_bytes": 818486,
10
- "num_examples": 20000,
11
- "dataset_name": "metis-ol_mo_e-latent-telemetry"
12
- }
13
- },
14
- "supervised_keys": null,
15
- "builder_config": {
16
- "name": "default",
17
- "version": "1.0.0"
18
- }
19
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
metis_ol_moe_latent_telemetry.py DELETED
@@ -1,69 +0,0 @@
1
- import os
2
- import re
3
- import datasets
4
- from datasets import Dataset, Features, Value
5
-
6
- logger = datasets.logging.get_logger(__name__)
7
-
8
-
9
class MetisOlMOELatentTelemetry(datasets.GeneratorBasedBuilder):
    """Metis OLMoE Latent Telemetry dataset for SNN routing research.

    Scans the directory containing this script for raw bare-metal telemetry
    ``.txt`` logs and parses ``tick=... best_walker=... elapsed_us=...``
    records into a single ``test`` split.
    """

    VERSION = datasets.Version("1.0.0")

    def _info(self):
        """Return the dataset metadata: feature schema, description, homepage."""
        return datasets.DatasetInfo(
            description="Raw bare-metal telemetry logs from Metis SNN-quantized OLMoE model",
            features=Features(
                {
                    "tick": Value("int64"),
                    "best_walker": Value("int64"),
                    "elapsed_us": Value("int64"),
                    "source_file": Value("string"),
                }
            ),
            homepage="https://huggingface.co/datasets/rmems/Metis-OLMoE-Latent-Telemetry",
        )

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators.

        Walks the dataset root (the directory holding this script) and
        collects every ``.txt`` file whose name contains ``telemetry`` or
        ``map_olmoe``. All matching files feed the single TEST split.
        """
        # Fix: os.path.join() with a single argument was a no-op wrapper;
        # dirname of the absolute script path is equivalent and clearer.
        data_dir = os.path.dirname(os.path.abspath(__file__))

        # Find all .txt telemetry files anywhere under the dataset root.
        # (The unused `dirs` from os.walk is conventionally named `_dirs`.)
        txt_files = [
            os.path.join(root, name)
            for root, _dirs, files in os.walk(data_dir)
            for name in files
            if name.endswith(".txt") and ("telemetry" in name or "map_olmoe" in name)
        ]

        # Lazy %-style args avoid building the message when INFO is disabled.
        logger.info("Found %d telemetry files: %s", len(txt_files), txt_files)

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={"filepaths": txt_files},
            ),
        ]

    def _generate_examples(self, filepaths):
        """Yields examples from the telemetry text files.

        Each line beginning with ``tick=<n> best_walker=<n> elapsed_us=<n>``
        (``pattern.match`` anchors at the start of the line) becomes one
        example; blank and non-matching lines are skipped silently.
        """
        pattern = re.compile(r"tick=(\d+)\s+best_walker=(\d+)\s+elapsed_us=(\d+)")

        idx = 0  # monotonically increasing example key across all files
        for filepath in filepaths:
            filename = os.path.basename(filepath)
            with open(filepath, encoding="utf-8") as f:
                for line in f:
                    line = line.strip()
                    if not line:
                        continue
                    match = pattern.match(line)
                    if match:
                        tick, best_walker, elapsed_us = match.groups()
                        yield idx, {
                            "tick": int(tick),
                            "best_walker": int(best_walker),
                            "elapsed_us": int(elapsed_us),
                            "source_file": filename,
                        }
                        idx += 1