| |
| """Download 300GB from HuggingFace datasets - FineWeb, RedPajama, etc.""" |
| import os |
| import json |
| from datasets import load_dataset |
| from huggingface_hub import snapshot_download |
| import time |
|
|
# Global scrape budget and destination for every JSONL shard this script writes.
TARGET_GB = 300                          # stop once this many GB are on disk
OUTPUT_DIR = "/workspace/scraped_data"   # all shards land here

# Idempotent: succeeds whether or not the directory already exists.
os.makedirs(OUTPUT_DIR, exist_ok=True)
|
|
| |
# Candidate corpora, tried in order until the size target is met.
# Each entry is a (hub_dataset_id, config_name, split) triple; a None
# config uses the dataset default and a None split falls back to "train".
DATASETS = [
    ("HuggingFaceFW/fineweb", "sample-10BT", None),
    ("togethercomputer/RedPajama-Data-V2", "sample", "en_head_middle"),
    ("allenai/dolma", "v1_6-sample", None),
    ("cerebras/SlimPajama-627B", None, None),
]
|
|
def get_size_gb(path=None):
    """Return the total size of all files under *path*, in decimal GB (1e9 bytes).

    Args:
        path: Directory tree to measure. Defaults to ``OUTPUT_DIR`` (resolved
            at call time, so the module constant is only needed when the
            default is actually used).

    Returns:
        float: Cumulative file size in GB; 0.0 for an empty or missing tree
        (``os.walk`` simply yields nothing for a nonexistent path).
    """
    if path is None:
        path = OUTPUT_DIR
    total_bytes = 0
    for root, _dirs, files in os.walk(path):
        for fname in files:
            try:
                total_bytes += os.path.getsize(os.path.join(root, fname))
            except OSError:
                # A shard can disappear between the walk and the stat (the
                # writer runs concurrently with this size check), and broken
                # symlinks also raise — skip rather than abort the scan.
                continue
    return total_bytes / 1e9
|
|
def _write_shard(batch, name, shard_num):
    """Write one JSONL shard of *batch* records under OUTPUT_DIR and return its path."""
    outfile = f"{OUTPUT_DIR}/{name.replace('/', '_')}_{shard_num:05d}.jsonl"
    with open(outfile, 'w') as f:
        for item in batch:
            f.write(json.dumps(item) + "\n")
    return outfile


def download_streaming(name, config, split):
    """Stream a HF Hub dataset into JSONL shards, avoiding OOM on huge corpora.

    Examples are buffered in memory and flushed to disk every 10 000 records;
    the on-disk total is re-measured after each shard.

    Args:
        name:   Hub dataset id, e.g. "HuggingFaceFW/fineweb".
        config: Config name passed to ``load_dataset``, or None for the default.
        split:  Split to stream; falls back to "train" when None/empty.

    Returns:
        True once the global TARGET_GB on-disk target is reached;
        False when the dataset is exhausted first or any error occurs
        (errors are reported and swallowed so the caller can try the
        next dataset in DATASETS).
    """
    print(f"\n📥 Downloading {name} ({config or 'default'})...")
    try:
        ds = load_dataset(name, config, split=split or "train",
                          streaming=True, trust_remote_code=True)

        shard_num = 0
        batch = []
        batch_size = 10000

        for i, example in enumerate(ds):
            # Datasets disagree on the text field name; fall back to the raw
            # example repr so no record is silently dropped.
            text = example.get("text") or example.get("content") or str(example)
            batch.append({"text": text, "source": name})

            if len(batch) >= batch_size:
                _write_shard(batch, name, shard_num)
                batch = []
                shard_num += 1

                size_gb = get_size_gb()
                print(f" Progress: {size_gb:.1f} GB / {TARGET_GB} GB ({i:,} examples)")

                if size_gb >= TARGET_GB:
                    print(f"✅ Target reached!")
                    return True

        # BUG FIX: the original only wrote full shards, so when the stream was
        # exhausted any trailing partial batch (up to 9 999 examples — or the
        # whole dataset, for small samples) was silently discarded. Flush it.
        if batch:
            _write_shard(batch, name, shard_num)

    except Exception as e:
        # Broad catch is deliberate at this best-effort boundary: report the
        # failure and let the caller move on to the next dataset.
        print(f" Error: {e}")
    return False
|
|
if __name__ == "__main__":
    # Entry point: walk the dataset list until the on-disk total hits the target.
    print(f"🚀 Goddess HF Scraper - Target: {TARGET_GB} GB")
    print(f"Output: {OUTPUT_DIR}")

    started = time.time()

    for ds_name, ds_config, ds_split in DATASETS:
        # Re-check before each dataset: an earlier run (or partial progress
        # below the per-dataset return) may already have filled the quota.
        if get_size_gb() >= TARGET_GB:
            break
        if download_streaming(ds_name, ds_config, ds_split):
            break

    elapsed = time.time() - started
    final_size = get_size_gb()
    print(f"\n✨ Done! {final_size:.1f} GB in {elapsed/3600:.1f} hours")
|
|