#!/bin/bash
# Tokenize the merged "tasky" C4 dataset with the BLOOM tokenizer via
# Megatron-DeepSpeed's tools/preprocess_data.py.
#
# Required env var:
#   six_ALL_CCFRWORK - shared work directory (Jean Zay $six_ALL_CCFRWORK).

# Trace every command (-x) and fail fast on errors, unset variables,
# and failed pipeline stages.
set -x -euo pipefail

# Abort with a clear message if the shared work dir is not configured.
: "${six_ALL_CCFRWORK:?six_ALL_CCFRWORK must be set}"

# Load the tr13f-6B3-ml-t0 training environment (modules, conda env, …).
source "$six_ALL_CCFRWORK/start-tr13f-6B3-ml-t0"

MEGATRON_DEEPSPEED_REPO="$six_ALL_CCFRWORK/code/tr13f-6B3-ml-t0/Megatron-DeepSpeed"
TOKENIZER_PATH="bigscience/tokenizer"
|
|
# Concatenate all dataset shards, drop exact duplicate lines (sort -u),
# then shuffle so downstream tokenization is not ordered by shard.
# The intermediate merged_dups_c4tasky.jsonl is kept on disk deliberately
# so the pre-dedup corpus can be inspected.
cat c4tasky_*.jsonl > merged_dups_c4tasky.jsonl
sort -u merged_dups_c4tasky.jsonl | shuf > merged_c4tasky.jsonl
|
|
# Tokenize the shuffled JSONL: preprocess_data.py reads the "text" key of
# each JSON line, encodes it with the HF BLOOM tokenizer, and writes an
# mmap-style indexed binary dataset under the given --output-prefix.
cd "$MEGATRON_DEEPSPEED_REPO" || exit 1
python tools/preprocess_data.py \
  --input /gpfswork/rech/six/commun/code/turku/tasky/merged_c4tasky.jsonl \
  --output-prefix /gpfswork/rech/six/commun/code/turku/tasky/tasky_bloom \
  --dataset-impl mmap \
  --json-key text \
  --tokenizer-type PretrainedFromHF \
  --tokenizer-name-or-path "$TOKENIZER_PATH" \
  --workers 35
|
|