Charlie81 committed
Commit 5b4ec54 · 1 Parent(s): 97a7f0a

delete checkpoints

checkpoints/checkpoint-14000/config.json DELETED
@@ -1,39 +0,0 @@
-{
-  "architectures": [
-    "MyOlmoeForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "clip_qkv": null,
-  "eos_token_id": 50279,
-  "hidden_act": "silu",
-  "hidden_size": 2048,
-  "initializer_range": 0.02,
-  "intermediate_size": 1024,
-  "max_position_embeddings": 4096,
-  "max_small_expert_count": 64,
-  "model_type": "olmoe",
-  "norm_topk_prob": false,
-  "num_attention_heads": 16,
-  "num_experts": 64,
-  "num_experts_per_tok": 2,
-  "num_hidden_layers": 16,
-  "num_key_value_heads": 16,
-  "num_small_experts": 0,
-  "output_router_logits": false,
-  "pad_token_id": 1,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": null,
-  "rope_theta": 10000.0,
-  "router_aux_loss_coef": 0.01,
-  "small_expert_count": 64,
-  "small_expert_intermediate_ratio": 16,
-  "small_expert_intermediate_size": 0,
-  "small_expert_sparsity_coef": 0.1,
-  "small_expert_strategy": "constant",
-  "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.53.1",
-  "use_cache": true,
-  "vocab_size": 50304
-}
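For reference, this config describes an OLMoE-style Mixture-of-Experts model: 16 layers, hidden size 2048, and 64 experts with top-2 routing per token. A minimal sketch of inspecting such a config with the transformers library (local path assumed; the small_expert_* keys come from this repo's custom MyOlmoeForCausalLM code, not from the stock OlmoeConfig):

from transformers import AutoConfig

# model_type "olmoe" resolves to the built-in OlmoeConfig; loading the
# model itself would additionally need the custom MyOlmoeForCausalLM class.
config = AutoConfig.from_pretrained("checkpoints/checkpoint-14000")

print(config.num_hidden_layers)    # 16
print(config.num_experts)          # 64
print(config.num_experts_per_tok)  # 2 (top-2 routing)

# Keys unknown to OlmoeConfig are kept as plain attributes.
print(getattr(config, "small_expert_strategy", None))  # "constant"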
 
checkpoints/checkpoint-14000/generation_config.json DELETED
@@ -1,6 +0,0 @@
-{
-  "_from_model_config": true,
-  "eos_token_id": 50279,
-  "pad_token_id": 1,
-  "transformers_version": "4.53.1"
-}
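The generation config only pins the special-token ids used at inference time. A minimal sketch of loading it (local path assumed):

from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("checkpoints/checkpoint-14000")
print(gen.eos_token_id, gen.pad_token_id)  # 50279 1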
 
checkpoints/checkpoint-14000/model-00001-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9a1b9651bad1a045a178e22cf198d5070c94d4374f6331086e38801fe8d88ca3
-size 4997482624
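Each deleted weight shard is a Git LFS pointer file rather than the binary itself: three lines giving the LFS spec version, the SHA-256 of the real blob, and its size in bytes (here about 5 GB per shard). A minimal sketch of parsing such a pointer (hypothetical helper, not part of git or any library):

def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer file into its three key-value fields."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:9a1b9651bad1a045a178e22cf198d5070c94d4374f6331086e38801fe8d88ca3
size 4997482624"""

print(parse_lfs_pointer(pointer)["size_bytes"] / 1e9)  # ~5.0 GB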
 
checkpoints/checkpoint-14000/model-00002-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9bb94d18d0e686613cd18c8fb7cffcf9074bf177a43650ae6d797fc9ebc4bbe3
-size 4999439616
 
checkpoints/checkpoint-14000/model-00003-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:ffec5cc4c228942b4b81b29a823583fb96c9ed82cbdf45c6f436bf2051fce7d0
-size 3892418912
 
checkpoints/checkpoint-14000/model.safetensors.index.json DELETED
(diff too large to render)
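The index file maps every parameter name to the shard that stores it, following the standard sharded-safetensors layout. A minimal sketch of resolving one tensor (the tensor name is an assumed OLMoE-style name, not taken from the unrendered diff):

import json
from safetensors import safe_open

ckpt = "checkpoints/checkpoint-14000"
with open(f"{ckpt}/model.safetensors.index.json") as f:
    index = json.load(f)

# weight_map: {"model.embed_tokens.weight": "model-00001-of-00003.safetensors", ...}
shard = index["weight_map"]["model.embed_tokens.weight"]
with safe_open(f"{ckpt}/{shard}", framework="pt", device="cpu") as sf:
    emb = sf.get_tensor("model.embed_tokens.weight")
print(emb.shape)  # expected (50304, 2048) given the config above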
 
checkpoints/checkpoint-14000/optimizer.pt DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:6b64bcf494bd15433e4b38f3d6b5a3eaccca9d49c3c864a66b355e1e9932a7df
-size 101356346
 
checkpoints/checkpoint-14000/rng_state.pth DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:2b66e3cc7c452b707ddac5caf0aa17618afb9bc1a0333600a22c4afb353f3165
-size 14244
 
checkpoints/checkpoint-14000/scheduler.pt DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:ffd32eb325999065cc02ef2964f0a597591e1b55dea9c5b48ce39225b84686db
-size 1064
 
checkpoints/checkpoint-14000/trainer_state.json DELETED
(diff too large to render)
 
checkpoints/checkpoint-14000/training_args.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:3426ffd33c60179729f2d6634f33740dda1785184d4955f3ba37cecf1acc3a35
-size 5304
 
checkpoints/checkpoint-16000/config.json DELETED
@@ -1,39 +0,0 @@
-{
-  "architectures": [
-    "MyOlmoeForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "clip_qkv": null,
-  "eos_token_id": 50279,
-  "hidden_act": "silu",
-  "hidden_size": 2048,
-  "initializer_range": 0.02,
-  "intermediate_size": 1024,
-  "max_position_embeddings": 4096,
-  "max_small_expert_count": 64,
-  "model_type": "olmoe",
-  "norm_topk_prob": false,
-  "num_attention_heads": 16,
-  "num_experts": 64,
-  "num_experts_per_tok": 2,
-  "num_hidden_layers": 16,
-  "num_key_value_heads": 16,
-  "num_small_experts": 0,
-  "output_router_logits": false,
-  "pad_token_id": 1,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": null,
-  "rope_theta": 10000.0,
-  "router_aux_loss_coef": 0.01,
-  "small_expert_count": 64,
-  "small_expert_intermediate_ratio": 16,
-  "small_expert_intermediate_size": 0,
-  "small_expert_sparsity_coef": 0.1,
-  "small_expert_strategy": "constant",
-  "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.53.1",
-  "use_cache": true,
-  "vocab_size": 50304
-}
 
checkpoints/checkpoint-16000/generation_config.json DELETED
@@ -1,6 +0,0 @@
-{
-  "_from_model_config": true,
-  "eos_token_id": 50279,
-  "pad_token_id": 1,
-  "transformers_version": "4.53.1"
-}
 
checkpoints/checkpoint-16000/model-00001-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9a1b9651bad1a045a178e22cf198d5070c94d4374f6331086e38801fe8d88ca3
-size 4997482624
 
checkpoints/checkpoint-16000/model-00002-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:af9d308f6ffb528d709ad42b553f7b3f57a3f610944e1393ca90d4fb0cc3051f
-size 4999439616
 
checkpoints/checkpoint-16000/model-00003-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:1dfcf457dc7b3c6a08f91b93f7b1e4c4221884c11db120cafd5e499f52194b1b
-size 3892418912
 
checkpoints/checkpoint-16000/model.safetensors.index.json DELETED
(diff too large to render)
 
checkpoints/checkpoint-16000/optimizer.pt DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:87bb3ee408e58a7c4921bf0c1298ce401cf571dff67ccf061870ed24b5dd0434
-size 101356346
 
checkpoints/checkpoint-16000/rng_state.pth DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:2b66e3cc7c452b707ddac5caf0aa17618afb9bc1a0333600a22c4afb353f3165
-size 14244
 
checkpoints/checkpoint-16000/scheduler.pt DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:83ca0d2f69bacfec28f7a09f73ca498ed7ea9bd6e60cb0f8abb348c452cc631f
-size 1064
 
checkpoints/checkpoint-16000/trainer_state.json DELETED
(diff too large to render)
 
checkpoints/checkpoint-16000/training_args.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:3426ffd33c60179729f2d6634f33740dda1785184d4955f3ba37cecf1acc3a35
-size 5304
 
scripts/train.py CHANGED
@@ -78,7 +78,7 @@ def main():
     num_train_epochs=3,
     logging_dir="./logs",
     logging_steps=10,
-    save_steps=2000,
+    save_steps=20,
     save_total_limit=2,
     bf16=True,
     gradient_checkpointing=False,  # Disabled for now
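The only functional change is save_steps: 2000 → 20. Combined with the unchanged save_total_limit=2, the Trainer now writes a checkpoint every 20 optimizer steps and keeps only the two most recent, which is presumably why the stale checkpoint-14000 and checkpoint-16000 directories are deleted above. A minimal sketch of the surrounding TrainingArguments (values from the hunk; output_dir is hypothetical, and model/dataset setup is omitted):

from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="./checkpoints",    # hypothetical; not shown in the hunk
    num_train_epochs=3,
    logging_dir="./logs",
    logging_steps=10,
    save_steps=20,                 # was 2000; checkpoint every 20 steps
    save_total_limit=2,            # prune to the two most recent checkpoints
    bf16=True,
    gradient_checkpointing=False,  # Disabled for now
)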