helcig committed · Commit dc6a980 · verified · 1 parent: c3210e7

Delete files missing locally

Files changed (14)
  1. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw6.96_4-8bit_grouped_seed1234/config.json +0 -39
  2. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw6.96_4-8bit_grouped_seed1234/generation_config.json +0 -10
  3. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw6.96_4-8bit_grouped_seed1234/model-00001-of-00020.safetensors +0 -3
  4. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw6.96_4-8bit_grouped_seed1234/model-00002-of-00020.safetensors +0 -3
  5. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw7.01_4-8bit_grouped_seed42/config.json +0 -39
  6. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw7.01_4-8bit_grouped_seed42/generation_config.json +0 -10
  7. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw7.01_4-8bit_grouped_seed42/model-00001-of-00020.safetensors +0 -3
  8. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw7.01_4-8bit_grouped_seed42/model-00002-of-00020.safetensors +0 -3
  9. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.99_sha_bw7.60_4-8bit_grouped_seed1234/config.json +0 -39
  10. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.99_sha_bw7.60_4-8bit_grouped_seed1234/generation_config.json +0 -10
  11. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.99_sha_bw7.60_4-8bit_grouped_seed1234/model-00001-of-00020.safetensors +0 -3
  12. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.99_sha_bw7.60_4-8bit_grouped_seed1234/model-00002-of-00020.safetensors +0 -3
  13. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.99_sha_bw7.61_4-8bit_grouped_seed42/config.json +0 -39
  14. 4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.99_sha_bw7.61_4-8bit_grouped_seed42/generation_config.json +0 -10
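For context, a commit like this one can be produced programmatically rather than by deleting files one at a time in the web UI. Below is a minimal sketch, assuming the huggingface_hub client; the repo id and local path are hypothetical placeholders, not values taken from this commit. It lists the remote files, diffs them against a local checkout, and removes everything missing locally in a single commit:

```python
import os
from huggingface_hub import HfApi, CommitOperationDelete

api = HfApi()
repo_id = "user/quantized-llama"  # hypothetical repo id
local_root = "./quantized-llama"  # hypothetical local checkout

# Files currently present in the remote repo.
remote_files = set(api.list_repo_files(repo_id))

# Files present locally, as repo-relative POSIX paths.
local_files = {
    os.path.relpath(os.path.join(dirpath, name), local_root).replace(os.sep, "/")
    for dirpath, _, names in os.walk(local_root)
    for name in names
}

# One commit deleting everything that exists remotely but not locally.
ops = [CommitOperationDelete(path_in_repo=p) for p in sorted(remote_files - local_files)]
if ops:
    api.create_commit(
        repo_id=repo_id,
        operations=ops,
        commit_message="Delete files missing locally",
    )
```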
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw6.96_4-8bit_grouped_seed1234/config.json DELETED
@@ -1,39 +0,0 @@
-{
-  "architectures": [
-    "LlamaForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "bos_token_id": 128000,
-  "dtype": "float16",
-  "eos_token_id": [
-    128001,
-    128008,
-    128009
-  ],
-  "head_dim": 128,
-  "hidden_act": "silu",
-  "hidden_size": 8192,
-  "initializer_range": 0.02,
-  "intermediate_size": 28672,
-  "max_position_embeddings": 131072,
-  "mlp_bias": false,
-  "model_type": "llama",
-  "num_attention_heads": 64,
-  "num_hidden_layers": 80,
-  "num_key_value_heads": 8,
-  "pretraining_tp": 1,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": {
-    "factor": 8.0,
-    "high_freq_factor": 4.0,
-    "low_freq_factor": 1.0,
-    "original_max_position_embeddings": 8192,
-    "rope_type": "llama3"
-  },
-  "rope_theta": 500000.0,
-  "tie_word_embeddings": false,
-  "transformers_version": "4.57.3",
-  "use_cache": false,
-  "vocab_size": 128256
-}
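All four deleted config.json files are identical and describe the stock Llama-3.3-70B-Instruct architecture (80 layers, hidden size 8192, 64 attention heads with 8 KV heads, i.e. grouped-query attention); only the surrounding quantization variants differ. A minimal sketch, assuming transformers is installed and using a hypothetical path, of inspecting such a config without touching the weights:

```python
from transformers import AutoConfig

# Load only the architecture description; no weight shards are read.
config = AutoConfig.from_pretrained("path/to/variant")  # hypothetical local path
print(config.num_hidden_layers)    # 80
print(config.hidden_size)          # 8192
print(config.num_key_value_heads)  # 8 (grouped-query attention)
```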
 
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw6.96_4-8bit_grouped_seed1234/generation_config.json DELETED
@@ -1,10 +0,0 @@
-{
-  "_from_model_config": true,
-  "bos_token_id": 128000,
-  "eos_token_id": [
-    128001,
-    128008,
-    128009
-  ],
-  "transformers_version": "4.57.3"
-}
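The generation_config.json files mirror the special-token setup from the model config: one BOS id and three EOS ids (Llama 3.x treats end-of-text and the end-of-turn/message tokens all as stopping ids). A minimal sketch, again with a hypothetical path, of loading it on its own:

```python
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("path/to/variant")  # hypothetical local path
print(gen.bos_token_id)  # 128000
print(gen.eos_token_id)  # [128001, 128008, 128009]
```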
 
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw6.96_4-8bit_grouped_seed1234/model-00001-of-00020.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:0d9fba19ed80d0470e6e719afb7e9d88fa4732e0768f49e953fd41bcb2e5fee5
-size 4987264784
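The .safetensors entries in this diff are Git LFS pointer files rather than the weights themselves: three lines giving the LFS spec version, the SHA-256 of the actual blob, and its size in bytes (~4.99 GB for this shard). A minimal sketch of parsing one such pointer:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer into its version/oid/size fields."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:0d9fba19ed80d0470e6e719afb7e9d88fa4732e0768f49e953fd41bcb2e5fee5
size 4987264784"""

print(parse_lfs_pointer(pointer)["size_bytes"] / 1e9)  # ~4.99 GB
```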
 
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw6.96_4-8bit_grouped_seed1234/model-00002-of-00020.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:4225fce600836f7cbbe4bc12713b27c217bfdb32f7a91cd2f67b9035e2d122fe
-size 4916163664
 
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw7.01_4-8bit_grouped_seed42/config.json DELETED
@@ -1,39 +0,0 @@
-{
-  "architectures": [
-    "LlamaForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "bos_token_id": 128000,
-  "dtype": "float16",
-  "eos_token_id": [
-    128001,
-    128008,
-    128009
-  ],
-  "head_dim": 128,
-  "hidden_act": "silu",
-  "hidden_size": 8192,
-  "initializer_range": 0.02,
-  "intermediate_size": 28672,
-  "max_position_embeddings": 131072,
-  "mlp_bias": false,
-  "model_type": "llama",
-  "num_attention_heads": 64,
-  "num_hidden_layers": 80,
-  "num_key_value_heads": 8,
-  "pretraining_tp": 1,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": {
-    "factor": 8.0,
-    "high_freq_factor": 4.0,
-    "low_freq_factor": 1.0,
-    "original_max_position_embeddings": 8192,
-    "rope_type": "llama3"
-  },
-  "rope_theta": 500000.0,
-  "tie_word_embeddings": false,
-  "transformers_version": "4.57.3",
-  "use_cache": false,
-  "vocab_size": 128256
-}
 
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw7.01_4-8bit_grouped_seed42/generation_config.json DELETED
@@ -1,10 +0,0 @@
-{
-  "_from_model_config": true,
-  "bos_token_id": 128000,
-  "eos_token_id": [
-    128001,
-    128008,
-    128009
-  ],
-  "transformers_version": "4.57.3"
-}
 
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw7.01_4-8bit_grouped_seed42/model-00001-of-00020.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:4336fae3d8ac8799482b125f003e11561737b131a38128cd0ba6de83e8bef248
-size 4987264784
 
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.985_sha_bw7.01_4-8bit_grouped_seed42/model-00002-of-00020.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:4225fce600836f7cbbe4bc12713b27c217bfdb32f7a91cd2f67b9035e2d122fe
-size 4916163664
 
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.99_sha_bw7.60_4-8bit_grouped_seed1234/config.json DELETED
@@ -1,39 +0,0 @@
-{
-  "architectures": [
-    "LlamaForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "bos_token_id": 128000,
-  "dtype": "float16",
-  "eos_token_id": [
-    128001,
-    128008,
-    128009
-  ],
-  "head_dim": 128,
-  "hidden_act": "silu",
-  "hidden_size": 8192,
-  "initializer_range": 0.02,
-  "intermediate_size": 28672,
-  "max_position_embeddings": 131072,
-  "mlp_bias": false,
-  "model_type": "llama",
-  "num_attention_heads": 64,
-  "num_hidden_layers": 80,
-  "num_key_value_heads": 8,
-  "pretraining_tp": 1,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": {
-    "factor": 8.0,
-    "high_freq_factor": 4.0,
-    "low_freq_factor": 1.0,
-    "original_max_position_embeddings": 8192,
-    "rope_type": "llama3"
-  },
-  "rope_theta": 500000.0,
-  "tie_word_embeddings": false,
-  "transformers_version": "4.57.3",
-  "use_cache": false,
-  "vocab_size": 128256
-}
 
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.99_sha_bw7.60_4-8bit_grouped_seed1234/generation_config.json DELETED
@@ -1,10 +0,0 @@
-{
-  "_from_model_config": true,
-  "bos_token_id": 128000,
-  "eos_token_id": [
-    128001,
-    128008,
-    128009
-  ],
-  "transformers_version": "4.57.3"
-}
 
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.99_sha_bw7.60_4-8bit_grouped_seed1234/model-00001-of-00020.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:23c71f49dbf3d21363a845f1a628271fd3a597334fa1a5b547d9f7ca09e004f6
-size 4987264784
 
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.99_sha_bw7.60_4-8bit_grouped_seed1234/model-00002-of-00020.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:d4abbf92a25bf210129e16512f57a1361d59b5a5e353e88bf4ee83fe7c44b757
-size 4916163664
 
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.99_sha_bw7.61_4-8bit_grouped_seed42/config.json DELETED
@@ -1,39 +0,0 @@
-{
-  "architectures": [
-    "LlamaForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "bos_token_id": 128000,
-  "dtype": "float16",
-  "eos_token_id": [
-    128001,
-    128008,
-    128009
-  ],
-  "head_dim": 128,
-  "hidden_act": "silu",
-  "hidden_size": 8192,
-  "initializer_range": 0.02,
-  "intermediate_size": 28672,
-  "max_position_embeddings": 131072,
-  "mlp_bias": false,
-  "model_type": "llama",
-  "num_attention_heads": 64,
-  "num_hidden_layers": 80,
-  "num_key_value_heads": 8,
-  "pretraining_tp": 1,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": {
-    "factor": 8.0,
-    "high_freq_factor": 4.0,
-    "low_freq_factor": 1.0,
-    "original_max_position_embeddings": 8192,
-    "rope_type": "llama3"
-  },
-  "rope_theta": 500000.0,
-  "tie_word_embeddings": false,
-  "transformers_version": "4.57.3",
-  "use_cache": false,
-  "vocab_size": 128256
-}
 
4bit-asym_8bit-sym/Llama-3.3-70B-Instruct/ll_bsearch_kl0.01_eap0.99_sha_bw7.61_4-8bit_grouped_seed42/generation_config.json DELETED
@@ -1,10 +0,0 @@
-{
-  "_from_model_config": true,
-  "bos_token_id": 128000,
-  "eos_token_id": [
-    128001,
-    128008,
-    128009
-  ],
-  "transformers_version": "4.57.3"
-}