natankatz committed on
Commit
ee2c1f6
·
1 Parent(s): 2ae0562

Upload LlamaForCausalLM

Browse files
config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
  "_name_or_path": "codellama/CodeLlama-7b-hf",
3
  "architectures": [
4
- "LlamaForSequenceClassification"
5
  ],
6
  "bos_token_id": 1,
7
  "eos_token_id": 2,
 
1
  {
2
  "_name_or_path": "codellama/CodeLlama-7b-hf",
3
  "architectures": [
4
+ "LlamaForCausalLM"
5
  ],
6
  "bos_token_id": 1,
7
  "eos_token_id": 2,
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:404a7f15d4a40320b7403cfa9786f5718fd75bc381f25afc8b7abe658ae5677e
3
- size 3238184131
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cb9304406836b73395242360254e16d6f298b1df475efb65b91377896fee265b
3
+ size 3500442883
pytorch_model.bin.index.json CHANGED
@@ -1,8 +1,9 @@
1
  {
2
  "metadata": {
3
- "total_size": 13214834688
4
  },
5
  "weight_map": {
 
6
  "model.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
7
  "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
8
  "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
@@ -292,7 +293,6 @@
292
  "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
293
  "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
294
  "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
295
- "model.norm.weight": "pytorch_model-00002-of-00002.bin",
296
- "score.weight": "pytorch_model-00002-of-00002.bin"
297
  }
298
  }
 
1
  {
2
  "metadata": {
3
+ "total_size": 13477093376
4
  },
5
  "weight_map": {
6
+ "lm_head.weight": "pytorch_model-00002-of-00002.bin",
7
  "model.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
8
  "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
9
  "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
 
293
  "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
294
  "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
295
  "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
296
+ "model.norm.weight": "pytorch_model-00002-of-00002.bin"
 
297
  }
298
  }