lhallee committed
Commit 04a1e54 · verified · Parent: 0428300

Upload DPLMForMaskedLM

config.json CHANGED
@@ -1,5 +1,4 @@
 {
-  "add_cross_attention": false,
   "architectures": [
     "DPLMForMaskedLM"
   ],
@@ -22,7 +21,6 @@
   "hidden_size": 2560,
   "initializer_range": 0.02,
   "intermediate_size": 10240,
-  "is_decoder": false,
   "is_folding_model": false,
   "layer_norm_eps": 1e-05,
   "mask_token_id": 32,
@@ -34,7 +32,7 @@
   "position_embedding_type": "rotary",
   "tie_word_embeddings": false,
   "token_dropout": true,
-  "transformers_version": "5.2.0",
+  "transformers_version": "4.57.6",
   "use_cache": true,
   "vocab_list": null,
   "vocab_size": 33
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:eb9cb606e6298c038eb2777a450229597204ea4fa4f0b5e9b25f60c2e48daf2e
-size 4941284784
+oid sha256:37b54855d087ef3e7d883464ae9d5ea3127ec15a16c6323d91ad16a6b98305c9
+size 4930778400
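The shard is tracked by Git LFS, so the file in the repo is only a pointer recording the object's sha256 and byte size; a downloaded shard can be verified against the updated pointer. A minimal sketch, assuming the shard has been fetched to model-00001-of-00003.safetensors in the current directory:

import hashlib
import os

path = "model-00001-of-00003.safetensors"
expected_oid = "37b54855d087ef3e7d883464ae9d5ea3127ec15a16c6323d91ad16a6b98305c9"
expected_size = 4930778400

# Hash the file in 1 MiB chunks to avoid loading ~5 GB into memory.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size
assert h.hexdigest() == expected_oid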
model.safetensors.index.json CHANGED
@@ -1,11 +1,11 @@
 {
   "metadata": {
-    "total_size": 11366871304
+    "total_parameters": 2839090114,
+    "total_size": 11356365064
   },
   "weight_map": {
     "esm.contact_head.regression.bias": "model-00003-of-00003.safetensors",
     "esm.contact_head.regression.weight": "model-00003-of-00003.safetensors",
-    "esm.embeddings.position_embeddings.weight": "model-00001-of-00003.safetensors",
     "esm.embeddings.word_embeddings.weight": "model-00001-of-00003.safetensors",
     "esm.encoder.emb_layer_norm_after.bias": "model-00003-of-00003.safetensors",
     "esm.encoder.emb_layer_norm_after.weight": "model-00003-of-00003.safetensors",