epoch 22427 | block 8073904 | steps 175 | loss 1.2035 | sim_abs 0.471 | sq_acc 0.14
- config.json +2 -2
- model.safetensors +2 -2
config.json CHANGED
@@ -9,9 +9,9 @@
   "eos_token_id": 151645,
   "head_dim": 128,
   "hidden_act": "silu",
-  "hidden_size":
+  "hidden_size": 2048,
   "initializer_range": 0.02,
-  "intermediate_size":
+  "intermediate_size": 6144,
   "layer_types": [
     "full_attention",
     "full_attention",
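The two edited fields set the model width to 2048 and the MLP width to 6144, i.e. a 3x expansion over hidden_size. Below is a minimal sketch, not part of the commit, that reads a local copy of config.json and checks the values introduced here; the file path and the specific assertions are illustrative assumptions.

# Sketch: verify the updated architecture fields in a local config.json (path is an assumption).
import json

with open("config.json") as f:
    cfg = json.load(f)

assert cfg["hidden_size"] == 2048
assert cfg["intermediate_size"] == 6144
assert cfg["intermediate_size"] == 3 * cfg["hidden_size"]  # 3x MLP expansion
assert cfg["head_dim"] == 128
assert cfg["hidden_act"] == "silu"
assert all(t == "full_attention" for t in cfg["layer_types"][:2])
print("config.json matches the values introduced by this commit")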
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:978bfa5dbfc4bde12f50636f9e20cef4ef34580067264e41ef10a6ef78f259b3
+size 3441185608
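The git-lfs pointer records only the content hash and byte size of the new weights file. Below is a minimal sketch, not part of the commit, that checks a downloaded model.safetensors against the updated pointer; the local path and the bf16 note at the end are assumptions.

# Sketch: verify a downloaded model.safetensors against the new LFS pointer fields.
import hashlib
import os

EXPECTED_SHA256 = "978bfa5dbfc4bde12f50636f9e20cef4ef34580067264e41ef10a6ef78f259b3"
EXPECTED_SIZE = 3441185608  # bytes, from the updated pointer

path = "model.safetensors"  # assumed local download location
assert os.path.getsize(path) == EXPECTED_SIZE

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == EXPECTED_SHA256
print("model.safetensors matches the LFS pointer in this commit")

# If the weights are stored in bf16 (an assumption, 2 bytes per parameter),
# the size implies roughly 3441185608 / 2 ≈ 1.72e9 parameters.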