Upload folder using huggingface_hub
- .gitattributes +1 -0
- SFT/README.md +61 -0
- SFT/added_tokens.json +24 -0
- SFT/all_results.json +8 -0
- SFT/chat_template.jinja +7 -0
- SFT/config.json +66 -0
- SFT/generation_config.json +12 -0
- SFT/merges.txt +0 -0
- SFT/model-00001-of-00004.safetensors +3 -0
- SFT/model-00002-of-00004.safetensors +3 -0
- SFT/model-00003-of-00004.safetensors +3 -0
- SFT/model-00004-of-00004.safetensors +3 -0
- SFT/model.safetensors.index.json +737 -0
- SFT/preprocessor_config.json +37 -0
- SFT/special_tokens_map.json +31 -0
- SFT/tokenizer.json +3 -0
- SFT/tokenizer_config.json +209 -0
- SFT/train_results.json +8 -0
- SFT/trainer_log.jsonl +237 -0
- SFT/trainer_state.json +1695 -0
- SFT/training_args.bin +3 -0
- SFT/training_loss.png +0 -0
- SFT/video_preprocessor_config.json +43 -0
- SFT/vocab.json +0 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+SFT/tokenizer.json filter=lfs diff=lfs merge=lfs -text
SFT/README.md
ADDED
@@ -0,0 +1,61 @@
---
library_name: transformers
license: other
base_model: Qwen/Qwen2.5-VL-7B-Instruct
tags:
- llama-factory
- full
- generated_from_trainer
model-index:
- name: sft
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# sft

This model is a fine-tuned version of Qwen/Qwen2.5-VL-7B-Instruct on the ObsDriveBench_sft_normal_weather dataset.

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 1
- eval_batch_size: 8
- seed: 42
- distributed_type: multi-GPU
- num_devices: 4
- gradient_accumulation_steps: 2
- total_train_batch_size: 8
- total_eval_batch_size: 32
- optimizer: adamw_torch with betas=(0.9, 0.999), epsilon=1e-08, and no additional optimizer arguments
- lr_scheduler_type: cosine
- lr_scheduler_warmup_ratio: 0.15
- num_epochs: 3

### Training results

### Framework versions

- Transformers 4.55.0
- Pytorch 2.5.1+cu121
- Datasets 3.6.0
- Tokenizers 0.21.1
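For reference, a minimal usage sketch for this checkpoint. This is an assumption-laden example, not part of the upload: it presumes the SFT/ folder has been downloaded locally, that the accelerate package is installed for device_map="auto", and that an image file such as example.jpg exists.

```python
# Minimal sketch: load the SFT checkpoint and ask one question about an image.
# Paths and the image file are placeholders; adjust to your local setup.
import torch
from PIL import Image
from transformers import AutoProcessor, Qwen2_5_VLForConditionalGeneration

model = Qwen2_5_VLForConditionalGeneration.from_pretrained(
    "SFT", torch_dtype=torch.bfloat16, device_map="auto"
)
processor = AutoProcessor.from_pretrained("SFT")

messages = [{"role": "user", "content": [
    {"type": "image"},
    {"type": "text", "text": "Describe the driving scene."},
]}]
prompt = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
inputs = processor(text=[prompt], images=[Image.open("example.jpg")], return_tensors="pt").to(model.device)

out = model.generate(**inputs, max_new_tokens=128)
print(processor.batch_decode(out[:, inputs["input_ids"].shape[1]:], skip_special_tokens=True)[0])
```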
SFT/added_tokens.json
ADDED
@@ -0,0 +1,24 @@
{
  "</tool_call>": 151658,
  "<tool_call>": 151657,
  "<|box_end|>": 151649,
  "<|box_start|>": 151648,
  "<|endoftext|>": 151643,
  "<|file_sep|>": 151664,
  "<|fim_middle|>": 151660,
  "<|fim_pad|>": 151662,
  "<|fim_prefix|>": 151659,
  "<|fim_suffix|>": 151661,
  "<|im_end|>": 151645,
  "<|im_start|>": 151644,
  "<|image_pad|>": 151655,
  "<|object_ref_end|>": 151647,
  "<|object_ref_start|>": 151646,
  "<|quad_end|>": 151651,
  "<|quad_start|>": 151650,
  "<|repo_name|>": 151663,
  "<|video_pad|>": 151656,
  "<|vision_end|>": 151653,
  "<|vision_pad|>": 151654,
  "<|vision_start|>": 151652
}
SFT/all_results.json
ADDED
@@ -0,0 +1,8 @@
{
  "epoch": 3.0,
  "total_flos": 121880697913344.0,
  "train_loss": 0.6645450910134418,
  "train_runtime": 16043.0595,
  "train_samples_per_second": 1.177,
  "train_steps_per_second": 0.147
}
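A quick back-of-the-envelope check, assuming the batch settings from SFT/README.md, shows these figures are mutually consistent:

```python
# Rough consistency check of the reported training throughput.
# Assumes total_train_batch_size = 8 and num_epochs = 3 from SFT/README.md.
train_runtime = 16043.0595              # seconds
total_steps = 0.147 * train_runtime     # ~2358 optimizer steps
total_samples = 1.177 * train_runtime   # ~18883 samples seen over 3 epochs
print(round(total_samples / total_steps, 2))  # ~8.0 samples per step = total_train_batch_size
```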
SFT/chat_template.jinja
ADDED
@@ -0,0 +1,7 @@
{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system
You are a helpful assistant.<|im_end|>
{% endif %}<|im_start|>{{ message['role'] }}
{% if message['content'] is string %}{{ message['content'] }}<|im_end|>
{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>
{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant
{% endif %}
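To illustrate the template above, a single user turn with one image item and one text item renders to roughly the following prompt. This is a hand-traced sketch with add_vision_id unset and add_generation_prompt=True; the processor later expands <|image_pad|> into the actual image tokens.

```python
# Hand-traced output of chat_template.jinja for one image + one text user message.
rendered = (
    "<|im_start|>system\n"
    "You are a helpful assistant.<|im_end|>\n"
    "<|im_start|>user\n"
    "<|vision_start|><|image_pad|><|vision_end|>Describe the driving scene.<|im_end|>\n"
    "<|im_start|>assistant\n"
)
```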
SFT/config.json
ADDED
@@ -0,0 +1,66 @@
{
  "architectures": [
    "Qwen2_5_VLForConditionalGeneration"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 151643,
  "eos_token_id": 151645,
  "hidden_act": "silu",
  "hidden_size": 3584,
  "image_token_id": 151655,
  "initializer_range": 0.02,
  "intermediate_size": 18944,
  "max_position_embeddings": 128000,
  "max_window_layers": 28,
  "model_type": "qwen2_5_vl",
  "num_attention_heads": 28,
  "num_hidden_layers": 28,
  "num_key_value_heads": 4,
  "rms_norm_eps": 1e-06,
  "rope_scaling": {
    "mrope_section": [
      16,
      24,
      24
    ],
    "rope_type": "default",
    "type": "default"
  },
  "rope_theta": 1000000.0,
  "sliding_window": 32768,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.55.0",
  "use_cache": false,
  "use_sliding_window": false,
  "video_token_id": 151656,
  "vision_config": {
    "depth": 32,
    "fullatt_block_indexes": [
      7,
      15,
      23,
      31
    ],
    "hidden_act": "silu",
    "hidden_size": 1280,
    "in_channels": 3,
    "in_chans": 3,
    "initializer_range": 0.02,
    "intermediate_size": 3420,
    "model_type": "qwen2_5_vl",
    "num_heads": 16,
    "out_hidden_size": 3584,
    "patch_size": 14,
    "spatial_merge_size": 2,
    "spatial_patch_size": 14,
    "temporal_patch_size": 2,
    "tokens_per_second": 2,
    "torch_dtype": "float32",
    "window_size": 112
  },
  "vision_end_token_id": 151653,
  "vision_start_token_id": 151652,
  "vision_token_id": 151654,
  "vocab_size": 152064
}
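A couple of quantities implied by this config, as a small derivation sketch; these values are computed from the fields above, not stored in the file:

```python
# Attention geometry of the language tower, derived from config.json above.
hidden_size, n_heads, n_kv_heads = 3584, 28, 4
head_dim = hidden_size // n_heads        # 128
queries_per_kv = n_heads // n_kv_heads   # 7 query heads share each KV head (GQA)
print(head_dim, queries_per_kv)          # 128 7
```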
SFT/generation_config.json
ADDED
@@ -0,0 +1,12 @@
{
  "bos_token_id": 151643,
  "do_sample": true,
  "eos_token_id": [
    151645,
    151643
  ],
  "pad_token_id": 151643,
  "repetition_penalty": 1.05,
  "temperature": 1e-06,
  "transformers_version": "4.55.0"
}
SFT/merges.txt
ADDED
The diff for this file is too large to render. See raw diff.
SFT/model-00001-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eeff2fbf4fe757152e7bd1e109e27ff4f4ee6e1eb2fd72b797626e149ddb50c9
size 4968243304
SFT/model-00002-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4826ec6980b5a4d2414090420e98e0b61b2df1cadd3883afeff841f75f69d9b6
size 4991495816
SFT/model-00003-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7a6755e4daa11ba83cf44879c27b52b35fbe3d0a5cc371d0bc5912b6fb8ed214
size 4932751040
SFT/model-00004-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7e00e3ba85e42c2c4b37183df614e40f048a5991729575c8d5dd5ed9877449f4
size 1691924384
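These four files are git-lfs pointers to the weight shards; the index file that follows maps each tensor name to its shard. A minimal lookup sketch, assuming the safetensors package is installed and the SFT/ directory has been fully downloaded (LFS files resolved):

```python
# Find which shard stores a given tensor and read it directly.
import json
from safetensors import safe_open

with open("SFT/model.safetensors.index.json") as fh:
    index = json.load(fh)

name = "model.layers.0.self_attn.q_proj.weight"   # any key in weight_map
shard = index["weight_map"][name]                 # e.g. "model-00001-of-00004.safetensors"
with safe_open(f"SFT/{shard}", framework="pt") as f:
    tensor = f.get_tensor(name)
print(shard, tuple(tensor.shape))
```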
SFT/model.safetensors.index.json
ADDED
@@ -0,0 +1,737 @@
The index records "metadata": {"total_parameters": 848896, "total_size": 16584333312} and a "weight_map" from tensor name to shard file. The rendered portion of the map assigns:

- lm_head.weight and model.norm.weight → model-00004-of-00004.safetensors
- model.embed_tokens.weight → model-00001-of-00004.safetensors
- model.layers.0–4: all tensors → model-00001-of-00004.safetensors
- model.layers.5: mlp.gate_proj.weight and all self_attn.* tensors → model-00001-of-00004.safetensors; input_layernorm, mlp.down_proj, mlp.up_proj and post_attention_layernorm weights → model-00002-of-00004.safetensors
- model.layers.6–15: all tensors → model-00002-of-00004.safetensors
- model.layers.16: self_attn.* tensors → model-00002-of-00004.safetensors; all other tensors → model-00003-of-00004.safetensors
- model.layers.17–25: all tensors → model-00003-of-00004.safetensors
- model.layers.26: mlp.gate_proj.weight, mlp.up_proj.weight and self_attn.* tensors → model-00003-of-00004.safetensors; input_layernorm, mlp.down_proj and post_attention_layernorm weights → model-00004-of-00004.safetensors
- model.layers.27: all tensors → model-00004-of-00004.safetensors
- visual.blocks.0–2 and visual.blocks.10–21: all tensors → model-00001-of-00004.safetensors

The remaining weight_map entries are not rendered in this view.
|
| 526 |
+
"visual.blocks.22.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 527 |
+
"visual.blocks.22.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 528 |
+
"visual.blocks.22.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 529 |
+
"visual.blocks.22.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 530 |
+
"visual.blocks.22.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 531 |
+
"visual.blocks.22.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 532 |
+
"visual.blocks.22.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 533 |
+
"visual.blocks.22.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 534 |
+
"visual.blocks.22.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 535 |
+
"visual.blocks.22.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 536 |
+
"visual.blocks.22.norm1.weight": "model-00001-of-00004.safetensors",
|
| 537 |
+
"visual.blocks.22.norm2.weight": "model-00001-of-00004.safetensors",
|
| 538 |
+
"visual.blocks.23.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 539 |
+
"visual.blocks.23.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 540 |
+
"visual.blocks.23.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 541 |
+
"visual.blocks.23.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 542 |
+
"visual.blocks.23.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 543 |
+
"visual.blocks.23.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 544 |
+
"visual.blocks.23.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 545 |
+
"visual.blocks.23.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 546 |
+
"visual.blocks.23.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 547 |
+
"visual.blocks.23.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 548 |
+
"visual.blocks.23.norm1.weight": "model-00001-of-00004.safetensors",
|
| 549 |
+
"visual.blocks.23.norm2.weight": "model-00001-of-00004.safetensors",
|
| 550 |
+
"visual.blocks.24.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 551 |
+
"visual.blocks.24.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 552 |
+
"visual.blocks.24.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 553 |
+
"visual.blocks.24.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 554 |
+
"visual.blocks.24.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 555 |
+
"visual.blocks.24.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 556 |
+
"visual.blocks.24.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 557 |
+
"visual.blocks.24.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 558 |
+
"visual.blocks.24.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 559 |
+
"visual.blocks.24.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 560 |
+
"visual.blocks.24.norm1.weight": "model-00001-of-00004.safetensors",
|
| 561 |
+
"visual.blocks.24.norm2.weight": "model-00001-of-00004.safetensors",
|
| 562 |
+
"visual.blocks.25.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 563 |
+
"visual.blocks.25.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 564 |
+
"visual.blocks.25.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 565 |
+
"visual.blocks.25.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 566 |
+
"visual.blocks.25.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 567 |
+
"visual.blocks.25.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 568 |
+
"visual.blocks.25.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 569 |
+
"visual.blocks.25.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 570 |
+
"visual.blocks.25.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 571 |
+
"visual.blocks.25.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 572 |
+
"visual.blocks.25.norm1.weight": "model-00001-of-00004.safetensors",
|
| 573 |
+
"visual.blocks.25.norm2.weight": "model-00001-of-00004.safetensors",
|
| 574 |
+
"visual.blocks.26.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 575 |
+
"visual.blocks.26.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 576 |
+
"visual.blocks.26.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 577 |
+
"visual.blocks.26.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 578 |
+
"visual.blocks.26.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 579 |
+
"visual.blocks.26.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 580 |
+
"visual.blocks.26.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 581 |
+
"visual.blocks.26.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 582 |
+
"visual.blocks.26.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 583 |
+
"visual.blocks.26.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 584 |
+
"visual.blocks.26.norm1.weight": "model-00001-of-00004.safetensors",
|
| 585 |
+
"visual.blocks.26.norm2.weight": "model-00001-of-00004.safetensors",
|
| 586 |
+
"visual.blocks.27.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 587 |
+
"visual.blocks.27.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 588 |
+
"visual.blocks.27.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 589 |
+
"visual.blocks.27.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 590 |
+
"visual.blocks.27.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 591 |
+
"visual.blocks.27.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 592 |
+
"visual.blocks.27.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 593 |
+
"visual.blocks.27.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 594 |
+
"visual.blocks.27.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 595 |
+
"visual.blocks.27.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 596 |
+
"visual.blocks.27.norm1.weight": "model-00001-of-00004.safetensors",
|
| 597 |
+
"visual.blocks.27.norm2.weight": "model-00001-of-00004.safetensors",
|
| 598 |
+
"visual.blocks.28.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 599 |
+
"visual.blocks.28.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 600 |
+
"visual.blocks.28.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 601 |
+
"visual.blocks.28.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 602 |
+
"visual.blocks.28.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 603 |
+
"visual.blocks.28.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 604 |
+
"visual.blocks.28.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 605 |
+
"visual.blocks.28.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 606 |
+
"visual.blocks.28.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 607 |
+
"visual.blocks.28.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 608 |
+
"visual.blocks.28.norm1.weight": "model-00001-of-00004.safetensors",
|
| 609 |
+
"visual.blocks.28.norm2.weight": "model-00001-of-00004.safetensors",
|
| 610 |
+
"visual.blocks.29.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 611 |
+
"visual.blocks.29.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 612 |
+
"visual.blocks.29.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 613 |
+
"visual.blocks.29.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 614 |
+
"visual.blocks.29.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 615 |
+
"visual.blocks.29.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 616 |
+
"visual.blocks.29.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 617 |
+
"visual.blocks.29.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 618 |
+
"visual.blocks.29.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 619 |
+
"visual.blocks.29.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 620 |
+
"visual.blocks.29.norm1.weight": "model-00001-of-00004.safetensors",
|
| 621 |
+
"visual.blocks.29.norm2.weight": "model-00001-of-00004.safetensors",
|
| 622 |
+
"visual.blocks.3.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 623 |
+
"visual.blocks.3.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 624 |
+
"visual.blocks.3.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 625 |
+
"visual.blocks.3.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 626 |
+
"visual.blocks.3.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 627 |
+
"visual.blocks.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 628 |
+
"visual.blocks.3.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 629 |
+
"visual.blocks.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 630 |
+
"visual.blocks.3.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 631 |
+
"visual.blocks.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 632 |
+
"visual.blocks.3.norm1.weight": "model-00001-of-00004.safetensors",
|
| 633 |
+
"visual.blocks.3.norm2.weight": "model-00001-of-00004.safetensors",
|
| 634 |
+
"visual.blocks.30.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 635 |
+
"visual.blocks.30.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 636 |
+
"visual.blocks.30.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 637 |
+
"visual.blocks.30.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 638 |
+
"visual.blocks.30.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 639 |
+
"visual.blocks.30.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 640 |
+
"visual.blocks.30.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 641 |
+
"visual.blocks.30.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 642 |
+
"visual.blocks.30.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 643 |
+
"visual.blocks.30.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 644 |
+
"visual.blocks.30.norm1.weight": "model-00001-of-00004.safetensors",
|
| 645 |
+
"visual.blocks.30.norm2.weight": "model-00001-of-00004.safetensors",
|
| 646 |
+
"visual.blocks.31.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 647 |
+
"visual.blocks.31.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 648 |
+
"visual.blocks.31.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 649 |
+
"visual.blocks.31.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 650 |
+
"visual.blocks.31.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 651 |
+
"visual.blocks.31.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 652 |
+
"visual.blocks.31.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 653 |
+
"visual.blocks.31.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 654 |
+
"visual.blocks.31.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 655 |
+
"visual.blocks.31.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 656 |
+
"visual.blocks.31.norm1.weight": "model-00001-of-00004.safetensors",
|
| 657 |
+
"visual.blocks.31.norm2.weight": "model-00001-of-00004.safetensors",
|
| 658 |
+
"visual.blocks.4.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 659 |
+
"visual.blocks.4.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 660 |
+
"visual.blocks.4.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 661 |
+
"visual.blocks.4.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 662 |
+
"visual.blocks.4.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 663 |
+
"visual.blocks.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 664 |
+
"visual.blocks.4.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 665 |
+
"visual.blocks.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 666 |
+
"visual.blocks.4.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 667 |
+
"visual.blocks.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 668 |
+
"visual.blocks.4.norm1.weight": "model-00001-of-00004.safetensors",
|
| 669 |
+
"visual.blocks.4.norm2.weight": "model-00001-of-00004.safetensors",
|
| 670 |
+
"visual.blocks.5.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 671 |
+
"visual.blocks.5.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 672 |
+
"visual.blocks.5.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 673 |
+
"visual.blocks.5.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 674 |
+
"visual.blocks.5.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 675 |
+
"visual.blocks.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 676 |
+
"visual.blocks.5.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 677 |
+
"visual.blocks.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 678 |
+
"visual.blocks.5.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 679 |
+
"visual.blocks.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 680 |
+
"visual.blocks.5.norm1.weight": "model-00001-of-00004.safetensors",
|
| 681 |
+
"visual.blocks.5.norm2.weight": "model-00001-of-00004.safetensors",
|
| 682 |
+
"visual.blocks.6.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 683 |
+
"visual.blocks.6.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 684 |
+
"visual.blocks.6.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 685 |
+
"visual.blocks.6.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 686 |
+
"visual.blocks.6.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 687 |
+
"visual.blocks.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 688 |
+
"visual.blocks.6.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 689 |
+
"visual.blocks.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 690 |
+
"visual.blocks.6.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 691 |
+
"visual.blocks.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 692 |
+
"visual.blocks.6.norm1.weight": "model-00001-of-00004.safetensors",
|
| 693 |
+
"visual.blocks.6.norm2.weight": "model-00001-of-00004.safetensors",
|
| 694 |
+
"visual.blocks.7.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 695 |
+
"visual.blocks.7.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 696 |
+
"visual.blocks.7.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 697 |
+
"visual.blocks.7.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 698 |
+
"visual.blocks.7.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 699 |
+
"visual.blocks.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 700 |
+
"visual.blocks.7.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 701 |
+
"visual.blocks.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 702 |
+
"visual.blocks.7.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 703 |
+
"visual.blocks.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 704 |
+
"visual.blocks.7.norm1.weight": "model-00001-of-00004.safetensors",
|
| 705 |
+
"visual.blocks.7.norm2.weight": "model-00001-of-00004.safetensors",
|
| 706 |
+
"visual.blocks.8.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 707 |
+
"visual.blocks.8.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 708 |
+
"visual.blocks.8.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 709 |
+
"visual.blocks.8.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 710 |
+
"visual.blocks.8.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 711 |
+
"visual.blocks.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 712 |
+
"visual.blocks.8.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 713 |
+
"visual.blocks.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 714 |
+
"visual.blocks.8.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 715 |
+
"visual.blocks.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 716 |
+
"visual.blocks.8.norm1.weight": "model-00001-of-00004.safetensors",
|
| 717 |
+
"visual.blocks.8.norm2.weight": "model-00001-of-00004.safetensors",
|
| 718 |
+
"visual.blocks.9.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 719 |
+
"visual.blocks.9.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 720 |
+
"visual.blocks.9.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 721 |
+
"visual.blocks.9.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 722 |
+
"visual.blocks.9.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 723 |
+
"visual.blocks.9.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 724 |
+
"visual.blocks.9.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 725 |
+
"visual.blocks.9.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 726 |
+
"visual.blocks.9.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 727 |
+
"visual.blocks.9.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 728 |
+
"visual.blocks.9.norm1.weight": "model-00001-of-00004.safetensors",
|
| 729 |
+
"visual.blocks.9.norm2.weight": "model-00001-of-00004.safetensors",
|
| 730 |
+
"visual.merger.ln_q.weight": "model-00001-of-00004.safetensors",
|
| 731 |
+
"visual.merger.mlp.0.bias": "model-00001-of-00004.safetensors",
|
| 732 |
+
"visual.merger.mlp.0.weight": "model-00001-of-00004.safetensors",
|
| 733 |
+
"visual.merger.mlp.2.bias": "model-00001-of-00004.safetensors",
|
| 734 |
+
"visual.merger.mlp.2.weight": "model-00001-of-00004.safetensors",
|
| 735 |
+
"visual.patch_embed.proj.weight": "model-00001-of-00004.safetensors"
|
| 736 |
+
}
|
| 737 |
+
}
|
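The weight map above assigns each parameter to one of the four safetensors shards; every vision-tower and merger entry shown here points at model-00001-of-00004.safetensors. A minimal sketch for inspecting the index locally, assuming the SFT/ folder has been downloaded and follows the standard transformers index layout with a `weight_map` field:

```python
import json
from collections import Counter

# Hypothetical local path to the uploaded folder.
INDEX_PATH = "SFT/model.safetensors.index.json"

with open(INDEX_PATH) as f:
    index = json.load(f)

weight_map = index["weight_map"]

# Count tensors per shard and find which shards hold vision-tower weights.
shard_counts = Counter(weight_map.values())
vision_shards = {shard for name, shard in weight_map.items() if name.startswith("visual.")}

print("tensors per shard:", dict(shard_counts))
print("shards holding vision weights:", sorted(vision_shards))
```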
SFT/preprocessor_config.json
ADDED
@@ -0,0 +1,37 @@
{
  "crop_size": null,
  "data_format": "channels_first",
  "default_to_square": true,
  "device": null,
  "disable_grouping": null,
  "do_center_crop": null,
  "do_convert_rgb": true,
  "do_normalize": true,
  "do_rescale": true,
  "do_resize": true,
  "image_mean": [
    0.48145466,
    0.4578275,
    0.40821073
  ],
  "image_processor_type": "Qwen2VLImageProcessorFast",
  "image_std": [
    0.26862954,
    0.26130258,
    0.27577711
  ],
  "input_data_format": null,
  "max_pixels": 12845056,
  "merge_size": 2,
  "min_pixels": 3136,
  "patch_size": 14,
  "processor_class": "Qwen2_5_VLProcessor",
  "resample": 3,
  "rescale_factor": 0.00392156862745098,
  "return_tensors": null,
  "size": {
    "longest_edge": 3136,
    "shortest_edge": 3136
  },
  "temporal_patch_size": 2
}
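With `patch_size` 14 and `merge_size` 2, one merged visual token covers roughly a 28x28 pixel area, so `min_pixels` 3136 and `max_pixels` 12845056 translate to roughly 4 and 16384 visual tokens per image. A rough back-of-the-envelope sketch of that budget; the exact smart-resize logic lives inside the Qwen2VL image processor, so treat this as an approximation rather than the real implementation:

```python
# Approximate visual-token budget implied by the preprocessor settings above.
patch_size = 14
merge_size = 2
min_pixels = 3136
max_pixels = 12845056

pixels_per_token = (patch_size * merge_size) ** 2  # 28 * 28 = 784 pixels per merged token

def approx_visual_tokens(width: int, height: int) -> int:
    """Rough token count after the image is rescaled into [min_pixels, max_pixels]."""
    pixels = min(max(width * height, min_pixels), max_pixels)
    return pixels // pixels_per_token

print(approx_visual_tokens(1280, 720))   # ~1175 tokens for a 720p frame
print(max_pixels // pixels_per_token)    # 16384 tokens at the upper bound
```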
SFT/special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
{
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
SFT/tokenizer.json
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
size 11421896
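tokenizer.json is tracked with Git LFS, so the three lines above are the pointer rather than the tokenizer data itself. After fetching the real file (for example with `git lfs pull`), it can be checked against the pointer's oid and size; a small stdlib-only sketch, assuming the file sits at SFT/tokenizer.json locally:

```python
import hashlib
import os

# Values copied from the LFS pointer above.
EXPECTED_SHA256 = "9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa"
EXPECTED_SIZE = 11421896

path = "SFT/tokenizer.json"
digest = hashlib.sha256(open(path, "rb").read()).hexdigest()

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch - file may still be an LFS pointer"
assert digest == EXPECTED_SHA256, "sha256 mismatch"
print("tokenizer.json matches the LFS pointer")
```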
SFT/tokenizer_config.json
ADDED
@@ -0,0 +1,209 @@
{
  "add_bos_token": false,
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151645": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151646": {
      "content": "<|object_ref_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151647": {
      "content": "<|object_ref_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151648": {
      "content": "<|box_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151649": {
      "content": "<|box_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151650": {
      "content": "<|quad_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151651": {
      "content": "<|quad_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151652": {
      "content": "<|vision_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151653": {
      "content": "<|vision_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151654": {
      "content": "<|vision_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151655": {
      "content": "<|image_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151656": {
      "content": "<|video_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151657": {
      "content": "<tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151658": {
      "content": "</tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151659": {
      "content": "<|fim_prefix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151660": {
      "content": "<|fim_middle|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151661": {
      "content": "<|fim_suffix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151662": {
      "content": "<|fim_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151663": {
      "content": "<|repo_name|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151664": {
      "content": "<|file_sep|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    }
  },
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "bos_token": null,
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "errors": "replace",
  "extra_special_tokens": {},
  "model_max_length": 131072,
  "pad_token": "<|endoftext|>",
  "padding_side": "right",
  "processor_class": "Qwen2_5_VLProcessor",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null
}
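The tokenizer config registers the Qwen2 control tokens (IDs 151643-151664), sets <|im_end|> as the EOS token and <|endoftext|> as padding, and caps model_max_length at 131072. A quick consistency check, assuming the SFT/ folder is available locally and loadable with transformers' AutoTokenizer:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("SFT")  # local path to the uploaded folder

# IDs below are taken from added_tokens_decoder in the config above.
assert tok.convert_tokens_to_ids("<|im_end|>") == 151645
assert tok.convert_tokens_to_ids("<|endoftext|>") == 151643
assert tok.eos_token == "<|im_end|>" and tok.pad_token == "<|endoftext|>"

# Vision placeholders used by the Qwen2.5-VL chat format are registered as special tokens.
for t in ("<|vision_start|>", "<|image_pad|>", "<|vision_end|>"):
    assert t in tok.additional_special_tokens

print("special-token wiring is consistent with tokenizer_config.json")
```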
SFT/train_results.json
ADDED
@@ -0,0 +1,8 @@
{
  "epoch": 3.0,
  "total_flos": 121880697913344.0,
  "train_loss": 0.6645450910134418,
  "train_runtime": 16043.0595,
  "train_samples_per_second": 1.177,
  "train_steps_per_second": 0.147
}
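These summary figures line up with the 2361-step schedule reported in the training log below: roughly 0.147 optimizer steps and 1.18 samples per second over the 16043 s run, which implies an effective batch of about 8 samples per optimizer step. A small cross-check using only values taken from this file and the log:

```python
# Figures copied from train_results.json and trainer_log.jsonl above/below.
total_steps = 2361
train_runtime = 16043.0595            # seconds
reported_samples_per_sec = 1.177
reported_steps_per_sec = 0.147

steps_per_sec = total_steps / train_runtime                  # ~0.147
effective_batch = reported_samples_per_sec / steps_per_sec   # ~8 samples per optimizer step

print(f"steps/s ~ {steps_per_sec:.3f} (reported {reported_steps_per_sec})")
print(f"effective batch ~ {effective_batch:.1f} samples per step")
```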
SFT/trainer_log.jsonl
ADDED
@@ -0,0 +1,237 @@
{"current_steps": 10, "total_steps": 2361, "loss": 2.2184, "lr": 5.070422535211268e-07, "epoch": 0.012706480304955527, "percentage": 0.42, "elapsed_time": "0:01:19", "remaining_time": "5:09:59"}
|
| 2 |
+
{"current_steps": 20, "total_steps": 2361, "loss": 1.8971, "lr": 1.0704225352112677e-06, "epoch": 0.025412960609911054, "percentage": 0.85, "elapsed_time": "0:02:25", "remaining_time": "4:43:59"}
|
| 3 |
+
{"current_steps": 30, "total_steps": 2361, "loss": 1.4505, "lr": 1.6338028169014086e-06, "epoch": 0.03811944091486658, "percentage": 1.27, "elapsed_time": "0:03:32", "remaining_time": "4:35:28"}
|
| 4 |
+
{"current_steps": 40, "total_steps": 2361, "loss": 1.2166, "lr": 2.19718309859155e-06, "epoch": 0.05082592121982211, "percentage": 1.69, "elapsed_time": "0:04:39", "remaining_time": "4:30:14"}
|
| 5 |
+
{"current_steps": 50, "total_steps": 2361, "loss": 1.1232, "lr": 2.7605633802816906e-06, "epoch": 0.06353240152477764, "percentage": 2.12, "elapsed_time": "0:05:46", "remaining_time": "4:26:38"}
|
| 6 |
+
{"current_steps": 60, "total_steps": 2361, "loss": 1.1167, "lr": 3.3239436619718313e-06, "epoch": 0.07623888182973317, "percentage": 2.54, "elapsed_time": "0:06:53", "remaining_time": "4:23:59"}
|
| 7 |
+
{"current_steps": 70, "total_steps": 2361, "loss": 1.0426, "lr": 3.887323943661972e-06, "epoch": 0.08894536213468869, "percentage": 2.96, "elapsed_time": "0:08:00", "remaining_time": "4:21:54"}
|
| 8 |
+
{"current_steps": 80, "total_steps": 2361, "loss": 0.9485, "lr": 4.450704225352113e-06, "epoch": 0.10165184243964422, "percentage": 3.39, "elapsed_time": "0:09:06", "remaining_time": "4:19:41"}
|
| 9 |
+
{"current_steps": 90, "total_steps": 2361, "loss": 0.971, "lr": 5.014084507042254e-06, "epoch": 0.11435832274459974, "percentage": 3.81, "elapsed_time": "0:10:13", "remaining_time": "4:17:51"}
|
| 10 |
+
{"current_steps": 100, "total_steps": 2361, "loss": 1.0133, "lr": 5.577464788732395e-06, "epoch": 0.12706480304955528, "percentage": 4.24, "elapsed_time": "0:11:20", "remaining_time": "4:16:16"}
|
| 11 |
+
{"current_steps": 110, "total_steps": 2361, "loss": 0.9729, "lr": 6.1408450704225356e-06, "epoch": 0.1397712833545108, "percentage": 4.66, "elapsed_time": "0:12:27", "remaining_time": "4:14:51"}
|
| 12 |
+
{"current_steps": 120, "total_steps": 2361, "loss": 0.9679, "lr": 6.704225352112676e-06, "epoch": 0.15247776365946633, "percentage": 5.08, "elapsed_time": "0:13:33", "remaining_time": "4:13:12"}
|
| 13 |
+
{"current_steps": 130, "total_steps": 2361, "loss": 0.9634, "lr": 7.267605633802818e-06, "epoch": 0.16518424396442186, "percentage": 5.51, "elapsed_time": "0:14:39", "remaining_time": "4:11:42"}
|
| 14 |
+
{"current_steps": 140, "total_steps": 2361, "loss": 0.9674, "lr": 7.830985915492958e-06, "epoch": 0.17789072426937738, "percentage": 5.93, "elapsed_time": "0:15:47", "remaining_time": "4:10:23"}
|
| 15 |
+
{"current_steps": 150, "total_steps": 2361, "loss": 0.9323, "lr": 8.3943661971831e-06, "epoch": 0.1905972045743329, "percentage": 6.35, "elapsed_time": "0:16:53", "remaining_time": "4:09:02"}
|
| 16 |
+
{"current_steps": 160, "total_steps": 2361, "loss": 0.9296, "lr": 8.957746478873241e-06, "epoch": 0.20330368487928843, "percentage": 6.78, "elapsed_time": "0:18:01", "remaining_time": "4:07:52"}
|
| 17 |
+
{"current_steps": 170, "total_steps": 2361, "loss": 0.8979, "lr": 9.521126760563381e-06, "epoch": 0.21601016518424396, "percentage": 7.2, "elapsed_time": "0:19:07", "remaining_time": "4:06:28"}
|
| 18 |
+
{"current_steps": 180, "total_steps": 2361, "loss": 0.9398, "lr": 1.0084507042253523e-05, "epoch": 0.22871664548919948, "percentage": 7.62, "elapsed_time": "0:20:15", "remaining_time": "4:05:23"}
|
| 19 |
+
{"current_steps": 190, "total_steps": 2361, "loss": 0.9903, "lr": 1.0647887323943662e-05, "epoch": 0.241423125794155, "percentage": 8.05, "elapsed_time": "0:21:22", "remaining_time": "4:04:18"}
|
| 20 |
+
{"current_steps": 200, "total_steps": 2361, "loss": 0.953, "lr": 1.1211267605633804e-05, "epoch": 0.25412960609911056, "percentage": 8.47, "elapsed_time": "0:22:29", "remaining_time": "4:02:59"}
|
| 21 |
+
{"current_steps": 210, "total_steps": 2361, "loss": 0.9014, "lr": 1.1774647887323944e-05, "epoch": 0.2668360864040661, "percentage": 8.89, "elapsed_time": "0:23:36", "remaining_time": "4:01:48"}
|
| 22 |
+
{"current_steps": 220, "total_steps": 2361, "loss": 0.9612, "lr": 1.2338028169014084e-05, "epoch": 0.2795425667090216, "percentage": 9.32, "elapsed_time": "0:24:43", "remaining_time": "4:00:40"}
|
| 23 |
+
{"current_steps": 230, "total_steps": 2361, "loss": 0.9492, "lr": 1.2901408450704227e-05, "epoch": 0.29224904701397714, "percentage": 9.74, "elapsed_time": "0:25:50", "remaining_time": "3:59:28"}
|
| 24 |
+
{"current_steps": 240, "total_steps": 2361, "loss": 0.9073, "lr": 1.3464788732394367e-05, "epoch": 0.30495552731893266, "percentage": 10.17, "elapsed_time": "0:26:57", "remaining_time": "3:58:13"}
|
| 25 |
+
{"current_steps": 250, "total_steps": 2361, "loss": 0.899, "lr": 1.4028169014084507e-05, "epoch": 0.3176620076238882, "percentage": 10.59, "elapsed_time": "0:28:04", "remaining_time": "3:57:01"}
|
| 26 |
+
{"current_steps": 260, "total_steps": 2361, "loss": 0.9147, "lr": 1.459154929577465e-05, "epoch": 0.3303684879288437, "percentage": 11.01, "elapsed_time": "0:29:10", "remaining_time": "3:55:43"}
|
| 27 |
+
{"current_steps": 270, "total_steps": 2361, "loss": 0.9637, "lr": 1.515492957746479e-05, "epoch": 0.34307496823379924, "percentage": 11.44, "elapsed_time": "0:30:16", "remaining_time": "3:54:27"}
|
| 28 |
+
{"current_steps": 280, "total_steps": 2361, "loss": 0.9521, "lr": 1.571830985915493e-05, "epoch": 0.35578144853875476, "percentage": 11.86, "elapsed_time": "0:31:22", "remaining_time": "3:53:13"}
|
| 29 |
+
{"current_steps": 290, "total_steps": 2361, "loss": 0.9738, "lr": 1.6281690140845072e-05, "epoch": 0.3684879288437103, "percentage": 12.28, "elapsed_time": "0:32:29", "remaining_time": "3:52:01"}
|
| 30 |
+
{"current_steps": 300, "total_steps": 2361, "loss": 1.0046, "lr": 1.6845070422535213e-05, "epoch": 0.3811944091486658, "percentage": 12.71, "elapsed_time": "0:33:36", "remaining_time": "3:50:50"}
|
| 31 |
+
{"current_steps": 310, "total_steps": 2361, "loss": 0.9481, "lr": 1.740845070422535e-05, "epoch": 0.39390088945362134, "percentage": 13.13, "elapsed_time": "0:34:42", "remaining_time": "3:49:39"}
|
| 32 |
+
{"current_steps": 320, "total_steps": 2361, "loss": 0.9588, "lr": 1.7971830985915497e-05, "epoch": 0.40660736975857686, "percentage": 13.55, "elapsed_time": "0:35:49", "remaining_time": "3:48:27"}
|
| 33 |
+
{"current_steps": 330, "total_steps": 2361, "loss": 0.9404, "lr": 1.8535211267605635e-05, "epoch": 0.4193138500635324, "percentage": 13.98, "elapsed_time": "0:36:56", "remaining_time": "3:47:21"}
|
| 34 |
+
{"current_steps": 340, "total_steps": 2361, "loss": 0.9385, "lr": 1.9098591549295776e-05, "epoch": 0.4320203303684879, "percentage": 14.4, "elapsed_time": "0:38:03", "remaining_time": "3:46:10"}
|
| 35 |
+
{"current_steps": 350, "total_steps": 2361, "loss": 0.9394, "lr": 1.9661971830985918e-05, "epoch": 0.44472681067344344, "percentage": 14.82, "elapsed_time": "0:39:09", "remaining_time": "3:45:00"}
|
| 36 |
+
{"current_steps": 360, "total_steps": 2361, "loss": 0.9764, "lr": 1.9999803787597817e-05, "epoch": 0.45743329097839897, "percentage": 15.25, "elapsed_time": "0:40:15", "remaining_time": "3:43:47"}
|
| 37 |
+
{"current_steps": 370, "total_steps": 2361, "loss": 0.9827, "lr": 1.9997596486500402e-05, "epoch": 0.4701397712833545, "percentage": 15.67, "elapsed_time": "0:41:22", "remaining_time": "3:42:40"}
|
| 38 |
+
{"current_steps": 380, "total_steps": 2361, "loss": 0.9291, "lr": 1.999293716197302e-05, "epoch": 0.48284625158831, "percentage": 16.09, "elapsed_time": "0:42:30", "remaining_time": "3:41:34"}
|
| 39 |
+
{"current_steps": 390, "total_steps": 2361, "loss": 1.0364, "lr": 1.998582695676762e-05, "epoch": 0.49555273189326554, "percentage": 16.52, "elapsed_time": "0:43:37", "remaining_time": "3:40:26"}
|
| 40 |
+
{"current_steps": 400, "total_steps": 2361, "loss": 0.9728, "lr": 1.997626761474232e-05, "epoch": 0.5082592121982211, "percentage": 16.94, "elapsed_time": "0:44:44", "remaining_time": "3:39:18"}
|
| 41 |
+
{"current_steps": 410, "total_steps": 2361, "loss": 0.957, "lr": 1.99642614804337e-05, "epoch": 0.5209656925031766, "percentage": 17.37, "elapsed_time": "0:45:50", "remaining_time": "3:38:08"}
|
| 42 |
+
{"current_steps": 420, "total_steps": 2361, "loss": 0.9856, "lr": 1.9949811498481763e-05, "epoch": 0.5336721728081322, "percentage": 17.79, "elapsed_time": "0:46:57", "remaining_time": "3:37:00"}
|
| 43 |
+
{"current_steps": 430, "total_steps": 2361, "loss": 1.0065, "lr": 1.9932921212907753e-05, "epoch": 0.5463786531130876, "percentage": 18.21, "elapsed_time": "0:48:04", "remaining_time": "3:35:52"}
|
| 44 |
+
{"current_steps": 440, "total_steps": 2361, "loss": 0.9175, "lr": 1.991359476624493e-05, "epoch": 0.5590851334180432, "percentage": 18.64, "elapsed_time": "0:49:10", "remaining_time": "3:34:42"}
|
| 45 |
+
{"current_steps": 450, "total_steps": 2361, "loss": 0.9014, "lr": 1.9891836898522566e-05, "epoch": 0.5717916137229987, "percentage": 19.06, "elapsed_time": "0:50:16", "remaining_time": "3:33:31"}
|
| 46 |
+
{"current_steps": 460, "total_steps": 2361, "loss": 0.9324, "lr": 1.9867652946103413e-05, "epoch": 0.5844980940279543, "percentage": 19.48, "elapsed_time": "0:51:23", "remaining_time": "3:32:21"}
|
| 47 |
+
{"current_steps": 470, "total_steps": 2361, "loss": 0.9311, "lr": 1.9841048840374885e-05, "epoch": 0.5972045743329097, "percentage": 19.91, "elapsed_time": "0:52:30", "remaining_time": "3:31:16"}
|
| 48 |
+
{"current_steps": 480, "total_steps": 2361, "loss": 0.952, "lr": 1.9812031106294314e-05, "epoch": 0.6099110546378653, "percentage": 20.33, "elapsed_time": "0:53:37", "remaining_time": "3:30:08"}
|
| 49 |
+
{"current_steps": 490, "total_steps": 2361, "loss": 0.9067, "lr": 1.978060686078866e-05, "epoch": 0.6226175349428208, "percentage": 20.75, "elapsed_time": "0:54:43", "remaining_time": "3:28:57"}
|
| 50 |
+
{"current_steps": 500, "total_steps": 2361, "loss": 0.9559, "lr": 1.974678381100896e-05, "epoch": 0.6353240152477764, "percentage": 21.18, "elapsed_time": "0:55:50", "remaining_time": "3:27:50"}
|
| 51 |
+
{"current_steps": 510, "total_steps": 2361, "loss": 0.9082, "lr": 1.9710570252440106e-05, "epoch": 0.6480304955527318, "percentage": 21.6, "elapsed_time": "0:56:57", "remaining_time": "3:26:44"}
|
| 52 |
+
{"current_steps": 520, "total_steps": 2361, "loss": 0.9241, "lr": 1.9671975066866254e-05, "epoch": 0.6607369758576874, "percentage": 22.02, "elapsed_time": "0:58:05", "remaining_time": "3:25:38"}
|
| 53 |
+
{"current_steps": 530, "total_steps": 2361, "loss": 0.8811, "lr": 1.9631007720192475e-05, "epoch": 0.6734434561626429, "percentage": 22.45, "elapsed_time": "0:59:11", "remaining_time": "3:24:30"}
|
| 54 |
+
{"current_steps": 540, "total_steps": 2361, "loss": 0.9314, "lr": 1.9587678260123146e-05, "epoch": 0.6861499364675985, "percentage": 22.87, "elapsed_time": "1:00:18", "remaining_time": "3:23:21"}
|
| 55 |
+
{"current_steps": 550, "total_steps": 2361, "loss": 0.9018, "lr": 1.9541997313697614e-05, "epoch": 0.6988564167725541, "percentage": 23.3, "elapsed_time": "1:01:25", "remaining_time": "3:22:13"}
|
| 56 |
+
{"current_steps": 560, "total_steps": 2361, "loss": 0.9349, "lr": 1.9493976084683814e-05, "epoch": 0.7115628970775095, "percentage": 23.72, "elapsed_time": "1:02:31", "remaining_time": "3:21:06"}
|
| 57 |
+
{"current_steps": 570, "total_steps": 2361, "loss": 0.9283, "lr": 1.9443626350830417e-05, "epoch": 0.7242693773824651, "percentage": 24.14, "elapsed_time": "1:03:38", "remaining_time": "3:19:58"}
|
| 58 |
+
{"current_steps": 580, "total_steps": 2361, "loss": 0.8936, "lr": 1.9390960460978188e-05, "epoch": 0.7369758576874206, "percentage": 24.57, "elapsed_time": "1:04:45", "remaining_time": "3:18:50"}
|
| 59 |
+
{"current_steps": 590, "total_steps": 2361, "loss": 0.9529, "lr": 1.933599133203131e-05, "epoch": 0.7496823379923762, "percentage": 24.99, "elapsed_time": "1:05:51", "remaining_time": "3:17:41"}
|
| 60 |
+
{"current_steps": 600, "total_steps": 2361, "loss": 0.8961, "lr": 1.9278732445789364e-05, "epoch": 0.7623888182973316, "percentage": 25.41, "elapsed_time": "1:06:58", "remaining_time": "3:16:34"}
|
| 61 |
+
{"current_steps": 610, "total_steps": 2361, "loss": 0.9028, "lr": 1.9219197845640766e-05, "epoch": 0.7750952986022872, "percentage": 25.84, "elapsed_time": "1:08:05", "remaining_time": "3:15:27"}
|
| 62 |
+
{"current_steps": 620, "total_steps": 2361, "loss": 0.9302, "lr": 1.9157402133118454e-05, "epoch": 0.7878017789072427, "percentage": 26.26, "elapsed_time": "1:09:12", "remaining_time": "3:14:19"}
|
| 63 |
+
{"current_steps": 630, "total_steps": 2361, "loss": 0.9233, "lr": 1.909336046431871e-05, "epoch": 0.8005082592121983, "percentage": 26.68, "elapsed_time": "1:10:19", "remaining_time": "3:13:12"}
|
| 64 |
+
{"current_steps": 640, "total_steps": 2361, "loss": 0.9694, "lr": 1.9027088546183968e-05, "epoch": 0.8132147395171537, "percentage": 27.11, "elapsed_time": "1:11:26", "remaining_time": "3:12:05"}
|
| 65 |
+
{"current_steps": 650, "total_steps": 2361, "loss": 0.9003, "lr": 1.8958602632650474e-05, "epoch": 0.8259212198221093, "percentage": 27.53, "elapsed_time": "1:12:33", "remaining_time": "3:11:00"}
|
| 66 |
+
{"current_steps": 660, "total_steps": 2361, "loss": 0.8805, "lr": 1.8887919520661867e-05, "epoch": 0.8386277001270648, "percentage": 27.95, "elapsed_time": "1:13:40", "remaining_time": "3:09:54"}
|
| 67 |
+
{"current_steps": 670, "total_steps": 2361, "loss": 0.9158, "lr": 1.8815056546049505e-05, "epoch": 0.8513341804320204, "percentage": 28.38, "elapsed_time": "1:14:47", "remaining_time": "3:08:47"}
|
| 68 |
+
{"current_steps": 680, "total_steps": 2361, "loss": 0.8835, "lr": 1.8740031579280667e-05, "epoch": 0.8640406607369758, "percentage": 28.8, "elapsed_time": "1:15:54", "remaining_time": "3:07:39"}
|
| 69 |
+
{"current_steps": 690, "total_steps": 2361, "loss": 0.898, "lr": 1.8662863021075632e-05, "epoch": 0.8767471410419314, "percentage": 29.22, "elapsed_time": "1:17:01", "remaining_time": "3:06:32"}
|
| 70 |
+
{"current_steps": 700, "total_steps": 2361, "loss": 0.9253, "lr": 1.8583569797894673e-05, "epoch": 0.8894536213468869, "percentage": 29.65, "elapsed_time": "1:18:09", "remaining_time": "3:05:26"}
|
| 71 |
+
{"current_steps": 710, "total_steps": 2361, "loss": 0.848, "lr": 1.8502171357296144e-05, "epoch": 0.9021601016518425, "percentage": 30.07, "elapsed_time": "1:19:16", "remaining_time": "3:04:19"}
|
| 72 |
+
{"current_steps": 720, "total_steps": 2361, "loss": 0.8965, "lr": 1.8418687663166745e-05, "epoch": 0.9148665819567979, "percentage": 30.5, "elapsed_time": "1:20:23", "remaining_time": "3:03:12"}
|
| 73 |
+
{"current_steps": 730, "total_steps": 2361, "loss": 0.8553, "lr": 1.833313919082515e-05, "epoch": 0.9275730622617535, "percentage": 30.92, "elapsed_time": "1:21:29", "remaining_time": "3:02:04"}
|
| 74 |
+
{"current_steps": 740, "total_steps": 2361, "loss": 0.8695, "lr": 1.8245546922000207e-05, "epoch": 0.940279542566709, "percentage": 31.34, "elapsed_time": "1:22:36", "remaining_time": "3:00:58"}
|
| 75 |
+
{"current_steps": 750, "total_steps": 2361, "loss": 0.8497, "lr": 1.815593233968492e-05, "epoch": 0.9529860228716646, "percentage": 31.77, "elapsed_time": "1:23:43", "remaining_time": "2:59:50"}
|
| 76 |
+
{"current_steps": 760, "total_steps": 2361, "loss": 0.8746, "lr": 1.806431742286752e-05, "epoch": 0.96569250317662, "percentage": 32.19, "elapsed_time": "1:24:50", "remaining_time": "2:58:43"}
|
| 77 |
+
{"current_steps": 770, "total_steps": 2361, "loss": 0.8708, "lr": 1.7970724641140864e-05, "epoch": 0.9783989834815756, "percentage": 32.61, "elapsed_time": "1:25:57", "remaining_time": "2:57:35"}
|
| 78 |
+
{"current_steps": 780, "total_steps": 2361, "loss": 0.94, "lr": 1.7875176949191506e-05, "epoch": 0.9911054637865311, "percentage": 33.04, "elapsed_time": "1:27:03", "remaining_time": "2:56:28"}
|
| 79 |
+
{"current_steps": 790, "total_steps": 2361, "loss": 0.8297, "lr": 1.7777697781169813e-05, "epoch": 1.0038119440914866, "percentage": 33.46, "elapsed_time": "1:28:11", "remaining_time": "2:55:22"}
|
| 80 |
+
{"current_steps": 800, "total_steps": 2361, "loss": 0.6761, "lr": 1.7678311044942464e-05, "epoch": 1.0165184243964422, "percentage": 33.88, "elapsed_time": "1:29:18", "remaining_time": "2:54:15"}
|
| 81 |
+
{"current_steps": 810, "total_steps": 2361, "loss": 0.6868, "lr": 1.757704111622878e-05, "epoch": 1.0292249047013977, "percentage": 34.31, "elapsed_time": "1:30:25", "remaining_time": "2:53:08"}
|
| 82 |
+
{"current_steps": 820, "total_steps": 2361, "loss": 0.6994, "lr": 1.747391283262231e-05, "epoch": 1.0419313850063532, "percentage": 34.73, "elapsed_time": "1:31:32", "remaining_time": "2:52:02"}
|
| 83 |
+
{"current_steps": 830, "total_steps": 2361, "loss": 0.7141, "lr": 1.736895148749911e-05, "epoch": 1.0546378653113089, "percentage": 35.15, "elapsed_time": "1:32:39", "remaining_time": "2:50:55"}
|
| 84 |
+
{"current_steps": 840, "total_steps": 2361, "loss": 0.6941, "lr": 1.7262182823814297e-05, "epoch": 1.0673443456162643, "percentage": 35.58, "elapsed_time": "1:33:46", "remaining_time": "2:49:47"}
|
| 85 |
+
{"current_steps": 850, "total_steps": 2361, "loss": 0.6662, "lr": 1.7153633027788252e-05, "epoch": 1.0800508259212198, "percentage": 36.0, "elapsed_time": "1:34:53", "remaining_time": "2:48:40"}
|
| 86 |
+
{"current_steps": 860, "total_steps": 2361, "loss": 0.6575, "lr": 1.704332872248418e-05, "epoch": 1.0927573062261753, "percentage": 36.43, "elapsed_time": "1:35:59", "remaining_time": "2:47:33"}
|
| 87 |
+
{"current_steps": 870, "total_steps": 2361, "loss": 0.6959, "lr": 1.69312969612785e-05, "epoch": 1.105463786531131, "percentage": 36.85, "elapsed_time": "1:37:06", "remaining_time": "2:46:25"}
|
| 88 |
+
{"current_steps": 880, "total_steps": 2361, "loss": 0.6701, "lr": 1.6817565221225698e-05, "epoch": 1.1181702668360864, "percentage": 37.27, "elapsed_time": "1:38:13", "remaining_time": "2:45:19"}
|
| 89 |
+
{"current_steps": 890, "total_steps": 2361, "loss": 0.7168, "lr": 1.6702161396319266e-05, "epoch": 1.130876747141042, "percentage": 37.7, "elapsed_time": "1:39:21", "remaining_time": "2:44:13"}
|
| 90 |
+
{"current_steps": 900, "total_steps": 2361, "loss": 0.7087, "lr": 1.658511379065039e-05, "epoch": 1.1435832274459974, "percentage": 38.12, "elapsed_time": "1:40:29", "remaining_time": "2:43:07"}
|
| 91 |
+
{"current_steps": 910, "total_steps": 2361, "loss": 0.7509, "lr": 1.6466451111466044e-05, "epoch": 1.156289707750953, "percentage": 38.54, "elapsed_time": "1:41:36", "remaining_time": "2:42:00"}
|
| 92 |
+
{"current_steps": 920, "total_steps": 2361, "loss": 0.6793, "lr": 1.6346202462128228e-05, "epoch": 1.1689961880559085, "percentage": 38.97, "elapsed_time": "1:42:43", "remaining_time": "2:40:54"}
|
| 93 |
+
{"current_steps": 930, "total_steps": 2361, "loss": 0.7172, "lr": 1.6224397334976023e-05, "epoch": 1.181702668360864, "percentage": 39.39, "elapsed_time": "1:43:50", "remaining_time": "2:39:46"}
|
| 94 |
+
{"current_steps": 940, "total_steps": 2361, "loss": 0.6759, "lr": 1.610106560409227e-05, "epoch": 1.1944091486658195, "percentage": 39.81, "elapsed_time": "1:44:56", "remaining_time": "2:38:38"}
|
| 95 |
+
{"current_steps": 950, "total_steps": 2361, "loss": 0.6822, "lr": 1.597623751797662e-05, "epoch": 1.2071156289707752, "percentage": 40.24, "elapsed_time": "1:46:04", "remaining_time": "2:37:32"}
|
| 96 |
+
{"current_steps": 960, "total_steps": 2361, "loss": 0.7034, "lr": 1.584994369212673e-05, "epoch": 1.2198221092757306, "percentage": 40.66, "elapsed_time": "1:47:11", "remaining_time": "2:36:25"}
|
| 97 |
+
{"current_steps": 970, "total_steps": 2361, "loss": 0.767, "lr": 1.572221510152949e-05, "epoch": 1.2325285895806861, "percentage": 41.08, "elapsed_time": "1:48:18", "remaining_time": "2:35:18"}
|
| 98 |
+
{"current_steps": 980, "total_steps": 2361, "loss": 0.7358, "lr": 1.5593083073064037e-05, "epoch": 1.2452350698856416, "percentage": 41.51, "elapsed_time": "1:49:24", "remaining_time": "2:34:10"}
|
| 99 |
+
{"current_steps": 990, "total_steps": 2361, "loss": 0.7336, "lr": 1.5462579277818498e-05, "epoch": 1.2579415501905973, "percentage": 41.93, "elapsed_time": "1:50:31", "remaining_time": "2:33:04"}
|
| 100 |
+
{"current_steps": 1000, "total_steps": 2361, "loss": 0.7102, "lr": 1.5330735723322282e-05, "epoch": 1.2706480304955527, "percentage": 42.35, "elapsed_time": "1:51:38", "remaining_time": "2:31:56"}
|
| 101 |
+
{"current_steps": 1010, "total_steps": 2361, "loss": 0.7053, "lr": 1.5197584745695904e-05, "epoch": 1.2833545108005082, "percentage": 42.78, "elapsed_time": "1:52:45", "remaining_time": "2:30:50"}
|
| 102 |
+
{"current_steps": 1020, "total_steps": 2361, "loss": 0.7223, "lr": 1.506315900172014e-05, "epoch": 1.2960609911054637, "percentage": 43.2, "elapsed_time": "1:53:52", "remaining_time": "2:29:42"}
|
| 103 |
+
{"current_steps": 1030, "total_steps": 2361, "loss": 0.7185, "lr": 1.4927491460826626e-05, "epoch": 1.3087674714104194, "percentage": 43.63, "elapsed_time": "1:54:58", "remaining_time": "2:28:34"}
|
| 104 |
+
{"current_steps": 1040, "total_steps": 2361, "loss": 0.6293, "lr": 1.4790615397011703e-05, "epoch": 1.3214739517153749, "percentage": 44.05, "elapsed_time": "1:56:05", "remaining_time": "2:27:27"}
|
| 105 |
+
{"current_steps": 1050, "total_steps": 2361, "loss": 0.7111, "lr": 1.4652564380675616e-05, "epoch": 1.3341804320203303, "percentage": 44.47, "elapsed_time": "1:57:13", "remaining_time": "2:26:21"}
|
| 106 |
+
{"current_steps": 1060, "total_steps": 2361, "loss": 0.6926, "lr": 1.4513372270388967e-05, "epoch": 1.346886912325286, "percentage": 44.9, "elapsed_time": "1:58:20", "remaining_time": "2:25:15"}
|
| 107 |
+
{"current_steps": 1070, "total_steps": 2361, "loss": 0.7126, "lr": 1.4373073204588556e-05, "epoch": 1.3595933926302415, "percentage": 45.32, "elapsed_time": "1:59:28", "remaining_time": "2:24:08"}
|
| 108 |
+
{"current_steps": 1080, "total_steps": 2361, "loss": 0.6873, "lr": 1.42317015932045e-05, "epoch": 1.372299872935197, "percentage": 45.74, "elapsed_time": "2:00:35", "remaining_time": "2:23:01"}
|
| 109 |
+
{"current_steps": 1090, "total_steps": 2361, "loss": 0.7642, "lr": 1.4089292109220852e-05, "epoch": 1.3850063532401524, "percentage": 46.17, "elapsed_time": "2:01:42", "remaining_time": "2:21:55"}
|
| 110 |
+
{"current_steps": 1100, "total_steps": 2361, "loss": 0.6799, "lr": 1.394587968017162e-05, "epoch": 1.397712833545108, "percentage": 46.59, "elapsed_time": "2:02:49", "remaining_time": "2:20:48"}
|
| 111 |
+
{"current_steps": 1110, "total_steps": 2361, "loss": 0.6536, "lr": 1.3801499479574431e-05, "epoch": 1.4104193138500636, "percentage": 47.01, "elapsed_time": "2:03:56", "remaining_time": "2:19:41"}
|
| 112 |
+
{"current_steps": 1120, "total_steps": 2361, "loss": 0.7092, "lr": 1.3656186918303804e-05, "epoch": 1.423125794155019, "percentage": 47.44, "elapsed_time": "2:05:03", "remaining_time": "2:18:34"}
|
| 113 |
+
{"current_steps": 1130, "total_steps": 2361, "loss": 0.6536, "lr": 1.3509977635906241e-05, "epoch": 1.4358322744599745, "percentage": 47.86, "elapsed_time": "2:06:10", "remaining_time": "2:17:26"}
|
| 114 |
+
{"current_steps": 1140, "total_steps": 2361, "loss": 0.6474, "lr": 1.3362907491859227e-05, "epoch": 1.4485387547649302, "percentage": 48.28, "elapsed_time": "2:07:17", "remaining_time": "2:16:19"}
|
| 115 |
+
{"current_steps": 1150, "total_steps": 2361, "loss": 0.715, "lr": 1.3215012556776287e-05, "epoch": 1.4612452350698857, "percentage": 48.71, "elapsed_time": "2:08:23", "remaining_time": "2:15:12"}
|
| 116 |
+
{"current_steps": 1160, "total_steps": 2361, "loss": 0.715, "lr": 1.3066329103560267e-05, "epoch": 1.4739517153748412, "percentage": 49.13, "elapsed_time": "2:09:30", "remaining_time": "2:14:05"}
|
| 117 |
+
{"current_steps": 1170, "total_steps": 2361, "loss": 0.6217, "lr": 1.2916893598506981e-05, "epoch": 1.4866581956797966, "percentage": 49.56, "elapsed_time": "2:10:37", "remaining_time": "2:12:58"}
|
| 118 |
+
{"current_steps": 1180, "total_steps": 2361, "loss": 0.7366, "lr": 1.276674269236145e-05, "epoch": 1.499364675984752, "percentage": 49.98, "elapsed_time": "2:11:44", "remaining_time": "2:11:51"}
|
| 119 |
+
{"current_steps": 1190, "total_steps": 2361, "loss": 0.6939, "lr": 1.2615913211328894e-05, "epoch": 1.5120711562897078, "percentage": 50.4, "elapsed_time": "2:12:52", "remaining_time": "2:10:44"}
|
| 120 |
+
{"current_steps": 1200, "total_steps": 2361, "loss": 0.6919, "lr": 1.2464442148042679e-05, "epoch": 1.5247776365946633, "percentage": 50.83, "elapsed_time": "2:13:58", "remaining_time": "2:09:37"}
|
| 121 |
+
{"current_steps": 1210, "total_steps": 2361, "loss": 0.6791, "lr": 1.2312366652491476e-05, "epoch": 1.537484116899619, "percentage": 51.25, "elapsed_time": "2:15:05", "remaining_time": "2:08:30"}
|
| 122 |
+
{"current_steps": 1220, "total_steps": 2361, "loss": 0.6574, "lr": 1.2159724022907786e-05, "epoch": 1.5501905972045744, "percentage": 51.67, "elapsed_time": "2:16:12", "remaining_time": "2:07:23"}
|
| 123 |
+
{"current_steps": 1230, "total_steps": 2361, "loss": 0.701, "lr": 1.2006551696620135e-05, "epoch": 1.5628970775095299, "percentage": 52.1, "elapsed_time": "2:17:18", "remaining_time": "2:06:15"}
|
| 124 |
+
{"current_steps": 1240, "total_steps": 2361, "loss": 0.6546, "lr": 1.1852887240871145e-05, "epoch": 1.5756035578144854, "percentage": 52.52, "elapsed_time": "2:18:25", "remaining_time": "2:05:08"}
|
| 125 |
+
{"current_steps": 1250, "total_steps": 2361, "loss": 0.6643, "lr": 1.1698768343603753e-05, "epoch": 1.5883100381194408, "percentage": 52.94, "elapsed_time": "2:19:32", "remaining_time": "2:04:01"}
|
| 126 |
+
{"current_steps": 1260, "total_steps": 2361, "loss": 0.6982, "lr": 1.1544232804217805e-05, "epoch": 1.6010165184243963, "percentage": 53.37, "elapsed_time": "2:20:39", "remaining_time": "2:02:54"}
|
| 127 |
+
{"current_steps": 1270, "total_steps": 2361, "loss": 0.6591, "lr": 1.1389318524299332e-05, "epoch": 1.613722998729352, "percentage": 53.79, "elapsed_time": "2:21:46", "remaining_time": "2:01:47"}
|
| 128 |
+
{"current_steps": 1280, "total_steps": 2361, "loss": 0.6743, "lr": 1.1234063498324764e-05, "epoch": 1.6264294790343075, "percentage": 54.21, "elapsed_time": "2:22:53", "remaining_time": "2:00:40"}
|
| 129 |
+
{"current_steps": 1290, "total_steps": 2361, "loss": 0.7147, "lr": 1.1078505804342327e-05, "epoch": 1.6391359593392631, "percentage": 54.64, "elapsed_time": "2:23:59", "remaining_time": "1:59:32"}
|
| 130 |
+
{"current_steps": 1300, "total_steps": 2361, "loss": 0.671, "lr": 1.092268359463302e-05, "epoch": 1.6518424396442186, "percentage": 55.06, "elapsed_time": "2:25:06", "remaining_time": "1:58:25"}
|
| 131 |
+
{"current_steps": 1310, "total_steps": 2361, "loss": 0.6713, "lr": 1.0766635086353298e-05, "epoch": 1.664548919949174, "percentage": 55.48, "elapsed_time": "2:26:14", "remaining_time": "1:57:19"}
|
| 132 |
+
{"current_steps": 1320, "total_steps": 2361, "loss": 0.6629, "lr": 1.06103985521619e-05, "epoch": 1.6772554002541296, "percentage": 55.91, "elapsed_time": "2:27:21", "remaining_time": "1:56:12"}
|
| 133 |
+
{"current_steps": 1330, "total_steps": 2361, "loss": 0.7035, "lr": 1.0454012310833034e-05, "epoch": 1.689961880559085, "percentage": 56.33, "elapsed_time": "2:28:27", "remaining_time": "1:55:05"}
|
| 134 |
+
{"current_steps": 1340, "total_steps": 2361, "loss": 0.6631, "lr": 1.0297514717858286e-05, "epoch": 1.7026683608640405, "percentage": 56.76, "elapsed_time": "2:29:34", "remaining_time": "1:53:58"}
|
| 135 |
+
{"current_steps": 1350, "total_steps": 2361, "loss": 0.685, "lr": 1.0140944156039481e-05, "epoch": 1.7153748411689962, "percentage": 57.18, "elapsed_time": "2:30:41", "remaining_time": "1:52:51"}
|
| 136 |
+
{"current_steps": 1360, "total_steps": 2361, "loss": 0.6549, "lr": 9.984339026074881e-06, "epoch": 1.7280813214739519, "percentage": 57.6, "elapsed_time": "2:31:48", "remaining_time": "1:51:44"}
|
| 137 |
+
{"current_steps": 1370, "total_steps": 2361, "loss": 0.6467, "lr": 9.827737737140983e-06, "epoch": 1.7407878017789074, "percentage": 58.03, "elapsed_time": "2:32:55", "remaining_time": "1:50:37"}
|
| 138 |
+
{"current_steps": 1380, "total_steps": 2361, "loss": 0.6543, "lr": 9.671178697472217e-06, "epoch": 1.7534942820838628, "percentage": 58.45, "elapsed_time": "2:34:02", "remaining_time": "1:49:30"}
|
| 139 |
+
{"current_steps": 1390, "total_steps": 2361, "loss": 0.6922, "lr": 9.514700304940901e-06, "epoch": 1.7662007623888183, "percentage": 58.87, "elapsed_time": "2:35:10", "remaining_time": "1:48:23"}
|
| 140 |
+
{"current_steps": 1400, "total_steps": 2361, "loss": 0.6557, "lr": 9.358340937639746e-06, "epoch": 1.7789072426937738, "percentage": 59.3, "elapsed_time": "2:36:17", "remaining_time": "1:47:16"}
|
| 141 |
+
{"current_steps": 1410, "total_steps": 2361, "loss": 0.688, "lr": 9.202138944469168e-06, "epoch": 1.7916137229987292, "percentage": 59.72, "elapsed_time": "2:37:24", "remaining_time": "1:46:09"}
|
| 142 |
+
{"current_steps": 1420, "total_steps": 2361, "loss": 0.6675, "lr": 9.046132635731816e-06, "epoch": 1.804320203303685, "percentage": 60.14, "elapsed_time": "2:38:30", "remaining_time": "1:45:02"}
|
| 143 |
+
{"current_steps": 1430, "total_steps": 2361, "loss": 0.6584, "lr": 8.890360273736504e-06, "epoch": 1.8170266836086404, "percentage": 60.57, "elapsed_time": "2:39:37", "remaining_time": "1:43:55"}
|
| 144 |
+
{"current_steps": 1440, "total_steps": 2361, "loss": 0.6735, "lr": 8.734860063413974e-06, "epoch": 1.829733163913596, "percentage": 60.99, "elapsed_time": "2:40:44", "remaining_time": "1:42:48"}
|
| 145 |
+
{"current_steps": 1450, "total_steps": 2361, "loss": 0.7102, "lr": 8.579670142946701e-06, "epoch": 1.8424396442185516, "percentage": 61.41, "elapsed_time": "2:41:50", "remaining_time": "1:41:41"}
|
| 146 |
+
{"current_steps": 1460, "total_steps": 2361, "loss": 0.6749, "lr": 8.42482857441506e-06, "epoch": 1.855146124523507, "percentage": 61.84, "elapsed_time": "2:42:57", "remaining_time": "1:40:34"}
|
| 147 |
+
{"current_steps": 1470, "total_steps": 2361, "loss": 0.672, "lr": 8.270373334462193e-06, "epoch": 1.8678526048284625, "percentage": 62.26, "elapsed_time": "2:44:05", "remaining_time": "1:39:27"}
|
| 148 |
+
{"current_steps": 1480, "total_steps": 2361, "loss": 0.6863, "lr": 8.116342304979783e-06, "epoch": 1.880559085133418, "percentage": 62.69, "elapsed_time": "2:45:11", "remaining_time": "1:38:20"}
|
| 149 |
+
{"current_steps": 1490, "total_steps": 2361, "loss": 0.6815, "lr": 7.962773263817114e-06, "epoch": 1.8932655654383734, "percentage": 63.11, "elapsed_time": "2:46:18", "remaining_time": "1:37:13"}
|
| 150 |
+
{"current_steps": 1500, "total_steps": 2361, "loss": 0.6417, "lr": 7.809703875515613e-06, "epoch": 1.9059720457433291, "percentage": 63.53, "elapsed_time": "2:47:26", "remaining_time": "1:36:06"}
|
| 151 |
+
{"current_steps": 1510, "total_steps": 2361, "loss": 0.62, "lr": 7.657171682071198e-06, "epoch": 1.9186785260482846, "percentage": 63.96, "elapsed_time": "2:48:33", "remaining_time": "1:34:59"}
|
| 152 |
+
{"current_steps": 1520, "total_steps": 2361, "loss": 0.6439, "lr": 7.505214093726692e-06, "epoch": 1.9313850063532403, "percentage": 64.38, "elapsed_time": "2:49:40", "remaining_time": "1:33:52"}
|
| 153 |
+
{"current_steps": 1530, "total_steps": 2361, "loss": 0.6705, "lr": 7.353868379796518e-06, "epoch": 1.9440914866581958, "percentage": 64.8, "elapsed_time": "2:50:47", "remaining_time": "1:32:45"}
|
| 154 |
+
{"current_steps": 1540, "total_steps": 2361, "loss": 0.6324, "lr": 7.203171659526e-06, "epoch": 1.9567979669631512, "percentage": 65.23, "elapsed_time": "2:51:54", "remaining_time": "1:31:38"}
|
| 155 |
+
{"current_steps": 1550, "total_steps": 2361, "loss": 0.6757, "lr": 7.053160892987434e-06, "epoch": 1.9695044472681067, "percentage": 65.65, "elapsed_time": "2:53:01", "remaining_time": "1:30:32"}
|
| 156 |
+
{"current_steps": 1560, "total_steps": 2361, "loss": 0.6456, "lr": 6.903872872015209e-06, "epoch": 1.9822109275730622, "percentage": 66.07, "elapsed_time": "2:54:08", "remaining_time": "1:29:25"}
|
| 157 |
+
{"current_steps": 1570, "total_steps": 2361, "loss": 0.6166, "lr": 6.755344211182221e-06, "epoch": 1.9949174078780176, "percentage": 66.5, "elapsed_time": "2:55:15", "remaining_time": "1:28:17"}
|
| 158 |
+
{"current_steps": 1580, "total_steps": 2361, "loss": 0.5016, "lr": 6.607611338819697e-06, "epoch": 2.007623888182973, "percentage": 66.92, "elapsed_time": "2:56:26", "remaining_time": "1:27:12"}
|
| 159 |
+
{"current_steps": 1590, "total_steps": 2361, "loss": 0.374, "lr": 6.460710488082774e-06, "epoch": 2.020330368487929, "percentage": 67.34, "elapsed_time": "2:57:33", "remaining_time": "1:26:05"}
|
| 160 |
+
{"current_steps": 1600, "total_steps": 2361, "loss": 0.3524, "lr": 6.31467768806388e-06, "epoch": 2.0330368487928845, "percentage": 67.77, "elapsed_time": "2:58:40", "remaining_time": "1:24:58"}
|
| 161 |
+
{"current_steps": 1610, "total_steps": 2361, "loss": 0.3485, "lr": 6.169548754956201e-06, "epoch": 2.04574332909784, "percentage": 68.19, "elapsed_time": "2:59:47", "remaining_time": "1:23:52"}
|
| 162 |
+
{"current_steps": 1620, "total_steps": 2361, "loss": 0.348, "lr": 6.025359283269363e-06, "epoch": 2.0584498094027954, "percentage": 68.61, "elapsed_time": "3:00:55", "remaining_time": "1:22:45"}
|
| 163 |
+
{"current_steps": 1630, "total_steps": 2361, "loss": 0.3753, "lr": 5.882144637099465e-06, "epoch": 2.071156289707751, "percentage": 69.04, "elapsed_time": "3:02:03", "remaining_time": "1:21:38"}
|
| 164 |
+
{"current_steps": 1640, "total_steps": 2361, "loss": 0.3526, "lr": 5.739939941455644e-06, "epoch": 2.0838627700127064, "percentage": 69.46, "elapsed_time": "3:03:10", "remaining_time": "1:20:31"}
|
| 165 |
+
{"current_steps": 1650, "total_steps": 2361, "loss": 0.3543, "lr": 5.598780073645267e-06, "epoch": 2.096569250317662, "percentage": 69.89, "elapsed_time": "3:04:17", "remaining_time": "1:19:24"}
|
| 166 |
+
{"current_steps": 1660, "total_steps": 2361, "loss": 0.3642, "lr": 5.458699654719873e-06, "epoch": 2.1092757306226178, "percentage": 70.31, "elapsed_time": "3:05:23", "remaining_time": "1:18:17"}
|
| 167 |
+
{"current_steps": 1670, "total_steps": 2361, "loss": 0.3428, "lr": 5.319733040983972e-06, "epoch": 2.121982210927573, "percentage": 70.73, "elapsed_time": "3:06:30", "remaining_time": "1:17:10"}
|
| 168 |
+
{"current_steps": 1680, "total_steps": 2361, "loss": 0.3403, "lr": 5.181914315568782e-06, "epoch": 2.1346886912325287, "percentage": 71.16, "elapsed_time": "3:07:36", "remaining_time": "1:16:02"}
|
| 169 |
+
{"current_steps": 1690, "total_steps": 2361, "loss": 0.3469, "lr": 5.0452772800729375e-06, "epoch": 2.147395171537484, "percentage": 71.58, "elapsed_time": "3:08:43", "remaining_time": "1:14:55"}
|
| 170 |
+
{"current_steps": 1700, "total_steps": 2361, "loss": 0.3454, "lr": 4.909855446272288e-06, "epoch": 2.1601016518424396, "percentage": 72.0, "elapsed_time": "3:09:49", "remaining_time": "1:13:48"}
|
| 171 |
+
{"current_steps": 1710, "total_steps": 2361, "loss": 0.341, "lr": 4.775682027900739e-06, "epoch": 2.172808132147395, "percentage": 72.43, "elapsed_time": "3:10:56", "remaining_time": "1:12:41"}
|
| 172 |
+
{"current_steps": 1720, "total_steps": 2361, "loss": 0.3352, "lr": 4.6427899325042135e-06, "epoch": 2.1855146124523506, "percentage": 72.85, "elapsed_time": "3:12:03", "remaining_time": "1:11:34"}
|
| 173 |
+
{"current_steps": 1730, "total_steps": 2361, "loss": 0.3447, "lr": 4.511211753369712e-06, "epoch": 2.198221092757306, "percentage": 73.27, "elapsed_time": "3:13:10", "remaining_time": "1:10:27"}
|
| 174 |
+
{"current_steps": 1740, "total_steps": 2361, "loss": 0.3531, "lr": 4.380979761531431e-06, "epoch": 2.210927573062262, "percentage": 73.7, "elapsed_time": "3:14:17", "remaining_time": "1:09:20"}
|
| 175 |
+
{"current_steps": 1750, "total_steps": 2361, "loss": 0.356, "lr": 4.2521258978559324e-06, "epoch": 2.2236340533672174, "percentage": 74.12, "elapsed_time": "3:15:24", "remaining_time": "1:08:13"}
|
| 176 |
+
{"current_steps": 1760, "total_steps": 2361, "loss": 0.3266, "lr": 4.124681765208286e-06, "epoch": 2.236340533672173, "percentage": 74.54, "elapsed_time": "3:16:32", "remaining_time": "1:07:06"}
|
| 177 |
+
{"current_steps": 1770, "total_steps": 2361, "loss": 0.3386, "lr": 3.998678620701102e-06, "epoch": 2.2490470139771284, "percentage": 74.97, "elapsed_time": "3:17:38", "remaining_time": "1:05:59"}
|
| 178 |
+
{"current_steps": 1780, "total_steps": 2361, "loss": 0.3544, "lr": 3.874147368028396e-06, "epoch": 2.261753494282084, "percentage": 75.39, "elapsed_time": "3:18:45", "remaining_time": "1:04:52"}
|
| 179 |
+
{"current_steps": 1790, "total_steps": 2361, "loss": 0.3227, "lr": 3.751118549886065e-06, "epoch": 2.2744599745870393, "percentage": 75.82, "elapsed_time": "3:19:52", "remaining_time": "1:03:45"}
|
| 180 |
+
{"current_steps": 1800, "total_steps": 2361, "loss": 0.3399, "lr": 3.6296223404809903e-06, "epoch": 2.2871664548919948, "percentage": 76.24, "elapsed_time": "3:20:59", "remaining_time": "1:02:38"}
|
| 181 |
+
{"current_steps": 1810, "total_steps": 2361, "loss": 0.3369, "lr": 3.509688538130448e-06, "epoch": 2.2998729351969507, "percentage": 76.66, "elapsed_time": "3:22:06", "remaining_time": "1:01:31"}
|
| 182 |
+
{"current_steps": 1820, "total_steps": 2361, "loss": 0.341, "lr": 3.39134655795374e-06, "epoch": 2.312579415501906, "percentage": 77.09, "elapsed_time": "3:23:14", "remaining_time": "1:00:24"}
|
| 183 |
+
{"current_steps": 1830, "total_steps": 2361, "loss": 0.3365, "lr": 3.2746254246578167e-06, "epoch": 2.3252858958068616, "percentage": 77.51, "elapsed_time": "3:24:21", "remaining_time": "0:59:17"}
|
| 184 |
+
{"current_steps": 1840, "total_steps": 2361, "loss": 0.3546, "lr": 3.1595537654186114e-06, "epoch": 2.337992376111817, "percentage": 77.93, "elapsed_time": "3:25:29", "remaining_time": "0:58:11"}
|
| 185 |
+
{"current_steps": 1850, "total_steps": 2361, "loss": 0.3431, "lr": 3.0461598028599305e-06, "epoch": 2.3506988564167726, "percentage": 78.36, "elapsed_time": "3:26:36", "remaining_time": "0:57:04"}
|
| 186 |
+
{"current_steps": 1860, "total_steps": 2361, "loss": 0.3303, "lr": 2.9344713481315225e-06, "epoch": 2.363405336721728, "percentage": 78.78, "elapsed_time": "3:27:43", "remaining_time": "0:55:56"}
|
| 187 |
+
{"current_steps": 1870, "total_steps": 2361, "loss": 0.3337, "lr": 2.8245157940880784e-06, "epoch": 2.3761118170266835, "percentage": 79.2, "elapsed_time": "3:28:50", "remaining_time": "0:54:50"}
|
| 188 |
+
{"current_steps": 1880, "total_steps": 2361, "loss": 0.3223, "lr": 2.7163201085708424e-06, "epoch": 2.388818297331639, "percentage": 79.63, "elapsed_time": "3:29:57", "remaining_time": "0:53:43"}
|
| 189 |
+
{"current_steps": 1890, "total_steps": 2361, "loss": 0.3398, "lr": 2.6099108277934105e-06, "epoch": 2.4015247776365944, "percentage": 80.05, "elapsed_time": "3:31:04", "remaining_time": "0:52:36"}
|
| 190 |
+
{"current_steps": 1900, "total_steps": 2361, "loss": 0.3483, "lr": 2.505314049833457e-06, "epoch": 2.4142312579415504, "percentage": 80.47, "elapsed_time": "3:32:11", "remaining_time": "0:51:29"}
|
| 191 |
+
{"current_steps": 1910, "total_steps": 2361, "loss": 0.3273, "lr": 2.402555428231872e-06, "epoch": 2.426937738246506, "percentage": 80.9, "elapsed_time": "3:33:17", "remaining_time": "0:50:21"}
|
| 192 |
+
{"current_steps": 1920, "total_steps": 2361, "loss": 0.3374, "lr": 2.3016601657009364e-06, "epoch": 2.4396442185514613, "percentage": 81.32, "elapsed_time": "3:34:24", "remaining_time": "0:49:14"}
|
| 193 |
+
{"current_steps": 1930, "total_steps": 2361, "loss": 0.3464, "lr": 2.202653007943093e-06, "epoch": 2.4523506988564168, "percentage": 81.75, "elapsed_time": "3:35:31", "remaining_time": "0:48:07"}
|
| 194 |
+
{"current_steps": 1940, "total_steps": 2361, "loss": 0.325, "lr": 2.1055582375817475e-06, "epoch": 2.4650571791613722, "percentage": 82.17, "elapsed_time": "3:36:38", "remaining_time": "0:47:00"}
|
| 195 |
+
{"current_steps": 1950, "total_steps": 2361, "loss": 0.3255, "lr": 2.0103996682057235e-06, "epoch": 2.4777636594663277, "percentage": 82.59, "elapsed_time": "3:37:44", "remaining_time": "0:45:53"}
|
| 196 |
+
{"current_steps": 1960, "total_steps": 2361, "loss": 0.3391, "lr": 1.9172006385286723e-06, "epoch": 2.490470139771283, "percentage": 83.02, "elapsed_time": "3:38:51", "remaining_time": "0:44:46"}
|
| 197 |
+
{"current_steps": 1970, "total_steps": 2361, "loss": 0.3389, "lr": 1.8259840066650136e-06, "epoch": 2.503176620076239, "percentage": 83.44, "elapsed_time": "3:39:58", "remaining_time": "0:43:39"}
|
| 198 |
+
{"current_steps": 1980, "total_steps": 2361, "loss": 0.3258, "lr": 1.7367721445237285e-06, "epoch": 2.5158831003811946, "percentage": 83.86, "elapsed_time": "3:41:05", "remaining_time": "0:42:32"}
|
| 199 |
+
{"current_steps": 1990, "total_steps": 2361, "loss": 0.3185, "lr": 1.6495869323213654e-06, "epoch": 2.52858958068615, "percentage": 84.29, "elapsed_time": "3:42:12", "remaining_time": "0:41:25"}
|
| 200 |
+
{"current_steps": 2000, "total_steps": 2361, "loss": 0.3247, "lr": 1.564449753215711e-06, "epoch": 2.5412960609911055, "percentage": 84.71, "elapsed_time": "3:43:18", "remaining_time": "0:40:18"}
|
| 201 |
+
{"current_steps": 2010, "total_steps": 2361, "loss": 0.3198, "lr": 1.4813814880612942e-06, "epoch": 2.554002541296061, "percentage": 85.13, "elapsed_time": "3:44:25", "remaining_time": "0:39:11"}
|
| 202 |
+
{"current_steps": 2020, "total_steps": 2361, "loss": 0.3143, "lr": 1.4004025102881402e-06, "epoch": 2.5667090216010164, "percentage": 85.56, "elapsed_time": "3:45:32", "remaining_time": "0:38:04"}
|
| 203 |
+
{"current_steps": 2030, "total_steps": 2361, "loss": 0.3312, "lr": 1.321532680904959e-06, "epoch": 2.579415501905972, "percentage": 85.98, "elapsed_time": "3:46:38", "remaining_time": "0:36:57"}
|
| 204 |
+
{"current_steps": 2040, "total_steps": 2361, "loss": 0.3129, "lr": 1.2447913436279879e-06, "epoch": 2.5921219822109274, "percentage": 86.4, "elapsed_time": "3:47:45", "remaining_time": "0:35:50"}
|
| 205 |
+
{"current_steps": 2050, "total_steps": 2361, "loss": 0.3253, "lr": 1.1701973201367544e-06, "epoch": 2.604828462515883, "percentage": 86.83, "elapsed_time": "3:48:53", "remaining_time": "0:34:43"}
|
| 206 |
+
{"current_steps": 2060, "total_steps": 2361, "loss": 0.3531, "lr": 1.09776890545782e-06, "epoch": 2.6175349428208388, "percentage": 87.25, "elapsed_time": "3:50:00", "remaining_time": "0:33:36"}
|
| 207 |
+
{"current_steps": 2070, "total_steps": 2361, "loss": 0.3105, "lr": 1.0275238634777441e-06, "epoch": 2.6302414231257942, "percentage": 87.67, "elapsed_time": "3:51:06", "remaining_time": "0:32:29"}
|
| 208 |
+
{"current_steps": 2080, "total_steps": 2361, "loss": 0.3331, "lr": 9.594794225862692e-07, "epoch": 2.6429479034307497, "percentage": 88.1, "elapsed_time": "3:52:13", "remaining_time": "0:31:22"}
|
| 209 |
+
{"current_steps": 2090, "total_steps": 2361, "loss": 0.3336, "lr": 8.936522714508678e-07, "epoch": 2.655654383735705, "percentage": 88.52, "elapsed_time": "3:53:20", "remaining_time": "0:30:15"}
|
| 210 |
+
{"current_steps": 2100, "total_steps": 2361, "loss": 0.3232, "lr": 8.300585549236773e-07, "epoch": 2.6683608640406606, "percentage": 88.95, "elapsed_time": "3:54:26", "remaining_time": "0:29:08"}
|
| 211 |
+
{"current_steps": 2110, "total_steps": 2361, "loss": 0.3165, "lr": 7.687138700817598e-07, "epoch": 2.681067344345616, "percentage": 89.37, "elapsed_time": "3:55:34", "remaining_time": "0:28:01"}
|
| 212 |
+
{"current_steps": 2120, "total_steps": 2361, "loss": 0.3126, "lr": 7.096332624017755e-07, "epoch": 2.693773824650572, "percentage": 89.79, "elapsed_time": "3:56:41", "remaining_time": "0:26:54"}
|
| 213 |
+
{"current_steps": 2130, "total_steps": 2361, "loss": 0.3303, "lr": 6.528312220698885e-07, "epoch": 2.7064803049555275, "percentage": 90.22, "elapsed_time": "3:57:47", "remaining_time": "0:25:47"}
|
| 214 |
+
{"current_steps": 2140, "total_steps": 2361, "loss": 0.3191, "lr": 5.983216804278869e-07, "epoch": 2.719186785260483, "percentage": 90.64, "elapsed_time": "3:58:54", "remaining_time": "0:24:40"}
|
| 215 |
+
{"current_steps": 2150, "total_steps": 2361, "loss": 0.3059, "lr": 5.461180065563787e-07, "epoch": 2.7318932655654384, "percentage": 91.06, "elapsed_time": "4:00:01", "remaining_time": "0:23:33"}
|
| 216 |
+
{"current_steps": 2160, "total_steps": 2361, "loss": 0.3194, "lr": 4.962330039958585e-07, "epoch": 2.744599745870394, "percentage": 91.49, "elapsed_time": "4:01:08", "remaining_time": "0:22:26"}
|
| 217 |
+
{"current_steps": 2170, "total_steps": 2361, "loss": 0.3148, "lr": 4.486789076064968e-07, "epoch": 2.7573062261753494, "percentage": 91.91, "elapsed_time": "4:02:14", "remaining_time": "0:21:19"}
|
| 218 |
+
{"current_steps": 2180, "total_steps": 2361, "loss": 0.3285, "lr": 4.034673805674116e-07, "epoch": 2.770012706480305, "percentage": 92.33, "elapsed_time": "4:03:21", "remaining_time": "0:20:12"}
|
| 219 |
+
{"current_steps": 2190, "total_steps": 2361, "loss": 0.3172, "lr": 3.606095115161279e-07, "epoch": 2.7827191867852603, "percentage": 92.76, "elapsed_time": "4:04:27", "remaining_time": "0:19:05"}
|
| 220 |
+
{"current_steps": 2200, "total_steps": 2361, "loss": 0.3183, "lr": 3.201158118289793e-07, "epoch": 2.795425667090216, "percentage": 93.18, "elapsed_time": "4:05:34", "remaining_time": "0:17:58"}
|
| 221 |
+
{"current_steps": 2210, "total_steps": 2361, "loss": 0.3209, "lr": 2.8199621304306425e-07, "epoch": 2.8081321473951717, "percentage": 93.6, "elapsed_time": "4:06:41", "remaining_time": "0:16:51"}
|
| 222 |
+
{"current_steps": 2220, "total_steps": 2361, "loss": 0.3165, "lr": 2.46260064420426e-07, "epoch": 2.820838627700127, "percentage": 94.03, "elapsed_time": "4:07:49", "remaining_time": "0:15:44"}
|
| 223 |
+
{"current_steps": 2230, "total_steps": 2361, "loss": 0.3233, "lr": 2.1291613065504313e-07, "epoch": 2.8335451080050826, "percentage": 94.45, "elapsed_time": "4:08:55", "remaining_time": "0:14:37"}
|
| 224 |
+
{"current_steps": 2240, "total_steps": 2361, "loss": 0.318, "lr": 1.819725897231872e-07, "epoch": 2.846251588310038, "percentage": 94.88, "elapsed_time": "4:10:02", "remaining_time": "0:13:30"}
|
| 225 |
+
{"current_steps": 2250, "total_steps": 2361, "loss": 0.3323, "lr": 1.5343703087768225e-07, "epoch": 2.8589580686149936, "percentage": 95.3, "elapsed_time": "4:11:09", "remaining_time": "0:12:23"}
|
| 226 |
+
{"current_steps": 2260, "total_steps": 2361, "loss": 0.3088, "lr": 1.2731645278655448e-07, "epoch": 2.871664548919949, "percentage": 95.72, "elapsed_time": "4:12:16", "remaining_time": "0:11:16"}
|
| 227 |
+
{"current_steps": 2270, "total_steps": 2361, "loss": 0.3213, "lr": 1.0361726181653209e-07, "epoch": 2.884371029224905, "percentage": 96.15, "elapsed_time": "4:13:22", "remaining_time": "0:10:09"}
|
| 228 |
+
{"current_steps": 2280, "total_steps": 2361, "loss": 0.3193, "lr": 8.234527046180885e-08, "epoch": 2.8970775095298604, "percentage": 96.57, "elapsed_time": "4:14:29", "remaining_time": "0:09:02"}
|
| 229 |
+
{"current_steps": 2290, "total_steps": 2361, "loss": 0.3334, "lr": 6.350569591846434e-08, "epoch": 2.909783989834816, "percentage": 96.99, "elapsed_time": "4:15:37", "remaining_time": "0:07:55"}
|
| 230 |
+
{"current_steps": 2300, "total_steps": 2361, "loss": 0.3273, "lr": 4.710315880489091e-08, "epoch": 2.9224904701397714, "percentage": 97.42, "elapsed_time": "4:16:43", "remaining_time": "0:06:48"}
|
| 231 |
+
{"current_steps": 2310, "total_steps": 2361, "loss": 0.3177, "lr": 3.31416820285313e-08, "epoch": 2.935196950444727, "percentage": 97.84, "elapsed_time": "4:17:51", "remaining_time": "0:05:41"}
|
| 232 |
+
{"current_steps": 2320, "total_steps": 2361, "loss": 0.322, "lr": 2.1624689799214503e-08, "epoch": 2.9479034307496823, "percentage": 98.26, "elapsed_time": "4:18:57", "remaining_time": "0:04:34"}
|
| 233 |
+
{"current_steps": 2330, "total_steps": 2361, "loss": 0.3038, "lr": 1.2555006789334301e-08, "epoch": 2.9606099110546378, "percentage": 98.69, "elapsed_time": "4:20:04", "remaining_time": "0:03:27"}
|
| 234 |
+
{"current_steps": 2340, "total_steps": 2361, "loss": 0.313, "lr": 5.934857441062258e-09, "epoch": 2.9733163913595932, "percentage": 99.11, "elapsed_time": "4:21:11", "remaining_time": "0:02:20"}
|
| 235 |
+
{"current_steps": 2350, "total_steps": 2361, "loss": 0.315, "lr": 1.765865420779722e-09, "epoch": 2.9860228716645487, "percentage": 99.53, "elapsed_time": "4:22:17", "remaining_time": "0:01:13"}
|
| 236 |
+
{"current_steps": 2360, "total_steps": 2361, "loss": 0.328, "lr": 4.9053220856354335e-11, "epoch": 2.998729351969504, "percentage": 99.96, "elapsed_time": "4:23:23", "remaining_time": "0:00:06"}
|
| 237 |
+
{"current_steps": 2361, "total_steps": 2361, "epoch": 3.0, "percentage": 100.0, "elapsed_time": "4:27:23", "remaining_time": "0:00:00"}
|
SFT/trainer_state.json
ADDED
|
@@ -0,0 +1,1695 @@
|
| 1 |
+
{
|
| 2 |
+
"best_global_step": null,
|
| 3 |
+
"best_metric": null,
|
| 4 |
+
"best_model_checkpoint": null,
|
| 5 |
+
"epoch": 3.0,
|
| 6 |
+
"eval_steps": 500,
|
| 7 |
+
"global_step": 2361,
|
| 8 |
+
"is_hyper_param_search": false,
|
| 9 |
+
"is_local_process_zero": true,
|
| 10 |
+
"is_world_process_zero": true,
|
| 11 |
+
"log_history": [
|
| 12 |
+
{
|
| 13 |
+
"epoch": 0.012706480304955527,
|
| 14 |
+
"grad_norm": 29.66147254458293,
|
| 15 |
+
"learning_rate": 5.070422535211268e-07,
|
| 16 |
+
"loss": 2.2184,
|
| 17 |
+
"step": 10
|
| 18 |
+
},
|
| 19 |
+
{
|
| 20 |
+
"epoch": 0.025412960609911054,
|
| 21 |
+
"grad_norm": 13.392595629867637,
|
| 22 |
+
"learning_rate": 1.0704225352112677e-06,
|
| 23 |
+
"loss": 1.8971,
|
| 24 |
+
"step": 20
|
| 25 |
+
},
|
| 26 |
+
{
|
| 27 |
+
"epoch": 0.03811944091486658,
|
| 28 |
+
"grad_norm": 11.776335522019831,
|
| 29 |
+
"learning_rate": 1.6338028169014086e-06,
|
| 30 |
+
"loss": 1.4505,
|
| 31 |
+
"step": 30
|
| 32 |
+
},
|
| 33 |
+
{
|
| 34 |
+
"epoch": 0.05082592121982211,
|
| 35 |
+
"grad_norm": 9.270920607469225,
|
| 36 |
+
"learning_rate": 2.19718309859155e-06,
|
| 37 |
+
"loss": 1.2166,
|
| 38 |
+
"step": 40
|
| 39 |
+
},
|
| 40 |
+
{
|
| 41 |
+
"epoch": 0.06353240152477764,
|
| 42 |
+
"grad_norm": 8.813337405138961,
|
| 43 |
+
"learning_rate": 2.7605633802816906e-06,
|
| 44 |
+
"loss": 1.1232,
|
| 45 |
+
"step": 50
|
| 46 |
+
},
|
| 47 |
+
{
|
| 48 |
+
"epoch": 0.07623888182973317,
|
| 49 |
+
"grad_norm": 8.759370998993289,
|
| 50 |
+
"learning_rate": 3.3239436619718313e-06,
|
| 51 |
+
"loss": 1.1167,
|
| 52 |
+
"step": 60
|
| 53 |
+
},
|
| 54 |
+
{
|
| 55 |
+
"epoch": 0.08894536213468869,
|
| 56 |
+
"grad_norm": 9.774431896253528,
|
| 57 |
+
"learning_rate": 3.887323943661972e-06,
|
| 58 |
+
"loss": 1.0426,
|
| 59 |
+
"step": 70
|
| 60 |
+
},
|
| 61 |
+
{
|
| 62 |
+
"epoch": 0.10165184243964422,
|
| 63 |
+
"grad_norm": 6.934105141250302,
|
| 64 |
+
"learning_rate": 4.450704225352113e-06,
|
| 65 |
+
"loss": 0.9485,
|
| 66 |
+
"step": 80
|
| 67 |
+
},
|
| 68 |
+
{
|
| 69 |
+
"epoch": 0.11435832274459974,
|
| 70 |
+
"grad_norm": 9.541786088857878,
|
| 71 |
+
"learning_rate": 5.014084507042254e-06,
|
| 72 |
+
"loss": 0.971,
|
| 73 |
+
"step": 90
|
| 74 |
+
},
|
| 75 |
+
{
|
| 76 |
+
"epoch": 0.12706480304955528,
|
| 77 |
+
"grad_norm": 8.771108246775684,
|
| 78 |
+
"learning_rate": 5.577464788732395e-06,
|
| 79 |
+
"loss": 1.0133,
|
| 80 |
+
"step": 100
|
| 81 |
+
},
|
| 82 |
+
{
|
| 83 |
+
"epoch": 0.1397712833545108,
|
| 84 |
+
"grad_norm": 8.314820650161352,
|
| 85 |
+
"learning_rate": 6.1408450704225356e-06,
|
| 86 |
+
"loss": 0.9729,
|
| 87 |
+
"step": 110
|
| 88 |
+
},
|
| 89 |
+
{
|
| 90 |
+
"epoch": 0.15247776365946633,
|
| 91 |
+
"grad_norm": 8.539055316805458,
|
| 92 |
+
"learning_rate": 6.704225352112676e-06,
|
| 93 |
+
"loss": 0.9679,
|
| 94 |
+
"step": 120
|
| 95 |
+
},
|
| 96 |
+
{
|
| 97 |
+
"epoch": 0.16518424396442186,
|
| 98 |
+
"grad_norm": 7.751785436062567,
|
| 99 |
+
"learning_rate": 7.267605633802818e-06,
|
| 100 |
+
"loss": 0.9634,
|
| 101 |
+
"step": 130
|
| 102 |
+
},
|
| 103 |
+
{
|
| 104 |
+
"epoch": 0.17789072426937738,
|
| 105 |
+
"grad_norm": 8.338219679133466,
|
| 106 |
+
"learning_rate": 7.830985915492958e-06,
|
| 107 |
+
"loss": 0.9674,
|
| 108 |
+
"step": 140
|
| 109 |
+
},
|
| 110 |
+
{
|
| 111 |
+
"epoch": 0.1905972045743329,
|
| 112 |
+
"grad_norm": 6.970312776486159,
|
| 113 |
+
"learning_rate": 8.3943661971831e-06,
|
| 114 |
+
"loss": 0.9323,
|
| 115 |
+
"step": 150
|
| 116 |
+
},
|
| 117 |
+
{
|
| 118 |
+
"epoch": 0.20330368487928843,
|
| 119 |
+
"grad_norm": 8.934334887580613,
|
| 120 |
+
"learning_rate": 8.957746478873241e-06,
|
| 121 |
+
"loss": 0.9296,
|
| 122 |
+
"step": 160
|
| 123 |
+
},
|
| 124 |
+
{
|
| 125 |
+
"epoch": 0.21601016518424396,
|
| 126 |
+
"grad_norm": 8.855402068732014,
|
| 127 |
+
"learning_rate": 9.521126760563381e-06,
|
| 128 |
+
"loss": 0.8979,
|
| 129 |
+
"step": 170
|
| 130 |
+
},
|
| 131 |
+
{
|
| 132 |
+
"epoch": 0.22871664548919948,
|
| 133 |
+
"grad_norm": 7.627448152527667,
|
| 134 |
+
"learning_rate": 1.0084507042253523e-05,
|
| 135 |
+
"loss": 0.9398,
|
| 136 |
+
"step": 180
|
| 137 |
+
},
|
| 138 |
+
{
|
| 139 |
+
"epoch": 0.241423125794155,
|
| 140 |
+
"grad_norm": 8.293311252648705,
|
| 141 |
+
"learning_rate": 1.0647887323943662e-05,
|
| 142 |
+
"loss": 0.9903,
|
| 143 |
+
"step": 190
|
| 144 |
+
},
|
| 145 |
+
{
|
| 146 |
+
"epoch": 0.25412960609911056,
|
| 147 |
+
"grad_norm": 7.860820039563278,
|
| 148 |
+
"learning_rate": 1.1211267605633804e-05,
|
| 149 |
+
"loss": 0.953,
|
| 150 |
+
"step": 200
|
| 151 |
+
},
|
| 152 |
+
{
|
| 153 |
+
"epoch": 0.2668360864040661,
|
| 154 |
+
"grad_norm": 12.101780056709387,
|
| 155 |
+
"learning_rate": 1.1774647887323944e-05,
|
| 156 |
+
"loss": 0.9014,
|
| 157 |
+
"step": 210
|
| 158 |
+
},
|
| 159 |
+
{
|
| 160 |
+
"epoch": 0.2795425667090216,
|
| 161 |
+
"grad_norm": 8.180635063012854,
|
| 162 |
+
"learning_rate": 1.2338028169014084e-05,
|
| 163 |
+
"loss": 0.9612,
|
| 164 |
+
"step": 220
|
| 165 |
+
},
|
| 166 |
+
{
|
| 167 |
+
"epoch": 0.29224904701397714,
|
| 168 |
+
"grad_norm": 7.043614458189797,
|
| 169 |
+
"learning_rate": 1.2901408450704227e-05,
|
| 170 |
+
"loss": 0.9492,
|
| 171 |
+
"step": 230
|
| 172 |
+
},
|
| 173 |
+
{
|
| 174 |
+
"epoch": 0.30495552731893266,
|
| 175 |
+
"grad_norm": 6.22917625029958,
|
| 176 |
+
"learning_rate": 1.3464788732394367e-05,
|
| 177 |
+
"loss": 0.9073,
|
| 178 |
+
"step": 240
|
| 179 |
+
},
|
| 180 |
+
{
|
| 181 |
+
"epoch": 0.3176620076238882,
|
| 182 |
+
"grad_norm": 6.158034885075652,
|
| 183 |
+
"learning_rate": 1.4028169014084507e-05,
|
| 184 |
+
"loss": 0.899,
|
| 185 |
+
"step": 250
|
| 186 |
+
},
|
| 187 |
+
{
|
| 188 |
+
"epoch": 0.3303684879288437,
|
| 189 |
+
"grad_norm": 6.511044998547078,
|
| 190 |
+
"learning_rate": 1.459154929577465e-05,
|
| 191 |
+
"loss": 0.9147,
|
| 192 |
+
"step": 260
|
| 193 |
+
},
|
| 194 |
+
{
|
| 195 |
+
"epoch": 0.34307496823379924,
|
| 196 |
+
"grad_norm": 7.2764171631878405,
|
| 197 |
+
"learning_rate": 1.515492957746479e-05,
|
| 198 |
+
"loss": 0.9637,
|
| 199 |
+
"step": 270
|
| 200 |
+
},
|
| 201 |
+
{
|
| 202 |
+
"epoch": 0.35578144853875476,
|
| 203 |
+
"grad_norm": 7.6464115804873405,
|
| 204 |
+
"learning_rate": 1.571830985915493e-05,
|
| 205 |
+
"loss": 0.9521,
|
| 206 |
+
"step": 280
|
| 207 |
+
},
|
| 208 |
+
{
|
| 209 |
+
"epoch": 0.3684879288437103,
|
| 210 |
+
"grad_norm": 7.027026272110715,
|
| 211 |
+
"learning_rate": 1.6281690140845072e-05,
|
| 212 |
+
"loss": 0.9738,
|
| 213 |
+
"step": 290
|
| 214 |
+
},
|
| 215 |
+
{
|
| 216 |
+
"epoch": 0.3811944091486658,
|
| 217 |
+
"grad_norm": 7.0177338278091765,
|
| 218 |
+
"learning_rate": 1.6845070422535213e-05,
|
| 219 |
+
"loss": 1.0046,
|
| 220 |
+
"step": 300
|
| 221 |
+
},
|
| 222 |
+
{
|
| 223 |
+
"epoch": 0.39390088945362134,
|
| 224 |
+
"grad_norm": 6.988788493899054,
|
| 225 |
+
"learning_rate": 1.740845070422535e-05,
|
| 226 |
+
"loss": 0.9481,
|
| 227 |
+
"step": 310
|
| 228 |
+
},
|
| 229 |
+
{
|
| 230 |
+
"epoch": 0.40660736975857686,
|
| 231 |
+
"grad_norm": 6.592900845765771,
|
| 232 |
+
"learning_rate": 1.7971830985915497e-05,
|
| 233 |
+
"loss": 0.9588,
|
| 234 |
+
"step": 320
|
| 235 |
+
},
|
| 236 |
+
{
|
| 237 |
+
"epoch": 0.4193138500635324,
|
| 238 |
+
"grad_norm": 5.836583770100009,
|
| 239 |
+
"learning_rate": 1.8535211267605635e-05,
|
| 240 |
+
"loss": 0.9404,
|
| 241 |
+
"step": 330
|
| 242 |
+
},
|
| 243 |
+
{
|
| 244 |
+
"epoch": 0.4320203303684879,
|
| 245 |
+
"grad_norm": 6.69521882642329,
|
| 246 |
+
"learning_rate": 1.9098591549295776e-05,
|
| 247 |
+
"loss": 0.9385,
|
| 248 |
+
"step": 340
|
| 249 |
+
},
|
| 250 |
+
{
|
| 251 |
+
"epoch": 0.44472681067344344,
|
| 252 |
+
"grad_norm": 5.945281536047344,
|
| 253 |
+
"learning_rate": 1.9661971830985918e-05,
|
| 254 |
+
"loss": 0.9394,
|
| 255 |
+
"step": 350
|
| 256 |
+
},
|
| 257 |
+
{
|
| 258 |
+
"epoch": 0.45743329097839897,
|
| 259 |
+
"grad_norm": 5.686864112756425,
|
| 260 |
+
"learning_rate": 1.9999803787597817e-05,
|
| 261 |
+
"loss": 0.9764,
|
| 262 |
+
"step": 360
|
| 263 |
+
},
|
| 264 |
+
{
|
| 265 |
+
"epoch": 0.4701397712833545,
|
| 266 |
+
"grad_norm": 5.459918154381771,
|
| 267 |
+
"learning_rate": 1.9997596486500402e-05,
|
| 268 |
+
"loss": 0.9827,
|
| 269 |
+
"step": 370
|
| 270 |
+
},
|
| 271 |
+
{
|
| 272 |
+
"epoch": 0.48284625158831,
|
| 273 |
+
"grad_norm": 5.164520519383337,
|
| 274 |
+
"learning_rate": 1.999293716197302e-05,
|
| 275 |
+
"loss": 0.9291,
|
| 276 |
+
"step": 380
|
| 277 |
+
},
|
| 278 |
+
{
|
| 279 |
+
"epoch": 0.49555273189326554,
|
| 280 |
+
"grad_norm": 6.177104322189275,
|
| 281 |
+
"learning_rate": 1.998582695676762e-05,
|
| 282 |
+
"loss": 1.0364,
|
| 283 |
+
"step": 390
|
| 284 |
+
},
|
| 285 |
+
{
|
| 286 |
+
"epoch": 0.5082592121982211,
|
| 287 |
+
"grad_norm": 5.346131678285112,
|
| 288 |
+
"learning_rate": 1.997626761474232e-05,
|
| 289 |
+
"loss": 0.9728,
|
| 290 |
+
"step": 400
|
| 291 |
+
},
|
| 292 |
+
{
|
| 293 |
+
"epoch": 0.5209656925031766,
|
| 294 |
+
"grad_norm": 5.075630186929998,
|
| 295 |
+
"learning_rate": 1.99642614804337e-05,
|
| 296 |
+
"loss": 0.957,
|
| 297 |
+
"step": 410
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"epoch": 0.5336721728081322,
|
| 301 |
+
"grad_norm": 5.766342613636245,
|
| 302 |
+
"learning_rate": 1.9949811498481763e-05,
|
| 303 |
+
"loss": 0.9856,
|
| 304 |
+
"step": 420
|
| 305 |
+
},
|
| 306 |
+
{
|
| 307 |
+
"epoch": 0.5463786531130876,
|
| 308 |
+
"grad_norm": 4.773734435548506,
|
| 309 |
+
"learning_rate": 1.9932921212907753e-05,
|
| 310 |
+
"loss": 1.0065,
|
| 311 |
+
"step": 430
|
| 312 |
+
},
|
| 313 |
+
{
|
| 314 |
+
"epoch": 0.5590851334180432,
|
| 315 |
+
"grad_norm": 4.8306675684219105,
|
| 316 |
+
"learning_rate": 1.991359476624493e-05,
|
| 317 |
+
"loss": 0.9175,
|
| 318 |
+
"step": 440
|
| 319 |
+
},
|
| 320 |
+
{
|
| 321 |
+
"epoch": 0.5717916137229987,
|
| 322 |
+
"grad_norm": 4.8928892220094236,
|
| 323 |
+
"learning_rate": 1.9891836898522566e-05,
|
| 324 |
+
"loss": 0.9014,
|
| 325 |
+
"step": 450
|
| 326 |
+
},
|
| 327 |
+
{
|
| 328 |
+
"epoch": 0.5844980940279543,
|
| 329 |
+
"grad_norm": 4.466687752161082,
|
| 330 |
+
"learning_rate": 1.9867652946103413e-05,
|
| 331 |
+
"loss": 0.9324,
|
| 332 |
+
"step": 460
|
| 333 |
+
},
|
| 334 |
+
{
|
| 335 |
+
"epoch": 0.5972045743329097,
|
| 336 |
+
"grad_norm": 4.706203130518702,
|
| 337 |
+
"learning_rate": 1.9841048840374885e-05,
|
| 338 |
+
"loss": 0.9311,
|
| 339 |
+
"step": 470
|
| 340 |
+
},
|
| 341 |
+
{
|
| 342 |
+
"epoch": 0.6099110546378653,
|
| 343 |
+
"grad_norm": 4.776570809357277,
|
| 344 |
+
"learning_rate": 1.9812031106294314e-05,
|
| 345 |
+
"loss": 0.952,
|
| 346 |
+
"step": 480
|
| 347 |
+
},
|
| 348 |
+
{
|
| 349 |
+
"epoch": 0.6226175349428208,
|
| 350 |
+
"grad_norm": 5.443465949868151,
|
| 351 |
+
"learning_rate": 1.978060686078866e-05,
|
| 352 |
+
"loss": 0.9067,
|
| 353 |
+
"step": 490
|
| 354 |
+
},
|
| 355 |
+
{
|
| 356 |
+
"epoch": 0.6353240152477764,
|
| 357 |
+
"grad_norm": 4.985579208130221,
|
| 358 |
+
"learning_rate": 1.974678381100896e-05,
|
| 359 |
+
"loss": 0.9559,
|
| 360 |
+
"step": 500
|
| 361 |
+
},
|
| 362 |
+
{
|
| 363 |
+
"epoch": 0.6480304955527318,
|
| 364 |
+
"grad_norm": 4.406803792781764,
|
| 365 |
+
"learning_rate": 1.9710570252440106e-05,
|
| 366 |
+
"loss": 0.9082,
|
| 367 |
+
"step": 510
|
| 368 |
+
},
|
| 369 |
+
{
|
| 370 |
+
"epoch": 0.6607369758576874,
|
| 371 |
+
"grad_norm": 4.620905177890474,
|
| 372 |
+
"learning_rate": 1.9671975066866254e-05,
|
| 373 |
+
"loss": 0.9241,
|
| 374 |
+
"step": 520
|
| 375 |
+
},
|
| 376 |
+
{
|
| 377 |
+
"epoch": 0.6734434561626429,
|
| 378 |
+
"grad_norm": 3.9984494959651533,
|
| 379 |
+
"learning_rate": 1.9631007720192475e-05,
|
| 380 |
+
"loss": 0.8811,
|
| 381 |
+
"step": 530
|
| 382 |
+
},
|
| 383 |
+
{
|
| 384 |
+
"epoch": 0.6861499364675985,
|
| 385 |
+
"grad_norm": 4.760193821922472,
|
| 386 |
+
"learning_rate": 1.9587678260123146e-05,
|
| 387 |
+
"loss": 0.9314,
|
| 388 |
+
"step": 540
|
| 389 |
+
},
|
| 390 |
+
{
|
| 391 |
+
"epoch": 0.6988564167725541,
|
| 392 |
+
"grad_norm": 4.4328809578626895,
|
| 393 |
+
"learning_rate": 1.9541997313697614e-05,
|
| 394 |
+
"loss": 0.9018,
|
| 395 |
+
"step": 550
|
| 396 |
+
},
|
| 397 |
+
{
|
| 398 |
+
"epoch": 0.7115628970775095,
|
| 399 |
+
"grad_norm": 3.785123304702001,
|
| 400 |
+
"learning_rate": 1.9493976084683814e-05,
|
| 401 |
+
"loss": 0.9349,
|
| 402 |
+
"step": 560
|
| 403 |
+
},
|
| 404 |
+
{
|
| 405 |
+
"epoch": 0.7242693773824651,
|
| 406 |
+
"grad_norm": 4.623522007776074,
|
| 407 |
+
"learning_rate": 1.9443626350830417e-05,
|
| 408 |
+
"loss": 0.9283,
|
| 409 |
+
"step": 570
|
| 410 |
+
},
|
| 411 |
+
{
|
| 412 |
+
"epoch": 0.7369758576874206,
|
| 413 |
+
"grad_norm": 4.367382745999128,
|
| 414 |
+
"learning_rate": 1.9390960460978188e-05,
|
| 415 |
+
"loss": 0.8936,
|
| 416 |
+
"step": 580
|
| 417 |
+
},
|
| 418 |
+
{
|
| 419 |
+
"epoch": 0.7496823379923762,
|
| 420 |
+
"grad_norm": 4.640745350515662,
|
| 421 |
+
"learning_rate": 1.933599133203131e-05,
|
| 422 |
+
"loss": 0.9529,
|
| 423 |
+
"step": 590
|
| 424 |
+
},
|
| 425 |
+
{
|
| 426 |
+
"epoch": 0.7623888182973316,
|
| 427 |
+
"grad_norm": 4.0946471225054974,
|
| 428 |
+
"learning_rate": 1.9278732445789364e-05,
|
| 429 |
+
"loss": 0.8961,
|
| 430 |
+
"step": 600
|
| 431 |
+
},
|
| 432 |
+
{
|
| 433 |
+
"epoch": 0.7750952986022872,
|
| 434 |
+
"grad_norm": 4.250614479191838,
|
| 435 |
+
"learning_rate": 1.9219197845640766e-05,
|
| 436 |
+
"loss": 0.9028,
|
| 437 |
+
"step": 610
|
| 438 |
+
},
|
| 439 |
+
{
|
| 440 |
+
"epoch": 0.7878017789072427,
|
| 441 |
+
"grad_norm": 4.147828875270731,
|
| 442 |
+
"learning_rate": 1.9157402133118454e-05,
|
| 443 |
+
"loss": 0.9302,
|
| 444 |
+
"step": 620
|
| 445 |
+
},
|
| 446 |
+
{
|
| 447 |
+
"epoch": 0.8005082592121983,
|
| 448 |
+
"grad_norm": 5.568981020266887,
|
| 449 |
+
"learning_rate": 1.909336046431871e-05,
|
| 450 |
+
"loss": 0.9233,
|
| 451 |
+
"step": 630
|
| 452 |
+
},
|
| 453 |
+
{
|
| 454 |
+
"epoch": 0.8132147395171537,
|
| 455 |
+
"grad_norm": 4.311517125711432,
|
| 456 |
+
"learning_rate": 1.9027088546183968e-05,
|
| 457 |
+
"loss": 0.9694,
|
| 458 |
+
"step": 640
|
| 459 |
+
},
|
| 460 |
+
{
|
| 461 |
+
"epoch": 0.8259212198221093,
|
| 462 |
+
"grad_norm": 5.556351823725932,
|
| 463 |
+
"learning_rate": 1.8958602632650474e-05,
|
| 464 |
+
"loss": 0.9003,
|
| 465 |
+
"step": 650
|
| 466 |
+
},
|
| 467 |
+
{
|
| 468 |
+
"epoch": 0.8386277001270648,
|
| 469 |
+
"grad_norm": 4.269905367926679,
|
| 470 |
+
"learning_rate": 1.8887919520661867e-05,
|
| 471 |
+
"loss": 0.8805,
|
| 472 |
+
"step": 660
|
| 473 |
+
},
|
| 474 |
+
{
|
| 475 |
+
"epoch": 0.8513341804320204,
|
| 476 |
+
"grad_norm": 3.863721835826297,
|
| 477 |
+
"learning_rate": 1.8815056546049505e-05,
|
| 478 |
+
"loss": 0.9158,
|
| 479 |
+
"step": 670
|
| 480 |
+
},
|
| 481 |
+
{
|
| 482 |
+
"epoch": 0.8640406607369758,
|
| 483 |
+
"grad_norm": 3.963824200874715,
|
| 484 |
+
"learning_rate": 1.8740031579280667e-05,
|
| 485 |
+
"loss": 0.8835,
|
| 486 |
+
"step": 680
|
| 487 |
+
},
|
| 488 |
+
{
|
| 489 |
+
"epoch": 0.8767471410419314,
|
| 490 |
+
"grad_norm": 3.680960497113959,
|
| 491 |
+
"learning_rate": 1.8662863021075632e-05,
|
| 492 |
+
"loss": 0.898,
|
| 493 |
+
"step": 690
|
| 494 |
+
},
|
| 495 |
+
{
|
| 496 |
+
"epoch": 0.8894536213468869,
|
| 497 |
+
"grad_norm": 3.7414803428899606,
|
| 498 |
+
"learning_rate": 1.8583569797894673e-05,
|
| 499 |
+
"loss": 0.9253,
|
| 500 |
+
"step": 700
|
| 501 |
+
},
|
| 502 |
+
{
|
| 503 |
+
"epoch": 0.9021601016518425,
|
| 504 |
+
"grad_norm": 4.680988801232008,
|
| 505 |
+
"learning_rate": 1.8502171357296144e-05,
|
| 506 |
+
"loss": 0.848,
|
| 507 |
+
"step": 710
|
| 508 |
+
},
|
| 509 |
+
{
|
| 510 |
+
"epoch": 0.9148665819567979,
|
| 511 |
+
"grad_norm": 3.9671267724005785,
|
| 512 |
+
"learning_rate": 1.8418687663166745e-05,
|
| 513 |
+
"loss": 0.8965,
|
| 514 |
+
"step": 720
|
| 515 |
+
},
|
| 516 |
+
{
|
| 517 |
+
"epoch": 0.9275730622617535,
|
| 518 |
+
"grad_norm": 4.137039499686447,
|
| 519 |
+
"learning_rate": 1.833313919082515e-05,
|
| 520 |
+
"loss": 0.8553,
|
| 521 |
+
"step": 730
|
| 522 |
+
},
|
| 523 |
+
{
|
| 524 |
+
"epoch": 0.940279542566709,
|
| 525 |
+
"grad_norm": 4.784766455706121,
|
| 526 |
+
"learning_rate": 1.8245546922000207e-05,
|
| 527 |
+
"loss": 0.8695,
|
| 528 |
+
"step": 740
|
| 529 |
+
},
|
| 530 |
+
{
|
| 531 |
+
"epoch": 0.9529860228716646,
|
| 532 |
+
"grad_norm": 4.418195979726905,
|
| 533 |
+
"learning_rate": 1.815593233968492e-05,
|
| 534 |
+
"loss": 0.8497,
|
| 535 |
+
"step": 750
|
| 536 |
+
},
|
| 537 |
+
{
|
| 538 |
+
"epoch": 0.96569250317662,
|
| 539 |
+
"grad_norm": 4.103893841492413,
|
| 540 |
+
"learning_rate": 1.806431742286752e-05,
|
| 541 |
+
"loss": 0.8746,
|
| 542 |
+
"step": 760
|
| 543 |
+
},
|
| 544 |
+
{
|
| 545 |
+
"epoch": 0.9783989834815756,
|
| 546 |
+
"grad_norm": 3.798164417492566,
|
| 547 |
+
"learning_rate": 1.7970724641140864e-05,
|
| 548 |
+
"loss": 0.8708,
|
| 549 |
+
"step": 770
|
| 550 |
+
},
|
| 551 |
+
{
|
| 552 |
+
"epoch": 0.9911054637865311,
|
| 553 |
+
"grad_norm": 4.623760315878684,
|
| 554 |
+
"learning_rate": 1.7875176949191506e-05,
|
| 555 |
+
"loss": 0.94,
|
| 556 |
+
"step": 780
|
| 557 |
+
},
|
| 558 |
+
{
|
| 559 |
+
"epoch": 1.0038119440914866,
|
| 560 |
+
"grad_norm": 3.381126634985229,
|
| 561 |
+
"learning_rate": 1.7777697781169813e-05,
|
| 562 |
+
"loss": 0.8297,
|
| 563 |
+
"step": 790
|
| 564 |
+
},
|
| 565 |
+
{
|
| 566 |
+
"epoch": 1.0165184243964422,
|
| 567 |
+
"grad_norm": 4.072031882597377,
|
| 568 |
+
"learning_rate": 1.7678311044942464e-05,
|
| 569 |
+
"loss": 0.6761,
|
| 570 |
+
"step": 800
|
| 571 |
+
},
|
| 572 |
+
{
|
| 573 |
+
"epoch": 1.0292249047013977,
|
| 574 |
+
"grad_norm": 4.666218927514245,
|
| 575 |
+
"learning_rate": 1.757704111622878e-05,
|
| 576 |
+
"loss": 0.6868,
|
| 577 |
+
"step": 810
|
| 578 |
+
},
|
| 579 |
+
{
|
| 580 |
+
"epoch": 1.0419313850063532,
|
| 581 |
+
"grad_norm": 3.010389554548932,
|
| 582 |
+
"learning_rate": 1.747391283262231e-05,
|
| 583 |
+
"loss": 0.6994,
|
| 584 |
+
"step": 820
|
| 585 |
+
},
|
| 586 |
+
{
|
| 587 |
+
"epoch": 1.0546378653113089,
|
| 588 |
+
"grad_norm": 4.690411895539488,
|
| 589 |
+
"learning_rate": 1.736895148749911e-05,
|
| 590 |
+
"loss": 0.7141,
|
| 591 |
+
"step": 830
|
| 592 |
+
},
|
| 593 |
+
{
|
| 594 |
+
"epoch": 1.0673443456162643,
|
| 595 |
+
"grad_norm": 4.135758513727204,
|
| 596 |
+
"learning_rate": 1.7262182823814297e-05,
|
| 597 |
+
"loss": 0.6941,
|
| 598 |
+
"step": 840
|
| 599 |
+
},
|
| 600 |
+
{
|
| 601 |
+
"epoch": 1.0800508259212198,
|
| 602 |
+
"grad_norm": 4.378977675253243,
|
| 603 |
+
"learning_rate": 1.7153633027788252e-05,
|
| 604 |
+
"loss": 0.6662,
|
| 605 |
+
"step": 850
|
| 606 |
+
},
|
| 607 |
+
{
|
| 608 |
+
"epoch": 1.0927573062261753,
|
| 609 |
+
"grad_norm": 3.8569291056754498,
|
| 610 |
+
"learning_rate": 1.704332872248418e-05,
|
| 611 |
+
"loss": 0.6575,
|
| 612 |
+
"step": 860
|
| 613 |
+
},
|
| 614 |
+
{
|
| 615 |
+
"epoch": 1.105463786531131,
|
| 616 |
+
"grad_norm": 3.9650953005920666,
|
| 617 |
+
"learning_rate": 1.69312969612785e-05,
|
| 618 |
+
"loss": 0.6959,
|
| 619 |
+
"step": 870
|
| 620 |
+
},
|
| 621 |
+
{
|
| 622 |
+
"epoch": 1.1181702668360864,
|
| 623 |
+
"grad_norm": 3.349386344864765,
|
| 624 |
+
"learning_rate": 1.6817565221225698e-05,
|
| 625 |
+
"loss": 0.6701,
|
| 626 |
+
"step": 880
|
| 627 |
+
},
|
| 628 |
+
{
|
| 629 |
+
"epoch": 1.130876747141042,
|
| 630 |
+
"grad_norm": 4.530446985368436,
|
| 631 |
+
"learning_rate": 1.6702161396319266e-05,
|
| 632 |
+
"loss": 0.7168,
|
| 633 |
+
"step": 890
|
| 634 |
+
},
|
| 635 |
+
{
|
| 636 |
+
"epoch": 1.1435832274459974,
|
| 637 |
+
"grad_norm": 4.048659358174538,
|
| 638 |
+
"learning_rate": 1.658511379065039e-05,
|
| 639 |
+
"loss": 0.7087,
|
| 640 |
+
"step": 900
|
| 641 |
+
},
|
| 642 |
+
{
|
| 643 |
+
"epoch": 1.156289707750953,
|
| 644 |
+
"grad_norm": 3.897340539186477,
|
| 645 |
+
"learning_rate": 1.6466451111466044e-05,
|
| 646 |
+
"loss": 0.7509,
|
| 647 |
+
"step": 910
|
| 648 |
+
},
|
| 649 |
+
{
|
| 650 |
+
"epoch": 1.1689961880559085,
|
| 651 |
+
"grad_norm": 3.106349799248209,
|
| 652 |
+
"learning_rate": 1.6346202462128228e-05,
|
| 653 |
+
"loss": 0.6793,
|
| 654 |
+
"step": 920
|
| 655 |
+
},
|
| 656 |
+
{
|
| 657 |
+
"epoch": 1.181702668360864,
|
| 658 |
+
"grad_norm": 3.7338218401998753,
|
| 659 |
+
"learning_rate": 1.6224397334976023e-05,
|
| 660 |
+
"loss": 0.7172,
|
| 661 |
+
"step": 930
|
| 662 |
+
},
|
| 663 |
+
{
|
| 664 |
+
"epoch": 1.1944091486658195,
|
| 665 |
+
"grad_norm": 5.11718627522725,
|
| 666 |
+
"learning_rate": 1.610106560409227e-05,
|
| 667 |
+
"loss": 0.6759,
|
| 668 |
+
"step": 940
|
| 669 |
+
},
|
| 670 |
+
{
|
| 671 |
+
"epoch": 1.2071156289707752,
|
| 672 |
+
"grad_norm": 3.6889308944466177,
|
| 673 |
+
"learning_rate": 1.597623751797662e-05,
|
| 674 |
+
"loss": 0.6822,
|
| 675 |
+
"step": 950
|
| 676 |
+
},
|
| 677 |
+
{
|
| 678 |
+
"epoch": 1.2198221092757306,
|
| 679 |
+
"grad_norm": 3.6223318506400135,
|
| 680 |
+
"learning_rate": 1.584994369212673e-05,
|
| 681 |
+
"loss": 0.7034,
|
| 682 |
+
"step": 960
|
| 683 |
+
},
|
| 684 |
+
{
|
| 685 |
+
"epoch": 1.2325285895806861,
|
| 686 |
+
"grad_norm": 3.3333910693718662,
|
| 687 |
+
"learning_rate": 1.572221510152949e-05,
|
| 688 |
+
"loss": 0.767,
|
| 689 |
+
"step": 970
|
| 690 |
+
},
|
| 691 |
+
{
|
| 692 |
+
"epoch": 1.2452350698856416,
|
| 693 |
+
"grad_norm": 4.265447578007238,
|
| 694 |
+
"learning_rate": 1.5593083073064037e-05,
|
| 695 |
+
"loss": 0.7358,
|
| 696 |
+
"step": 980
|
| 697 |
+
},
|
| 698 |
+
{
|
| 699 |
+
"epoch": 1.2579415501905973,
|
| 700 |
+
"grad_norm": 3.874622904654225,
|
| 701 |
+
"learning_rate": 1.5462579277818498e-05,
|
| 702 |
+
"loss": 0.7336,
|
| 703 |
+
"step": 990
|
| 704 |
+
},
|
| 705 |
+
{
|
| 706 |
+
"epoch": 1.2706480304955527,
|
| 707 |
+
"grad_norm": 3.925758808832438,
|
| 708 |
+
"learning_rate": 1.5330735723322282e-05,
|
| 709 |
+
"loss": 0.7102,
|
| 710 |
+
"step": 1000
|
| 711 |
+
},
|
| 712 |
+
{
|
| 713 |
+
"epoch": 1.2833545108005082,
|
| 714 |
+
"grad_norm": 4.212874894353556,
|
| 715 |
+
"learning_rate": 1.5197584745695904e-05,
|
| 716 |
+
"loss": 0.7053,
|
| 717 |
+
"step": 1010
|
| 718 |
+
},
|
| 719 |
+
{
|
| 720 |
+
"epoch": 1.2960609911054637,
|
| 721 |
+
"grad_norm": 3.7288496569236154,
|
| 722 |
+
"learning_rate": 1.506315900172014e-05,
|
| 723 |
+
"loss": 0.7223,
|
| 724 |
+
"step": 1020
|
| 725 |
+
},
|
| 726 |
+
{
|
| 727 |
+
"epoch": 1.3087674714104194,
|
| 728 |
+
"grad_norm": 3.79413472563588,
|
| 729 |
+
"learning_rate": 1.4927491460826626e-05,
|
| 730 |
+
"loss": 0.7185,
|
| 731 |
+
"step": 1030
|
| 732 |
+
},
|
| 733 |
+
{
|
| 734 |
+
"epoch": 1.3214739517153749,
|
| 735 |
+
"grad_norm": 4.197391869723048,
|
| 736 |
+
"learning_rate": 1.4790615397011703e-05,
|
| 737 |
+
"loss": 0.6293,
|
| 738 |
+
"step": 1040
|
| 739 |
+
},
|
| 740 |
+
{
|
| 741 |
+
"epoch": 1.3341804320203303,
|
| 742 |
+
"grad_norm": 3.3274802014296254,
|
| 743 |
+
"learning_rate": 1.4652564380675616e-05,
|
| 744 |
+
"loss": 0.7111,
|
| 745 |
+
"step": 1050
|
| 746 |
+
},
|
| 747 |
+
{
|
| 748 |
+
"epoch": 1.346886912325286,
|
| 749 |
+
"grad_norm": 3.984633199779957,
|
| 750 |
+
"learning_rate": 1.4513372270388967e-05,
|
| 751 |
+
"loss": 0.6926,
|
| 752 |
+
"step": 1060
|
| 753 |
+
},
|
| 754 |
+
{
|
| 755 |
+
"epoch": 1.3595933926302415,
|
| 756 |
+
"grad_norm": 4.32141196403412,
|
| 757 |
+
"learning_rate": 1.4373073204588556e-05,
|
| 758 |
+
"loss": 0.7126,
|
| 759 |
+
"step": 1070
|
| 760 |
+
},
|
| 761 |
+
{
|
| 762 |
+
"epoch": 1.372299872935197,
|
| 763 |
+
"grad_norm": 3.7790442182857302,
|
| 764 |
+
"learning_rate": 1.42317015932045e-05,
|
| 765 |
+
"loss": 0.6873,
|
| 766 |
+
"step": 1080
|
| 767 |
+
},
|
| 768 |
+
{
|
| 769 |
+
"epoch": 1.3850063532401524,
|
| 770 |
+
"grad_norm": 4.2661658978513355,
|
| 771 |
+
"learning_rate": 1.4089292109220852e-05,
|
| 772 |
+
"loss": 0.7642,
|
| 773 |
+
"step": 1090
|
| 774 |
+
},
|
| 775 |
+
{
|
| 776 |
+
"epoch": 1.397712833545108,
|
| 777 |
+
"grad_norm": 4.2591149854567645,
|
| 778 |
+
"learning_rate": 1.394587968017162e-05,
|
| 779 |
+
"loss": 0.6799,
|
| 780 |
+
"step": 1100
|
| 781 |
+
},
|
| 782 |
+
{
|
| 783 |
+
"epoch": 1.4104193138500636,
|
| 784 |
+
"grad_norm": 3.689601844022756,
|
| 785 |
+
"learning_rate": 1.3801499479574431e-05,
|
| 786 |
+
"loss": 0.6536,
|
| 787 |
+
"step": 1110
|
| 788 |
+
},
|
| 789 |
+
{
|
| 790 |
+
"epoch": 1.423125794155019,
|
| 791 |
+
"grad_norm": 4.289242494025662,
|
| 792 |
+
"learning_rate": 1.3656186918303804e-05,
|
| 793 |
+
"loss": 0.7092,
|
| 794 |
+
"step": 1120
|
| 795 |
+
},
|
| 796 |
+
{
|
| 797 |
+
"epoch": 1.4358322744599745,
|
| 798 |
+
"grad_norm": 3.891766076099888,
|
| 799 |
+
"learning_rate": 1.3509977635906241e-05,
|
| 800 |
+
"loss": 0.6536,
|
| 801 |
+
"step": 1130
|
| 802 |
+
},
|
| 803 |
+
{
|
| 804 |
+
"epoch": 1.4485387547649302,
|
| 805 |
+
"grad_norm": 3.4313665664745465,
|
| 806 |
+
"learning_rate": 1.3362907491859227e-05,
|
| 807 |
+
"loss": 0.6474,
|
| 808 |
+
"step": 1140
|
| 809 |
+
},
|
| 810 |
+
{
|
| 811 |
+
"epoch": 1.4612452350698857,
|
| 812 |
+
"grad_norm": 4.303628344639665,
|
| 813 |
+
"learning_rate": 1.3215012556776287e-05,
|
| 814 |
+
"loss": 0.715,
|
| 815 |
+
"step": 1150
|
| 816 |
+
},
|
| 817 |
+
{
|
| 818 |
+
"epoch": 1.4739517153748412,
|
| 819 |
+
"grad_norm": 4.009317272354951,
|
| 820 |
+
"learning_rate": 1.3066329103560267e-05,
|
| 821 |
+
"loss": 0.715,
|
| 822 |
+
"step": 1160
|
| 823 |
+
},
|
| 824 |
+
{
|
| 825 |
+
"epoch": 1.4866581956797966,
|
| 826 |
+
"grad_norm": 3.171330560062687,
|
| 827 |
+
"learning_rate": 1.2916893598506981e-05,
|
| 828 |
+
"loss": 0.6217,
|
| 829 |
+
"step": 1170
|
| 830 |
+
},
|
| 831 |
+
{
|
| 832 |
+
"epoch": 1.499364675984752,
|
| 833 |
+
"grad_norm": 3.3926952435565676,
|
| 834 |
+
"learning_rate": 1.276674269236145e-05,
|
| 835 |
+
"loss": 0.7366,
|
| 836 |
+
"step": 1180
|
| 837 |
+
},
|
| 838 |
+
{
|
| 839 |
+
"epoch": 1.5120711562897078,
|
| 840 |
+
"grad_norm": 3.8316403134343537,
|
| 841 |
+
"learning_rate": 1.2615913211328894e-05,
|
| 842 |
+
"loss": 0.6939,
|
| 843 |
+
"step": 1190
|
| 844 |
+
},
|
| 845 |
+
{
|
| 846 |
+
"epoch": 1.5247776365946633,
|
| 847 |
+
"grad_norm": 4.868361745818093,
|
| 848 |
+
"learning_rate": 1.2464442148042679e-05,
|
| 849 |
+
"loss": 0.6919,
|
| 850 |
+
"step": 1200
|
| 851 |
+
},
|
| 852 |
+
{
|
| 853 |
+
"epoch": 1.537484116899619,
|
| 854 |
+
"grad_norm": 3.5185484888328644,
|
| 855 |
+
"learning_rate": 1.2312366652491476e-05,
|
| 856 |
+
"loss": 0.6791,
|
| 857 |
+
"step": 1210
|
| 858 |
+
},
|
| 859 |
+
{
|
| 860 |
+
"epoch": 1.5501905972045744,
|
| 861 |
+
"grad_norm": 3.543401291583064,
|
| 862 |
+
"learning_rate": 1.2159724022907786e-05,
|
| 863 |
+
"loss": 0.6574,
|
| 864 |
+
"step": 1220
|
| 865 |
+
},
|
| 866 |
+
{
|
| 867 |
+
"epoch": 1.5628970775095299,
|
| 868 |
+
"grad_norm": 3.6437779582291063,
|
| 869 |
+
"learning_rate": 1.2006551696620135e-05,
|
| 870 |
+
"loss": 0.701,
|
| 871 |
+
"step": 1230
|
| 872 |
+
},
|
| 873 |
+
{
|
| 874 |
+
"epoch": 1.5756035578144854,
|
| 875 |
+
"grad_norm": 3.2559101294982025,
|
| 876 |
+
"learning_rate": 1.1852887240871145e-05,
|
| 877 |
+
"loss": 0.6546,
|
| 878 |
+
"step": 1240
|
| 879 |
+
},
|
| 880 |
+
{
|
| 881 |
+
"epoch": 1.5883100381194408,
|
| 882 |
+
"grad_norm": 3.9272330209126634,
|
| 883 |
+
"learning_rate": 1.1698768343603753e-05,
|
| 884 |
+
"loss": 0.6643,
|
| 885 |
+
"step": 1250
|
| 886 |
+
},
|
| 887 |
+
{
|
| 888 |
+
"epoch": 1.6010165184243963,
|
| 889 |
+
"grad_norm": 4.624643945291569,
|
| 890 |
+
"learning_rate": 1.1544232804217805e-05,
|
| 891 |
+
"loss": 0.6982,
|
| 892 |
+
"step": 1260
|
| 893 |
+
},
|
| 894 |
+
{
|
| 895 |
+
"epoch": 1.613722998729352,
|
| 896 |
+
"grad_norm": 3.7368581014964803,
|
| 897 |
+
"learning_rate": 1.1389318524299332e-05,
|
| 898 |
+
"loss": 0.6591,
|
| 899 |
+
"step": 1270
|
| 900 |
+
},
|
| 901 |
+
{
|
| 902 |
+
"epoch": 1.6264294790343075,
|
| 903 |
+
"grad_norm": 3.4323757873137177,
|
| 904 |
+
"learning_rate": 1.1234063498324764e-05,
|
| 905 |
+
"loss": 0.6743,
|
| 906 |
+
"step": 1280
|
| 907 |
+
},
|
| 908 |
+
{
|
| 909 |
+
"epoch": 1.6391359593392631,
|
| 910 |
+
"grad_norm": 4.208550713330492,
|
| 911 |
+
"learning_rate": 1.1078505804342327e-05,
|
| 912 |
+
"loss": 0.7147,
|
| 913 |
+
"step": 1290
|
| 914 |
+
},
|
| 915 |
+
{
|
| 916 |
+
"epoch": 1.6518424396442186,
|
| 917 |
+
"grad_norm": 2.978768874310465,
|
| 918 |
+
"learning_rate": 1.092268359463302e-05,
|
| 919 |
+
"loss": 0.671,
|
| 920 |
+
"step": 1300
|
| 921 |
+
},
|
| 922 |
+
{
|
| 923 |
+
"epoch": 1.664548919949174,
|
| 924 |
+
"grad_norm": 3.5924777944521606,
|
| 925 |
+
"learning_rate": 1.0766635086353298e-05,
|
| 926 |
+
"loss": 0.6713,
|
| 927 |
+
"step": 1310
|
| 928 |
+
},
|
| 929 |
+
{
|
| 930 |
+
"epoch": 1.6772554002541296,
|
| 931 |
+
"grad_norm": 3.495623048824376,
|
| 932 |
+
"learning_rate": 1.06103985521619e-05,
|
| 933 |
+
"loss": 0.6629,
|
| 934 |
+
"step": 1320
|
| 935 |
+
},
|
| 936 |
+
{
|
| 937 |
+
"epoch": 1.689961880559085,
|
| 938 |
+
"grad_norm": 4.086638389260075,
|
| 939 |
+
"learning_rate": 1.0454012310833034e-05,
|
| 940 |
+
"loss": 0.7035,
|
| 941 |
+
"step": 1330
|
| 942 |
+
},
|
| 943 |
+
{
|
| 944 |
+
"epoch": 1.7026683608640405,
|
| 945 |
+
"grad_norm": 3.475772078501932,
|
| 946 |
+
"learning_rate": 1.0297514717858286e-05,
|
| 947 |
+
"loss": 0.6631,
|
| 948 |
+
"step": 1340
|
| 949 |
+
},
|
| 950 |
+
{
|
| 951 |
+
"epoch": 1.7153748411689962,
|
| 952 |
+
"grad_norm": 3.5510342885210164,
|
| 953 |
+
"learning_rate": 1.0140944156039481e-05,
|
| 954 |
+
"loss": 0.685,
|
| 955 |
+
"step": 1350
|
| 956 |
+
},
|
| 957 |
+
{
|
| 958 |
+
"epoch": 1.7280813214739519,
|
| 959 |
+
"grad_norm": 3.5594852661382634,
|
| 960 |
+
"learning_rate": 9.984339026074881e-06,
|
| 961 |
+
"loss": 0.6549,
|
| 962 |
+
"step": 1360
|
| 963 |
+
},
|
| 964 |
+
{
|
| 965 |
+
"epoch": 1.7407878017789074,
|
| 966 |
+
"grad_norm": 3.3395635194008415,
|
| 967 |
+
"learning_rate": 9.827737737140983e-06,
|
| 968 |
+
"loss": 0.6467,
|
| 969 |
+
"step": 1370
|
| 970 |
+
},
|
| 971 |
+
{
|
| 972 |
+
"epoch": 1.7534942820838628,
|
| 973 |
+
"grad_norm": 3.219821540782638,
|
| 974 |
+
"learning_rate": 9.671178697472217e-06,
|
| 975 |
+
"loss": 0.6543,
|
| 976 |
+
"step": 1380
|
| 977 |
+
},
|
| 978 |
+
{
|
| 979 |
+
"epoch": 1.7662007623888183,
|
| 980 |
+
"grad_norm": 3.384594388965041,
|
| 981 |
+
"learning_rate": 9.514700304940901e-06,
|
| 982 |
+
"loss": 0.6922,
|
| 983 |
+
"step": 1390
|
| 984 |
+
},
|
| 985 |
+
{
|
| 986 |
+
"epoch": 1.7789072426937738,
|
| 987 |
+
"grad_norm": 3.64590250632275,
|
| 988 |
+
"learning_rate": 9.358340937639746e-06,
|
| 989 |
+
"loss": 0.6557,
|
| 990 |
+
"step": 1400
|
| 991 |
+
},
|
| 992 |
+
{
|
| 993 |
+
"epoch": 1.7916137229987292,
|
| 994 |
+
"grad_norm": 3.765353121248252,
|
| 995 |
+
"learning_rate": 9.202138944469168e-06,
|
| 996 |
+
"loss": 0.688,
|
| 997 |
+
"step": 1410
|
| 998 |
+
},
|
| 999 |
+
{
|
| 1000 |
+
"epoch": 1.804320203303685,
|
| 1001 |
+
"grad_norm": 3.7449398399867624,
|
| 1002 |
+
"learning_rate": 9.046132635731816e-06,
|
| 1003 |
+
"loss": 0.6675,
|
| 1004 |
+
"step": 1420
|
| 1005 |
+
},
|
| 1006 |
+
{
|
| 1007 |
+
"epoch": 1.8170266836086404,
|
| 1008 |
+
"grad_norm": 3.942030599345544,
|
| 1009 |
+
"learning_rate": 8.890360273736504e-06,
|
| 1010 |
+
"loss": 0.6584,
|
| 1011 |
+
"step": 1430
|
| 1012 |
+
},
|
| 1013 |
+
{
|
| 1014 |
+
"epoch": 1.829733163913596,
|
| 1015 |
+
"grad_norm": 4.037931457583538,
|
| 1016 |
+
"learning_rate": 8.734860063413974e-06,
|
| 1017 |
+
"loss": 0.6735,
|
| 1018 |
+
"step": 1440
|
| 1019 |
+
},
|
| 1020 |
+
{
|
| 1021 |
+
"epoch": 1.8424396442185516,
|
| 1022 |
+
"grad_norm": 3.6205476660211247,
|
| 1023 |
+
"learning_rate": 8.579670142946701e-06,
|
| 1024 |
+
"loss": 0.7102,
|
| 1025 |
+
"step": 1450
|
| 1026 |
+
},
|
| 1027 |
+
{
|
| 1028 |
+
"epoch": 1.855146124523507,
|
| 1029 |
+
"grad_norm": 3.821487835967331,
|
| 1030 |
+
"learning_rate": 8.42482857441506e-06,
|
| 1031 |
+
"loss": 0.6749,
|
| 1032 |
+
"step": 1460
|
| 1033 |
+
},
|
| 1034 |
+
{
|
| 1035 |
+
"epoch": 1.8678526048284625,
|
| 1036 |
+
"grad_norm": 3.3623194464637574,
|
| 1037 |
+
"learning_rate": 8.270373334462193e-06,
|
| 1038 |
+
"loss": 0.672,
|
| 1039 |
+
"step": 1470
|
| 1040 |
+
},
|
| 1041 |
+
{
|
| 1042 |
+
"epoch": 1.880559085133418,
|
| 1043 |
+
"grad_norm": 4.020841970961885,
|
| 1044 |
+
"learning_rate": 8.116342304979783e-06,
|
| 1045 |
+
"loss": 0.6863,
|
| 1046 |
+
"step": 1480
|
| 1047 |
+
},
|
| 1048 |
+
{
|
| 1049 |
+
"epoch": 1.8932655654383734,
|
| 1050 |
+
"grad_norm": 4.08254040286643,
|
| 1051 |
+
"learning_rate": 7.962773263817114e-06,
|
| 1052 |
+
"loss": 0.6815,
|
| 1053 |
+
"step": 1490
|
| 1054 |
+
},
|
| 1055 |
+
{
|
| 1056 |
+
"epoch": 1.9059720457433291,
|
| 1057 |
+
"grad_norm": 4.148274894889353,
|
| 1058 |
+
"learning_rate": 7.809703875515613e-06,
|
| 1059 |
+
"loss": 0.6417,
|
| 1060 |
+
"step": 1500
|
| 1061 |
+
},
|
| 1062 |
+
{
|
| 1063 |
+
"epoch": 1.9186785260482846,
|
| 1064 |
+
"grad_norm": 4.640824882446659,
|
| 1065 |
+
"learning_rate": 7.657171682071198e-06,
|
| 1066 |
+
"loss": 0.62,
|
| 1067 |
+
"step": 1510
|
| 1068 |
+
},
|
| 1069 |
+
{
|
| 1070 |
+
"epoch": 1.9313850063532403,
|
| 1071 |
+
"grad_norm": 4.7797510297359835,
|
| 1072 |
+
"learning_rate": 7.505214093726692e-06,
|
| 1073 |
+
"loss": 0.6439,
|
| 1074 |
+
"step": 1520
|
| 1075 |
+
},
|
| 1076 |
+
{
|
| 1077 |
+
"epoch": 1.9440914866581958,
|
| 1078 |
+
"grad_norm": 3.613563186875674,
|
| 1079 |
+
"learning_rate": 7.353868379796518e-06,
|
| 1080 |
+
"loss": 0.6705,
|
| 1081 |
+
"step": 1530
|
| 1082 |
+
},
|
| 1083 |
+
{
|
| 1084 |
+
"epoch": 1.9567979669631512,
|
| 1085 |
+
"grad_norm": 3.271201239131824,
|
| 1086 |
+
"learning_rate": 7.203171659526e-06,
|
| 1087 |
+
"loss": 0.6324,
|
| 1088 |
+
"step": 1540
|
| 1089 |
+
},
|
| 1090 |
+
{
|
| 1091 |
+
"epoch": 1.9695044472681067,
|
| 1092 |
+
"grad_norm": 3.89489541610708,
|
| 1093 |
+
"learning_rate": 7.053160892987434e-06,
|
| 1094 |
+
"loss": 0.6757,
|
| 1095 |
+
"step": 1550
|
| 1096 |
+
},
|
| 1097 |
+
{
|
| 1098 |
+
"epoch": 1.9822109275730622,
|
| 1099 |
+
"grad_norm": 3.701828258351079,
|
| 1100 |
+
"learning_rate": 6.903872872015209e-06,
|
| 1101 |
+
"loss": 0.6456,
|
| 1102 |
+
"step": 1560
|
| 1103 |
+
},
|
| 1104 |
+
{
|
| 1105 |
+
"epoch": 1.9949174078780176,
|
| 1106 |
+
"grad_norm": 3.5373164710070957,
|
| 1107 |
+
"learning_rate": 6.755344211182221e-06,
|
| 1108 |
+
"loss": 0.6166,
|
| 1109 |
+
"step": 1570
|
| 1110 |
+
},
|
| 1111 |
+
{
|
| 1112 |
+
"epoch": 2.007623888182973,
|
| 1113 |
+
"grad_norm": 2.425760113176382,
|
| 1114 |
+
"learning_rate": 6.607611338819697e-06,
|
| 1115 |
+
"loss": 0.5016,
|
| 1116 |
+
"step": 1580
|
| 1117 |
+
},
|
| 1118 |
+
{
|
| 1119 |
+
"epoch": 2.020330368487929,
|
| 1120 |
+
"grad_norm": 3.427501282817139,
|
| 1121 |
+
"learning_rate": 6.460710488082774e-06,
|
| 1122 |
+
"loss": 0.374,
|
| 1123 |
+
"step": 1590
|
| 1124 |
+
},
|
| 1125 |
+
{
|
| 1126 |
+
"epoch": 2.0330368487928845,
|
| 1127 |
+
"grad_norm": 3.4855149165350636,
|
| 1128 |
+
"learning_rate": 6.31467768806388e-06,
|
| 1129 |
+
"loss": 0.3524,
|
| 1130 |
+
"step": 1600
|
| 1131 |
+
},
|
| 1132 |
+
{
|
| 1133 |
+
"epoch": 2.04574332909784,
|
| 1134 |
+
"grad_norm": 3.5473678303457996,
|
| 1135 |
+
"learning_rate": 6.169548754956201e-06,
|
| 1136 |
+
"loss": 0.3485,
|
| 1137 |
+
"step": 1610
|
| 1138 |
+
},
|
| 1139 |
+
{
|
| 1140 |
+
"epoch": 2.0584498094027954,
|
| 1141 |
+
"grad_norm": 3.2554977371598466,
|
| 1142 |
+
"learning_rate": 6.025359283269363e-06,
|
| 1143 |
+
"loss": 0.348,
|
| 1144 |
+
"step": 1620
|
| 1145 |
+
},
|
| 1146 |
+
{
|
| 1147 |
+
"epoch": 2.071156289707751,
|
| 1148 |
+
"grad_norm": 3.4222657332943376,
|
| 1149 |
+
"learning_rate": 5.882144637099465e-06,
|
| 1150 |
+
"loss": 0.3753,
|
| 1151 |
+
"step": 1630
|
| 1152 |
+
},
|
| 1153 |
+
{
|
| 1154 |
+
"epoch": 2.0838627700127064,
|
| 1155 |
+
"grad_norm": 2.9777568505895675,
|
| 1156 |
+
"learning_rate": 5.739939941455644e-06,
|
| 1157 |
+
"loss": 0.3526,
|
| 1158 |
+
"step": 1640
|
| 1159 |
+
},
|
| 1160 |
+
{
|
| 1161 |
+
"epoch": 2.096569250317662,
|
| 1162 |
+
"grad_norm": 3.7955516489911805,
|
| 1163 |
+
"learning_rate": 5.598780073645267e-06,
|
| 1164 |
+
"loss": 0.3543,
|
| 1165 |
+
"step": 1650
|
| 1166 |
+
},
|
| 1167 |
+
{
|
| 1168 |
+
"epoch": 2.1092757306226178,
|
| 1169 |
+
"grad_norm": 3.8406500166667885,
|
| 1170 |
+
"learning_rate": 5.458699654719873e-06,
|
| 1171 |
+
"loss": 0.3642,
|
| 1172 |
+
"step": 1660
|
| 1173 |
+
},
|
| 1174 |
+
{
|
| 1175 |
+
"epoch": 2.121982210927573,
|
| 1176 |
+
"grad_norm": 3.813395969645494,
|
| 1177 |
+
"learning_rate": 5.319733040983972e-06,
|
| 1178 |
+
"loss": 0.3428,
|
| 1179 |
+
"step": 1670
|
| 1180 |
+
},
|
| 1181 |
+
{
|
| 1182 |
+
"epoch": 2.1346886912325287,
|
| 1183 |
+
"grad_norm": 3.7266891839301763,
|
| 1184 |
+
"learning_rate": 5.181914315568782e-06,
|
| 1185 |
+
"loss": 0.3403,
|
| 1186 |
+
"step": 1680
|
| 1187 |
+
},
|
| 1188 |
+
{
|
| 1189 |
+
"epoch": 2.147395171537484,
|
| 1190 |
+
"grad_norm": 3.688709734552298,
|
| 1191 |
+
"learning_rate": 5.0452772800729375e-06,
|
| 1192 |
+
"loss": 0.3469,
|
| 1193 |
+
"step": 1690
|
| 1194 |
+
},
|
| 1195 |
+
{
|
| 1196 |
+
"epoch": 2.1601016518424396,
|
| 1197 |
+
"grad_norm": 3.6629109337292403,
|
| 1198 |
+
"learning_rate": 4.909855446272288e-06,
|
| 1199 |
+
"loss": 0.3454,
|
| 1200 |
+
"step": 1700
|
| 1201 |
+
},
|
| 1202 |
+
{
|
| 1203 |
+
"epoch": 2.172808132147395,
|
| 1204 |
+
"grad_norm": 3.7085182263998555,
|
| 1205 |
+
"learning_rate": 4.775682027900739e-06,
|
| 1206 |
+
"loss": 0.341,
|
| 1207 |
+
"step": 1710
|
| 1208 |
+
},
|
| 1209 |
+
{
|
| 1210 |
+
"epoch": 2.1855146124523506,
|
| 1211 |
+
"grad_norm": 3.481723946532174,
|
| 1212 |
+
"learning_rate": 4.6427899325042135e-06,
|
| 1213 |
+
"loss": 0.3352,
|
| 1214 |
+
"step": 1720
|
| 1215 |
+
},
|
| 1216 |
+
{
|
| 1217 |
+
"epoch": 2.198221092757306,
|
| 1218 |
+
"grad_norm": 3.2839395610983027,
|
| 1219 |
+
"learning_rate": 4.511211753369712e-06,
|
| 1220 |
+
"loss": 0.3447,
|
| 1221 |
+
"step": 1730
|
| 1222 |
+
},
|
| 1223 |
+
{
|
| 1224 |
+
"epoch": 2.210927573062262,
|
| 1225 |
+
"grad_norm": 3.6755308055006464,
|
| 1226 |
+
"learning_rate": 4.380979761531431e-06,
|
| 1227 |
+
"loss": 0.3531,
|
| 1228 |
+
"step": 1740
|
| 1229 |
+
},
|
| 1230 |
+
{
|
| 1231 |
+
"epoch": 2.2236340533672174,
|
| 1232 |
+
"grad_norm": 3.7905960831955916,
|
| 1233 |
+
"learning_rate": 4.2521258978559324e-06,
|
| 1234 |
+
"loss": 0.356,
|
| 1235 |
+
"step": 1750
|
| 1236 |
+
},
|
| 1237 |
+
{
|
| 1238 |
+
"epoch": 2.236340533672173,
|
| 1239 |
+
"grad_norm": 3.627875927246556,
|
| 1240 |
+
"learning_rate": 4.124681765208286e-06,
|
| 1241 |
+
"loss": 0.3266,
|
| 1242 |
+
"step": 1760
|
| 1243 |
+
},
|
| 1244 |
+
{
|
| 1245 |
+
"epoch": 2.2490470139771284,
|
| 1246 |
+
"grad_norm": 3.3246186092589447,
|
| 1247 |
+
"learning_rate": 3.998678620701102e-06,
|
| 1248 |
+
"loss": 0.3386,
|
| 1249 |
+
"step": 1770
|
| 1250 |
+
},
|
| 1251 |
+
{
|
| 1252 |
+
"epoch": 2.261753494282084,
|
| 1253 |
+
"grad_norm": 3.804007286983282,
|
| 1254 |
+
"learning_rate": 3.874147368028396e-06,
|
| 1255 |
+
"loss": 0.3544,
|
| 1256 |
+
"step": 1780
|
| 1257 |
+
},
|
| 1258 |
+
{
|
| 1259 |
+
"epoch": 2.2744599745870393,
|
| 1260 |
+
"grad_norm": 3.143040423820396,
|
| 1261 |
+
"learning_rate": 3.751118549886065e-06,
|
| 1262 |
+
"loss": 0.3227,
|
| 1263 |
+
"step": 1790
|
| 1264 |
+
},
|
| 1265 |
+
{
|
| 1266 |
+
"epoch": 2.2871664548919948,
|
| 1267 |
+
"grad_norm": 3.352132852945674,
|
| 1268 |
+
"learning_rate": 3.6296223404809903e-06,
|
| 1269 |
+
"loss": 0.3399,
|
| 1270 |
+
"step": 1800
|
| 1271 |
+
},
|
| 1272 |
+
{
|
| 1273 |
+
"epoch": 2.2998729351969507,
|
| 1274 |
+
"grad_norm": 4.043987038976339,
|
| 1275 |
+
"learning_rate": 3.509688538130448e-06,
|
| 1276 |
+
"loss": 0.3369,
|
| 1277 |
+
"step": 1810
|
| 1278 |
+
},
|
| 1279 |
+
{
|
| 1280 |
+
"epoch": 2.312579415501906,
|
| 1281 |
+
"grad_norm": 3.954856965708331,
|
| 1282 |
+
"learning_rate": 3.39134655795374e-06,
|
| 1283 |
+
"loss": 0.341,
|
| 1284 |
+
"step": 1820
|
| 1285 |
+
},
|
| 1286 |
+
{
|
| 1287 |
+
"epoch": 2.3252858958068616,
|
| 1288 |
+
"grad_norm": 3.5214147520563626,
|
| 1289 |
+
"learning_rate": 3.2746254246578167e-06,
|
| 1290 |
+
"loss": 0.3365,
|
| 1291 |
+
"step": 1830
|
| 1292 |
+
},
|
| 1293 |
+
{
|
| 1294 |
+
"epoch": 2.337992376111817,
|
| 1295 |
+
"grad_norm": 3.218428553726758,
|
| 1296 |
+
"learning_rate": 3.1595537654186114e-06,
|
| 1297 |
+
"loss": 0.3546,
|
| 1298 |
+
"step": 1840
|
| 1299 |
+
},
|
| 1300 |
+
{
|
| 1301 |
+
"epoch": 2.3506988564167726,
|
| 1302 |
+
"grad_norm": 3.163287967416541,
|
| 1303 |
+
"learning_rate": 3.0461598028599305e-06,
|
| 1304 |
+
"loss": 0.3431,
|
| 1305 |
+
"step": 1850
|
| 1306 |
+
},
|
| 1307 |
+
{
|
| 1308 |
+
"epoch": 2.363405336721728,
|
| 1309 |
+
"grad_norm": 3.0988204272069573,
|
| 1310 |
+
"learning_rate": 2.9344713481315225e-06,
|
| 1311 |
+
"loss": 0.3303,
|
| 1312 |
+
"step": 1860
|
| 1313 |
+
},
|
| 1314 |
+
{
|
| 1315 |
+
"epoch": 2.3761118170266835,
|
| 1316 |
+
"grad_norm": 3.9034586935786395,
|
| 1317 |
+
"learning_rate": 2.8245157940880784e-06,
|
| 1318 |
+
"loss": 0.3337,
|
| 1319 |
+
"step": 1870
|
| 1320 |
+
},
|
| 1321 |
+
{
|
| 1322 |
+
"epoch": 2.388818297331639,
|
| 1323 |
+
"grad_norm": 3.5690630552722786,
|
| 1324 |
+
"learning_rate": 2.7163201085708424e-06,
|
| 1325 |
+
"loss": 0.3223,
|
| 1326 |
+
"step": 1880
|
| 1327 |
+
},
|
| 1328 |
+
{
|
| 1329 |
+
"epoch": 2.4015247776365944,
|
| 1330 |
+
"grad_norm": 3.163806174642701,
|
| 1331 |
+
"learning_rate": 2.6099108277934105e-06,
|
| 1332 |
+
"loss": 0.3398,
|
| 1333 |
+
"step": 1890
|
| 1334 |
+
},
|
| 1335 |
+
{
|
| 1336 |
+
"epoch": 2.4142312579415504,
|
| 1337 |
+
"grad_norm": 3.7465583268537275,
|
| 1338 |
+
"learning_rate": 2.505314049833457e-06,
|
| 1339 |
+
"loss": 0.3483,
|
| 1340 |
+
"step": 1900
|
| 1341 |
+
},
|
| 1342 |
+
{
|
| 1343 |
+
"epoch": 2.426937738246506,
|
| 1344 |
+
"grad_norm": 3.516374761436456,
|
| 1345 |
+
"learning_rate": 2.402555428231872e-06,
|
| 1346 |
+
"loss": 0.3273,
|
| 1347 |
+
"step": 1910
|
| 1348 |
+
},
|
| 1349 |
+
{
|
| 1350 |
+
"epoch": 2.4396442185514613,
|
| 1351 |
+
"grad_norm": 3.5353549798113284,
|
| 1352 |
+
"learning_rate": 2.3016601657009364e-06,
|
| 1353 |
+
"loss": 0.3374,
|
| 1354 |
+
"step": 1920
|
| 1355 |
+
},
|
| 1356 |
+
{
|
| 1357 |
+
"epoch": 2.4523506988564168,
|
| 1358 |
+
"grad_norm": 3.357432157861631,
|
| 1359 |
+
"learning_rate": 2.202653007943093e-06,
|
| 1360 |
+
"loss": 0.3464,
|
| 1361 |
+
"step": 1930
|
| 1362 |
+
},
|
| 1363 |
+
{
|
| 1364 |
+
"epoch": 2.4650571791613722,
|
| 1365 |
+
"grad_norm": 3.6506743298663675,
|
| 1366 |
+
"learning_rate": 2.1055582375817475e-06,
|
| 1367 |
+
"loss": 0.325,
|
| 1368 |
+
"step": 1940
|
| 1369 |
+
},
|
| 1370 |
+
{
|
| 1371 |
+
"epoch": 2.4777636594663277,
|
| 1372 |
+
"grad_norm": 3.907282101797735,
|
| 1373 |
+
"learning_rate": 2.0103996682057235e-06,
|
| 1374 |
+
"loss": 0.3255,
|
| 1375 |
+
"step": 1950
|
| 1376 |
+
},
|
| 1377 |
+
{
|
| 1378 |
+
"epoch": 2.490470139771283,
|
| 1379 |
+
"grad_norm": 3.711785490906897,
|
| 1380 |
+
"learning_rate": 1.9172006385286723e-06,
|
| 1381 |
+
"loss": 0.3391,
|
| 1382 |
+
"step": 1960
|
| 1383 |
+
},
|
| 1384 |
+
{
|
| 1385 |
+
"epoch": 2.503176620076239,
|
| 1386 |
+
"grad_norm": 3.2473323176322135,
|
| 1387 |
+
"learning_rate": 1.8259840066650136e-06,
|
| 1388 |
+
"loss": 0.3389,
|
| 1389 |
+
"step": 1970
|
| 1390 |
+
},
|
| 1391 |
+
{
|
| 1392 |
+
"epoch": 2.5158831003811946,
|
| 1393 |
+
"grad_norm": 3.6433209864443916,
|
| 1394 |
+
"learning_rate": 1.7367721445237285e-06,
|
| 1395 |
+
"loss": 0.3258,
|
| 1396 |
+
"step": 1980
|
| 1397 |
+
},
|
| 1398 |
+
{
|
| 1399 |
+
"epoch": 2.52858958068615,
|
| 1400 |
+
"grad_norm": 4.12961794749056,
|
| 1401 |
+
"learning_rate": 1.6495869323213654e-06,
|
| 1402 |
+
"loss": 0.3185,
|
| 1403 |
+
"step": 1990
|
| 1404 |
+
},
|
| 1405 |
+
{
|
| 1406 |
+
"epoch": 2.5412960609911055,
|
| 1407 |
+
"grad_norm": 4.1376649833602865,
|
| 1408 |
+
"learning_rate": 1.564449753215711e-06,
|
| 1409 |
+
"loss": 0.3247,
|
| 1410 |
+
"step": 2000
|
| 1411 |
+
},
|
| 1412 |
+
{
|
| 1413 |
+
"epoch": 2.554002541296061,
|
| 1414 |
+
"grad_norm": 4.441583691608097,
|
| 1415 |
+
"learning_rate": 1.4813814880612942e-06,
|
| 1416 |
+
"loss": 0.3198,
|
| 1417 |
+
"step": 2010
|
| 1418 |
+
},
|
| 1419 |
+
{
|
| 1420 |
+
"epoch": 2.5667090216010164,
|
| 1421 |
+
"grad_norm": 2.7847992176083047,
|
| 1422 |
+
"learning_rate": 1.4004025102881402e-06,
|
| 1423 |
+
"loss": 0.3143,
|
| 1424 |
+
"step": 2020
|
| 1425 |
+
},
|
| 1426 |
+
{
|
| 1427 |
+
"epoch": 2.579415501905972,
|
| 1428 |
+
"grad_norm": 3.1337243741428473,
|
| 1429 |
+
"learning_rate": 1.321532680904959e-06,
|
| 1430 |
+
"loss": 0.3312,
|
| 1431 |
+
"step": 2030
|
| 1432 |
+
},
|
| 1433 |
+
{
|
| 1434 |
+
"epoch": 2.5921219822109274,
|
| 1435 |
+
"grad_norm": 3.3653692372757225,
|
| 1436 |
+
"learning_rate": 1.2447913436279879e-06,
|
| 1437 |
+
"loss": 0.3129,
|
| 1438 |
+
"step": 2040
|
| 1439 |
+
},
|
| 1440 |
+
{
|
| 1441 |
+
"epoch": 2.604828462515883,
|
| 1442 |
+
"grad_norm": 3.7337029805672635,
|
| 1443 |
+
"learning_rate": 1.1701973201367544e-06,
|
| 1444 |
+
"loss": 0.3253,
|
| 1445 |
+
"step": 2050
|
| 1446 |
+
},
|
| 1447 |
+
{
|
| 1448 |
+
"epoch": 2.6175349428208388,
|
| 1449 |
+
"grad_norm": 4.214904637605022,
|
| 1450 |
+
"learning_rate": 1.09776890545782e-06,
|
| 1451 |
+
"loss": 0.3531,
|
| 1452 |
+
"step": 2060
|
| 1453 |
+
},
|
| 1454 |
+
{
|
| 1455 |
+
"epoch": 2.6302414231257942,
|
| 1456 |
+
"grad_norm": 3.3613800857078764,
|
| 1457 |
+
"learning_rate": 1.0275238634777441e-06,
|
| 1458 |
+
"loss": 0.3105,
|
| 1459 |
+
"step": 2070
|
| 1460 |
+
},
|
| 1461 |
+
{
|
| 1462 |
+
"epoch": 2.6429479034307497,
|
| 1463 |
+
"grad_norm": 3.7555272174929595,
|
| 1464 |
+
"learning_rate": 9.594794225862692e-07,
|
| 1465 |
+
"loss": 0.3331,
|
| 1466 |
+
"step": 2080
|
| 1467 |
+
},
|
| 1468 |
+
{
|
| 1469 |
+
"epoch": 2.655654383735705,
|
| 1470 |
+
"grad_norm": 3.6364434124366,
|
| 1471 |
+
"learning_rate": 8.936522714508678e-07,
|
| 1472 |
+
"loss": 0.3336,
|
| 1473 |
+
"step": 2090
|
| 1474 |
+
},
|
| 1475 |
+
{
|
| 1476 |
+
"epoch": 2.6683608640406606,
|
| 1477 |
+
"grad_norm": 3.684690863431174,
|
| 1478 |
+
"learning_rate": 8.300585549236773e-07,
|
| 1479 |
+
"loss": 0.3232,
|
| 1480 |
+
"step": 2100
|
| 1481 |
+
},
|
| 1482 |
+
{
|
| 1483 |
+
"epoch": 2.681067344345616,
|
| 1484 |
+
"grad_norm": 4.307296056522447,
|
| 1485 |
+
"learning_rate": 7.687138700817598e-07,
|
| 1486 |
+
"loss": 0.3165,
|
| 1487 |
+
"step": 2110
|
| 1488 |
+
},
|
| 1489 |
+
{
|
| 1490 |
+
"epoch": 2.693773824650572,
|
| 1491 |
+
"grad_norm": 3.418062959790304,
|
| 1492 |
+
"learning_rate": 7.096332624017755e-07,
|
| 1493 |
+
"loss": 0.3126,
|
| 1494 |
+
"step": 2120
|
| 1495 |
+
},
|
| 1496 |
+
{
|
| 1497 |
+
"epoch": 2.7064803049555275,
|
| 1498 |
+
"grad_norm": 3.5508346766299397,
|
| 1499 |
+
"learning_rate": 6.528312220698885e-07,
|
| 1500 |
+
"loss": 0.3303,
|
| 1501 |
+
"step": 2130
|
| 1502 |
+
},
|
| 1503 |
+
{
|
| 1504 |
+
"epoch": 2.719186785260483,
|
| 1505 |
+
"grad_norm": 4.518044935525217,
|
| 1506 |
+
"learning_rate": 5.983216804278869e-07,
|
| 1507 |
+
"loss": 0.3191,
|
| 1508 |
+
"step": 2140
|
| 1509 |
+
},
|
| 1510 |
+
{
|
| 1511 |
+
"epoch": 2.7318932655654384,
|
| 1512 |
+
"grad_norm": 3.2181606746612044,
|
| 1513 |
+
"learning_rate": 5.461180065563787e-07,
|
| 1514 |
+
"loss": 0.3059,
|
| 1515 |
+
"step": 2150
|
| 1516 |
+
},
|
| 1517 |
+
{
|
| 1518 |
+
"epoch": 2.744599745870394,
|
| 1519 |
+
"grad_norm": 3.8710207257234144,
|
| 1520 |
+
"learning_rate": 4.962330039958585e-07,
|
| 1521 |
+
"loss": 0.3194,
|
| 1522 |
+
"step": 2160
|
| 1523 |
+
},
|
| 1524 |
+
{
|
| 1525 |
+
"epoch": 2.7573062261753494,
|
| 1526 |
+
"grad_norm": 3.6523179671049215,
|
| 1527 |
+
"learning_rate": 4.486789076064968e-07,
|
| 1528 |
+
"loss": 0.3148,
|
| 1529 |
+
"step": 2170
|
| 1530 |
+
},
|
| 1531 |
+
{
|
| 1532 |
+
"epoch": 2.770012706480305,
|
| 1533 |
+
"grad_norm": 3.5451882545264053,
|
| 1534 |
+
"learning_rate": 4.034673805674116e-07,
|
| 1535 |
+
"loss": 0.3285,
|
| 1536 |
+
"step": 2180
|
| 1537 |
+
},
|
| 1538 |
+
{
|
| 1539 |
+
"epoch": 2.7827191867852603,
|
| 1540 |
+
"grad_norm": 3.0920265120158827,
|
| 1541 |
+
"learning_rate": 3.606095115161279e-07,
|
| 1542 |
+
"loss": 0.3172,
|
| 1543 |
+
"step": 2190
|
| 1544 |
+
},
|
| 1545 |
+
{
|
| 1546 |
+
"epoch": 2.795425667090216,
|
| 1547 |
+
"grad_norm": 3.496745467546405,
|
| 1548 |
+
"learning_rate": 3.201158118289793e-07,
|
| 1549 |
+
"loss": 0.3183,
|
| 1550 |
+
"step": 2200
|
| 1551 |
+
},
|
| 1552 |
+
{
|
| 1553 |
+
"epoch": 2.8081321473951717,
|
| 1554 |
+
"grad_norm": 3.361865186046809,
|
| 1555 |
+
"learning_rate": 2.8199621304306425e-07,
|
| 1556 |
+
"loss": 0.3209,
|
| 1557 |
+
"step": 2210
|
| 1558 |
+
},
|
| 1559 |
+
{
|
| 1560 |
+
"epoch": 2.820838627700127,
|
| 1561 |
+
"grad_norm": 3.951234253131063,
|
| 1562 |
+
"learning_rate": 2.46260064420426e-07,
|
| 1563 |
+
"loss": 0.3165,
|
| 1564 |
+
"step": 2220
|
| 1565 |
+
},
|
| 1566 |
+
{
|
| 1567 |
+
"epoch": 2.8335451080050826,
|
| 1568 |
+
"grad_norm": 4.033338515766138,
|
| 1569 |
+
"learning_rate": 2.1291613065504313e-07,
|
| 1570 |
+
"loss": 0.3233,
|
| 1571 |
+
"step": 2230
|
| 1572 |
+
},
|
| 1573 |
+
{
|
| 1574 |
+
"epoch": 2.846251588310038,
|
| 1575 |
+
"grad_norm": 3.8424683095800303,
|
| 1576 |
+
"learning_rate": 1.819725897231872e-07,
|
| 1577 |
+
"loss": 0.318,
|
| 1578 |
+
"step": 2240
|
| 1579 |
+
},
|
| 1580 |
+
{
|
| 1581 |
+
"epoch": 2.8589580686149936,
|
| 1582 |
+
"grad_norm": 3.6002718533091,
|
| 1583 |
+
"learning_rate": 1.5343703087768225e-07,
|
| 1584 |
+
"loss": 0.3323,
|
| 1585 |
+
"step": 2250
|
| 1586 |
+
},
|
| 1587 |
+
{
|
| 1588 |
+
"epoch": 2.871664548919949,
|
| 1589 |
+
"grad_norm": 3.392955644057836,
|
| 1590 |
+
"learning_rate": 1.2731645278655448e-07,
|
| 1591 |
+
"loss": 0.3088,
|
| 1592 |
+
"step": 2260
|
| 1593 |
+
},
|
| 1594 |
+
{
|
| 1595 |
+
"epoch": 2.884371029224905,
|
| 1596 |
+
"grad_norm": 3.9060628771881842,
|
| 1597 |
+
"learning_rate": 1.0361726181653209e-07,
|
| 1598 |
+
"loss": 0.3213,
|
| 1599 |
+
"step": 2270
|
| 1600 |
+
},
|
| 1601 |
+
{
|
| 1602 |
+
"epoch": 2.8970775095298604,
|
| 1603 |
+
"grad_norm": 3.6937829161145515,
|
| 1604 |
+
"learning_rate": 8.234527046180885e-08,
|
| 1605 |
+
"loss": 0.3193,
|
| 1606 |
+
"step": 2280
|
| 1607 |
+
},
|
| 1608 |
+
{
|
| 1609 |
+
"epoch": 2.909783989834816,
|
| 1610 |
+
"grad_norm": 3.7501161450474036,
|
| 1611 |
+
"learning_rate": 6.350569591846434e-08,
|
| 1612 |
+
"loss": 0.3334,
|
| 1613 |
+
"step": 2290
|
| 1614 |
+
},
|
| 1615 |
+
{
|
| 1616 |
+
"epoch": 2.9224904701397714,
|
| 1617 |
+
"grad_norm": 3.4784424672902605,
|
| 1618 |
+
"learning_rate": 4.710315880489091e-08,
|
| 1619 |
+
"loss": 0.3273,
|
| 1620 |
+
"step": 2300
|
| 1621 |
+
},
|
| 1622 |
+
{
|
| 1623 |
+
"epoch": 2.935196950444727,
|
| 1624 |
+
"grad_norm": 3.4126846620318707,
|
| 1625 |
+
"learning_rate": 3.31416820285313e-08,
|
| 1626 |
+
"loss": 0.3177,
|
| 1627 |
+
"step": 2310
|
| 1628 |
+
},
|
| 1629 |
+
{
|
| 1630 |
+
"epoch": 2.9479034307496823,
|
| 1631 |
+
"grad_norm": 3.9848361195981785,
|
| 1632 |
+
"learning_rate": 2.1624689799214503e-08,
|
| 1633 |
+
"loss": 0.322,
|
| 1634 |
+
"step": 2320
|
| 1635 |
+
},
|
| 1636 |
+
{
|
| 1637 |
+
"epoch": 2.9606099110546378,
|
| 1638 |
+
"grad_norm": 3.6028548305792203,
|
| 1639 |
+
"learning_rate": 1.2555006789334301e-08,
|
| 1640 |
+
"loss": 0.3038,
|
| 1641 |
+
"step": 2330
|
| 1642 |
+
},
|
| 1643 |
+
{
|
| 1644 |
+
"epoch": 2.9733163913595932,
|
| 1645 |
+
"grad_norm": 4.107748523282802,
|
| 1646 |
+
"learning_rate": 5.934857441062258e-09,
|
| 1647 |
+
"loss": 0.313,
|
| 1648 |
+
"step": 2340
|
| 1649 |
+
},
|
| 1650 |
+
{
|
| 1651 |
+
"epoch": 2.9860228716645487,
|
| 1652 |
+
"grad_norm": 4.057112734457165,
|
| 1653 |
+
"learning_rate": 1.765865420779722e-09,
|
| 1654 |
+
"loss": 0.315,
|
| 1655 |
+
"step": 2350
|
| 1656 |
+
},
|
| 1657 |
+
{
|
| 1658 |
+
"epoch": 2.998729351969504,
|
| 1659 |
+
"grad_norm": 3.491454630886239,
|
| 1660 |
+
"learning_rate": 4.9053220856354335e-11,
|
| 1661 |
+
"loss": 0.328,
|
| 1662 |
+
"step": 2360
|
| 1663 |
+
},
|
| 1664 |
+
{
|
| 1665 |
+
"epoch": 3.0,
|
| 1666 |
+
"step": 2361,
|
| 1667 |
+
"total_flos": 121880697913344.0,
|
| 1668 |
+
"train_loss": 0.6645450910134418,
|
| 1669 |
+
"train_runtime": 16043.0595,
|
| 1670 |
+
"train_samples_per_second": 1.177,
|
| 1671 |
+
"train_steps_per_second": 0.147
|
| 1672 |
+
}
|
| 1673 |
+
],
|
| 1674 |
+
"logging_steps": 10,
|
| 1675 |
+
"max_steps": 2361,
|
| 1676 |
+
"num_input_tokens_seen": 0,
|
| 1677 |
+
"num_train_epochs": 3,
|
| 1678 |
+
"save_steps": 500000,
|
| 1679 |
+
"stateful_callbacks": {
|
| 1680 |
+
"TrainerControl": {
|
| 1681 |
+
"args": {
|
| 1682 |
+
"should_epoch_stop": false,
|
| 1683 |
+
"should_evaluate": false,
|
| 1684 |
+
"should_log": false,
|
| 1685 |
+
"should_save": true,
|
| 1686 |
+
"should_training_stop": true
|
| 1687 |
+
},
|
| 1688 |
+
"attributes": {}
|
| 1689 |
+
}
|
| 1690 |
+
},
|
| 1691 |
+
"total_flos": 121880697913344.0,
|
| 1692 |
+
"train_batch_size": 1,
|
| 1693 |
+
"trial_name": null,
|
| 1694 |
+
"trial_params": null
|
| 1695 |
+
}
|
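The trainer_state.json shown above ends with the per-step training log (epoch, grad_norm, learning_rate, loss) and the run summary (train_loss ≈ 0.6645, ~16,043 s runtime, 2,361 steps over 3 epochs). As a minimal sketch of how this log could be re-plotted — roughly what SFT/training_loss.png visualizes — assuming the repository has been downloaded locally and matplotlib is installed (the local path "SFT/trainer_state.json" and the output filename are assumptions, not part of the upload):

```python
# Minimal sketch: re-plot the SFT training loss curve from trainer_state.json.
# Assumes the repo was downloaded so SFT/trainer_state.json exists locally.
import json

import matplotlib.pyplot as plt

with open("SFT/trainer_state.json") as f:
    state = json.load(f)

# log_history holds the per-step entries shown above (step, loss, learning_rate, ...).
steps = [e["step"] for e in state["log_history"] if "loss" in e]
losses = [e["loss"] for e in state["log_history"] if "loss" in e]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title("SFT training loss")
plt.savefig("training_loss_replot.png")
```

Note that the final summary entry in log_history reports train_loss rather than loss, so the filter above skips it and only the per-step points are plotted.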
SFT/training_args.bin
ADDED
|
@@ -0,0 +1,3 @@
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:1e920bddacad73488285793502481f6fe770fd55b1c85840db591a5987c0ea33
|
| 3 |
+
size 7608
|
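training_args.bin is the serialized TrainingArguments object that the Trainer saves next to the checkpoint. A hedged sketch for inspecting it, assuming it was written with torch.save as the HF Trainer normally does (the local path is an assumption; weights_only=False is needed on recent PyTorch because the file is a pickled Python object rather than a tensor archive):

```python
# Sketch: inspect the pickled TrainingArguments (assumed written by the HF Trainer).
import torch
import transformers  # imported so unpickling can resolve the TrainingArguments class

args = torch.load("SFT/training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.lr_scheduler_type)
```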
SFT/training_loss.png
ADDED
|
SFT/video_preprocessor_config.json
ADDED
|
@@ -0,0 +1,43 @@
|
| 1 |
+
{
|
| 2 |
+
"crop_size": null,
|
| 3 |
+
"data_format": "channels_first",
|
| 4 |
+
"default_to_square": true,
|
| 5 |
+
"device": null,
|
| 6 |
+
"do_center_crop": null,
|
| 7 |
+
"do_convert_rgb": true,
|
| 8 |
+
"do_normalize": true,
|
| 9 |
+
"do_pad": null,
|
| 10 |
+
"do_rescale": true,
|
| 11 |
+
"do_resize": true,
|
| 12 |
+
"do_sample_frames": false,
|
| 13 |
+
"fps": null,
|
| 14 |
+
"image_mean": [
|
| 15 |
+
0.48145466,
|
| 16 |
+
0.4578275,
|
| 17 |
+
0.40821073
|
| 18 |
+
],
|
| 19 |
+
"image_std": [
|
| 20 |
+
0.26862954,
|
| 21 |
+
0.26130258,
|
| 22 |
+
0.27577711
|
| 23 |
+
],
|
| 24 |
+
"input_data_format": null,
|
| 25 |
+
"max_frames": 768,
|
| 26 |
+
"max_pixels": 12845056,
|
| 27 |
+
"merge_size": 2,
|
| 28 |
+
"min_frames": 4,
|
| 29 |
+
"min_pixels": 3136,
|
| 30 |
+
"num_frames": null,
|
| 31 |
+
"patch_size": 14,
|
| 32 |
+
"processor_class": "Qwen2_5_VLProcessor",
|
| 33 |
+
"resample": 3,
|
| 34 |
+
"rescale_factor": 0.00392156862745098,
|
| 35 |
+
"size": {
|
| 36 |
+
"longest_edge": 12845056,
|
| 37 |
+
"shortest_edge": 3136
|
| 38 |
+
},
|
| 39 |
+
"size_divisor": null,
|
| 40 |
+
"temporal_patch_size": 2,
|
| 41 |
+
"video_metadata": null,
|
| 42 |
+
"video_processor_type": "Qwen2VLVideoProcessor"
|
| 43 |
+
}
|
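video_preprocessor_config.json, together with preprocessor_config.json and the tokenizer files, lets the full multimodal processor be rebuilt from this folder. A minimal sketch, assuming the upload has been downloaded to a local SFT/ directory (the path is an assumption):

```python
# Sketch: rebuild the processor described by the preprocessor / video-preprocessor
# configs. AutoProcessor reads processor_class ("Qwen2_5_VLProcessor") from the
# config files in the folder; "SFT" is assumed to be the local download path.
from transformers import AutoProcessor

processor = AutoProcessor.from_pretrained("SFT")
print(type(processor).__name__)  # expected: Qwen2_5_VLProcessor
```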
SFT/vocab.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|