melihcatal committed
Commit 45b1c92 · verified · 1 Parent(s): 076fd74

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. .gitattributes +18 -0
  2. granite-4.0-h-tiny/base_attn/adapter/adapter_model.safetensors +3 -0
  3. granite-4.0-h-tiny/base_attn/audit_scores.npz +3 -0
  4. granite-4.0-h-tiny/dp3_attn/adapter/adapter_model.safetensors +3 -0
  5. granite-4.0-h-tiny/dp3_attn/audit_scores.npz +3 -0
  6. granite-4.0-h-tiny/dp8_attn/adapter/adapter_model.safetensors +3 -0
  7. granite-4.0-h-tiny/dp8_attn/audit_scores.npz +3 -0
  8. llama3-8b/base/adapter/adapter_model.safetensors +3 -0
  9. llama3-8b/base/audit_scores.npz +3 -0
  10. llama3-8b/base/tokenizer/tokenizer.json +3 -0
  11. llama3-8b/dp3/adapter/adapter_model.safetensors +3 -0
  12. llama3-8b/dp3/audit_scores.npz +3 -0
  13. llama3-8b/dp3/tokenizer/tokenizer.json +3 -0
  14. llama3-8b/dp8/adapter/adapter_model.safetensors +3 -0
  15. llama3-8b/dp8/audit_scores.npz +3 -0
  16. llama3-8b/dp8/tokenizer/tokenizer.json +3 -0
  17. llama3-8b/dp8_v2/audit_scores.npz +3 -0
  18. llama3-8b/dp8_v2/tokenizer/tokenizer.json +3 -0
  19. llama3.1-8b/dp3/adapter/adapter_model.safetensors +3 -0
  20. llama3.1-8b/dp3/audit_scores.npz +3 -0
  21. llama3.1-8b/dp3/tokenizer/tokenizer.json +3 -0
  22. llama3.1-8b/dp8/adapter/adapter_model.safetensors +3 -0
  23. llama3.1-8b/dp8/audit_scores.npz +3 -0
  24. llama3.1-8b/dp8/tokenizer/tokenizer.json +3 -0
  25. llama3.2-3b/base/adapter/README.md +207 -0
  26. llama3.2-3b/base/adapter/adapter_config.json +46 -0
  27. llama3.2-3b/base/adapter/adapter_model.safetensors +3 -0
  28. llama3.2-3b/base/audit_results.json +137 -0
  29. llama3.2-3b/base/audit_scores.npz +3 -0
  30. llama3.2-3b/base/canary_meta.json +0 -0
  31. llama3.2-3b/base/metrics.jsonl +0 -0
  32. llama3.2-3b/base/resolved_config.yaml +102 -0
  33. llama3.2-3b/base/summary.json +71 -0
  34. llama3.2-3b/base/tokenizer/tokenizer.json +3 -0
  35. llama3.2-3b/base/tokenizer/tokenizer_config.json +516 -0
  36. llama3.2-3b/base/train.log +274 -0
  37. llama3.2-3b/dp3/adapter/README.md +207 -0
  38. llama3.2-3b/dp3/adapter/adapter_config.json +46 -0
  39. llama3.2-3b/dp3/adapter/adapter_model.safetensors +3 -0
  40. llama3.2-3b/dp3/audit_results.json +137 -0
  41. llama3.2-3b/dp3/audit_scores.npz +3 -0
  42. llama3.2-3b/dp3/canary_meta.json +0 -0
  43. llama3.2-3b/dp3/metrics.jsonl +0 -0
  44. llama3.2-3b/dp3/resolved_config.yaml +102 -0
  45. llama3.2-3b/dp3/summary.json +72 -0
  46. llama3.2-3b/dp3/tokenizer/tokenizer.json +3 -0
  47. llama3.2-3b/dp3/tokenizer/tokenizer_config.json +516 -0
  48. llama3.2-3b/dp3/train.log +276 -0
  49. llama3.2-3b/dp8/adapter/README.md +207 -0
  50. llama3.2-3b/dp8/adapter/adapter_config.json +46 -0
.gitattributes CHANGED
@@ -33,3 +33,21 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+ qwen1.5-moe-a2.7b/dp8_attn_v2/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ qwen1.5-moe-a2.7b/base_attn_v2/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ qwen1.5-moe-a2.7b/dp3_attn/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ qwen1.5-moe-a2.7b/dp3_attn_v2/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ llama3.1-8b/dp8/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ llama3.1-8b/dp3/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ qwen3-8b-base/base/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ qwen3-8b-base/dp8/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ qwen3-8b-base/dp3/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ qwen3-8b-base/dp8_v2/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ qwen3-8b-base/dp3_v2/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ llama3-8b/base/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ llama3-8b/dp8/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ llama3-8b/dp3/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ llama3-8b/dp8_v2/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ llama3.2-3b/base/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ llama3.2-3b/dp8/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ llama3.2-3b/dp3/tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
granite-4.0-h-tiny/base_attn/adapter/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b32d0352013767cd5858ee6069f6c84feae77a846e2868b14d13fd0778dfd205
+ size 1861530512
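
Each binary in this commit is checked in as a Git LFS pointer like the one above: three "key value" lines giving the LFS spec version, the blob's sha256 digest, and its size in bytes. After materializing the real blobs (e.g. with `git lfs pull`), a pointer can be checked against the downloaded file. A minimal sketch, assuming a local checkout; the pointer text is read from wherever you have it (the path below is illustrative):

```python
import hashlib
from pathlib import Path

def parse_lfs_pointer(text: str) -> dict:
    # Pointer files are "key value" lines: version, oid sha256:<hex>, size <bytes>.
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {"oid": fields["oid"].removeprefix("sha256:"), "size": int(fields["size"])}

def blob_matches_pointer(pointer_text: str, blob_path: str) -> bool:
    # True iff the fetched blob has the byte size and sha256 named in the pointer.
    ptr = parse_lfs_pointer(pointer_text)
    data = Path(blob_path).read_bytes()
    return len(data) == ptr["size"] and hashlib.sha256(data).hexdigest() == ptr["oid"]
```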
granite-4.0-h-tiny/base_attn/audit_scores.npz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8b965020d1631635cace628929dd0c75d3bfafc2bc61b404a7bdbf20927e7a27
+ size 12784
granite-4.0-h-tiny/dp3_attn/adapter/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0996972036c04c0f2ae9ceb54e19c2ebc11a99e561e646c459050b58ace6ed0d
+ size 1861530512
granite-4.0-h-tiny/dp3_attn/audit_scores.npz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:15cf4f83f26c6de8291f3ea7dad6ac91e3c2b36833aa200f509b15740515edb4
+ size 12784
granite-4.0-h-tiny/dp8_attn/adapter/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6d5f425402e00447597609be39a1c28a4ca07c650e8f3591f8e55887997818d
+ size 1861530512
granite-4.0-h-tiny/dp8_attn/audit_scores.npz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c66a33299782f0a00ef0092b5ab5c066ce91da0a82fcfd0de33c7ece11e7955f
+ size 12784
llama3-8b/base/adapter/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e62858eb4deac28cef22d9ec52926089668b6cc91f8fbd4fd259698f3e2eb940
+ size 6383176048
llama3-8b/base/audit_scores.npz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa072c48f4bc09fc8b0c078c480adca7687b90b8a07cf84be4c3e03b8610d755
+ size 12784
llama3-8b/base/tokenizer/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:021395c825a3d66cfc71d6aba9b80acec5eb1f7a6525368638b44a35e38a3649
+ size 17304461
llama3-8b/dp3/adapter/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b47af81dcd7835041421898e84d60bbaa85e700c9b695ba67a6f49330488b980
+ size 6383176048
llama3-8b/dp3/audit_scores.npz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:51785df3b0c88e63d08e82bf73d36f80dd3f19f7d14bd83bf628c0b5c77d3c6e
+ size 12784
llama3-8b/dp3/tokenizer/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:021395c825a3d66cfc71d6aba9b80acec5eb1f7a6525368638b44a35e38a3649
+ size 17304461
llama3-8b/dp8/adapter/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9f4231d32a43f0dedd5520104aaf354662104c10c78003c28175603b6a86cdd8
+ size 6383176048
llama3-8b/dp8/audit_scores.npz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:876c3ed7b63cd6dedf8d63c7e2db7daa0d08b8856f168b2700ecae91b0d63536
+ size 12784
llama3-8b/dp8/tokenizer/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:021395c825a3d66cfc71d6aba9b80acec5eb1f7a6525368638b44a35e38a3649
+ size 17304461
llama3-8b/dp8_v2/audit_scores.npz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c6fd4804460d9b997af63a9e02df99c3accefbcf2c3741a3d8bfe27dabb5c669
+ size 12784
llama3-8b/dp8_v2/tokenizer/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:021395c825a3d66cfc71d6aba9b80acec5eb1f7a6525368638b44a35e38a3649
+ size 17304461
llama3.1-8b/dp3/adapter/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2029c8812f0974e448f617f8626fddba2a583b849cda5bfa9c3d914f145e53fa
+ size 6383176048
llama3.1-8b/dp3/audit_scores.npz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:55a0cb3a121b2af0318bf6e3e20e0ab6d8127da80de6c781a42d7a46c73ea881
+ size 12784
llama3.1-8b/dp3/tokenizer/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a51697eccb3cec5f738016546cd668e72543a96f95900714d89e9c88f41271bf
+ size 17304420
llama3.1-8b/dp8/adapter/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2c06270c085b585399afecabda594e2dc94c0d9b365d975e7e2932c36b023356
+ size 6383176048
llama3.1-8b/dp8/audit_scores.npz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:54bd912871ebf6d7cb29b11b8bb36008384f67a5c75c3a4f4f4fce3b9baa35e5
+ size 12784
llama3.1-8b/dp8/tokenizer/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a51697eccb3cec5f738016546cd668e72543a96f95900714d89e9c88f41271bf
+ size 17304420
llama3.2-3b/base/adapter/README.md ADDED
@@ -0,0 +1,207 @@
+ ---
+ base_model: meta-llama/Llama-3.2-3B
+ library_name: peft
+ pipeline_tag: text-generation
+ tags:
+ - base_model:adapter:meta-llama/Llama-3.2-3B
+ - lora
+ - transformers
+ ---
+
+ # Model Card for Model ID
+
+ <!-- Provide a quick summary of what the model is/does. -->
+
+
+
+ ## Model Details
+
+ ### Model Description
+
+ <!-- Provide a longer summary of what this model is. -->
+
+
+
+ - **Developed by:** [More Information Needed]
+ - **Funded by [optional]:** [More Information Needed]
+ - **Shared by [optional]:** [More Information Needed]
+ - **Model type:** [More Information Needed]
+ - **Language(s) (NLP):** [More Information Needed]
+ - **License:** [More Information Needed]
+ - **Finetuned from model [optional]:** [More Information Needed]
+
+ ### Model Sources [optional]
+
+ <!-- Provide the basic links for the model. -->
+
+ - **Repository:** [More Information Needed]
+ - **Paper [optional]:** [More Information Needed]
+ - **Demo [optional]:** [More Information Needed]
+
+ ## Uses
+
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
+
+ ### Direct Use
+
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
+
+ [More Information Needed]
+
+ ### Downstream Use [optional]
+
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
+
+ [More Information Needed]
+
+ ### Out-of-Scope Use
+
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
+
+ [More Information Needed]
+
+ ## Bias, Risks, and Limitations
+
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
+
+ [More Information Needed]
+
+ ### Recommendations
+
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
+
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+ ## How to Get Started with the Model
+
+ Use the code below to get started with the model.
+
+ [More Information Needed]
+
+ ## Training Details
+
+ ### Training Data
+
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
+
+ [More Information Needed]
+
+ ### Training Procedure
+
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
+
+ #### Preprocessing [optional]
+
+ [More Information Needed]
+
+
+ #### Training Hyperparameters
+
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
+
+ #### Speeds, Sizes, Times [optional]
+
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
+
+ [More Information Needed]
+
+ ## Evaluation
+
+ <!-- This section describes the evaluation protocols and provides the results. -->
+
+ ### Testing Data, Factors & Metrics
+
+ #### Testing Data
+
+ <!-- This should link to a Dataset Card if possible. -->
+
+ [More Information Needed]
+
+ #### Factors
+
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
+
+ [More Information Needed]
+
+ #### Metrics
+
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
+
+ [More Information Needed]
+
+ ### Results
+
+ [More Information Needed]
+
+ #### Summary
+
+
+
+ ## Model Examination [optional]
+
+ <!-- Relevant interpretability work for the model goes here -->
+
+ [More Information Needed]
+
+ ## Environmental Impact
+
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
+
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+ - **Hardware Type:** [More Information Needed]
+ - **Hours used:** [More Information Needed]
+ - **Cloud Provider:** [More Information Needed]
+ - **Compute Region:** [More Information Needed]
+ - **Carbon Emitted:** [More Information Needed]
+
+ ## Technical Specifications [optional]
+
+ ### Model Architecture and Objective
+
+ [More Information Needed]
+
+ ### Compute Infrastructure
+
+ [More Information Needed]
+
+ #### Hardware
+
+ [More Information Needed]
+
+ #### Software
+
+ [More Information Needed]
+
+ ## Citation [optional]
+
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
+
+ **BibTeX:**
+
+ [More Information Needed]
+
+ **APA:**
+
+ [More Information Needed]
+
+ ## Glossary [optional]
+
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
+
+ [More Information Needed]
+
+ ## More Information [optional]
+
+ [More Information Needed]
+
+ ## Model Card Authors [optional]
+
+ [More Information Needed]
+
+ ## Model Card Contact
+
+ [More Information Needed]
+ ### Framework versions
+
+ - PEFT 0.18.1
llama3.2-3b/base/adapter/adapter_config.json ADDED
@@ -0,0 +1,46 @@
+ {
+   "alora_invocation_tokens": null,
+   "alpha_pattern": {},
+   "arrow_config": null,
+   "auto_mapping": null,
+   "base_model_name_or_path": "meta-llama/Llama-3.2-3B",
+   "bias": "none",
+   "corda_config": null,
+   "ensure_weight_tying": true,
+   "eva_config": null,
+   "exclude_modules": null,
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layer_replication": null,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 32,
+   "lora_bias": false,
+   "lora_dropout": 0.05,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": [
+     "lm_head",
+     "embed_tokens"
+   ],
+   "peft_type": "LORA",
+   "peft_version": "0.18.1",
+   "qalora_group_size": 16,
+   "r": 16,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "q_proj",
+     "o_proj",
+     "k_proj",
+     "v_proj"
+   ],
+   "target_parameters": null,
+   "task_type": "CAUSAL_LM",
+   "trainable_token_indices": null,
+   "use_dora": false,
+   "use_qalora": false,
+   "use_rslora": false
+ }
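
The config above describes a LoRA adapter (r=16, alpha=32, dropout 0.05) on the attention projections q/k/v/o_proj, with `lm_head` and `embed_tokens` trained in full via `modules_to_save`. A minimal loading sketch using the standard `transformers`/`peft` APIs; the relative paths assume this repo is checked out locally with the LFS blobs pulled:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

# Base model named in adapter_config.json; bfloat16 matches the run config.
base = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Llama-3.2-3B", torch_dtype=torch.bfloat16
)
# Attach the LoRA adapter together with the saved lm_head/embed_tokens weights.
model = PeftModel.from_pretrained(base, "llama3.2-3b/base/adapter")
# Use the tokenizer shipped with the run (it carries the extra special tokens).
tokenizer = AutoTokenizer.from_pretrained("llama3.2-3b/base/tokenizer")
```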
llama3.2-3b/base/adapter/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6bfbbec9ae85e4aefa425df68c2e9ff2653ddcc31f8d596d1ae777ad2edcaf87
+ size 4783192176
llama3.2-3b/base/audit_results.json ADDED
@@ -0,0 +1,137 @@
+ {
+   "delta": 1e-05,
+   "num_canaries": 500,
+   "num_members": 250,
+   "paper_guess_fraction": 0.2,
+   "paper_guess_steps": 20,
+   "loss": {
+     "auc": 1.0,
+     "empirical_epsilon": {
+       "0.05": 3.4791953936219215,
+       "0.01": 3.023197554051876
+     },
+     "empirical_epsilon_details": {
+       "0.05": {
+         "epsilon": 3.4791953936219215,
+         "num_guesses": 100,
+         "correct_guesses": 100,
+         "candidate_num_guesses": [
+           5,
+           10,
+           15,
+           20,
+           25,
+           30,
+           35,
+           40,
+           45,
+           50,
+           55,
+           60,
+           65,
+           70,
+           75,
+           80,
+           85,
+           90,
+           95,
+           100
+         ],
+         "direction": "lower"
+       },
+       "0.01": {
+         "epsilon": 3.023197554051876,
+         "num_guesses": 100,
+         "correct_guesses": 100,
+         "candidate_num_guesses": [
+           5,
+           10,
+           15,
+           20,
+           25,
+           30,
+           35,
+           40,
+           45,
+           50,
+           55,
+           60,
+           65,
+           70,
+           75,
+           80,
+           85,
+           90,
+           95,
+           100
+         ],
+         "direction": "lower"
+       }
+     }
+   },
+   "embedding": {
+     "auc": 0.668,
+     "empirical_epsilon": {
+       "0.05": 3.4791953936219215,
+       "0.01": 3.023197554051876
+     },
+     "empirical_epsilon_details": {
+       "0.05": {
+         "epsilon": 3.4791953936219215,
+         "num_guesses": 100,
+         "correct_guesses": 100,
+         "candidate_num_guesses": [
+           5,
+           10,
+           15,
+           20,
+           25,
+           30,
+           35,
+           40,
+           45,
+           50,
+           55,
+           60,
+           65,
+           70,
+           75,
+           80,
+           85,
+           90,
+           95,
+           100
+         ],
+         "direction": "higher"
+       },
+       "0.01": {
+         "epsilon": 3.023197554051876,
+         "num_guesses": 100,
+         "correct_guesses": 100,
+         "candidate_num_guesses": [
+           5,
+           10,
+           15,
+           20,
+           25,
+           30,
+           35,
+           40,
+           45,
+           50,
+           55,
+           60,
+           65,
+           70,
+           75,
+           80,
+           85,
+           90,
+           95,
+           100
+         ],
+         "direction": "higher"
+       }
+     }
+   }
+ }
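
audit_results.json summarizes a membership-inference audit over 500 canaries (250 members) with two attack scores, `loss` and `embedding`, each reporting an AUC and empirical-epsilon estimates at p = 0.05 and p = 0.01. A small sketch to pull the headline numbers out of the file:

```python
import json

with open("llama3.2-3b/base/audit_results.json") as f:
    results = json.load(f)

for attack in ("loss", "embedding"):
    r = results[attack]
    eps = r["empirical_epsilon"]
    print(f"{attack}: auc={r['auc']}, "
          f"eps@0.05={eps['0.05']:.3f}, eps@0.01={eps['0.01']:.3f}")
```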
llama3.2-3b/base/audit_scores.npz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d35bfe1c7cf3aa3091f64f25490bd33c41e5b30f6220c2408e5c59fa7e47b85c
+ size 12784
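
The companion audit_scores.npz holds the raw per-canary attack scores behind those summaries. The array names inside the archive are not documented in this commit, so the sketch below just enumerates whatever it contains (after `git lfs pull`):

```python
import numpy as np

scores = np.load("llama3.2-3b/base/audit_scores.npz")
for name in scores.files:  # array names are not documented here
    print(name, scores[name].shape, scores[name].dtype)
```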
llama3.2-3b/base/canary_meta.json ADDED
The diff for this file is too large to render. See raw diff
 
llama3.2-3b/base/metrics.jsonl ADDED
The diff for this file is too large to render. See raw diff
 
llama3.2-3b/base/resolved_config.yaml ADDED
@@ -0,0 +1,102 @@
+ model:
+   name: meta-llama/Llama-3.2-3B
+   tokenizer_name: meta-llama/Llama-3.2-3B
+   max_length: 1024
+   dtype: bfloat16
+   trust_remote_code: true
+   use_fast_tokenizer: true
+   cache_dir: null
+   local_files_only: false
+   low_cpu_mem_usage: true
+   tie_word_embeddings: true
+   gradient_checkpointing: false
+   use_chat_template: false
+ dataset:
+   name: melihcatal/codedp-cpt
+   split: train
+   mode: cpt
+   text_column: text
+   validation_ratio: 0.05
+   max_samples: -1
+ lora:
+   enabled: true
+   r: 16
+   alpha: 32
+   dropout: 0.05
+   target_modules:
+   - q_proj
+   - k_proj
+   - v_proj
+   - o_proj
+   modules_to_save:
+   - lm_head
+   bias: none
+ training:
+   seed: 42
+   epochs: 5
+   warmup_steps: null
+   warmup_ratio: 0.05
+   mixed_precision: false
+   mixed_precision_dtype: bfloat16
+   batch_size: 4
+   eval_batch_size: 8
+   eval_every_steps: 10
+   eval_every_epochs: 1
+   learning_rate: 0.0005
+   optimizer: adamw
+   lr_scheduler: cosine
+   adam_beta1: 0.9
+   adam_beta2: 0.999
+   adam_epsilon: 1.0e-08
+   sgd_momentum: 0.9
+   weight_decay: 0.01
+   max_grad_norm: 1.0
+   log_every: 5
+   gradient_accumulation_steps: 8
+   num_workers: 4
+   output_dir: /scratch/mcatal/runs/cpt/llama3.2-3b/base
+   min_lr_ratio: 0.15
+ distributed:
+   strategy: dpddp
+   backend: nccl
+   devices: null
+ dp:
+   module_validator: auto
+   target_delta: 1.0e-05
+   noise_multiplier: null
+   max_grad_norm: 1.0
+   grad_sample_mode: hooks
+   clipping: flat
+   secure_mode: false
+   enabled: false
+   target_epsilon: 8.0
+ audit:
+   enabled: true
+   run_every_epoch: true
+   epoch_device: cuda
+   q_canary: auto
+   num_canaries: 500
+   prefix_length: 49
+   num_digits: 12
+   batch_size: 32
+   delta: 1.0e-05
+   p_values:
+   - 0.05
+   - 0.01
+   paper_guess_fraction: 0.2
+   paper_guess_steps: 20
+   enable_holdout_empirical_epsilon: false
+   holdout_seed: 42
+   tie_seed: 42
+ tracking:
+   enabled: true
+   tensorboard: true
+   wandb: false
+   wandb_project: codedp-finetune-h200-audit
+   wandb_run_name: llama3.2-3b-cpt-base
+   wandb_mode: online
+   codecarbon: true
+   codecarbon_output_file: codecarbon.csv
+   codecarbon_measure_power_secs: 15
+   codecarbon_country_iso_code: null
+   codecarbon_project_name: codedp-llama3.2-3b-cpt-base
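
resolved_config.yaml is the fully resolved run configuration: this `base` run trains LoRA on melihcatal/codedp-cpt with DP disabled (`dp.enabled: false`) while the canary audit stays on (`audit.enabled: true`, 500 canaries of 12 digits). Note that `batch_size: 4` with `gradient_accumulation_steps: 8` gives a logical batch of 32, matching `perf/logical_batch_size` in summary.json below. A quick sanity-check sketch, assuming PyYAML:

```python
import yaml

with open("llama3.2-3b/base/resolved_config.yaml") as f:
    cfg = yaml.safe_load(f)

train = cfg["training"]
# 4 * 8 = 32, the perf/logical_batch_size reported in summary.json.
print(train["batch_size"] * train["gradient_accumulation_steps"])
print(cfg["dp"]["enabled"], cfg["audit"]["num_canaries"], cfg["audit"]["num_digits"])
```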
llama3.2-3b/base/summary.json ADDED
@@ -0,0 +1,71 @@
+ {
+   "audit/delta": 1e-05,
+   "audit/embedding/auc": 0.668,
+   "audit/embedding/empirical_epsilon/0.01": 3.023197554051876,
+   "audit/embedding/empirical_epsilon/0.05": 3.4791953936219215,
+   "audit/embedding/empirical_epsilon_details/0.01/correct_guesses": 100.0,
+   "audit/embedding/empirical_epsilon_details/0.01/epsilon": 3.023197554051876,
+   "audit/embedding/empirical_epsilon_details/0.01/num_guesses": 100.0,
+   "audit/embedding/empirical_epsilon_details/0.05/correct_guesses": 100.0,
+   "audit/embedding/empirical_epsilon_details/0.05/epsilon": 3.4791953936219215,
+   "audit/embedding/empirical_epsilon_details/0.05/num_guesses": 100.0,
+   "audit/loss/auc": 1.0,
+   "audit/loss/empirical_epsilon/0.01": 3.023197554051876,
+   "audit/loss/empirical_epsilon/0.05": 3.4791953936219215,
+   "audit/loss/empirical_epsilon_details/0.01/correct_guesses": 100.0,
+   "audit/loss/empirical_epsilon_details/0.01/epsilon": 3.023197554051876,
+   "audit/loss/empirical_epsilon_details/0.01/num_guesses": 100.0,
+   "audit/loss/empirical_epsilon_details/0.05/correct_guesses": 100.0,
+   "audit/loss/empirical_epsilon_details/0.05/epsilon": 3.4791953936219215,
+   "audit/loss/empirical_epsilon_details/0.05/num_guesses": 100.0,
+   "audit/num_canaries": 500.0,
+   "audit/num_members": 250.0,
+   "audit/paper_guess_fraction": 0.2,
+   "audit/paper_guess_steps": 20.0,
+   "energy/codecarbon/cpu_count": 16.0,
+   "energy/codecarbon/cpu_energy": 0.13077741875096305,
+   "energy/codecarbon/cpu_power": 80.03175773458268,
+   "energy/codecarbon/cpu_utilization_percent": 3.2804797896812357,
+   "energy/codecarbon/duration": 6111.527659785934,
+   "energy/codecarbon/emissions": 0.18161032575848388,
+   "energy/codecarbon/emissions_rate": 2.9716027786879898e-05,
+   "energy/codecarbon/energy_consumed": 5.212247101526386,
+   "energy/codecarbon/gpu_count": 8.0,
+   "energy/codecarbon/gpu_energy": 5.0193755718861155,
+   "energy/codecarbon/gpu_power": 2964.403708111266,
+   "energy/codecarbon/gpu_utilization_percent": 95.01803319093,
+   "energy/codecarbon/latitude": 47.4843,
+   "energy/codecarbon/longitude": 8.212,
+   "energy/codecarbon/pue": 1.0,
+   "energy/codecarbon/ram_energy": 0.062094110889303666,
+   "energy/codecarbon/ram_power": 38.0,
+   "energy/codecarbon/ram_total_size": 128.0,
+   "energy/codecarbon/ram_used_gb": 507.75555072550003,
+   "energy/codecarbon/ram_utilization_percent": 25.6338317449885,
+   "energy/codecarbon/water_consumed": 0.0,
+   "energy/codecarbon/wue": 0.0,
+   "eval/duration_sec": 8.043377958703786,
+   "eval/loss": 0.8347259744619713,
+   "perf/audit_duration_sec": 6.541899859905243,
+   "perf/epoch_duration_sec": 1165.6133534889668,
+   "perf/epoch_samples": 47252.0,
+   "perf/epoch_samples_per_sec": 40.538313891620376,
+   "perf/epoch_tokens": 37200874.0,
+   "perf/epoch_tokens_per_sec": 31915.27781373528,
+   "perf/gradient_accumulation_steps": 8.0,
+   "perf/logical_batch_size": 32.0,
+   "perf/logical_token_count": 24452.0,
+   "perf/samples_per_sec": 5.712322910228281,
+   "perf/step_duration_sec": 5.601924208924174,
+   "perf/tokens_per_sec": 4364.928743778185,
+   "system/cuda_epoch_peak_memory_gb": 45.563560009002686,
+   "system/cuda_max_memory_allocated_gb": 45.563560009002686,
+   "system/cuda_memory_allocated_gb": 13.062866687774658,
+   "train/epoch_canary_loss": 0.05231993769605954,
+   "train/epoch_loss": 0.46649868210017165,
+   "train/epoch_real_loss": 0.4699867704318317,
+   "train/lr": 7.5e-05,
+   "train/step_canary_loss": 0.007476806640625,
+   "train/step_loss": 0.41994332522153854,
+   "train/step_real_loss": 0.41994332522153854
+ }
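
summary.json flattens nested metric names into slash-separated keys (audit/loss/auc, energy/codecarbon/gpu_energy, ...). A convenience sketch that rebuilds the nesting for easier inspection:

```python
import json

def unflatten(flat: dict, sep: str = "/") -> dict:
    """Rebuild nested dicts from slash-separated metric keys."""
    nested: dict = {}
    for key, value in flat.items():
        node = nested
        *parents, leaf = key.split(sep)
        for part in parents:
            node = node.setdefault(part, {})
        node[leaf] = value
    return nested

with open("llama3.2-3b/base/summary.json") as f:
    summary = unflatten(json.load(f))

print(summary["audit"]["loss"]["auc"])  # 1.0
print(summary["eval"]["loss"])          # ~0.835
```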
llama3.2-3b/base/tokenizer/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a51697eccb3cec5f738016546cd668e72543a96f95900714d89e9c88f41271bf
+ size 17304420
llama3.2-3b/base/tokenizer/tokenizer_config.json ADDED
@@ -0,0 +1,516 @@
+ {
+   "backend": "tokenizers",
+   "bos_token": "<|begin_of_text|>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|end_of_text|>",
+   "extra_special_tokens": {
+     "extra_special_token_0": "865331112869",
+     "extra_special_token_1": "569765693871",
+     "extra_special_token_2": "485177821815",
+     "extra_special_token_3": "135441121756",
+     "extra_special_token_4": "367459894796",
+     "extra_special_token_5": "877482678543",
+     "extra_special_token_6": "457919547633",
+     "extra_special_token_7": "765474393376",
+     "extra_special_token_8": "114848338811",
+     "extra_special_token_9": "746285987371",
+     "extra_special_token_10": "649291669397",
+     "extra_special_token_11": "927914615679",
+     "extra_special_token_12": "445925149649",
+     "extra_special_token_13": "691587454538",
+     "extra_special_token_14": "143777992227",
+     "extra_special_token_15": "997981281989",
+     "extra_special_token_16": "425949483533",
+     "extra_special_token_17": "982993456429",
+     "extra_special_token_18": "718726519731",
+     "extra_special_token_19": "172599315861",
+     "extra_special_token_20": "643489267333",
+     "extra_special_token_21": "282322838685",
+     "extra_special_token_22": "781653545886",
+     "extra_special_token_23": "796415361892",
+     "extra_special_token_24": "841991688488",
+     "extra_special_token_25": "211411365397",
+     "extra_special_token_26": "698218415444",
+     "extra_special_token_27": "355977139358",
+     "extra_special_token_28": "682564697312",
+     "extra_special_token_29": "383837596997",
+     "extra_special_token_30": "689362171782",
+     "extra_special_token_31": "749966767285",
+     "extra_special_token_32": "753159165157",
+     "extra_special_token_33": "795693824762",
+     "extra_special_token_34": "669689115557",
+     "extra_special_token_35": "327491773134",
+     "extra_special_token_36": "983569279932",
+     "extra_special_token_37": "612128769512",
+     "extra_special_token_38": "374327157578",
+     "extra_special_token_39": "311632789559",
+     "extra_special_token_40": "523918658846",
+     "extra_special_token_41": "765981581453",
+     "extra_special_token_42": "794825141891",
+     "extra_special_token_43": "873898736873",
+     "extra_special_token_44": "447445629421",
+     "extra_special_token_45": "473822473819",
+     "extra_special_token_46": "181439694557",
+     "extra_special_token_47": "592538279337",
+     "extra_special_token_48": "668134915514",
+     "extra_special_token_49": "643692393748",
+     "extra_special_token_50": "696651276628",
+     "extra_special_token_51": "853859348234",
+     "extra_special_token_52": "778466723723",
+     "extra_special_token_53": "929826356991",
+     "extra_special_token_54": "272362973463",
+     "extra_special_token_55": "694235616268",
+     "extra_special_token_56": "281673864127",
+     "extra_special_token_57": "479676316326",
+     "extra_special_token_58": "646979124677",
+     "extra_special_token_59": "922327493433",
+     "extra_special_token_60": "883685933161",
+     "extra_special_token_61": "264259917554",
+     "extra_special_token_62": "836746273134",
+     "extra_special_token_63": "658481324922",
+     "extra_special_token_64": "481884157827",
+     "extra_special_token_65": "587787496812",
+     "extra_special_token_66": "579184949249",
+     "extra_special_token_67": "912193598348",
+     "extra_special_token_68": "529679678956",
+     "extra_special_token_69": "795838284624",
+     "extra_special_token_70": "159337222655",
+     "extra_special_token_71": "173781362446",
+     "extra_special_token_72": "773687856563",
+     "extra_special_token_73": "535787224917",
+     "extra_special_token_74": "351885857332",
+     "extra_special_token_75": "578827344666",
+     "extra_special_token_76": "198462689911",
+     "extra_special_token_77": "722618266242",
+     "extra_special_token_78": "952872416512",
+     "extra_special_token_79": "517778845323",
+     "extra_special_token_80": "749665846687",
+     "extra_special_token_81": "661436365453",
+     "extra_special_token_82": "259666844669",
+     "extra_special_token_83": "242851284913",
+     "extra_special_token_84": "514532995959",
+     "extra_special_token_85": "161588262349",
+     "extra_special_token_86": "742765629356",
+     "extra_special_token_87": "225164373623",
+     "extra_special_token_88": "676539973863",
+     "extra_special_token_89": "826214551218",
+     "extra_special_token_90": "182345464792",
+     "extra_special_token_91": "232776999554",
+     "extra_special_token_92": "337326533813",
+     "extra_special_token_93": "676676697292",
+     "extra_special_token_94": "929185622831",
+     "extra_special_token_95": "545512344383",
+     "extra_special_token_96": "499444466686",
+     "extra_special_token_97": "314697386682",
+     "extra_special_token_98": "517379856925",
+     "extra_special_token_99": "379557332953",
+     "extra_special_token_100": "614797267726",
+     "extra_special_token_101": "429781429464",
+     "extra_special_token_102": "922466849763",
+     "extra_special_token_103": "721737645236",
+     "extra_special_token_104": "479227349997",
+     "extra_special_token_105": "136931728327",
+     "extra_special_token_106": "259533577263",
+     "extra_special_token_107": "488538864842",
+     "extra_special_token_108": "937495658852",
+     "extra_special_token_109": "489991411364",
+     "extra_special_token_110": "499148455254",
+     "extra_special_token_111": "441373944925",
+     "extra_special_token_112": "899151413682",
+     "extra_special_token_113": "467893531755",
+     "extra_special_token_114": "527117488925",
+     "extra_special_token_115": "928335588653",
+     "extra_special_token_116": "374439448821",
+     "extra_special_token_117": "879425227932",
+     "extra_special_token_118": "867678158885",
+     "extra_special_token_119": "399749397872",
+     "extra_special_token_120": "129693547287",
+     "extra_special_token_121": "689285841825",
+     "extra_special_token_122": "771619544974",
+     "extra_special_token_123": "724883568652",
+     "extra_special_token_124": "516968424863",
+     "extra_special_token_125": "733737988257",
+     "extra_special_token_126": "852347289392",
+     "extra_special_token_127": "296953381169",
+     "extra_special_token_128": "377273562477",
+     "extra_special_token_129": "262296912232",
+     "extra_special_token_130": "547149832394",
+     "extra_special_token_131": "298464134954",
+     "extra_special_token_132": "216667245274",
+     "extra_special_token_133": "843998562287",
+     "extra_special_token_134": "572154333646",
+     "extra_special_token_135": "124589118494",
+     "extra_special_token_136": "841824384614",
+     "extra_special_token_137": "232896526252",
+     "extra_special_token_138": "295448593321",
+     "extra_special_token_139": "123741461297",
+     "extra_special_token_140": "653573457168",
+     "extra_special_token_141": "196735786156",
+     "extra_special_token_142": "377338713663",
+     "extra_special_token_143": "964342468552",
+     "extra_special_token_144": "586855179568",
+     "extra_special_token_145": "484773717614",
+     "extra_special_token_146": "894885246797",
+     "extra_special_token_147": "677896358599",
+     "extra_special_token_148": "848845611563",
+     "extra_special_token_149": "851852651677",
+     "extra_special_token_150": "398549545767",
+     "extra_special_token_151": "454244839926",
+     "extra_special_token_152": "799364566435",
+     "extra_special_token_153": "967114116556",
+     "extra_special_token_154": "817378986438",
+     "extra_special_token_155": "233795848681",
+     "extra_special_token_156": "824387273757",
+     "extra_special_token_157": "916198946615",
+     "extra_special_token_158": "563117729724",
+     "extra_special_token_159": "951794811935",
+     "extra_special_token_160": "374598961236",
+     "extra_special_token_161": "922867396683",
+     "extra_special_token_162": "765737843639",
+     "extra_special_token_163": "175469284871",
+     "extra_special_token_164": "231853711778",
+     "extra_special_token_165": "662426712668",
+     "extra_special_token_166": "711412347158",
+     "extra_special_token_167": "753466987363",
+     "extra_special_token_168": "513361312532",
+     "extra_special_token_169": "712992815957",
+     "extra_special_token_170": "971621888444",
+     "extra_special_token_171": "829235161526",
+     "extra_special_token_172": "585544633356",
+     "extra_special_token_173": "582471228164",
+     "extra_special_token_174": "678666359123",
+     "extra_special_token_175": "557533689478",
+     "extra_special_token_176": "632962475133",
+     "extra_special_token_177": "484489193824",
+     "extra_special_token_178": "489562189822",
+     "extra_special_token_179": "589547936288",
+     "extra_special_token_180": "363214487524",
+     "extra_special_token_181": "244885399387",
+     "extra_special_token_182": "431751228368",
+     "extra_special_token_183": "433581868192",
+     "extra_special_token_184": "486391569221",
+     "extra_special_token_185": "185438575221",
+     "extra_special_token_186": "126574388585",
+     "extra_special_token_187": "741757479784",
+     "extra_special_token_188": "529854679937",
+     "extra_special_token_189": "996116119839",
+     "extra_special_token_190": "616248973917",
+     "extra_special_token_191": "763531783491",
+     "extra_special_token_192": "955456118295",
+     "extra_special_token_193": "364196983365",
+     "extra_special_token_194": "195792996468",
+     "extra_special_token_195": "151859598873",
+     "extra_special_token_196": "399223169721",
+     "extra_special_token_197": "938488813964",
+     "extra_special_token_198": "961981959227",
+     "extra_special_token_199": "183368827562",
+     "extra_special_token_200": "533417736566",
+     "extra_special_token_201": "786391632558",
+     "extra_special_token_202": "665661658354",
+     "extra_special_token_203": "693281533643",
+     "extra_special_token_204": "475794684356",
+     "extra_special_token_205": "652154162978",
+     "extra_special_token_206": "753233719644",
+     "extra_special_token_207": "668514843129",
+     "extra_special_token_208": "819162623892",
+     "extra_special_token_209": "941169431859",
+     "extra_special_token_210": "877385381798",
+     "extra_special_token_211": "752644929761",
+     "extra_special_token_212": "881136466196",
+     "extra_special_token_213": "275597777299",
+     "extra_special_token_214": "731681792655",
+     "extra_special_token_215": "961133895172",
+     "extra_special_token_216": "864718285734",
+     "extra_special_token_217": "963852916563",
+     "extra_special_token_218": "319584985416",
+     "extra_special_token_219": "563365646341",
+     "extra_special_token_220": "811371928234",
+     "extra_special_token_221": "837131396371",
+     "extra_special_token_222": "267514771964",
+     "extra_special_token_223": "944513428457",
+     "extra_special_token_224": "117298239631",
+     "extra_special_token_225": "158142752582",
+     "extra_special_token_226": "252867443568",
+     "extra_special_token_227": "839269684865",
+     "extra_special_token_228": "612788593128",
+     "extra_special_token_229": "145669731981",
+     "extra_special_token_230": "121557291859",
+     "extra_special_token_231": "245416776926",
+     "extra_special_token_232": "799417897197",
+     "extra_special_token_233": "997958836435",
+     "extra_special_token_234": "892336777248",
+     "extra_special_token_235": "158929292238",
+     "extra_special_token_236": "581976444672",
+     "extra_special_token_237": "897784492783",
+     "extra_special_token_238": "492373714791",
+     "extra_special_token_239": "512659818733",
+     "extra_special_token_240": "881112998642",
+     "extra_special_token_241": "619454958782",
+     "extra_special_token_242": "431149748713",
+     "extra_special_token_243": "624221476921",
+     "extra_special_token_244": "125866399464",
+     "extra_special_token_245": "339882449689",
+     "extra_special_token_246": "186198784585",
+     "extra_special_token_247": "943193294691",
+     "extra_special_token_248": "955668961269",
+     "extra_special_token_249": "232787996724",
+     "extra_special_token_250": "215671314196",
+     "extra_special_token_251": "286173241916",
+     "extra_special_token_252": "745977673725",
+     "extra_special_token_253": "556976448182",
+     "extra_special_token_254": "599961512792",
+     "extra_special_token_255": "766294538337",
+     "extra_special_token_256": "934912591213",
+     "extra_special_token_257": "295118729589",
+     "extra_special_token_258": "529455466433",
+     "extra_special_token_259": "196119929397",
+     "extra_special_token_260": "379571934299",
+     "extra_special_token_261": "251789649997",
+     "extra_special_token_262": "564544131355",
+     "extra_special_token_263": "244371196654",
+     "extra_special_token_264": "384598329253",
+     "extra_special_token_265": "887753195844",
+     "extra_special_token_266": "364947325679",
+     "extra_special_token_267": "655517954651",
+     "extra_special_token_268": "673948786567",
+     "extra_special_token_269": "857231548835",
+     "extra_special_token_270": "816115936673",
+     "extra_special_token_271": "644234165531",
+     "extra_special_token_272": "182782912224",
+     "extra_special_token_273": "234316622259",
+     "extra_special_token_274": "421369185549",
+     "extra_special_token_275": "434632855397",
+     "extra_special_token_276": "921889371893",
+     "extra_special_token_277": "415956914763",
+     "extra_special_token_278": "598916996413",
+     "extra_special_token_279": "773671349113",
+     "extra_special_token_280": "952465217972",
+     "extra_special_token_281": "117657531962",
+     "extra_special_token_282": "729825168745",
+     "extra_special_token_283": "691315125346",
+     "extra_special_token_284": "768461952319",
+     "extra_special_token_285": "664847713559",
+     "extra_special_token_286": "953267689786",
+     "extra_special_token_287": "886464195129",
+     "extra_special_token_288": "824488329416",
+     "extra_special_token_289": "837873762491",
+     "extra_special_token_290": "532833541879",
+     "extra_special_token_291": "669183782449",
+     "extra_special_token_292": "941976537588",
+     "extra_special_token_293": "739394546916",
+     "extra_special_token_294": "267954879268",
+     "extra_special_token_295": "637551427887",
+     "extra_special_token_296": "217756494954",
+     "extra_special_token_297": "524444658383",
+     "extra_special_token_298": "117783274348",
+     "extra_special_token_299": "138218735276",
+     "extra_special_token_300": "814611949491",
+     "extra_special_token_301": "711641973413",
+     "extra_special_token_302": "499156317423",
+     "extra_special_token_303": "515856611931",
+     "extra_special_token_304": "454164859837",
+     "extra_special_token_305": "345271433112",
+     "extra_special_token_306": "462294118988",
+     "extra_special_token_307": "511785788222",
+     "extra_special_token_308": "497294727353",
+     "extra_special_token_309": "866519986723",
+     "extra_special_token_310": "334513529294",
+     "extra_special_token_311": "549946382131",
+     "extra_special_token_312": "284445431422",
+     "extra_special_token_313": "396521188476",
+     "extra_special_token_314": "421435255895",
+     "extra_special_token_315": "133373659361",
+     "extra_special_token_316": "322683334381",
+     "extra_special_token_317": "228358422847",
+     "extra_special_token_318": "291762694874",
+     "extra_special_token_319": "143182978129",
+     "extra_special_token_320": "511923256573",
+     "extra_special_token_321": "327158398268",
+     "extra_special_token_322": "879764613759",
+     "extra_special_token_323": "564395222747",
+     "extra_special_token_324": "451161679736",
+     "extra_special_token_325": "538631466654",
+     "extra_special_token_326": "221762325616",
+     "extra_special_token_327": "218391991184",
+     "extra_special_token_328": "322589379462",
+     "extra_special_token_329": "876537814263",
+     "extra_special_token_330": "152676556624",
+     "extra_special_token_331": "332522971941",
+     "extra_special_token_332": "884354318946",
+     "extra_special_token_333": "513349618943",
+     "extra_special_token_334": "116639746413",
+     "extra_special_token_335": "635185846287",
+     "extra_special_token_336": "993832498489",
+     "extra_special_token_337": "813981174797",
+     "extra_special_token_338": "438745114173",
+     "extra_special_token_339": "983493951323",
+     "extra_special_token_340": "724492262421",
+     "extra_special_token_341": "622553389126",
+     "extra_special_token_342": "889965243135",
+     "extra_special_token_343": "364492359246",
+     "extra_special_token_344": "154962668224",
+     "extra_special_token_345": "179564995814",
+     "extra_special_token_346": "418412875665",
+     "extra_special_token_347": "718951851413",
+     "extra_special_token_348": "699446724178",
+     "extra_special_token_349": "624266421831",
+     "extra_special_token_350": "815458725125",
+     "extra_special_token_351": "455423278865",
+     "extra_special_token_352": "393741199486",
+     "extra_special_token_353": "328552864359",
+     "extra_special_token_354": "211662639865",
+     "extra_special_token_355": "218784516525",
+     "extra_special_token_356": "762486672996",
+     "extra_special_token_357": "142799718159",
+     "extra_special_token_358": "858146415154",
+     "extra_special_token_359": "767858144912",
+     "extra_special_token_360": "571317457151",
+     "extra_special_token_361": "635127952696",
+     "extra_special_token_362": "116427191984",
+     "extra_special_token_363": "268921994538",
+     "extra_special_token_364": "523937669294",
+     "extra_special_token_365": "165429152138",
+     "extra_special_token_366": "739246183345",
+     "extra_special_token_367": "591464355756",
+     "extra_special_token_368": "212985874612",
+     "extra_special_token_369": "191887635211",
+     "extra_special_token_370": "967214577653",
+     "extra_special_token_371": "119342152414",
+     "extra_special_token_372": "946444632795",
+     "extra_special_token_373": "618423867817",
+     "extra_special_token_374": "228565148417",
+     "extra_special_token_375": "729116422489",
+     "extra_special_token_376": "527874729936",
+     "extra_special_token_377": "739784153482",
+     "extra_special_token_378": "387763951128",
+     "extra_special_token_379": "331369926711",
+     "extra_special_token_380": "562716493614",
+     "extra_special_token_381": "739667844957",
+     "extra_special_token_382": "562389434565",
+     "extra_special_token_383": "256497188281",
+     "extra_special_token_384": "859927364588",
+     "extra_special_token_385": "417668946583",
+     "extra_special_token_386": "357621613582",
+     "extra_special_token_387": "438435178228",
+     "extra_special_token_388": "485692541169",
+     "extra_special_token_389": "825815739116",
+     "extra_special_token_390": "342221452223",
+     "extra_special_token_391": "697747991249",
+     "extra_special_token_392": "716763689965",
+     "extra_special_token_393": "141499982867",
+     "extra_special_token_394": "818479319499",
+     "extra_special_token_395": "336813343298",
+     "extra_special_token_396": "594688742928",
+     "extra_special_token_397": "472129283475",
+     "extra_special_token_398": "514354144759",
+     "extra_special_token_399": "349249721685",
+     "extra_special_token_400": "546276298359",
+     "extra_special_token_401": "353755529131",
+     "extra_special_token_402": "315534574435",
+     "extra_special_token_403": "523723475786",
+     "extra_special_token_404": "215826764872",
+     "extra_special_token_405": "367968398551",
+     "extra_special_token_406": "569853653352",
+     "extra_special_token_407": "389715484387",
+     "extra_special_token_408": "293847485454",
+     "extra_special_token_409": "714738141818",
+     "extra_special_token_410": "178478368922",
+     "extra_special_token_411": "581493616981",
+     "extra_special_token_412": "589439538674",
+     "extra_special_token_413": "846657726193",
+     "extra_special_token_414": "722339992679",
+     "extra_special_token_415": "138154781148",
+     "extra_special_token_416": "757785319772",
+     "extra_special_token_417": "492516914298",
+     "extra_special_token_418": "919181521716",
+     "extra_special_token_419": "985781138935",
+     "extra_special_token_420": "476969195485",
+     "extra_special_token_421": "313145133463",
+     "extra_special_token_422": "758963111966",
+     "extra_special_token_423": "147541537162",
+     "extra_special_token_424": "557163366873",
+     "extra_special_token_425": "144373897488",
+     "extra_special_token_426": "522515164754",
+     "extra_special_token_427": "724964923582",
+     "extra_special_token_428": "284776712475",
+     "extra_special_token_429": "375429755114",
+     "extra_special_token_430": "181233596124",
+     "extra_special_token_431": "948585673431",
+     "extra_special_token_432": "243165586174",
+     "extra_special_token_433": "396847976144",
+     "extra_special_token_434": "997724962668",
+     "extra_special_token_435": "558837194455",
+     "extra_special_token_436": "163165456396",
+     "extra_special_token_437": "378749551722",
+     "extra_special_token_438": "161238482259",
+     "extra_special_token_439": "754978243758",
+     "extra_special_token_440": "195388849133",
+     "extra_special_token_441": "229775525672",
+     "extra_special_token_442": "262437452884",
+     "extra_special_token_443": "441377892146",
+     "extra_special_token_444": "451885565366",
+     "extra_special_token_445": "981277526855",
+     "extra_special_token_446": "762495822823",
+     "extra_special_token_447": "368763327262",
+     "extra_special_token_448": "757422791351",
+     "extra_special_token_449": "636324136426",
+     "extra_special_token_450": "214193645583",
+     "extra_special_token_451": "412843856172",
+     "extra_special_token_452": "179386156569",
+     "extra_special_token_453": "756916173536",
+     "extra_special_token_454": "892697125149",
+     "extra_special_token_455": "625334487352",
+     "extra_special_token_456": "941861857715",
+     "extra_special_token_457": "887417525236",
+     "extra_special_token_458": "649516938598",
+     "extra_special_token_459": "717628619782",
+     "extra_special_token_460": "438124184139",
+     "extra_special_token_461": "547563892268",
+     "extra_special_token_462": "856317483891",
+     "extra_special_token_463": "313313831273",
+     "extra_special_token_464": "371496153876",
+     "extra_special_token_465": "587541149322",
+     "extra_special_token_466": "265847332563",
+     "extra_special_token_467": "449549215429",
+     "extra_special_token_468": "163497196769",
+     "extra_special_token_469": "861342291298",
+     "extra_special_token_470": "268433315926",
+     "extra_special_token_471": "774679513717",
+     "extra_special_token_472": "851254219729",
+     "extra_special_token_473": "583527834464",
+     "extra_special_token_474": "488496781997",
+     "extra_special_token_475": "556814553861",
+     "extra_special_token_476": "482829231639",
+     "extra_special_token_477": "618878266619",
+     "extra_special_token_478": "147444452794",
+     "extra_special_token_479": "949235426629",
+     "extra_special_token_480": "357299947518",
+     "extra_special_token_481": "175528632226",
+     "extra_special_token_482": "645527857972",
+     "extra_special_token_483": "186872457894",
+     "extra_special_token_484": "552738847828",
+     "extra_special_token_485": "626748382482",
+     "extra_special_token_486": "921894985642",
+     "extra_special_token_487": "943878645871",
+     "extra_special_token_488": "859289776479",
+     "extra_special_token_489": "614583493135",
+     "extra_special_token_490": "933775286797",
+     "extra_special_token_491": "332234613346",
+     "extra_special_token_492": "325196781219",
+     "extra_special_token_493": "142526557681",
+     "extra_special_token_494": "356722692178",
+     "extra_special_token_495": "449318681694",
+     "extra_special_token_496": "687284547244",
+     "extra_special_token_497": "947262995132",
+     "extra_special_token_498": "893974619684",
+     "extra_special_token_499": "797238311233"
+   },
+   "is_local": false,
+   "model_input_names": [
+     "input_ids",
+     "attention_mask"
+   ],
+   "model_max_length": 131072,
+   "pad_token": "<|end_of_text|>",
+   "tokenizer_class": "PreTrainedTokenizerFast"
+ }
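
The 500 `extra_special_token_*` entries line up with the audit settings in resolved_config.yaml (`num_canaries: 500`, `num_digits: 12`): each value is a random 12-digit secret, so these appear to be the injected audit canaries. A quick check against the shipped file:

```python
import json

with open("llama3.2-3b/base/tokenizer/tokenizer_config.json") as f:
    tok_cfg = json.load(f)

canaries = tok_cfg["extra_special_tokens"]
assert len(canaries) == 500                                          # audit.num_canaries
assert all(len(v) == 12 and v.isdigit() for v in canaries.values())  # audit.num_digits
```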
llama3.2-3b/base/train.log ADDED
@@ -0,0 +1,274 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
+ 2026-03-29 10:57:10,862 [INFO] new_opacus_codex.train_steps: epoch=1 step=5 loss=1.2779
+ 2026-03-29 10:57:38,490 [INFO] new_opacus_codex.train_steps: epoch=1 step=10 loss=1.2583
+ 2026-03-29 10:57:46,486 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=10 eval_loss=1.0522 duration_sec=7.91
+ 2026-03-29 10:58:14,358 [INFO] new_opacus_codex.train_steps: epoch=1 step=15 loss=1.2878
+ 2026-03-29 10:58:42,932 [INFO] new_opacus_codex.train_steps: epoch=1 step=20 loss=1.2188
+ 2026-03-29 10:58:50,947 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=20 eval_loss=1.0179 duration_sec=7.89
+ 2026-03-29 10:59:18,487 [INFO] new_opacus_codex.train_steps: epoch=1 step=25 loss=1.1632
+ 2026-03-29 10:59:45,795 [INFO] new_opacus_codex.train_steps: epoch=1 step=30 loss=1.1578
+ 2026-03-29 10:59:53,874 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=30 eval_loss=0.9920 duration_sec=8.06
+ 2026-03-29 11:00:22,280 [INFO] new_opacus_codex.train_steps: epoch=1 step=35 loss=1.0909
+ 2026-03-29 11:00:50,017 [INFO] new_opacus_codex.train_steps: epoch=1 step=40 loss=1.1115
+ 2026-03-29 11:00:57,956 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=40 eval_loss=0.9743 duration_sec=7.91
+ 2026-03-29 11:01:25,718 [INFO] new_opacus_codex.train_steps: epoch=1 step=45 loss=1.1037
+ 2026-03-29 11:01:53,030 [INFO] new_opacus_codex.train_steps: epoch=1 step=50 loss=1.0936
+ 2026-03-29 11:02:01,002 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=50 eval_loss=0.9664 duration_sec=7.93
+ 2026-03-29 11:02:28,774 [INFO] new_opacus_codex.train_steps: epoch=1 step=55 loss=1.0844
+ 2026-03-29 11:02:55,870 [INFO] new_opacus_codex.train_steps: epoch=1 step=60 loss=1.0319
+ 2026-03-29 11:03:03,906 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=60 eval_loss=0.9574 duration_sec=8.00
+ 2026-03-29 11:03:32,725 [INFO] new_opacus_codex.train_steps: epoch=1 step=65 loss=1.0412
+ 2026-03-29 11:04:00,087 [INFO] new_opacus_codex.train_steps: epoch=1 step=70 loss=1.0146
+ 2026-03-29 11:04:08,097 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=70 eval_loss=0.9466 duration_sec=7.97
+ 2026-03-29 11:04:36,782 [INFO] new_opacus_codex.train_steps: epoch=1 step=75 loss=1.1016
+ 2026-03-29 11:05:03,977 [INFO] new_opacus_codex.train_steps: epoch=1 step=80 loss=1.1395
+ 2026-03-29 11:05:12,031 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=80 eval_loss=0.9382 duration_sec=7.88
+ 2026-03-29 11:05:38,829 [INFO] new_opacus_codex.train_steps: epoch=1 step=85 loss=1.0375
+ 2026-03-29 11:06:06,545 [INFO] new_opacus_codex.train_steps: epoch=1 step=90 loss=1.1016
+ 2026-03-29 11:06:14,603 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=90 eval_loss=0.9298 duration_sec=7.97
+ 2026-03-29 11:06:42,960 [INFO] new_opacus_codex.train_steps: epoch=1 step=95 loss=1.1273
+ 2026-03-29 11:07:10,821 [INFO] new_opacus_codex.train_steps: epoch=1 step=100 loss=1.0804
+ 2026-03-29 11:07:18,774 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=100 eval_loss=0.9203 duration_sec=7.91
+ 2026-03-29 11:07:46,317 [INFO] new_opacus_codex.train_steps: epoch=1 step=105 loss=1.0201
+ 2026-03-29 11:08:14,623 [INFO] new_opacus_codex.train_steps: epoch=1 step=110 loss=0.9657
+ 2026-03-29 11:08:22,694 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=110 eval_loss=0.9109 duration_sec=7.88
+ 2026-03-29 11:08:49,807 [INFO] new_opacus_codex.train_steps: epoch=1 step=115 loss=0.9802
+ 2026-03-29 11:09:18,087 [INFO] new_opacus_codex.train_steps: epoch=1 step=120 loss=0.9970
+ 2026-03-29 11:09:26,145 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=120 eval_loss=0.9056 duration_sec=7.96
+ 2026-03-29 11:09:54,270 [INFO] new_opacus_codex.train_steps: epoch=1 step=125 loss=1.0030
+ 2026-03-29 11:10:21,720 [INFO] new_opacus_codex.train_steps: epoch=1 step=130 loss=1.0139
+ 2026-03-29 11:10:29,765 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=130 eval_loss=0.9001 duration_sec=8.02
+ 2026-03-29 11:10:56,888 [INFO] new_opacus_codex.train_steps: epoch=1 step=135 loss=1.0013
+ 2026-03-29 11:11:24,431 [INFO] new_opacus_codex.train_steps: epoch=1 step=140 loss=0.9787
+ 2026-03-29 11:11:32,429 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=140 eval_loss=0.8939 duration_sec=7.93
+ 2026-03-29 11:11:59,073 [INFO] new_opacus_codex.train_steps: epoch=1 step=145 loss=0.9829
+ 2026-03-29 11:12:26,580 [INFO] new_opacus_codex.train_steps: epoch=1 step=150 loss=0.9856
+ 2026-03-29 11:12:34,595 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=150 eval_loss=0.8901 duration_sec=7.92
+ 2026-03-29 11:13:02,191 [INFO] new_opacus_codex.train_steps: epoch=1 step=155 loss=0.9589
+ 2026-03-29 11:13:30,684 [INFO] new_opacus_codex.train_steps: epoch=1 step=160 loss=0.9600
+ 2026-03-29 11:13:38,699 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=160 eval_loss=0.8863 duration_sec=7.94
+ 2026-03-29 11:14:05,782 [INFO] new_opacus_codex.train_steps: epoch=1 step=165 loss=0.9440
+ 2026-03-29 11:14:33,420 [INFO] new_opacus_codex.train_steps: epoch=1 step=170 loss=0.8875
+ 2026-03-29 11:14:41,460 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=170 eval_loss=0.8838 duration_sec=7.94
+ 2026-03-29 11:15:09,485 [INFO] new_opacus_codex.train_steps: epoch=1 step=175 loss=0.8979
+ 2026-03-29 11:15:36,973 [INFO] new_opacus_codex.train_steps: epoch=1 step=180 loss=0.9346
+ 2026-03-29 11:15:45,074 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=180 eval_loss=0.8779 duration_sec=7.92
+ 2026-03-29 11:16:33,304 [INFO] new_opacus_codex.train_steps: epoch=2 step=185 loss=0.7506
+ 2026-03-29 11:17:00,631 [INFO] new_opacus_codex.train_steps: epoch=2 step=190 loss=0.8556
+ 2026-03-29 11:17:08,658 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=190 eval_loss=0.8795 duration_sec=7.94
+ 2026-03-29 11:17:35,943 [INFO] new_opacus_codex.train_steps: epoch=2 step=195 loss=0.8761
+ 2026-03-29 11:18:04,731 [INFO] new_opacus_codex.train_steps: epoch=2 step=200 loss=0.8560
+ 2026-03-29 11:18:12,709 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=200 eval_loss=0.8785 duration_sec=7.93
+ 2026-03-29 11:18:40,531 [INFO] new_opacus_codex.train_steps: epoch=2 step=205 loss=0.7950
+ 2026-03-29 11:19:08,026 [INFO] new_opacus_codex.train_steps: epoch=2 step=210 loss=0.7900
+ 2026-03-29 11:19:16,200 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=210 eval_loss=0.8737 duration_sec=7.98
+ 2026-03-29 11:19:45,766 [INFO] new_opacus_codex.train_steps: epoch=2 step=215 loss=0.8673
+ 2026-03-29 11:20:13,895 [INFO] new_opacus_codex.train_steps: epoch=2 step=220 loss=0.8328
+ 2026-03-29 11:20:22,004 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=220 eval_loss=0.8718 duration_sec=7.93
+ 2026-03-29 11:20:49,111 [INFO] new_opacus_codex.train_steps: epoch=2 step=225 loss=0.7739
+ 2026-03-29 11:21:17,183 [INFO] new_opacus_codex.train_steps: epoch=2 step=230 loss=0.7892
+ 2026-03-29 11:21:25,237 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=230 eval_loss=0.8690 duration_sec=8.01
+ 2026-03-29 11:21:52,935 [INFO] new_opacus_codex.train_steps: epoch=2 step=235 loss=0.7906
+ 2026-03-29 11:22:21,161 [INFO] new_opacus_codex.train_steps: epoch=2 step=240 loss=0.8245
+ 2026-03-29 11:22:29,152 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=240 eval_loss=0.8668 duration_sec=7.94
+ 2026-03-29 11:22:57,125 [INFO] new_opacus_codex.train_steps: epoch=2 step=245 loss=0.8493
+ 2026-03-29 11:23:24,235 [INFO] new_opacus_codex.train_steps: epoch=2 step=250 loss=0.7984
+ 2026-03-29 11:23:32,268 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=250 eval_loss=0.8640 duration_sec=8.00
+ 2026-03-29 11:24:01,037 [INFO] new_opacus_codex.train_steps: epoch=2 step=255 loss=0.7511
+ 2026-03-29 11:24:30,313 [INFO] new_opacus_codex.train_steps: epoch=2 step=260 loss=0.7798
+ 2026-03-29 11:24:38,439 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=260 eval_loss=0.8662 duration_sec=8.11
+ 2026-03-29 11:25:06,389 [INFO] new_opacus_codex.train_steps: epoch=2 step=265 loss=0.7970
+ 2026-03-29 11:25:33,541 [INFO] new_opacus_codex.train_steps: epoch=2 step=270 loss=0.7641
+ 2026-03-29 11:25:41,644 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=270 eval_loss=0.8662 duration_sec=7.95
+ 2026-03-29 11:26:09,634 [INFO] new_opacus_codex.train_steps: epoch=2 step=275 loss=0.7670
+ 2026-03-29 11:26:37,815 [INFO] new_opacus_codex.train_steps: epoch=2 step=280 loss=0.7750
+ 2026-03-29 11:26:45,819 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=280 eval_loss=0.8618 duration_sec=7.96
+ 2026-03-29 11:27:13,697 [INFO] new_opacus_codex.train_steps: epoch=2 step=285 loss=0.7302
+ 2026-03-29 11:27:41,652 [INFO] new_opacus_codex.train_steps: epoch=2 step=290 loss=0.7386
+ 2026-03-29 11:27:49,712 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=290 eval_loss=0.8545 duration_sec=8.04
+ 2026-03-29 11:28:18,095 [INFO] new_opacus_codex.train_steps: epoch=2 step=295 loss=0.8187
+ 2026-03-29 11:28:46,012 [INFO] new_opacus_codex.train_steps: epoch=2 step=300 loss=0.7858
+ 2026-03-29 11:28:54,003 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=300 eval_loss=0.8525 duration_sec=7.98
+ 2026-03-29 11:29:22,386 [INFO] new_opacus_codex.train_steps: epoch=2 step=305 loss=0.7458
+ 2026-03-29 11:29:50,627 [INFO] new_opacus_codex.train_steps: epoch=2 step=310 loss=0.7672
+ 2026-03-29 11:29:58,606 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=310 eval_loss=0.8489 duration_sec=7.96
+ 2026-03-29 11:30:25,886 [INFO] new_opacus_codex.train_steps: epoch=2 step=315 loss=0.7693
+ 2026-03-29 11:30:54,183 [INFO] new_opacus_codex.train_steps: epoch=2 step=320 loss=0.7696
+ 2026-03-29 11:31:02,308 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=320 eval_loss=0.8459 duration_sec=7.97
+ 2026-03-29 11:31:30,132 [INFO] new_opacus_codex.train_steps: epoch=2 step=325 loss=0.7588
+ 2026-03-29 11:31:56,867 [INFO] new_opacus_codex.train_steps: epoch=2 step=330 loss=0.7283
+ 2026-03-29 11:32:04,928 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=330 eval_loss=0.8444 duration_sec=7.99
+ 2026-03-29 11:32:33,283 [INFO] new_opacus_codex.train_steps: epoch=2 step=335 loss=0.7375
+ 2026-03-29 11:33:01,429 [INFO] new_opacus_codex.train_steps: epoch=2 step=340 loss=0.7470
+ 2026-03-29 11:33:09,478 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=340 eval_loss=0.8420 duration_sec=7.97
+ 2026-03-29 11:33:36,717 [INFO] new_opacus_codex.train_steps: epoch=2 step=345 loss=0.7300
+ 2026-03-29 11:34:04,989 [INFO] new_opacus_codex.train_steps: epoch=2 step=350 loss=0.7311
+ 2026-03-29 11:34:13,119 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=350 eval_loss=0.8393 duration_sec=7.96
+ 2026-03-29 11:34:41,407 [INFO] new_opacus_codex.train_steps: epoch=2 step=355 loss=0.7333
+ 2026-03-29 11:35:08,689 [INFO] new_opacus_codex.train_steps: epoch=2 step=360 loss=0.7374
+ 2026-03-29 11:35:16,919 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=360 eval_loss=0.8364 duration_sec=8.09
+ 2026-03-29 11:35:44,446 [INFO] new_opacus_codex.train_steps: epoch=2 step=365 loss=0.7114
+ 2026-03-29 11:36:36,009 [INFO] new_opacus_codex.train_steps: epoch=3 step=370 loss=0.6415
+ 2026-03-29 11:36:44,047 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=370 eval_loss=0.8508 duration_sec=7.99
+ 2026-03-29 11:37:11,916 [INFO] new_opacus_codex.train_steps: epoch=3 step=375 loss=0.6307
+ 2026-03-29 11:37:40,092 [INFO] new_opacus_codex.train_steps: epoch=3 step=380 loss=0.6135
+ 2026-03-29 11:37:48,232 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=380 eval_loss=0.8463 duration_sec=8.09
+ 2026-03-29 11:38:19,056 [INFO] new_opacus_codex.train_steps: epoch=3 step=385 loss=0.5879
+ 2026-03-29 11:38:48,654 [INFO] new_opacus_codex.train_steps: epoch=3 step=390 loss=0.5891
+ 2026-03-29 11:38:56,675 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=390 eval_loss=0.8420 duration_sec=7.97
+ 2026-03-29 11:39:25,635 [INFO] new_opacus_codex.train_steps: epoch=3 step=395 loss=0.6083
+ 2026-03-29 11:39:52,770 [INFO] new_opacus_codex.train_steps: epoch=3 step=400 loss=0.6454
+ 2026-03-29 11:40:00,832 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=400 eval_loss=0.8426 duration_sec=8.04
+ 2026-03-29 11:40:28,137 [INFO] new_opacus_codex.train_steps: epoch=3 step=405 loss=0.6557
+ 2026-03-29 11:40:56,507 [INFO] new_opacus_codex.train_steps: epoch=3 step=410 loss=0.6268
+ 2026-03-29 11:41:04,585 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=410 eval_loss=0.8417 duration_sec=7.97
+ 2026-03-29 11:41:31,869 [INFO] new_opacus_codex.train_steps: epoch=3 step=415 loss=0.6198
+ 2026-03-29 11:42:00,177 [INFO] new_opacus_codex.train_steps: epoch=3 step=420 loss=0.6380
+ 2026-03-29 11:42:08,261 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=420 eval_loss=0.8407 duration_sec=7.94
+ 2026-03-29 11:42:36,035 [INFO] new_opacus_codex.train_steps: epoch=3 step=425 loss=0.6320
+ 2026-03-29 11:43:04,636 [INFO] new_opacus_codex.train_steps: epoch=3 step=430 loss=0.6030
+ 2026-03-29 11:43:12,680 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=430 eval_loss=0.8384 duration_sec=8.02
+ 2026-03-29 11:43:40,144 [INFO] new_opacus_codex.train_steps: epoch=3 step=435 loss=0.6325
+ 2026-03-29 11:44:08,002 [INFO] new_opacus_codex.train_steps: epoch=3 step=440 loss=0.6348
+ 2026-03-29 11:44:16,101 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=440 eval_loss=0.8376 duration_sec=7.98
+ 2026-03-29 11:44:44,019 [INFO] new_opacus_codex.train_steps: epoch=3 step=445 loss=0.5865
+ 2026-03-29 11:45:11,974 [INFO] new_opacus_codex.train_steps: epoch=3 step=450 loss=0.5832
+ 2026-03-29 11:45:20,070 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=450 eval_loss=0.8359 duration_sec=8.03
+ 2026-03-29 11:45:48,636 [INFO] new_opacus_codex.train_steps: epoch=3 step=455 loss=0.6247
+ 2026-03-29 11:46:16,309 [INFO] new_opacus_codex.train_steps: epoch=3 step=460 loss=0.6331
+ 2026-03-29 11:46:24,323 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=460 eval_loss=0.8335 duration_sec=7.98
+ 2026-03-29 11:46:51,551 [INFO] new_opacus_codex.train_steps: epoch=3 step=465 loss=0.6376
+ 2026-03-29 11:47:18,972 [INFO] new_opacus_codex.train_steps: epoch=3 step=470 loss=0.6431
+ 2026-03-29 11:47:27,011 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=470 eval_loss=0.8328 duration_sec=7.98
+ 2026-03-29 11:47:54,824 [INFO] new_opacus_codex.train_steps: epoch=3 step=475 loss=0.6203
+ 2026-03-29 11:48:22,806 [INFO] new_opacus_codex.train_steps: epoch=3 step=480 loss=0.6073
+ 2026-03-29 11:48:30,880 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=480 eval_loss=0.8290 duration_sec=7.96
+ 2026-03-29 11:48:59,009 [INFO] new_opacus_codex.train_steps: epoch=3 step=485 loss=0.6052
+ 2026-03-29 11:49:26,412 [INFO] new_opacus_codex.train_steps: epoch=3 step=490 loss=0.6010
+ 2026-03-29 11:49:34,457 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=490 eval_loss=0.8296 duration_sec=7.98
+ 2026-03-29 11:50:01,478 [INFO] new_opacus_codex.train_steps: epoch=3 step=495 loss=0.6041
+ 2026-03-29 11:50:28,959 [INFO] new_opacus_codex.train_steps: epoch=3 step=500 loss=0.6202
+ 2026-03-29 11:50:37,003 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=500 eval_loss=0.8276 duration_sec=7.98
+ 2026-03-29 11:51:05,108 [INFO] new_opacus_codex.train_steps: epoch=3 step=505 loss=0.6175
+ 2026-03-29 11:51:32,766 [INFO] new_opacus_codex.train_steps: epoch=3 step=510 loss=0.6176
+ 2026-03-29 11:51:40,823 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=510 eval_loss=0.8264 duration_sec=8.02
+ 2026-03-29 11:52:09,079 [INFO] new_opacus_codex.train_steps: epoch=3 step=515 loss=0.6196
+ 2026-03-29 11:52:36,133 [INFO] new_opacus_codex.train_steps: epoch=3 step=520 loss=0.6241
+ 2026-03-29 11:52:44,170 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=520 eval_loss=0.8254 duration_sec=8.01
+ 2026-03-29 11:53:11,451 [INFO] new_opacus_codex.train_steps: epoch=3 step=525 loss=0.6102
+ 2026-03-29 11:53:39,969 [INFO] new_opacus_codex.train_steps: epoch=3 step=530 loss=0.5918
+ 2026-03-29 11:53:48,082 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=530 eval_loss=0.8241 duration_sec=8.01
+ 2026-03-29 11:54:16,550 [INFO] new_opacus_codex.train_steps: epoch=3 step=535 loss=0.5962
+ 2026-03-29 11:54:44,719 [INFO] new_opacus_codex.train_steps: epoch=3 step=540 loss=0.5836
+ 2026-03-29 11:54:52,770 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=540 eval_loss=0.8230 duration_sec=8.03
+ 2026-03-29 11:55:20,814 [INFO] new_opacus_codex.train_steps: epoch=3 step=545 loss=0.5880
+ 2026-03-29 11:56:11,759 [INFO] new_opacus_codex.train_steps: epoch=4 step=550 loss=0.5181
+ 2026-03-29 11:56:19,868 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=550 eval_loss=0.8233 duration_sec=7.97
+ 2026-03-29 11:56:48,418 [INFO] new_opacus_codex.train_steps: epoch=4 step=555 loss=0.5253
+ 2026-03-29 11:57:16,348 [INFO] new_opacus_codex.train_steps: epoch=4 step=560 loss=0.5435
+ 2026-03-29 11:57:24,434 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=560 eval_loss=0.8331 duration_sec=8.08
+ 2026-03-29 11:57:52,529 [INFO] new_opacus_codex.train_steps: epoch=4 step=565 loss=0.5490
+ 2026-03-29 11:58:20,097 [INFO] new_opacus_codex.train_steps: epoch=4 step=570 loss=0.5288
+ 2026-03-29 11:58:28,411 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=570 eval_loss=0.8334 duration_sec=8.28
+ 2026-03-29 11:58:56,825 [INFO] new_opacus_codex.train_steps: epoch=4 step=575 loss=0.5030
+ 2026-03-29 11:59:24,101 [INFO] new_opacus_codex.train_steps: epoch=4 step=580 loss=0.5100
+ 2026-03-29 11:59:32,132 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=580 eval_loss=0.8332 duration_sec=8.00
+ 2026-03-29 12:00:00,170 [INFO] new_opacus_codex.train_steps: epoch=4 step=585 loss=0.5275
+ 2026-03-29 12:00:28,105 [INFO] new_opacus_codex.train_steps: epoch=4 step=590 loss=0.5425
+ 2026-03-29 12:00:36,262 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=590 eval_loss=0.8327 duration_sec=8.08
+ 2026-03-29 12:01:04,567 [INFO] new_opacus_codex.train_steps: epoch=4 step=595 loss=0.5759
+ 2026-03-29 12:01:33,099 [INFO] new_opacus_codex.train_steps: epoch=4 step=600 loss=0.5458
+ 2026-03-29 12:01:41,372 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=600 eval_loss=0.8327 duration_sec=7.99
+ 2026-03-29 12:02:08,867 [INFO] new_opacus_codex.train_steps: epoch=4 step=605 loss=0.5254
+ 2026-03-29 12:02:37,705 [INFO] new_opacus_codex.train_steps: epoch=4 step=610 loss=0.5260
+ 2026-03-29 12:02:45,758 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=610 eval_loss=0.8327 duration_sec=8.00
+ 2026-03-29 12:03:13,008 [INFO] new_opacus_codex.train_steps: epoch=4 step=615 loss=0.5115
+ 2026-03-29 12:03:41,727 [INFO] new_opacus_codex.train_steps: epoch=4 step=620 loss=0.5201
+ 2026-03-29 12:03:49,938 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=620 eval_loss=0.8314 duration_sec=8.12
+ 2026-03-29 12:04:17,555 [INFO] new_opacus_codex.train_steps: epoch=4 step=625 loss=0.5066
+ 2026-03-29 12:04:46,349 [INFO] new_opacus_codex.train_steps: epoch=4 step=630 loss=0.5403
+ 2026-03-29 12:04:54,472 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=630 eval_loss=0.8315 duration_sec=8.08
+ 2026-03-29 12:05:23,328 [INFO] new_opacus_codex.train_steps: epoch=4 step=635 loss=0.5700
+ 2026-03-29 12:05:52,557 [INFO] new_opacus_codex.train_steps: epoch=4 step=640 loss=0.5229
+ 2026-03-29 12:06:00,662 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=640 eval_loss=0.8303 duration_sec=8.05
+ 2026-03-29 12:06:28,152 [INFO] new_opacus_codex.train_steps: epoch=4 step=645 loss=0.5077
+ 2026-03-29 12:06:55,830 [INFO] new_opacus_codex.train_steps: epoch=4 step=650 loss=0.5214
+ 2026-03-29 12:07:04,006 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=650 eval_loss=0.8293 duration_sec=8.00
+ 2026-03-29 12:07:32,182 [INFO] new_opacus_codex.train_steps: epoch=4 step=655 loss=0.5220
+ 2026-03-29 12:08:00,070 [INFO] new_opacus_codex.train_steps: epoch=4 step=660 loss=0.5356
+ 2026-03-29 12:08:08,300 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=660 eval_loss=0.8283 duration_sec=7.95
+ 2026-03-29 12:08:35,588 [INFO] new_opacus_codex.train_steps: epoch=4 step=665 loss=0.5448
+ 2026-03-29 12:09:03,517 [INFO] new_opacus_codex.train_steps: epoch=4 step=670 loss=0.5327
+ 2026-03-29 12:09:11,678 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=670 eval_loss=0.8280 duration_sec=7.97
+ 2026-03-29 12:09:39,526 [INFO] new_opacus_codex.train_steps: epoch=4 step=675 loss=0.5056
+ 2026-03-29 12:10:07,142 [INFO] new_opacus_codex.train_steps: epoch=4 step=680 loss=0.4986
+ 2026-03-29 12:10:15,188 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=680 eval_loss=0.8272 duration_sec=8.03
+ 2026-03-29 12:10:43,200 [INFO] new_opacus_codex.train_steps: epoch=4 step=685 loss=0.5073
+ 2026-03-29 12:11:11,705 [INFO] new_opacus_codex.train_steps: epoch=4 step=690 loss=0.5052
+ 2026-03-29 12:11:19,795 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=690 eval_loss=0.8257 duration_sec=8.04
+ 2026-03-29 12:11:47,752 [INFO] new_opacus_codex.train_steps: epoch=4 step=695 loss=0.5220
+ 2026-03-29 12:12:15,725 [INFO] new_opacus_codex.train_steps: epoch=4 step=700 loss=0.5319
+ 2026-03-29 12:12:23,980 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=700 eval_loss=0.8253 duration_sec=8.00
+ 2026-03-29 12:12:51,979 [INFO] new_opacus_codex.train_steps: epoch=4 step=705 loss=0.5348
+ 2026-03-29 12:13:19,858 [INFO] new_opacus_codex.train_steps: epoch=4 step=710 loss=0.5454
+ 2026-03-29 12:13:27,983 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=710 eval_loss=0.8257 duration_sec=8.08
+ 2026-03-29 12:13:56,281 [INFO] new_opacus_codex.train_steps: epoch=4 step=715 loss=0.5320
+ 2026-03-29 12:14:24,262 [INFO] new_opacus_codex.train_steps: epoch=4 step=720 loss=0.5195
+ 2026-03-29 12:14:32,311 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=720 eval_loss=0.8254 duration_sec=8.03
+ 2026-03-29 12:14:59,454 [INFO] new_opacus_codex.train_steps: epoch=4 step=725 loss=0.5339
+ 2026-03-29 12:15:26,513 [INFO] new_opacus_codex.train_steps: epoch=4 step=730 loss=0.5347
+ 2026-03-29 12:15:34,698 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=730 eval_loss=0.8238 duration_sec=7.97
+ 2026-03-29 12:16:25,799 [INFO] new_opacus_codex.train_steps: epoch=5 step=735 loss=0.4346
+ 2026-03-29 12:16:54,313 [INFO] new_opacus_codex.train_steps: epoch=5 step=740 loss=0.4553
+ 2026-03-29 12:17:02,403 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=740 eval_loss=0.8344 duration_sec=8.04
+ 2026-03-29 12:17:31,042 [INFO] new_opacus_codex.train_steps: epoch=5 step=745 loss=0.4657
+ 2026-03-29 12:17:58,588 [INFO] new_opacus_codex.train_steps: epoch=5 step=750 loss=0.4555
+ 2026-03-29 12:18:06,706 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=750 eval_loss=0.8365 duration_sec=8.08
+ 2026-03-29 12:18:34,412 [INFO] new_opacus_codex.train_steps: epoch=5 step=755 loss=0.4594
+ 2026-03-29 12:19:02,785 [INFO] new_opacus_codex.train_steps: epoch=5 step=760 loss=0.4741
+ 2026-03-29 12:19:10,881 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=760 eval_loss=0.8355 duration_sec=8.08
+ 2026-03-29 12:19:38,704 [INFO] new_opacus_codex.train_steps: epoch=5 step=765 loss=0.4579
+ 2026-03-29 12:20:07,623 [INFO] new_opacus_codex.train_steps: epoch=5 step=770 loss=0.4606
+ 2026-03-29 12:20:15,686 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=770 eval_loss=0.8358 duration_sec=8.05
+ 2026-03-29 12:20:43,326 [INFO] new_opacus_codex.train_steps: epoch=5 step=775 loss=0.4614
+ 2026-03-29 12:21:11,409 [INFO] new_opacus_codex.train_steps: epoch=5 step=780 loss=0.4581
+ 2026-03-29 12:21:19,443 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=780 eval_loss=0.8354 duration_sec=8.01
+ 2026-03-29 12:21:47,924 [INFO] new_opacus_codex.train_steps: epoch=5 step=785 loss=0.4793
+ 2026-03-29 12:22:15,623 [INFO] new_opacus_codex.train_steps: epoch=5 step=790 loss=0.4761
+ 2026-03-29 12:22:23,735 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=790 eval_loss=0.8360 duration_sec=7.98
+ 2026-03-29 12:22:51,189 [INFO] new_opacus_codex.train_steps: epoch=5 step=795 loss=0.4736
+ 2026-03-29 12:23:19,018 [INFO] new_opacus_codex.train_steps: epoch=5 step=800 loss=0.4910
+ 2026-03-29 12:23:27,052 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=800 eval_loss=0.8356 duration_sec=8.01
+ 2026-03-29 12:23:55,327 [INFO] new_opacus_codex.train_steps: epoch=5 step=805 loss=0.4790
+ 2026-03-29 12:24:23,426 [INFO] new_opacus_codex.train_steps: epoch=5 step=810 loss=0.4390
+ 2026-03-29 12:24:31,473 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=810 eval_loss=0.8356 duration_sec=8.02
+ 2026-03-29 12:24:58,606 [INFO] new_opacus_codex.train_steps: epoch=5 step=815 loss=0.4486
+ 2026-03-29 12:25:25,936 [INFO] new_opacus_codex.train_steps: epoch=5 step=820 loss=0.4748
+ 2026-03-29 12:25:34,175 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=820 eval_loss=0.8346 duration_sec=8.06
+ 2026-03-29 12:26:00,931 [INFO] new_opacus_codex.train_steps: epoch=5 step=825 loss=0.4827
+ 2026-03-29 12:26:28,277 [INFO] new_opacus_codex.train_steps: epoch=5 step=830 loss=0.4756
+ 2026-03-29 12:26:36,594 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=830 eval_loss=0.8348 duration_sec=8.09
+ 2026-03-29 12:27:05,080 [INFO] new_opacus_codex.train_steps: epoch=5 step=835 loss=0.4729
+ 2026-03-29 12:27:32,694 [INFO] new_opacus_codex.train_steps: epoch=5 step=840 loss=0.4883
+ 2026-03-29 12:27:40,802 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=840 eval_loss=0.8356 duration_sec=8.04
+ 2026-03-29 12:28:08,730 [INFO] new_opacus_codex.train_steps: epoch=5 step=845 loss=0.4890
+ 2026-03-29 12:28:36,203 [INFO] new_opacus_codex.train_steps: epoch=5 step=850 loss=0.4740
+ 2026-03-29 12:28:44,279 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=850 eval_loss=0.8353 duration_sec=8.05
+ 2026-03-29 12:29:11,577 [INFO] new_opacus_codex.train_steps: epoch=5 step=855 loss=0.4722
+ 2026-03-29 12:29:38,730 [INFO] new_opacus_codex.train_steps: epoch=5 step=860 loss=0.4944
+ 2026-03-29 12:29:46,805 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=860 eval_loss=0.8338 duration_sec=7.99
+ 2026-03-29 12:30:14,822 [INFO] new_opacus_codex.train_steps: epoch=5 step=865 loss=0.4844
+ 2026-03-29 12:30:42,686 [INFO] new_opacus_codex.train_steps: epoch=5 step=870 loss=0.4533
+ 2026-03-29 12:30:50,877 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=870 eval_loss=0.8352 duration_sec=8.09
+ 2026-03-29 12:31:19,245 [INFO] new_opacus_codex.train_steps: epoch=5 step=875 loss=0.4673
+ 2026-03-29 12:31:46,814 [INFO] new_opacus_codex.train_steps: epoch=5 step=880 loss=0.4917
+ 2026-03-29 12:31:54,902 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=880 eval_loss=0.8351 duration_sec=8.01
+ 2026-03-29 12:32:23,210 [INFO] new_opacus_codex.train_steps: epoch=5 step=885 loss=0.4611
+ 2026-03-29 12:32:51,487 [INFO] new_opacus_codex.train_steps: epoch=5 step=890 loss=0.4500
+ 2026-03-29 12:32:59,582 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=890 eval_loss=0.8356 duration_sec=8.02
+ 2026-03-29 12:33:27,035 [INFO] new_opacus_codex.train_steps: epoch=5 step=895 loss=0.4707
+ 2026-03-29 12:33:55,266 [INFO] new_opacus_codex.train_steps: epoch=5 step=900 loss=0.4744
+ 2026-03-29 12:34:03,346 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=900 eval_loss=0.8349 duration_sec=8.05
+ 2026-03-29 12:34:31,343 [INFO] new_opacus_codex.train_steps: epoch=5 step=905 loss=0.4692
+ 2026-03-29 12:34:58,620 [INFO] new_opacus_codex.train_steps: epoch=5 step=910 loss=0.4724
+ 2026-03-29 12:35:06,706 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=910 eval_loss=0.8341 duration_sec=8.02
+ 2026-03-29 12:35:34,016 [INFO] new_opacus_codex.train_steps: epoch=5 step=915 loss=0.4778
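The log above follows a fixed `epoch=… step=… loss=…` / `… eval_loss=… duration_sec=…` layout, so the train and eval loss curves can be recovered with two regexes. A minimal sketch (the file path is this repo's; the patterns assume exactly the format shown above):

```python
import re

train_re = re.compile(r"epoch=(\d+) step=(\d+) loss=([\d.]+)")
eval_re = re.compile(r"step=(\d+) eval_loss=([\d.]+)")

train_points, eval_points = [], []
with open("llama3.2-3b/base/train.log") as fh:
    for line in fh:
        if "eval_loss=" in line:
            m = eval_re.search(line)
            if m:
                eval_points.append((int(m.group(1)), float(m.group(2))))
        else:
            m = train_re.search(line)
            if m:
                train_points.append((int(m.group(2)), float(m.group(3))))

print(train_points[:3], eval_points[:3])
```

Plotted, these show the pattern visible by eye: training loss keeps falling across all five epochs while eval loss bottoms out around step 730 (~0.824) and drifts up in epoch 5.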
llama3.2-3b/dp3/adapter/README.md ADDED
@@ -0,0 +1,207 @@
+ ---
+ base_model: meta-llama/Llama-3.2-3B
+ library_name: peft
+ pipeline_tag: text-generation
+ tags:
+ - base_model:adapter:meta-llama/Llama-3.2-3B
+ - lora
+ - transformers
+ ---
+
+ # Model Card for Model ID
+
+ <!-- Provide a quick summary of what the model is/does. -->
+
+
+
+ ## Model Details
+
+ ### Model Description
+
+ <!-- Provide a longer summary of what this model is. -->
+
+
+
+ - **Developed by:** [More Information Needed]
+ - **Funded by [optional]:** [More Information Needed]
+ - **Shared by [optional]:** [More Information Needed]
+ - **Model type:** [More Information Needed]
+ - **Language(s) (NLP):** [More Information Needed]
+ - **License:** [More Information Needed]
+ - **Finetuned from model [optional]:** [More Information Needed]
+
+ ### Model Sources [optional]
+
+ <!-- Provide the basic links for the model. -->
+
+ - **Repository:** [More Information Needed]
+ - **Paper [optional]:** [More Information Needed]
+ - **Demo [optional]:** [More Information Needed]
+
+ ## Uses
+
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
+
+ ### Direct Use
+
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
+
+ [More Information Needed]
+
+ ### Downstream Use [optional]
+
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
+
+ [More Information Needed]
+
+ ### Out-of-Scope Use
+
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
+
+ [More Information Needed]
+
+ ## Bias, Risks, and Limitations
+
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
+
+ [More Information Needed]
+
+ ### Recommendations
+
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
+
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+ ## How to Get Started with the Model
+
+ Use the code below to get started with the model.
+
+ [More Information Needed]
+
+ ## Training Details
+
+ ### Training Data
+
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
+
+ [More Information Needed]
+
+ ### Training Procedure
+
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
+
+ #### Preprocessing [optional]
+
+ [More Information Needed]
+
+
+ #### Training Hyperparameters
+
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
+
+ #### Speeds, Sizes, Times [optional]
+
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
+
+ [More Information Needed]
+
+ ## Evaluation
+
+ <!-- This section describes the evaluation protocols and provides the results. -->
+
+ ### Testing Data, Factors & Metrics
+
+ #### Testing Data
+
+ <!-- This should link to a Dataset Card if possible. -->
+
+ [More Information Needed]
+
+ #### Factors
+
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
+
+ [More Information Needed]
+
+ #### Metrics
+
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
+
+ [More Information Needed]
+
+ ### Results
+
+ [More Information Needed]
+
+ #### Summary
+
+
+
+ ## Model Examination [optional]
+
+ <!-- Relevant interpretability work for the model goes here -->
+
+ [More Information Needed]
+
+ ## Environmental Impact
+
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
+
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+ - **Hardware Type:** [More Information Needed]
+ - **Hours used:** [More Information Needed]
+ - **Cloud Provider:** [More Information Needed]
+ - **Compute Region:** [More Information Needed]
+ - **Carbon Emitted:** [More Information Needed]
+
+ ## Technical Specifications [optional]
+
+ ### Model Architecture and Objective
+
+ [More Information Needed]
+
+ ### Compute Infrastructure
+
+ [More Information Needed]
+
+ #### Hardware
+
+ [More Information Needed]
+
+ #### Software
+
+ [More Information Needed]
+
+ ## Citation [optional]
+
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
+
+ **BibTeX:**
+
+ [More Information Needed]
+
+ **APA:**
+
+ [More Information Needed]
+
+ ## Glossary [optional]
+
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
+
+ [More Information Needed]
+
+ ## More Information [optional]
+
+ [More Information Needed]
+
+ ## Model Card Authors [optional]
+
+ [More Information Needed]
+
+ ## Model Card Contact
+
+ [More Information Needed]
+ ### Framework versions
+
+ - PEFT 0.18.1
llama3.2-3b/dp3/adapter/adapter_config.json ADDED
@@ -0,0 +1,46 @@
+ {
+ "alora_invocation_tokens": null,
+ "alpha_pattern": {},
+ "arrow_config": null,
+ "auto_mapping": null,
+ "base_model_name_or_path": "meta-llama/Llama-3.2-3B",
+ "bias": "none",
+ "corda_config": null,
+ "ensure_weight_tying": true,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": false,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 32,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "lm_head",
+ "embed_tokens"
+ ],
+ "peft_type": "LORA",
+ "peft_version": "0.18.1",
+ "qalora_group_size": 16,
+ "r": 16,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "v_proj",
+ "k_proj",
+ "o_proj",
+ "q_proj"
+ ],
+ "target_parameters": null,
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_qalora": false,
+ "use_rslora": false
+ }
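This adapter_config.json describes a LoRA adapter (r=16, alpha=32 on the q/k/v/o attention projections) whose `modules_to_save` additionally stores full copies of `lm_head` and `embed_tokens`, which helps explain why the `adapter_model.safetensors` below is several GB rather than a few tens of MB. A minimal loading sketch, assuming the repo is checked out locally (the local paths are illustrative; PEFT resolves the base weights from `base_model_name_or_path`):

```python
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the frozen base model, then attach this repo's LoRA adapter on top.
base = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Llama-3.2-3B", torch_dtype=torch.bfloat16
)
model = PeftModel.from_pretrained(base, "llama3.2-3b/dp3/adapter")
# Use the run's own tokenizer, which carries the audit canary tokens.
tokenizer = AutoTokenizer.from_pretrained("llama3.2-3b/dp3/tokenizer")
model.eval()
```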
llama3.2-3b/dp3/adapter/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da217e3f13e0ea9d2cd26d83216741f05817ab085970ab9c18cabf6f6756dc4c
+ size 4783192176
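These three lines are a Git LFS pointer, not the tensors themselves; `git lfs pull` fetches the real file, which can then be checked against the oid and size recorded above. A quick verification sketch using only the values from the pointer:

```python
import hashlib
import os

path = "llama3.2-3b/dp3/adapter/adapter_model.safetensors"
expected_oid = "da217e3f13e0ea9d2cd26d83216741f05817ab085970ab9c18cabf6f6756dc4c"
expected_size = 4783192176

# Stream the file in 1 MiB chunks to avoid loading ~4.8 GB into memory.
h = hashlib.sha256()
with open(path, "rb") as fh:
    for chunk in iter(lambda: fh.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch (pointer not pulled?)"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
```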
llama3.2-3b/dp3/audit_results.json ADDED
@@ -0,0 +1,137 @@
+ {
+ "delta": 1e-05,
+ "num_canaries": 500,
+ "num_members": 250,
+ "paper_guess_fraction": 0.2,
+ "paper_guess_steps": 20,
+ "loss": {
+ "auc": 0.5616,
+ "empirical_epsilon": {
+ "0.05": 0.0,
+ "0.01": 0.0
+ },
+ "empirical_epsilon_details": {
+ "0.05": {
+ "epsilon": 0.0,
+ "num_guesses": 0,
+ "correct_guesses": 0,
+ "candidate_num_guesses": [
+ 5,
+ 10,
+ 15,
+ 20,
+ 25,
+ 30,
+ 35,
+ 40,
+ 45,
+ 50,
+ 55,
+ 60,
+ 65,
+ 70,
+ 75,
+ 80,
+ 85,
+ 90,
+ 95,
+ 100
+ ],
+ "direction": "lower"
+ },
+ "0.01": {
+ "epsilon": 0.0,
+ "num_guesses": 0,
+ "correct_guesses": 0,
+ "candidate_num_guesses": [
+ 5,
+ 10,
+ 15,
+ 20,
+ 25,
+ 30,
+ 35,
+ 40,
+ 45,
+ 50,
+ 55,
+ 60,
+ 65,
+ 70,
+ 75,
+ 80,
+ 85,
+ 90,
+ 95,
+ 100
+ ],
+ "direction": "lower"
+ }
+ }
+ },
+ "embedding": {
+ "auc": 0.508464,
+ "empirical_epsilon": {
+ "0.05": 0.0,
+ "0.01": 0.0
+ },
+ "empirical_epsilon_details": {
+ "0.05": {
+ "epsilon": 0.0,
+ "num_guesses": 0,
+ "correct_guesses": 0,
+ "candidate_num_guesses": [
+ 5,
+ 10,
+ 15,
+ 20,
+ 25,
+ 30,
+ 35,
+ 40,
+ 45,
+ 50,
+ 55,
+ 60,
+ 65,
+ 70,
+ 75,
+ 80,
+ 85,
+ 90,
+ 95,
+ 100
+ ],
+ "direction": "lower"
+ },
+ "0.01": {
+ "epsilon": 0.0,
+ "num_guesses": 0,
+ "correct_guesses": 0,
+ "candidate_num_guesses": [
+ 5,
+ 10,
+ 15,
+ 20,
+ 25,
+ 30,
+ 35,
+ 40,
+ 45,
+ 50,
+ 55,
+ 60,
+ 65,
+ 70,
+ 75,
+ 80,
+ 85,
+ 90,
+ 95,
+ 100
+ ],
+ "direction": "lower"
+ }
+ }
+ }
+ }
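With `correct_guesses` at 0 for every candidate guess budget and both p-values, the reported empirical epsilon is 0.0 at this DP level, i.e. the loss and embedding attacks certify no privacy leakage, consistent with their near-chance AUCs (0.5616 and 0.508). As a loudly hypothetical illustration of how a guess-based bound of this shape can be computed, in the spirit of one-run auditing (Steinke et al., 2023), here is a simplified version that ignores the delta term and finite-population corrections, so it is not necessarily the exact procedure behind these numbers:

```python
import math

from scipy.stats import binom

def empirical_epsilon(num_guesses: int, correct_guesses: int, p_value: float) -> float:
    # Largest eps that the observed guess record refutes at level p_value.
    # Heuristic: under eps-DP, each guess is correct with probability at most
    # q = e^eps / (1 + e^eps); bisect on eps over the binomial tail.
    lo, hi = 0.0, 20.0
    for _ in range(60):
        mid = (lo + hi) / 2
        q = math.exp(mid) / (1.0 + math.exp(mid))
        tail = binom.sf(correct_guesses - 1, num_guesses, q)  # P[X >= correct]
        if tail <= p_value:
            lo = mid
        else:
            hi = mid
    return lo

# correct_guesses = 0 can never be surprising, so the bound collapses to 0.0,
# matching the report above.
print(empirical_epsilon(20, 0, 0.05))
```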
llama3.2-3b/dp3/audit_scores.npz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d9dc397c5ece4c51cf36da48b1b12ad05763e1d0bf365ad562e22a858cc5efa3
+ size 12784
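The `.npz` holds the raw attack scores behind the AUCs in audit_results.json; its array names are not documented in this diff, so a first step is simply to enumerate them. Sketch (the key names are unknown, hence the discovery loop):

```python
import numpy as np

data = np.load("llama3.2-3b/dp3/audit_scores.npz")
for name in data.files:  # discover what the archive actually holds
    print(name, data[name].shape, data[name].dtype)
```

Given per-canary scores plus membership labels, `sklearn.metrics.roc_auc_score` should reproduce the 0.5616 (loss) and 0.508 (embedding) figures above.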
llama3.2-3b/dp3/canary_meta.json ADDED
The diff for this file is too large to render. See raw diff
 
llama3.2-3b/dp3/metrics.jsonl ADDED
The diff for this file is too large to render. See raw diff
 
llama3.2-3b/dp3/resolved_config.yaml ADDED
@@ -0,0 +1,102 @@
+ model:
+ name: meta-llama/Llama-3.2-3B
+ tokenizer_name: meta-llama/Llama-3.2-3B
+ max_length: 1024
+ dtype: bfloat16
+ trust_remote_code: true
+ use_fast_tokenizer: true
+ cache_dir: null
+ local_files_only: false
+ low_cpu_mem_usage: true
+ tie_word_embeddings: true
+ gradient_checkpointing: false
+ use_chat_template: false
+ dataset:
+ name: melihcatal/codedp-cpt
+ split: train
+ mode: cpt
+ text_column: text
+ validation_ratio: 0.05
+ max_samples: -1
+ lora:
+ enabled: true
+ r: 16
+ alpha: 32
+ dropout: 0.05
+ target_modules:
+ - q_proj
+ - k_proj
+ - v_proj
+ - o_proj
+ modules_to_save:
+ - lm_head
+ bias: none
+ training:
+ seed: 42
+ epochs: 5
+ warmup_steps: null
+ warmup_ratio: 0.05
+ mixed_precision: false
+ mixed_precision_dtype: bfloat16
+ batch_size: 4
+ eval_batch_size: 8
+ eval_every_steps: 10
+ eval_every_epochs: 1
+ learning_rate: 0.0005
+ optimizer: adamw
+ lr_scheduler: cosine
+ adam_beta1: 0.9
+ adam_beta2: 0.999
+ adam_epsilon: 1.0e-08
+ sgd_momentum: 0.9
+ weight_decay: 0.01
+ max_grad_norm: 1.0
+ log_every: 5
+ gradient_accumulation_steps: 8
+ num_workers: 4
+ output_dir: /scratch/mcatal/runs/cpt/llama3.2-3b/dp3
+ min_lr_ratio: 0.15
+ distributed:
+ strategy: dpddp
+ backend: nccl
+ devices: null
+ dp:
+ module_validator: auto
+ target_delta: 1.0e-05
+ noise_multiplier: null
+ max_grad_norm: 1.0
+ grad_sample_mode: hooks
+ clipping: per_layer
+ secure_mode: false
+ enabled: true
+ target_epsilon: 3.0
+ audit:
+ enabled: true
+ run_every_epoch: true
+ epoch_device: cuda
+ q_canary: auto
+ num_canaries: 500
+ prefix_length: 49
+ num_digits: 12
+ batch_size: 32
+ delta: 1.0e-05
+ p_values:
+ - 0.05
+ - 0.01
+ paper_guess_fraction: 0.2
+ paper_guess_steps: 20
+ enable_holdout_empirical_epsilon: false
+ holdout_seed: 42
+ tie_seed: 42
+ tracking:
+ enabled: true
+ tensorboard: true
+ wandb: false
+ wandb_project: codedp-finetune-h200-audit
+ wandb_run_name: llama3.2-3b-cpt-dp3
+ wandb_mode: online
+ codecarbon: true
+ codecarbon_output_file: codecarbon.csv
+ codecarbon_measure_power_secs: 15
+ codecarbon_country_iso_code: null
+ codecarbon_project_name: codedp-llama3.2-3b-cpt-dp3
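The `dp` block (target_epsilon 3.0, target_delta 1e-05, max_grad_norm 1.0 over 5 epochs, noise_multiplier left null to be calibrated) is the kind of setup Opacus wires up with `make_private_with_epsilon`, which solves for the noise multiplier that meets the target budget. A minimal, self-contained sketch with a toy model standing in for the LoRA-wrapped Llama; the actual run used the repo's own `new_opacus_codex` trainer with `clipping: per_layer`, which this sketch does not reproduce:

```python
import torch
from torch.utils.data import DataLoader, TensorDataset
from opacus import PrivacyEngine

# Toy stand-ins for the real model/optimizer/data pipeline.
model = torch.nn.Linear(16, 2)
optimizer = torch.optim.AdamW(model.parameters(), lr=5e-4, weight_decay=0.01)
loader = DataLoader(
    TensorDataset(torch.randn(256, 16), torch.randint(0, 2, (256,))),
    batch_size=4,
)

engine = PrivacyEngine(secure_mode=False)
model, optimizer, loader = engine.make_private_with_epsilon(
    module=model,
    optimizer=optimizer,
    data_loader=loader,
    target_epsilon=3.0,
    target_delta=1e-5,
    epochs=5,
    max_grad_norm=1.0,  # flat clipping here; the run used per-layer clipping
)
print("calibrated noise multiplier:", optimizer.noise_multiplier)
```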
llama3.2-3b/dp3/summary.json ADDED
@@ -0,0 +1,72 @@
+ {
+ "audit/delta": 1e-05,
+ "audit/embedding/auc": 0.508464,
+ "audit/embedding/empirical_epsilon/0.01": 0.0,
+ "audit/embedding/empirical_epsilon/0.05": 0.0,
+ "audit/embedding/empirical_epsilon_details/0.01/correct_guesses": 0.0,
+ "audit/embedding/empirical_epsilon_details/0.01/epsilon": 0.0,
+ "audit/embedding/empirical_epsilon_details/0.01/num_guesses": 0.0,
+ "audit/embedding/empirical_epsilon_details/0.05/correct_guesses": 0.0,
+ "audit/embedding/empirical_epsilon_details/0.05/epsilon": 0.0,
+ "audit/embedding/empirical_epsilon_details/0.05/num_guesses": 0.0,
+ "audit/loss/auc": 0.5616,
+ "audit/loss/empirical_epsilon/0.01": 0.0,
+ "audit/loss/empirical_epsilon/0.05": 0.0,
+ "audit/loss/empirical_epsilon_details/0.01/correct_guesses": 0.0,
+ "audit/loss/empirical_epsilon_details/0.01/epsilon": 0.0,
+ "audit/loss/empirical_epsilon_details/0.01/num_guesses": 0.0,
+ "audit/loss/empirical_epsilon_details/0.05/correct_guesses": 0.0,
+ "audit/loss/empirical_epsilon_details/0.05/epsilon": 0.0,
+ "audit/loss/empirical_epsilon_details/0.05/num_guesses": 0.0,
+ "audit/num_canaries": 500.0,
+ "audit/num_members": 250.0,
+ "audit/paper_guess_fraction": 0.2,
+ "audit/paper_guess_steps": 20.0,
+ "energy/codecarbon/cpu_count": 16.0,
+ "energy/codecarbon/cpu_energy": 0.1551679753698767,
+ "energy/codecarbon/cpu_power": 80.03149480412914,
+ "energy/codecarbon/cpu_utilization_percent": 3.7720395653385346,
+ "energy/codecarbon/duration": 7251.761993754655,
+ "energy/codecarbon/emissions": 0.20754124614078484,
+ "energy/codecarbon/emissions_rate": 2.8619423295955245e-05,
+ "energy/codecarbon/energy_consumed": 5.956468907407078,
+ "energy/codecarbon/gpu_count": 8.0,
+ "energy/codecarbon/gpu_energy": 5.7276257109858335,
+ "energy/codecarbon/gpu_power": 2853.731528437409,
+ "energy/codecarbon/gpu_utilization_percent": 94.3128657007523,
+ "energy/codecarbon/latitude": 47.4843,
+ "energy/codecarbon/longitude": 8.212,
+ "energy/codecarbon/pue": 1.0,
+ "energy/codecarbon/ram_energy": 0.07367522105136855,
+ "energy/codecarbon/ram_power": 38.0,
+ "energy/codecarbon/ram_total_size": 128.0,
+ "energy/codecarbon/ram_used_gb": 509.22042753411455,
+ "energy/codecarbon/ram_utilization_percent": 25.705725828921707,
+ "energy/codecarbon/water_consumed": 0.0,
+ "energy/codecarbon/wue": 0.0,
+ "eval/duration_sec": 7.999012880027294,
+ "eval/loss": 1.1953592909834323,
+ "perf/audit_duration_sec": 6.093202186282724,
+ "perf/epoch_duration_sec": 1431.2705155275762,
+ "perf/epoch_samples": 51412.0,
+ "perf/epoch_samples_per_sec": 35.920533150261384,
+ "perf/epoch_tokens": 37419500.0,
+ "perf/epoch_tokens_per_sec": 26144.25406940414,
+ "perf/logical_batch_size": 3.0,
+ "perf/logical_token_count": 432.0,
+ "perf/physical_batches": 2.0,
+ "perf/samples_per_sec": 2.0696493913025646,
+ "perf/step_duration_sec": 1.4495208766311407,
+ "perf/tokens_per_sec": 298.02951234756927,
+ "privacy/epsilon": 2.991647548922312,
+ "system/cuda_epoch_peak_memory_gb": 39.07441425323486,
+ "system/cuda_max_memory_allocated_gb": 39.07441425323486,
+ "system/cuda_memory_allocated_gb": 10.810073375701904,
+ "train/epoch_canary_loss": 8.3262361002136,
+ "train/epoch_loss": 2.1001656648724922,
+ "train/epoch_real_loss": 1.2966650054213065,
+ "train/lr": 7.5e-05,
+ "train/step_canary_loss": 7.166666666666667,
+ "train/step_loss": 5.666530251502991,
+ "train/step_real_loss": 1.1661202907562256
+ }
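The summary is internally consistent: `perf/epoch_tokens / perf/epoch_duration_sec` is 37419500 / 1431.27 ≈ 26144 tokens/s, matching `perf/epoch_tokens_per_sec`, and the spent `privacy/epsilon` of ~2.99 stays just under the configured `target_epsilon` of 3.0. A small check script over the file above:

```python
import json

with open("llama3.2-3b/dp3/summary.json") as fh:
    s = json.load(fh)

# Throughput cross-check: derived rate should match the reported one.
derived = s["perf/epoch_tokens"] / s["perf/epoch_duration_sec"]
assert abs(derived - s["perf/epoch_tokens_per_sec"]) < 1.0

# Spent privacy budget stays under the configured target_epsilon of 3.0.
assert s["privacy/epsilon"] <= 3.0

gpu_share = s["energy/codecarbon/gpu_energy"] / s["energy/codecarbon/energy_consumed"]
print(f"GPU share of total energy: {gpu_share:.1%}")  # ~96% on this run
```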
llama3.2-3b/dp3/tokenizer/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a51697eccb3cec5f738016546cd668e72543a96f95900714d89e9c88f41271bf
+ size 17304420
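The run's tokenizer is saved alongside the adapter, and, as tokenizer_config.json below shows, the audit canary secrets are stored under `extra_special_tokens` as 12-digit strings (matching `audit.num_digits: 12` in resolved_config.yaml). A sketch that counts them; their total should line up with `audit.num_canaries`, though only part of the file is visible in this diff:

```python
import json

with open("llama3.2-3b/dp3/tokenizer/tokenizer_config.json") as fh:
    cfg = json.load(fh)

# Collect the canary-style entries; the naming scheme is taken from the file.
canaries = {
    k: v
    for k, v in cfg["extra_special_tokens"].items()
    if k.startswith("extra_special_token_")
}
print(len(canaries), "canary tokens, e.g.", next(iter(canaries.items())))
```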
llama3.2-3b/dp3/tokenizer/tokenizer_config.json ADDED
@@ -0,0 +1,516 @@
+ {
+ "backend": "tokenizers",
+ "bos_token": "<|begin_of_text|>",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|end_of_text|>",
+ "extra_special_tokens": {
+ "extra_special_token_0": "865331112869",
+ "extra_special_token_1": "569765693871",
+ "extra_special_token_2": "485177821815",
+ "extra_special_token_3": "135441121756",
+ "extra_special_token_4": "367459894796",
+ "extra_special_token_5": "877482678543",
+ "extra_special_token_6": "457919547633",
+ "extra_special_token_7": "765474393376",
+ "extra_special_token_8": "114848338811",
+ "extra_special_token_9": "746285987371",
+ "extra_special_token_10": "649291669397",
+ "extra_special_token_11": "927914615679",
+ "extra_special_token_12": "445925149649",
+ "extra_special_token_13": "691587454538",
+ "extra_special_token_14": "143777992227",
+ "extra_special_token_15": "997981281989",
+ "extra_special_token_16": "425949483533",
+ "extra_special_token_17": "982993456429",
+ "extra_special_token_18": "718726519731",
+ "extra_special_token_19": "172599315861",
+ "extra_special_token_20": "643489267333",
+ "extra_special_token_21": "282322838685",
+ "extra_special_token_22": "781653545886",
+ "extra_special_token_23": "796415361892",
+ "extra_special_token_24": "841991688488",
+ "extra_special_token_25": "211411365397",
+ "extra_special_token_26": "698218415444",
+ "extra_special_token_27": "355977139358",
+ "extra_special_token_28": "682564697312",
+ "extra_special_token_29": "383837596997",
+ "extra_special_token_30": "689362171782",
+ "extra_special_token_31": "749966767285",
+ "extra_special_token_32": "753159165157",
+ "extra_special_token_33": "795693824762",
+ "extra_special_token_34": "669689115557",
+ "extra_special_token_35": "327491773134",
+ "extra_special_token_36": "983569279932",
+ "extra_special_token_37": "612128769512",
+ "extra_special_token_38": "374327157578",
+ "extra_special_token_39": "311632789559",
+ "extra_special_token_40": "523918658846",
+ "extra_special_token_41": "765981581453",
+ "extra_special_token_42": "794825141891",
+ "extra_special_token_43": "873898736873",
+ "extra_special_token_44": "447445629421",
+ "extra_special_token_45": "473822473819",
+ "extra_special_token_46": "181439694557",
+ "extra_special_token_47": "592538279337",
+ "extra_special_token_48": "668134915514",
+ "extra_special_token_49": "643692393748",
+ "extra_special_token_50": "696651276628",
+ "extra_special_token_51": "853859348234",
+ "extra_special_token_52": "778466723723",
+ "extra_special_token_53": "929826356991",
+ "extra_special_token_54": "272362973463",
+ "extra_special_token_55": "694235616268",
+ "extra_special_token_56": "281673864127",
+ "extra_special_token_57": "479676316326",
+ "extra_special_token_58": "646979124677",
+ "extra_special_token_59": "922327493433",
+ "extra_special_token_60": "883685933161",
+ "extra_special_token_61": "264259917554",
+ "extra_special_token_62": "836746273134",
+ "extra_special_token_63": "658481324922",
+ "extra_special_token_64": "481884157827",
+ "extra_special_token_65": "587787496812",
+ "extra_special_token_66": "579184949249",
+ "extra_special_token_67": "912193598348",
+ "extra_special_token_68": "529679678956",
+ "extra_special_token_69": "795838284624",
+ "extra_special_token_70": "159337222655",
+ "extra_special_token_71": "173781362446",
+ "extra_special_token_72": "773687856563",
+ "extra_special_token_73": "535787224917",
+ "extra_special_token_74": "351885857332",
+ "extra_special_token_75": "578827344666",
+ "extra_special_token_76": "198462689911",
+ "extra_special_token_77": "722618266242",
+ "extra_special_token_78": "952872416512",
+ "extra_special_token_79": "517778845323",
+ "extra_special_token_80": "749665846687",
+ "extra_special_token_81": "661436365453",
+ "extra_special_token_82": "259666844669",
+ "extra_special_token_83": "242851284913",
+ "extra_special_token_84": "514532995959",
+ "extra_special_token_85": "161588262349",
+ "extra_special_token_86": "742765629356",
+ "extra_special_token_87": "225164373623",
+ "extra_special_token_88": "676539973863",
+ "extra_special_token_89": "826214551218",
+ "extra_special_token_90": "182345464792",
+ "extra_special_token_91": "232776999554",
+ "extra_special_token_92": "337326533813",
+ "extra_special_token_93": "676676697292",
+ "extra_special_token_94": "929185622831",
+ "extra_special_token_95": "545512344383",
+ "extra_special_token_96": "499444466686",
+ "extra_special_token_97": "314697386682",
+ "extra_special_token_98": "517379856925",
+ "extra_special_token_99": "379557332953",
+ "extra_special_token_100": "614797267726",
+ "extra_special_token_101": "429781429464",
+ "extra_special_token_102": "922466849763",
+ "extra_special_token_103": "721737645236",
+ "extra_special_token_104": "479227349997",
+ "extra_special_token_105": "136931728327",
+ "extra_special_token_106": "259533577263",
+ "extra_special_token_107": "488538864842",
+ "extra_special_token_108": "937495658852",
+ "extra_special_token_109": "489991411364",
+ "extra_special_token_110": "499148455254",
+ "extra_special_token_111": "441373944925",
+ "extra_special_token_112": "899151413682",
+ "extra_special_token_113": "467893531755",
+ "extra_special_token_114": "527117488925",
+ "extra_special_token_115": "928335588653",
+ "extra_special_token_116": "374439448821",
+ "extra_special_token_117": "879425227932",
+ "extra_special_token_118": "867678158885",
+ "extra_special_token_119": "399749397872",
+ "extra_special_token_120": "129693547287",
+ "extra_special_token_121": "689285841825",
+ "extra_special_token_122": "771619544974",
+ "extra_special_token_123": "724883568652",
+ "extra_special_token_124": "516968424863",
+ "extra_special_token_125": "733737988257",
+ "extra_special_token_126": "852347289392",
+ "extra_special_token_127": "296953381169",
+ "extra_special_token_128": "377273562477",
+ "extra_special_token_129": "262296912232",
+ "extra_special_token_130": "547149832394",
+ "extra_special_token_131": "298464134954",
+ "extra_special_token_132": "216667245274",
+ "extra_special_token_133": "843998562287",
+ "extra_special_token_134": "572154333646",
+ "extra_special_token_135": "124589118494",
+ "extra_special_token_136": "841824384614",
+ "extra_special_token_137": "232896526252",
+ "extra_special_token_138": "295448593321",
+ "extra_special_token_139": "123741461297",
+ "extra_special_token_140": "653573457168",
+ "extra_special_token_141": "196735786156",
+ "extra_special_token_142": "377338713663",
+ "extra_special_token_143": "964342468552",
+ "extra_special_token_144": "586855179568",
+ "extra_special_token_145": "484773717614",
+ "extra_special_token_146": "894885246797",
+ "extra_special_token_147": "677896358599",
+ "extra_special_token_148": "848845611563",
+ "extra_special_token_149": "851852651677",
+ "extra_special_token_150": "398549545767",
+ "extra_special_token_151": "454244839926",
+ "extra_special_token_152": "799364566435",
+ "extra_special_token_153": "967114116556",
+ "extra_special_token_154": "817378986438",
+ "extra_special_token_155": "233795848681",
+ "extra_special_token_156": "824387273757",
+ "extra_special_token_157": "916198946615",
+ "extra_special_token_158": "563117729724",
+ "extra_special_token_159": "951794811935",
+ "extra_special_token_160": "374598961236",
+ "extra_special_token_161": "922867396683",
+ "extra_special_token_162": "765737843639",
+ "extra_special_token_163": "175469284871",
+ "extra_special_token_164": "231853711778",
+ "extra_special_token_165": "662426712668",
+ "extra_special_token_166": "711412347158",
+ "extra_special_token_167": "753466987363",
+ "extra_special_token_168": "513361312532",
+ "extra_special_token_169": "712992815957",
+ "extra_special_token_170": "971621888444",
+ "extra_special_token_171": "829235161526",
+ "extra_special_token_172": "585544633356",
+ "extra_special_token_173": "582471228164",
+ "extra_special_token_174": "678666359123",
+ "extra_special_token_175": "557533689478",
+ "extra_special_token_176": "632962475133",
+ "extra_special_token_177": "484489193824",
+ "extra_special_token_178": "489562189822",
+ "extra_special_token_179": "589547936288",
+ "extra_special_token_180": "363214487524",
+ "extra_special_token_181": "244885399387",
+ "extra_special_token_182": "431751228368",
+ "extra_special_token_183": "433581868192",
+ "extra_special_token_184": "486391569221",
+ "extra_special_token_185": "185438575221",
+ "extra_special_token_186": "126574388585",
+ "extra_special_token_187": "741757479784",
+ "extra_special_token_188": "529854679937",
+ "extra_special_token_189": "996116119839",
+ "extra_special_token_190": "616248973917",
+ "extra_special_token_191": "763531783491",
+ "extra_special_token_192": "955456118295",
+ "extra_special_token_193": "364196983365",
+ "extra_special_token_194": "195792996468",
+ "extra_special_token_195": "151859598873",
+ "extra_special_token_196": "399223169721",
+ "extra_special_token_197": "938488813964",
+ "extra_special_token_198": "961981959227",
+ "extra_special_token_199": "183368827562",
+ "extra_special_token_200": "533417736566",
+ "extra_special_token_201": "786391632558",
+ "extra_special_token_202": "665661658354",
+ "extra_special_token_203": "693281533643",
+ "extra_special_token_204": "475794684356",
+ "extra_special_token_205": "652154162978",
+ "extra_special_token_206": "753233719644",
+ "extra_special_token_207": "668514843129",
+ "extra_special_token_208": "819162623892",
+ "extra_special_token_209": "941169431859",
+ "extra_special_token_210": "877385381798",
+ "extra_special_token_211": "752644929761",
+ "extra_special_token_212": "881136466196",
+ "extra_special_token_213": "275597777299",
+ "extra_special_token_214": "731681792655",
+ "extra_special_token_215": "961133895172",
+ "extra_special_token_216": "864718285734",
+ "extra_special_token_217": "963852916563",
+ "extra_special_token_218": "319584985416",
+ "extra_special_token_219": "563365646341",
+ "extra_special_token_220": "811371928234",
+ "extra_special_token_221": "837131396371",
+ "extra_special_token_222": "267514771964",
+ "extra_special_token_223": "944513428457",
+ "extra_special_token_224": "117298239631",
+ "extra_special_token_225": "158142752582",
+ "extra_special_token_226": "252867443568",
+ "extra_special_token_227": "839269684865",
+ "extra_special_token_228": "612788593128",
+ "extra_special_token_229": "145669731981",
+ "extra_special_token_230": "121557291859",
+ "extra_special_token_231": "245416776926",
+ "extra_special_token_232": "799417897197",
+ "extra_special_token_233": "997958836435",
+ "extra_special_token_234": "892336777248",
+ "extra_special_token_235": "158929292238",
+ "extra_special_token_236": "581976444672",
+ "extra_special_token_237": "897784492783",
+ "extra_special_token_238": "492373714791",
+ "extra_special_token_239": "512659818733",
+ "extra_special_token_240": "881112998642",
+ "extra_special_token_241": "619454958782",
+ "extra_special_token_242": "431149748713",
+ "extra_special_token_243": "624221476921",
+ "extra_special_token_244": "125866399464",
+ "extra_special_token_245": "339882449689",
+ "extra_special_token_246": "186198784585",
+ "extra_special_token_247": "943193294691",
+ "extra_special_token_248": "955668961269",
+ "extra_special_token_249": "232787996724",
+ "extra_special_token_250": "215671314196",
+ "extra_special_token_251": "286173241916",
+ "extra_special_token_252": "745977673725",
+ "extra_special_token_253": "556976448182",
+ "extra_special_token_254": "599961512792",
+ "extra_special_token_255": "766294538337",
+ "extra_special_token_256": "934912591213",
+ "extra_special_token_257": "295118729589",
+ "extra_special_token_258": "529455466433",
+ "extra_special_token_259": "196119929397",
+ "extra_special_token_260": "379571934299",
+ "extra_special_token_261": "251789649997",
+ "extra_special_token_262": "564544131355",
+ "extra_special_token_263": "244371196654",
+ "extra_special_token_264": "384598329253",
+ "extra_special_token_265": "887753195844",
+ "extra_special_token_266": "364947325679",
+ "extra_special_token_267": "655517954651",
+ "extra_special_token_268": "673948786567",
+ "extra_special_token_269": "857231548835",
+ "extra_special_token_270": "816115936673",
+ "extra_special_token_271": "644234165531",
+ "extra_special_token_272": "182782912224",
+ "extra_special_token_273": "234316622259",
+ "extra_special_token_274": "421369185549",
+ "extra_special_token_275": "434632855397",
+ "extra_special_token_276": "921889371893",
+ "extra_special_token_277": "415956914763",
+ "extra_special_token_278": "598916996413",
+ "extra_special_token_279": "773671349113",
+ "extra_special_token_280": "952465217972",
+ "extra_special_token_281": "117657531962",
+ "extra_special_token_282": "729825168745",
+ "extra_special_token_283": "691315125346",
+ "extra_special_token_284": "768461952319",
+ "extra_special_token_285": "664847713559",
+ "extra_special_token_286": "953267689786",
+ "extra_special_token_287": "886464195129",
+ "extra_special_token_288": "824488329416",
+ "extra_special_token_289": "837873762491",
+ "extra_special_token_290": "532833541879",
+ "extra_special_token_291": "669183782449",
+ "extra_special_token_292": "941976537588",
+ "extra_special_token_293": "739394546916",
+ "extra_special_token_294": "267954879268",
+ "extra_special_token_295": "637551427887",
+ "extra_special_token_296": "217756494954",
+ "extra_special_token_297": "524444658383",
305
+ "extra_special_token_298": "117783274348",
306
+ "extra_special_token_299": "138218735276",
307
+ "extra_special_token_300": "814611949491",
308
+ "extra_special_token_301": "711641973413",
309
+ "extra_special_token_302": "499156317423",
310
+ "extra_special_token_303": "515856611931",
311
+ "extra_special_token_304": "454164859837",
312
+ "extra_special_token_305": "345271433112",
313
+ "extra_special_token_306": "462294118988",
314
+ "extra_special_token_307": "511785788222",
315
+ "extra_special_token_308": "497294727353",
316
+ "extra_special_token_309": "866519986723",
317
+ "extra_special_token_310": "334513529294",
318
+ "extra_special_token_311": "549946382131",
319
+ "extra_special_token_312": "284445431422",
320
+ "extra_special_token_313": "396521188476",
321
+ "extra_special_token_314": "421435255895",
322
+ "extra_special_token_315": "133373659361",
323
+ "extra_special_token_316": "322683334381",
324
+ "extra_special_token_317": "228358422847",
325
+ "extra_special_token_318": "291762694874",
326
+ "extra_special_token_319": "143182978129",
327
+ "extra_special_token_320": "511923256573",
328
+ "extra_special_token_321": "327158398268",
329
+ "extra_special_token_322": "879764613759",
330
+ "extra_special_token_323": "564395222747",
331
+ "extra_special_token_324": "451161679736",
332
+ "extra_special_token_325": "538631466654",
333
+ "extra_special_token_326": "221762325616",
334
+ "extra_special_token_327": "218391991184",
335
+ "extra_special_token_328": "322589379462",
336
+ "extra_special_token_329": "876537814263",
337
+ "extra_special_token_330": "152676556624",
338
+ "extra_special_token_331": "332522971941",
339
+ "extra_special_token_332": "884354318946",
340
+ "extra_special_token_333": "513349618943",
341
+ "extra_special_token_334": "116639746413",
342
+ "extra_special_token_335": "635185846287",
343
+ "extra_special_token_336": "993832498489",
344
+ "extra_special_token_337": "813981174797",
345
+ "extra_special_token_338": "438745114173",
346
+ "extra_special_token_339": "983493951323",
347
+ "extra_special_token_340": "724492262421",
348
+ "extra_special_token_341": "622553389126",
349
+ "extra_special_token_342": "889965243135",
350
+ "extra_special_token_343": "364492359246",
351
+ "extra_special_token_344": "154962668224",
352
+ "extra_special_token_345": "179564995814",
353
+ "extra_special_token_346": "418412875665",
354
+ "extra_special_token_347": "718951851413",
355
+ "extra_special_token_348": "699446724178",
356
+ "extra_special_token_349": "624266421831",
357
+ "extra_special_token_350": "815458725125",
358
+ "extra_special_token_351": "455423278865",
359
+ "extra_special_token_352": "393741199486",
360
+ "extra_special_token_353": "328552864359",
361
+ "extra_special_token_354": "211662639865",
362
+ "extra_special_token_355": "218784516525",
363
+ "extra_special_token_356": "762486672996",
364
+ "extra_special_token_357": "142799718159",
365
+ "extra_special_token_358": "858146415154",
366
+ "extra_special_token_359": "767858144912",
367
+ "extra_special_token_360": "571317457151",
368
+ "extra_special_token_361": "635127952696",
369
+ "extra_special_token_362": "116427191984",
370
+ "extra_special_token_363": "268921994538",
371
+ "extra_special_token_364": "523937669294",
372
+ "extra_special_token_365": "165429152138",
373
+ "extra_special_token_366": "739246183345",
374
+ "extra_special_token_367": "591464355756",
375
+ "extra_special_token_368": "212985874612",
376
+ "extra_special_token_369": "191887635211",
377
+ "extra_special_token_370": "967214577653",
378
+ "extra_special_token_371": "119342152414",
379
+ "extra_special_token_372": "946444632795",
380
+ "extra_special_token_373": "618423867817",
381
+ "extra_special_token_374": "228565148417",
382
+ "extra_special_token_375": "729116422489",
383
+ "extra_special_token_376": "527874729936",
384
+ "extra_special_token_377": "739784153482",
385
+ "extra_special_token_378": "387763951128",
386
+ "extra_special_token_379": "331369926711",
387
+ "extra_special_token_380": "562716493614",
388
+ "extra_special_token_381": "739667844957",
389
+ "extra_special_token_382": "562389434565",
390
+ "extra_special_token_383": "256497188281",
391
+ "extra_special_token_384": "859927364588",
392
+ "extra_special_token_385": "417668946583",
393
+ "extra_special_token_386": "357621613582",
394
+ "extra_special_token_387": "438435178228",
395
+ "extra_special_token_388": "485692541169",
396
+ "extra_special_token_389": "825815739116",
397
+ "extra_special_token_390": "342221452223",
398
+ "extra_special_token_391": "697747991249",
399
+ "extra_special_token_392": "716763689965",
400
+ "extra_special_token_393": "141499982867",
401
+ "extra_special_token_394": "818479319499",
402
+ "extra_special_token_395": "336813343298",
403
+ "extra_special_token_396": "594688742928",
404
+ "extra_special_token_397": "472129283475",
405
+ "extra_special_token_398": "514354144759",
406
+ "extra_special_token_399": "349249721685",
407
+ "extra_special_token_400": "546276298359",
408
+ "extra_special_token_401": "353755529131",
409
+ "extra_special_token_402": "315534574435",
410
+ "extra_special_token_403": "523723475786",
411
+ "extra_special_token_404": "215826764872",
412
+ "extra_special_token_405": "367968398551",
413
+ "extra_special_token_406": "569853653352",
414
+ "extra_special_token_407": "389715484387",
415
+ "extra_special_token_408": "293847485454",
416
+ "extra_special_token_409": "714738141818",
417
+ "extra_special_token_410": "178478368922",
418
+ "extra_special_token_411": "581493616981",
419
+ "extra_special_token_412": "589439538674",
420
+ "extra_special_token_413": "846657726193",
421
+ "extra_special_token_414": "722339992679",
422
+ "extra_special_token_415": "138154781148",
423
+ "extra_special_token_416": "757785319772",
424
+ "extra_special_token_417": "492516914298",
425
+ "extra_special_token_418": "919181521716",
426
+ "extra_special_token_419": "985781138935",
427
+ "extra_special_token_420": "476969195485",
428
+ "extra_special_token_421": "313145133463",
429
+ "extra_special_token_422": "758963111966",
430
+ "extra_special_token_423": "147541537162",
431
+ "extra_special_token_424": "557163366873",
432
+ "extra_special_token_425": "144373897488",
433
+ "extra_special_token_426": "522515164754",
434
+ "extra_special_token_427": "724964923582",
435
+ "extra_special_token_428": "284776712475",
436
+ "extra_special_token_429": "375429755114",
437
+ "extra_special_token_430": "181233596124",
438
+ "extra_special_token_431": "948585673431",
439
+ "extra_special_token_432": "243165586174",
440
+ "extra_special_token_433": "396847976144",
441
+ "extra_special_token_434": "997724962668",
442
+ "extra_special_token_435": "558837194455",
443
+ "extra_special_token_436": "163165456396",
444
+ "extra_special_token_437": "378749551722",
445
+ "extra_special_token_438": "161238482259",
446
+ "extra_special_token_439": "754978243758",
447
+ "extra_special_token_440": "195388849133",
448
+ "extra_special_token_441": "229775525672",
449
+ "extra_special_token_442": "262437452884",
450
+ "extra_special_token_443": "441377892146",
451
+ "extra_special_token_444": "451885565366",
452
+ "extra_special_token_445": "981277526855",
453
+ "extra_special_token_446": "762495822823",
454
+ "extra_special_token_447": "368763327262",
455
+ "extra_special_token_448": "757422791351",
456
+ "extra_special_token_449": "636324136426",
457
+ "extra_special_token_450": "214193645583",
458
+ "extra_special_token_451": "412843856172",
459
+ "extra_special_token_452": "179386156569",
460
+ "extra_special_token_453": "756916173536",
461
+ "extra_special_token_454": "892697125149",
462
+ "extra_special_token_455": "625334487352",
463
+ "extra_special_token_456": "941861857715",
464
+ "extra_special_token_457": "887417525236",
465
+ "extra_special_token_458": "649516938598",
466
+ "extra_special_token_459": "717628619782",
467
+ "extra_special_token_460": "438124184139",
468
+ "extra_special_token_461": "547563892268",
469
+ "extra_special_token_462": "856317483891",
470
+ "extra_special_token_463": "313313831273",
471
+ "extra_special_token_464": "371496153876",
472
+ "extra_special_token_465": "587541149322",
473
+ "extra_special_token_466": "265847332563",
474
+ "extra_special_token_467": "449549215429",
475
+ "extra_special_token_468": "163497196769",
476
+ "extra_special_token_469": "861342291298",
477
+ "extra_special_token_470": "268433315926",
478
+ "extra_special_token_471": "774679513717",
479
+ "extra_special_token_472": "851254219729",
480
+ "extra_special_token_473": "583527834464",
481
+ "extra_special_token_474": "488496781997",
482
+ "extra_special_token_475": "556814553861",
483
+ "extra_special_token_476": "482829231639",
484
+ "extra_special_token_477": "618878266619",
485
+ "extra_special_token_478": "147444452794",
486
+ "extra_special_token_479": "949235426629",
487
+ "extra_special_token_480": "357299947518",
488
+ "extra_special_token_481": "175528632226",
489
+ "extra_special_token_482": "645527857972",
490
+ "extra_special_token_483": "186872457894",
491
+ "extra_special_token_484": "552738847828",
492
+ "extra_special_token_485": "626748382482",
493
+ "extra_special_token_486": "921894985642",
494
+ "extra_special_token_487": "943878645871",
495
+ "extra_special_token_488": "859289776479",
496
+ "extra_special_token_489": "614583493135",
497
+ "extra_special_token_490": "933775286797",
498
+ "extra_special_token_491": "332234613346",
499
+ "extra_special_token_492": "325196781219",
500
+ "extra_special_token_493": "142526557681",
501
+ "extra_special_token_494": "356722692178",
502
+ "extra_special_token_495": "449318681694",
503
+ "extra_special_token_496": "687284547244",
504
+ "extra_special_token_497": "947262995132",
505
+ "extra_special_token_498": "893974619684",
506
+ "extra_special_token_499": "797238311233"
507
+ },
508
+ "is_local": false,
509
+ "model_input_names": [
510
+ "input_ids",
511
+ "attention_mask"
512
+ ],
513
+ "model_max_length": 131072,
514
+ "pad_token": "<|end_of_text|>",
515
+ "tokenizer_class": "PreTrainedTokenizerFast"
516
+ }
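The 500 `extra_special_token_*` entries above all map to twelve-digit numeric strings, which matches the canary-style secrets tracked elsewhere in this upload (`canary_meta.json`, `audit_scores.npz`), so they are presumably the audit canaries registered as special tokens. A minimal sketch (untested; the path follows this repo's `llama3.2-3b/dp3/tokenizer` layout) to load the saved tokenizer and sanity-check the config values shown in the diff:

# Sketch: load the saved tokenizer and confirm the config above.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("llama3.2-3b/dp3/tokenizer")

assert tok.model_max_length == 131072
assert tok.pad_token == "<|end_of_text|>"

canary = "799364566435"  # value of extra_special_token_152 above
tid = tok.convert_tokens_to_ids(canary)
print(canary, "->", tid)  # a single ID if the string was added to the vocab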
llama3.2-3b/dp3/train.log ADDED
@@ -0,0 +1,276 @@
1
+ 2026-03-29 12:39:27,864 [INFO] new_opacus_codex.train_steps: epoch=1 step=5 loss=2.1414
2
+ 2026-03-29 12:40:01,880 [INFO] new_opacus_codex.train_steps: epoch=1 step=10 loss=2.0940
3
+ 2026-03-29 12:40:09,843 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=10 eval_loss=1.0784 duration_sec=7.95
4
+ 2026-03-29 12:40:44,766 [INFO] new_opacus_codex.train_steps: epoch=1 step=15 loss=2.0756
5
+ 2026-03-29 12:41:17,942 [INFO] new_opacus_codex.train_steps: epoch=1 step=20 loss=2.0796
6
+ 2026-03-29 12:41:25,920 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=20 eval_loss=1.0784 duration_sec=7.79
7
+ 2026-03-29 12:42:00,137 [INFO] new_opacus_codex.train_steps: epoch=1 step=25 loss=2.2617
8
+ 2026-03-29 12:42:34,186 [INFO] new_opacus_codex.train_steps: epoch=1 step=30 loss=2.2423
9
+ 2026-03-29 12:42:42,133 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=30 eval_loss=1.0789 duration_sec=7.88
10
+ 2026-03-29 12:43:15,416 [INFO] new_opacus_codex.train_steps: epoch=1 step=35 loss=1.9424
11
+ 2026-03-29 12:43:49,138 [INFO] new_opacus_codex.train_steps: epoch=1 step=40 loss=2.1154
12
+ 2026-03-29 12:43:57,095 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=40 eval_loss=1.0802 duration_sec=7.90
13
+ 2026-03-29 12:44:31,313 [INFO] new_opacus_codex.train_steps: epoch=1 step=45 loss=2.3548
14
+ 2026-03-29 12:45:04,739 [INFO] new_opacus_codex.train_steps: epoch=1 step=50 loss=2.0867
15
+ 2026-03-29 12:45:12,740 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=50 eval_loss=1.0827 duration_sec=7.84
16
+ 2026-03-29 12:45:46,338 [INFO] new_opacus_codex.train_steps: epoch=1 step=55 loss=1.8560
17
+ 2026-03-29 12:46:19,880 [INFO] new_opacus_codex.train_steps: epoch=1 step=60 loss=2.0129
18
+ 2026-03-29 12:46:27,868 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=60 eval_loss=1.0857 duration_sec=7.93
19
+ 2026-03-29 12:47:01,521 [INFO] new_opacus_codex.train_steps: epoch=1 step=65 loss=2.2698
20
+ 2026-03-29 12:47:35,636 [INFO] new_opacus_codex.train_steps: epoch=1 step=70 loss=2.3539
21
+ 2026-03-29 12:47:43,594 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=70 eval_loss=1.0888 duration_sec=7.79
22
+ 2026-03-29 12:48:18,525 [INFO] new_opacus_codex.train_steps: epoch=1 step=75 loss=2.4839
23
+ 2026-03-29 12:48:52,796 [INFO] new_opacus_codex.train_steps: epoch=1 step=80 loss=2.3510
24
+ 2026-03-29 12:49:00,741 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=80 eval_loss=1.0919 duration_sec=7.91
25
+ 2026-03-29 12:49:34,712 [INFO] new_opacus_codex.train_steps: epoch=1 step=85 loss=2.0127
26
+ 2026-03-29 12:50:09,160 [INFO] new_opacus_codex.train_steps: epoch=1 step=90 loss=2.1534
27
+ 2026-03-29 12:50:17,110 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=90 eval_loss=1.0952 duration_sec=7.90
28
+ 2026-03-29 12:50:50,303 [INFO] new_opacus_codex.train_steps: epoch=1 step=95 loss=2.3404
29
+ 2026-03-29 12:51:23,782 [INFO] new_opacus_codex.train_steps: epoch=1 step=100 loss=2.3978
30
+ 2026-03-29 12:51:31,820 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=100 eval_loss=1.0988 duration_sec=7.81
31
+ 2026-03-29 12:52:06,986 [INFO] new_opacus_codex.train_steps: epoch=1 step=105 loss=2.4941
32
+ 2026-03-29 12:52:40,055 [INFO] new_opacus_codex.train_steps: epoch=1 step=110 loss=2.4085
33
+ 2026-03-29 12:52:48,064 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=110 eval_loss=1.1025 duration_sec=7.89
34
+ 2026-03-29 12:53:22,691 [INFO] new_opacus_codex.train_steps: epoch=1 step=115 loss=2.0873
35
+ 2026-03-29 12:53:56,595 [INFO] new_opacus_codex.train_steps: epoch=1 step=120 loss=2.1171
36
+ 2026-03-29 12:54:04,559 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=120 eval_loss=1.1062 duration_sec=7.87
37
+ 2026-03-29 12:54:37,608 [INFO] new_opacus_codex.train_steps: epoch=1 step=125 loss=2.3034
38
+ 2026-03-29 12:55:11,551 [INFO] new_opacus_codex.train_steps: epoch=1 step=130 loss=2.0379
39
+ 2026-03-29 12:55:19,479 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=130 eval_loss=1.1096 duration_sec=7.90
40
+ 2026-03-29 12:55:52,702 [INFO] new_opacus_codex.train_steps: epoch=1 step=135 loss=2.0551
41
+ 2026-03-29 12:56:25,250 [INFO] new_opacus_codex.train_steps: epoch=1 step=140 loss=2.3071
42
+ 2026-03-29 12:56:33,189 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=140 eval_loss=1.1129 duration_sec=7.84
43
+ 2026-03-29 12:57:06,692 [INFO] new_opacus_codex.train_steps: epoch=1 step=145 loss=2.1510
44
+ 2026-03-29 12:57:40,535 [INFO] new_opacus_codex.train_steps: epoch=1 step=150 loss=2.1629
45
+ 2026-03-29 12:57:48,568 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=150 eval_loss=1.1161 duration_sec=7.85
46
+ 2026-03-29 12:58:21,507 [INFO] new_opacus_codex.train_steps: epoch=1 step=155 loss=1.9640
47
+ 2026-03-29 12:58:53,905 [INFO] new_opacus_codex.train_steps: epoch=1 step=160 loss=1.7805
48
+ 2026-03-29 12:59:01,923 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=160 eval_loss=1.1192 duration_sec=7.97
49
+ 2026-03-29 12:59:36,090 [INFO] new_opacus_codex.train_steps: epoch=1 step=165 loss=2.2389
50
+ 2026-03-29 13:00:09,429 [INFO] new_opacus_codex.train_steps: epoch=1 step=170 loss=2.2683
51
+ 2026-03-29 13:00:17,439 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=170 eval_loss=1.1222 duration_sec=7.78
52
+ 2026-03-29 13:00:50,770 [INFO] new_opacus_codex.train_steps: epoch=1 step=175 loss=1.9595
53
+ 2026-03-29 13:01:25,497 [INFO] new_opacus_codex.train_steps: epoch=1 step=180 loss=2.2240
54
+ 2026-03-29 13:01:33,517 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=1 step=180 eval_loss=1.1253 duration_sec=7.88
55
+ 2026-03-29 13:02:21,555 [INFO] new_opacus_codex.train_steps: epoch=2 step=185 loss=2.4148
56
+ 2026-03-29 13:02:55,969 [INFO] new_opacus_codex.train_steps: epoch=2 step=190 loss=2.4414
57
+ 2026-03-29 13:03:03,929 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=190 eval_loss=1.1283 duration_sec=7.95
58
+ 2026-03-29 13:03:37,940 [INFO] new_opacus_codex.train_steps: epoch=2 step=195 loss=2.3856
59
+ 2026-03-29 13:04:11,891 [INFO] new_opacus_codex.train_steps: epoch=2 step=200 loss=2.1155
60
+ 2026-03-29 13:04:19,856 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=200 eval_loss=1.1310 duration_sec=7.95
61
+ 2026-03-29 13:04:53,144 [INFO] new_opacus_codex.train_steps: epoch=2 step=205 loss=1.9992
62
+ 2026-03-29 13:05:26,287 [INFO] new_opacus_codex.train_steps: epoch=2 step=210 loss=2.0431
63
+ 2026-03-29 13:05:34,293 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=210 eval_loss=1.1335 duration_sec=7.82
64
+ 2026-03-29 13:06:08,406 [INFO] new_opacus_codex.train_steps: epoch=2 step=215 loss=2.1301
65
+ 2026-03-29 13:06:42,393 [INFO] new_opacus_codex.train_steps: epoch=2 step=220 loss=2.2434
66
+ 2026-03-29 13:06:50,416 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=220 eval_loss=1.1358 duration_sec=7.96
67
+ 2026-03-29 13:07:23,437 [INFO] new_opacus_codex.train_steps: epoch=2 step=225 loss=2.0532
68
+ 2026-03-29 13:07:56,869 [INFO] new_opacus_codex.train_steps: epoch=2 step=230 loss=2.0523
69
+ 2026-03-29 13:08:04,834 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=230 eval_loss=1.1385 duration_sec=7.81
70
+ 2026-03-29 13:08:37,635 [INFO] new_opacus_codex.train_steps: epoch=2 step=235 loss=2.3411
71
+ 2026-03-29 13:09:11,339 [INFO] new_opacus_codex.train_steps: epoch=2 step=240 loss=2.3991
72
+ 2026-03-29 13:09:19,277 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=240 eval_loss=1.1411 duration_sec=7.89
73
+ 2026-03-29 13:09:53,518 [INFO] new_opacus_codex.train_steps: epoch=2 step=245 loss=2.1324
74
+ 2026-03-29 13:10:26,705 [INFO] new_opacus_codex.train_steps: epoch=2 step=250 loss=2.1331
75
+ 2026-03-29 13:10:34,642 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=250 eval_loss=1.1440 duration_sec=7.92
76
+ 2026-03-29 13:11:08,516 [INFO] new_opacus_codex.train_steps: epoch=2 step=255 loss=2.1302
77
+ 2026-03-29 13:11:41,253 [INFO] new_opacus_codex.train_steps: epoch=2 step=260 loss=1.9438
78
+ 2026-03-29 13:11:49,237 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=260 eval_loss=1.1473 duration_sec=7.84
79
+ 2026-03-29 13:12:23,310 [INFO] new_opacus_codex.train_steps: epoch=2 step=265 loss=2.0245
80
+ 2026-03-29 13:12:55,960 [INFO] new_opacus_codex.train_steps: epoch=2 step=270 loss=2.1532
81
+ 2026-03-29 13:13:03,973 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=270 eval_loss=1.1500 duration_sec=7.87
82
+ 2026-03-29 13:13:37,437 [INFO] new_opacus_codex.train_steps: epoch=2 step=275 loss=2.1889
83
+ 2026-03-29 13:14:12,789 [INFO] new_opacus_codex.train_steps: epoch=2 step=280 loss=2.1849
84
+ 2026-03-29 13:14:20,783 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=280 eval_loss=1.1525 duration_sec=7.94
85
+ 2026-03-29 13:14:53,675 [INFO] new_opacus_codex.train_steps: epoch=2 step=285 loss=2.1921
86
+ 2026-03-29 13:15:26,893 [INFO] new_opacus_codex.train_steps: epoch=2 step=290 loss=2.1573
87
+ 2026-03-29 13:15:34,867 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=290 eval_loss=1.1548 duration_sec=7.93
88
+ 2026-03-29 13:16:08,739 [INFO] new_opacus_codex.train_steps: epoch=2 step=295 loss=2.1582
89
+ 2026-03-29 13:16:42,094 [INFO] new_opacus_codex.train_steps: epoch=2 step=300 loss=2.3217
90
+ 2026-03-29 13:16:50,139 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=300 eval_loss=1.1567 duration_sec=7.88
91
+ 2026-03-29 13:17:23,980 [INFO] new_opacus_codex.train_steps: epoch=2 step=305 loss=2.3762
92
+ 2026-03-29 13:17:58,987 [INFO] new_opacus_codex.train_steps: epoch=2 step=310 loss=2.3694
93
+ 2026-03-29 13:18:06,954 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=310 eval_loss=1.1586 duration_sec=7.91
94
+ 2026-03-29 13:18:41,110 [INFO] new_opacus_codex.train_steps: epoch=2 step=315 loss=2.3544
95
+ 2026-03-29 13:19:15,806 [INFO] new_opacus_codex.train_steps: epoch=2 step=320 loss=2.4294
96
+ 2026-03-29 13:19:23,789 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=320 eval_loss=1.1607 duration_sec=7.81
97
+ 2026-03-29 13:19:56,878 [INFO] new_opacus_codex.train_steps: epoch=2 step=325 loss=2.3388
98
+ 2026-03-29 13:20:30,271 [INFO] new_opacus_codex.train_steps: epoch=2 step=330 loss=2.2566
99
+ 2026-03-29 13:20:38,306 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=330 eval_loss=1.1628 duration_sec=7.94
100
+ 2026-03-29 13:21:11,577 [INFO] new_opacus_codex.train_steps: epoch=2 step=335 loss=2.1189
101
+ 2026-03-29 13:21:45,296 [INFO] new_opacus_codex.train_steps: epoch=2 step=340 loss=1.8544
102
+ 2026-03-29 13:21:53,267 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=340 eval_loss=1.1646 duration_sec=7.84
103
+ 2026-03-29 13:22:26,996 [INFO] new_opacus_codex.train_steps: epoch=2 step=345 loss=1.7834
104
+ 2026-03-29 13:23:00,896 [INFO] new_opacus_codex.train_steps: epoch=2 step=350 loss=1.9495
105
+ 2026-03-29 13:23:08,927 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=350 eval_loss=1.1667 duration_sec=8.02
106
+ 2026-03-29 13:23:42,585 [INFO] new_opacus_codex.train_steps: epoch=2 step=355 loss=2.3319
107
+ 2026-03-29 13:24:15,880 [INFO] new_opacus_codex.train_steps: epoch=2 step=360 loss=2.4031
108
+ 2026-03-29 13:24:24,580 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=2 step=360 eval_loss=1.1685 duration_sec=8.68
109
+ 2026-03-29 13:24:57,742 [INFO] new_opacus_codex.train_steps: epoch=2 step=365 loss=2.1965
110
+ 2026-03-29 13:25:47,343 [INFO] new_opacus_codex.train_steps: epoch=3 step=370 loss=2.1625
111
+ 2026-03-29 13:25:55,316 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=370 eval_loss=1.1702 duration_sec=7.96
112
+ 2026-03-29 13:26:29,694 [INFO] new_opacus_codex.train_steps: epoch=3 step=375 loss=1.8703
113
+ 2026-03-29 13:27:04,073 [INFO] new_opacus_codex.train_steps: epoch=3 step=380 loss=2.0050
114
+ 2026-03-29 13:27:12,034 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=380 eval_loss=1.1715 duration_sec=7.95
115
+ 2026-03-29 13:27:45,924 [INFO] new_opacus_codex.train_steps: epoch=3 step=385 loss=2.2484
116
+ 2026-03-29 13:28:20,162 [INFO] new_opacus_codex.train_steps: epoch=3 step=390 loss=2.2221
117
+ 2026-03-29 13:28:28,088 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=390 eval_loss=1.1731 duration_sec=7.92
118
+ 2026-03-29 13:29:02,834 [INFO] new_opacus_codex.train_steps: epoch=3 step=395 loss=2.0620
119
+ 2026-03-29 13:29:36,422 [INFO] new_opacus_codex.train_steps: epoch=3 step=400 loss=1.9724
120
+ 2026-03-29 13:29:44,391 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=400 eval_loss=1.1747 duration_sec=7.96
121
+ 2026-03-29 13:30:18,101 [INFO] new_opacus_codex.train_steps: epoch=3 step=405 loss=1.9960
122
+ 2026-03-29 13:30:52,550 [INFO] new_opacus_codex.train_steps: epoch=3 step=410 loss=2.1541
123
+ 2026-03-29 13:31:00,485 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=410 eval_loss=1.1764 duration_sec=7.92
124
+ 2026-03-29 13:31:33,700 [INFO] new_opacus_codex.train_steps: epoch=3 step=415 loss=2.1045
125
+ 2026-03-29 13:32:08,001 [INFO] new_opacus_codex.train_steps: epoch=3 step=420 loss=2.0444
126
+ 2026-03-29 13:32:15,974 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=420 eval_loss=1.1781 duration_sec=7.96
127
+ 2026-03-29 13:32:50,945 [INFO] new_opacus_codex.train_steps: epoch=3 step=425 loss=2.0840
128
+ 2026-03-29 13:33:24,966 [INFO] new_opacus_codex.train_steps: epoch=3 step=430 loss=2.0955
129
+ 2026-03-29 13:33:32,942 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=430 eval_loss=1.1796 duration_sec=7.96
130
+ 2026-03-29 13:34:06,722 [INFO] new_opacus_codex.train_steps: epoch=3 step=435 loss=2.0477
131
+ 2026-03-29 13:34:40,134 [INFO] new_opacus_codex.train_steps: epoch=3 step=440 loss=1.8266
132
+ 2026-03-29 13:34:48,160 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=440 eval_loss=1.1808 duration_sec=8.01
133
+ 2026-03-29 13:35:22,713 [INFO] new_opacus_codex.train_steps: epoch=3 step=445 loss=1.9006
134
+ 2026-03-29 13:35:55,946 [INFO] new_opacus_codex.train_steps: epoch=3 step=450 loss=2.1128
135
+ 2026-03-29 13:36:03,895 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=450 eval_loss=1.1820 duration_sec=7.93
136
+ 2026-03-29 13:36:37,489 [INFO] new_opacus_codex.train_steps: epoch=3 step=455 loss=2.3267
137
+ 2026-03-29 13:37:11,357 [INFO] new_opacus_codex.train_steps: epoch=3 step=460 loss=2.1922
138
+ 2026-03-29 13:37:19,307 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=460 eval_loss=1.1831 duration_sec=7.94
139
+ 2026-03-29 13:37:52,494 [INFO] new_opacus_codex.train_steps: epoch=3 step=465 loss=2.0772
140
+ 2026-03-29 13:38:26,412 [INFO] new_opacus_codex.train_steps: epoch=3 step=470 loss=2.1971
141
+ 2026-03-29 13:38:34,364 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=470 eval_loss=1.1843 duration_sec=7.93
142
+ 2026-03-29 13:39:07,045 [INFO] new_opacus_codex.train_steps: epoch=3 step=475 loss=2.1865
143
+ 2026-03-29 13:39:40,983 [INFO] new_opacus_codex.train_steps: epoch=3 step=480 loss=2.1851
144
+ 2026-03-29 13:39:48,962 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=480 eval_loss=1.1856 duration_sec=7.97
145
+ 2026-03-29 13:40:22,478 [INFO] new_opacus_codex.train_steps: epoch=3 step=485 loss=2.0159
146
+ 2026-03-29 13:40:55,888 [INFO] new_opacus_codex.train_steps: epoch=3 step=490 loss=2.0555
147
+ 2026-03-29 13:41:03,864 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=490 eval_loss=1.1865 duration_sec=7.96
148
+ 2026-03-29 13:41:38,058 [INFO] new_opacus_codex.train_steps: epoch=3 step=495 loss=2.3790
149
+ 2026-03-29 13:42:11,961 [INFO] new_opacus_codex.train_steps: epoch=3 step=500 loss=2.4352
150
+ 2026-03-29 13:42:19,917 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=500 eval_loss=1.1875 duration_sec=7.94
151
+ 2026-03-29 13:42:53,983 [INFO] new_opacus_codex.train_steps: epoch=3 step=505 loss=2.2263
152
+ 2026-03-29 13:43:27,194 [INFO] new_opacus_codex.train_steps: epoch=3 step=510 loss=2.2148
153
+ 2026-03-29 13:43:35,160 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=510 eval_loss=1.1884 duration_sec=7.95
154
+ 2026-03-29 13:44:09,130 [INFO] new_opacus_codex.train_steps: epoch=3 step=515 loss=2.0884
155
+ 2026-03-29 13:44:43,191 [INFO] new_opacus_codex.train_steps: epoch=3 step=520 loss=2.1071
156
+ 2026-03-29 13:44:51,174 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=520 eval_loss=1.1890 duration_sec=7.97
157
+ 2026-03-29 13:45:23,901 [INFO] new_opacus_codex.train_steps: epoch=3 step=525 loss=2.0753
158
+ 2026-03-29 13:45:58,928 [INFO] new_opacus_codex.train_steps: epoch=3 step=530 loss=1.9215
159
+ 2026-03-29 13:46:06,944 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=530 eval_loss=1.1897 duration_sec=8.00
160
+ 2026-03-29 13:46:41,119 [INFO] new_opacus_codex.train_steps: epoch=3 step=535 loss=2.3334
161
+ 2026-03-29 13:47:14,804 [INFO] new_opacus_codex.train_steps: epoch=3 step=540 loss=2.3198
162
+ 2026-03-29 13:47:22,777 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=540 eval_loss=1.1905 duration_sec=7.95
163
+ 2026-03-29 13:47:56,709 [INFO] new_opacus_codex.train_steps: epoch=3 step=545 loss=2.1973
164
+ 2026-03-29 13:48:31,081 [INFO] new_opacus_codex.train_steps: epoch=3 step=550 loss=2.2243
165
+ 2026-03-29 13:48:39,050 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=3 step=550 eval_loss=1.1910 duration_sec=7.96
166
+ 2026-03-29 13:49:29,053 [INFO] new_opacus_codex.train_steps: epoch=4 step=555 loss=1.8517
167
+ 2026-03-29 13:50:03,330 [INFO] new_opacus_codex.train_steps: epoch=4 step=560 loss=1.9152
168
+ 2026-03-29 13:50:11,380 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=560 eval_loss=1.1917 duration_sec=8.03
169
+ 2026-03-29 13:50:45,732 [INFO] new_opacus_codex.train_steps: epoch=4 step=565 loss=2.0430
170
+ 2026-03-29 13:51:20,091 [INFO] new_opacus_codex.train_steps: epoch=4 step=570 loss=2.1685
171
+ 2026-03-29 13:51:28,085 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=570 eval_loss=1.1922 duration_sec=7.98
172
+ 2026-03-29 13:52:02,617 [INFO] new_opacus_codex.train_steps: epoch=4 step=575 loss=2.1379
173
+ 2026-03-29 13:52:36,780 [INFO] new_opacus_codex.train_steps: epoch=4 step=580 loss=2.0458
174
+ 2026-03-29 13:52:44,757 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=580 eval_loss=1.1927 duration_sec=7.97
175
+ 2026-03-29 13:53:20,549 [INFO] new_opacus_codex.train_steps: epoch=4 step=585 loss=2.1790
176
+ 2026-03-29 13:53:53,775 [INFO] new_opacus_codex.train_steps: epoch=4 step=590 loss=2.3529
177
+ 2026-03-29 13:54:01,755 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=590 eval_loss=1.1931 duration_sec=7.97
178
+ 2026-03-29 13:54:35,856 [INFO] new_opacus_codex.train_steps: epoch=4 step=595 loss=2.2772
179
+ 2026-03-29 13:55:10,936 [INFO] new_opacus_codex.train_steps: epoch=4 step=600 loss=2.0523
180
+ 2026-03-29 13:55:18,929 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=600 eval_loss=1.1934 duration_sec=7.98
181
+ 2026-03-29 13:55:52,364 [INFO] new_opacus_codex.train_steps: epoch=4 step=605 loss=2.1497
182
+ 2026-03-29 13:56:26,676 [INFO] new_opacus_codex.train_steps: epoch=4 step=610 loss=2.2643
183
+ 2026-03-29 13:56:34,691 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=610 eval_loss=1.1937 duration_sec=7.92
184
+ 2026-03-29 13:57:09,730 [INFO] new_opacus_codex.train_steps: epoch=4 step=615 loss=2.1950
185
+ 2026-03-29 13:57:43,758 [INFO] new_opacus_codex.train_steps: epoch=4 step=620 loss=2.2009
186
+ 2026-03-29 13:57:51,762 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=620 eval_loss=1.1941 duration_sec=7.98
187
+ 2026-03-29 13:58:25,990 [INFO] new_opacus_codex.train_steps: epoch=4 step=625 loss=2.2490
188
+ 2026-03-29 13:59:01,048 [INFO] new_opacus_codex.train_steps: epoch=4 step=630 loss=2.4352
189
+ 2026-03-29 13:59:09,039 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=630 eval_loss=1.1942 duration_sec=7.98
190
+ 2026-03-29 13:59:44,794 [INFO] new_opacus_codex.train_steps: epoch=4 step=635 loss=2.3730
191
+ 2026-03-29 14:00:18,418 [INFO] new_opacus_codex.train_steps: epoch=4 step=640 loss=2.2074
192
+ 2026-03-29 14:00:26,439 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=640 eval_loss=1.1943 duration_sec=8.01
193
+ 2026-03-29 14:01:00,983 [INFO] new_opacus_codex.train_steps: epoch=4 step=645 loss=2.1818
194
+ 2026-03-29 14:01:35,856 [INFO] new_opacus_codex.train_steps: epoch=4 step=650 loss=2.0930
195
+ 2026-03-29 14:01:43,862 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=650 eval_loss=1.1943 duration_sec=7.99
196
+ 2026-03-29 14:02:17,350 [INFO] new_opacus_codex.train_steps: epoch=4 step=655 loss=2.1313
197
+ 2026-03-29 14:02:52,859 [INFO] new_opacus_codex.train_steps: epoch=4 step=660 loss=2.0583
198
+ 2026-03-29 14:03:00,854 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=660 eval_loss=1.1943 duration_sec=7.98
199
+ 2026-03-29 14:03:34,428 [INFO] new_opacus_codex.train_steps: epoch=4 step=665 loss=2.1132
200
+ 2026-03-29 14:04:08,406 [INFO] new_opacus_codex.train_steps: epoch=4 step=670 loss=2.2821
201
+ 2026-03-29 14:04:16,397 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=670 eval_loss=1.1944 duration_sec=7.98
202
+ 2026-03-29 14:04:50,679 [INFO] new_opacus_codex.train_steps: epoch=4 step=675 loss=2.2101
203
+ 2026-03-29 14:05:24,988 [INFO] new_opacus_codex.train_steps: epoch=4 step=680 loss=2.0254
204
+ 2026-03-29 14:05:32,976 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=680 eval_loss=1.1945 duration_sec=7.98
205
+ 2026-03-29 14:06:07,847 [INFO] new_opacus_codex.train_steps: epoch=4 step=685 loss=1.8727
206
+ 2026-03-29 14:06:43,386 [INFO] new_opacus_codex.train_steps: epoch=4 step=690 loss=2.0265
207
+ 2026-03-29 14:06:51,377 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=690 eval_loss=1.1945 duration_sec=7.98
208
+ 2026-03-29 14:07:27,308 [INFO] new_opacus_codex.train_steps: epoch=4 step=695 loss=2.2352
209
+ 2026-03-29 14:08:01,886 [INFO] new_opacus_codex.train_steps: epoch=4 step=700 loss=2.1520
210
+ 2026-03-29 14:08:09,880 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=700 eval_loss=1.1945 duration_sec=7.98
211
+ 2026-03-29 14:08:43,870 [INFO] new_opacus_codex.train_steps: epoch=4 step=705 loss=2.0136
212
+ 2026-03-29 14:09:17,957 [INFO] new_opacus_codex.train_steps: epoch=4 step=710 loss=2.2308
213
+ 2026-03-29 14:09:25,966 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=710 eval_loss=1.1946 duration_sec=7.99
214
+ 2026-03-29 14:09:59,592 [INFO] new_opacus_codex.train_steps: epoch=4 step=715 loss=2.1821
215
+ 2026-03-29 14:10:35,267 [INFO] new_opacus_codex.train_steps: epoch=4 step=720 loss=2.1633
216
+ 2026-03-29 14:10:43,246 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=720 eval_loss=1.1947 duration_sec=7.97
217
+ 2026-03-29 14:11:20,177 [INFO] new_opacus_codex.train_steps: epoch=4 step=725 loss=2.2183
218
+ 2026-03-29 14:11:56,044 [INFO] new_opacus_codex.train_steps: epoch=4 step=730 loss=2.1375
219
+ 2026-03-29 14:12:04,251 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=4 step=730 eval_loss=1.1949 duration_sec=7.84
220
+ 2026-03-29 14:12:40,126 [INFO] new_opacus_codex.train_steps: epoch=4 step=735 loss=2.1842
221
+ 2026-03-29 14:13:32,053 [INFO] new_opacus_codex.train_steps: epoch=5 step=740 loss=2.3384
222
+ 2026-03-29 14:13:40,081 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=740 eval_loss=1.1950 duration_sec=8.01
223
+ 2026-03-29 14:14:15,238 [INFO] new_opacus_codex.train_steps: epoch=5 step=745 loss=2.3813
224
+ 2026-03-29 14:14:51,241 [INFO] new_opacus_codex.train_steps: epoch=5 step=750 loss=2.1130
225
+ 2026-03-29 14:14:59,231 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=750 eval_loss=1.1951 duration_sec=7.96
226
+ 2026-03-29 14:15:35,345 [INFO] new_opacus_codex.train_steps: epoch=5 step=755 loss=1.9910
227
+ 2026-03-29 14:16:09,810 [INFO] new_opacus_codex.train_steps: epoch=5 step=760 loss=2.1842
228
+ 2026-03-29 14:16:17,902 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=760 eval_loss=1.1951 duration_sec=7.84
229
+ 2026-03-29 14:16:52,379 [INFO] new_opacus_codex.train_steps: epoch=5 step=765 loss=2.1585
230
+ 2026-03-29 14:17:26,949 [INFO] new_opacus_codex.train_steps: epoch=5 step=770 loss=2.0313
231
+ 2026-03-29 14:17:34,947 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=770 eval_loss=1.1952 duration_sec=7.98
232
+ 2026-03-29 14:18:08,180 [INFO] new_opacus_codex.train_steps: epoch=5 step=775 loss=2.1541
233
+ 2026-03-29 14:18:43,753 [INFO] new_opacus_codex.train_steps: epoch=5 step=780 loss=2.3825
234
+ 2026-03-29 14:18:51,989 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=780 eval_loss=1.1953 duration_sec=7.89
235
+ 2026-03-29 14:19:27,412 [INFO] new_opacus_codex.train_steps: epoch=5 step=785 loss=2.2194
236
+ 2026-03-29 14:20:03,256 [INFO] new_opacus_codex.train_steps: epoch=5 step=790 loss=2.0934
237
+ 2026-03-29 14:20:11,715 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=790 eval_loss=1.1952 duration_sec=7.80
238
+ 2026-03-29 14:20:46,878 [INFO] new_opacus_codex.train_steps: epoch=5 step=795 loss=2.1842
239
+ 2026-03-29 14:21:21,078 [INFO] new_opacus_codex.train_steps: epoch=5 step=800 loss=2.3637
240
+ 2026-03-29 14:21:29,106 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=800 eval_loss=1.1952 duration_sec=7.93
241
+ 2026-03-29 14:22:03,022 [INFO] new_opacus_codex.train_steps: epoch=5 step=805 loss=2.4234
242
+ 2026-03-29 14:22:38,348 [INFO] new_opacus_codex.train_steps: epoch=5 step=810 loss=2.1847
243
+ 2026-03-29 14:22:46,333 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=810 eval_loss=1.1952 duration_sec=7.96
244
+ 2026-03-29 14:23:20,065 [INFO] new_opacus_codex.train_steps: epoch=5 step=815 loss=2.0008
245
+ 2026-03-29 14:23:55,785 [INFO] new_opacus_codex.train_steps: epoch=5 step=820 loss=1.9232
246
+ 2026-03-29 14:24:03,759 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=820 eval_loss=1.1952 duration_sec=7.94
247
+ 2026-03-29 14:24:38,636 [INFO] new_opacus_codex.train_steps: epoch=5 step=825 loss=1.9093
248
+ 2026-03-29 14:25:12,795 [INFO] new_opacus_codex.train_steps: epoch=5 step=830 loss=1.8947
249
+ 2026-03-29 14:25:21,162 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=830 eval_loss=1.1952 duration_sec=7.85
250
+ 2026-03-29 14:25:58,042 [INFO] new_opacus_codex.train_steps: epoch=5 step=835 loss=1.7821
251
+ 2026-03-29 14:26:31,862 [INFO] new_opacus_codex.train_steps: epoch=5 step=840 loss=1.9764
252
+ 2026-03-29 14:26:39,856 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=840 eval_loss=1.1952 duration_sec=7.95
253
+ 2026-03-29 14:27:14,664 [INFO] new_opacus_codex.train_steps: epoch=5 step=845 loss=2.0499
254
+ 2026-03-29 14:27:48,374 [INFO] new_opacus_codex.train_steps: epoch=5 step=850 loss=1.9743
255
+ 2026-03-29 14:27:56,451 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=850 eval_loss=1.1952 duration_sec=8.05
256
+ 2026-03-29 14:28:31,456 [INFO] new_opacus_codex.train_steps: epoch=5 step=855 loss=2.1148
257
+ 2026-03-29 14:29:05,979 [INFO] new_opacus_codex.train_steps: epoch=5 step=860 loss=2.0200
258
+ 2026-03-29 14:29:13,969 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=860 eval_loss=1.1952 duration_sec=7.98
259
+ 2026-03-29 14:29:48,267 [INFO] new_opacus_codex.train_steps: epoch=5 step=865 loss=1.8971
260
+ 2026-03-29 14:30:23,700 [INFO] new_opacus_codex.train_steps: epoch=5 step=870 loss=1.9099
261
+ 2026-03-29 14:30:31,754 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=870 eval_loss=1.1952 duration_sec=8.04
262
+ 2026-03-29 14:31:07,137 [INFO] new_opacus_codex.train_steps: epoch=5 step=875 loss=2.0294
263
+ 2026-03-29 14:31:42,611 [INFO] new_opacus_codex.train_steps: epoch=5 step=880 loss=2.1030
264
+ 2026-03-29 14:31:50,622 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=880 eval_loss=1.1953 duration_sec=8.00
265
+ 2026-03-29 14:32:25,268 [INFO] new_opacus_codex.train_steps: epoch=5 step=885 loss=2.1241
266
+ 2026-03-29 14:32:59,427 [INFO] new_opacus_codex.train_steps: epoch=5 step=890 loss=1.9822
267
+ 2026-03-29 14:33:07,429 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=890 eval_loss=1.1953 duration_sec=7.99
268
+ 2026-03-29 14:33:42,642 [INFO] new_opacus_codex.train_steps: epoch=5 step=895 loss=1.9325
269
+ 2026-03-29 14:34:16,546 [INFO] new_opacus_codex.train_steps: epoch=5 step=900 loss=2.1005
270
+ 2026-03-29 14:34:24,568 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=900 eval_loss=1.1953 duration_sec=7.84
271
+ 2026-03-29 14:34:58,651 [INFO] new_opacus_codex.train_steps: epoch=5 step=905 loss=2.3615
272
+ 2026-03-29 14:35:34,047 [INFO] new_opacus_codex.train_steps: epoch=5 step=910 loss=2.3325
273
+ 2026-03-29 14:35:42,014 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=910 eval_loss=1.1953 duration_sec=7.95
274
+ 2026-03-29 14:36:16,666 [INFO] new_opacus_codex.train_steps: epoch=5 step=915 loss=2.1500
275
+ 2026-03-29 14:36:45,051 [INFO] new_opacus_codex.train_steps: epoch=5 step=920 loss=2.5724
276
+ 2026-03-29 14:36:53,056 [INFO] new_opacus_codex.train_steps: eval event=eval_step epoch=5 step=920 eval_loss=1.1954 duration_sec=8.00
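The log format above is regular: training lines carry `epoch=<int> step=<int> loss=<float>`, and eval lines add `eval_loss=<float>` plus `duration_sec=<float>`. A small sketch (assuming only that layout, nothing from the training code) that parses the file into train/eval series, e.g. to plot the eval-loss plateau visible after roughly step 650:

# Sketch: parse the train.log layout shown above into two series.
import re

TRAIN_RE = re.compile(r"epoch=(\d+) step=(\d+) loss=([\d.]+)")
EVAL_RE = re.compile(r"step=(\d+) eval_loss=([\d.]+)")

train, evals = [], []
with open("llama3.2-3b/dp3/train.log") as fh:
    for line in fh:
        if "eval_loss=" in line:
            m = EVAL_RE.search(line)
            if m:
                evals.append((int(m.group(1)), float(m.group(2))))
        else:
            m = TRAIN_RE.search(line)
            if m:
                train.append((int(m.group(2)), float(m.group(3))))

print(f"{len(train)} train points, {len(evals)} eval points")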
llama3.2-3b/dp8/adapter/README.md ADDED
@@ -0,0 +1,207 @@
1
+ ---
2
+ base_model: meta-llama/Llama-3.2-3B
3
+ library_name: peft
4
+ pipeline_tag: text-generation
5
+ tags:
6
+ - base_model:adapter:meta-llama/Llama-3.2-3B
7
+ - lora
8
+ - transformers
9
+ ---
10
+
11
+ # Model Card for Model ID
12
+
13
+ <!-- Provide a quick summary of what the model is/does. -->
14
+
15
+
16
+
17
+ ## Model Details
18
+
19
+ ### Model Description
20
+
21
+ <!-- Provide a longer summary of what this model is. -->
22
+
23
+
24
+
25
+ - **Developed by:** [More Information Needed]
26
+ - **Funded by [optional]:** [More Information Needed]
27
+ - **Shared by [optional]:** [More Information Needed]
28
+ - **Model type:** [More Information Needed]
29
+ - **Language(s) (NLP):** [More Information Needed]
30
+ - **License:** [More Information Needed]
31
+ - **Finetuned from model [optional]:** [More Information Needed]
32
+
33
+ ### Model Sources [optional]
34
+
35
+ <!-- Provide the basic links for the model. -->
36
+
37
+ - **Repository:** [More Information Needed]
38
+ - **Paper [optional]:** [More Information Needed]
39
+ - **Demo [optional]:** [More Information Needed]
40
+
41
+ ## Uses
42
+
43
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
44
+
45
+ ### Direct Use
46
+
47
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
48
+
49
+ [More Information Needed]
50
+
51
+ ### Downstream Use [optional]
52
+
53
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
54
+
55
+ [More Information Needed]
56
+
57
+ ### Out-of-Scope Use
58
+
59
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
60
+
61
+ [More Information Needed]
62
+
63
+ ## Bias, Risks, and Limitations
64
+
65
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
66
+
67
+ [More Information Needed]
68
+
69
+ ### Recommendations
70
+
71
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
72
+
73
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
74
+
75
+ ## How to Get Started with the Model
76
+
77
+ Use the code below to get started with the model.
78
+
79
+ [More Information Needed]
80
+
81
+ ## Training Details
82
+
83
+ ### Training Data
84
+
85
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
86
+
87
+ [More Information Needed]
88
+
89
+ ### Training Procedure
90
+
91
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
92
+
93
+ #### Preprocessing [optional]
94
+
95
+ [More Information Needed]
96
+
97
+
98
+ #### Training Hyperparameters
99
+
100
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
101
+
102
+ #### Speeds, Sizes, Times [optional]
103
+
104
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
105
+
106
+ [More Information Needed]
107
+
108
+ ## Evaluation
109
+
110
+ <!-- This section describes the evaluation protocols and provides the results. -->
111
+
112
+ ### Testing Data, Factors & Metrics
113
+
114
+ #### Testing Data
115
+
116
+ <!-- This should link to a Dataset Card if possible. -->
117
+
118
+ [More Information Needed]
119
+
120
+ #### Factors
121
+
122
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
123
+
124
+ [More Information Needed]
125
+
126
+ #### Metrics
127
+
128
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
129
+
130
+ [More Information Needed]
131
+
132
+ ### Results
133
+
134
+ [More Information Needed]
135
+
136
+ #### Summary
137
+
138
+
139
+
140
+ ## Model Examination [optional]
141
+
142
+ <!-- Relevant interpretability work for the model goes here -->
143
+
144
+ [More Information Needed]
145
+
146
+ ## Environmental Impact
147
+
148
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
149
+
150
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
151
+
152
+ - **Hardware Type:** [More Information Needed]
153
+ - **Hours used:** [More Information Needed]
154
+ - **Cloud Provider:** [More Information Needed]
155
+ - **Compute Region:** [More Information Needed]
156
+ - **Carbon Emitted:** [More Information Needed]
157
+
158
+ ## Technical Specifications [optional]
159
+
160
+ ### Model Architecture and Objective
161
+
162
+ [More Information Needed]
163
+
164
+ ### Compute Infrastructure
165
+
166
+ [More Information Needed]
167
+
168
+ #### Hardware
169
+
170
+ [More Information Needed]
171
+
172
+ #### Software
173
+
174
+ [More Information Needed]
175
+
176
+ ## Citation [optional]
177
+
178
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
179
+
180
+ **BibTeX:**
181
+
182
+ [More Information Needed]
183
+
184
+ **APA:**
185
+
186
+ [More Information Needed]
187
+
188
+ ## Glossary [optional]
189
+
190
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
191
+
192
+ [More Information Needed]
193
+
194
+ ## More Information [optional]
195
+
196
+ [More Information Needed]
197
+
198
+ ## Model Card Authors [optional]
199
+
200
+ [More Information Needed]
201
+
202
+ ## Model Card Contact
203
+
204
+ [More Information Needed]
205
+ ### Framework versions
206
+
207
+ - PEFT 0.18.1
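The card above is the unfilled PEFT template, so its "How to Get Started" section is empty. Under the standard PEFT workflow, and assuming `llama3.2-3b/dp8/` mirrors the `dp3` layout (a `tokenizer/` directory next to `adapter/`), loading would look roughly like the sketch below; the `resize_token_embeddings` call is an assumption, needed because the adapter saves `embed_tokens`/`lm_head` for a vocabulary extended with the canary tokens:

# Sketch (untested): standard PEFT loading for this repo layout.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

tok = AutoTokenizer.from_pretrained("llama3.2-3b/dp8/tokenizer")  # assumed path
base = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-3.2-3B")
base.resize_token_embeddings(len(tok))  # assumed: vocab extended before training
model = PeftModel.from_pretrained(base, "llama3.2-3b/dp8/adapter")
model.eval()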
llama3.2-3b/dp8/adapter/adapter_config.json ADDED
@@ -0,0 +1,46 @@
1
+ {
2
+ "alora_invocation_tokens": null,
3
+ "alpha_pattern": {},
4
+ "arrow_config": null,
5
+ "auto_mapping": null,
6
+ "base_model_name_or_path": "meta-llama/Llama-3.2-3B",
7
+ "bias": "none",
8
+ "corda_config": null,
9
+ "ensure_weight_tying": true,
10
+ "eva_config": null,
11
+ "exclude_modules": null,
12
+ "fan_in_fan_out": false,
13
+ "inference_mode": true,
14
+ "init_lora_weights": true,
15
+ "layer_replication": null,
16
+ "layers_pattern": null,
17
+ "layers_to_transform": null,
18
+ "loftq_config": {},
19
+ "lora_alpha": 32,
20
+ "lora_bias": false,
21
+ "lora_dropout": 0.05,
22
+ "megatron_config": null,
23
+ "megatron_core": "megatron.core",
24
+ "modules_to_save": [
25
+ "lm_head",
26
+ "embed_tokens"
27
+ ],
28
+ "peft_type": "LORA",
29
+ "peft_version": "0.18.1",
30
+ "qalora_group_size": 16,
31
+ "r": 16,
32
+ "rank_pattern": {},
33
+ "revision": null,
34
+ "target_modules": [
35
+ "k_proj",
36
+ "q_proj",
37
+ "v_proj",
38
+ "o_proj"
39
+ ],
40
+ "target_parameters": null,
41
+ "task_type": "CAUSAL_LM",
42
+ "trainable_token_indices": null,
43
+ "use_dora": false,
44
+ "use_qalora": false,
45
+ "use_rslora": false
46
+ }
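For reference, the same configuration expressed as a `peft.LoraConfig` (a sketch reconstructed from the JSON fields above, not taken from the training code): rank-16 LoRA with alpha 32 on the four attention projections only, while the embedding and output head stay fully trainable via `modules_to_save`.

# Sketch: LoraConfig matching the saved adapter_config.json above.
from peft import LoraConfig

lora_cfg = LoraConfig(
    r=16,                 # "r": 16
    lora_alpha=32,        # "lora_alpha": 32
    lora_dropout=0.05,    # "lora_dropout": 0.05
    bias="none",          # "bias": "none"
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj"],
    modules_to_save=["lm_head", "embed_tokens"],
    task_type="CAUSAL_LM",
)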