Release BibleAI: HF + GGUF + Ollama bundle
Browse files
- .gitattributes +3 -34
- README.md +46 -3
- adapters/dpo_final/README.md +210 -0
- adapters/dpo_final/adapter_config.json +52 -0
- adapters/dpo_final/adapter_model.safetensors +3 -0
- adapters/dpo_final/chat_template.jinja +1 -0
- adapters/dpo_final/tokenizer.json +3 -0
- adapters/dpo_final/tokenizer_config.json +52 -0
- adapters/sft_final/README.md +210 -0
- adapters/sft_final/adapter_config.json +52 -0
- adapters/sft_final/adapter_model.safetensors +3 -0
- adapters/sft_final/chat_template.jinja +4 -0
- adapters/sft_final/tokenizer.json +3 -0
- adapters/sft_final/tokenizer_config.json +52 -0
- checksums/sha256.txt +8 -0
- config.json +196 -0
- docs/ARTIFACTS.md +27 -0
- docs/PUBLISHING.md +49 -0
- docs/RELEASE_SNAPSHOT.txt +20 -0
- gguf/final_merged.BF16.gguf +3 -0
- gguf/final_merged.Q8_0.gguf +3 -0
- logs/dpo_fresh_20260415_003821.log +72 -0
- logs/train_resume_20260414_210603.log +0 -0
- model.safetensors +3 -0
- ollama/Modelfile.bf16 +32 -0
- ollama/Modelfile.canonical_project_reference +31 -0
- ollama/Modelfile.q8 +32 -0
- tokenizer.json +3 -0
- tokenizer_config.json +52 -0
.gitattributes
CHANGED
|
@@ -1,35 +1,4 @@
|
|
| 1 |
-
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
-
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 3 |
-
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 4 |
-
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
| 5 |
-
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 6 |
-
*.ftz filter=lfs diff=lfs merge=lfs -text
|
| 7 |
-
*.gz filter=lfs diff=lfs merge=lfs -text
|
| 8 |
-
*.h5 filter=lfs diff=lfs merge=lfs -text
|
| 9 |
-
*.joblib filter=lfs diff=lfs merge=lfs -text
|
| 10 |
-
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
| 11 |
-
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
| 12 |
-
*.model filter=lfs diff=lfs merge=lfs -text
|
| 13 |
-
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
| 14 |
-
*.npy filter=lfs diff=lfs merge=lfs -text
|
| 15 |
-
*.npz filter=lfs diff=lfs merge=lfs -text
|
| 16 |
-
*.onnx filter=lfs diff=lfs merge=lfs -text
|
| 17 |
-
*.ot filter=lfs diff=lfs merge=lfs -text
|
| 18 |
-
*.parquet filter=lfs diff=lfs merge=lfs -text
|
| 19 |
-
*.pb filter=lfs diff=lfs merge=lfs -text
|
| 20 |
-
*.pickle filter=lfs diff=lfs merge=lfs -text
|
| 21 |
-
*.pkl filter=lfs diff=lfs merge=lfs -text
|
| 22 |
-
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 23 |
-
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 24 |
-
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 25 |
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 26 |
-
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 27 |
-
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 28 |
-
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 29 |
-
*.tflite filter=lfs diff=lfs merge=lfs -text
|
| 30 |
-
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 31 |
-
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 32 |
-
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 33 |
-
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
-
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
-
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.gguf filter=lfs diff=lfs merge=lfs -text
|
| 3 |
+
tokenizer.json filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
adapters/*/tokenizer.json filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
README.md
CHANGED
|
@@ -1,3 +1,46 @@
|
|
| 1 |
-
---
|
| 2 |
-
license:
|
| 3 |
-
---
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
license: apache-2.0
|
| 3 |
+
base_model: google/gemma-4-e4b
|
| 4 |
+
tags:
|
| 5 |
+
- bible
|
| 6 |
+
- theology
|
| 7 |
+
- gemma
|
| 8 |
+
- gguf
|
| 9 |
+
- ollama
|
| 10 |
+
library_name: transformers
|
| 11 |
+
pipeline_tag: text-generation
|
| 12 |
+
---
|
| 13 |
+
|
| 14 |
+
# BibleAI
|
| 15 |
+
|
| 16 |
+
BibleAI is a Gemma 4 E4B model tuned with CPT + SFT + DPO for Bible/theology study use cases.
|
| 17 |
+
|
| 18 |
+
## Model Variants
|
| 19 |
+
- `hf/model.safetensors` (merged HF weights)
|
| 20 |
+
- `gguf/final_merged.BF16.gguf`
|
| 21 |
+
- `gguf/final_merged.Q8_0.gguf`
|
| 22 |
+
|
| 23 |
+
## Checksums
|
| 24 |
+
- `hf/model.safetensors`
|
| 25 |
+
`3163ffdcf841d829632af5932ccda65c893fcca63b84605df34aed275db66929`
|
| 26 |
+
- `gguf/final_merged.BF16.gguf`
|
| 27 |
+
`e07e38d28d3032d3b438b7b8b90cbf4cf5e66177b52e8f60673cac3586dc10a1`
|
| 28 |
+
- `gguf/final_merged.Q8_0.gguf`
|
| 29 |
+
`3c7f5f9caf080fe44720f16b5f4b5e7e95a097d6be3d1d8d89aea22e8574bad1`
|
| 30 |
+
|
| 31 |
+
Full checksum file: `checksums/sha256.txt`
|
| 32 |
+
|
| 33 |
+
## Ollama
|
| 34 |
+
- Q8:
|
| 35 |
+
`ollama create bibleaiq8 -f ollama/Modelfile.q8`
|
| 36 |
+
- BF16:
|
| 37 |
+
`ollama create bibleaibf16 -f ollama/Modelfile.bf16`
|
| 38 |
+
|
| 39 |
+
## Included Artifacts
|
| 40 |
+
- `hf/` merged HF model files
|
| 41 |
+
- `gguf/` quantized GGUF exports
|
| 42 |
+
- `ollama/` ready Modelfiles
|
| 43 |
+
- `adapters/` final SFT and DPO adapters
|
| 44 |
+
- `logs/` training logs
|
| 45 |
+
- `checksums/` integrity hashes
|
| 46 |
+
- `docs/` release documentation
|
adapters/dpo_final/README.md
ADDED
|
@@ -0,0 +1,210 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
base_model: /workspace/outputs/sft_cpt_merged
|
| 3 |
+
library_name: peft
|
| 4 |
+
pipeline_tag: text-generation
|
| 5 |
+
tags:
|
| 6 |
+
- base_model:adapter:/workspace/outputs/sft_cpt_merged
|
| 7 |
+
- dpo
|
| 8 |
+
- lora
|
| 9 |
+
- transformers
|
| 10 |
+
- trl
|
| 11 |
+
- unsloth
|
| 12 |
+
---
|
| 13 |
+
|
| 14 |
+
# Model Card for Model ID
|
| 15 |
+
|
| 16 |
+
<!-- Provide a quick summary of what the model is/does. -->
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
## Model Details
|
| 21 |
+
|
| 22 |
+
### Model Description
|
| 23 |
+
|
| 24 |
+
<!-- Provide a longer summary of what this model is. -->
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
- **Developed by:** [More Information Needed]
|
| 29 |
+
- **Funded by [optional]:** [More Information Needed]
|
| 30 |
+
- **Shared by [optional]:** [More Information Needed]
|
| 31 |
+
- **Model type:** [More Information Needed]
|
| 32 |
+
- **Language(s) (NLP):** [More Information Needed]
|
| 33 |
+
- **License:** [More Information Needed]
|
| 34 |
+
- **Finetuned from model [optional]:** [More Information Needed]
|
| 35 |
+
|
| 36 |
+
### Model Sources [optional]
|
| 37 |
+
|
| 38 |
+
<!-- Provide the basic links for the model. -->
|
| 39 |
+
|
| 40 |
+
- **Repository:** [More Information Needed]
|
| 41 |
+
- **Paper [optional]:** [More Information Needed]
|
| 42 |
+
- **Demo [optional]:** [More Information Needed]
|
| 43 |
+
|
| 44 |
+
## Uses
|
| 45 |
+
|
| 46 |
+
<!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
|
| 47 |
+
|
| 48 |
+
### Direct Use
|
| 49 |
+
|
| 50 |
+
<!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
|
| 51 |
+
|
| 52 |
+
[More Information Needed]
|
| 53 |
+
|
| 54 |
+
### Downstream Use [optional]
|
| 55 |
+
|
| 56 |
+
<!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
|
| 57 |
+
|
| 58 |
+
[More Information Needed]
|
| 59 |
+
|
| 60 |
+
### Out-of-Scope Use
|
| 61 |
+
|
| 62 |
+
<!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
|
| 63 |
+
|
| 64 |
+
[More Information Needed]
|
| 65 |
+
|
| 66 |
+
## Bias, Risks, and Limitations
|
| 67 |
+
|
| 68 |
+
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
|
| 69 |
+
|
| 70 |
+
[More Information Needed]
|
| 71 |
+
|
| 72 |
+
### Recommendations
|
| 73 |
+
|
| 74 |
+
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
|
| 75 |
+
|
| 76 |
+
Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
|
| 77 |
+
|
| 78 |
+
## How to Get Started with the Model
|
| 79 |
+
|
| 80 |
+
Use the code below to get started with the model.
|
| 81 |
+
|
| 82 |
+
[More Information Needed]
|
| 83 |
+
|
| 84 |
+
## Training Details
|
| 85 |
+
|
| 86 |
+
### Training Data
|
| 87 |
+
|
| 88 |
+
<!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
|
| 89 |
+
|
| 90 |
+
[More Information Needed]
|
| 91 |
+
|
| 92 |
+
### Training Procedure
|
| 93 |
+
|
| 94 |
+
<!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
|
| 95 |
+
|
| 96 |
+
#### Preprocessing [optional]
|
| 97 |
+
|
| 98 |
+
[More Information Needed]
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
#### Training Hyperparameters
|
| 102 |
+
|
| 103 |
+
- **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
|
| 104 |
+
|
| 105 |
+
#### Speeds, Sizes, Times [optional]
|
| 106 |
+
|
| 107 |
+
<!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
|
| 108 |
+
|
| 109 |
+
[More Information Needed]
|
| 110 |
+
|
| 111 |
+
## Evaluation
|
| 112 |
+
|
| 113 |
+
<!-- This section describes the evaluation protocols and provides the results. -->
|
| 114 |
+
|
| 115 |
+
### Testing Data, Factors & Metrics
|
| 116 |
+
|
| 117 |
+
#### Testing Data
|
| 118 |
+
|
| 119 |
+
<!-- This should link to a Dataset Card if possible. -->
|
| 120 |
+
|
| 121 |
+
[More Information Needed]
|
| 122 |
+
|
| 123 |
+
#### Factors
|
| 124 |
+
|
| 125 |
+
<!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
|
| 126 |
+
|
| 127 |
+
[More Information Needed]
|
| 128 |
+
|
| 129 |
+
#### Metrics
|
| 130 |
+
|
| 131 |
+
<!-- These are the evaluation metrics being used, ideally with a description of why. -->
|
| 132 |
+
|
| 133 |
+
[More Information Needed]
|
| 134 |
+
|
| 135 |
+
### Results
|
| 136 |
+
|
| 137 |
+
[More Information Needed]
|
| 138 |
+
|
| 139 |
+
#### Summary
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
## Model Examination [optional]
|
| 144 |
+
|
| 145 |
+
<!-- Relevant interpretability work for the model goes here -->
|
| 146 |
+
|
| 147 |
+
[More Information Needed]
|
| 148 |
+
|
| 149 |
+
## Environmental Impact
|
| 150 |
+
|
| 151 |
+
<!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
|
| 152 |
+
|
| 153 |
+
Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
|
| 154 |
+
|
| 155 |
+
- **Hardware Type:** [More Information Needed]
|
| 156 |
+
- **Hours used:** [More Information Needed]
|
| 157 |
+
- **Cloud Provider:** [More Information Needed]
|
| 158 |
+
- **Compute Region:** [More Information Needed]
|
| 159 |
+
- **Carbon Emitted:** [More Information Needed]
|
| 160 |
+
|
| 161 |
+
## Technical Specifications [optional]
|
| 162 |
+
|
| 163 |
+
### Model Architecture and Objective
|
| 164 |
+
|
| 165 |
+
[More Information Needed]
|
| 166 |
+
|
| 167 |
+
### Compute Infrastructure
|
| 168 |
+
|
| 169 |
+
[More Information Needed]
|
| 170 |
+
|
| 171 |
+
#### Hardware
|
| 172 |
+
|
| 173 |
+
[More Information Needed]
|
| 174 |
+
|
| 175 |
+
#### Software
|
| 176 |
+
|
| 177 |
+
[More Information Needed]
|
| 178 |
+
|
| 179 |
+
## Citation [optional]
|
| 180 |
+
|
| 181 |
+
<!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
|
| 182 |
+
|
| 183 |
+
**BibTeX:**
|
| 184 |
+
|
| 185 |
+
[More Information Needed]
|
| 186 |
+
|
| 187 |
+
**APA:**
|
| 188 |
+
|
| 189 |
+
[More Information Needed]
|
| 190 |
+
|
| 191 |
+
## Glossary [optional]
|
| 192 |
+
|
| 193 |
+
<!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
|
| 194 |
+
|
| 195 |
+
[More Information Needed]
|
| 196 |
+
|
| 197 |
+
## More Information [optional]
|
| 198 |
+
|
| 199 |
+
[More Information Needed]
|
| 200 |
+
|
| 201 |
+
## Model Card Authors [optional]
|
| 202 |
+
|
| 203 |
+
[More Information Needed]
|
| 204 |
+
|
| 205 |
+
## Model Card Contact
|
| 206 |
+
|
| 207 |
+
[More Information Needed]
|
| 208 |
+
### Framework versions
|
| 209 |
+
|
| 210 |
+
- PEFT 0.19.0
|
adapters/dpo_final/adapter_config.json
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"alora_invocation_tokens": null,
|
| 3 |
+
"alpha_pattern": {},
|
| 4 |
+
"arrow_config": null,
|
| 5 |
+
"auto_mapping": {
|
| 6 |
+
"base_model_class": "Gemma4ForConditionalGeneration",
|
| 7 |
+
"parent_library": "transformers.models.gemma4.modeling_gemma4",
|
| 8 |
+
"unsloth_fixed": true
|
| 9 |
+
},
|
| 10 |
+
"base_model_name_or_path": "/workspace/outputs/sft_cpt_merged",
|
| 11 |
+
"bias": "none",
|
| 12 |
+
"corda_config": null,
|
| 13 |
+
"ensure_weight_tying": false,
|
| 14 |
+
"eva_config": null,
|
| 15 |
+
"exclude_modules": null,
|
| 16 |
+
"fan_in_fan_out": false,
|
| 17 |
+
"inference_mode": true,
|
| 18 |
+
"init_lora_weights": true,
|
| 19 |
+
"layer_replication": null,
|
| 20 |
+
"layers_pattern": null,
|
| 21 |
+
"layers_to_transform": null,
|
| 22 |
+
"loftq_config": {},
|
| 23 |
+
"lora_alpha": 64,
|
| 24 |
+
"lora_bias": false,
|
| 25 |
+
"lora_dropout": 0.05,
|
| 26 |
+
"lora_ga_config": null,
|
| 27 |
+
"megatron_config": null,
|
| 28 |
+
"megatron_core": "megatron.core",
|
| 29 |
+
"modules_to_save": null,
|
| 30 |
+
"peft_type": "LORA",
|
| 31 |
+
"peft_version": "0.19.0",
|
| 32 |
+
"qalora_group_size": 16,
|
| 33 |
+
"r": 32,
|
| 34 |
+
"rank_pattern": {},
|
| 35 |
+
"revision": null,
|
| 36 |
+
"target_modules": [
|
| 37 |
+
"gate_proj",
|
| 38 |
+
"up_proj",
|
| 39 |
+
"o_proj",
|
| 40 |
+
"k_proj",
|
| 41 |
+
"q_proj",
|
| 42 |
+
"v_proj",
|
| 43 |
+
"down_proj"
|
| 44 |
+
],
|
| 45 |
+
"target_parameters": null,
|
| 46 |
+
"task_type": "CAUSAL_LM",
|
| 47 |
+
"trainable_token_indices": null,
|
| 48 |
+
"use_bdlora": null,
|
| 49 |
+
"use_dora": false,
|
| 50 |
+
"use_qalora": false,
|
| 51 |
+
"use_rslora": false
|
| 52 |
+
}
|
adapters/dpo_final/adapter_model.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:280ec145dbfd1793ca89025be89888aa4d2fc2dc02bce9d92211f02375fef837
|
| 3 |
+
size 339349544
|
adapters/dpo_final/chat_template.jinja
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
{% for message in messages %}{{ '<start_of_turn>' + message['role'] + '\n' + message['content'] + '<end_of_turn>\n' }}{% endfor %}{% if add_generation_prompt %}{{ '<start_of_turn>model\n' }}{% endif %}
|
adapters/dpo_final/tokenizer.json
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:12bac982b793c44b03d52a250a9f0d0b666813da566b910c24a6da0695fd11e6
|
| 3 |
+
size 32170070
|
adapters/dpo_final/tokenizer_config.json
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"audio_token": "<|audio|>",
|
| 3 |
+
"backend": "tokenizers",
|
| 4 |
+
"boa_token": "<|audio>",
|
| 5 |
+
"boi_token": "<|image>",
|
| 6 |
+
"bos_token": "<bos>",
|
| 7 |
+
"eoa_token": "<audio|>",
|
| 8 |
+
"eoc_token": "<channel|>",
|
| 9 |
+
"eoi_token": "<image|>",
|
| 10 |
+
"eos_token": "<eos>",
|
| 11 |
+
"eot_token": "<turn|>",
|
| 12 |
+
"escape_token": "<|\"|>",
|
| 13 |
+
"etc_token": "<tool_call|>",
|
| 14 |
+
"etd_token": "<tool|>",
|
| 15 |
+
"etr_token": "<tool_response|>",
|
| 16 |
+
"extra_special_tokens": [],
|
| 17 |
+
"image_token": "<|image|>",
|
| 18 |
+
"is_local": true,
|
| 19 |
+
"mask_token": "<mask>",
|
| 20 |
+
"model_max_length": 1000000000000000019884624838656,
|
| 21 |
+
"model_specific_special_tokens": {
|
| 22 |
+
"audio_token": "<|audio|>",
|
| 23 |
+
"boa_token": "<|audio>",
|
| 24 |
+
"boi_token": "<|image>",
|
| 25 |
+
"eoa_token": "<audio|>",
|
| 26 |
+
"eoc_token": "<channel|>",
|
| 27 |
+
"eoi_token": "<image|>",
|
| 28 |
+
"eot_token": "<turn|>",
|
| 29 |
+
"escape_token": "<|\"|>",
|
| 30 |
+
"etc_token": "<tool_call|>",
|
| 31 |
+
"etd_token": "<tool|>",
|
| 32 |
+
"etr_token": "<tool_response|>",
|
| 33 |
+
"image_token": "<|image|>",
|
| 34 |
+
"soc_token": "<|channel>",
|
| 35 |
+
"sot_token": "<|turn>",
|
| 36 |
+
"stc_token": "<|tool_call>",
|
| 37 |
+
"std_token": "<|tool>",
|
| 38 |
+
"str_token": "<|tool_response>",
|
| 39 |
+
"think_token": "<|think|>"
|
| 40 |
+
},
|
| 41 |
+
"pad_token": "<pad>",
|
| 42 |
+
"padding_side": "left",
|
| 43 |
+
"processor_class": "Gemma4Processor",
|
| 44 |
+
"soc_token": "<|channel>",
|
| 45 |
+
"sot_token": "<|turn>",
|
| 46 |
+
"stc_token": "<|tool_call>",
|
| 47 |
+
"std_token": "<|tool>",
|
| 48 |
+
"str_token": "<|tool_response>",
|
| 49 |
+
"think_token": "<|think|>",
|
| 50 |
+
"tokenizer_class": "GemmaTokenizer",
|
| 51 |
+
"unk_token": "<unk>"
|
| 52 |
+
}
|
adapters/sft_final/README.md
ADDED
|
@@ -0,0 +1,210 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
base_model: /root/cpt_merged
|
| 3 |
+
library_name: peft
|
| 4 |
+
pipeline_tag: text-generation
|
| 5 |
+
tags:
|
| 6 |
+
- base_model:adapter:/root/cpt_merged
|
| 7 |
+
- lora
|
| 8 |
+
- sft
|
| 9 |
+
- transformers
|
| 10 |
+
- trl
|
| 11 |
+
- unsloth
|
| 12 |
+
---
|
| 13 |
+
|
| 14 |
+
# Model Card for Model ID
|
| 15 |
+
|
| 16 |
+
<!-- Provide a quick summary of what the model is/does. -->
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
## Model Details
|
| 21 |
+
|
| 22 |
+
### Model Description
|
| 23 |
+
|
| 24 |
+
<!-- Provide a longer summary of what this model is. -->
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
- **Developed by:** [More Information Needed]
|
| 29 |
+
- **Funded by [optional]:** [More Information Needed]
|
| 30 |
+
- **Shared by [optional]:** [More Information Needed]
|
| 31 |
+
- **Model type:** [More Information Needed]
|
| 32 |
+
- **Language(s) (NLP):** [More Information Needed]
|
| 33 |
+
- **License:** [More Information Needed]
|
| 34 |
+
- **Finetuned from model [optional]:** [More Information Needed]
|
| 35 |
+
|
| 36 |
+
### Model Sources [optional]
|
| 37 |
+
|
| 38 |
+
<!-- Provide the basic links for the model. -->
|
| 39 |
+
|
| 40 |
+
- **Repository:** [More Information Needed]
|
| 41 |
+
- **Paper [optional]:** [More Information Needed]
|
| 42 |
+
- **Demo [optional]:** [More Information Needed]
|
| 43 |
+
|
| 44 |
+
## Uses
|
| 45 |
+
|
| 46 |
+
<!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
|
| 47 |
+
|
| 48 |
+
### Direct Use
|
| 49 |
+
|
| 50 |
+
<!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
|
| 51 |
+
|
| 52 |
+
[More Information Needed]
|
| 53 |
+
|
| 54 |
+
### Downstream Use [optional]
|
| 55 |
+
|
| 56 |
+
<!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
|
| 57 |
+
|
| 58 |
+
[More Information Needed]
|
| 59 |
+
|
| 60 |
+
### Out-of-Scope Use
|
| 61 |
+
|
| 62 |
+
<!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
|
| 63 |
+
|
| 64 |
+
[More Information Needed]
|
| 65 |
+
|
| 66 |
+
## Bias, Risks, and Limitations
|
| 67 |
+
|
| 68 |
+
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
|
| 69 |
+
|
| 70 |
+
[More Information Needed]
|
| 71 |
+
|
| 72 |
+
### Recommendations
|
| 73 |
+
|
| 74 |
+
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
|
| 75 |
+
|
| 76 |
+
Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
|
| 77 |
+
|
| 78 |
+
## How to Get Started with the Model
|
| 79 |
+
|
| 80 |
+
Use the code below to get started with the model.
|
| 81 |
+
|
| 82 |
+
[More Information Needed]
|
| 83 |
+
|
| 84 |
+
## Training Details
|
| 85 |
+
|
| 86 |
+
### Training Data
|
| 87 |
+
|
| 88 |
+
<!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
|
| 89 |
+
|
| 90 |
+
[More Information Needed]
|
| 91 |
+
|
| 92 |
+
### Training Procedure
|
| 93 |
+
|
| 94 |
+
<!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
|
| 95 |
+
|
| 96 |
+
#### Preprocessing [optional]
|
| 97 |
+
|
| 98 |
+
[More Information Needed]
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
#### Training Hyperparameters
|
| 102 |
+
|
| 103 |
+
- **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
|
| 104 |
+
|
| 105 |
+
#### Speeds, Sizes, Times [optional]
|
| 106 |
+
|
| 107 |
+
<!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
|
| 108 |
+
|
| 109 |
+
[More Information Needed]
|
| 110 |
+
|
| 111 |
+
## Evaluation
|
| 112 |
+
|
| 113 |
+
<!-- This section describes the evaluation protocols and provides the results. -->
|
| 114 |
+
|
| 115 |
+
### Testing Data, Factors & Metrics
|
| 116 |
+
|
| 117 |
+
#### Testing Data
|
| 118 |
+
|
| 119 |
+
<!-- This should link to a Dataset Card if possible. -->
|
| 120 |
+
|
| 121 |
+
[More Information Needed]
|
| 122 |
+
|
| 123 |
+
#### Factors
|
| 124 |
+
|
| 125 |
+
<!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
|
| 126 |
+
|
| 127 |
+
[More Information Needed]
|
| 128 |
+
|
| 129 |
+
#### Metrics
|
| 130 |
+
|
| 131 |
+
<!-- These are the evaluation metrics being used, ideally with a description of why. -->
|
| 132 |
+
|
| 133 |
+
[More Information Needed]
|
| 134 |
+
|
| 135 |
+
### Results
|
| 136 |
+
|
| 137 |
+
[More Information Needed]
|
| 138 |
+
|
| 139 |
+
#### Summary
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
## Model Examination [optional]
|
| 144 |
+
|
| 145 |
+
<!-- Relevant interpretability work for the model goes here -->
|
| 146 |
+
|
| 147 |
+
[More Information Needed]
|
| 148 |
+
|
| 149 |
+
## Environmental Impact
|
| 150 |
+
|
| 151 |
+
<!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
|
| 152 |
+
|
| 153 |
+
Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
|
| 154 |
+
|
| 155 |
+
- **Hardware Type:** [More Information Needed]
|
| 156 |
+
- **Hours used:** [More Information Needed]
|
| 157 |
+
- **Cloud Provider:** [More Information Needed]
|
| 158 |
+
- **Compute Region:** [More Information Needed]
|
| 159 |
+
- **Carbon Emitted:** [More Information Needed]
|
| 160 |
+
|
| 161 |
+
## Technical Specifications [optional]
|
| 162 |
+
|
| 163 |
+
### Model Architecture and Objective
|
| 164 |
+
|
| 165 |
+
[More Information Needed]
|
| 166 |
+
|
| 167 |
+
### Compute Infrastructure
|
| 168 |
+
|
| 169 |
+
[More Information Needed]
|
| 170 |
+
|
| 171 |
+
#### Hardware
|
| 172 |
+
|
| 173 |
+
[More Information Needed]
|
| 174 |
+
|
| 175 |
+
#### Software
|
| 176 |
+
|
| 177 |
+
[More Information Needed]
|
| 178 |
+
|
| 179 |
+
## Citation [optional]
|
| 180 |
+
|
| 181 |
+
<!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
|
| 182 |
+
|
| 183 |
+
**BibTeX:**
|
| 184 |
+
|
| 185 |
+
[More Information Needed]
|
| 186 |
+
|
| 187 |
+
**APA:**
|
| 188 |
+
|
| 189 |
+
[More Information Needed]
|
| 190 |
+
|
| 191 |
+
## Glossary [optional]
|
| 192 |
+
|
| 193 |
+
<!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
|
| 194 |
+
|
| 195 |
+
[More Information Needed]
|
| 196 |
+
|
| 197 |
+
## More Information [optional]
|
| 198 |
+
|
| 199 |
+
[More Information Needed]
|
| 200 |
+
|
| 201 |
+
## Model Card Authors [optional]
|
| 202 |
+
|
| 203 |
+
[More Information Needed]
|
| 204 |
+
|
| 205 |
+
## Model Card Contact
|
| 206 |
+
|
| 207 |
+
[More Information Needed]
|
| 208 |
+
### Framework versions
|
| 209 |
+
|
| 210 |
+
- PEFT 0.19.0
|
adapters/sft_final/adapter_config.json
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"alora_invocation_tokens": null,
|
| 3 |
+
"alpha_pattern": {},
|
| 4 |
+
"arrow_config": null,
|
| 5 |
+
"auto_mapping": {
|
| 6 |
+
"base_model_class": "Gemma4ForConditionalGeneration",
|
| 7 |
+
"parent_library": "transformers.models.gemma4.modeling_gemma4",
|
| 8 |
+
"unsloth_fixed": true
|
| 9 |
+
},
|
| 10 |
+
"base_model_name_or_path": "/root/cpt_merged",
|
| 11 |
+
"bias": "none",
|
| 12 |
+
"corda_config": null,
|
| 13 |
+
"ensure_weight_tying": false,
|
| 14 |
+
"eva_config": null,
|
| 15 |
+
"exclude_modules": null,
|
| 16 |
+
"fan_in_fan_out": false,
|
| 17 |
+
"inference_mode": true,
|
| 18 |
+
"init_lora_weights": true,
|
| 19 |
+
"layer_replication": null,
|
| 20 |
+
"layers_pattern": null,
|
| 21 |
+
"layers_to_transform": null,
|
| 22 |
+
"loftq_config": {},
|
| 23 |
+
"lora_alpha": 128,
|
| 24 |
+
"lora_bias": false,
|
| 25 |
+
"lora_dropout": 0.05,
|
| 26 |
+
"lora_ga_config": null,
|
| 27 |
+
"megatron_config": null,
|
| 28 |
+
"megatron_core": "megatron.core",
|
| 29 |
+
"modules_to_save": null,
|
| 30 |
+
"peft_type": "LORA",
|
| 31 |
+
"peft_version": "0.19.0",
|
| 32 |
+
"qalora_group_size": 16,
|
| 33 |
+
"r": 64,
|
| 34 |
+
"rank_pattern": {},
|
| 35 |
+
"revision": null,
|
| 36 |
+
"target_modules": [
|
| 37 |
+
"gate_proj",
|
| 38 |
+
"q_proj",
|
| 39 |
+
"v_proj",
|
| 40 |
+
"o_proj",
|
| 41 |
+
"down_proj",
|
| 42 |
+
"up_proj",
|
| 43 |
+
"k_proj"
|
| 44 |
+
],
|
| 45 |
+
"target_parameters": null,
|
| 46 |
+
"task_type": "CAUSAL_LM",
|
| 47 |
+
"trainable_token_indices": null,
|
| 48 |
+
"use_bdlora": null,
|
| 49 |
+
"use_dora": false,
|
| 50 |
+
"use_qalora": false,
|
| 51 |
+
"use_rslora": false
|
| 52 |
+
}
|
adapters/sft_final/adapter_model.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:a9ffcf8419e82cddfe88a4521a863f0d4c97e4d5c3303ba96b268d7f299d0408
|
| 3 |
+
size 678564160
|
adapters/sft_final/chat_template.jinja
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{% for message in messages %}{{ '<start_of_turn>' + message['role'] + '
|
| 2 |
+
' + message['content'] + '<end_of_turn>
|
| 3 |
+
' }}{% endfor %}{% if add_generation_prompt %}{{ '<start_of_turn>model
|
| 4 |
+
' }}{% endif %}
|
adapters/sft_final/tokenizer.json
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:12bac982b793c44b03d52a250a9f0d0b666813da566b910c24a6da0695fd11e6
|
| 3 |
+
size 32170070
|
adapters/sft_final/tokenizer_config.json
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"audio_token": "<|audio|>",
|
| 3 |
+
"backend": "tokenizers",
|
| 4 |
+
"boa_token": "<|audio>",
|
| 5 |
+
"boi_token": "<|image>",
|
| 6 |
+
"bos_token": "<bos>",
|
| 7 |
+
"eoa_token": "<audio|>",
|
| 8 |
+
"eoc_token": "<channel|>",
|
| 9 |
+
"eoi_token": "<image|>",
|
| 10 |
+
"eos_token": "<eos>",
|
| 11 |
+
"eot_token": "<turn|>",
|
| 12 |
+
"escape_token": "<|\"|>",
|
| 13 |
+
"etc_token": "<tool_call|>",
|
| 14 |
+
"etd_token": "<tool|>",
|
| 15 |
+
"etr_token": "<tool_response|>",
|
| 16 |
+
"extra_special_tokens": [],
|
| 17 |
+
"image_token": "<|image|>",
|
| 18 |
+
"is_local": true,
|
| 19 |
+
"mask_token": "<mask>",
|
| 20 |
+
"model_max_length": 1000000000000000019884624838656,
|
| 21 |
+
"model_specific_special_tokens": {
|
| 22 |
+
"audio_token": "<|audio|>",
|
| 23 |
+
"boa_token": "<|audio>",
|
| 24 |
+
"boi_token": "<|image>",
|
| 25 |
+
"eoa_token": "<audio|>",
|
| 26 |
+
"eoc_token": "<channel|>",
|
| 27 |
+
"eoi_token": "<image|>",
|
| 28 |
+
"eot_token": "<turn|>",
|
| 29 |
+
"escape_token": "<|\"|>",
|
| 30 |
+
"etc_token": "<tool_call|>",
|
| 31 |
+
"etd_token": "<tool|>",
|
| 32 |
+
"etr_token": "<tool_response|>",
|
| 33 |
+
"image_token": "<|image|>",
|
| 34 |
+
"soc_token": "<|channel>",
|
| 35 |
+
"sot_token": "<|turn>",
|
| 36 |
+
"stc_token": "<|tool_call>",
|
| 37 |
+
"std_token": "<|tool>",
|
| 38 |
+
"str_token": "<|tool_response>",
|
| 39 |
+
"think_token": "<|think|>"
|
| 40 |
+
},
|
| 41 |
+
"pad_token": "<pad>",
|
| 42 |
+
"padding_side": "left",
|
| 43 |
+
"processor_class": "Gemma4Processor",
|
| 44 |
+
"soc_token": "<|channel>",
|
| 45 |
+
"sot_token": "<|turn>",
|
| 46 |
+
"stc_token": "<|tool_call>",
|
| 47 |
+
"std_token": "<|tool>",
|
| 48 |
+
"str_token": "<|tool_response>",
|
| 49 |
+
"think_token": "<|think|>",
|
| 50 |
+
"tokenizer_class": "GemmaTokenizer",
|
| 51 |
+
"unk_token": "<unk>"
|
| 52 |
+
}
|
checksums/sha256.txt
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
3163ffdcf841d829632af5932ccda65c893fcca63b84605df34aed275db66929 hf/model.safetensors
|
| 2 |
+
e07e38d28d3032d3b438b7b8b90cbf4cf5e66177b52e8f60673cac3586dc10a1 gguf/final_merged.BF16.gguf
|
| 3 |
+
3c7f5f9caf080fe44720f16b5f4b5e7e95a097d6be3d1d8d89aea22e8574bad1 gguf/final_merged.Q8_0.gguf
|
| 4 |
+
a9ffcf8419e82cddfe88a4521a863f0d4c97e4d5c3303ba96b268d7f299d0408 adapters/sft_final/adapter_model.safetensors
|
| 5 |
+
280ec145dbfd1793ca89025be89888aa4d2fc2dc02bce9d92211f02375fef837 adapters/dpo_final/adapter_model.safetensors
|
| 6 |
+
|
| 7 |
+
# Verified previously on downloaded HF merged file:
|
| 8 |
+
# 3163ffdcf841d829632af5932ccda65c893fcca63b84605df34aed275db66929 hf/model.safetensors
|
config.json
ADDED
|
@@ -0,0 +1,196 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"architectures": [
|
| 3 |
+
"Gemma4ForConditionalGeneration"
|
| 4 |
+
],
|
| 5 |
+
"audio_config": {
|
| 6 |
+
"_name_or_path": "",
|
| 7 |
+
"architectures": null,
|
| 8 |
+
"attention_chunk_size": 12,
|
| 9 |
+
"attention_context_left": 13,
|
| 10 |
+
"attention_context_right": 0,
|
| 11 |
+
"attention_invalid_logits_value": -1000000000.0,
|
| 12 |
+
"attention_logit_cap": 50.0,
|
| 13 |
+
"chunk_size_feed_forward": 0,
|
| 14 |
+
"conv_kernel_size": 5,
|
| 15 |
+
"torch_dtype": "bfloat16",
|
| 16 |
+
"gradient_clipping": 10000000000.0,
|
| 17 |
+
"hidden_act": "silu",
|
| 18 |
+
"hidden_size": 1024,
|
| 19 |
+
"id2label": {
|
| 20 |
+
"0": "LABEL_0",
|
| 21 |
+
"1": "LABEL_1"
|
| 22 |
+
},
|
| 23 |
+
"initializer_range": 0.02,
|
| 24 |
+
"is_encoder_decoder": false,
|
| 25 |
+
"label2id": {
|
| 26 |
+
"LABEL_0": 0,
|
| 27 |
+
"LABEL_1": 1
|
| 28 |
+
},
|
| 29 |
+
"model_type": "gemma4_audio",
|
| 30 |
+
"num_attention_heads": 8,
|
| 31 |
+
"num_hidden_layers": 12,
|
| 32 |
+
"output_attentions": false,
|
| 33 |
+
"output_hidden_states": false,
|
| 34 |
+
"output_proj_dims": 1536,
|
| 35 |
+
"problem_type": null,
|
| 36 |
+
"residual_weight": 0.5,
|
| 37 |
+
"return_dict": true,
|
| 38 |
+
"rms_norm_eps": 1e-06,
|
| 39 |
+
"subsampling_conv_channels": [
|
| 40 |
+
128,
|
| 41 |
+
32
|
| 42 |
+
],
|
| 43 |
+
"use_clipped_linears": true
|
| 44 |
+
},
|
| 45 |
+
"audio_token_id": 258881,
|
| 46 |
+
"boa_token_id": 256000,
|
| 47 |
+
"boi_token_id": 255999,
|
| 48 |
+
"torch_dtype": "bfloat16",
|
| 49 |
+
"eoa_token_id": 258883,
|
| 50 |
+
"eoa_token_index": 258883,
|
| 51 |
+
"eoi_token_id": 258882,
|
| 52 |
+
"image_token_id": 258880,
|
| 53 |
+
"initializer_range": 0.02,
|
| 54 |
+
"model_name": "/workspace/outputs/sft_cpt_merged",
|
| 55 |
+
"model_type": "gemma4",
|
| 56 |
+
"pad_token_id": 0,
|
| 57 |
+
"text_config": {
|
| 58 |
+
"attention_bias": false,
|
| 59 |
+
"attention_dropout": 0.0,
|
| 60 |
+
"attention_k_eq_v": false,
|
| 61 |
+
"bos_token_id": 2,
|
| 62 |
+
"torch_dtype": "bfloat16",
|
| 63 |
+
"enable_moe_block": false,
|
| 64 |
+
"eos_token_id": 1,
|
| 65 |
+
"expert_intermediate_size": null,
|
| 66 |
+
"final_logit_softcapping": 30.0,
|
| 67 |
+
"global_head_dim": 512,
|
| 68 |
+
"head_dim": 256,
|
| 69 |
+
"hidden_activation": "gelu_pytorch_tanh",
|
| 70 |
+
"hidden_size": 2560,
|
| 71 |
+
"hidden_size_per_layer_input": 256,
|
| 72 |
+
"initializer_range": 0.02,
|
| 73 |
+
"intermediate_size": 10240,
|
| 74 |
+
"layer_types": [
|
| 75 |
+
"sliding_attention",
|
| 76 |
+
"sliding_attention",
|
| 77 |
+
"sliding_attention",
|
| 78 |
+
"sliding_attention",
|
| 79 |
+
"sliding_attention",
|
| 80 |
+
"full_attention",
|
| 81 |
+
"sliding_attention",
|
| 82 |
+
"sliding_attention",
|
| 83 |
+
"sliding_attention",
|
| 84 |
+
"sliding_attention",
|
| 85 |
+
"sliding_attention",
|
| 86 |
+
"full_attention",
|
| 87 |
+
"sliding_attention",
|
| 88 |
+
"sliding_attention",
|
| 89 |
+
"sliding_attention",
|
| 90 |
+
"sliding_attention",
|
| 91 |
+
"sliding_attention",
|
| 92 |
+
"full_attention",
|
| 93 |
+
"sliding_attention",
|
| 94 |
+
"sliding_attention",
|
| 95 |
+
"sliding_attention",
|
| 96 |
+
"sliding_attention",
|
| 97 |
+
"sliding_attention",
|
| 98 |
+
"full_attention",
|
| 99 |
+
"sliding_attention",
|
| 100 |
+
"sliding_attention",
|
| 101 |
+
"sliding_attention",
|
| 102 |
+
"sliding_attention",
|
| 103 |
+
"sliding_attention",
|
| 104 |
+
"full_attention",
|
| 105 |
+
"sliding_attention",
|
| 106 |
+
"sliding_attention",
|
| 107 |
+
"sliding_attention",
|
| 108 |
+
"sliding_attention",
|
| 109 |
+
"sliding_attention",
|
| 110 |
+
"full_attention",
|
| 111 |
+
"sliding_attention",
|
| 112 |
+
"sliding_attention",
|
| 113 |
+
"sliding_attention",
|
| 114 |
+
"sliding_attention",
|
| 115 |
+
"sliding_attention",
|
| 116 |
+
"full_attention"
|
| 117 |
+
],
|
| 118 |
+
"max_position_embeddings": 131072,
|
| 119 |
+
"model_type": "gemma4_text",
|
| 120 |
+
"moe_intermediate_size": null,
|
| 121 |
+
"num_attention_heads": 8,
|
| 122 |
+
"num_experts": null,
|
| 123 |
+
"num_global_key_value_heads": null,
|
| 124 |
+
"num_hidden_layers": 42,
|
| 125 |
+
"num_key_value_heads": 2,
|
| 126 |
+
"num_kv_shared_layers": 18,
|
| 127 |
+
"pad_token_id": 0,
|
| 128 |
+
"rms_norm_eps": 1e-06,
|
| 129 |
+
"rope_parameters": {
|
| 130 |
+
"full_attention": {
|
| 131 |
+
"partial_rotary_factor": 0.25,
|
| 132 |
+
"rope_theta": 1000000.0,
|
| 133 |
+
"rope_type": "proportional"
|
| 134 |
+
},
|
| 135 |
+
"sliding_attention": {
|
| 136 |
+
"rope_theta": 10000.0,
|
| 137 |
+
"rope_type": "default"
|
| 138 |
+
}
|
| 139 |
+
},
|
| 140 |
+
"sliding_window": 512,
|
| 141 |
+
"tie_word_embeddings": true,
|
| 142 |
+
"top_k_experts": null,
|
| 143 |
+
"use_bidirectional_attention": null,
|
| 144 |
+
"use_cache": true,
|
| 145 |
+
"use_double_wide_mlp": false,
|
| 146 |
+
"vocab_size": 262144,
|
| 147 |
+
"vocab_size_per_layer_input": 262144
|
| 148 |
+
},
|
| 149 |
+
"tie_word_embeddings": true,
|
| 150 |
+
"unsloth_version": "2026.4.4",
|
| 151 |
+
"video_token_id": 258884,
|
| 152 |
+
"vision_config": {
|
| 153 |
+
"_name_or_path": "",
|
| 154 |
+
"architectures": null,
|
| 155 |
+
"attention_bias": false,
|
| 156 |
+
"attention_dropout": 0.0,
|
| 157 |
+
"chunk_size_feed_forward": 0,
|
| 158 |
+
"default_output_length": 280,
|
| 159 |
+
"torch_dtype": "bfloat16",
|
| 160 |
+
"global_head_dim": 64,
|
| 161 |
+
"head_dim": 64,
|
| 162 |
+
"hidden_activation": "gelu_pytorch_tanh",
|
| 163 |
+
"hidden_size": 768,
|
| 164 |
+
"id2label": {
|
| 165 |
+
"0": "LABEL_0",
|
| 166 |
+
"1": "LABEL_1"
|
| 167 |
+
},
|
| 168 |
+
"initializer_range": 0.02,
|
| 169 |
+
"intermediate_size": 3072,
|
| 170 |
+
"is_encoder_decoder": false,
|
| 171 |
+
"label2id": {
|
| 172 |
+
"LABEL_0": 0,
|
| 173 |
+
"LABEL_1": 1
|
| 174 |
+
},
|
| 175 |
+
"max_position_embeddings": 131072,
|
| 176 |
+
"model_type": "gemma4_vision",
|
| 177 |
+
"num_attention_heads": 12,
|
| 178 |
+
"num_hidden_layers": 16,
|
| 179 |
+
"num_key_value_heads": 12,
|
| 180 |
+
"output_attentions": false,
|
| 181 |
+
"output_hidden_states": false,
|
| 182 |
+
"patch_size": 16,
|
| 183 |
+
"pooling_kernel_size": 3,
|
| 184 |
+
"position_embedding_size": 10240,
|
| 185 |
+
"problem_type": null,
|
| 186 |
+
"return_dict": true,
|
| 187 |
+
"rms_norm_eps": 1e-06,
|
| 188 |
+
"rope_parameters": {
|
| 189 |
+
"rope_theta": 100.0,
|
| 190 |
+
"rope_type": "default"
|
| 191 |
+
},
|
| 192 |
+
"standardize": false,
|
| 193 |
+
"use_clipped_linears": true
|
| 194 |
+
},
|
| 195 |
+
"vision_soft_tokens_per_image": 280
|
| 196 |
+
}
|
docs/ARTIFACTS.md
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Artifact Manifest
|
| 2 |
+
|
| 3 |
+
Source workspace: /Users/robert/bibleai-backup/runpod_download_20260414/workspace
|
| 4 |
+
Release folder: /Users/robert/bibleai-backup/release/BibleAI-Gemma4-E4B-CPT-SFT-DPO-20260414
|
| 5 |
+
|
| 6 |
+
## HF merged model
|
| 7 |
+
- hf/config.json
|
| 8 |
+
- hf/model.safetensors
|
| 9 |
+
- hf/tokenizer.json
|
| 10 |
+
- hf/tokenizer_config.json
|
| 11 |
+
|
| 12 |
+
## GGUF
|
| 13 |
+
- gguf/final_merged.BF16.gguf
|
| 14 |
+
- gguf/final_merged.Q8_0.gguf
|
| 15 |
+
|
| 16 |
+
## Ollama
|
| 17 |
+
- ollama/Modelfile.q8
|
| 18 |
+
- ollama/Modelfile.bf16
|
| 19 |
+
- ollama/Modelfile.canonical_project_reference
|
| 20 |
+
|
| 21 |
+
## Final adapters
|
| 22 |
+
- adapters/sft_final/*
|
| 23 |
+
- adapters/dpo_final/*
|
| 24 |
+
|
| 25 |
+
## Logs
|
| 26 |
+
- logs/train_resume_20260414_210603.log
|
| 27 |
+
- logs/dpo_fresh_20260415_003821.log
|
docs/PUBLISHING.md
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Hugging Face Publish
|
| 2 |
+
|
| 3 |
+
## 1) Go to release folder
|
| 4 |
+
|
| 5 |
+
```bash
|
| 6 |
+
cd /Users/robert/bibleai-backup/release/BibleAI-Gemma4-E4B-CPT-SFT-DPO-20260414
|
| 7 |
+
```
|
| 8 |
+
|
| 9 |
+
## 2) Verify GGUF checksums
|
| 10 |
+
|
| 11 |
+
```bash
|
| 12 |
+
sha256sum gguf/final_merged.BF16.gguf gguf/final_merged.Q8_0.gguf
|
| 13 |
+
```
|
| 14 |
+
|
| 15 |
+
Expected:
|
| 16 |
+
|
| 17 |
+
- `e07e38d28d3032d3b438b7b8b90cbf4cf5e66177b52e8f60673cac3586dc10a1` `final_merged.BF16.gguf`
|
| 18 |
+
- `3c7f5f9caf080fe44720f16b5f4b5e7e95a097d6be3d1d8d89aea22e8574bad1` `final_merged.Q8_0.gguf`
|
| 19 |
+
|
| 20 |
+
## 3) Log in to Hugging Face
|
| 21 |
+
|
| 22 |
+
```bash
|
| 23 |
+
huggingface-cli login
|
| 24 |
+
```
|
| 25 |
+
|
| 26 |
+
or
|
| 27 |
+
|
| 28 |
+
```bash
|
| 29 |
+
hf auth login
|
| 30 |
+
```
|
| 31 |
+
|
| 32 |
+
## 4) Push the full release
|
| 33 |
+
|
| 34 |
+
```bash
|
| 35 |
+
HF_REPO=<your-username-or-org>/<your-model-repo> ./scripts/upload_to_hf.sh
|
| 36 |
+
```
|
| 37 |
+
|
| 38 |
+
Example:
|
| 39 |
+
|
| 40 |
+
```bash
|
| 41 |
+
HF_REPO=rhemabible/BibleAI-Gemma4-E4B-CPT-SFT-DPO ./scripts/upload_to_hf.sh
|
| 42 |
+
```
|
| 43 |
+
|
| 44 |
+
This pushes:
|
| 45 |
+
|
| 46 |
+
- HF merged model (`hf/`)
|
| 47 |
+
- GGUF (`gguf/` BF16 + Q8_0)
|
| 48 |
+
- Ollama Modelfiles (`ollama/`)
|
| 49 |
+
- adapters, checksums, logs, docs
|
docs/RELEASE_SNAPSHOT.txt
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Release folder: /Users/robert/bibleai-backup/release/BibleAI-Gemma4-E4B-CPT-SFT-DPO-20260414
|
| 2 |
+
|
| 3 |
+
37G .
|
| 4 |
+
|
| 5 |
+
./README.md
|
| 6 |
+
./checksums/sha256.txt
|
| 7 |
+
./docs/ARTIFACTS.md
|
| 8 |
+
./docs/RELEASE_SNAPSHOT.txt
|
| 9 |
+
./gguf/final_merged.BF16.gguf
|
| 10 |
+
./gguf/final_merged.Q8_0.gguf
|
| 11 |
+
./hf/config.json
|
| 12 |
+
./hf/model.safetensors
|
| 13 |
+
./hf/tokenizer.json
|
| 14 |
+
./hf/tokenizer_config.json
|
| 15 |
+
./logs/dpo_fresh_20260415_003821.log
|
| 16 |
+
./logs/train_resume_20260414_210603.log
|
| 17 |
+
./ollama/Modelfile.bf16
|
| 18 |
+
./ollama/Modelfile.canonical_project_reference
|
| 19 |
+
./ollama/Modelfile.q8
|
| 20 |
+
./scripts/upload_to_hf.sh
|
gguf/final_merged.BF16.gguf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e07e38d28d3032d3b438b7b8b90cbf4cf5e66177b52e8f60673cac3586dc10a1
|
| 3 |
+
size 15053078304
|
gguf/final_merged.Q8_0.gguf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3c7f5f9caf080fe44720f16b5f4b5e7e95a097d6be3d1d8d89aea22e8574bad1
|
| 3 |
+
size 8031223584
|
logs/dpo_fresh_20260415_003821.log
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 0 |
0%| | 0/242 [00:00<?, ?it/s]Caching is incompatible with gradient checkpointing in Gemma4TextDecoderLayer. Setting `past_key_values=None`.
|
|
|
|
| 1 |
0%| | 1/242 [00:11<46:43, 11.63s/it]
|
| 2 |
1%| | 2/242 [00:15<28:22, 7.10s/it]
|
| 3 |
1%| | 3/242 [00:19<22:28, 5.64s/it]
|
| 4 |
2%|▏ | 4/242 [00:23<19:36, 4.94s/it]
|
| 5 |
2%|▏ | 5/242 [00:27<18:03, 4.57s/it]
|
| 6 |
2%|▏ | 6/242 [00:31<17:03, 4.34s/it]
|
| 7 |
3%|▎ | 7/242 [00:35<16:55, 4.32s/it]
|
| 8 |
3%|▎ | 8/242 [00:39<16:18, 4.18s/it]
|
| 9 |
4%|▎ | 9/242 [00:43<15:53, 4.09s/it]
|
| 10 |
4%|▍ | 10/242 [00:47<15:34, 4.03s/it]
|
| 11 |
|
| 12 |
4%|▍ | 10/242 [00:47<15:34, 4.03s/it]
|
| 13 |
5%|▍ | 11/242 [00:50<15:21, 3.99s/it]
|
| 14 |
5%|▍ | 12/242 [00:54<15:11, 3.96s/it]
|
| 15 |
5%|▌ | 13/242 [00:58<15:09, 3.97s/it]
|
| 16 |
6%|▌ | 14/242 [01:02<14:57, 3.94s/it]
|
| 17 |
6%|▌ | 15/242 [01:06<14:48, 3.91s/it]
|
| 18 |
7%|▋ | 16/242 [01:10<14:38, 3.89s/it]
|
| 19 |
7%|▋ | 17/242 [01:14<14:59, 4.00s/it]
|
| 20 |
7%|▋ | 18/242 [01:18<14:45, 3.95s/it]
|
| 21 |
8%|▊ | 19/242 [01:22<14:33, 3.92s/it]
|
| 22 |
8%|▊ | 20/242 [01:26<14:25, 3.90s/it]
|
| 23 |
|
| 24 |
8%|▊ | 20/242 [01:26<14:25, 3.90s/it]
|
| 25 |
9%|▊ | 21/242 [01:30<14:17, 3.88s/it]
|
| 26 |
9%|▉ | 22/242 [01:33<14:09, 3.86s/it]
|
| 27 |
10%|▉ | 23/242 [01:37<14:04, 3.86s/it]
|
| 28 |
10%|▉ | 24/242 [01:41<13:59, 3.85s/it]
|
| 29 |
10%|█ | 25/242 [01:45<13:54, 3.85s/it]
|
| 30 |
11%|█ | 26/242 [01:49<13:49, 3.84s/it]
|
| 31 |
11%|█ | 27/242 [01:53<13:43, 3.83s/it]
|
| 32 |
12%|█▏ | 28/242 [01:56<13:39, 3.83s/it]
|
| 33 |
12%|█▏ | 29/242 [02:00<13:35, 3.83s/it]
|
| 34 |
12%|█▏ | 30/242 [02:04<13:30, 3.82s/it]
|
| 35 |
|
| 36 |
12%|█▏ | 30/242 [02:04<13:30, 3.82s/it]
|
| 37 |
13%|█▎ | 31/242 [02:08<13:27, 3.83s/it]
|
| 38 |
13%|█▎ | 32/242 [02:12<13:23, 3.82s/it]
|
| 39 |
14%|█▎ | 33/242 [02:15<13:19, 3.83s/it]
|
| 40 |
14%|█▍ | 34/242 [02:19<13:22, 3.86s/it]
|
| 41 |
14%|█▍ | 35/242 [02:23<13:17, 3.85s/it]
|
| 42 |
15%|█▍ | 36/242 [02:27<13:11, 3.84s/it]
|
| 43 |
15%|█▌ | 37/242 [02:31<13:07, 3.84s/it]
|
| 44 |
16%|█▌ | 38/242 [02:35<13:04, 3.84s/it]
|
| 45 |
16%|█▌ | 39/242 [02:39<13:01, 3.85s/it]
|
| 46 |
17%|█▋ | 40/242 [02:42<12:55, 3.84s/it]
|
| 47 |
|
| 48 |
17%|█▋ | 40/242 [02:42<12:55, 3.84s/it]
|
| 49 |
17%|█▋ | 41/242 [02:46<12:51, 3.84s/it]
|
| 50 |
17%|█▋ | 42/242 [02:50<12:47, 3.84s/it]
|
| 51 |
18%|█▊ | 43/242 [02:54<12:43, 3.84s/it]
|
| 52 |
18%|█▊ | 44/242 [02:58<12:39, 3.84s/it]
|
| 53 |
19%|█▊ | 45/242 [03:02<12:34, 3.83s/it]
|
| 54 |
19%|█▉ | 46/242 [03:05<12:30, 3.83s/it]
|
| 55 |
19%|█▉ | 47/242 [03:09<12:27, 3.83s/it]
|
| 56 |
20%|█▉ | 48/242 [03:13<12:22, 3.83s/it]
|
| 57 |
20%|██ | 49/242 [03:17<12:18, 3.83s/it]
|
| 58 |
21%|██ | 50/242 [03:21<12:18, 3.85s/it]
|
| 59 |
|
| 60 |
21%|██ | 50/242 [03:21<12:18, 3.85s/it]
|
| 61 |
21%|██ | 51/242 [03:26<13:56, 4.38s/it]
|
| 62 |
21%|██▏ | 52/242 [03:30<13:24, 4.23s/it]
|
| 63 |
22%|██▏ | 53/242 [03:34<13:01, 4.13s/it]
|
| 64 |
22%|██▏ | 54/242 [03:38<12:42, 4.06s/it]
|
| 65 |
23%|██▎ | 55/242 [03:42<12:30, 4.01s/it]
|
| 66 |
23%|██▎ | 56/242 [03:46<12:18, 3.97s/it]
|
| 67 |
24%|██▎ | 57/242 [03:50<12:10, 3.95s/it]
|
| 68 |
24%|██▍ | 58/242 [03:54<12:03, 3.93s/it]
|
| 69 |
24%|██▍ | 59/242 [03:58<11:57, 3.92s/it]
|
| 70 |
25%|██▍ | 60/242 [04:01<11:51, 3.91s/it]
|
| 71 |
|
| 72 |
25%|██▍ | 60/242 [04:01<11:51, 3.91s/it]
|
| 73 |
25%|██▌ | 61/242 [04:05<11:45, 3.90s/it]
|
| 74 |
26%|██▌ | 62/242 [04:09<11:40, 3.89s/it]
|
| 75 |
26%|██▌ | 63/242 [04:13<11:34, 3.88s/it]
|
| 76 |
26%|██▋ | 64/242 [04:17<11:29, 3.88s/it]
|
| 77 |
27%|██▋ | 65/242 [04:21<11:25, 3.87s/it]
|
| 78 |
27%|██▋ | 66/242 [04:25<11:21, 3.87s/it]
|
| 79 |
28%|██▊ | 67/242 [04:28<11:16, 3.87s/it]
|
| 80 |
28%|██▊ | 68/242 [04:32<11:13, 3.87s/it]
|
| 81 |
29%|██▊ | 69/242 [04:36<11:10, 3.88s/it]
|
| 82 |
29%|██▉ | 70/242 [04:40<11:06, 3.88s/it]
|
| 83 |
|
| 84 |
29%|██▉ | 70/242 [04:40<11:06, 3.88s/it]
|
| 85 |
29%|██▉ | 71/242 [04:44<11:01, 3.87s/it]
|
| 86 |
30%|██▉ | 72/242 [04:48<10:57, 3.87s/it]
|
| 87 |
30%|███ | 73/242 [04:52<10:53, 3.87s/it]
|
| 88 |
31%|███ | 74/242 [04:56<10:49, 3.87s/it]
|
| 89 |
31%|███ | 75/242 [04:59<10:45, 3.87s/it]
|
| 90 |
31%|███▏ | 76/242 [05:03<10:41, 3.87s/it]
|
| 91 |
32%|███▏ | 77/242 [05:07<10:37, 3.86s/it]
|
| 92 |
32%|███▏ | 78/242 [05:11<10:33, 3.86s/it]
|
| 93 |
33%|███▎ | 79/242 [05:15<10:29, 3.86s/it]
|
| 94 |
33%|███▎ | 80/242 [05:19<10:24, 3.86s/it]
|
| 95 |
|
| 96 |
33%|███▎ | 80/242 [05:19<10:24, 3.86s/it]
|
| 97 |
33%|███▎ | 81/242 [05:23<10:20, 3.86s/it]
|
| 98 |
34%|███▍ | 82/242 [05:26<10:17, 3.86s/it]
|
| 99 |
34%|███▍ | 83/242 [05:30<10:14, 3.86s/it]
|
| 100 |
35%|███▍ | 84/242 [05:34<10:10, 3.87s/it]
|
| 101 |
35%|███▌ | 85/242 [05:38<10:07, 3.87s/it]
|
| 102 |
36%|███▌ | 86/242 [05:42<10:02, 3.86s/it]
|
| 103 |
36%|███▌ | 87/242 [05:46<09:58, 3.86s/it]
|
| 104 |
36%|███▋ | 88/242 [05:50<09:54, 3.86s/it]
|
| 105 |
37%|███▋ | 89/242 [05:54<09:51, 3.86s/it]
|
| 106 |
37%|███▋ | 90/242 [05:57<09:47, 3.86s/it]
|
| 107 |
|
| 108 |
37%|███▋ | 90/242 [05:57<09:47, 3.86s/it]
|
| 109 |
38%|███▊ | 91/242 [06:01<09:42, 3.86s/it]
|
| 110 |
38%|███▊ | 92/242 [06:05<09:39, 3.86s/it]
|
| 111 |
38%|███▊ | 93/242 [06:09<09:34, 3.86s/it]
|
| 112 |
39%|███▉ | 94/242 [06:13<09:30, 3.85s/it]
|
| 113 |
39%|███▉ | 95/242 [06:17<09:26, 3.85s/it]
|
| 114 |
40%|███▉ | 96/242 [06:20<09:22, 3.85s/it]
|
| 115 |
40%|████ | 97/242 [06:24<09:19, 3.86s/it]
|
| 116 |
40%|████ | 98/242 [06:28<09:15, 3.86s/it]
|
| 117 |
41%|████ | 99/242 [06:32<09:11, 3.86s/it]
|
| 118 |
41%|████▏ | 100/242 [06:36<09:07, 3.86s/it]
|
| 119 |
|
| 120 |
41%|████▏ | 100/242 [06:36<09:07, 3.86s/it]
|
| 121 |
42%|████▏ | 101/242 [06:42<10:35, 4.51s/it]
|
| 122 |
42%|████▏ | 102/242 [06:46<10:04, 4.32s/it]
|
| 123 |
43%|████▎ | 103/242 [06:50<09:41, 4.18s/it]
|
| 124 |
43%|████▎ | 104/242 [06:54<09:23, 4.09s/it]
|
| 125 |
43%|████▎ | 105/242 [06:57<09:10, 4.02s/it]
|
| 126 |
44%|████▍ | 106/242 [07:01<09:00, 3.97s/it]
|
| 127 |
44%|████▍ | 107/242 [07:05<08:51, 3.94s/it]
|
| 128 |
45%|████▍ | 108/242 [07:09<08:44, 3.92s/it]
|
| 129 |
45%|████▌ | 109/242 [07:13<08:38, 3.90s/it]
|
| 130 |
45%|████▌ | 110/242 [07:17<08:33, 3.89s/it]
|
| 131 |
|
| 132 |
45%|████▌ | 110/242 [07:17<08:33, 3.89s/it]
|
| 133 |
46%|████▌ | 111/242 [07:21<08:29, 3.89s/it]
|
| 134 |
46%|████▋ | 112/242 [07:24<08:24, 3.88s/it]
|
| 135 |
47%|████▋ | 113/242 [07:28<08:19, 3.87s/it]
|
| 136 |
47%|████▋ | 114/242 [07:32<08:14, 3.87s/it]
|
| 137 |
48%|████▊ | 115/242 [07:36<08:11, 3.87s/it]
|
| 138 |
48%|████▊ | 116/242 [07:40<08:06, 3.86s/it]
|
| 139 |
48%|████▊ | 117/242 [07:44<08:02, 3.86s/it]
|
| 140 |
49%|████▉ | 118/242 [07:48<07:58, 3.86s/it]
|
| 141 |
49%|████▉ | 119/242 [07:51<07:54, 3.86s/it]
|
| 142 |
50%|████▉ | 120/242 [07:55<07:50, 3.86s/it]
|
| 143 |
|
| 144 |
50%|████▉ | 120/242 [07:55<07:50, 3.86s/it]
|
| 145 |
50%|█████ | 121/242 [07:59<07:45, 3.85s/it]
|
| 146 |
50%|█████ | 122/242 [08:03<07:41, 3.85s/it]
|
| 147 |
51%|█████ | 123/242 [08:07<07:37, 3.85s/it]
|
| 148 |
51%|█████ | 124/242 [08:11<07:34, 3.85s/it]
|
| 149 |
52%|█████▏ | 125/242 [08:15<07:30, 3.85s/it]
|
| 150 |
52%|█████▏ | 126/242 [08:18<07:26, 3.85s/it]
|
| 151 |
52%|█████▏ | 127/242 [08:22<07:22, 3.85s/it]
|
| 152 |
53%|█████▎ | 128/242 [08:26<07:18, 3.85s/it]
|
| 153 |
53%|█████▎ | 129/242 [08:30<07:15, 3.85s/it]
|
| 154 |
54%|█████▎ | 130/242 [08:34<07:11, 3.85s/it]
|
| 155 |
|
| 156 |
54%|█████▎ | 130/242 [08:34<07:11, 3.85s/it]
|
| 157 |
54%|█████▍ | 131/242 [08:38<07:07, 3.85s/it]
|
| 158 |
55%|█████▍ | 132/242 [08:42<07:03, 3.85s/it]
|
| 159 |
55%|█████▍ | 133/242 [08:45<06:59, 3.85s/it]
|
| 160 |
55%|█████▌ | 134/242 [08:49<06:56, 3.85s/it]
|
| 161 |
56%|█████▌ | 135/242 [08:53<06:52, 3.85s/it]
|
| 162 |
56%|█████▌ | 136/242 [08:57<06:48, 3.85s/it]
|
| 163 |
57%|█████▋ | 137/242 [09:01<06:44, 3.85s/it]
|
| 164 |
57%|█████▋ | 138/242 [09:05<06:40, 3.85s/it]
|
| 165 |
57%|█████▋ | 139/242 [09:09<06:37, 3.86s/it]
|
| 166 |
58%|█████▊ | 140/242 [09:12<06:33, 3.86s/it]
|
| 167 |
|
| 168 |
58%|█████▊ | 140/242 [09:12<06:33, 3.86s/it]
|
| 169 |
58%|█████▊ | 141/242 [09:16<06:29, 3.85s/it]
|
| 170 |
59%|█████▊ | 142/242 [09:20<06:25, 3.85s/it]
|
| 171 |
59%|█████▉ | 143/242 [09:24<06:21, 3.85s/it]
|
| 172 |
60%|█████▉ | 144/242 [09:28<06:17, 3.85s/it]
|
| 173 |
60%|█████▉ | 145/242 [09:32<06:13, 3.85s/it]
|
| 174 |
60%|██████ | 146/242 [09:35<06:09, 3.85s/it]
|
| 175 |
61%|██████ | 147/242 [09:39<06:06, 3.85s/it]
|
| 176 |
61%|██████ | 148/242 [09:43<06:02, 3.85s/it]
|
| 177 |
62%|██████▏ | 149/242 [09:47<05:58, 3.85s/it]
|
| 178 |
62%|██████▏ | 150/242 [09:51<05:54, 3.86s/it]
|
| 179 |
|
| 180 |
62%|██████▏ | 150/242 [09:51<05:54, 3.86s/it]
|
| 181 |
62%|██████▏ | 151/242 [09:57<06:42, 4.43s/it]
|
| 182 |
63%|██████▎ | 152/242 [10:01<06:24, 4.27s/it]
|
| 183 |
63%|██████▎ | 153/242 [10:04<06:09, 4.15s/it]
|
| 184 |
64%|██████▎ | 154/242 [10:08<05:58, 4.07s/it]
|
| 185 |
64%|██████▍ | 155/242 [10:12<05:48, 4.01s/it]
|
| 186 |
64%|██████▍ | 156/242 [10:16<05:40, 3.96s/it]
|
| 187 |
65%|██████▍ | 157/242 [10:20<05:34, 3.94s/it]
|
| 188 |
65%|██████▌ | 158/242 [10:24<05:29, 3.92s/it]
|
| 189 |
66%|██████▌ | 159/242 [10:28<05:24, 3.91s/it]
|
| 190 |
66%|██████▌ | 160/242 [10:32<05:19, 3.90s/it]
|
| 191 |
|
| 192 |
66%|██████▌ | 160/242 [10:32<05:19, 3.90s/it]
|
| 193 |
67%|██████▋ | 161/242 [10:35<05:15, 3.89s/it]
|
| 194 |
67%|██████▋ | 162/242 [10:39<05:10, 3.88s/it]
|
| 195 |
67%|██████▋ | 163/242 [10:43<05:07, 3.89s/it]
|
| 196 |
68%|██████▊ | 164/242 [10:47<05:02, 3.88s/it]
|
| 197 |
68%|██████▊ | 165/242 [10:51<04:58, 3.88s/it]
|
| 198 |
69%|██████▊ | 166/242 [10:55<04:54, 3.88s/it]
|
| 199 |
69%|██████▉ | 167/242 [10:59<04:50, 3.88s/it]
|
| 200 |
69%|██████▉ | 168/242 [11:03<04:47, 3.88s/it]
|
| 201 |
70%|██████▉ | 169/242 [11:06<04:43, 3.88s/it]
|
| 202 |
70%|███████ | 170/242 [11:10<04:38, 3.87s/it]
|
| 203 |
|
| 204 |
70%|███████ | 170/242 [11:10<04:38, 3.87s/it]
|
| 205 |
71%|███████ | 171/242 [11:14<04:34, 3.87s/it]
|
| 206 |
71%|███████ | 172/242 [11:18<04:31, 3.87s/it]
|
| 207 |
71%|███████▏ | 173/242 [11:22<04:27, 3.87s/it]
|
| 208 |
72%|███████▏ | 174/242 [11:26<04:23, 3.87s/it]
|
| 209 |
72%|███████▏ | 175/242 [11:30<04:19, 3.88s/it]
|
| 210 |
73%|███████▎ | 176/242 [11:34<04:15, 3.87s/it]
|
| 211 |
73%|███████▎ | 177/242 [11:37<04:12, 3.88s/it]
|
| 212 |
74%|███████▎ | 178/242 [11:41<04:08, 3.88s/it]
|
| 213 |
74%|███████▍ | 179/242 [11:45<04:04, 3.87s/it]
|
| 214 |
74%|███████▍ | 180/242 [11:49<04:00, 3.87s/it]
|
| 215 |
|
| 216 |
74%|███████▍ | 180/242 [11:49<04:00, 3.87s/it]
|
| 217 |
75%|███████▍ | 181/242 [11:53<03:55, 3.87s/it]
|
| 218 |
75%|███████▌ | 182/242 [11:57<03:51, 3.86s/it]
|
| 219 |
76%|███████▌ | 183/242 [12:01<03:47, 3.86s/it]
|
| 220 |
76%|███████▌ | 184/242 [12:04<03:43, 3.86s/it]
|
| 221 |
76%|███████▋ | 185/242 [12:08<03:39, 3.86s/it]
|
| 222 |
77%|███████▋ | 186/242 [12:12<03:35, 3.86s/it]
|
| 223 |
77%|███████▋ | 187/242 [12:16<03:32, 3.86s/it]
|
| 224 |
78%|███████▊ | 188/242 [12:20<03:28, 3.87s/it]
|
| 225 |
78%|███████▊ | 189/242 [12:24<03:24, 3.86s/it]
|
| 226 |
79%|███████▊ | 190/242 [12:28<03:26, 3.98s/it]
|
| 227 |
|
| 228 |
79%|███████▊ | 190/242 [12:28<03:26, 3.98s/it]
|
| 229 |
79%|███████▉ | 191/242 [12:32<03:21, 3.94s/it]
|
| 230 |
79%|███████▉ | 192/242 [12:36<03:16, 3.93s/it]
|
| 231 |
80%|███████▉ | 193/242 [12:40<03:11, 3.91s/it]
|
| 232 |
80%|████████ | 194/242 [12:44<03:07, 3.90s/it]
|
| 233 |
81%|████████ | 195/242 [12:47<03:02, 3.89s/it]
|
| 234 |
81%|████████ | 196/242 [12:51<02:58, 3.88s/it]
|
| 235 |
81%|████████▏ | 197/242 [12:55<02:54, 3.88s/it]
|
| 236 |
82%|████████▏ | 198/242 [12:59<02:50, 3.87s/it]
|
| 237 |
82%|████████▏ | 199/242 [13:03<02:46, 3.86s/it]
|
| 238 |
83%|████████▎ | 200/242 [13:07<02:42, 3.86s/it]
|
| 239 |
|
| 240 |
83%|████████▎ | 200/242 [13:07<02:42, 3.86s/it]
|
| 241 |
83%|████████▎ | 201/242 [13:12<03:02, 4.45s/it]
|
| 242 |
83%|████████▎ | 202/242 [13:16<02:51, 4.28s/it]
|
| 243 |
84%|████████▍ | 203/242 [13:20<02:41, 4.15s/it]
|
| 244 |
84%|████████▍ | 204/242 [13:24<02:34, 4.07s/it]
|
| 245 |
85%|████████▍ | 205/242 [13:28<02:28, 4.00s/it]
|
| 246 |
85%|████████▌ | 206/242 [13:32<02:22, 3.96s/it]
|
| 247 |
86%|████████▌ | 207/242 [13:36<02:17, 3.93s/it]
|
| 248 |
86%|████████▌ | 208/242 [13:40<02:12, 3.91s/it]
|
| 249 |
86%|████████▋ | 209/242 [13:43<02:08, 3.89s/it]
|
| 250 |
87%|████████▋ | 210/242 [13:47<02:04, 3.89s/it]
|
| 251 |
|
| 252 |
87%|████████▋ | 210/242 [13:47<02:04, 3.89s/it]
|
| 253 |
87%|████████▋ | 211/242 [13:51<02:00, 3.88s/it]
|
| 254 |
88%|████████▊ | 212/242 [13:55<01:56, 3.88s/it]
|
| 255 |
88%|████████▊ | 213/242 [13:59<01:52, 3.88s/it]
|
| 256 |
88%|████████▊ | 214/242 [14:03<01:48, 3.87s/it]
|
| 257 |
89%|████████▉ | 215/242 [14:07<01:44, 3.87s/it]
|
| 258 |
89%|████████▉ | 216/242 [14:10<01:40, 3.87s/it]
|
| 259 |
90%|████████▉ | 217/242 [14:14<01:36, 3.86s/it]
|
| 260 |
90%|█████████ | 218/242 [14:18<01:32, 3.87s/it]
|
| 261 |
90%|█████████ | 219/242 [14:22<01:29, 3.87s/it]
|
| 262 |
91%|█████████ | 220/242 [14:26<01:24, 3.86s/it]
|
| 263 |
|
| 264 |
91%|█████████ | 220/242 [14:26<01:24, 3.86s/it]
|
| 265 |
91%|█████████▏| 221/242 [14:30<01:21, 3.86s/it]
|
| 266 |
92%|█████████▏| 222/242 [14:34<01:17, 3.86s/it]
|
| 267 |
92%|█████████▏| 223/242 [14:37<01:13, 3.86s/it]
|
| 268 |
93%|█████████▎| 224/242 [14:41<01:09, 3.86s/it]
|
| 269 |
93%|█████████▎| 225/242 [14:45<01:05, 3.86s/it]
|
| 270 |
93%|█████████▎| 226/242 [14:49<01:01, 3.85s/it]
|
| 271 |
94%|█████████▍| 227/242 [14:53<00:57, 3.85s/it]
|
| 272 |
94%|█████████▍| 228/242 [14:57<00:53, 3.85s/it]
|
| 273 |
95%|█████████▍| 229/242 [15:01<00:50, 3.85s/it]
|
| 274 |
95%|█████████▌| 230/242 [15:04<00:46, 3.85s/it]
|
| 275 |
|
| 276 |
95%|█████████▌| 230/242 [15:04<00:46, 3.85s/it]
|
| 277 |
95%|█████████▌| 231/242 [15:08<00:42, 3.86s/it]
|
| 278 |
96%|█████████▌| 232/242 [15:12<00:38, 3.87s/it]
|
| 279 |
96%|█████████▋| 233/242 [15:16<00:34, 3.88s/it]
|
| 280 |
97%|█████████▋| 234/242 [15:20<00:31, 3.89s/it]
|
| 281 |
97%|█████████▋| 235/242 [15:24<00:27, 3.89s/it]
|
| 282 |
98%|█████████▊| 236/242 [15:28<00:23, 3.90s/it]
|
| 283 |
98%|█████████▊| 237/242 [15:32<00:19, 3.91s/it]
|
| 284 |
98%|█████████▊| 238/242 [15:36<00:15, 3.92s/it]
|
| 285 |
99%|█████████▉| 239/242 [15:40<00:11, 3.92s/it]
|
| 286 |
99%|█████████▉| 240/242 [15:44<00:07, 3.92s/it]
|
| 287 |
|
| 288 |
99%|█████████▉| 240/242 [15:44<00:07, 3.92s/it]
|
| 289 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/usr/local/lib/python3.11/dist-packages/requests/__init__.py:113: RequestsDependencyWarning: urllib3 (2.2.3) or chardet (7.4.3)/charset_normalizer (3.3.2) doesn't match a supported version!
|
| 2 |
+
warnings.warn(
|
| 3 |
+
🦥 Unsloth: Will patch your computer to enable 2x faster free finetuning.
|
| 4 |
+
Unsloth: Your Flash Attention 2 installation seems to be broken. Using Xformers instead. No performance changes will be seen.
|
| 5 |
+
🦥 Unsloth Zoo will now patch everything to make training faster!
|
| 6 |
+
============================================================
|
| 7 |
+
BibleAI DPO Training (Stage 3)
|
| 8 |
+
============================================================
|
| 9 |
+
Base model: /workspace/outputs/sft_cpt_merged
|
| 10 |
+
DPO data: /workspace/data/dpo_pairs.jsonl
|
| 11 |
+
Output: /workspace/outputs/dpo
|
| 12 |
+
Epochs: 2
|
| 13 |
+
Beta: 0.1
|
| 14 |
+
LoRA rank: 32
|
| 15 |
+
Learning rate: 5e-06
|
| 16 |
+
|
| 17 |
+
Loading SFT model...
|
| 18 |
+
==((====))== Unsloth 2026.4.4: Fast Gemma4 patching. Transformers: 5.5.0.
|
| 19 |
+
\\ /| NVIDIA A100-SXM4-80GB. Num GPUs = 1. Max memory: 79.25 GB. Platform: Linux.
|
| 20 |
+
O^O/ \_/ \ Torch: 2.7.0+cu126. CUDA: 8.0. CUDA Toolkit: 12.6. Triton: 3.3.0
|
| 21 |
+
\ / Bfloat16 = TRUE. FA [Xformers = 0.0.30. FA2 = False]
|
| 22 |
+
"-____-" Free license: http://github.com/unslothai/unsloth
|
| 23 |
+
Unsloth: Fast downloading is enabled - ignore downloading bars which are red colored!
|
| 24 |
+
|
| 25 |
+
The tokenizer you are loading from '/workspace/outputs/sft_cpt_merged' with an incorrect regex pattern: https://huggingface.co/mistralai/Mistral-Small-3.1-24B-Instruct-2503/discussions/84#69121093e8b480e709447d5e. This will lead to incorrect tokenization. You should set the `fix_mistral_regex=True` flag when loading this tokenizer to fix this issue.
|
| 26 |
+
Unsloth: Dropout = 0 is supported for fast patching. You are using dropout = 0.05.
|
| 27 |
+
Unsloth will patch all other layers, except LoRA matrices, causing a performance hit.
|
| 28 |
+
The tokenizer has new PAD/BOS/EOS tokens that differ from the model config and generation config. The model config and generation config were aligned accordingly, being updated with the tokenizer's values. Updated tokens: {'eos_token_id': 1, 'bos_token_id': 2}.
|
| 29 |
+
==((====))== Unsloth - 2x faster free finetuning | Num GPUs used = 1
|
| 30 |
+
\\ /| Num examples = 967 | Num Epochs = 2 | Total steps = 242
|
| 31 |
+
O^O/ \_/ \ Batch size per device = 2 | Gradient accumulation steps = 4
|
| 32 |
+
\ / Data Parallel GPUs = 1 | Total batch size (2 x 4 x 1) = 8
|
| 33 |
+
"-____-" Trainable parameters = 84,803,584 of 8,080,960,032 (1.05% trained)
|
| 34 |
+
Tokenizer: GemmaTokenizer → text: GemmaTokenizer
|
| 35 |
+
Trainable: 84,803,584 / 6,064,089,632 (1.40%)
|
| 36 |
+
Loading DPO data from /workspace/data/dpo_pairs.jsonl...
|
| 37 |
+
DPO pairs: 967
|
| 38 |
+
|
| 39 |
+
Starting DPO training...
|
| 40 |
+
|
| 41 |
0%| | 0/242 [00:00<?, ?it/s]Caching is incompatible with gradient checkpointing in Gemma4TextDecoderLayer. Setting `past_key_values=None`.
|
| 42 |
+
|
| 43 |
0%| | 1/242 [00:11<46:43, 11.63s/it]
|
| 44 |
1%| | 2/242 [00:15<28:22, 7.10s/it]
|
| 45 |
1%| | 3/242 [00:19<22:28, 5.64s/it]
|
| 46 |
2%|▏ | 4/242 [00:23<19:36, 4.94s/it]
|
| 47 |
2%|▏ | 5/242 [00:27<18:03, 4.57s/it]
|
| 48 |
2%|▏ | 6/242 [00:31<17:03, 4.34s/it]
|
| 49 |
3%|▎ | 7/242 [00:35<16:55, 4.32s/it]
|
| 50 |
3%|▎ | 8/242 [00:39<16:18, 4.18s/it]
|
| 51 |
4%|▎ | 9/242 [00:43<15:53, 4.09s/it]
|
| 52 |
4%|▍ | 10/242 [00:47<15:34, 4.03s/it]
|
| 53 |
|
| 54 |
4%|▍ | 10/242 [00:47<15:34, 4.03s/it]
|
| 55 |
5%|▍ | 11/242 [00:50<15:21, 3.99s/it]
|
| 56 |
5%|▍ | 12/242 [00:54<15:11, 3.96s/it]
|
| 57 |
5%|▌ | 13/242 [00:58<15:09, 3.97s/it]
|
| 58 |
6%|▌ | 14/242 [01:02<14:57, 3.94s/it]
|
| 59 |
6%|▌ | 15/242 [01:06<14:48, 3.91s/it]
|
| 60 |
7%|▋ | 16/242 [01:10<14:38, 3.89s/it]
|
| 61 |
7%|▋ | 17/242 [01:14<14:59, 4.00s/it]
|
| 62 |
7%|▋ | 18/242 [01:18<14:45, 3.95s/it]
|
| 63 |
8%|▊ | 19/242 [01:22<14:33, 3.92s/it]
|
| 64 |
8%|▊ | 20/242 [01:26<14:25, 3.90s/it]
|
| 65 |
|
| 66 |
8%|▊ | 20/242 [01:26<14:25, 3.90s/it]
|
| 67 |
9%|▊ | 21/242 [01:30<14:17, 3.88s/it]
|
| 68 |
9%|▉ | 22/242 [01:33<14:09, 3.86s/it]
|
| 69 |
10%|▉ | 23/242 [01:37<14:04, 3.86s/it]
|
| 70 |
10%|▉ | 24/242 [01:41<13:59, 3.85s/it]
|
| 71 |
10%|█ | 25/242 [01:45<13:54, 3.85s/it]
|
| 72 |
11%|█ | 26/242 [01:49<13:49, 3.84s/it]
|
| 73 |
11%|█ | 27/242 [01:53<13:43, 3.83s/it]
|
| 74 |
12%|█▏ | 28/242 [01:56<13:39, 3.83s/it]
|
| 75 |
12%|█▏ | 29/242 [02:00<13:35, 3.83s/it]
|
| 76 |
12%|█▏ | 30/242 [02:04<13:30, 3.82s/it]
|
| 77 |
|
| 78 |
12%|█▏ | 30/242 [02:04<13:30, 3.82s/it]
|
| 79 |
13%|█▎ | 31/242 [02:08<13:27, 3.83s/it]
|
| 80 |
13%|█▎ | 32/242 [02:12<13:23, 3.82s/it]
|
| 81 |
14%|█▎ | 33/242 [02:15<13:19, 3.83s/it]
|
| 82 |
14%|█▍ | 34/242 [02:19<13:22, 3.86s/it]
|
| 83 |
14%|█▍ | 35/242 [02:23<13:17, 3.85s/it]
|
| 84 |
15%|█▍ | 36/242 [02:27<13:11, 3.84s/it]
|
| 85 |
15%|█▌ | 37/242 [02:31<13:07, 3.84s/it]
|
| 86 |
16%|█▌ | 38/242 [02:35<13:04, 3.84s/it]
|
| 87 |
16%|█▌ | 39/242 [02:39<13:01, 3.85s/it]
|
| 88 |
17%|█▋ | 40/242 [02:42<12:55, 3.84s/it]
|
| 89 |
|
| 90 |
17%|█▋ | 40/242 [02:42<12:55, 3.84s/it]
|
| 91 |
17%|█▋ | 41/242 [02:46<12:51, 3.84s/it]
|
| 92 |
17%|█▋ | 42/242 [02:50<12:47, 3.84s/it]
|
| 93 |
18%|█▊ | 43/242 [02:54<12:43, 3.84s/it]
|
| 94 |
18%|█▊ | 44/242 [02:58<12:39, 3.84s/it]
|
| 95 |
19%|█▊ | 45/242 [03:02<12:34, 3.83s/it]
|
| 96 |
19%|█▉ | 46/242 [03:05<12:30, 3.83s/it]
|
| 97 |
19%|█▉ | 47/242 [03:09<12:27, 3.83s/it]
|
| 98 |
20%|█▉ | 48/242 [03:13<12:22, 3.83s/it]
|
| 99 |
20%|██ | 49/242 [03:17<12:18, 3.83s/it]
|
| 100 |
21%|██ | 50/242 [03:21<12:18, 3.85s/it]
|
| 101 |
|
| 102 |
21%|██ | 50/242 [03:21<12:18, 3.85s/it]
|
| 103 |
21%|██ | 51/242 [03:26<13:56, 4.38s/it]
|
| 104 |
21%|██▏ | 52/242 [03:30<13:24, 4.23s/it]
|
| 105 |
22%|██▏ | 53/242 [03:34<13:01, 4.13s/it]
|
| 106 |
22%|██▏ | 54/242 [03:38<12:42, 4.06s/it]
|
| 107 |
23%|██▎ | 55/242 [03:42<12:30, 4.01s/it]
|
| 108 |
23%|██▎ | 56/242 [03:46<12:18, 3.97s/it]
|
| 109 |
24%|██▎ | 57/242 [03:50<12:10, 3.95s/it]
|
| 110 |
24%|██▍ | 58/242 [03:54<12:03, 3.93s/it]
|
| 111 |
24%|██▍ | 59/242 [03:58<11:57, 3.92s/it]
|
| 112 |
25%|██▍ | 60/242 [04:01<11:51, 3.91s/it]
|
| 113 |
|
| 114 |
25%|██▍ | 60/242 [04:01<11:51, 3.91s/it]
|
| 115 |
25%|██▌ | 61/242 [04:05<11:45, 3.90s/it]
|
| 116 |
26%|██▌ | 62/242 [04:09<11:40, 3.89s/it]
|
| 117 |
26%|██▌ | 63/242 [04:13<11:34, 3.88s/it]
|
| 118 |
26%|██▋ | 64/242 [04:17<11:29, 3.88s/it]
|
| 119 |
27%|██▋ | 65/242 [04:21<11:25, 3.87s/it]
|
| 120 |
27%|██▋ | 66/242 [04:25<11:21, 3.87s/it]
|
| 121 |
28%|██▊ | 67/242 [04:28<11:16, 3.87s/it]
|
| 122 |
28%|██▊ | 68/242 [04:32<11:13, 3.87s/it]
|
| 123 |
29%|██▊ | 69/242 [04:36<11:10, 3.88s/it]
|
| 124 |
29%|██▉ | 70/242 [04:40<11:06, 3.88s/it]
|
| 125 |
|
| 126 |
29%|██▉ | 70/242 [04:40<11:06, 3.88s/it]
|
| 127 |
29%|██▉ | 71/242 [04:44<11:01, 3.87s/it]
|
| 128 |
30%|██▉ | 72/242 [04:48<10:57, 3.87s/it]
|
| 129 |
30%|███ | 73/242 [04:52<10:53, 3.87s/it]
|
| 130 |
31%|███ | 74/242 [04:56<10:49, 3.87s/it]
|
| 131 |
31%|███ | 75/242 [04:59<10:45, 3.87s/it]
|
| 132 |
31%|███▏ | 76/242 [05:03<10:41, 3.87s/it]
|
| 133 |
32%|███▏ | 77/242 [05:07<10:37, 3.86s/it]
|
| 134 |
32%|███▏ | 78/242 [05:11<10:33, 3.86s/it]
|
| 135 |
33%|███▎ | 79/242 [05:15<10:29, 3.86s/it]
|
| 136 |
33%|███▎ | 80/242 [05:19<10:24, 3.86s/it]
|
| 137 |
|
| 138 |
33%|███▎ | 80/242 [05:19<10:24, 3.86s/it]
|
| 139 |
33%|███▎ | 81/242 [05:23<10:20, 3.86s/it]
|
| 140 |
34%|███▍ | 82/242 [05:26<10:17, 3.86s/it]
|
| 141 |
34%|███▍ | 83/242 [05:30<10:14, 3.86s/it]
|
| 142 |
35%|███▍ | 84/242 [05:34<10:10, 3.87s/it]
|
| 143 |
35%|███▌ | 85/242 [05:38<10:07, 3.87s/it]
|
| 144 |
36%|███▌ | 86/242 [05:42<10:02, 3.86s/it]
|
| 145 |
36%|███▌ | 87/242 [05:46<09:58, 3.86s/it]
|
| 146 |
36%|███▋ | 88/242 [05:50<09:54, 3.86s/it]
|
| 147 |
37%|███▋ | 89/242 [05:54<09:51, 3.86s/it]
|
| 148 |
37%|███▋ | 90/242 [05:57<09:47, 3.86s/it]
|
| 149 |
|
| 150 |
37%|███▋ | 90/242 [05:57<09:47, 3.86s/it]
|
| 151 |
38%|███▊ | 91/242 [06:01<09:42, 3.86s/it]
|
| 152 |
38%|███▊ | 92/242 [06:05<09:39, 3.86s/it]
|
| 153 |
38%|███▊ | 93/242 [06:09<09:34, 3.86s/it]
|
| 154 |
39%|███▉ | 94/242 [06:13<09:30, 3.85s/it]
|
| 155 |
39%|███▉ | 95/242 [06:17<09:26, 3.85s/it]
|
| 156 |
40%|███▉ | 96/242 [06:20<09:22, 3.85s/it]
|
| 157 |
40%|████ | 97/242 [06:24<09:19, 3.86s/it]
|
| 158 |
40%|████ | 98/242 [06:28<09:15, 3.86s/it]
|
| 159 |
41%|████ | 99/242 [06:32<09:11, 3.86s/it]
|
| 160 |
41%|████▏ | 100/242 [06:36<09:07, 3.86s/it]
|
| 161 |
|
| 162 |
41%|████▏ | 100/242 [06:36<09:07, 3.86s/it]
|
| 163 |
42%|████▏ | 101/242 [06:42<10:35, 4.51s/it]
|
| 164 |
42%|████▏ | 102/242 [06:46<10:04, 4.32s/it]
|
| 165 |
43%|████▎ | 103/242 [06:50<09:41, 4.18s/it]
|
| 166 |
43%|████▎ | 104/242 [06:54<09:23, 4.09s/it]
|
| 167 |
43%|████▎ | 105/242 [06:57<09:10, 4.02s/it]
|
| 168 |
44%|████▍ | 106/242 [07:01<09:00, 3.97s/it]
|
| 169 |
44%|████▍ | 107/242 [07:05<08:51, 3.94s/it]
|
| 170 |
45%|████▍ | 108/242 [07:09<08:44, 3.92s/it]
|
| 171 |
45%|████▌ | 109/242 [07:13<08:38, 3.90s/it]
|
| 172 |
45%|████▌ | 110/242 [07:17<08:33, 3.89s/it]
|
| 173 |
|
| 174 |
45%|████▌ | 110/242 [07:17<08:33, 3.89s/it]
|
| 175 |
46%|████▌ | 111/242 [07:21<08:29, 3.89s/it]
|
| 176 |
46%|████▋ | 112/242 [07:24<08:24, 3.88s/it]
|
| 177 |
47%|████▋ | 113/242 [07:28<08:19, 3.87s/it]
|
| 178 |
47%|████▋ | 114/242 [07:32<08:14, 3.87s/it]
|
| 179 |
48%|████▊ | 115/242 [07:36<08:11, 3.87s/it]
|
| 180 |
48%|████▊ | 116/242 [07:40<08:06, 3.86s/it]
|
| 181 |
48%|████▊ | 117/242 [07:44<08:02, 3.86s/it]
|
| 182 |
49%|████▉ | 118/242 [07:48<07:58, 3.86s/it]
|
| 183 |
49%|████▉ | 119/242 [07:51<07:54, 3.86s/it]
|
| 184 |
50%|████▉ | 120/242 [07:55<07:50, 3.86s/it]
|
| 185 |
|
| 186 |
50%|████▉ | 120/242 [07:55<07:50, 3.86s/it]
|
| 187 |
50%|█████ | 121/242 [07:59<07:45, 3.85s/it]
|
| 188 |
50%|█████ | 122/242 [08:03<07:41, 3.85s/it]
|
| 189 |
51%|█████ | 123/242 [08:07<07:37, 3.85s/it]
|
| 190 |
51%|█████ | 124/242 [08:11<07:34, 3.85s/it]
|
| 191 |
52%|█████▏ | 125/242 [08:15<07:30, 3.85s/it]
|
| 192 |
52%|█████▏ | 126/242 [08:18<07:26, 3.85s/it]
|
| 193 |
52%|█████▏ | 127/242 [08:22<07:22, 3.85s/it]
|
| 194 |
53%|█████▎ | 128/242 [08:26<07:18, 3.85s/it]
|
| 195 |
53%|█████▎ | 129/242 [08:30<07:15, 3.85s/it]
|
| 196 |
54%|█████▎ | 130/242 [08:34<07:11, 3.85s/it]
|
| 197 |
|
| 198 |
54%|█████▎ | 130/242 [08:34<07:11, 3.85s/it]
|
| 199 |
54%|█████▍ | 131/242 [08:38<07:07, 3.85s/it]
|
| 200 |
55%|█████▍ | 132/242 [08:42<07:03, 3.85s/it]
|
| 201 |
55%|█████▍ | 133/242 [08:45<06:59, 3.85s/it]
|
| 202 |
55%|█████▌ | 134/242 [08:49<06:56, 3.85s/it]
|
| 203 |
56%|█████▌ | 135/242 [08:53<06:52, 3.85s/it]
|
| 204 |
56%|█████▌ | 136/242 [08:57<06:48, 3.85s/it]
|
| 205 |
57%|█████▋ | 137/242 [09:01<06:44, 3.85s/it]
|
| 206 |
57%|█████▋ | 138/242 [09:05<06:40, 3.85s/it]
|
| 207 |
57%|█████▋ | 139/242 [09:09<06:37, 3.86s/it]
|
| 208 |
58%|█████▊ | 140/242 [09:12<06:33, 3.86s/it]
|
| 209 |
|
| 210 |
58%|█████▊ | 140/242 [09:12<06:33, 3.86s/it]
|
| 211 |
58%|█████▊ | 141/242 [09:16<06:29, 3.85s/it]
|
| 212 |
59%|█████▊ | 142/242 [09:20<06:25, 3.85s/it]
|
| 213 |
59%|█████▉ | 143/242 [09:24<06:21, 3.85s/it]
|
| 214 |
60%|█████▉ | 144/242 [09:28<06:17, 3.85s/it]
|
| 215 |
60%|█████▉ | 145/242 [09:32<06:13, 3.85s/it]
|
| 216 |
60%|██████ | 146/242 [09:35<06:09, 3.85s/it]
|
| 217 |
61%|██████ | 147/242 [09:39<06:06, 3.85s/it]
|
| 218 |
61%|██████ | 148/242 [09:43<06:02, 3.85s/it]
|
| 219 |
62%|██████▏ | 149/242 [09:47<05:58, 3.85s/it]
|
| 220 |
62%|██████▏ | 150/242 [09:51<05:54, 3.86s/it]
|
| 221 |
|
| 222 |
62%|██████▏ | 150/242 [09:51<05:54, 3.86s/it]
|
| 223 |
62%|██████▏ | 151/242 [09:57<06:42, 4.43s/it]
|
| 224 |
63%|██████▎ | 152/242 [10:01<06:24, 4.27s/it]
|
| 225 |
63%|██████▎ | 153/242 [10:04<06:09, 4.15s/it]
|
| 226 |
64%|██████▎ | 154/242 [10:08<05:58, 4.07s/it]
|
| 227 |
64%|██████▍ | 155/242 [10:12<05:48, 4.01s/it]
|
| 228 |
64%|██████▍ | 156/242 [10:16<05:40, 3.96s/it]
|
| 229 |
65%|██████▍ | 157/242 [10:20<05:34, 3.94s/it]
|
| 230 |
65%|██████▌ | 158/242 [10:24<05:29, 3.92s/it]
|
| 231 |
66%|██████▌ | 159/242 [10:28<05:24, 3.91s/it]
|
| 232 |
66%|██████▌ | 160/242 [10:32<05:19, 3.90s/it]
|
| 233 |
|
| 234 |
66%|██████▌ | 160/242 [10:32<05:19, 3.90s/it]
|
| 235 |
67%|██████▋ | 161/242 [10:35<05:15, 3.89s/it]
|
| 236 |
67%|██████▋ | 162/242 [10:39<05:10, 3.88s/it]
|
| 237 |
67%|██████▋ | 163/242 [10:43<05:07, 3.89s/it]
|
| 238 |
68%|██████▊ | 164/242 [10:47<05:02, 3.88s/it]
|
| 239 |
68%|██████▊ | 165/242 [10:51<04:58, 3.88s/it]
|
| 240 |
69%|██████▊ | 166/242 [10:55<04:54, 3.88s/it]
|
| 241 |
69%|██████▉ | 167/242 [10:59<04:50, 3.88s/it]
|
| 242 |
69%|██████▉ | 168/242 [11:03<04:47, 3.88s/it]
|
| 243 |
70%|██████▉ | 169/242 [11:06<04:43, 3.88s/it]
|
| 244 |
70%|███████ | 170/242 [11:10<04:38, 3.87s/it]
|
| 245 |
|
| 246 |
70%|███████ | 170/242 [11:10<04:38, 3.87s/it]
|
| 247 |
71%|███████ | 171/242 [11:14<04:34, 3.87s/it]
|
| 248 |
71%|███████ | 172/242 [11:18<04:31, 3.87s/it]
|
| 249 |
71%|███████▏ | 173/242 [11:22<04:27, 3.87s/it]
|
| 250 |
72%|███████▏ | 174/242 [11:26<04:23, 3.87s/it]
|
| 251 |
72%|███████▏ | 175/242 [11:30<04:19, 3.88s/it]
|
| 252 |
73%|███████▎ | 176/242 [11:34<04:15, 3.87s/it]
|
| 253 |
73%|███████▎ | 177/242 [11:37<04:12, 3.88s/it]
|
| 254 |
74%|███████▎ | 178/242 [11:41<04:08, 3.88s/it]
|
| 255 |
74%|███████▍ | 179/242 [11:45<04:04, 3.87s/it]
|
| 256 |
74%|███████▍ | 180/242 [11:49<04:00, 3.87s/it]
|
| 257 |
|
| 258 |
74%|███████▍ | 180/242 [11:49<04:00, 3.87s/it]
|
| 259 |
75%|███████▍ | 181/242 [11:53<03:55, 3.87s/it]
|
| 260 |
75%|███████▌ | 182/242 [11:57<03:51, 3.86s/it]
|
| 261 |
76%|███████▌ | 183/242 [12:01<03:47, 3.86s/it]
|
| 262 |
76%|███████▌ | 184/242 [12:04<03:43, 3.86s/it]
|
| 263 |
76%|███████▋ | 185/242 [12:08<03:39, 3.86s/it]
|
| 264 |
77%|███████▋ | 186/242 [12:12<03:35, 3.86s/it]
|
| 265 |
77%|███████▋ | 187/242 [12:16<03:32, 3.86s/it]
|
| 266 |
78%|███████▊ | 188/242 [12:20<03:28, 3.87s/it]
|
| 267 |
78%|███████▊ | 189/242 [12:24<03:24, 3.86s/it]
|
| 268 |
79%|███████▊ | 190/242 [12:28<03:26, 3.98s/it]
|
| 269 |
|
| 270 |
79%|███████▊ | 190/242 [12:28<03:26, 3.98s/it]
|
| 271 |
79%|███████▉ | 191/242 [12:32<03:21, 3.94s/it]
|
| 272 |
79%|███████▉ | 192/242 [12:36<03:16, 3.93s/it]
|
| 273 |
80%|███████▉ | 193/242 [12:40<03:11, 3.91s/it]
|
| 274 |
80%|████████ | 194/242 [12:44<03:07, 3.90s/it]
|
| 275 |
81%|████████ | 195/242 [12:47<03:02, 3.89s/it]
|
| 276 |
81%|████████ | 196/242 [12:51<02:58, 3.88s/it]
|
| 277 |
81%|████████▏ | 197/242 [12:55<02:54, 3.88s/it]
|
| 278 |
82%|████████▏ | 198/242 [12:59<02:50, 3.87s/it]
|
| 279 |
82%|████████▏ | 199/242 [13:03<02:46, 3.86s/it]
|
| 280 |
83%|████████▎ | 200/242 [13:07<02:42, 3.86s/it]
|
| 281 |
|
| 282 |
83%|████████▎ | 200/242 [13:07<02:42, 3.86s/it]
|
| 283 |
83%|████████▎ | 201/242 [13:12<03:02, 4.45s/it]
|
| 284 |
83%|████████▎ | 202/242 [13:16<02:51, 4.28s/it]
|
| 285 |
84%|████████▍ | 203/242 [13:20<02:41, 4.15s/it]
|
| 286 |
84%|████████▍ | 204/242 [13:24<02:34, 4.07s/it]
|
| 287 |
85%|████████▍ | 205/242 [13:28<02:28, 4.00s/it]
|
| 288 |
85%|████████▌ | 206/242 [13:32<02:22, 3.96s/it]
|
| 289 |
86%|████████▌ | 207/242 [13:36<02:17, 3.93s/it]
|
| 290 |
86%|████████▌ | 208/242 [13:40<02:12, 3.91s/it]
|
| 291 |
86%|████████▋ | 209/242 [13:43<02:08, 3.89s/it]
|
| 292 |
87%|████████▋ | 210/242 [13:47<02:04, 3.89s/it]
|
| 293 |
|
| 294 |
87%|████████▋ | 210/242 [13:47<02:04, 3.89s/it]
|
| 295 |
87%|████████▋ | 211/242 [13:51<02:00, 3.88s/it]
|
| 296 |
88%|████████▊ | 212/242 [13:55<01:56, 3.88s/it]
|
| 297 |
88%|████████▊ | 213/242 [13:59<01:52, 3.88s/it]
|
| 298 |
88%|████████▊ | 214/242 [14:03<01:48, 3.87s/it]
|
| 299 |
89%|████████▉ | 215/242 [14:07<01:44, 3.87s/it]
|
| 300 |
89%|████████▉ | 216/242 [14:10<01:40, 3.87s/it]
|
| 301 |
90%|████████▉ | 217/242 [14:14<01:36, 3.86s/it]
|
| 302 |
90%|█████████ | 218/242 [14:18<01:32, 3.87s/it]
|
| 303 |
90%|█████████ | 219/242 [14:22<01:29, 3.87s/it]
|
| 304 |
91%|█████████ | 220/242 [14:26<01:24, 3.86s/it]
|
| 305 |
|
| 306 |
91%|█████████ | 220/242 [14:26<01:24, 3.86s/it]
|
| 307 |
91%|█████████▏| 221/242 [14:30<01:21, 3.86s/it]
|
| 308 |
92%|█████████▏| 222/242 [14:34<01:17, 3.86s/it]
|
| 309 |
92%|█████████▏| 223/242 [14:37<01:13, 3.86s/it]
|
| 310 |
93%|█████████▎| 224/242 [14:41<01:09, 3.86s/it]
|
| 311 |
93%|█████████▎| 225/242 [14:45<01:05, 3.86s/it]
|
| 312 |
93%|█████████▎| 226/242 [14:49<01:01, 3.85s/it]
|
| 313 |
94%|█████████▍| 227/242 [14:53<00:57, 3.85s/it]
|
| 314 |
94%|█████████▍| 228/242 [14:57<00:53, 3.85s/it]
|
| 315 |
95%|█████████▍| 229/242 [15:01<00:50, 3.85s/it]
|
| 316 |
95%|█████████▌| 230/242 [15:04<00:46, 3.85s/it]
|
| 317 |
|
| 318 |
95%|█████████▌| 230/242 [15:04<00:46, 3.85s/it]
|
| 319 |
95%|█████████▌| 231/242 [15:08<00:42, 3.86s/it]
|
| 320 |
96%|█████████▌| 232/242 [15:12<00:38, 3.87s/it]
|
| 321 |
96%|█████████▋| 233/242 [15:16<00:34, 3.88s/it]
|
| 322 |
97%|█████████▋| 234/242 [15:20<00:31, 3.89s/it]
|
| 323 |
97%|█████████▋| 235/242 [15:24<00:27, 3.89s/it]
|
| 324 |
98%|█████████▊| 236/242 [15:28<00:23, 3.90s/it]
|
| 325 |
98%|█████████▊| 237/242 [15:32<00:19, 3.91s/it]
|
| 326 |
98%|█████████▊| 238/242 [15:36<00:15, 3.92s/it]
|
| 327 |
99%|█████████▉| 239/242 [15:40<00:11, 3.92s/it]
|
| 328 |
99%|█████████▉| 240/242 [15:44<00:07, 3.92s/it]
|
| 329 |
|
| 330 |
99%|█████████▉| 240/242 [15:44<00:07, 3.92s/it]
|
| 331 |
|
| 332 |
+
Unsloth: Will smartly offload gradients to save VRAM!
|
| 333 |
+
{'loss': '0.9271', 'grad_norm': '198.7', 'learning_rate': '9e-07', 'rewards/chosen': '-0.05307', 'rewards/rejected': '-0.005694', 'rewards/accuracies': '0.4125', 'rewards/margins': '-0.04738', 'logps/chosen': '-3755', 'logps/rejected': '-2303', 'logits/chosen': '-11.42', 'logits/rejected': '-11.47', 'epoch': '0.08264'}
|
| 334 |
+
{'loss': '0.454', 'grad_norm': '25.33', 'learning_rate': '1.9e-06', 'rewards/chosen': '2.245', 'rewards/rejected': '0.7875', 'rewards/accuracies': '0.7875', 'rewards/margins': '1.457', 'logps/chosen': '-3733', 'logps/rejected': '-2372', 'logits/chosen': '-11.34', 'logits/rejected': '-11.34', 'epoch': '0.1653'}
|
| 335 |
+
{'loss': '0.05742', 'grad_norm': '10.8', 'learning_rate': '2.9e-06', 'rewards/chosen': '10.9', 'rewards/rejected': '3.573', 'rewards/accuracies': '0.9875', 'rewards/margins': '7.329', 'logps/chosen': '-3603', 'logps/rejected': '-2278', 'logits/chosen': '-11.21', 'logits/rejected': '-11.15', 'epoch': '0.2479'}
|
| 336 |
+
{'loss': '0.0152', 'grad_norm': '0.0009383', 'learning_rate': '3.9e-06', 'rewards/chosen': '23.31', 'rewards/rejected': '4.238', 'rewards/accuracies': '1', 'rewards/margins': '19.07', 'logps/chosen': '-3510', 'logps/rejected': '-2291', 'logits/chosen': '-11.12', 'logits/rejected': '-11.2', 'epoch': '0.3306'}
|
| 337 |
+
{'loss': '0.009164', 'grad_norm': '3.639e-07', 'learning_rate': '4.9e-06', 'rewards/chosen': '24.5', 'rewards/rejected': '-1.617', 'rewards/accuracies': '1', 'rewards/margins': '26.12', 'logps/chosen': '-3374', 'logps/rejected': '-2323', 'logits/chosen': '-11.23', 'logits/rejected': '-11.38', 'epoch': '0.4132'}
|
| 338 |
+
{'loss': '3.722e-07', 'grad_norm': '6.808e-08', 'learning_rate': '4.973e-06', 'rewards/chosen': '19.64', 'rewards/rejected': '-11.7', 'rewards/accuracies': '1', 'rewards/margins': '31.33', 'logps/chosen': '-3468', 'logps/rejected': '-2433', 'logits/chosen': '-11.37', 'logits/rejected': '-11.74', 'epoch': '0.4959'}
|
| 339 |
+
{'loss': '5.501e-07', 'grad_norm': '1.655e-09', 'learning_rate': '4.88e-06', 'rewards/chosen': '16.18', 'rewards/rejected': '-18.2', 'rewards/accuracies': '1', 'rewards/margins': '34.38', 'logps/chosen': '-3623', 'logps/rejected': '-2515', 'logits/chosen': '-11.52', 'logits/rejected': '-11.77', 'epoch': '0.5785'}
|
| 340 |
+
{'loss': '1.991e-07', 'grad_norm': '1.013e-10', 'learning_rate': '4.724e-06', 'rewards/chosen': '15.59', 'rewards/rejected': '-20.33', 'rewards/accuracies': '1', 'rewards/margins': '35.92', 'logps/chosen': '-3673', 'logps/rejected': '-2564', 'logits/chosen': '-11.52', 'logits/rejected': '-11.89', 'epoch': '0.6612'}
|
| 341 |
+
{'loss': '4.913e-08', 'grad_norm': '4.028e-10', 'learning_rate': '4.508e-06', 'rewards/chosen': '12.84', 'rewards/rejected': '-21.68', 'rewards/accuracies': '1', 'rewards/margins': '34.52', 'logps/chosen': '-3652', 'logps/rejected': '-2563', 'logits/chosen': '-11.53', 'logits/rejected': '-11.78', 'epoch': '0.7438'}
|
| 342 |
+
{'loss': '6.312e-07', 'grad_norm': '8.415e-06', 'learning_rate': '4.239e-06', 'rewards/chosen': '12.63', 'rewards/rejected': '-21.65', 'rewards/accuracies': '1', 'rewards/margins': '34.28', 'logps/chosen': '-3544', 'logps/rejected': '-2617', 'logits/chosen': '-11.6', 'logits/rejected': '-11.93', 'epoch': '0.8264'}
|
| 343 |
+
{'loss': '1.452e-05', 'grad_norm': '3.07e-10', 'learning_rate': '3.923e-06', 'rewards/chosen': '13.53', 'rewards/rejected': '-21.73', 'rewards/accuracies': '1', 'rewards/margins': '35.26', 'logps/chosen': '-3635', 'logps/rejected': '-2562', 'logits/chosen': '-11.55', 'logits/rejected': '-11.83', 'epoch': '0.9091'}
|
| 344 |
+
{'loss': '0.00764', 'grad_norm': '21.77', 'learning_rate': '3.569e-06', 'rewards/chosen': '12.49', 'rewards/rejected': '-21.65', 'rewards/accuracies': '1', 'rewards/margins': '34.13', 'logps/chosen': '-3522', 'logps/rejected': '-2493', 'logits/chosen': '-11.58', 'logits/rejected': '-11.86', 'epoch': '0.9917'}
|
| 345 |
+
{'loss': '2.103e-07', 'grad_norm': '5.981e-07', 'learning_rate': '3.186e-06', 'rewards/chosen': '13.15', 'rewards/rejected': '-21.7', 'rewards/accuracies': '1', 'rewards/margins': '34.85', 'logps/chosen': '-3557', 'logps/rejected': '-2590', 'logits/chosen': '-11.68', 'logits/rejected': '-11.99', 'epoch': '1.074'}
|
| 346 |
+
{'loss': '3.559e-08', 'grad_norm': '8.223e-07', 'learning_rate': '2.786e-06', 'rewards/chosen': '14.41', 'rewards/rejected': '-21.41', 'rewards/accuracies': '1', 'rewards/margins': '35.82', 'logps/chosen': '-3470', 'logps/rejected': '-2560', 'logits/chosen': '-11.74', 'logits/rejected': '-12.08', 'epoch': '1.157'}
|
| 347 |
+
{'loss': '1.473e-07', 'grad_norm': '1.332e-09', 'learning_rate': '2.377e-06', 'rewards/chosen': '14.15', 'rewards/rejected': '-21.22', 'rewards/accuracies': '1', 'rewards/margins': '35.37', 'logps/chosen': '-3576', 'logps/rejected': '-2575', 'logits/chosen': '-11.71', 'logits/rejected': '-11.96', 'epoch': '1.24'}
|
| 348 |
+
{'loss': '1.274e-08', 'grad_norm': '2.098e-09', 'learning_rate': '1.972e-06', 'rewards/chosen': '16.15', 'rewards/rejected': '-21.08', 'rewards/accuracies': '1', 'rewards/margins': '37.23', 'logps/chosen': '-3689', 'logps/rejected': '-2564', 'logits/chosen': '-11.75', 'logits/rejected': '-12.09', 'epoch': '1.322'}
|
| 349 |
+
{'loss': '6.593e-08', 'grad_norm': '2.842e-05', 'learning_rate': '1.581e-06', 'rewards/chosen': '16.32', 'rewards/rejected': '-19.54', 'rewards/accuracies': '1', 'rewards/margins': '35.85', 'logps/chosen': '-3579', 'logps/rejected': '-2557', 'logits/chosen': '-11.76', 'logits/rejected': '-12.05', 'epoch': '1.405'}
|
| 350 |
+
{'loss': '2.681e-08', 'grad_norm': '3.175e-06', 'learning_rate': '1.215e-06', 'rewards/chosen': '17.36', 'rewards/rejected': '-20.76', 'rewards/accuracies': '1', 'rewards/margins': '38.12', 'logps/chosen': '-3732', 'logps/rejected': '-2609', 'logits/chosen': '-11.75', 'logits/rejected': '-12.14', 'epoch': '1.488'}
|
| 351 |
+
{'loss': '9.061e-09', 'grad_norm': '1.294e-05', 'learning_rate': '8.826e-07', 'rewards/chosen': '17.16', 'rewards/rejected': '-20.78', 'rewards/accuracies': '1', 'rewards/margins': '37.94', 'logps/chosen': '-3618', 'logps/rejected': '-2533', 'logits/chosen': '-11.75', 'logits/rejected': '-12.05', 'epoch': '1.57'}
|
| 352 |
+
{'loss': '2.793e-07', 'grad_norm': '8.467e-05', 'learning_rate': '5.937e-07', 'rewards/chosen': '15.28', 'rewards/rejected': '-20.5', 'rewards/accuracies': '1', 'rewards/margins': '35.78', 'logps/chosen': '-3386', 'logps/rejected': '-2454', 'logits/chosen': '-11.7', 'logits/rejected': '-12.17', 'epoch': '1.653'}
|
| 353 |
+
{'loss': '3.405e-07', 'grad_norm': '1.662e-09', 'learning_rate': '3.557e-07', 'rewards/chosen': '16.79', 'rewards/rejected': '-20.01', 'rewards/accuracies': '1', 'rewards/margins': '36.8', 'logps/chosen': '-3677', 'logps/rejected': '-2567', 'logits/chosen': '-11.86', 'logits/rejected': '-12.22', 'epoch': '1.736'}
|
| 354 |
+
{'loss': '9.05e-10', 'grad_norm': '3.363e-08', 'learning_rate': '1.75e-07', 'rewards/chosen': '17.05', 'rewards/rejected': '-20.15', 'rewards/accuracies': '1', 'rewards/margins': '37.2', 'logps/chosen': '-3534', 'logps/rejected': '-2474', 'logits/chosen': '-11.71', 'logits/rejected': '-12.03', 'epoch': '1.818'}
|
| 355 |
+
{'loss': '2.482e-08', 'grad_norm': '1.544e-08', 'learning_rate': '5.635e-08', 'rewards/chosen': '13.72', 'rewards/rejected': '-20.55', 'rewards/accuracies': '1', 'rewards/margins': '34.26', 'logps/chosen': '-3614', 'logps/rejected': '-2530', 'logits/chosen': '-11.74', 'logits/rejected': '-11.99', 'epoch': '1.901'}
|
| 356 |
+
{'loss': '1.692e-09', 'grad_norm': '1.316e-08', 'learning_rate': '3.011e-09', 'rewards/chosen': '16.01', 'rewards/rejected': '-20.62', 'rewards/accuracies': '1', 'rewards/margins': '36.63', 'logps/chosen': '-3492', 'logps/rejected': '-2496', 'logits/chosen': '-11.77', 'logits/rejected': '-12.07', 'epoch': '1.983'}
|
| 357 |
+
{'train_runtime': '953.8', 'train_samples_per_second': '2.028', 'train_steps_per_second': '0.254', 'train_loss': '0.06077', 'epoch': '2'}
|
| 358 |
+
|
| 359 |
+
Saving DPO adapter to /workspace/outputs/dpo/final_adapter...
|
| 360 |
+
|
| 361 |
+
DPO training complete!
|
| 362 |
+
Next: python3 merge_adapter.py --adapter /workspace/outputs/dpo/final_adapter
|
logs/train_resume_20260414_210603.log
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
model.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3163ffdcf841d829632af5932ccda65c893fcca63b84605df34aed275db66929
|
| 3 |
+
size 15992595852
|
ollama/Modelfile.bf16
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM /Users/robert/bibleai-backup/release/BibleAI-Gemma4-E4B-CPT-SFT-DPO-20260414/gguf/final_merged.BF16.gguf
|
| 2 |
+
|
| 3 |
+
SYSTEM """You are BibleAI.
|
| 4 |
+
|
| 5 |
+
Response policy (highest priority):
|
| 6 |
+
1) Answer only Bible/theology/church-history/faith questions.
|
| 7 |
+
2) Be concise by default.
|
| 8 |
+
3) For questions that ask to list items from a specific verse:
|
| 9 |
+
- Output ONLY a numbered list of the exact items in that verse.
|
| 10 |
+
- Do NOT add synonyms, commentary, Greek/Hebrew, Strong's numbers, or scholar quotes.
|
| 11 |
+
- Add one final line with the verse reference.
|
| 12 |
+
4) Do not fabricate verses, facts, or language details. If uncertain, say so.
|
| 13 |
+
5) If the user asks for deeper analysis, then provide it.
|
| 14 |
+
"""
|
| 15 |
+
|
| 16 |
+
TEMPLATE """{{- if .System }}<start_of_turn>system
|
| 17 |
+
{{ .System }}<end_of_turn>
|
| 18 |
+
{{- end }}<start_of_turn>user
|
| 19 |
+
{{ .Prompt }}<end_of_turn>
|
| 20 |
+
<start_of_turn>model
|
| 21 |
+
"""
|
| 22 |
+
|
| 23 |
+
PARAMETER temperature 0
|
| 24 |
+
PARAMETER top_p 0.7
|
| 25 |
+
PARAMETER repeat_penalty 1.15
|
| 26 |
+
PARAMETER num_ctx 8192
|
| 27 |
+
PARAMETER num_predict 160
|
| 28 |
+
PARAMETER stop "<end_of_turn>"
|
| 29 |
+
PARAMETER stop "<start_of_turn>user"
|
| 30 |
+
PARAMETER stop "<start_of_turn>system"
|
| 31 |
+
PARAMETER stop "<start_of_turn>model"
|
| 32 |
+
PARAMETER seed 42
|
ollama/Modelfile.canonical_project_reference
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM ./models/gguf/bibleai-gemma4-e4b-Q8_0.gguf
|
| 2 |
+
|
| 3 |
+
TEMPLATE """<bos>{{ if .System }}<|turn>system
|
| 4 |
+
{{ .System }}<turn|>
|
| 5 |
+
{{ end }}{{ if .Prompt }}<|turn>user
|
| 6 |
+
{{ .Prompt }}<turn|>
|
| 7 |
+
{{ end }}<|turn>model
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
PARAMETER stop "<turn|>"
|
| 11 |
+
PARAMETER temperature 0.3
|
| 12 |
+
PARAMETER top_p 0.9
|
| 13 |
+
PARAMETER num_ctx 4096
|
| 14 |
+
|
| 15 |
+
SYSTEM """You are BibleAI, a scholarly Bible study assistant grounded in the Berean Standard Bible (BSB). You exist solely to help people study the Bible, understand theology, and apply Scripture to life.
|
| 16 |
+
|
| 17 |
+
CORE PRINCIPLES:
|
| 18 |
+
1. SCRIPTURE FIRST: Always quote the BSB text exactly. Use "Book Chapter:Verse (BSB)" format. Every answer must be rooted in specific passages.
|
| 19 |
+
2. PRECISION: Never paraphrase when quoting. If uncertain, say so and provide the reference to verify.
|
| 20 |
+
3. FAITHFUL TO THE TEXT: Let Scripture speak for itself. Where the Bible speaks clearly, present what it says without softening, hedging, or adding modern qualifications. Do not impose contemporary cultural frameworks onto the text. Present the biblical position faithfully, then note where historic Christian traditions agree or differ.
|
| 21 |
+
4. MORAL CLARITY: On matters where Scripture speaks plainly and historic Christianity has consensus (sexual ethics, sanctity of life, exclusivity of Christ, reality of judgment), present the biblical position directly and confidently. Do not present revisionist interpretations as equally valid. Compassion and clarity are not opposites.
|
| 22 |
+
5. MULTI-TRADITION (genuinely debated matters only): On genuinely debated theological matters (predestination, baptism mode, end times), present Protestant, Catholic, and Orthodox perspectives fairly. But do not treat clear biblical moral teaching as "debatable" simply because modern culture disagrees with it.
|
| 23 |
+
6. LINGUISTIC DEPTH: Reference Greek and Hebrew terms with transliteration and Strong's numbers where relevant.
|
| 24 |
+
7. ALWAYS CITE SOURCES: Attribute interpretive claims to specific scholars, church fathers, confessions, or traditions.
|
| 25 |
+
8. COMPLETENESS: Cite the most relevant passages across both Testaments. Note tensions or counterpoints within Scripture itself.
|
| 26 |
+
9. CORRECTION: Gently correct misquoted scripture with exact BSB text.
|
| 27 |
+
|
| 28 |
+
BOUNDARIES:
|
| 29 |
+
10. THEOLOGY ONLY: Only answer Bible, theology, church history, and faith questions. Politely decline coding, math, politics, medical/legal advice, or any non-theological topic.
|
| 30 |
+
11. PASTORAL CARE: For deeply personal matters, share relevant Scripture with compassion, but always encourage speaking with a pastor, biblical counselor, or church community.
|
| 31 |
+
12. NO FABRICATION: Never fabricate references, verses, or content. If you don't know, say so clearly."""
|
ollama/Modelfile.q8
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM /Users/robert/bibleai-backup/release/BibleAI-Gemma4-E4B-CPT-SFT-DPO-20260414/gguf/final_merged.Q8_0.gguf
|
| 2 |
+
|
| 3 |
+
SYSTEM """You are BibleAI.
|
| 4 |
+
|
| 5 |
+
Response policy (highest priority):
|
| 6 |
+
1) Answer only Bible/theology/church-history/faith questions.
|
| 7 |
+
2) Be concise by default.
|
| 8 |
+
3) For questions that ask to list items from a specific verse:
|
| 9 |
+
- Output ONLY a numbered list of the exact items in that verse.
|
| 10 |
+
- Do NOT add synonyms, commentary, Greek/Hebrew, Strong's numbers, or scholar quotes.
|
| 11 |
+
- Add one final line with the verse reference.
|
| 12 |
+
4) Do not fabricate verses, facts, or language details. If uncertain, say so.
|
| 13 |
+
5) If the user asks for deeper analysis, then provide it.
|
| 14 |
+
"""
|
| 15 |
+
|
| 16 |
+
TEMPLATE """{{- if .System }}<start_of_turn>system
|
| 17 |
+
{{ .System }}<end_of_turn>
|
| 18 |
+
{{- end }}<start_of_turn>user
|
| 19 |
+
{{ .Prompt }}<end_of_turn>
|
| 20 |
+
<start_of_turn>model
|
| 21 |
+
"""
|
| 22 |
+
|
| 23 |
+
PARAMETER temperature 0
|
| 24 |
+
PARAMETER top_p 0.7
|
| 25 |
+
PARAMETER repeat_penalty 1.15
|
| 26 |
+
PARAMETER num_ctx 8192
|
| 27 |
+
PARAMETER num_predict 160
|
| 28 |
+
PARAMETER stop "<end_of_turn>"
|
| 29 |
+
PARAMETER stop "<start_of_turn>user"
|
| 30 |
+
PARAMETER stop "<start_of_turn>system"
|
| 31 |
+
PARAMETER stop "<start_of_turn>model"
|
| 32 |
+
PARAMETER seed 42
|
tokenizer.json
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:12bac982b793c44b03d52a250a9f0d0b666813da566b910c24a6da0695fd11e6
|
| 3 |
+
size 32170070
|
tokenizer_config.json
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"audio_token": "<|audio|>",
|
| 3 |
+
"backend": "tokenizers",
|
| 4 |
+
"boa_token": "<|audio>",
|
| 5 |
+
"boi_token": "<|image>",
|
| 6 |
+
"bos_token": "<bos>",
|
| 7 |
+
"eoa_token": "<audio|>",
|
| 8 |
+
"eoc_token": "<channel|>",
|
| 9 |
+
"eoi_token": "<image|>",
|
| 10 |
+
"eos_token": "<eos>",
|
| 11 |
+
"eot_token": "<turn|>",
|
| 12 |
+
"escape_token": "<|\"|>",
|
| 13 |
+
"etc_token": "<tool_call|>",
|
| 14 |
+
"etd_token": "<tool|>",
|
| 15 |
+
"etr_token": "<tool_response|>",
|
| 16 |
+
"extra_special_tokens": [],
|
| 17 |
+
"image_token": "<|image|>",
|
| 18 |
+
"is_local": true,
|
| 19 |
+
"mask_token": "<mask>",
|
| 20 |
+
"model_max_length": 1000000000000000019884624838656,
|
| 21 |
+
"model_specific_special_tokens": {
|
| 22 |
+
"audio_token": "<|audio|>",
|
| 23 |
+
"boa_token": "<|audio>",
|
| 24 |
+
"boi_token": "<|image>",
|
| 25 |
+
"eoa_token": "<audio|>",
|
| 26 |
+
"eoc_token": "<channel|>",
|
| 27 |
+
"eoi_token": "<image|>",
|
| 28 |
+
"eot_token": "<turn|>",
|
| 29 |
+
"escape_token": "<|\"|>",
|
| 30 |
+
"etc_token": "<tool_call|>",
|
| 31 |
+
"etd_token": "<tool|>",
|
| 32 |
+
"etr_token": "<tool_response|>",
|
| 33 |
+
"image_token": "<|image|>",
|
| 34 |
+
"soc_token": "<|channel>",
|
| 35 |
+
"sot_token": "<|turn>",
|
| 36 |
+
"stc_token": "<|tool_call>",
|
| 37 |
+
"std_token": "<|tool>",
|
| 38 |
+
"str_token": "<|tool_response>",
|
| 39 |
+
"think_token": "<|think|>"
|
| 40 |
+
},
|
| 41 |
+
"pad_token": "<pad>",
|
| 42 |
+
"padding_side": "left",
|
| 43 |
+
"processor_class": "Gemma4Processor",
|
| 44 |
+
"soc_token": "<|channel>",
|
| 45 |
+
"sot_token": "<|turn>",
|
| 46 |
+
"stc_token": "<|tool_call>",
|
| 47 |
+
"std_token": "<|tool>",
|
| 48 |
+
"str_token": "<|tool_response>",
|
| 49 |
+
"think_token": "<|think|>",
|
| 50 |
+
"tokenizer_class": "GemmaTokenizer",
|
| 51 |
+
"unk_token": "<unk>"
|
| 52 |
+
}
|