{
  "model": "logos29-gemma2_9b",
  "family": "gemma2_9b",
  "family_name": "Gemma 2 9B",
  "base_model": "google/gemma-2-9b-it",
  "base_model_quantized": "unsloth/gemma-2-9b-it-bnb-4bit",
  "method": "QLoRA (4-bit NF4 + LoRA)",
  "framework": "unsloth",
  "lora_rank": 16,
  "lora_alpha": 16,
  "lora_target_modules": [
    "q_proj",
    "k_proj",
    "v_proj",
    "o_proj",
    "gate_proj",
    "up_proj",
    "down_proj"
  ],
  "epochs": 3,
  "effective_batch_size": 8,
  "load_in_4bit": true,
  "learning_rate": 0.0002,
  "lr_scheduler": "cosine",
  "max_seq_length": 2048,
  "dataset": "logos29_gemma9b.jsonl",
  "dataset_size": 1026,
  "train_on_responses_only": true,
  "think_blocks": "stripped (no-think variant)",
  "final_loss": 1.0404357817745948,
  "runtime_seconds": 2157.1858
}