{
  "sft_baseline": {
    "detection": 0.9930555555555556,
    "fpr": 0.03225806451612903,
    "precision": 0.9930555555555556,
    "f1": 0.9930555555555556,
    "n": 175,
    "per_difficulty": {
      "easy": {
        "n": 30,
        "detection_rate": 1.0
      },
      "medium": {
        "n": 78,
        "detection_rate": 1.0
      },
      "hard": {
        "n": 33,
        "detection_rate": 0.9444444444444444
      },
      "novel": {
        "n": 34,
        "detection_rate": 1.0
      }
    },
    "threshold": 0.55
  },
  "meta": {
    "base_model": "Qwen/Qwen2.5-7B-Instruct",
    "lora_config": {
      "r": 32,
      "alpha": 64,
      "target_modules": [
        "q_proj",
        "k_proj",
        "v_proj",
        "o_proj",
        "gate_proj",
        "up_proj",
        "down_proj"
      ]
    },
    "training": {
      "epochs": 3,
      "batch_size": 4,
      "grad_accum": 2,
      "lr": 5e-05,
      "optim": "paged_adamw_8bit"
    },
    "dataset_size": 741,
    "temperature": 0.0,
    "seed": 42
  }
}