{ "fine_tune_type": "lora", "lora_parameters": { "rank": 16, "alpha": 32, "dropout": 0.05, "scale": 2.0 }, "num_layers": 16, "lora_layers": [ "self_attn.q_proj", "self_attn.k_proj", "self_attn.v_proj", "self_attn.o_proj" ] }