ConicCat committed
Commit cc7526a · verified · 1 Parent(s): 8da43bb

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tekken.json filter=lfs diff=lfs merge=lfs -text
LoRA/adapter_config.json ADDED
@@ -0,0 +1,49 @@
+{
+  "alora_invocation_tokens": null,
+  "alpha_pattern": {},
+  "arrow_config": null,
+  "auto_mapping": null,
+  "base_model_name_or_path": "mistralai/Mistral-Small-3.2-24B-Instruct-2506",
+  "bias": "none",
+  "corda_config": null,
+  "ensure_weight_tying": false,
+  "eva_config": null,
+  "exclude_modules": null,
+  "fan_in_fan_out": null,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layer_replication": null,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "loftq_config": {},
+  "lora_alpha": 128,
+  "lora_bias": false,
+  "lora_dropout": 0.0,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "peft_version": "0.18.1",
+  "qalora_group_size": 16,
+  "r": 64,
+  "rank_pattern": {},
+  "revision": null,
+  "target_modules": [
+    "down_proj",
+    "v_proj",
+    "linear_1",
+    "merging_layer",
+    "gate_proj",
+    "linear_2",
+    "up_proj",
+    "k_proj",
+    "q_proj",
+    "o_proj"
+  ],
+  "target_parameters": [],
+  "task_type": "CAUSAL_LM",
+  "trainable_token_indices": null,
+  "use_dora": false,
+  "use_qalora": false,
+  "use_rslora": false
+}
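For reference, a minimal sketch of attaching this adapter to its base model with PEFT. The base model ID comes from adapter_config.json above, and the local LoRA/ path mirrors this commit's layout; note the target_modules cover not only attention/MLP projections but also what appear to be the multimodal projector layers (linear_1, linear_2, merging_layer).

from peft import PeftModel
from transformers import AutoModelForImageTextToText

# Base model named in adapter_config.json above.
base = AutoModelForImageTextToText.from_pretrained(
    "mistralai/Mistral-Small-3.2-24B-Instruct-2506"
)
# Attach the r=64, lora_alpha=128 adapter from this commit's LoRA/ folder.
model = PeftModel.from_pretrained(base, "LoRA")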
LoRA/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8b11c59cccd98a2a55cf98da42c52c6c0aa702c40875c8081e00b578c9ac0b9a
+size 1628841848
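The three lines above are a Git LFS pointer, not the ~1.6 GB adapter itself; `git lfs pull` replaces it with the real blob. A small self-contained sketch of parsing that pointer format (the path is this commit's layout):

# Parse a Git LFS pointer file: "key value" pairs, one per line.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

ptr = parse_lfs_pointer("LoRA/adapter_model.safetensors")
print(ptr["oid"], int(ptr["size"]))  # sha256:8b11c5... 1628841848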
config.json ADDED
@@ -0,0 +1,60 @@
+{
+  "architectures": [
+    "Mistral3ForConditionalGeneration"
+  ],
+  "dtype": "bfloat16",
+  "image_token_index": 10,
+  "model_type": "mistral3",
+  "multimodal_projector_bias": false,
+  "projector_hidden_act": "gelu",
+  "spatial_merge_size": 2,
+  "text_config": {
+    "attention_dropout": 0.0,
+    "bos_token_id": 1,
+    "dtype": "bfloat16",
+    "eos_token_id": 2,
+    "head_dim": 128,
+    "hidden_act": "silu",
+    "hidden_size": 5120,
+    "initializer_range": 0.02,
+    "intermediate_size": 32768,
+    "max_position_embeddings": 131072,
+    "model_type": "mistral",
+    "num_attention_heads": 32,
+    "num_hidden_layers": 40,
+    "num_key_value_heads": 8,
+    "pad_token_id": null,
+    "rms_norm_eps": 1e-05,
+    "rope_parameters": {
+      "rope_theta": 1000000000.0,
+      "rope_type": "default"
+    },
+    "sliding_window": null,
+    "tie_word_embeddings": false,
+    "use_cache": false,
+    "vocab_size": 131072
+  },
+  "tie_word_embeddings": true,
+  "transformers_version": "5.5.3",
+  "use_cache": true,
+  "vision_config": {
+    "attention_dropout": 0.0,
+    "dtype": "bfloat16",
+    "head_dim": 64,
+    "hidden_act": "silu",
+    "hidden_size": 1024,
+    "image_size": 1540,
+    "initializer_range": 0.02,
+    "intermediate_size": 4096,
+    "model_type": "pixtral",
+    "num_attention_heads": 16,
+    "num_channels": 3,
+    "num_hidden_layers": 24,
+    "patch_size": 14,
+    "rope_parameters": {
+      "rope_theta": 10000.0,
+      "rope_type": "default"
+    }
+  },
+  "vision_feature_layer": -1
+}
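A few sizes this config implies, shown as a minimal sketch (assumes a transformers version that knows the mistral3 config, and that the config is read from the checkpoint directory, here "."):

from transformers import AutoConfig

cfg = AutoConfig.from_pretrained(".")  # directory holding config.json
text, vision = cfg.text_config, cfg.vision_config
# 32 query heads sharing 8 KV heads -> grouped-query attention, 4:1.
print(text.num_attention_heads // text.num_key_value_heads)  # 4
# Attention width: 32 heads x 128 head_dim = 4096, projected from hidden_size 5120.
print(text.num_attention_heads * text.head_dim)              # 4096
# Pixtral encoder: 1540 / 14 = 110 patches per image side before spatial merging.
print(vision.image_size // vision.patch_size)                # 110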
generation_config.json ADDED
@@ -0,0 +1,8 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "do_sample": true,
+  "eos_token_id": 2,
+  "temperature": 0.15,
+  "transformers_version": "5.5.3"
+}
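These defaults (low-temperature sampling) are picked up automatically by model.generate(); a minimal sketch reading them back, with the same local-directory assumption as above:

from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained(".")  # reads generation_config.json
print(gen.do_sample, gen.temperature)        # True 0.15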
model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0f7289ced6c54f81a035f10918fe86f3e16ffc2324410eef9d33ca0be4a4a5c6
+size 46680627520
processor_config.json ADDED
@@ -0,0 +1,3 @@
+{
+  "processor_class": "Mistral3Processor"
+}
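processor_config.json only records which processor class to build; AutoProcessor dispatches on it to assemble the image/text preprocessing pipeline. A minimal sketch, assuming the checkpoint directory also ships the tokenizer and image-preprocessor files that class needs:

from transformers import AutoProcessor

# Dispatches on "processor_class": "Mistral3Processor" from the file above.
processor = AutoProcessor.from_pretrained(".")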
runs/Apr12_16-20-17_axolotl-trainer-0/events.out.tfevents.1776010817.axolotl-trainer-0.340.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dea2241ea4a617afb8243c5f2ee6119b46f113b3f14f26913df0031077a4c08d
+size 115186
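The events.out.tfevents.* blob is a TensorBoard log from the axolotl trainer run. A minimal sketch of reading its scalar metrics after `git lfs pull` (exact tag names are not known from this commit, so list them first):

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Apr12_16-20-17_axolotl-trainer-0")
acc.Reload()
print(acc.Tags()["scalars"])  # available scalar tags, e.g. a training-loss series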
tekken.json ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6e2501687ccd0e1f30f36319eaf2b46958b897811e246cd8eb5d385b9e3de7d1
+size 19399895
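tekken.json is Mistral's Tekken tokenizer definition (hence the LFS rule added to .gitattributes above). A minimal sketch of loading it with the mistral-common package, per that library's documented API; hedged, as the file must first be fetched via `git lfs pull`:

from mistral_common.protocol.instruct.messages import UserMessage
from mistral_common.protocol.instruct.request import ChatCompletionRequest
from mistral_common.tokens.tokenizers.mistral import MistralTokenizer

tok = MistralTokenizer.from_file("tekken.json")
req = ChatCompletionRequest(messages=[UserMessage(content="Hello")])
print(tok.encode_chat_completion(req).tokens[:10])  # first few token ids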