chatpig committed on
Commit
4d1441d
·
verified ·
1 Parent(s): f3d858b

Upload config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +66 -0
config.json ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "Idefics3ForConditionalGeneration"
4
+ ],
5
+ "bos_token_id": 100264,
6
+ "dtype": "bfloat16",
7
+ "eos_token_id": 100257,
8
+ "image_token_id": 100270,
9
+ "model_type": "idefics3",
10
+ "pad_token_id": 100257,
11
+ "scale_factor": 4,
12
+ "text_config": {
13
+ "_name_or_path": "models/granitev06_hf_ai4k_sft_data_v4",
14
+ "architectures": [
15
+ "LlamaForCausalLM"
16
+ ],
17
+ "attention_bias": false,
18
+ "attention_dropout": 0.0,
19
+ "bos_token_id": 100264,
20
+ "dtype": "bfloat16",
21
+ "eos_token_id": 100257,
22
+ "head_dim": 64,
23
+ "hidden_act": "silu",
24
+ "hidden_size": 576,
25
+ "initializer_range": 0.02,
26
+ "intermediate_size": 1536,
27
+ "max_position_embeddings": 8192,
28
+ "mlp_bias": false,
29
+ "model_type": "llama",
30
+ "num_attention_heads": 9,
31
+ "num_hidden_layers": 30,
32
+ "num_key_value_heads": 3,
33
+ "pad_token_id": 100257,
34
+ "pretraining_tp": 1,
35
+ "rms_norm_eps": 1e-05,
36
+ "rope_scaling": null,
37
+ "rope_theta": 100000.0,
38
+ "tie_word_embeddings": true,
39
+ "use_cache": false,
40
+ "vocab_size": 100352
41
+ },
42
+ "tie_word_embeddings": true,
43
+ "transformers_version": "4.56.1",
44
+ "use_cache": true,
45
+ "vision_config": {
46
+ "attention_dropout": 0.0,
47
+ "hidden_act": "gelu_pytorch_tanh",
48
+ "hidden_size": 768,
49
+ "image_size": 512,
50
+ "initializer_range": 0.02,
51
+ "intermediate_size": 3072,
52
+ "layer_norm_eps": 1e-06,
53
+ "max_image_size": {
54
+ "longest_edge": 512
55
+ },
56
+ "model_type": "idefics3_vision",
57
+ "num_attention_heads": 12,
58
+ "num_channels": 3,
59
+ "num_hidden_layers": 12,
60
+ "patch_size": 16,
61
+ "size": {
62
+ "longest_edge": 512
63
+ }
64
+ },
65
+ "vocab_size": 100352
66
+ }