{
"architectures": [
"BagelForConditionalGeneration"
],
"connector_act": "gelu_pytorch_tanh",
"dtype": "bfloat16",
"interpolate_pos": false,
"latent_patch_size": 2,
"llm_config": {
"_name_or_path": "",
"architectures": [
"Qwen2ForCausalLM"
],
"attention_dropout": 0.0,
"bos_token_id": 151643,
"chunk_size_feed_forward": 0,
"dtype": "bfloat16",
"eos_token_id": 151645,
"hidden_act": "silu",
"hidden_size": 3584,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1"
},
"initializer_range": 0.02,
"intermediate_size": 18944,
"is_encoder_decoder": false,
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1
},
"layer_types": [
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention"
],
"max_position_embeddings": 32768,
"max_window_layers": 28,
"model_type": "qwen2",
"num_attention_heads": 28,
"num_hidden_layers": 28,
"num_key_value_heads": 4,
"output_attentions": false,
"output_hidden_states": false,
"pad_token_id": null,
"problem_type": null,
"qk_norm": true,
"return_dict": true,
"rms_norm_eps": 1e-06,
"rope_parameters": {
"rope_theta": 1000000.0,
"rope_type": "default"
},
"sliding_window": null,
"tie_word_embeddings": false,
"transformers_version": "5.3.0",
"use_cache": true,
"use_sliding_window": false,
"vocab_size": 152064
},
"max_latent_size": 32,
"model_type": "bagel",
"quantization_config": {
"autoround_version": "0.12.0",
"bits": 4,
"block_name_to_quantize": "language_model.model.layers",
"data_type": "int",
"extra_config": {
".*moe_gen.*": {
"bits": 16,
"data_type": "float"
},
".*self_attn\\.k_proj.*": {
"bits": 16,
"data_type": "float"
},
".*self_attn\\.o_proj.*": {
"bits": 16,
"data_type": "float"
},
".*self_attn\\.q_proj.*": {
"bits": 16,
"data_type": "float"
},
".*self_attn\\.v_proj.*": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.0.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.0.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.0.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.0.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.0.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.0.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.0.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.0.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.0.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.0.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.0.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.1.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.1.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.1.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.1.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.1.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.1.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.1.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.1.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.1.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.1.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.1.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.10.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.10.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.10.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.10.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.10.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.10.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.10.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.10.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.10.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.10.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.10.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.11.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.11.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.11.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.11.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.11.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.11.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.11.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.11.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.11.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.11.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.11.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.12.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.12.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.12.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.12.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.12.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.12.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.12.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.12.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.12.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.12.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.12.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.13.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.13.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.13.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.13.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.13.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.13.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.13.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.13.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.13.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.13.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.13.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.14.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.14.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.14.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.14.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.14.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.14.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.14.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.14.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.14.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.14.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.14.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.15.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.15.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.15.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.15.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.15.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.15.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.15.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.15.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.15.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.15.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.15.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.16.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.16.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.16.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.16.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.16.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.16.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.16.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.16.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.16.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.16.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.16.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.17.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.17.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.17.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.17.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.17.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.17.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.17.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.17.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.17.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.17.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.17.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.18.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.18.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.18.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.18.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.18.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.18.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.18.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.18.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.18.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.18.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.18.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.19.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.19.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.19.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.19.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.19.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.19.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.19.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.19.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.19.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.19.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.19.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.2.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.2.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.2.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.2.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.2.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.2.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.2.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.2.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.2.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.2.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.2.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.20.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.20.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.20.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.20.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.20.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.20.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.20.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.20.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.20.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.20.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.20.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.21.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.21.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.21.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.21.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.21.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.21.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.21.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.21.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.21.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.21.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.21.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.22.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.22.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.22.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.22.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.22.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.22.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.22.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.22.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.22.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.22.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.22.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.23.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.23.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.23.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.23.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.23.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.23.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.23.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.23.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.23.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.23.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.23.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.24.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.24.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.24.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.24.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.24.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.24.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.24.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.24.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.24.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.24.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.24.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.25.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.25.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.25.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.25.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.25.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.25.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.25.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.25.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.25.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.25.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.25.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.26.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.26.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.26.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.26.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.26.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.26.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.26.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.26.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.26.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.26.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.26.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.27.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.27.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.27.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.27.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.27.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.27.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.27.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.27.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.27.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.27.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.27.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.3.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.3.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.3.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.3.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.3.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.3.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.3.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.3.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.3.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.3.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.3.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.4.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.4.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.4.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.4.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.4.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.4.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.4.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.4.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.4.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.4.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.4.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.5.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.5.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.5.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.5.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.5.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.5.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.5.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.5.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.5.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.5.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.5.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.6.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.6.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.6.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.6.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.6.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.6.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.6.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.6.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.6.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.6.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.6.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.7.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.7.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.7.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.7.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.7.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.7.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.7.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.7.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.7.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.7.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.7.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.8.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.8.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.8.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.8.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.8.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.8.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.8.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.8.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.8.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.8.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.8.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.9.mlp_moe_gen.down_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.9.mlp_moe_gen.gate_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.9.mlp_moe_gen.up_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.9.self_attn.k_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.9.self_attn.k_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.9.self_attn.o_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.9.self_attn.o_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.9.self_attn.q_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.9.self_attn.q_proj_moe_gen": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.9.self_attn.v_proj": {
"bits": 16,
"data_type": "float"
},
"language_model.model.layers.9.self_attn.v_proj_moe_gen": {
"bits": 16,
"data_type": "float"
}
},
"group_size": 128,
"packing_format": "auto_round:auto_gptq",
"quant_method": "auto-round",
"sym": true,
"transform_config": {}
},
"timestep_shift": 1.0,
"transformers_version": "5.3.0",
"vae_config": {
"downsample": 8,
"z_channels": 16
},
"visual_gen": true,
"visual_und": true,
"vit_config": {
"hidden_size": 1152,
"image_size": 980,
"intermediate_size": 4304,
"model_type": "siglip_vision_model",
"num_attention_heads": 16,
"num_channels": 3,
"num_hidden_layers": 27,
"patch_size": 14
},
"vit_max_num_patch_per_side": 70
}