{
"action_end_token_id": 151933,
"action_expert_condition_source": "kv_cache",
"action_expert_config": {
"attn_dropout": 0.0,
"causal_attn": false,
"compile": "blocks",
"context_layer_norm": true,
"dropout": 0.0,
"ffn_multiple_of": 256,
"hidden_size": 768,
"implementation": "new",
"max_action_dim": 32,
"max_horizon": 32,
"mlp_ratio": 4.0,
"model_type": "molmoact2_action_expert",
"num_heads": 8,
"num_layers": 36,
"qk_norm": true,
"qk_norm_eps": 1e-06,
"rope": true,
"rope_on_cross_attention": true,
"timestep_embed_dim": 256
},
"action_expert_depth_gate": true,
"action_expert_depth_gate_init_bias": -4.0,
"action_expert_depth_gate_per_layer": true,
"action_expert_layer_mode": "per_layer",
"action_format": "both",
"action_horizon": 10,
"action_output_token_id": 151931,
"action_start_token_id": 151932,
"action_token_start_id": 151934,
"adapter_config": {
"attention_dropout": 0.0,
"attn_implementation": "sdpa",
"float32_attention": true,
"head_dim": 72,
"hidden_act": "silu",
"hidden_size": 1152,
"image_feature_dropout": 0.0,
"initializer_range": 0.02,
"intermediate_size": 9728,
"model_type": "molmoact2",
"num_attention_heads": 16,
"num_key_value_heads": 16,
"pooling_attention_mask": true,
"residual_dropout": 0.0,
"text_hidden_size": 2560,
"vit_layers": [
-3,
-9
]
},
"add_action_expert": true,
"add_control_tokens": true,
"add_setup_tokens": true,
"architectures": [
"MolmoAct2ForConditionalGeneration"
],
"auto_map": {
"AutoConfig": "configuration_molmoact2.MolmoAct2Config",
"AutoModelForImageTextToText": "modeling_molmoact2.MolmoAct2ForConditionalGeneration"
},
"depth_end_token_id": 153984,
"depth_mode": 2,
"depth_output_token_id": 153982,
"depth_start_token_id": 153983,
"depth_token_start_id": 153985,
"dtype": "float32",
"enable_depth_reasoning": true,
"flow_matching_beta_alpha": 1.0,
"flow_matching_beta_beta": 1.5,
"flow_matching_cutoff": 1.0,
"flow_matching_num_steps": 10,
"flow_matching_time_offset": 0.001,
"flow_matching_time_scale": 0.999,
"frame_end_token_id": 155656,
"frame_start_token_id": 155655,
"image_col_id": 155651,
"image_end_token_id": 155649,
"image_high_res_id": 155650,
"image_low_res_id": 155654,
"image_patch_id": 155650,
"image_start_token_id": 155648,
"initializer_range": 0.02,
"low_res_image_start_token_id": 155652,
"mask_action_dim_padding": true,
"max_action_dim": 32,
"model_type": "molmoact2",
"n_obs_steps": 1,
"norm_stats_filename": "norm_stats.json",
"num_action_tokens": 2048,
"num_depth_codes": 100,
"num_depth_tokens": 128,
"num_state_tokens": 256,
"state_end_token_id": 151674,
"state_format": "discrete",
"state_start_token_id": 151673,
"state_token_start_id": 151675,
"text_config": {
"additional_vocab_size": 128,
"attention_dropout": 0.0,
"attn_implementation": "sdpa",
"embedding_dropout": 0.0,
"head_dim": 128,
"hidden_act": "silu",
"hidden_size": 2560,
"initializer_range": 0.02,
"intermediate_size": 9728,
"layer_norm_eps": 1e-06,
"max_position_embeddings": 16384,
"model_type": "molmoact2_text",
"norm_after": false,
"num_attention_heads": 32,
"num_hidden_layers": 36,
"num_key_value_heads": 8,
"qk_norm_type": "qwen3",
"qkv_bias": false,
"residual_dropout": 0.0,
"rope_parameters": {
"rope_theta": 5000000.0,
"rope_type": "default"
},
"rope_scaling_layers": null,
"rope_theta": 5000000.0,
"tie_word_embeddings": false,
"use_cache": true,
"use_qk_norm": true,
"vocab_size": 155648
},
"tie_word_embeddings": false,
"transformers_version": "5.3.0",
"use_frame_special_tokens": true,
"vit_config": {
"attention_dropout": 0.0,
"attn_implementation": "sdpa",
"float32_attention": true,
"head_dim": 72,
"hidden_act": "gelu_pytorch_tanh",
"hidden_size": 1152,
"image_default_input_size": [
378,
378
],
"image_num_pos": 729,
"image_patch_size": 14,
"initializer_range": 0.02,
"intermediate_size": 4304,
"layer_norm_eps": 1e-06,
"model_type": "molmoact2",
"num_attention_heads": 16,
"num_hidden_layers": 27,
"num_key_value_heads": 16,
"residual_dropout": 0.0
},
"bos_token_id": 151645,
"eos_token_id": 151645,
"pad_token_id": 151643
}
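
A minimal sketch of sanity-checking this file and loading it through its auto_map. The local filename "config.json" and the repo id "org/molmoact2" are placeholders, not taken from this file; attribute access on the custom MolmoAct2Config is assumed to follow the usual transformers composite-config convention.

import json

# Assumes this file is saved locally as "config.json" (placeholder path).
with open("config.json") as f:
    cfg = json.load(f)

# The ViT geometry above is internally consistent:
# 378 / 14 = 27 patches per side, and 27 ** 2 = 729 == image_num_pos.
side = (cfg["vit_config"]["image_default_input_size"][0]
        // cfg["vit_config"]["image_patch_size"])
assert side * side == cfg["vit_config"]["image_num_pos"]

# The action expert mirrors the text model's depth (36 layers) at a
# smaller hidden size (768 vs. 2560), per the values above.
assert (cfg["action_expert_config"]["num_layers"]
        == cfg["text_config"]["num_hidden_layers"])

# Loading the model itself would go through the auto_map entries, which
# point at custom code shipped alongside this config
# (configuration_molmoact2.py / modeling_molmoact2.py), so
# trust_remote_code=True would be required:
#
# from transformers import AutoConfig
# config = AutoConfig.from_pretrained("org/molmoact2",  # placeholder repo id
#                                     trust_remote_code=True)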