---
# llm-compressor quantization recipe: AWQ activation-aware scale smoothing
# followed by 4-bit weight / 16-bit activation (W4A16) quantization.
default_stage:
  default_modifiers:
    # AWQ: for each mapping, fold output scales of `smooth_layer` into the
    # listed `balance_layers` so that quantizing the balance layers loses
    # less salient-weight precision. Layer names are matched by regex
    # (the `re:` prefix marks a regex pattern to the consumer).
    AWQModifier:
      mappings:
        # Attention input norm -> Q/K/V projections.
        - smooth_layer: 're:.*input_layernorm$'
          balance_layers: ['re:.*q_proj$', 're:.*k_proj$', 're:.*v_proj$']
          activation_hook_target: null
        # V projection -> output projection.
        - smooth_layer: 're:.*v_proj$'
          balance_layers: ['re:.*o_proj$']
          activation_hook_target: null
        # Post-attention norm -> MLP gate/up projections.
        - smooth_layer: 're:.*post_attention_layernorm$'
          balance_layers: ['re:.*gate_proj$', 're:.*up_proj$']
          activation_hook_target: null
        # Up projection -> down projection.
        - smooth_layer: 're:.*up_proj$'
          balance_layers: ['re:.*down_proj$']
          activation_hook_target: null
      duo_scaling: true
      # Number of candidate scale ratios searched per mapping.
      n_grid: 20
    # Quantize all Linear layers to W4A16, excluding output head,
    # embeddings, and MoE router/gate layers (kept at full precision).
    QuantizationModifier:
      targets: [Linear]
      ignore: ['lm_head', 're:.*embed.*', 're:.*router.*', 're:.*\.gate$']
      scheme: W4A16
      bypass_divisibility_checks: false