default_stage:
  default_modifiers:
    AWQModifier:
      config_groups:
        group_0:
          targets: [Linear]
          weights:
            num_bits: 8
            type: int
            symmetric: true
            group_size: 128
            strategy: group
            block_structure: null
            dynamic: false
            actorder: null
            observer: minmax
            observer_kwargs: {}
          input_activations: null
          output_activations: null
          format: null
      targets: [Linear]
      ignore: [model.layers.0.self_attn.q_proj, model.layers.0.self_attn.k_proj,
        model.layers.0.self_attn.v_proj, model.layers.0.self_attn.o_proj,
        model.layers.0.mlp.gate_proj, model.layers.0.mlp.up_proj, model.layers.0.mlp.down_proj,
        model.layers.1.mlp.shared_experts.gate_proj, model.layers.1.mlp.shared_experts.up_proj, model.layers.1.mlp.shared_experts.down_proj,
        model.layers.2.mlp.shared_experts.gate_proj, model.layers.2.mlp.shared_experts.up_proj, model.layers.2.mlp.shared_experts.down_proj,
        model.layers.3.mlp.shared_experts.gate_proj, model.layers.3.mlp.shared_experts.up_proj, model.layers.3.mlp.shared_experts.down_proj,
        model.layers.4.mlp.shared_experts.gate_proj, model.layers.4.mlp.shared_experts.up_proj, model.layers.4.mlp.shared_experts.down_proj,
        model.layers.5.mlp.shared_experts.gate_proj, model.layers.5.mlp.shared_experts.up_proj, model.layers.5.mlp.shared_experts.down_proj,
        model.layers.6.mlp.shared_experts.gate_proj, model.layers.6.mlp.shared_experts.up_proj, model.layers.6.mlp.shared_experts.down_proj,
        model.layers.7.mlp.shared_experts.gate_proj, model.layers.7.mlp.shared_experts.up_proj, model.layers.7.mlp.shared_experts.down_proj,
        model.layers.8.mlp.shared_experts.gate_proj, model.layers.8.mlp.shared_experts.up_proj, model.layers.8.mlp.shared_experts.down_proj,
        model.layers.9.mlp.shared_experts.gate_proj, model.layers.9.mlp.shared_experts.up_proj, model.layers.9.mlp.shared_experts.down_proj,
        model.layers.10.mlp.shared_experts.gate_proj, model.layers.10.mlp.shared_experts.up_proj, model.layers.10.mlp.shared_experts.down_proj,
        model.layers.11.mlp.shared_experts.gate_proj, model.layers.11.mlp.shared_experts.up_proj, model.layers.11.mlp.shared_experts.down_proj,
        model.layers.12.mlp.shared_experts.gate_proj, model.layers.12.mlp.shared_experts.up_proj, model.layers.12.mlp.shared_experts.down_proj,
        model.layers.13.mlp.shared_experts.gate_proj, model.layers.13.mlp.shared_experts.up_proj, model.layers.13.mlp.shared_experts.down_proj,
        model.layers.14.mlp.shared_experts.gate_proj, model.layers.14.mlp.shared_experts.up_proj, model.layers.14.mlp.shared_experts.down_proj,
        model.layers.15.mlp.shared_experts.gate_proj, model.layers.15.mlp.shared_experts.up_proj, model.layers.15.mlp.shared_experts.down_proj,
        model.layers.16.mlp.shared_experts.gate_proj, model.layers.16.mlp.shared_experts.up_proj, model.layers.16.mlp.shared_experts.down_proj,
        model.layers.17.mlp.shared_experts.gate_proj, model.layers.17.mlp.shared_experts.up_proj, model.layers.17.mlp.shared_experts.down_proj,
        model.layers.18.mlp.shared_experts.gate_proj, model.layers.18.mlp.shared_experts.up_proj, model.layers.18.mlp.shared_experts.down_proj,
        model.layers.19.mlp.shared_experts.gate_proj, model.layers.19.mlp.shared_experts.up_proj, model.layers.19.mlp.shared_experts.down_proj,
        model.layers.20.mlp.shared_experts.gate_proj, model.layers.20.mlp.shared_experts.up_proj, model.layers.20.mlp.shared_experts.down_proj,
        model.layers.21.mlp.shared_experts.gate_proj, model.layers.21.mlp.shared_experts.up_proj, model.layers.21.mlp.shared_experts.down_proj,
        model.layers.22.mlp.shared_experts.gate_proj, model.layers.22.mlp.shared_experts.up_proj, model.layers.22.mlp.shared_experts.down_proj,
        model.layers.23.mlp.shared_experts.gate_proj, model.layers.23.mlp.shared_experts.up_proj, model.layers.23.mlp.shared_experts.down_proj,
        model.layers.24.mlp.shared_experts.gate_proj, model.layers.24.mlp.shared_experts.up_proj, model.layers.24.mlp.shared_experts.down_proj,
        model.layers.25.mlp.shared_experts.gate_proj, model.layers.25.mlp.shared_experts.up_proj, model.layers.25.mlp.shared_experts.down_proj,
        model.layers.26.mlp.shared_experts.gate_proj, model.layers.26.mlp.shared_experts.up_proj, model.layers.26.mlp.shared_experts.down_proj,
        model.layers.27.mlp.shared_experts.gate_proj, model.layers.27.mlp.shared_experts.up_proj, model.layers.27.mlp.shared_experts.down_proj,
        model.layers.28.mlp.shared_experts.gate_proj, model.layers.28.mlp.shared_experts.up_proj, model.layers.28.mlp.shared_experts.down_proj,
        model.layers.29.mlp.shared_experts.gate_proj, model.layers.29.mlp.shared_experts.up_proj, model.layers.29.mlp.shared_experts.down_proj,
        model.layers.30.mlp.shared_experts.gate_proj, model.layers.30.mlp.shared_experts.up_proj, model.layers.30.mlp.shared_experts.down_proj,
        model.layers.31.mlp.shared_experts.gate_proj, model.layers.31.mlp.shared_experts.up_proj, model.layers.31.mlp.shared_experts.down_proj,
        model.layers.32.mlp.shared_experts.gate_proj, model.layers.32.mlp.shared_experts.up_proj, model.layers.32.mlp.shared_experts.down_proj,
        model.layers.33.mlp.shared_experts.gate_proj, model.layers.33.mlp.shared_experts.up_proj, model.layers.33.mlp.shared_experts.down_proj,
        model.layers.34.mlp.shared_experts.gate_proj, model.layers.34.mlp.shared_experts.up_proj, model.layers.34.mlp.shared_experts.down_proj,
        model.layers.35.mlp.shared_experts.gate_proj, model.layers.35.mlp.shared_experts.up_proj, model.layers.35.mlp.shared_experts.down_proj,
        model.layers.36.mlp.shared_experts.gate_proj, model.layers.36.mlp.shared_experts.up_proj, model.layers.36.mlp.shared_experts.down_proj,
        model.layers.37.mlp.shared_experts.gate_proj, model.layers.37.mlp.shared_experts.up_proj, model.layers.37.mlp.shared_experts.down_proj,
        model.layers.38.mlp.shared_experts.gate_proj, model.layers.38.mlp.shared_experts.up_proj, model.layers.38.mlp.shared_experts.down_proj,
        model.layers.39.mlp.shared_experts.gate_proj, model.layers.39.mlp.shared_experts.up_proj, model.layers.39.mlp.shared_experts.down_proj,
        model.layers.40.mlp.shared_experts.gate_proj, model.layers.40.mlp.shared_experts.up_proj, model.layers.40.mlp.shared_experts.down_proj,
        model.layers.41.mlp.shared_experts.gate_proj, model.layers.41.mlp.shared_experts.up_proj, model.layers.41.mlp.shared_experts.down_proj,
        model.layers.42.mlp.shared_experts.gate_proj, model.layers.42.mlp.shared_experts.up_proj, model.layers.42.mlp.shared_experts.down_proj,
        model.layers.43.mlp.shared_experts.gate_proj, model.layers.43.mlp.shared_experts.up_proj, model.layers.43.mlp.shared_experts.down_proj,
        model.layers.44.mlp.shared_experts.gate_proj, model.layers.44.mlp.shared_experts.up_proj, model.layers.44.mlp.shared_experts.down_proj,
        model.layers.45.mlp.shared_experts.gate_proj, model.layers.45.mlp.shared_experts.up_proj, model.layers.45.mlp.shared_experts.down_proj,
        model.layers.46.self_attn.q_proj, model.layers.46.self_attn.k_proj,
        model.layers.46.self_attn.v_proj, model.layers.46.self_attn.o_proj,
        model.layers.46.mlp.shared_experts.gate_proj, model.layers.46.mlp.shared_experts.up_proj, model.layers.46.mlp.shared_experts.down_proj,
        lm_head]
      mappings:
      - smooth_layer: re:.*input_layernorm$
        balance_layers: ['re:.*q_proj$', 're:.*k_proj$', 're:.*v_proj$']
      - smooth_layer: re:.*v_proj$
        balance_layers: ['re:.*o_proj$']
      - smooth_layer: re:.*post_attention_layernorm$
        balance_layers: ['re:.*gate_proj$', 're:.*up_proj$']
      - smooth_layer: re:.*up_proj$
        balance_layers: ['re:.*down_proj$']
      duo_scaling: true
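For context, a recipe in this form is normally consumed by llm-compressor's one-shot entry point, which loads the model, runs AWQ calibration on a small dataset, and writes out the compressed checkpoint. The sketch below shows that flow under stated assumptions: the model path, calibration dataset, sample count, sequence length, and output directory are illustrative placeholders, not values recorded in this recipe.

```python
# Minimal sketch of applying the AWQ recipe above with llm-compressor.
# All concrete values (model path, dataset, counts, output dir) are
# illustrative placeholders, not taken from this model card.
from llmcompressor import oneshot

oneshot(
    model="path/to/base-model",        # placeholder: the unquantized source model
    recipe="recipe.yaml",              # the AWQ recipe shown above, saved to disk
    dataset="open_platypus",           # example calibration dataset; any text set works
    num_calibration_samples=256,       # illustrative calibration sample count
    max_seq_length=2048,               # illustrative calibration sequence length
    output_dir="model-awq-w8a16",      # placeholder output directory
)
```

The `ignore` list keeps the first dense layer, the first and last attention blocks, the per-layer shared experts, and `lm_head` in full precision, so only the remaining `Linear` modules receive 8-bit grouped weight quantization.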