# AWQ quantization recipe (source revision 22d9515, 8,145 bytes)
# Single-stage AWQ (Activation-aware Weight Quantization) recipe.
default_stage:
  default_modifiers:
    AWQModifier:
      # One quantization scheme group covering all Linear modules.
      config_groups:
        group_0:
          targets: [Linear]
          # Weight-only quantization: 8-bit symmetric integers, quantized in
          # groups of 128 with static (non-dynamic) scales from a min/max observer.
          weights:
            num_bits: 8
            type: int
            symmetric: true
            group_size: 128
            strategy: group
            block_structure: null
            dynamic: false
            actorder: null
            observer: minmax
            observer_kwargs: {}
          # Activations are not quantized in this recipe.
          input_activations: null
          output_activations: null
          format: null
      targets: [Linear]
      # Modules excluded from quantization: all of layer 0 (attention q/k/v/o
      # and MLP gate/up/down), every mlp.shared_experts projection in layers
      # 1-46, layer 46's attention projections, and the lm_head.
      # NOTE(review): presumably these stay in full precision because they are
      # quantization-sensitive — confirm against the script that generated this.
      ignore: [model.layers.0.self_attn.q_proj, model.layers.0.self_attn.k_proj, model.layers.0.self_attn.v_proj,
        model.layers.0.self_attn.o_proj, model.layers.0.mlp.gate_proj, model.layers.0.mlp.up_proj,
        model.layers.0.mlp.down_proj, model.layers.1.mlp.shared_experts.gate_proj, model.layers.1.mlp.shared_experts.up_proj,
        model.layers.1.mlp.shared_experts.down_proj, model.layers.2.mlp.shared_experts.gate_proj,
        model.layers.2.mlp.shared_experts.up_proj, model.layers.2.mlp.shared_experts.down_proj,
        model.layers.3.mlp.shared_experts.gate_proj, model.layers.3.mlp.shared_experts.up_proj,
        model.layers.3.mlp.shared_experts.down_proj, model.layers.4.mlp.shared_experts.gate_proj,
        model.layers.4.mlp.shared_experts.up_proj, model.layers.4.mlp.shared_experts.down_proj,
        model.layers.5.mlp.shared_experts.gate_proj, model.layers.5.mlp.shared_experts.up_proj,
        model.layers.5.mlp.shared_experts.down_proj, model.layers.6.mlp.shared_experts.gate_proj,
        model.layers.6.mlp.shared_experts.up_proj, model.layers.6.mlp.shared_experts.down_proj,
        model.layers.7.mlp.shared_experts.gate_proj, model.layers.7.mlp.shared_experts.up_proj,
        model.layers.7.mlp.shared_experts.down_proj, model.layers.8.mlp.shared_experts.gate_proj,
        model.layers.8.mlp.shared_experts.up_proj, model.layers.8.mlp.shared_experts.down_proj,
        model.layers.9.mlp.shared_experts.gate_proj, model.layers.9.mlp.shared_experts.up_proj,
        model.layers.9.mlp.shared_experts.down_proj, model.layers.10.mlp.shared_experts.gate_proj,
        model.layers.10.mlp.shared_experts.up_proj, model.layers.10.mlp.shared_experts.down_proj,
        model.layers.11.mlp.shared_experts.gate_proj, model.layers.11.mlp.shared_experts.up_proj,
        model.layers.11.mlp.shared_experts.down_proj, model.layers.12.mlp.shared_experts.gate_proj,
        model.layers.12.mlp.shared_experts.up_proj, model.layers.12.mlp.shared_experts.down_proj,
        model.layers.13.mlp.shared_experts.gate_proj, model.layers.13.mlp.shared_experts.up_proj,
        model.layers.13.mlp.shared_experts.down_proj, model.layers.14.mlp.shared_experts.gate_proj,
        model.layers.14.mlp.shared_experts.up_proj, model.layers.14.mlp.shared_experts.down_proj,
        model.layers.15.mlp.shared_experts.gate_proj, model.layers.15.mlp.shared_experts.up_proj,
        model.layers.15.mlp.shared_experts.down_proj, model.layers.16.mlp.shared_experts.gate_proj,
        model.layers.16.mlp.shared_experts.up_proj, model.layers.16.mlp.shared_experts.down_proj,
        model.layers.17.mlp.shared_experts.gate_proj, model.layers.17.mlp.shared_experts.up_proj,
        model.layers.17.mlp.shared_experts.down_proj, model.layers.18.mlp.shared_experts.gate_proj,
        model.layers.18.mlp.shared_experts.up_proj, model.layers.18.mlp.shared_experts.down_proj,
        model.layers.19.mlp.shared_experts.gate_proj, model.layers.19.mlp.shared_experts.up_proj,
        model.layers.19.mlp.shared_experts.down_proj, model.layers.20.mlp.shared_experts.gate_proj,
        model.layers.20.mlp.shared_experts.up_proj, model.layers.20.mlp.shared_experts.down_proj,
        model.layers.21.mlp.shared_experts.gate_proj, model.layers.21.mlp.shared_experts.up_proj,
        model.layers.21.mlp.shared_experts.down_proj, model.layers.22.mlp.shared_experts.gate_proj,
        model.layers.22.mlp.shared_experts.up_proj, model.layers.22.mlp.shared_experts.down_proj,
        model.layers.23.mlp.shared_experts.gate_proj, model.layers.23.mlp.shared_experts.up_proj,
        model.layers.23.mlp.shared_experts.down_proj, model.layers.24.mlp.shared_experts.gate_proj,
        model.layers.24.mlp.shared_experts.up_proj, model.layers.24.mlp.shared_experts.down_proj,
        model.layers.25.mlp.shared_experts.gate_proj, model.layers.25.mlp.shared_experts.up_proj,
        model.layers.25.mlp.shared_experts.down_proj, model.layers.26.mlp.shared_experts.gate_proj,
        model.layers.26.mlp.shared_experts.up_proj, model.layers.26.mlp.shared_experts.down_proj,
        model.layers.27.mlp.shared_experts.gate_proj, model.layers.27.mlp.shared_experts.up_proj,
        model.layers.27.mlp.shared_experts.down_proj, model.layers.28.mlp.shared_experts.gate_proj,
        model.layers.28.mlp.shared_experts.up_proj, model.layers.28.mlp.shared_experts.down_proj,
        model.layers.29.mlp.shared_experts.gate_proj, model.layers.29.mlp.shared_experts.up_proj,
        model.layers.29.mlp.shared_experts.down_proj, model.layers.30.mlp.shared_experts.gate_proj,
        model.layers.30.mlp.shared_experts.up_proj, model.layers.30.mlp.shared_experts.down_proj,
        model.layers.31.mlp.shared_experts.gate_proj, model.layers.31.mlp.shared_experts.up_proj,
        model.layers.31.mlp.shared_experts.down_proj, model.layers.32.mlp.shared_experts.gate_proj,
        model.layers.32.mlp.shared_experts.up_proj, model.layers.32.mlp.shared_experts.down_proj,
        model.layers.33.mlp.shared_experts.gate_proj, model.layers.33.mlp.shared_experts.up_proj,
        model.layers.33.mlp.shared_experts.down_proj, model.layers.34.mlp.shared_experts.gate_proj,
        model.layers.34.mlp.shared_experts.up_proj, model.layers.34.mlp.shared_experts.down_proj,
        model.layers.35.mlp.shared_experts.gate_proj, model.layers.35.mlp.shared_experts.up_proj,
        model.layers.35.mlp.shared_experts.down_proj, model.layers.36.mlp.shared_experts.gate_proj,
        model.layers.36.mlp.shared_experts.up_proj, model.layers.36.mlp.shared_experts.down_proj,
        model.layers.37.mlp.shared_experts.gate_proj, model.layers.37.mlp.shared_experts.up_proj,
        model.layers.37.mlp.shared_experts.down_proj, model.layers.38.mlp.shared_experts.gate_proj,
        model.layers.38.mlp.shared_experts.up_proj, model.layers.38.mlp.shared_experts.down_proj,
        model.layers.39.mlp.shared_experts.gate_proj, model.layers.39.mlp.shared_experts.up_proj,
        model.layers.39.mlp.shared_experts.down_proj, model.layers.40.mlp.shared_experts.gate_proj,
        model.layers.40.mlp.shared_experts.up_proj, model.layers.40.mlp.shared_experts.down_proj,
        model.layers.41.mlp.shared_experts.gate_proj, model.layers.41.mlp.shared_experts.up_proj,
        model.layers.41.mlp.shared_experts.down_proj, model.layers.42.mlp.shared_experts.gate_proj,
        model.layers.42.mlp.shared_experts.up_proj, model.layers.42.mlp.shared_experts.down_proj,
        model.layers.43.mlp.shared_experts.gate_proj, model.layers.43.mlp.shared_experts.up_proj,
        model.layers.43.mlp.shared_experts.down_proj, model.layers.44.mlp.shared_experts.gate_proj,
        model.layers.44.mlp.shared_experts.up_proj, model.layers.44.mlp.shared_experts.down_proj,
        model.layers.45.mlp.shared_experts.gate_proj, model.layers.45.mlp.shared_experts.up_proj,
        model.layers.45.mlp.shared_experts.down_proj, model.layers.46.self_attn.q_proj, model.layers.46.self_attn.k_proj,
        model.layers.46.self_attn.v_proj, model.layers.46.self_attn.o_proj, model.layers.46.mlp.shared_experts.gate_proj,
        model.layers.46.mlp.shared_experts.up_proj, model.layers.46.mlp.shared_experts.down_proj,
        lm_head]
      # AWQ smoothing mappings: each entry pairs a smooth_layer module pattern
      # with the balance_layers patterns it is scaled against. Values prefixed
      # with "re:" are regexes matched against module names.
      mappings:
      # Input layernorm balanced against the attention q/k/v projections.
      - smooth_layer: re:.*input_layernorm$
        balance_layers: ['re:.*q_proj$', 're:.*k_proj$', 're:.*v_proj$']
      # v_proj balanced against the attention output projection.
      - smooth_layer: re:.*v_proj$
        balance_layers: ['re:.*o_proj$']
      # Post-attention layernorm balanced against the MLP gate/up projections.
      - smooth_layer: re:.*post_attention_layernorm$
        balance_layers: ['re:.*gate_proj$', 're:.*up_proj$']
      # up_proj balanced against the MLP down projection.
      - smooth_layer: re:.*up_proj$
        balance_layers: ['re:.*down_proj$']
      duo_scaling: true