{ "d_model": 3072, "ssm_cfg": { "expand": 1 }, "rms_norm_eps": 1e-05, "vocab_size": null, "d_inner": 3072, "d_xb": 1024, "intermediate_size": 8192, "hidden_act": "silu", "n_layer": 28, "attn_layers": [ 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26 ] }