tiny-DbrxForCausalLM / model.safetensors.index.json
{
  "metadata": {
    "total_parameters": 6348566688,
    "total_size": 25394266752
  },
  "weight_map": {
    "lm_head.weight": "model-00006-of-00006.safetensors",
    "transformer.blocks.0.ffn.experts.mlp.v1": "model-00002-of-00006.safetensors",
    "transformer.blocks.0.ffn.experts.mlp.w1": "model-00001-of-00006.safetensors",
    "transformer.blocks.0.ffn.experts.mlp.w2": "model-00003-of-00006.safetensors",
    "transformer.blocks.0.ffn.router.layer.weight": "model-00001-of-00006.safetensors",
    "transformer.blocks.0.norm_attn_norm.attn.Wqkv.weight": "model-00001-of-00006.safetensors",
    "transformer.blocks.0.norm_attn_norm.attn.out_proj.weight": "model-00001-of-00006.safetensors",
    "transformer.blocks.0.norm_attn_norm.norm_1.weight": "model-00001-of-00006.safetensors",
    "transformer.blocks.0.norm_attn_norm.norm_2.weight": "model-00001-of-00006.safetensors",
    "transformer.blocks.1.ffn.experts.mlp.v1": "model-00005-of-00006.safetensors",
    "transformer.blocks.1.ffn.experts.mlp.w1": "model-00004-of-00006.safetensors",
    "transformer.blocks.1.ffn.experts.mlp.w2": "model-00006-of-00006.safetensors",
    "transformer.blocks.1.ffn.router.layer.weight": "model-00003-of-00006.safetensors",
    "transformer.blocks.1.norm_attn_norm.attn.Wqkv.weight": "model-00003-of-00006.safetensors",
    "transformer.blocks.1.norm_attn_norm.attn.out_proj.weight": "model-00003-of-00006.safetensors",
    "transformer.blocks.1.norm_attn_norm.norm_1.weight": "model-00003-of-00006.safetensors",
    "transformer.blocks.1.norm_attn_norm.norm_2.weight": "model-00003-of-00006.safetensors",
    "transformer.norm_f.weight": "model-00006-of-00006.safetensors",
    "transformer.wte.weight": "model-00001-of-00006.safetensors"
  }
}
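Note that `total_size` is exactly 4 × `total_parameters` (6,348,566,688 × 4 bytes = 25,394,266,752 bytes), consistent with float32 storage. Below is a minimal sketch of how a loader consumes this index: it looks a tensor name up in `weight_map` to find the shard that holds it, then reads just that tensor. It assumes a local checkout of the repository (the `repo_dir` path is illustrative) plus the `safetensors` and `torch` packages; the tensor name is taken verbatim from the map above.

```python
import json
import os

from safetensors import safe_open

# Hypothetical local path to the downloaded repository (an assumption).
repo_dir = "tiny-DbrxForCausalLM"

with open(os.path.join(repo_dir, "model.safetensors.index.json")) as f:
    index = json.load(f)

# Resolve one tensor name to its shard file via the weight_map, so only
# that shard is opened instead of all six.
name = "transformer.blocks.0.ffn.router.layer.weight"
shard = index["weight_map"][name]  # "model-00001-of-00006.safetensors"

with safe_open(os.path.join(repo_dir, shard), framework="pt") as f:
    tensor = f.get_tensor(name)

print(name, tuple(tensor.shape), tensor.dtype)
```

Grouping tensors per shard before opening files keeps this efficient when loading many weights, which is how sharded checkpoints are typically consumed.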