{
  "architectures": [
    "TransnormerForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_transnormer.TransnormerConfig",
    "AutoModelForCausalLM": "modeling_transnormer.TransnormerForCausalLM"
  },
  "pad_token_id": 0,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "vocab_size": 64000,
  "use_cache": true,
  "init_std": 0.02,
  "decoder_embed_dim": 1024,
  "decoder_layers": 24,
  "decoder_attention_heads": 8,
  "no_scale_embedding": false,
  "add_bos_token": false,
  "norm_type": "simplermsnorm",
  "linear_use_lrpe_list": [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
  "hidden_dim": 1024,
  "linear_act_fun": "swish",
  "glu_dim": 2816,
  "bias": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.32.0"
}
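
Because `auto_map` points at custom classes (`configuration_transnormer.TransnormerConfig`, `modeling_transnormer.TransnormerForCausalLM`), loading this checkpoint through `transformers` requires `trust_remote_code=True`. Below is a minimal loading sketch under the assumption that this `config.json` and the two custom Python modules live together in one model repo; the repo id is a placeholder, not a real path.

```python
# Minimal loading sketch. Assumption: config.json,
# configuration_transnormer.py, and modeling_transnormer.py all ship
# in the same model repo; "your-org/transnormer-checkpoint" is a
# hypothetical placeholder id.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "your-org/transnormer-checkpoint"  # hypothetical repo id

config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    config=config,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in the config
    trust_remote_code=True,      # required by the custom auto_map classes
)
model.eval()
```

Note that `linear_use_lrpe_list` has one entry per decoder layer (matching `"decoder_layers": 24`); as configured here, it appears to enable LRPE only in the first layer and disable it everywhere else.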