File size: 746 Bytes (commit d1b63e9)
{
  "_class_name": "Transformer1DModel",
  "_diffusers_version": "0.22.0.dev0",
  "activation_fn": "gelu-approximate",
  "attention_bias": true,
  "attention_head_dim": 64,
  "attention_type": "default",
  "cross_attention_dim": null,
  "double_self_attention": false,
  "dropout": 0.0,
  "in_channels": 1040,
  "norm_elementwise_affine": false,
  "norm_eps": 1e-06,
  "norm_num_groups": 32,
  "norm_type": "ada_norm_single",
  "num_attention_heads": 24,
  "num_embeds_ada_norm": 1000,
  "num_layers": 32,
  "num_vector_embeds": null,
  "only_cross_attention": false,
  "out_channels": 136,
  "patch_size": 1,
  "sample_size": 384,
  "upcast_attention": false,
  "use_linear_projection": false
}
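
The fields above are standard diffusers transformer arguments (attention_head_dim, num_attention_heads, norm_type, and so on). Below is a minimal sketch of instantiating a model from this file; it assumes diffusers is installed, the JSON is saved locally under the hypothetical name config.json, and the class recorded in "_class_name" is importable from the diffusers namespace (it may instead ship with the model's companion codebase).

import json
import diffusers

# Hypothetical local path to the JSON shown above.
with open("config.json") as f:
    config = json.load(f)

# Resolve the class named in "_class_name"; this raises AttributeError if the
# class is not part of the installed diffusers release.
model_cls = getattr(diffusers, config["_class_name"])

# from_config builds the architecture with randomly initialized weights;
# private keys such as "_class_name" and "_diffusers_version" are ignored.
model = model_cls.from_config(config)

print(model.config.num_attention_heads, model.config.attention_head_dim)

To load the trained weights rather than a random initialization, from_pretrained would instead be called on the repository folder that contains this config alongside its weight file.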