Upload config
config.json  CHANGED  (+1 -3)

@@ -8,7 +8,6 @@
   "cls_attn_layers": 2,
   "decoder_dropout": 0.1,
   "decoder_hidden_size": 768,
-  "depth": 18,
   "depths": null,
   "drop_path_rate": 0.0,
   "drop_rate": 0.0,
@@ -45,8 +44,7 @@
   "norm_layer": null,
   "num_attention_heads": 8,
   "num_classes": 1000,
-  "
-  "num_hidden_layers": 12,
+  "num_hidden_layers": 18,
   "out_index": -1,
   "patch_size": 16,
   "qkv_bias": true,
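Net effect: the commit drops the redundant top-level "depth" key (value 18) and bumps "num_hidden_layers" from 12 to 18 so the layer count lives under a single key. A minimal sketch of how a consumer might sanity-check the updated file, assuming it has been downloaded to config.json in the working directory (the path is an assumption, not part of the commit):

import json

# Load the updated config (local path "config.json" is assumed).
with open("config.json") as f:
    cfg = json.load(f)

# After this commit, the old "depth" key is gone and its value (18)
# is carried by "num_hidden_layers" instead.
assert "depth" not in cfg
assert cfg["num_hidden_layers"] == 18

# A couple of unchanged fields visible in the diff context.
assert cfg["num_classes"] == 1000
assert cfg["patch_size"] == 16

print("config.json matches the committed change")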