Update config.json for transformers compatibility
config.json: +9 -3

@@ -513,9 +513,15 @@
     "vision_end_token_id": 151653,
     "vision_start_token_id": 151652
   },
-  "transformers_version": "
+  "transformers_version": "4.45.0",
   "tts_bos_token_id": 151672,
   "tts_eos_token_id": 151673,
   "tts_pad_token_id": 151671,
-  "user_token_id": 872
-}
+  "user_token_id": 872,
+  "auto_map": {
+    "AutoModelForCausalLM": "Qwen3OmniMoeForConditionalGeneration",
+    "AutoProcessor": "Qwen3OmniMoeProcessor"
+  },
+  "torch_dtype": "bfloat16",
+  "attn_implementation": "flash_attention_2"
+}
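For reference, a minimal loading sketch that exercises the updated fields (auto_map, torch_dtype, attn_implementation). The repository id below is a placeholder, not taken from this commit, and flash_attention_2 only works if flash-attn is installed locally; adjust both to your setup.

# Minimal loading sketch; repo_id is a hypothetical placeholder.
import torch
from transformers import AutoModelForCausalLM, AutoProcessor

repo_id = "Qwen/Qwen3-Omni"  # placeholder -- substitute the actual model repository

# trust_remote_code lets transformers resolve the classes registered in "auto_map"
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,               # matches "torch_dtype": "bfloat16"
    attn_implementation="flash_attention_2",  # matches the new config field; requires flash-attn
    trust_remote_code=True,
)
processor = AutoProcessor.from_pretrained(repo_id, trust_remote_code=True)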