{ "module": "keras_hub.src.models.qwen3_moe.qwen3_moe_causal_lm", "class_name": "Qwen3MoeCausalLM", "config": { "backbone": { "module": "keras_hub.src.models.qwen3_moe.qwen3_moe_backbone", "class_name": "Qwen3MoeBackbone", "config": { "name": "qwen3_moe_backbone", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "float32" }, "registered_name": null }, "vocabulary_size": 151936, "num_layers": 48, "num_query_heads": 32, "head_dim": 128, "hidden_dim": 2048, "intermediate_dim": 6144, "moe_intermediate_dim": 768, "rope_max_wavelength": 1000000.0, "num_key_value_heads": 4, "rope_scaling_factor": 1.0, "layer_norm_epsilon": 1e-06, "dropout": 0, "tie_word_embeddings": false, "sliding_window_size": null, "num_experts": 128, "top_k": 8, "norm_top_k_prob": true, "decoder_sparse_step": 1, "mlp_only_layers": [], "router_aux_loss_coefficient": 0.001 }, "registered_name": "keras_hub>Qwen3MoeBackbone" }, "preprocessor": { "module": "keras_hub.src.models.qwen3_moe.qwen3_moe_causal_lm_preprocessor", "class_name": "Qwen3MoeCausalLMPreprocessor", "config": { "name": "qwen3_moe_causal_lm_preprocessor_3", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "float32" }, "registered_name": null }, "tokenizer": { "module": "keras_hub.src.models.qwen3_moe.qwen3_moe_tokenizer", "class_name": "Qwen3MoeTokenizer", "config": { "name": "qwen3_moe_tokenizer", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "int32" }, "registered_name": null }, "config_file": "tokenizer.json", "sequence_length": null, "add_prefix_space": false, "unsplittable_tokens": [ "<|box_end|>", "<|box_start|>", "<|fim_pad|>", "<|vision_start|>", "<|endoftext|>", "<|fim_suffix|>", "<|fim_prefix|>", "", "<|im_start|>", "<|vision_pad|>", "", "<|quad_start|>", "", "<|object_ref_start|>", "<|file_sep|>", "<|image_pad|>", "<|object_ref_end|>", "<|quad_end|>", "<|im_end|>", "", "", "<|video_pad|>", "<|vision_end|>", "<|fim_middle|>", "", "<|repo_name|>" ] }, "registered_name": "keras_hub>Qwen3MoeTokenizer" }, "config_file": "preprocessor.json", "sequence_length": 1024, "add_start_token": true, "add_end_token": true }, "registered_name": "keras_hub>Qwen3MoeCausalLMPreprocessor" }, "name": "qwen3_moe_causal_lm_1" }, "registered_name": "keras_hub>Qwen3MoeCausalLM" }