{
  "module": "keras_hub.src.models.qwen3.qwen3_causal_lm",
  "class_name": "Qwen3CausalLM",
  "config": {
    "backbone": {
      "module": "keras_hub.src.models.qwen3.qwen3_backbone",
      "class_name": "Qwen3Backbone",
      "config": {
        "name": "qwen3_backbone",
        "trainable": true,
        "dtype": {
          "module": "keras",
          "class_name": "DTypePolicy",
          "config": {
            "name": "float32"
          },
          "registered_name": null
        },
        "vocabulary_size": 151936,
        "num_layers": 36,
        "num_query_heads": 32,
        "hidden_dim": 2560,
        "head_dim": 128,
        "intermediate_dim": 9728,
        "rope_max_wavelength": 1000000,
        "rope_scaling_factor": 1.0,
        "num_key_value_heads": 8,
        "layer_norm_epsilon": 1e-06,
        "dropout": 0.0,
        "tie_word_embeddings": true,
        "sliding_window_size": null
      },
      "registered_name": "keras_hub>Qwen3Backbone"
    },
    "preprocessor": {
      "module": "keras_hub.src.models.qwen3.qwen3_causal_lm_preprocessor",
      "class_name": "Qwen3CausalLMPreprocessor",
      "config": {
        "name": "qwen3_causal_lm_preprocessor",
        "trainable": true,
        "dtype": {
          "module": "keras",
          "class_name": "DTypePolicy",
          "config": {
            "name": "float32"
          },
          "registered_name": null
        },
        "tokenizer": {
          "module": null,
          "class_name": "HFRustTokenizerWrapper",
          "config": {
            "name": "hf_rust_tokenizer_wrapper",
            "trainable": true,
            "dtype": {
              "module": "keras",
              "class_name": "DTypePolicy",
              "config": {
                "name": "float32"
              },
              "registered_name": null
            },
            "config_file": "tokenizer.json"
          },
          "registered_name": "HFRustTokenizerWrapper"
        },
        "config_file": "preprocessor.json",
        "sequence_length": 2048,
        "add_start_token": false,
        "add_end_token": true
      },
      "registered_name": "keras_hub>Qwen3CausalLMPreprocessor"
    },
    "name": "qwen3_causal_lm"
  },
  "registered_name": "keras_hub>Qwen3CausalLM"
}