{
  "architectures": [
    "Xerxes2ForCausalLM"
  ],
  "attention_axis_name": "sp",
  "attn_mechanism": "vanilla",
  "axis_dims": [
    1,
    -1,
    1,
    1
  ],
  "axis_names": [
    "dp",
    "fsdp",
    "tp",
    "sp"
  ],
  "backend": null,
  "bits": null,
  "blocksize_b": 1,
  "blocksize_k": 128,
  "blocksize_q": 128,
  "bos_token_id": 2,
  "cache_implementation": "hybrid",
  "compute_blocksize": 64,
  "easy_method": "train",
  "eos_token_id": 1,
  "fcm_max_ratio": 0.0,
  "fcm_min_ratio": 0.0,
  "flash_attention_backward_pass_impl": "triton",
  "gradient_checkpointing": "",
  "hardware_abstraction": false,
  "head_dim": 64,
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "kv_cache_quantization_blocksize": 64,
  "kv_cache_quantization_method": "None",
  "kv_cache_sharding_sequence_axis_name": "sp",
  "max_position_embeddings": 2048,
  "model_type": "xerxes2",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "pad_token_id": 0,
  "pallas_k_block_size": null,
  "pallas_m_block_size": null,
  "pallas_n_block_size": null,
  "partition_axis": [
    [
      "fsdp",
      "dp"
    ],
    "sp",
    "sp",
    "tp",
    "sp",
    "tp",
    null,
    null,
    null,
    null,
    "tp",
    "sp",
    null
  ],
  "platform": null,
  "pretraining_tp": 1,
  "quantization_blocksize": 64,
  "quantization_method": "None",
  "quantization_pattern": ".*",
  "rms_norm_eps": 1e-06,
  "rope_theta": 10000.0,
  "scan_attention_layers": false,
  "scan_layers": false,
  "scan_mlp_chunk_size": 1024,
  "scan_ring_attention": true,
  "shard_attention_computation": true,
  "tie_word_embeddings": false,
  "transformers_version": "4.48.0",
  "use_cache": true,
  "use_scan_mlp": false,
  "use_sharded_kv_caching": false,
  "use_sharding_constraint": false,
  "vocab_size": 152064
}
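
A minimal sketch of how this config might be consumed, using only Python's stdlib json module. The path "config.json" is an assumption (adjust it to wherever the file is saved), and the reading of -1 in axis_dims as "fill with the remaining device count" follows the usual mesh-reshape convention rather than anything stated in the file itself.

import json

# Assumed path; point this at the config file above.
with open("config.json") as f:
    cfg = json.load(f)

# Sanity check a derived value: head_dim should equal
# hidden_size / num_attention_heads (2048 / 32 == 64 here).
assert cfg["hidden_size"] // cfg["num_attention_heads"] == cfg["head_dim"]

# axis_names pairs with axis_dims to describe the device mesh.
# By the common reshape convention (an assumption, not stated in
# the config), the -1 axis absorbs all remaining devices, so this
# mesh is fully sharded along "fsdp".
mesh = dict(zip(cfg["axis_names"], cfg["axis_dims"]))
print(mesh)  # {'dp': 1, 'fsdp': -1, 'tp': 1, 'sp': 1}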