{
  "architectures": [
    "DbrxForCausalLM"
  ],
  "attn_config": {
    "attn_pdrop": 0.0,
    "clip_qkv": 8,
    "kv_n_heads": 8,
    "model_type": "",
    "rope_theta": 500000
  },
  "d_model": 24,
  "dtype": "bfloat16",
  "emb_pdrop": 0.0,
  "ffn_config": {
    "ffn_act_fn": {
      "name": "silu"
    },
    "ffn_hidden_size": 24,
    "hidden_size": 6144,
    "model_type": "",
    "moe_jitter_eps": 0,
    "moe_loss_weight": 0.05,
    "moe_normalize_expert_weights": 1.0,
    "moe_num_experts": 16,
    "moe_top_k": 4
  },
  "initializer_range": 0.02,
  "max_seq_len": 32768,
  "model_type": "dbrx",
  "n_heads": 16,
  "n_layers": 2,
  "num_key_value_heads": 8,
  "output_router_logits": false,
  "resid_pdrop": 0.0,
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "transformers_version": "4.57.0.dev0",
  "use_cache": true,
  "vocab_size": 100352
}
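
A minimal sketch of how a config like this can be loaded and inspected with the transformers library. It assumes the JSON above is saved as config.json inside a local directory (the directory name ./tiny-dbrx is illustrative, not from the original source) and that the installed transformers build is compatible with the transformers_version recorded above:

# Minimal sketch, not from the original source: load the config.json above
# and inspect a few of its fields. Assumes the file is saved at
# ./tiny-dbrx/config.json (illustrative path) and that the installed
# transformers release supports DBRX (this file was written by 4.57.0.dev0).
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./tiny-dbrx")  # reads ./tiny-dbrx/config.json

print(config.model_type)                    # "dbrx"
print(config.d_model, config.n_layers)      # 24, 2 -- tiny, test-sized dimensions
print(config.ffn_config.moe_num_experts,    # 16 experts,
      config.ffn_config.moe_top_k)          # 4 routed per token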
|
|