{ "activation": "silu", "attn_type": "mla", "bias": false, "d_model": 1024, "dropout": 0.2, "hidden_dim": 1536, "kv_lora_rank": 128, "mlp": "GLU", "nope_head_dim": 32, "num_heads": 46, "num_kv_heads": 46, "num_layers": 16, "q_lora_rank": 384, "rope_head_dim": 64, "seq_len": 256, "v_head_dim": 32, "vocab_size": 50257, "weight_tying": true }