Update README.md
config.json (Normal file, 34 lines added)
@@ -0,0 +1,34 @@
{
  "architectures": [
    "GraniteMoeForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "attention_multiplier": 0.015625,
  "bos_token_id": 0,
  "embedding_multiplier": 12.0,
  "eos_token_id": 0,
  "hidden_act": "silu",
  "hidden_size": 1536,
  "initializer_range": 0.02,
  "intermediate_size": 512,
  "logits_scaling": 6.0,
  "max_position_embeddings": 4096,
  "model_type": "granitemoe",
  "num_attention_heads": 24,
  "num_experts_per_tok": 8,
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "num_local_experts": 40,
  "output_router_logits": false,
  "pad_token_id": 0,
  "residual_multiplier": 0.22,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 10000,
  "router_aux_loss_coef": 0.001,
  "tie_word_embeddings": true,
  "transformers_version": "4.45.0.dev0",
  "use_cache": true,
  "vocab_size": 49152
}
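Since this commit adds only the model config, a quick sanity check is to load it with Hugging Face Transformers and inspect the derived shapes. A minimal sketch, assuming the file is saved locally as ./config.json (a hypothetical path) and that the installed transformers build includes GraniteMoe support (>= 4.45, matching the "transformers_version" field above):

from transformers import AutoConfig, AutoModelForCausalLM

# from_pretrained accepts a direct path to a saved config JSON file;
# AutoConfig dispatches on the "model_type" field ("granitemoe").
config = AutoConfig.from_pretrained("./config.json")

print(config.num_hidden_layers)    # 32
print(config.num_local_experts)    # 40 experts per layer
print(config.num_experts_per_tok)  # top-8 routing
print(config.hidden_size // config.num_attention_heads)  # 64 (head dim)

# Builds a randomly initialized model from the config alone (this allocates
# the full parameter set in memory); to get trained weights, load the actual
# checkpoint with AutoModelForCausalLM.from_pretrained instead.
model = AutoModelForCausalLM.from_config(config)
print(f"{sum(p.numel() for p in model.parameters()):,} parameters")

Worth noting: if I read the GraniteMoe modeling code correctly, the multiplier fields replace the usual fixed constants, with attention scores scaled by attention_multiplier (here 0.015625 = 1/64, i.e. 1/head_dim rather than the conventional 1/sqrt(head_dim)), input embeddings scaled by embedding_multiplier, each residual branch by residual_multiplier, and the output logits divided by logits_scaling.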