From 07f90ba4c7d37fc17ddf0119cf8cc92b236d1215 Mon Sep 17 00:00:00 2001
From: Parker Sytz
Date: Sat, 21 Mar 2026 12:24:51 +0000
Subject: [PATCH] Fix config for vLLM v2 compatibility: use_cache=true,
 rope_scaling format

---
 config.json | 68 ++++++++++++++++++++++++++---------------------------
 1 file changed, 33 insertions(+), 35 deletions(-)

diff --git a/config.json b/config.json
index 66cd040..f6460bd 100644
--- a/config.json
+++ b/config.json
@@ -1,37 +1,35 @@
 {
-    "architectures": [
-        "LlamaForCausalLM"
-    ],
-    "attention_bias": false,
-    "attention_dropout": 0.0,
-    "bos_token_id": 128000,
-    "torch_dtype": "bfloat16",
-    "eos_token_id": 128009,
-    "head_dim": 128,
-    "hidden_act": "silu",
-    "hidden_size": 4096,
-    "initializer_range": 0.02,
-    "intermediate_size": 14336,
-    "max_position_embeddings": 131072,
-    "mlp_bias": false,
-    "model_type": "llama",
-    "num_attention_heads": 32,
-    "num_hidden_layers": 32,
-    "num_key_value_heads": 8,
-    "pad_token_id": 128004,
-    "pretraining_tp": 1,
-    "rms_norm_eps": 1e-05,
-    "rope_parameters": {
-        "factor": 8.0,
-        "high_freq_factor": 4.0,
-        "low_freq_factor": 1.0,
-        "original_max_position_embeddings": 8192,
-        "rope_theta": 500000.0,
-        "rope_type": "llama3"
-    },
-    "tie_word_embeddings": false,
-    "unsloth_fixed": true,
-    "unsloth_version": "2026.3.8",
-    "use_cache": false,
-    "vocab_size": 128256
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 128000,
+  "torch_dtype": "bfloat16",
+  "eos_token_id": 128009,
+  "head_dim": 128,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 14336,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 8,
+  "pad_token_id": 128004,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "tie_word_embeddings": false,
+  "use_cache": true,
+  "vocab_size": 128256,
+  "rope_scaling": {
+    "factor": 8.0,
+    "high_freq_factor": 4.0,
+    "low_freq_factor": 1.0,
+    "original_max_position_embeddings": 8192,
+    "rope_theta": 500000.0,
+    "rope_type": "llama3"
+  }
 }
\ No newline at end of file
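
A quick sanity check for the result (a sketch, not part of the patch): the snippet below assumes the patched config.json sits in the working directory, parses it with the Python standard library, and asserts the changes this diff makes: use_cache re-enabled, the scaling block exposed under the rope_scaling key that vLLM reads, and the unsloth_* bookkeeping keys dropped. json.load doubles as a syntax check on the edited file.

    import json

    # parse the patched file; json.load raises on any syntax damage
    with open("config.json") as f:  # path is illustrative
        cfg = json.load(f)

    assert cfg["use_cache"] is True                      # KV cache re-enabled
    assert cfg["rope_scaling"]["rope_type"] == "llama3"  # renamed from rope_parameters
    assert "rope_parameters" not in cfg                  # old key removed
    assert "unsloth_fixed" not in cfg and "unsloth_version" not in cfg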