Update vllm version to support llama3.1 (#705)

commit 444a02441a
parent fa7ccb3316
Author: Ying Sheng
Date: 2024-07-23 13:49:34 -07:00
Committed by: GitHub
4 changed files with 5 additions and 9 deletions

@@ -73,6 +73,8 @@ def get_context_length(config):
     rope_scaling = getattr(config, "rope_scaling", None)
     if rope_scaling:
         rope_scaling_factor = config.rope_scaling["factor"]
+        if config.rope_scaling["rope_type"] == "llama3":
+            rope_scaling_factor = 1
     else:
         rope_scaling_factor = 1
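
Context for the added check: Llama 3.1 configs ship a rope_scaling entry with rope_type "llama3", but their max_position_embeddings already reflects the extended 128K context, so multiplying by the scaling factor again would overstate the usable context length. Below is a minimal sketch of how a context-length helper might apply the factor; the candidate key list and the 2048 fallback are illustrative assumptions, not taken from this diff.

def get_context_length(config):
    """Return the usable context length implied by a HF model config (sketch)."""
    rope_scaling = getattr(config, "rope_scaling", None)
    if rope_scaling:
        rope_scaling_factor = config.rope_scaling["factor"]
        # "llama3" rope scaling already bakes the extended context into
        # max_position_embeddings, so the factor must not be applied again.
        if config.rope_scaling["rope_type"] == "llama3":
            rope_scaling_factor = 1
    else:
        rope_scaling_factor = 1
    # Assumed candidate keys; the real helper may check a longer list.
    for key in ("max_position_embeddings", "max_sequence_length", "seq_length"):
        val = getattr(config, key, None)
        if val is not None:
            return int(rope_scaling_factor * val)
    return 2048  # assumed fallback when no key is present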