Update vllm to 0.6.3 (#1711) (#1720)

Co-authored-by: Ke Bao <ISPObaoke@163.com>
This commit is contained in:
Yineng Zhang
2024-10-19 20:45:41 -07:00
committed by GitHub
parent 12cad0feae
commit 8bee20f80b
9 changed files with 133 additions and 76 deletions

View File

@@ -26,7 +26,7 @@ runtime_common = ["aiohttp", "decord", "fastapi", "hf_transfer", "huggingface_hu
"outlines>=0.0.44", "modelscope"]
# xpu is not enabled in public vllm and torch whl,
# need to follow https://docs.vllm.ai/en/latest/getting_started/xpu-installation.html to install vllm
srt = ["sglang[runtime_common]", "torch", "vllm==0.5.5"]
srt = ["sglang[runtime_common]", "torch", "vllm==0.6.3.post1"]
srt_xpu = ["sglang[runtime_common]"]
openai = ["openai>=1.0", "tiktoken"]