From d10b933a36eed76b72c351f7914efa6c4c86d842 Mon Sep 17 00:00:00 2001
From: Ke Bao
Date: Wed, 16 Oct 2024 23:21:20 +0800
Subject: [PATCH] Fix srt dependency (#1685)

---
 python/pyproject.toml | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/python/pyproject.toml b/python/pyproject.toml
index 473e4b6b5..116ba9c98 100644
--- a/python/pyproject.toml
+++ b/python/pyproject.toml
@@ -24,11 +24,9 @@ runtime_common = ["aiohttp", "decord", "fastapi", "hf_transfer", "huggingface_hu
     "packaging", "pillow", "psutil", "pydantic", "python-multipart",
     "torchao", "uvicorn", "uvloop", "zmq",
     "outlines>=0.0.44", "modelscope"]
-torch = ["torch"]
 # xpu is not enabled in public vllm and torch whl,
 # need to follow https://docs.vllm.ai/en/latest/getting_started/xpu-installation.html install vllm
-vllm = ["vllm==0.5.5"]
-srt = ["sglang[runtime_common]", "torch", "vllm"]
+srt = ["sglang[runtime_common]", "torch", "vllm==0.5.5"]
 srt_xpu = ["sglang[runtime_common]"]

 openai = ["openai>=1.0", "tiktoken"]