diff --git a/python/pyproject.toml b/python/pyproject.toml
index 473e4b6b5..116ba9c98 100644
--- a/python/pyproject.toml
+++ b/python/pyproject.toml
@@ -24,11 +24,9 @@ runtime_common = ["aiohttp", "decord", "fastapi", "hf_transfer", "huggingface_hu
     "packaging", "pillow", "psutil", "pydantic", "python-multipart",
     "torchao", "uvicorn", "uvloop", "zmq",
     "outlines>=0.0.44", "modelscope"]
-torch = ["torch"]
 # xpu is not enabled in public vllm and torch whl,
 # need to follow https://docs.vllm.ai/en/latest/getting_started/xpu-installation.htmlinstall vllm
-vllm = ["vllm==0.5.5"]
-srt = ["sglang[runtime_common]", "torch", "vllm"]
+srt = ["sglang[runtime_common]", "torch", "vllm==0.5.5"]
 srt_xpu = ["sglang[runtime_common]"]
 
 openai = ["openai>=1.0", "tiktoken"]