chore: add multipart dep for fastapi (#895)

This commit is contained in:
Yineng Zhang
2024-08-02 22:50:19 +08:00
committed by GitHub
parent 6b8f66efe1
commit 046c2b339e
3 changed files with 2 additions and 2 deletions

View File

@@ -35,7 +35,6 @@ jobs:
pip install -e "python[all]"
pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.3/ --force-reinstall
pip install --upgrade transformers
pip install python-multipart
- name: Benchmark Serving Throughput
  run: |

View File

@@ -21,7 +21,7 @@ dependencies = [
[project.optional-dependencies]
srt = ["aiohttp", "fastapi", "hf_transfer", "huggingface_hub", "interegular", "packaging", "pillow",
"psutil", "pydantic", "torch", "uvicorn", "uvloop", "zmq", "vllm==0.5.3.post1", "outlines>=0.0.44", "python-multipart"]
openai = ["openai>=1.0", "tiktoken"]
anthropic = ["anthropic>=0.20.0"]
litellm = ["litellm>=1.0.0"]

View File

@@ -30,6 +30,7 @@ PACKAGE_LIST = [
"zmq",
"vllm",
"outlines",
"multipart",
"openai",
"tiktoken",
"anthropic",