Update vllm pin to 12.25 (#5342)
### What this PR does / why we need it?
- Fix vLLM breakage introduced by the PR:
1. [Drop v0.14 deprecations](https://github.com/vllm-project/vllm/pull/31285)
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
- vLLM version: release/v0.13.0
- vLLM main:
bc0a5a0c08
---------
Signed-off-by: ZT-AIA <1028681969@qq.com>
This commit is contained in:
@@ -19,7 +19,7 @@ from typing import Any
|
||||
|
||||
import openai
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import run_aisbench_cases
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
import json
|
||||
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import get_TTFT, run_aisbench_cases
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
import json
|
||||
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import get_TTFT, run_aisbench_cases
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
#
|
||||
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import run_aisbench_cases
|
||||
|
||||
@@ -19,7 +19,7 @@ from typing import Any
|
||||
|
||||
import openai
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import run_aisbench_cases
|
||||
|
||||
@@ -19,7 +19,7 @@ from typing import Any
|
||||
|
||||
import openai
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import run_aisbench_cases
|
||||
|
||||
@@ -18,7 +18,7 @@ from typing import Any
|
||||
|
||||
import openai
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import run_aisbench_cases
|
||||
|
||||
@@ -18,7 +18,7 @@ from typing import Any
|
||||
|
||||
import openai
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import run_aisbench_cases
|
||||
|
||||
@@ -18,7 +18,7 @@ from typing import Any
|
||||
|
||||
import openai
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import run_aisbench_cases
|
||||
|
||||
@@ -18,7 +18,7 @@ from typing import Any
|
||||
|
||||
import openai
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import run_aisbench_cases
|
||||
|
||||
@@ -19,7 +19,7 @@ from typing import Any
|
||||
|
||||
import openai
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import run_aisbench_cases
|
||||
|
||||
@@ -19,7 +19,7 @@ from typing import Any
|
||||
|
||||
import openai
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import run_aisbench_cases
|
||||
|
||||
@@ -18,7 +18,7 @@ from typing import Any
|
||||
|
||||
import openai
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import run_aisbench_cases
|
||||
|
||||
@@ -20,7 +20,7 @@ from typing import Any
|
||||
|
||||
import openai
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import run_aisbench_cases
|
||||
|
||||
@@ -18,7 +18,7 @@ from typing import Any
|
||||
|
||||
import openai
|
||||
import pytest
|
||||
from vllm.utils import get_open_port
|
||||
from vllm.utils.network_utils import get_open_port
|
||||
|
||||
from tests.e2e.conftest import RemoteOpenAIServer
|
||||
from tools.aisbench import run_aisbench_cases
|
||||
|
||||
Reference in New Issue
Block a user