[CI] Fix broken CI (#2302)
1. Disable the test_eagle_correctness test; we'll re-enable it once the OOM error is fixed. 2. Drop the transformers version limit for main, since vLLM relies on >=4.55.0 (see 65552b476b). 3. Fix the kv_connector_output bug (see 796bae07c5). - vLLM version: v0.10.0 - vLLM main: d1af8b7be9. Signed-off-by: wangxiyuan <wangxiyuan1007@gmail.com>
This commit is contained in:
@@ -1605,9 +1605,12 @@ class NPUModelRunner(LoRAModelRunnerMixin):
|
||||
intermediate_tensors))
|
||||
kv_connector_output = None
|
||||
if not vllm_version_is("0.10.0"):
|
||||
kv_connector_output = KVConnectorOutput(
|
||||
finished_sending=finished_sending,
|
||||
finished_recving=finished_recving)
|
||||
if finished_sending is not None and finished_recving is not None:
|
||||
kv_connector_output = KVConnectorOutput(
|
||||
finished_sending=finished_sending,
|
||||
finished_recving=finished_recving)
|
||||
else:
|
||||
kv_connector_output = None
|
||||
finished_sending = None
|
||||
finished_recving = None
|
||||
with ProfileExecuteDuration().capture_async("post process"):
|
||||
|
||||
Reference in New Issue
Block a user