Fix LoRA batch processing when the input lora_path contains None (#5930)

This commit is contained in:
Qiaolin Yu
2025-04-30 22:42:42 -04:00
committed by GitHub
parent 11383cec3c
commit 7bcd8b1cb2
4 changed files with 60 additions and 279 deletions

View File

@@ -143,7 +143,9 @@ def run_lora_test_one_by_one(
torch_dtype=torch_dtype,
model_type="generation",
tp_size=model_case.tp_size,
lora_paths=[adaptor.name for adaptor in model_case.adaptors],
lora_paths=[
adaptor.name for adaptor in model_case.adaptors if adaptor.name is not None
],
max_loras_per_batch=model_case.max_loras_per_batch,
lora_backend=backend,
disable_cuda_graph=disable_cuda_graph,
@@ -288,7 +290,9 @@ def run_lora_test_by_batch(
torch_dtype=torch_dtype,
model_type="generation",
tp_size=model_case.tp_size,
lora_paths=[adaptor.name for adaptor in model_case.adaptors],
lora_paths=[
adaptor.name for adaptor in model_case.adaptors if adaptor.name is not None
],
max_loras_per_batch=model_case.max_loras_per_batch,
lora_backend=backend,
disable_cuda_graph=disable_cuda_graph,