Fix the lora adapter when lora path is none (#4799)

Co-authored-by: Beichen Ma <mabeichen12@gmail.com>
This commit is contained in:
Qiaolin Yu
2025-03-28 00:03:08 -04:00
committed by GitHub
parent 42a45df043
commit 9fdc6d6abc
3 changed files with 17 additions and 14 deletions

View File

@@ -133,10 +133,6 @@ class LoRAManager:
assert len(cur_uids) <= self.max_loras_per_batch
self.memory_pool.prepare_lora_batch(cur_uids, self.loras)
# FIXME: Handle lora uid with None more safely
if cur_uids == set([None]):
return
# set up batch info shared by all lora modules
bs = forward_batch.batch_size
seg_lens = (

View File

@@ -163,7 +163,7 @@ class LoRAMemoryPool:
if uid is None:
for i in range(self.num_layer):
for k in self.A_buffer.keys():
self.A_buffer[k][i][buffer_id] *= 0
self.A_buffer[k][i][buffer_id] = 0
return
assert lora_adapter is not None