Fix the lora adapter when lora path is none (#4799)
Co-authored-by: Beichen Ma <mabeichen12@gmail.com>
This commit is contained in:
@@ -133,10 +133,6 @@ class LoRAManager:
|
||||
assert len(cur_uids) <= self.max_loras_per_batch
|
||||
self.memory_pool.prepare_lora_batch(cur_uids, self.loras)
|
||||
|
||||
# FIXME: Handle lora uid with None more safely
|
||||
if cur_uids == set([None]):
|
||||
return
|
||||
|
||||
# set up batch info shared by all lora modules
|
||||
bs = forward_batch.batch_size
|
||||
seg_lens = (
|
||||
|
||||
@@ -163,7 +163,7 @@ class LoRAMemoryPool:
|
||||
if uid is None:
|
||||
for i in range(self.num_layer):
|
||||
for k in self.A_buffer.keys():
|
||||
self.A_buffer[k][i][buffer_id] *= 0
|
||||
self.A_buffer[k][i][buffer_id] = 0
|
||||
return
|
||||
|
||||
assert lora_adapter is not None
|
||||
|
||||
Reference in New Issue
Block a user