[Bugfix] Resolve the interface compatibility issue of get_input_embeddings in MM (#4638)
### What this PR does / why we need it? Resolve the interface compatibility issue of get_input_embeddings in MM: the get_input_embeddings function of other models does not accept the is_multimodal parameter, so the call must be gated on the model type. --------- Signed-off-by: Levi-JQ <yujinqi2@huawei.com> Co-authored-by: Levi-JQ <yujinqi2@huawei.com>
This commit is contained in:
@@ -1394,11 +1394,19 @@ class NPUModelRunner(LoRAModelRunnerMixin):
             # embeddings), we always use embeddings (rather than token ids)
             # as input to the multimodal model, even when the input is text.
             input_ids = self.input_ids[:total_num_scheduled_tokens]
-            inputs_embeds = self.model.get_input_embeddings(
-                input_ids,
-                multimodal_embeddings=mm_embeds,
-                is_multimodal=is_mm_embed,
-            )
+            model_type = self.vllm_config.model_config.hf_config.model_type
+            if model_type == "qwen2_5_vl":
+                inputs_embeds = self.model.get_input_embeddings(
+                    input_ids,
+                    multimodal_embeddings=mm_embeds,
+                    is_multimodal=is_mm_embed,
+                )
+            else:
+                if mm_embeds:
+                    inputs_embeds = self.model.get_input_embeddings(
+                        input_ids, mm_embeds)
+                else:
+                    inputs_embeds = self.model.get_input_embeddings(input_ids)
             # TODO(woosuk): Avoid the copy. Optimize.
             self.inputs_embeds[:total_num_scheduled_tokens].copy_(
                 inputs_embeds)
Reference in New Issue
Block a user