Mirror of https://github.com/hiyouga/LLaMA-Factory.git, synced 2025-08-02 19:52:50 +08:00
[data] fix kimi vl template (#8015)
This commit is contained in:
parent
cef3a0b2e2
commit
845af89ea4
@@ -655,8 +655,10 @@ class KimiVLPlugin(BasePlugin):
        self._validate_messages(messages, images, videos, audios)
        if self.expand_mm_tokens:
            mm_inputs = self._get_mm_inputs(images, videos, audios, processor)
            image_grid_hws = mm_inputs.get("image_grid_hws", [])
        else:
            image_grid_hws = [None] * len(images)

        num_image_tokens = 0
        image_processor: BaseImageProcessor = getattr(processor, "image_processor")
        merge_length = math.prod(image_processor.merge_kernel_size)
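For context, a minimal sketch of how a branch like the one above is typically consumed downstream: when expand_mm_tokens is enabled, each image placeholder is replaced by grid_h * grid_w // merge_length pad tokens derived from image_grid_hws; when it is disabled (grid_hw is None), a single pad token is kept. This is not the upstream implementation; the placeholder string, the "<|media_pad|>" token, the (2, 2) merge kernel, and the grid shape in the example are illustrative assumptions.

        # Hypothetical sketch, not LLaMA-Factory code.
        import math

        def expand_image_tokens(content, image_grid_hws, merge_kernel_size=(2, 2),
                                placeholder="<image>", pad_token="<|media_pad|>"):
            # number of patches merged into one token, mirroring merge_kernel_size above
            merge_length = math.prod(merge_kernel_size)
            for grid_hw in image_grid_hws:
                # expanded length when the grid is known, one pad token otherwise
                seqlen = math.prod(grid_hw) // merge_length if grid_hw is not None else 1
                content = content.replace(placeholder, pad_token * seqlen, 1)
            return content

        out = expand_image_tokens("<image> describe this", [(28, 28)])
        print(out.count("<|media_pad|>"))  # 196 == 28 * 28 // 4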