mirror of
https://github.com/hiyouga/LLaMA-Factory.git
synced 2025-10-15 08:08:09 +08:00
fix VllmEngine: replace the `inputs` parameter with `prompt` in the generate() call
Former-commit-id: 5affb1d20921afd3fe48802ff80785e412e2e3aa
This commit is contained in:
parent
fcb6283a72
commit
7f8ef8c132
@@ -173,7 +173,7 @@ class VllmEngine(BaseEngine):
         multi_modal_data = None

         result_generator = self.model.generate(
-            inputs={"prompt_token_ids": prompt_ids, "multi_modal_data": multi_modal_data},
+            prompt={"prompt_token_ids": prompt_ids, "multi_modal_data": multi_modal_data},
             sampling_params=sampling_params,
             request_id=request_id,
             lora_request=self.lora_request,
Loading…
x
Reference in New Issue
Block a user