Merge pull request #5973 from JJJJerry/fix_vllm_generate

fix VllmEngine: replace the deprecated `inputs` keyword argument with the positional `prompt` argument in `generate`

Former-commit-id: 40a2fcc02dad2f1633c117d457651b221b8c5ae0
This commit is contained in:
hoshi-hiyouga 2024-11-10 21:04:38 +08:00 committed by GitHub
commit 0386fa6a4f

View File

@@ -173,7 +173,7 @@ class VllmEngine(BaseEngine):
             multi_modal_data = None
         result_generator = self.model.generate(
-            inputs={"prompt_token_ids": prompt_ids, "multi_modal_data": multi_modal_data},
+            {"prompt_token_ids": prompt_ids, "multi_modal_data": multi_modal_data},
             sampling_params=sampling_params,
             request_id=request_id,
             lora_request=self.lora_request,