Former-commit-id: 24e1c0e2ee365812357fb9cf82e2162915299a26
hiyouga 2024-06-03 18:38:36 +08:00
parent af7748139a
commit d0ceb1b091
3 changed files with 3 additions and 5 deletions

View File

@@ -25,7 +25,7 @@ extra_require = {
     "metrics": ["nltk", "jieba", "rouge-chinese"],
     "deepspeed": ["deepspeed>=0.10.0,<=0.14.0"],
     "bitsandbytes": ["bitsandbytes>=0.39.0"],
-    "vllm": ["vllm>=0.4.1"],
+    "vllm": ["vllm>=0.4.3"],
     "galore": ["galore-torch"],
     "badam": ["badam"],
     "gptq": ["optimum>=1.16.0", "auto-gptq>=0.5.0"],

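Note: the bumped extra above is only enforced at install time. A minimal runtime sketch of checking that the installed vLLM meets the new floor (importlib.metadata is standard library, packaging is a common dependency; this helper is illustrative and not part of the commit):

    from importlib.metadata import PackageNotFoundError, version

    from packaging.version import Version

    def vllm_meets_floor(floor: str = "0.4.3") -> bool:
        # True only when a vllm distribution is installed and is at least `floor`.
        try:
            return Version(version("vllm")) >= Version(floor)
        except PackageNotFoundError:
            return False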
View File

@@ -158,12 +158,10 @@ class VllmEngine(BaseEngine):
         )
         result_generator = self.model.generate(
-            prompt=None,
+            inputs={"prompt_token_ids": prompt_ids, "multi_modal_data": multi_modal_data},
             sampling_params=sampling_params,
             request_id=request_id,
-            prompt_token_ids=prompt_ids,
             lora_request=self.lora_request,
-            multi_modal_data=multi_modal_data,
         )
         return result_generator

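The change above follows the generate() signature in vLLM 0.4.3, which takes a single inputs mapping (prompt token ids plus optional multi-modal data) instead of the separate prompt, prompt_token_ids, and multi_modal_data keyword arguments. A minimal standalone sketch of the new call shape; the engine construction and model name are illustrative assumptions, not part of the commit:

    from vllm import AsyncEngineArgs, AsyncLLMEngine, SamplingParams

    engine = AsyncLLMEngine.from_engine_args(AsyncEngineArgs(model="facebook/opt-125m"))
    sampling_params = SamplingParams(temperature=0.7, max_tokens=32)

    async def generate_once(prompt_ids: list[int], request_id: str):
        # vLLM >= 0.4.3: token ids (and any multi-modal data) are passed in one `inputs` dict.
        result_generator = engine.generate(
            inputs={"prompt_token_ids": prompt_ids},
            sampling_params=sampling_params,
            request_id=request_id,
        )
        final_output = None
        async for output in result_generator:  # yields RequestOutput objects as generation progresses
            final_output = output
        return final_output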
View File

@@ -94,7 +94,7 @@ def _check_extra_dependencies(
         require_version("mixture-of-depth>=1.1.6", "To fix: pip install mixture-of-depth>=1.1.6")

     if model_args.infer_backend == "vllm":
-        require_version("vllm>=0.4.1", "To fix: pip install vllm>=0.4.1")
+        require_version("vllm>=0.4.3", "To fix: pip install vllm>=0.4.3")

     if finetuning_args.use_galore:
         require_version("galore_torch", "To fix: pip install galore_torch")