add vllm config

hiyouga
2024-11-10 21:28:18 +08:00
parent adc5849ce7
commit 58ab4579dc
34 changed files with 44 additions and 34 deletions


@@ -54,7 +54,7 @@ extra_require = {
"gptq": ["optimum>=1.17.0", "auto-gptq>=0.5.0"],
"awq": ["autoawq"],
"aqlm": ["aqlm[gpu]>=1.1.0"],
"vllm": ["vllm>=0.4.3,<=0.6.3"],
"vllm": ["vllm>=0.4.3,<0.6.4"],
"galore": ["galore-torch"],
"badam": ["badam>=1.2.1"],
"adam-mini": ["adam-mini"],