Support batch inference in vLLM

This commit is contained in:
hiyouga
2024-12-04 13:50:00 +00:00
parent dc78355002
commit 1324d158f9
29 changed files with 148 additions and 407 deletions

View File

@@ -122,7 +122,7 @@ def _check_extra_dependencies(
require_version("mixture-of-depth>=1.1.6", "To fix: pip install mixture-of-depth>=1.1.6")
if model_args.infer_backend == "vllm":
require_version("vllm>=0.4.3,<0.6.4", "To fix: pip install vllm>=0.4.3,<0.6.4")
require_version("vllm>=0.4.3,<0.6.5", "To fix: pip install vllm>=0.4.3,<0.6.5")
if finetuning_args.use_galore:
require_version("galore_torch", "To fix: pip install galore_torch")