fix llava qlora

hiyouga
2024-04-26 18:00:23 +08:00
parent cd3a960f81
commit fc67b736ba
6 changed files with 55 additions and 10 deletions


@@ -323,6 +323,9 @@ def get_infer_args(args: Optional[Dict[str, Any]] = None) -> _INFER_CLS:
        if model_args.visual_inputs:
            raise ValueError("vLLM engine does not support MLLM yet. Stay tuned.")

    if finetuning_args.stage == "rm" and model_args.visual_inputs:
        raise ValueError("Reward server does not support MLLM yet. Stay tuned.")

    _verify_model_args(model_args, finetuning_args)
    _check_extra_dependencies(model_args, finetuning_args)
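
For readers outside the diff context: the hunk above follows a fail-fast validation pattern, rejecting option combinations that the inference paths cannot serve yet before any model is loaded. Below is a minimal, self-contained sketch of that pattern; the ModelArguments/FinetuningArguments dataclasses and the validate_infer_args helper are illustrative assumptions for this sketch, not LLaMA-Factory's actual definitions.

from dataclasses import dataclass


@dataclass
class ModelArguments:
    # Illustrative stand-in: whether the model consumes image inputs (MLLM).
    visual_inputs: bool = False
    # Illustrative stand-in: which inference backend serves the model.
    infer_backend: str = "huggingface"


@dataclass
class FinetuningArguments:
    # Illustrative stand-in: training/serving stage, e.g. "sft" or "rm".
    stage: str = "sft"


def validate_infer_args(model_args: ModelArguments, finetuning_args: FinetuningArguments) -> None:
    # Reject unsupported option combinations early, mirroring the checks in the hunk.
    if model_args.infer_backend == "vllm" and model_args.visual_inputs:
        raise ValueError("vLLM engine does not support MLLM yet. Stay tuned.")

    if finetuning_args.stage == "rm" and model_args.visual_inputs:
        raise ValueError("Reward server does not support MLLM yet. Stay tuned.")


# Usage: an unsupported combination fails fast with a descriptive message.
try:
    validate_infer_args(
        ModelArguments(visual_inputs=True, infer_backend="vllm"),
        FinetuningArguments(stage="sft"),
    )
except ValueError as err:
    print(err)  # vLLM engine does not support MLLM yet. Stay tuned.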