Support pretraining of LLaVA

This commit is contained in:
BUAADreamer
2024-05-21 08:57:14 +08:00
parent 2a67457e39
commit 29a6d5bdb8
4 changed files with 115 additions and 0 deletions


@@ -85,6 +85,10 @@ class ModelArguments:
default=False,
metadata={"help": "Whethor or not to use multimodal LLM that accepts visual inputs."},
)
tune_mm_proj: bool = field(
default=False,
metadata={"help": "Whethor or not only finetune mm_projector for MLLM."},
)
moe_aux_loss_coef: Optional[float] = field(
default=None,
metadata={"help": "Coefficient of the auxiliary router loss in mixture-of-experts model."},