update hparam name

Former-commit-id: 860549b99b520ae9509559b18646f83fd95933b8
This commit is contained in:
hoshi-hiyouga 2024-04-26 02:49:39 +08:00 committed by GitHub
parent e7725f2bbb
commit 6ef8ee5988

View File

@ -81,6 +81,10 @@ class ModelArguments:
default=False,
metadata={"help": "Whether or not to use unsloth's optimization for the LoRA training."},
)
visual_inputs: bool = field(
default=False,
metadata={"help": "Whether or not to use multimodal LLM that accepts visual inputs."},
)
moe_aux_loss_coef: Optional[float] = field(
default=None,
metadata={"help": "Coefficient of the auxiliary router loss in mixture-of-experts model."},
@ -169,10 +173,6 @@ class ModelArguments:
default=False,
metadata={"help": "For debugging purposes, print the status of the parameters in the model."},
)
use_mllm: bool = field(
default=False,
metadata={"help": "Whether use Multimodal LLM."},
)
def __post_init__(self):
self.compute_dtype = None