Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-10-14 15:52:49 +08:00)
update hparam name
Former-commit-id: 9941adfbf06db37f8ba32c4555f6e58e27188aaf
parent 13117b69d7
commit 279439abbe
@@ -81,6 +81,10 @@ class ModelArguments:
         default=False,
         metadata={"help": "Whether or not to use unsloth's optimization for the LoRA training."},
     )
+    visual_inputs: bool = field(
+        default=False,
+        metadata={"help": "Whether or not to use a multimodal LLM that accepts visual inputs."},
+    )
     moe_aux_loss_coef: Optional[float] = field(
         default=None,
         metadata={"help": "Coefficient of the auxiliary router loss in mixture-of-experts model."},
@@ -169,10 +173,6 @@ class ModelArguments:
         default=False,
         metadata={"help": "For debugging purposes, print the status of the parameters in the model."},
     )
-    use_mllm: bool = field(
-        default=False,
-        metadata={"help": "Whether use Multimodal LLM."},
-    )
 
     def __post_init__(self):
         self.compute_dtype = None
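For context, these fields follow the standard dataclasses-plus-HfArgumentParser pattern that LLaMA-Factory uses for its hyperparameters. The sketch below is a minimal, self-contained illustration of how the renamed flag is declared and parsed; the class here is a simplified stand-in showing only the fields touched by this commit, not the project's actual ModelArguments.

from dataclasses import dataclass, field
from typing import Optional

from transformers import HfArgumentParser


@dataclass
class ModelArguments:
    # Simplified stand-in: only the fields touched by this commit are shown.
    visual_inputs: bool = field(  # renamed from `use_mllm` in this commit
        default=False,
        metadata={"help": "Whether or not to use a multimodal LLM that accepts visual inputs."},
    )
    moe_aux_loss_coef: Optional[float] = field(
        default=None,
        metadata={"help": "Coefficient of the auxiliary router loss in mixture-of-experts model."},
    )


if __name__ == "__main__":
    # HfArgumentParser turns each dataclass field into a CLI flag, e.g.
    # (with a hypothetical script name):
    #   python sketch.py --visual_inputs true --moe_aux_loss_coef 0.01
    (model_args,) = HfArgumentParser(ModelArguments).parse_args_into_dataclasses()
    print(model_args.visual_inputs, model_args.moe_aux_loss_coef)

Callers that previously passed --use_mllm would switch to --visual_inputs after this rename; the default (False) and the field's meaning are unchanged.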