Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-12-15 03:10:35 +08:00)
🐞 fix: typo, move MoE fix to patcher
@@ -284,6 +284,11 @@ def patch_model(
     if is_trainable:
         _prepare_model_for_training(model, model_args)
 
+    if getattr(config, "model_type", None) == "mixtral" and is_deepspeed_zero3_enabled():
+        from deepspeed.utils import set_z3_leaf_modules
+        from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock
+
+        set_z3_leaf_modules(model, [MixtralSparseMoeBlock])
+
 
 def patch_valuehead_model(model: "AutoModelForCausalLMWithValueHead") -> None:
     def tie_weights(self: "AutoModelForCausalLMWithValueHead") -> None:
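Background on the change: under DeepSpeed ZeRO-3, parameters are partitioned across ranks and gathered on demand by forward hooks. Mixtral's sparse-MoE router activates only a subset of experts per token, so different ranks may try to gather different expert weights, which can stall the collective ops. Marking MixtralSparseMoeBlock as a ZeRO-3 "leaf" module tells DeepSpeed to gather the whole block's parameters as one unit. Below is a minimal sketch of the same fix as a standalone helper, assuming deepspeed >= 0.12 (which provides set_z3_leaf_modules) and a stock transformers Mixtral checkpoint; the helper name is illustrative, not part of LLaMA-Factory's API.

from transformers.integrations import is_deepspeed_zero3_enabled


def set_moe_leaf_modules_for_zero3(model) -> None:
    # Hypothetical helper mirroring the patcher logic above: mark Mixtral's
    # sparse-MoE block as a ZeRO-3 leaf so DeepSpeed gathers its expert
    # parameters together instead of hooking each expert separately, which
    # can hang when ranks route tokens to different experts.
    if getattr(model.config, "model_type", None) != "mixtral":
        return
    if not is_deepspeed_zero3_enabled():
        return

    from deepspeed.utils import set_z3_leaf_modules  # requires deepspeed >= 0.12
    from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock

    set_z3_leaf_modules(model, [MixtralSparseMoeBlock])

For example, calling set_moe_leaf_modules_for_zero3(model) right after loading the model and before handing it to the trainer would mirror the point at which patch_model applies this fix.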