Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-04 12:42:51 +08:00)
🐞 fix: typo, move MoE fix to patcher
Former-commit-id: 319a72b48d79c5887f39fc09c66761d5d230a317
parent 8985c43033
commit 39d9aba166
@@ -96,11 +96,6 @@ def load_model_and_tokenizer(
         **config_kwargs,
     )
 
-    if getattr(config, "model_type", None) == "mistral" and is_deepspeed_zero3_enabled():
-        from deepspeed.utils import set_z3_leaf_modules
-        from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock
-        set_z3_leaf_modules(model, [MixtralSparseMoeBlock])
-
     patch_model(model, tokenizer, model_args, is_trainable)
     register_autoclass(config, model, tokenizer)
 
@@ -284,6 +284,11 @@ def patch_model(
     if is_trainable:
         _prepare_model_for_training(model, model_args)
 
+    if getattr(config, "model_type", None) == "mixtral" and is_deepspeed_zero3_enabled():
+        from deepspeed.utils import set_z3_leaf_modules
+        from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock
+        set_z3_leaf_modules(model, [MixtralSparseMoeBlock])
+
 
 def patch_valuehead_model(model: "AutoModelForCausalLMWithValueHead") -> None:
     def tie_weights(self: "AutoModelForCausalLMWithValueHead") -> None:
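For reference, a minimal standalone sketch of the relocated fix, assuming a transformers version that re-exports is_deepspeed_zero3_enabled from transformers.integrations; the wrapper name _patch_mixtral_moe is hypothetical, while the imports and the set_z3_leaf_modules() call match the diff above. Under ZeRO-3, parameters are partitioned across ranks and gathered per sub-module on demand, but Mixtral's router activates only a few experts per token, so the sparse MoE block must be registered as a single ZeRO-3 leaf module and gathered whole.

# Sketch only: _patch_mixtral_moe is a hypothetical helper, not LLaMA-Factory's API.
from transformers.integrations import is_deepspeed_zero3_enabled


def _patch_mixtral_moe(model, config) -> None:
    # Mixtral dispatches each token to a subset of experts, so under
    # DeepSpeed ZeRO-3 not every expert's parameters are touched on every
    # forward pass. Marking MixtralSparseMoeBlock as a ZeRO-3 "leaf module"
    # tells DeepSpeed to gather the block's parameters as one unit instead
    # of hooking each sub-module, which otherwise breaks sparse routing.
    if getattr(config, "model_type", None) == "mixtral" and is_deepspeed_zero3_enabled():
        from deepspeed.utils import set_z3_leaf_modules
        from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock

        set_z3_leaf_modules(model, [MixtralSparseMoeBlock])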