Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-04 12:42:51 +08:00)
🐞 fix: typo
Former-commit-id: b06a31e76ac623ea89ecc567dd9e9ec88dfdb585
Parent: 39d9aba166
Commit: f00ad6b4f8
@@ -284,7 +284,7 @@ def patch_model(
     if is_trainable:
         _prepare_model_for_training(model, model_args)
 
-    if getattr(config, "model_type", None) == "mixtral" and is_deepspeed_zero3_enabled():
+    if getattr(model.config, "model_type", None) == "mixtral" and is_deepspeed_zero3_enabled():
         from deepspeed.utils import set_z3_leaf_modules
         from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock
         set_z3_leaf_modules(model, [MixtralSparseMoeBlock])
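The change replaces a bare config reference with model.config in the Mixtral/ZeRO-3 guard of patch_model. Below is a minimal standalone sketch of the corrected guard for context; it is not part of this commit, and the checkpoint name and surrounding setup are illustrative assumptions only.

# Minimal sketch of the fixed guard, assuming an already-loaded Mixtral model.
from transformers import AutoModelForCausalLM
from transformers.integrations import is_deepspeed_zero3_enabled

# Illustrative checkpoint only; LLaMA-Factory loads the model elsewhere.
model = AutoModelForCausalLM.from_pretrained("mistralai/Mixtral-8x7B-v0.1")

# Read the model type from model.config, as the fixed line does.
if getattr(model.config, "model_type", None) == "mixtral" and is_deepspeed_zero3_enabled():
    from deepspeed.utils import set_z3_leaf_modules
    from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock

    # Register the sparse MoE block as a ZeRO-3 leaf module so DeepSpeed gathers
    # its parameters as one unit rather than hooking each expert separately.
    set_z3_leaf_modules(model, [MixtralSparseMoeBlock])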