fix: ZeRO3 does not work with MoE models

Former-commit-id: e1d5c9851922522f45314c3058d4658198631875
A-Cepheus 2024-01-22 15:21:14 +08:00 committed by GitHub
parent 48cab43cb5
commit 8985c43033


@@ -96,6 +96,11 @@ def load_model_and_tokenizer(
         **config_kwargs,
     )
+
+    if getattr(config, "model_type", None) == "mixtral" and is_deepspeed_zero3_enabled():
+        from deepspeed.utils import set_z3_leaf_modules
+        from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock
+        set_z3_leaf_modules(model, [MixtralSparseMoeBlock])
     patch_model(model, tokenizer, model_args, is_trainable)
     register_autoclass(config, model, tokenizer)
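
For context, below is a minimal, self-contained sketch of how this fix slots into model loading under DeepSpeed ZeRO-3. It is not the repository's load_model_and_tokenizer; the helper name load_moe_model_for_zero3 and the bare from_pretrained call are assumptions made only for illustration.

    from transformers import AutoConfig, AutoModelForCausalLM
    from transformers.integrations import is_deepspeed_zero3_enabled


    def load_moe_model_for_zero3(model_name_or_path: str):
        # Hypothetical helper: load a causal LM and apply the ZeRO-3 MoE workaround.
        config = AutoConfig.from_pretrained(model_name_or_path)
        model = AutoModelForCausalLM.from_pretrained(model_name_or_path, config=config)

        # Only Mixtral-style sparse MoE checkpoints need the hint, and only under ZeRO stage 3.
        if getattr(config, "model_type", None) == "mixtral" and is_deepspeed_zero3_enabled():
            # set_z3_leaf_modules is available in recent DeepSpeed releases (roughly 0.13+).
            from deepspeed.utils import set_z3_leaf_modules
            from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock

            # ZeRO-3 partitions parameters and gathers them via forward hooks on each
            # submodule. The MoE router activates only a subset of experts, and that
            # subset can differ across ranks, so the per-expert all-gather collectives
            # may not line up and training can hang. Marking the sparse MoE block as a
            # ZeRO-3 "leaf" module gathers all of its expert parameters at once instead.
            set_z3_leaf_modules(model, [MixtralSparseMoeBlock])

        return model

The check keys on config.model_type == "mixtral" because MixtralSparseMoeBlock only exists in Mixtral models; dense Mistral checkpoints have no sparse MoE blocks and are left untouched.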