From 18ad259fb30d54b2973553cfc933eaa2fc7dd4f9 Mon Sep 17 00:00:00 2001
From: A-Cepheus <60658915+A-Cepheus@users.noreply.github.com>
Date: Mon, 22 Jan 2024 15:21:14 +0800
Subject: [PATCH 1/4] fix: ZeRO3 does not work with MoE models

Former-commit-id: b2844c049a88ea89f8e1812e2d2e8662b4002965
---
 src/llmtuner/model/loader.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/llmtuner/model/loader.py b/src/llmtuner/model/loader.py
index b02a4560..656bfa6d 100644
--- a/src/llmtuner/model/loader.py
+++ b/src/llmtuner/model/loader.py
@@ -96,6 +96,11 @@ def load_model_and_tokenizer(
         **config_kwargs,
     )
 
+    if getattr(config, "model_type", None) == "mistral" and is_deepspeed_zero3_enabled():
+        from deepspeed.utils import set_z3_leaf_modules
+        from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock
+        set_z3_leaf_modules(model, [MixtralSparseMoeBlock])
+
     patch_model(model, tokenizer, model_args, is_trainable)
     register_autoclass(config, model, tokenizer)

From 712ab4ae7aa3fff3085d4f439f5ecfee7a4099d3 Mon Sep 17 00:00:00 2001
From: A-Cepheus <60658915+A-Cepheus@users.noreply.github.com>
Date: Mon, 22 Jan 2024 16:01:58 +0800
Subject: [PATCH 2/4] 🐞 fix: typo, move MoE fix to patcher

Former-commit-id: 4ff28e99ff9b48df7150591c6bbd3723f22b7715
---
 src/llmtuner/model/loader.py  | 5 -----
 src/llmtuner/model/patcher.py | 5 +++++
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/llmtuner/model/loader.py b/src/llmtuner/model/loader.py
index 656bfa6d..b02a4560 100644
--- a/src/llmtuner/model/loader.py
+++ b/src/llmtuner/model/loader.py
@@ -96,11 +96,6 @@ def load_model_and_tokenizer(
         **config_kwargs,
     )
 
-    if getattr(config, "model_type", None) == "mistral" and is_deepspeed_zero3_enabled():
-        from deepspeed.utils import set_z3_leaf_modules
-        from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock
-        set_z3_leaf_modules(model, [MixtralSparseMoeBlock])
-
     patch_model(model, tokenizer, model_args, is_trainable)
     register_autoclass(config, model, tokenizer)

diff --git a/src/llmtuner/model/patcher.py b/src/llmtuner/model/patcher.py
index 5f67f618..52690e68 100644
--- a/src/llmtuner/model/patcher.py
+++ b/src/llmtuner/model/patcher.py
@@ -284,6 +284,11 @@ def patch_model(
     if is_trainable:
         _prepare_model_for_training(model, model_args)
 
+    if getattr(config, "model_type", None) == "mixtral" and is_deepspeed_zero3_enabled():
+        from deepspeed.utils import set_z3_leaf_modules
+        from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock
+        set_z3_leaf_modules(model, [MixtralSparseMoeBlock])
+
 
 def patch_valuehead_model(model: "AutoModelForCausalLMWithValueHead") -> None:
     def tie_weights(self: "AutoModelForCausalLMWithValueHead") -> None:

From 882a6a1d519d60796a9e7200c24cfc1f14d85b64 Mon Sep 17 00:00:00 2001
From: A-Cepheus <60658915+A-Cepheus@users.noreply.github.com>
Date: Mon, 22 Jan 2024 16:04:39 +0800
Subject: [PATCH 3/4] 🐞 fix: typo

Former-commit-id: 57a3687ecd23237559aee0e8e811b782846f2415
---
 src/llmtuner/model/patcher.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/llmtuner/model/patcher.py b/src/llmtuner/model/patcher.py
index 52690e68..892a9165 100644
--- a/src/llmtuner/model/patcher.py
+++ b/src/llmtuner/model/patcher.py
@@ -284,7 +284,7 @@ def patch_model(
     if is_trainable:
         _prepare_model_for_training(model, model_args)
 
-    if getattr(config, "model_type", None) == "mixtral" and is_deepspeed_zero3_enabled():
+    if getattr(model.config, "model_type", None) == "mixtral" and is_deepspeed_zero3_enabled():
         from deepspeed.utils import set_z3_leaf_modules
         from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock
         set_z3_leaf_modules(model, [MixtralSparseMoeBlock])

From b36c4b99ccd80bc43454b50078b2be9bcbd83f16 Mon Sep 17 00:00:00 2001
From: hoshi-hiyouga
Date: Mon, 22 Jan 2024 23:27:39 +0800
Subject: [PATCH 4/4] Update patcher.py

Former-commit-id: 33556cc6b0b65cc6db02e66f4f6e75112c33d966
---
 src/llmtuner/model/patcher.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/llmtuner/model/patcher.py b/src/llmtuner/model/patcher.py
index 892a9165..c537a6d2 100644
--- a/src/llmtuner/model/patcher.py
+++ b/src/llmtuner/model/patcher.py
@@ -285,6 +285,7 @@ def patch_model(
         _prepare_model_for_training(model, model_args)
 
     if getattr(model.config, "model_type", None) == "mixtral" and is_deepspeed_zero3_enabled():
+        require_version("deepspeed>=0.13.0", "To fix: pip install deepspeed>=0.13.0")
        from deepspeed.utils import set_z3_leaf_modules
         from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock
         set_z3_leaf_modules(model, [MixtralSparseMoeBlock])
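
Taken together, the four patches leave patch_model (src/llmtuner/model/patcher.py) marking Mixtral's sparse MoE block as a DeepSpeed ZeRO-3 "leaf" module whenever ZeRO stage 3 is active. The sketch below restates that end state as a standalone helper, not a verbatim excerpt: the function name mark_moe_leaf_modules is invented for illustration, and it assumes deepspeed>=0.13.0 (which introduced set_z3_leaf_modules, per PATCH 4/4) and a transformers release that ships Mixtral and exposes is_deepspeed_zero3_enabled from transformers.integrations.

    from transformers.integrations import is_deepspeed_zero3_enabled
    from transformers.utils.versions import require_version


    def mark_moe_leaf_modules(model) -> None:
        """Keep Mixtral's sparse MoE block whole under DeepSpeed ZeRO-3.

        Illustrative helper; the name is not part of the llmtuner codebase.
        """
        if getattr(model.config, "model_type", None) == "mixtral" and is_deepspeed_zero3_enabled():
            # set_z3_leaf_modules first appeared in deepspeed 0.13.0.
            require_version("deepspeed>=0.13.0", "To fix: pip install deepspeed>=0.13.0")
            from deepspeed.utils import set_z3_leaf_modules
            from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock

            # ZeRO-3 gathers and re-partitions sharded parameters through
            # per-submodule forward hooks. Because the MoE router activates
            # only a subset of experts per token, hooks on unused experts may
            # never fire, which can stall training. Marking the whole block
            # as a leaf makes DeepSpeed fetch all of its parameters at once.
            set_z3_leaf_modules(model, [MixtralSparseMoeBlock])

Called once after the model is loaded and before training starts, the helper is a no-op when ZeRO-3 is disabled or the model is not Mixtral, mirroring the guard this series settles on.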