Mirror of https://github.com/hiyouga/LLaMA-Factory.git, synced 2025-10-14 15:52:49 +08:00
fix qwen2 moe (#6684)
Former-commit-id: ab624419fa0ab23ef7a331a0ec14e393328772b5
This commit is contained in:
parent 3607caa2ad
commit 33525a34b6
@@ -61,7 +61,7 @@ def add_z3_leaf_module(model: "PreTrainedModel") -> None:
         _set_z3_leaf_modules(model, [MixtralSparseMoeBlock])

-    if model_type == "qwen2moe":
+    if model_type == "qwen2_moe":
         from transformers.models.qwen2_moe.modeling_qwen2_moe import Qwen2MoeSparseMoeBlock

         _set_z3_leaf_modules(model, [Qwen2MoeSparseMoeBlock])
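Why the one-character change matters: transformers registers the Qwen2-MoE architecture under the model_type string "qwen2_moe" (with an underscore), so the old comparison against "qwen2moe" never matched and the Qwen2-MoE sparse blocks were never registered as DeepSpeed ZeRO-3 leaf modules. Below is a minimal sketch of the patched helper for context, assuming DeepSpeed's public set_z3_leaf_modules utility (imported here under the _set_z3_leaf_modules alias used in the diff); it is illustrative, not the repository's exact source.

# Sketch of add_z3_leaf_module, assuming deepspeed.utils.set_z3_leaf_modules
# is available. Marking an MoE block as a ZeRO-3 "leaf" makes DeepSpeed gather
# its parameters as one unit instead of hooking each expert separately, which
# avoids stalls when only a subset of experts is activated in a forward pass.
from typing import TYPE_CHECKING

from deepspeed.utils import set_z3_leaf_modules as _set_z3_leaf_modules

if TYPE_CHECKING:
    from transformers import PreTrainedModel


def add_z3_leaf_module(model: "PreTrainedModel") -> None:
    # config.model_type identifies the architecture; Qwen2-MoE checkpoints
    # use "qwen2_moe", which is why the old "qwen2moe" check silently failed.
    model_type = getattr(model.config, "model_type", None)

    if model_type == "mixtral":
        from transformers.models.mixtral.modeling_mixtral import MixtralSparseMoeBlock

        _set_z3_leaf_modules(model, [MixtralSparseMoeBlock])

    if model_type == "qwen2_moe":
        from transformers.models.qwen2_moe.modeling_qwen2_moe import Qwen2MoeSparseMoeBlock

        _set_z3_leaf_modules(model, [Qwen2MoeSparseMoeBlock])

Because the mismatch only skipped a registration step, training would start normally but could stall or mis-partition expert weights under ZeRO-3 when some experts received no tokens in a step; after the fix, Qwen2MoeSparseMoeBlock instances are treated as atomic leaf modules just like MixtralSparseMoeBlock.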