[fix] Fix an issue when using FSDP2 with gradient checkpointing. (#9541)

Co-authored-by: jin-yongxu <jinyongxu@h-partners.com>
This commit is contained in:
xvxuopop
2025-12-06 16:04:51 +08:00
committed by GitHub
parent 165f3f073a
commit 109162dc56
2 changed files with 32 additions and 0 deletions

View File

@@ -18,6 +18,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import inspect
from functools import WRAPPER_ASSIGNMENTS, partial, wraps
from types import MethodType
@@ -152,6 +153,15 @@ def prepare_model_for_training(model: "PreTrainedModel", model_args: "ModelArgum
if param.ndim == 1 and any(ln_name in name for ln_name in LAYERNORM_NAMES):
param.data = param.data.to(torch.float32)
if (
os.environ.get("ACCELERATE_USE_FSDP", "false").lower() == "true"
and int(os.environ.get("FSDP_VERSION", "1")) == 2
):
model_args.use_reentrant_gc = False
logger.warning_rank0(
"You are using fsdp2, `use_reentrant_gc` has been set to False. "
)
if not model_args.disable_gradient_checkpointing:
if not getattr(model, "supports_gradient_checkpointing", False):
logger.warning_rank0("Current model does not support gradient checkpointing.")