mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-12-30 02:30:35 +08:00)
[fix] fix the issue when using fsdp2 with gradient checkpointing. (#9541)
Co-authored-by: jin-yongxu <jinyongxu@h-partners.com>
@@ -18,6 +18,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import os
 import inspect
 from functools import WRAPPER_ASSIGNMENTS, partial, wraps
 from types import MethodType
@@ -152,6 +153,15 @@ def prepare_model_for_training(model: "PreTrainedModel", model_args: "ModelArguments") -> None:
             if param.ndim == 1 and any(ln_name in name for ln_name in LAYERNORM_NAMES):
                 param.data = param.data.to(torch.float32)
 
+    if (
+        os.environ.get("ACCELERATE_USE_FSDP", "false").lower() == "true"
+        and int(os.environ.get("FSDP_VERSION", "1")) == 2
+    ):
+        model_args.use_reentrant_gc = False
+        logger.warning_rank0(
+            "You are using fsdp2, `use_reentrant_gc` has been set to False. "
+        )
+
     if not model_args.disable_gradient_checkpointing:
         if not getattr(model, "supports_gradient_checkpointing", False):
            logger.warning_rank0("Current model does not support gradient checkpointing.")
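
For context: reentrant (autograd-function-based) activation checkpointing does not compose with FSDP2's per-parameter sharding, so the patch forces the non-reentrant path whenever Accelerate's FSDP2 environment variables are set. Below is a minimal sketch of the same gate, assuming that `use_reentrant_gc` is ultimately forwarded to Transformers' `gradient_checkpointing_enable` as `gradient_checkpointing_kwargs={"use_reentrant": ...}`; the helper name `resolve_use_reentrant` is hypothetical, not part of the patch:

import os

from transformers import AutoModelForCausalLM


def resolve_use_reentrant(default: bool = True) -> bool:
    """Hypothetical helper mirroring the environment gate in the diff above."""
    # Accelerate exports these variables for FSDP runs; the patch reads the same ones.
    is_fsdp = os.environ.get("ACCELERATE_USE_FSDP", "false").lower() == "true"
    is_fsdp2 = int(os.environ.get("FSDP_VERSION", "1")) == 2
    if is_fsdp and is_fsdp2:
        return False  # FSDP2 requires the non-reentrant checkpoint implementation
    return default


model = AutoModelForCausalLM.from_pretrained("gpt2")  # any checkpointing-capable model
# Transformers forwards these kwargs to torch.utils.checkpoint.checkpoint,
# so use_reentrant=False selects the non-reentrant implementation.
model.gradient_checkpointing_enable(
    gradient_checkpointing_kwargs={"use_reentrant": resolve_use_reentrant()}
)

The non-reentrant implementation recomputes activations via saved-tensor hooks rather than re-invoking the forward inside a custom autograd function, which is presumably why it coexists with FSDP2's parameter hooks where the reentrant path fails.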