Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-12-29 18:20:35 +08:00)
[misc] lint (#9593)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
@@ -18,8 +18,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
 import inspect
+import os
 from functools import WRAPPER_ASSIGNMENTS, partial, wraps
 from types import MethodType
 from typing import TYPE_CHECKING, Any, Callable, Optional, Union
@@ -156,11 +156,9 @@ def prepare_model_for_training(model: "PreTrainedModel", model_args: "ModelArgum
     if (
         os.environ.get("ACCELERATE_USE_FSDP", "false").lower() == "true"
         and int(os.environ.get("FSDP_VERSION", "1")) == 2
     ):
         model_args.use_reentrant_gc = False
-        logger.warning_rank0(
-            "You are using fsdp2, `use_reentrant_gc` has been set to False. "
-        )
+        logger.warning_rank0("You are using fsdp2, `use_reentrant_gc` has been set to False.")
 
     if not model_args.disable_gradient_checkpointing:
         if not getattr(model, "supports_gradient_checkpointing", False):
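For context on the second hunk: `prepare_model_for_training` reads environment variables exported by `accelerate launch` to detect FSDP v2 and, when it is active, forces non-reentrant gradient checkpointing. Below is a minimal standalone sketch of that check, not the project's actual code: `_is_fsdp2_enabled` and `DummyModelArgs` are hypothetical names invented here for illustration, while `use_reentrant_gc`, `ACCELERATE_USE_FSDP`, and `FSDP_VERSION` come from the diff above.

import os


def _is_fsdp2_enabled() -> bool:
    # Same condition as in the hunk above: ACCELERATE_USE_FSDP signals that the
    # Accelerate FSDP plugin is active, and FSDP_VERSION selects v1 vs. v2.
    return (
        os.environ.get("ACCELERATE_USE_FSDP", "false").lower() == "true"
        and int(os.environ.get("FSDP_VERSION", "1")) == 2
    )


class DummyModelArgs:
    # Hypothetical stand-in for LLaMA-Factory's ModelArguments; only the field
    # touched by the hunk is modeled here.
    use_reentrant_gc: bool = True


model_args = DummyModelArgs()
if _is_fsdp2_enabled():
    # Mirror the patched behavior: under FSDP v2, reentrant gradient
    # checkpointing is switched off and a warning is emitted (the real code
    # uses logger.warning_rank0 so only rank 0 prints).
    model_args.use_reentrant_gc = False
    print("You are using fsdp2, `use_reentrant_gc` has been set to False.")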