diff --git a/src/llmtuner/extras/save_and_load.py b/src/llmtuner/extras/save_and_load.py
deleted file mode 100644
index 6d819ce6..00000000
--- a/src/llmtuner/extras/save_and_load.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import os
-import torch
-from transformers.trainer import WEIGHTS_NAME
-
-from llmtuner.extras.logging import get_logger
-
-
-logger = get_logger(__name__)
-
-
-def load_valuehead_params(model: torch.nn.Module, checkpoint_dir: os.PathLike) -> bool:
-    vhead_file = os.path.join(checkpoint_dir, WEIGHTS_NAME)
-    if not os.path.exists(vhead_file):
-        logger.warning("Provided path ({}) does not contain valuehead weights.".format(checkpoint_dir))
-        return False
-    vhead_params = torch.load(vhead_file, map_location="cpu")
-    model.register_buffer("reward_head_weight", vhead_params["v_head.summary.weight"], persistent=False)
-    model.register_buffer("reward_head_bias", vhead_params["v_head.summary.bias"], persistent=False)
-    model.register_buffer("default_head_weight", torch.zeros_like(vhead_params["v_head.summary.weight"]), persistent=False)
-    model.register_buffer("default_head_bias", torch.zeros_like(vhead_params["v_head.summary.bias"]), persistent=False)
-    return True
diff --git a/src/llmtuner/hparams/general_args.py b/src/llmtuner/hparams/general_args.py
deleted file mode 100644
index c0c1a0de..00000000
--- a/src/llmtuner/hparams/general_args.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from typing import Literal, Optional
-from dataclasses import dataclass, field
-
-
-@dataclass
-class GeneralArguments:
-    r"""
-    Arguments pertaining to which stage we are going to perform.
-    """
-    stage: Optional[Literal["pt", "sft", "rm", "ppo", "dpo"]] = field(
-        default="sft",
-        metadata={"help": "Which stage will be performed in training."}
-    )
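
Context for the removed helper (not part of the diff itself): the buffers that `load_valuehead_params` registered on the model are the reward head's linear weights, which a reward-model or PPO trainer would apply to a hidden state to obtain a scalar score. A minimal, hypothetical sketch of that consumption pattern follows; the function name `score_with_reward_head` and the variables `model` and `hidden_states` are illustrative assumptions, not code from this repository.

```python
import torch
import torch.nn.functional as F

def score_with_reward_head(model: torch.nn.Module, hidden_states: torch.Tensor) -> torch.Tensor:
    # Illustrative only: assumes load_valuehead_params() has already registered
    # reward_head_weight (shape [1, hidden_size]) and reward_head_bias (shape [1])
    # on the model, as in the deleted file above.
    last_hidden = hidden_states[:, -1, :]  # final token's hidden state, (batch, hidden_size)
    # Linear projection to a scalar reward per sequence, (batch, 1)
    return F.linear(last_hidden, model.reward_head_weight, model.reward_head_bias)
```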