Mirror of https://github.com/hiyouga/LLaMA-Factory.git
modify style
Former-commit-id: 235b4113709fe788b4f1a1a3089ce8356940877b
parent: 549f35b1fd
commit: 40bfe767f7
@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Union
+from typing import TYPE_CHECKING

 import torch
 from peft import LoraConfig, LoraModel, PeftModel, TaskType, get_peft_model
@@ -21,11 +21,11 @@ logger = get_logger(__name__)


 def init_adapter(
     config: "PretrainedConfig",
-    model: Union["PreTrainedModel"],
+    model: "PreTrainedModel",
     model_args: "ModelArguments",
     finetuning_args: "FinetuningArguments",
     is_trainable: bool,
-) -> Union["PreTrainedModel"]:
+) -> "PreTrainedModel":
     r"""
     Initializes the adapters.
@@ -112,7 +112,7 @@ def load_model(
     finetuning_args: "FinetuningArguments",
     is_trainable: bool = False,
     add_valuehead: bool = False,
-) -> Union["PreTrainedModel"]:
+) -> "PreTrainedModel":
     r"""
     Loads pretrained model.
     """
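The change is a type-hint cleanup: Union[...] with a single member is equivalent to that member, so the annotations are rewritten as bare forward references and the now-unused Union import is dropped. A minimal sketch of the before/after pattern follows; the trivial function bodies and the standalone function names are illustrative only, not the repository's actual implementation.

from typing import TYPE_CHECKING, Union

if TYPE_CHECKING:
    from transformers import PreTrainedModel  # imported only for type checking


# Before: Union with a single argument carries no extra meaning --
# Union["PreTrainedModel"] resolves to the same type as "PreTrainedModel".
def load_model_before(model: Union["PreTrainedModel"]) -> Union["PreTrainedModel"]:
    return model


# After: the bare forward reference states the same contract more directly,
# and once no annotation uses Union, its import can be removed as well.
def load_model_after(model: "PreTrainedModel") -> "PreTrainedModel":
    return model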