mirror of
https://github.com/hiyouga/LLaMA-Factory.git
synced 2025-12-16 11:50:35 +08:00
add patch processor func
Former-commit-id: 0cd6327da6a044b4a62f203a662e5bb6068d9c29
This commit is contained in:
@@ -34,11 +34,17 @@ from .model_utils.packing import configure_packing
|
||||
from .model_utils.quantization import configure_quantization
|
||||
from .model_utils.rope import configure_rope
|
||||
from .model_utils.valuehead import prepare_valuehead_model
|
||||
from .model_utils.visual import autocast_projector_dtype, configure_visual_model
|
||||
from .model_utils.visual import (
|
||||
autocast_projector_dtype,
|
||||
configure_visual_model,
|
||||
get_image_seqlen,
|
||||
get_patch_size,
|
||||
get_vision_feature_select_strategy,
|
||||
)
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from transformers import PretrainedConfig, PreTrainedTokenizer
|
||||
from transformers import PretrainedConfig, PreTrainedTokenizer, ProcessorMixin
|
||||
from trl import AutoModelForCausalLMWithValueHead
|
||||
|
||||
from ..hparams import ModelArguments
|
||||
@@ -52,6 +58,22 @@ def patch_tokenizer(tokenizer: "PreTrainedTokenizer") -> None:
|
||||
tokenizer._pad = MethodType(PreTrainedTokenizerBase._pad, tokenizer)
|
||||
|
||||
|
||||
def patch_processor(
    processor: "ProcessorMixin",
    config: "PretrainedConfig",
    tokenizer: "PreTrainedTokenizer",
    model_args: "ModelArguments",
) -> None:
    r"""Attach the tokenizer and multimodal metadata to the processor in place.

    The processor is augmented with the tokenizer plus image/video settings
    derived from the model config and user arguments, so downstream code can
    read them directly off the processor object.

    Args:
        processor: multimodal processor to patch (mutated in place).
        config: model config used to derive image/patch/vision settings.
        tokenizer: tokenizer to attach to the processor.
        model_args: user-supplied arguments providing image/video limits.
    """
    # Table of attribute name -> value; assigned in insertion order,
    # matching the original one-setattr-per-line sequence.
    attributes = {
        "tokenizer": tokenizer,
        "image_seqlen": get_image_seqlen(config),
        "image_resolution": model_args.image_resolution,
        "patch_size": get_patch_size(config),
        "video_resolution": model_args.video_resolution,
        "video_fps": model_args.video_fps,
        "video_maxlen": model_args.video_maxlen,
        "vision_feature_select_strategy": get_vision_feature_select_strategy(config),
    }
    for name, value in attributes.items():
        setattr(processor, name, value)
|
||||
|
||||
|
||||
def patch_config(
|
||||
config: "PretrainedConfig",
|
||||
tokenizer: "PreTrainedTokenizer",
|
||||
|
||||
Reference in New Issue
Block a user