From 906e7f09831323a6bdba0089d7d42625ad24d3e4 Mon Sep 17 00:00:00 2001
From: BUAADreamer <1428195643@qq.com>
Date: Thu, 25 Apr 2024 00:56:06 +0800
Subject: [PATCH] remove conflicts

Former-commit-id: f85f403d39e0af6dd57f9c94290e4fa63b344f0d
---
 scripts/test_mllm.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/scripts/test_mllm.py b/scripts/test_mllm.py
index 882bf032..961f02bf 100644
--- a/scripts/test_mllm.py
+++ b/scripts/test_mllm.py
@@ -5,6 +5,7 @@ import torch
 from datasets import load_dataset
 from peft import PeftModel
 from transformers import AutoTokenizer, AutoModelForVision2Seq, AutoProcessor
+import shutil
 
 """usage
 python3 scripts/test_mllm.py \
@@ -47,15 +48,14 @@ def apply_lora(base_model_path, model_path, lora_path):
     model.save_pretrained(model_path)
     tokenizer.save_pretrained(model_path)
     processor.image_processor.save_pretrained(model_path)
-    if 'instructblip' in model_path:
-        processor.qformer_tokenizer.save_pretrained(model_path)
+
 
 def main(
-        model_path: str,
-        dataset_name: str,
-        base_model_path: str = "",
-        lora_model_path: str = "",
-        do_merge: bool = False,
+    model_path: str,
+    dataset_name: str,
+    base_model_path: str = "",
+    lora_model_path: str = "",
+    do_merge: bool = False,
 ):
     if not os.path.exists(model_path) or do_merge:
         apply_lora(base_model_path, model_path, lora_model_path)
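
Note (not part of the patch): the second hunk only shows the tail of apply_lora. Below is a minimal sketch of the merge flow such a helper typically implements, assuming the standard PEFT pattern (PeftModel.from_pretrained followed by merge_and_unload); only the three save_pretrained calls are taken from the patch context itself, the rest is illustrative and may differ from the actual body of scripts/test_mllm.py.

import torch
from peft import PeftModel
from transformers import AutoModelForVision2Seq, AutoProcessor, AutoTokenizer


def apply_lora(base_model_path, model_path, lora_path):
    # Load the base vision-to-sequence model and its preprocessing assets (assumed step).
    base_model = AutoModelForVision2Seq.from_pretrained(
        base_model_path, torch_dtype=torch.float16, low_cpu_mem_usage=True
    )
    processor = AutoProcessor.from_pretrained(base_model_path)
    tokenizer = AutoTokenizer.from_pretrained(base_model_path)

    # Attach the LoRA adapter and fold its weights into the base model (assumed step).
    lora_model = PeftModel.from_pretrained(base_model, lora_path)
    model = lora_model.merge_and_unload()

    # Persist the merged model, tokenizer, and image processor; these three calls
    # correspond to the context lines visible in the hunk above.
    model.save_pretrained(model_path)
    tokenizer.save_pretrained(model_path)
    processor.image_processor.save_pretrained(model_path)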