Mirror of https://github.com/hiyouga/LLaMA-Factory.git, synced 2025-08-04 12:42:51 +08:00
Commit 906e7f0983 (parent ff8d729b59): remove conflicts
Former-commit-id: f85f403d39e0af6dd57f9c94290e4fa63b344f0d
@@ -5,6 +5,7 @@ import torch
 from datasets import load_dataset
 from peft import PeftModel
 from transformers import AutoTokenizer, AutoModelForVision2Seq, AutoProcessor
+import shutil
 
 """usage
 python3 scripts/test_mllm.py \
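The usage docstring breaks off at the line continuation above. Judging from the keyword parameters of main() in the next hunk, the invocation plausibly takes a form like the one below; the flag names simply mirror those parameters, and the paths are placeholders rather than values taken from this commit.

python3 scripts/test_mllm.py \
    --base_model_path path/to/base_model \
    --lora_model_path path/to/lora_adapter \
    --model_path path/to/merged_model \
    --dataset_name your_dataset \
    --do_merge True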
@@ -47,15 +48,14 @@ def apply_lora(base_model_path, model_path, lora_path):
     model.save_pretrained(model_path)
     tokenizer.save_pretrained(model_path)
     processor.image_processor.save_pretrained(model_path)
-    if 'instructblip' in model_path:
-        processor.qformer_tokenizer.save_pretrained(model_path)
 
 
 def main(
     model_path: str,
     dataset_name: str,
     base_model_path: str = "",
     lora_model_path: str = "",
     do_merge: bool = False,
 ):
     if not os.path.exists(model_path) or do_merge:
         apply_lora(base_model_path, model_path, lora_model_path)
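Only the tail of apply_lora is visible in this hunk. As context for the save_pretrained calls above, here is a minimal sketch of such a merge-and-save helper, built from the imports shown in the first hunk (torch, PeftModel, AutoModelForVision2Seq, AutoProcessor); everything other than the three save calls is an assumption about the surrounding code, not text from the commit.

import torch
from peft import PeftModel
from transformers import AutoModelForVision2Seq, AutoProcessor


def apply_lora(base_model_path, model_path, lora_path):
    # Load the base vision-language model plus its processor/tokenizer (assumed setup).
    base_model = AutoModelForVision2Seq.from_pretrained(
        base_model_path, torch_dtype=torch.float16, low_cpu_mem_usage=True
    )
    processor = AutoProcessor.from_pretrained(base_model_path)
    tokenizer = processor.tokenizer

    # Attach the LoRA adapter and fold its weights back into the base model.
    lora_model = PeftModel.from_pretrained(base_model, lora_path)
    model = lora_model.merge_and_unload()

    # Persist the merged model alongside its tokenizer and image processor,
    # matching the three save_pretrained calls shown in the hunk.
    model.save_pretrained(model_path)
    tokenizer.save_pretrained(model_path)
    processor.image_processor.save_pretrained(model_path)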