Mirror of https://github.com/hiyouga/LLaMA-Factory.git
@@ -101,7 +101,7 @@ def _init_adapter(
         logger.info("Fine-tuning method: LoRA")
         lastest_checkpoint = None
 
-        assert os.path.exists(model_args.checkpoint_dir[0], CONFIG_NAME), \
+        assert os.path.exists(os.path.join(model_args.checkpoint_dir[0], CONFIG_NAME)), \
             "The given checkpoint is not a LoRA checkpoint, please specify `--finetuning_type full/freeze` instead."
 
         if model_args.checkpoint_dir is not None:
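Why the change is needed: `os.path.exists` accepts exactly one path argument, so the pre-fix call that passed the directory and file name as two separate arguments raises a `TypeError` at runtime instead of performing the check. A minimal sketch of the corrected pattern follows; the literal values assigned to `checkpoint_dir` and `CONFIG_NAME` are illustrative stand-ins, not the repository's actual definitions:

    import os

    # Illustrative stand-ins: the repo takes the directory from
    # model_args.checkpoint_dir[0] and imports CONFIG_NAME elsewhere.
    checkpoint_dir = "path/to/checkpoint"
    CONFIG_NAME = "adapter_config.json"

    # Pre-fix call: os.path.exists() takes a single path argument, so
    # passing the directory and file name separately raises TypeError.
    # os.path.exists(checkpoint_dir, CONFIG_NAME)

    # Post-fix call: join the components into one path before testing it.
    print(os.path.exists(os.path.join(checkpoint_dir, CONFIG_NAME)))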