Mirror of https://github.com/hiyouga/LLaMA-Factory.git, synced 2025-11-04 18:02:19 +08:00
tiny fix

Former-commit-id: f9d50501aac1f60a3b445ca3fee9aa60995461ee
parent: aa6c3766de
commit: 2ac2cde03e
@@ -1,10 +1,6 @@
 #!/bin/bash
 # DO NOT use GPTQ/AWQ model in FSDP+QLoRA
 
-pip install "transformers>=4.39.1"
-pip install "accelerate>=0.28.0"
-pip install "bitsandbytes>=0.43.0"
-
 CUDA_VISIBLE_DEVICES=0,1 accelerate launch \
     --config_file examples/accelerate/fsdp_config.yaml \
     src/train.py examples/extras/fsdp_qlora/llama3_lora_sft.yaml
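The hunk above drops the inline pip installs from the FSDP+QLoRA launch script. A minimal sketch of the equivalent manual setup, reusing only the version pins and the launch command that appear in the diff (the YAML path is the one the script already references):

# One-time environment setup, using the version pins the old script carried.
pip install "transformers>=4.39.1" "accelerate>=0.28.0" "bitsandbytes>=0.43.0"

# Launch FSDP+QLoRA training on two GPUs, as in the trimmed script.
CUDA_VISIBLE_DEVICES=0,1 accelerate launch \
    --config_file examples/accelerate/fsdp_config.yaml \
    src/train.py examples/extras/fsdp_qlora/llama3_lora_sft.yaml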
@@ -104,10 +104,10 @@ def block_expansion(
         print("Model weights saved in {}".format(output_dir))
 
     print("Fine-tune this model with:")
-    print("  --model_name_or_path {} \\".format(output_dir))
-    print("  --finetuning_type freeze \\")
-    print("  --freeze_trainable_layers {} \\".format(num_expand))
-    print("  --use_llama_pro")
+    print("model_name_or_path: {}".format(output_dir))
+    print("finetuning_type: freeze")
+    print("freeze_trainable_layers: {}".format(num_expand))
+    print("use_llama_pro: true")
 
 
 if __name__ == "__main__":
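The block_expansion hint now prints YAML keys instead of CLI flags, matching the YAML-driven launch shown in the first hunk. A minimal sketch of acting on that output, assuming a hypothetical config file name and placeholder values; only the four keys come from the diff, the rest of the training config (stage, dataset, and so on) is assumed to exist already, and invoking src/train.py directly with the YAML path is an assumption based on how accelerate passes it above:

# Append the printed keys to an existing SFT config (values are placeholders).
cat >> llama_pro_sft.yaml <<'EOF'
model_name_or_path: path/to/expanded-model    # printed output_dir
finetuning_type: freeze
freeze_trainable_layers: 8                    # printed num_expand
use_llama_pro: true
EOF

# Train with the updated config, passing the YAML to src/train.py.
CUDA_VISIBLE_DEVICES=0 python src/train.py llama_pro_sft.yaml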
@@ -179,7 +179,7 @@ def get_dataset(
             if training_args.should_save:
                 dataset.save_to_disk(data_args.tokenized_path)
                 logger.info("Tokenized dataset saved at {}.".format(data_args.tokenized_path))
-                logger.info("Please restart the training with `--tokenized_path {}`.".format(data_args.tokenized_path))
+                logger.info("Please restart the training with `tokenized_path: {}`.".format(data_args.tokenized_path))
 
             sys.exit(0)
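The final hunk updates the restart hint from the old CLI flag to the YAML key. A minimal sketch of the restart it suggests, with a hypothetical tokenized-dataset path; the config name and launch command are reused from the first hunk:

# Point the config at the saved tokenized dataset (the path is a placeholder
# for whatever the log message printed), then relaunch training.
echo 'tokenized_path: saves/tokenized' >> examples/extras/fsdp_qlora/llama3_lora_sft.yaml

CUDA_VISIBLE_DEVICES=0,1 accelerate launch \
    --config_file examples/accelerate/fsdp_config.yaml \
    src/train.py examples/extras/fsdp_qlora/llama3_lora_sft.yaml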