mirror of https://github.com/hiyouga/LLaMA-Factory.git

commit 27f2c3cae1 (parent 48f0819327)
@@ -67,7 +67,7 @@ def quantize_loftq(
     loftq_dir = os.path.join(output_dir, "loftq_init")

     # Save LoftQ model
-    setattr(peft_model.peft_config["default"], "base_model_name_or_path", output_dir)
+    setattr(peft_model.peft_config["default"], "base_model_name_or_path", os.path.abspath(output_dir))
     setattr(peft_model.peft_config["default"], "init_lora_weights", True)  # don't apply loftq again
     peft_model.save_pretrained(loftq_dir, safe_serialization=save_safetensors)
     print("Adapter weights saved in {}".format(loftq_dir))
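Why the absolute path matters: the saved adapter config is what later resolves the base model, so a relative output_dir would break if the adapter were loaded from a different working directory. A minimal reload sketch, assuming the peft and transformers libraries and an illustrative directory layout (the path names here are hypothetical, not from the commit):

from peft import PeftConfig, PeftModel
from transformers import AutoModelForCausalLM

# Hypothetical path: where quantize_loftq saved the adapter above.
loftq_dir = "output/loftq_init"

# After this commit, base_model_name_or_path is absolute, so resolving the
# base model works regardless of the current working directory.
config = PeftConfig.from_pretrained(loftq_dir)
base_model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path)
peft_model = PeftModel.from_pretrained(base_model, loftq_dir)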
@@ -62,6 +62,7 @@ def quantize_pissa(
     pissa_dir = os.path.join(output_dir, "pissa_init")

     # Save PiSSA model
+    setattr(peft_model.peft_config["default"], "base_model_name_or_path", os.path.abspath(output_dir))
     setattr(peft_model.peft_config["default"], "init_lora_weights", True)  # don't apply pissa again
     peft_model.save_pretrained(pissa_dir, safe_serialization=save_safetensors)
     print("Adapter weights saved in {}".format(pissa_dir))
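The two fields this commit touches can be read back from the saved PiSSA adapter config. A minimal sketch, assuming the peft library and an illustrative path (not taken from the commit):

from peft import PeftConfig

# Hypothetical path: output_dir/pissa_init as written by quantize_pissa above.
config = PeftConfig.from_pretrained("output/pissa_init")
print(config.base_model_name_or_path)  # now an absolute path to output_dir
print(config.init_lora_weights)        # True, so PiSSA init is not applied again on load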