mirror of
				https://github.com/hiyouga/LLaMA-Factory.git
				synced 2025-11-04 18:02:19 +08:00 
			
		
		
		
	loosen gemma2 attention
Former-commit-id: a0b645017a2de3d58b6cbc71bd91ec96fc7a818b
This commit is contained in:
		
							parent
							
								
									6a75d57060
								
							
						
					
					
						commit
						3c4f8eaa55
					
				@@ -32,8 +32,14 @@ def configure_attn_implementation(
 | 
			
		||||
    config: "PretrainedConfig", model_args: "ModelArguments", is_trainable: bool
 | 
			
		||||
) -> None:
 | 
			
		||||
    if getattr(config, "model_type", None) == "gemma2" and is_trainable:  # gemma2 adopts soft-cap attention
 | 
			
		||||
        logger.warning("Gemma-2 models should use eager attention in training, change `flash_attn` to disabled.")
 | 
			
		||||
        model_args.flash_attn = "disabled"
 | 
			
		||||
        if model_args.flash_attn == "auto":
 | 
			
		||||
            logger.warning("Gemma-2 models should use eager attention in training, change `flash_attn` to disabled.")
 | 
			
		||||
            model_args.flash_attn = "disabled"
 | 
			
		||||
        else:
 | 
			
		||||
            logger.warning(
 | 
			
		||||
                "Gemma-2 models should use eager attention in training, but you set `flash_attn: {}`. "
 | 
			
		||||
                "Will proceed at your own risk.".format(model_args.flash_attn)
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
    if model_args.flash_attn == "auto":
 | 
			
		||||
        return
 | 
			
		||||
 | 
			
		||||
@@ -1,7 +1,4 @@
 | 
			
		||||
# Copyright 2024 HuggingFace Inc. and the LlamaFactory team.
 | 
			
		||||
#
 | 
			
		||||
# This code is inspired by the HuggingFace's transformers library.
 | 
			
		||||
# https://github.com/huggingface/transformers/blob/v4.40.0/src/transformers/trainer.py
 | 
			
		||||
# Copyright 2024 the LlamaFactory team.
 | 
			
		||||
#
 | 
			
		||||
# Licensed under the Apache License, Version 2.0 (the "License");
 | 
			
		||||
# you may not use this file except in compliance with the License.
 | 
			
		||||
 | 
			
		||||
		Loading…
	
	
			
			x
			
			
		
	
		Reference in New Issue
	
	Block a user