Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-22 22:02:51 +08:00)
fix attn patch for kimivl (#7867)
This commit is contained in:
parent b4407e4b0b
commit 035e98035c
@@ -69,6 +69,9 @@ def configure_attn_implementation(
     if getattr(config, "model_type", None) == "internlm2":  # special case for custom models
         setattr(config, "attn_implementation", requested_attn_implementation)
+    elif getattr(config, "model_type", None) == "kimi_vl":
+        setattr(config.vision_config, "_attn_implementation", requested_attn_implementation)
+        setattr(config.text_config, "_attn_implementation", requested_attn_implementation)
     else:
         setattr(config, "_attn_implementation", requested_attn_implementation)
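For context, here is a rough standalone sketch of why the patch writes to two places (not LLaMA-Factory's actual function: the signature is simplified and SimpleNamespace stands in for the real Kimi-VL config classes). Kimi-VL's top-level config wraps separate vision_config and text_config sub-configs, so the requested attention backend has to be propagated into both, while ordinary models only need the top-level _attn_implementation attribute.

from types import SimpleNamespace

def configure_attn_implementation(config, requested_attn_implementation="flash_attention_2"):
    # Mirrors the patched branch logic; signature simplified for illustration.
    if getattr(config, "model_type", None) == "internlm2":  # special case for custom models
        setattr(config, "attn_implementation", requested_attn_implementation)
    elif getattr(config, "model_type", None) == "kimi_vl":
        # Kimi-VL is a composite model: set the backend on both sub-configs.
        setattr(config.vision_config, "_attn_implementation", requested_attn_implementation)
        setattr(config.text_config, "_attn_implementation", requested_attn_implementation)
    else:
        setattr(config, "_attn_implementation", requested_attn_implementation)

# Hypothetical stand-in for a Kimi-VL config with nested sub-configs.
config = SimpleNamespace(
    model_type="kimi_vl",
    vision_config=SimpleNamespace(),
    text_config=SimpleNamespace(),
)
configure_attn_implementation(config)
print(config.vision_config._attn_implementation)  # flash_attention_2
print(config.text_config._attn_implementation)    # flash_attention_2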