Mirror of https://github.com/hiyouga/LLaMA-Factory.git, synced 2025-11-05 10:22:15 +08:00
fix attn patch for kimivl (#7867)
parent 4ec560df1c
commit 3e2460bb38
@@ -69,6 +69,9 @@ def configure_attn_implementation(
     if getattr(config, "model_type", None) == "internlm2":  # special case for custom models
         setattr(config, "attn_implementation", requested_attn_implementation)
+    elif getattr(config, "model_type", None) == "kimi_vl":
+        setattr(config.vision_config, "_attn_implementation", requested_attn_implementation)
+        setattr(config.text_config, "_attn_implementation", requested_attn_implementation)
     else:
         setattr(config, "_attn_implementation", requested_attn_implementation)
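Below is a minimal, self-contained sketch of the branching logic this commit adds, not the project's actual code: the function signature is simplified, and SimpleNamespace stands in for the real Kimi-VL config classes. It illustrates why the kimi_vl branch is needed: the Kimi-VL config nests separate vision_config and text_config sub-configs, so the requested attention implementation has to be written to both rather than only to the top-level config.

# Minimal sketch (assumptions: simplified signature; SimpleNamespace replaces
# the real Hugging Face config classes). Mirrors the branching added here.
from types import SimpleNamespace


def configure_attn_implementation(config, requested_attn_implementation):
    if getattr(config, "model_type", None) == "internlm2":  # special case for custom models
        setattr(config, "attn_implementation", requested_attn_implementation)
    elif getattr(config, "model_type", None) == "kimi_vl":
        # Kimi-VL keeps separate vision/text sub-configs; set both so the
        # vision tower does not silently fall back to its default attention.
        setattr(config.vision_config, "_attn_implementation", requested_attn_implementation)
        setattr(config.text_config, "_attn_implementation", requested_attn_implementation)
    else:
        setattr(config, "_attn_implementation", requested_attn_implementation)


# Usage on a stand-in config object:
config = SimpleNamespace(
    model_type="kimi_vl",
    vision_config=SimpleNamespace(),
    text_config=SimpleNamespace(),
)
configure_attn_implementation(config, "flash_attention_2")
assert config.vision_config._attn_implementation == "flash_attention_2"
assert config.text_config._attn_implementation == "flash_attention_2"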