Mirror of https://github.com/hiyouga/LLaMA-Factory.git, synced 2026-01-05 05:30:36 +08:00
[misc] Compatible with an empty architectures field in config.json (#9709)
@@ -138,13 +138,15 @@ def patch_config(
     if getattr(config, "model_type", None) == "kimi_vl" and is_trainable:
         setattr(config.text_config, "topk_method", "greedy")
 
-    if "InternVLChatModel" in getattr(config, "architectures", []):
+    architectures = getattr(config, "architectures", None)
+
+    if isinstance(architectures, (list, tuple)) and "InternVLChatModel" in architectures:
         raise ValueError(
             "Please download the internvl models in a Hugging Face–compatible format "
             "(for example, https://huggingface.co/OpenGVLab/InternVL3-8B-hf)."
         )
 
-    if "LlavaLlamaForCausalLM" in getattr(config, "architectures", []):
+    if isinstance(architectures, (list, tuple)) and "LlavaLlamaForCausalLM" in architectures:
         raise ValueError("Please download llava models with hf-compatible format: https://huggingface.co/llava-hf")
 
     if getattr(config, "model_type", None) == "internlm3" and not is_transformers_version_greater_than("4.47.1"):
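Why this matters: getattr(config, "architectures", []) only falls back to [] when the attribute is missing entirely. If config.json declares "architectures": null, the attribute exists with value None, and the old membership test raises TypeError. The sketch below is illustration only, not LLaMA-Factory code; the SimpleNamespace object is an assumed stand-in for the Hugging Face config.

# Illustration: config.json contains `"architectures": null`, so the attribute
# is present but set to None (SimpleNamespace stands in for the real config).
from types import SimpleNamespace

config = SimpleNamespace(architectures=None)

# Old check: getattr() returns None because the attribute exists,
# and `"InternVLChatModel" in None` raises TypeError.
try:
    _ = "InternVLChatModel" in getattr(config, "architectures", [])
except TypeError as err:
    print(f"old check fails: {err}")

# New check: read the field once, test membership only for real sequences.
architectures = getattr(config, "architectures", None)
if isinstance(architectures, (list, tuple)) and "InternVLChatModel" in architectures:
    raise ValueError("unsupported InternVL checkpoint format")
print("new check tolerates an empty architectures field")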