Mirror of https://github.com/hiyouga/LLaMA-Factory.git, synced 2025-08-04 12:42:51 +08:00
fix bug

Former-commit-id: d9e52957e272e8133f1b37cf20d193084425e09e
This commit is contained in: parent b2200409f5, commit eb835b693d
llmtuner/extras/constants.py

@@ -20,7 +20,7 @@ SUBJECTS = ["Average", "STEM", "Social Sciences", "Humanities", "Other"]
 
 SUPPORTED_MODELS = OrderedDict()
 
-MODELSCOPE_MODELS = OrderedDict()
+ALL_OFFICIAL_MODELS = OrderedDict()
 
 TRAINING_STAGES = {
     "Supervised Fine-Tuning": "sft",
@@ -43,12 +43,14 @@ def register_model_group(
         else:
             assert prefix == name.split("-")[0], "prefix should be identical."
 
+        ALL_OFFICIAL_MODELS[name] = [path] if isinstance(path, str) else list(path.values())
         if not int(os.environ.get('USE_MODELSCOPE_HUB', '0')):
             # If path is a string, we treat it as a huggingface model-id by default.
             SUPPORTED_MODELS[name] = path["hf"] if isinstance(path, dict) else path
         elif isinstance(path, dict) and "ms" in path:
             # Use ModelScope modelhub
             SUPPORTED_MODELS[name] = path["ms"]
+        print(f'Supported models add {name}/{SUPPORTED_MODELS[name]}')
     if module is not None:
         DEFAULT_MODULE[prefix] = module
     if template is not None:
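
For context, the hunk above changes register_model_group so that, in addition to picking the hub-specific path for SUPPORTED_MODELS based on the USE_MODELSCOPE_HUB environment variable, it now also records every official hub path of each model in the new ALL_OFFICIAL_MODELS registry. Below is a minimal, self-contained sketch of that registration flow, assuming only the registry names from the diff; the model id and hub paths ("Example-7B", "example-org/...", "example-ms/...") are hypothetical placeholders.

import os
from collections import OrderedDict

SUPPORTED_MODELS = OrderedDict()      # model name -> the single path the UI uses
ALL_OFFICIAL_MODELS = OrderedDict()   # model name -> every official hub path

def register_models_sketch(models):
    for name, path in models.items():
        # New in this commit: remember all official paths (HF and MS alike)
        # so cached user paths can later be checked against them.
        ALL_OFFICIAL_MODELS[name] = [path] if isinstance(path, str) else list(path.values())
        if not int(os.environ.get('USE_MODELSCOPE_HUB', '0')):
            # A plain string is treated as a Hugging Face model id by default.
            SUPPORTED_MODELS[name] = path["hf"] if isinstance(path, dict) else path
        elif isinstance(path, dict) and "ms" in path:
            # ModelScope hub selected: use the "ms" path when one exists.
            SUPPORTED_MODELS[name] = path["ms"]

register_models_sketch({
    "Example-7B": {"hf": "example-org/Example-7B", "ms": "example-ms/Example-7B"}
})
print(ALL_OFFICIAL_MODELS["Example-7B"])  # both official paths, regardless of hub
print(SUPPORTED_MODELS["Example-7B"])     # only the path for the active hub
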
llmtuner/webui/common.py

@@ -11,7 +11,7 @@ from transformers.utils import (
     ADAPTER_SAFE_WEIGHTS_NAME
 )
 
-from llmtuner.extras.constants import DEFAULT_MODULE, DEFAULT_TEMPLATE, SUPPORTED_MODELS, TRAINING_STAGES
+from llmtuner.extras.constants import DEFAULT_MODULE, DEFAULT_TEMPLATE, SUPPORTED_MODELS, ALL_OFFICIAL_MODELS, TRAINING_STAGES
 
 
 DEFAULT_CACHE_DIR = "cache"
@@ -58,7 +58,10 @@ def save_config(lang: str, model_name: Optional[str] = None, model_path: Optiona
 
 def get_model_path(model_name: str) -> str:
     user_config = load_config()
-    return user_config["path_dict"].get(model_name, None) or SUPPORTED_MODELS.get(model_name, "")
+    cached_path = user_config["path_dict"].get(model_name, None)
+    if cached_path in ALL_OFFICIAL_MODELS.get(model_name, []):
+        cached_path = None
+    return cached_path or SUPPORTED_MODELS.get(model_name, "")
 
 
 def get_prefix(model_name: str) -> str:
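
Judging from the diff, this is the bug the commit title refers to: get_model_path previously returned any cached path from the user config unconditionally, so after switching between the Hugging Face and ModelScope hubs the web UI kept resolving official models to the stale path cached for the other hub. The fixed version discards a cached path whenever it matches one of the model's official paths, letting SUPPORTED_MODELS (built for the currently active hub) take over, while genuinely custom paths still win. A minimal sketch of the fixed lookup, assuming registries shaped like the ones above; the user config contents and paths are hypothetical.

def get_model_path_sketch(model_name, user_config, supported_models, all_official_models):
    cached_path = user_config["path_dict"].get(model_name, None)
    # A cached path that is itself an official hub path may belong to the
    # previously selected hub, so drop it and re-resolve below.
    if cached_path in all_official_models.get(model_name, []):
        cached_path = None
    # A genuinely custom path (e.g. a local checkpoint) still takes precedence.
    return cached_path or supported_models.get(model_name, "")

# The user last ran with the Hugging Face hub, so its path was cached:
user_config = {"path_dict": {"Example-7B": "example-org/Example-7B"}}
# Now, with USE_MODELSCOPE_HUB=1, SUPPORTED_MODELS holds the ModelScope path:
print(get_model_path_sketch(
    "Example-7B",
    user_config,
    {"Example-7B": "example-ms/Example-7B"},
    {"Example-7B": ["example-org/Example-7B", "example-ms/Example-7B"]},
))  # -> example-ms/Example-7B, not the stale cached path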