mirror of
https://github.com/hiyouga/LLaMA-Factory.git
synced 2025-08-01 11:12:50 +08:00
[model] support MiniCPM4 (#8314)
This commit is contained in:
parent
8ffe7daa8d
commit
d39d3106cb
@ -803,6 +803,46 @@ register_template(
|
||||
)
|
||||
|
||||
|
||||
# copied from chatml template
# Chat template for MiniCPM4: ChatML-style role markers plus MiniCPM-specific
# thought (<|thought_start|>/<|thought_end|>) and tool-call
# (<|tool_call_start|>/<|tool_call_end|>) delimiters.
register_template(
    name="cpm4",
    # User turns open an assistant turn immediately, ChatML style.
    format_user=StringFormatter(slots=["<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n"]),
    format_assistant=StringFormatter(slots=["{{content}}<|im_end|>\n"]),
    format_system=StringFormatter(slots=["<|im_start|>system\n{{content}}<|im_end|>\n"]),
    format_function=FunctionFormatter(slots=["{{content}}<|im_end|>\n"], tool_format="default"),
    # Tool results are fed back under the "tool" role, then the assistant resumes.
    format_observation=StringFormatter(slots=["<|im_start|>tool\n{{content}}<|im_end|>\n<|im_start|>assistant\n"]),
    format_tools=ToolFormatter(tool_format="default"),
    # Prepend the BOS token before the first message.
    format_prefix=EmptyFormatter(slots=[{"bos_token"}]),
    thought_words=("<|thought_start|>", "<|thought_end|>"),
    stop_words=["<|im_end|>", "<|tool_call_start|>", "<|tool_call_end|>"],
    # Default system prompt describing MiniCPM4's function-calling protocol.
    # NOTE(review): this text is part of the model's expected prompt format —
    # keep it byte-identical to the upstream MiniCPM4 template.
    default_system=(
        "# Functions\n"
        "Here is a list of functions that you can invoke:\n"
        "```python\n"
        "from enum import Enum\n"
        "from typing import List, Dict, Optional\n"
        "from pydantic import BaseModel, Field\n"
        "```\n\n"
        "# Function Call Rule and Output Format\n"
        "- If the user's question can be answered without calling any function, please answer the user's question directly. In this situation, you should return your thought and answer the user's question directly.\n"
        "- If the user cannot be answered without calling any function, and the user does not provide enough information to call functions, please ask the user for more information. In this situation, you should return your thought and ask the user for more information.\n"
        "- If the user's question cannot be answered without calling any function, and the user has provided enough information to call functions to solve it, you should call the functions. In this situation, the assistant should return your thought and call the functions.\n"
        "- Use default parameters unless the user has specified otherwise.\n"
        "- You should answer in the following format:\n\n"
        "<|thought_start|>\n"
        "{explain why the user's question can be answered without calling a function or why you should ask the user for more information or why you should call one or more functions and your plan to solve the user's question.}\n"
        "<|thought_end|>\n"
        "<|tool_call_start|>\n"
        "```python\n"
        "func1(params_name=params_value, params_name2=params_value2...)\n"
        "func2(params)\n"
        "```\n"
        "<|tool_call_end|>\n"
        "{answer the user's question directly or ask the user for more information}"
    ),
)
|
||||
|
||||
|
||||
# copied from chatml template
|
||||
register_template(
|
||||
name="dbrx",
|
||||
|
@ -1502,6 +1502,25 @@ register_model_group(
|
||||
template="cpm3",
|
||||
)
|
||||
|
||||
# MiniCPM4 0.5B chat model, paired with the "cpm4" chat template.
register_model_group(
    models={
        "MiniCPM4-0_5B-Chat": {
            # Hugging Face Hub repository.
            DownloadSource.DEFAULT: "openbmb/MiniCPM4-0.5B",
            # ModelScope mirror (note the capitalized org name).
            DownloadSource.MODELSCOPE: "OpenBMB/MiniCPM4-0.5B",
        },
    },
    template="cpm4",
)
|
||||
|
||||
# MiniCPM4 8B chat model, paired with the "cpm4" chat template.
register_model_group(
    models={
        "MiniCPM4-8B-Chat": {
            # Hugging Face Hub repository.
            DownloadSource.DEFAULT: "openbmb/MiniCPM4-8B",
            # ModelScope mirror (note the capitalized org name).
            DownloadSource.MODELSCOPE: "OpenBMB/MiniCPM4-8B",
        },
    },
    template="cpm4",
)
|
||||
|
||||
register_model_group(
|
||||
models={
|
||||
|
Loading…
x
Reference in New Issue
Block a user