From b41697c9b6b5c45d74f8866e5876880053c3ef58 Mon Sep 17 00:00:00 2001
From: 阿丹 (adan) <47373076+LDLINGLINGLING@users.noreply.github.com>
Date: Tue, 10 Jun 2025 14:38:39 +0800
Subject: [PATCH] [model] support MiniCPM4 (#8314)

---
 src/llamafactory/data/template.py    | 40 ++++++++++++++++++++++++++++
 src/llamafactory/extras/constants.py | 19 +++++++++++++
 2 files changed, 59 insertions(+)

diff --git a/src/llamafactory/data/template.py b/src/llamafactory/data/template.py
index 3f3a3164..37a5558b 100644
--- a/src/llamafactory/data/template.py
+++ b/src/llamafactory/data/template.py
@@ -803,6 +803,46 @@ register_template(
 )
 
 
+# copied from chatml template
+register_template(
+    name="cpm4",
+    format_user=StringFormatter(slots=["<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n"]),
+    format_assistant=StringFormatter(slots=["{{content}}<|im_end|>\n"]),
+    format_system=StringFormatter(slots=["<|im_start|>system\n{{content}}<|im_end|>\n"]),
+    format_function=FunctionFormatter(slots=["{{content}}<|im_end|>\n"], tool_format="default"),
+    format_observation=StringFormatter(slots=["<|im_start|>tool\n{{content}}<|im_end|>\n<|im_start|>assistant\n"]),
+    format_tools=ToolFormatter(tool_format="default"),
+    format_prefix=EmptyFormatter(slots=[{"bos_token"}]),
+    thought_words=("<|thought_start|>", "<|thought_end|>"),
+    stop_words=["<|im_end|>", "<|tool_call_start|>", "<|tool_call_end|>"],
+    default_system=(
+        "# Functions\n"
+        "Here is a list of functions that you can invoke:\n"
+        "```python\n"
+        "from enum import Enum\n"
+        "from typing import List, Dict, Optional\n"
+        "from pydantic import BaseModel, Field\n"
+        "```\n\n"
+        "# Function Call Rule and Output Format\n"
+        "- If the user's question can be answered without calling any function, please answer the user's question directly. In this situation, you should return your thought and answer the user's question directly.\n"
+        "- If the user's question cannot be answered without calling any function, and the user does not provide enough information to call functions, please ask the user for more information. In this situation, you should return your thought and ask the user for more information.\n"
+        "- If the user's question cannot be answered without calling any function, and the user has provided enough information to call functions to solve it, you should call the functions. In this situation, you should return your thought and call the functions.\n"
+        "- Use default parameters unless the user has specified otherwise.\n"
+        "- You should answer in the following format:\n\n"
+        "<|thought_start|>\n"
+        "{explain why the user's question can be answered without calling a function or why you should ask the user for more information or why you should call one or more functions and your plan to solve the user's question.}\n"
+        "<|thought_end|>\n"
+        "<|tool_call_start|>\n"
+        "```python\n"
+        "func1(params_name=params_value, params_name2=params_value2...)\n"
+        "func2(params)\n"
+        "```\n"
+        "<|tool_call_end|>\n"
+        "{answer the user's question directly or ask the user for more information}"
+    ),
+)
+
+
 # copied from chatml template
 register_template(
     name="dbrx",
diff --git a/src/llamafactory/extras/constants.py b/src/llamafactory/extras/constants.py
index 99388461..90255b24 100644
--- a/src/llamafactory/extras/constants.py
+++ b/src/llamafactory/extras/constants.py
@@ -1502,6 +1502,25 @@ register_model_group(
     template="cpm3",
 )
 
+register_model_group(
+    models={
+        "MiniCPM4-0_5B-Chat": {
+            DownloadSource.DEFAULT: "openbmb/MiniCPM4-0.5B",
+            DownloadSource.MODELSCOPE: "OpenBMB/MiniCPM4-0.5B",
+        },
+    },
+    template="cpm4",
+)
+
+register_model_group(
+    models={
+        "MiniCPM4-8B-Chat": {
+            DownloadSource.DEFAULT: "openbmb/MiniCPM4-8B",
+            DownloadSource.MODELSCOPE: "OpenBMB/MiniCPM4-8B",
+        },
+    },
+    template="cpm4",
+)
 
 register_model_group(
     models={
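For reference, the cpm4 template is ChatML-style prompt framing plus MiniCPM-specific thought and tool-call markers. Below is a minimal sketch of what the slots above concatenate into for a single-turn conversation. It is hand-rolled for illustration: `render_cpm4` is a hypothetical helper, not part of LLaMA-Factory; the real rendering is done by the Template/Formatter classes, which also prepend the BOS token via format_prefix.

```python
# Hand-rolled illustration of the cpm4 slot layout; not LLaMA-Factory code.
def render_cpm4(system: str, user: str) -> str:
    prompt = ""  # format_prefix additionally prepends the tokenizer's BOS token
    if system:
        prompt += f"<|im_start|>system\n{system}<|im_end|>\n"  # format_system
    prompt += f"<|im_start|>user\n{user}<|im_end|>\n"  # format_user
    prompt += "<|im_start|>assistant\n"  # the model generates from here
    return prompt


print(render_cpm4("You are a helpful assistant.", "Introduce MiniCPM4 briefly."))
```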
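The default_system prompt asks the model to wrap its reasoning in <|thought_start|>/<|thought_end|> and to emit function calls as a Python code block between <|tool_call_start|> and <|tool_call_end|>, which is why those markers are registered as stop_words. A rough extraction sketch over a made-up response in that format (this is not LLaMA-Factory's tool parser, and `get_weather` is an invented function):

```python
import re

# A made-up assistant response following the format requested by default_system.
response = (
    "<|thought_start|>\n"
    "The user asked for the weather, so I plan to call get_weather.\n"
    "<|thought_end|>\n"
    "<|tool_call_start|>\n"
    "```python\n"
    'get_weather(city="Beijing")\n'
    "```\n"
    "<|tool_call_end|>\n"
)

# Pull out the thought span and the code inside the tool-call markers.
thought = re.search(r"<\|thought_start\|>\n(.*?)\n<\|thought_end\|>", response, re.DOTALL)
call = re.search(r"<\|tool_call_start\|>\n```python\n(.*?)\n```\n<\|tool_call_end\|>", response, re.DOTALL)
print(thought.group(1))  # The user asked for the weather, so I plan to call get_weather.
print(call.group(1))  # get_weather(city="Beijing")
```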
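After this patch, both the template and the model entries should be discoverable by name. A quick sanity check, assuming a source checkout of LLaMA-Factory on PYTHONPATH and that TEMPLATES and SUPPORTED_MODELS are the module-level registries that register_template and register_model_group write into:

```python
from llamafactory.data.template import TEMPLATES
from llamafactory.extras.constants import SUPPORTED_MODELS

assert "cpm4" in TEMPLATES  # registered by the template.py hunk above
# register_model_group keys entries by display name, so both sizes should appear.
print([name for name in SUPPORTED_MODELS if name.startswith("MiniCPM4")])
```

In a training config the template is then selected with `template: cpm4` (or `--template cpm4` on the CLI).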