Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-04 04:32:50 +08:00)
Merge pull request #3254 from marko1616/feature/Add-support-for-CohereForAI/c4ai-command-r-plus
Add template & support for c4ai-command-r/plus (tested)
Former-commit-id: 7a8ae3f4ac278f2773ec23db7f760099e18b8d0c
This commit is contained in: commit 57ddf739c2
@@ -526,6 +526,21 @@ _register_template(
 )
 
 
+_register_template(
+    name="cohere",
+    format_user=StringFormatter(
+        slots=[
+            (
+                "<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{{content}}<|END_OF_TURN_TOKEN|>"
+                "<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>"
+            )
+        ]
+    ),
+    format_system=EmptyFormatter(slots=[{"bos_token"}]),
+    force_system=True,
+)
+
+
 _register_template(
     name="cpm",
     format_user=StringFormatter(slots=["<用户>{{content}}<AI>"]),
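For reference, a minimal sketch of the single-turn prompt this template produces. It assumes the Command-R tokenizer's BOS token is "<BOS_TOKEN>"; in LLaMA-Factory the actual string is assembled by the registered formatters, not by a helper like the one below.

# Minimal sketch of the prompt built from the slots above (single user turn).
# Assumption: the tokenizer's bos_token is "<BOS_TOKEN>"; force_system=True
# means the BOS-only system slot is always prepended.
BOS = "<BOS_TOKEN>"

def render_cohere_prompt(user_message: str) -> str:
    """Build a single-turn Command-R prompt following the registered slots."""
    user_turn = (
        f"<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{user_message}<|END_OF_TURN_TOKEN|>"
        "<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>"
    )
    return BOS + user_turn

print(render_cohere_prompt("Hello!"))
# <BOS_TOKEN><|START_OF_TURN_TOKEN|><|USER_TOKEN|>Hello!<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>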
@@ -242,6 +242,28 @@ register_model_group(
 )
 
 
+register_model_group(
+    models={
+        "CommandR-35B-Chat": {
+            DownloadSource.DEFAULT: "CohereForAI/c4ai-command-r-v01",
+            DownloadSource.MODELSCOPE: "AI-ModelScope/c4ai-command-r-v01",
+        },
+        "CommandR-Plus-104B-Chat": {
+            DownloadSource.DEFAULT: "CohereForAI/c4ai-command-r-plus",
+            DownloadSource.MODELSCOPE: "AI-ModelScope/c4ai-command-r-plus",
+        },
+        "CommandR-35B-4bit-Chat": {
+            DownloadSource.DEFAULT: "CohereForAI/c4ai-command-r-v01-4bit",
+            DownloadSource.MODELSCOPE: "mirror013/c4ai-command-r-v01-4bit",
+        },
+        "CommandR-Plus-104B-4bit-Chat": {
+            DownloadSource.DEFAULT: "CohereForAI/c4ai-command-r-plus-4bit",
+        },
+    },
+    template="cohere",
+)
+
+
 register_model_group(
     models={
         "DeepSeek-LLM-7B-Base": {
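As a simplified illustration of what the new entry provides, the sketch below resolves one of the registered Command-R names to a repository id by preferred hub, falling back to the default when no ModelScope mirror is registered. The DownloadSource values and the fallback logic are assumptions for this example, not LLaMA-Factory's actual lookup code.

# Simplified, self-contained illustration of resolving a registered model
# name to a repo id per download hub. The enum values and fallback behaviour
# are assumptions for illustration only.
from enum import Enum

class DownloadSource(str, Enum):
    DEFAULT = "hf"      # Hugging Face Hub
    MODELSCOPE = "ms"   # ModelScope mirror

COMMAND_R_MODELS = {
    "CommandR-35B-Chat": {
        DownloadSource.DEFAULT: "CohereForAI/c4ai-command-r-v01",
        DownloadSource.MODELSCOPE: "AI-ModelScope/c4ai-command-r-v01",
    },
    "CommandR-Plus-104B-4bit-Chat": {
        # no ModelScope mirror registered for this model
        DownloadSource.DEFAULT: "CohereForAI/c4ai-command-r-plus-4bit",
    },
}

def resolve(model: str, prefer: DownloadSource) -> str:
    """Return the repo id for `model`, falling back to DEFAULT if needed."""
    sources = COMMAND_R_MODELS[model]
    return sources.get(prefer, sources[DownloadSource.DEFAULT])

print(resolve("CommandR-Plus-104B-4bit-Chat", DownloadSource.MODELSCOPE))
# CohereForAI/c4ai-command-r-plus-4bit  (falls back to the default hub)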