support ORPO

This commit is contained in:
hiyouga
2024-03-31 18:29:50 +08:00
parent 27776c3474
commit 17bf8a2c3a
22 changed files with 395 additions and 47 deletions

View File

@@ -39,9 +39,12 @@ TRAINING_STAGES = {
"Reward Modeling": "rm",
"PPO": "ppo",
"DPO": "dpo",
"ORPO": "orpo",
"Pre-Training": "pt",
}
# Training stages whose datasets must supply paired examples (the "rm", "dpo"
# and "orpo" stage keys from TRAINING_STAGES above) — presumably chosen/rejected
# response pairs for preference optimization; confirm against the data loader.
STAGES_USE_PAIR_DATA = ["rm", "dpo", "orpo"]
# Output filenames for serialized value-head weights: PyTorch pickle format
# and the safetensors variant, respectively.
V_HEAD_WEIGHTS_NAME = "value_head.bin"
V_HEAD_SAFE_WEIGHTS_NAME = "value_head.safetensors"