[api] support repetition_penalty and align presence_penalty with OpenAI Client (#7958)
@@ -106,6 +106,8 @@ class ChatCompletionRequest(BaseModel):
     max_tokens: Optional[int] = None
     stop: Optional[Union[str, list[str]]] = None
     stream: bool = False
+    presence_penalty: Optional[float] = None
+    repetition_penalty: Optional[float] = None


 class ChatCompletionResponseChoice(BaseModel):
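As a quick illustration (not part of the commit itself), below is a minimal sketch of how a client might exercise the two new request fields, assuming a LLaMA-Factory OpenAI-compatible API server is running at a placeholder address with a placeholder model name. presence_penalty maps directly onto the OpenAI Client argument of the same name, while repetition_penalty is not part of the OpenAI schema, so it is passed through the client's extra_body option.

# Sketch only: base_url, api_key, and model name are placeholders, not values from the commit.
from openai import OpenAI

client = OpenAI(
    base_url="http://localhost:8000/v1",  # hypothetical address of the API server
    api_key="empty",                      # placeholder key
)

response = client.chat.completions.create(
    model="test-model",                   # placeholder model name
    messages=[{"role": "user", "content": "Hello!"}],
    presence_penalty=0.5,                 # standard OpenAI Client argument, now read by the request model
    extra_body={"repetition_penalty": 1.1},  # non-OpenAI field added in this commit, sent as an extra body key
)
print(response.choices[0].message.content)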