Mirror of https://github.com/hiyouga/LLaMA-Factory.git, synced 2025-08-23 22:32:54 +08:00
add stop parameter in chat.py
Former-commit-id: 80645751bc4db20dbadb53950fe35af8b67eec41
parent 0c6c50f9b5
commit 189346188b
chat.py
@@ -103,6 +103,7 @@ async def create_chat_completion_response(
         top_p=request.top_p,
         max_new_tokens=request.max_tokens,
         num_return_sequences=request.n,
+        stop=request.stop
     )
 
     prompt_length, response_length = 0, 0
@@ -155,6 +156,7 @@ async def create_stream_chat_completion_response(
         temperature=request.temperature,
         top_p=request.top_p,
         max_new_tokens=request.max_tokens,
+        stop=request.stop
     ):
         if len(new_token) != 0:
             yield _create_stream_chat_completion_chunk(
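With this change, the stop field of an incoming chat completion request is forwarded to the model's generation call on both the blocking and the streaming path. Below is a minimal client-side sketch of exercising the new field against the OpenAI-compatible chat completions endpoint that chat.py serves; the host, port, model name, and stop strings are illustrative assumptions, not values taken from this commit.

# Hypothetical usage sketch: POST a chat completion request that includes
# "stop" strings. Host, port, model name, and the stop values below are
# assumptions made for illustration only.
import requests

payload = {
    "model": "test",
    "messages": [{"role": "user", "content": "Count from one to ten."}],
    "max_tokens": 128,
    # field added by this commit: generation ends when one of these strings appears
    "stop": ["\n\n", "five"],
}

response = requests.post(
    "http://localhost:8000/v1/chat/completions",
    json=payload,
    timeout=60,
)
print(response.json()["choices"][0]["message"]["content"])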