Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-10-15 08:08:09 +08:00)
remove empty stream response
Former-commit-id: 070d0da928b1e974a094279a2782201016d2a3ab
This commit is contained in:
parent: 9381fecca7
commit: e9fe8815be
@@ -156,9 +156,10 @@ async def create_stream_chat_completion_response(
         top_p=request.top_p,
         max_new_tokens=request.max_tokens,
     ):
-        yield _create_stream_chat_completion_chunk(
-            completion_id=completion_id, model=request.model, delta=ChatCompletionMessage(content=new_token)
-        )
+        if len(new_token) != 0:
+            yield _create_stream_chat_completion_chunk(
+                completion_id=completion_id, model=request.model, delta=ChatCompletionMessage(content=new_token)
+            )

     yield _create_stream_chat_completion_chunk(
         completion_id=completion_id, model=request.model, delta=ChatCompletionMessage(), finish_reason=Finish.STOP
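For context, the guard added above follows a common pattern in OpenAI-style streaming endpoints: skip empty incremental tokens so the client never receives a delta chunk with no content, then emit one final chunk that carries only the finish reason. The sketch below is a minimal, self-contained illustration of that pattern, not the project's actual code; stream_tokens, make_chunk, and the sample token list are hypothetical stand-ins for chat_model.astream_chat(...) and _create_stream_chat_completion_chunk.

import asyncio
from typing import AsyncIterator, Optional


def make_chunk(content: Optional[str] = None, finish_reason: Optional[str] = None) -> dict:
    # Hypothetical stand-in for _create_stream_chat_completion_chunk.
    return {"delta": {"content": content}, "finish_reason": finish_reason}


async def stream_tokens() -> AsyncIterator[str]:
    # Hypothetical token source; the real code iterates chat_model.astream_chat(...),
    # which may yield empty strings between decoded pieces.
    for token in ["Hello", "", " world", ""]:
        yield token


async def stream_chat_completion() -> AsyncIterator[dict]:
    async for new_token in stream_tokens():
        # Skip empty tokens so no empty delta chunks reach the client.
        if len(new_token) != 0:
            yield make_chunk(content=new_token)
    # Always send a terminal chunk carrying only the finish reason.
    yield make_chunk(finish_reason="stop")


async def main() -> None:
    async for chunk in stream_chat_completion():
        print(chunk)


if __name__ == "__main__":
    asyncio.run(main())

Running the sketch prints content chunks only for "Hello" and " world", followed by the single stop chunk, which mirrors the effect of the added length check.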