Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-10-15 08:08:09 +08:00)
Update chat_model.py
Former-commit-id: 7736aafdc81d175e9fb484dbb7cae9263120a0fc
parent 77a089c35c
commit 17d398f419
@@ -29,7 +29,6 @@ class ChatModel:
         else:
             raise NotImplementedError("Unknown backend: {}".format(model_args.infer_backend))
 
-        self.system_message = generating_args.system_message or None
         self._loop = asyncio.new_event_loop()
         self._thread = Thread(target=_start_background_loop, args=(self._loop,), daemon=True)
         self._thread.start()
@@ -64,7 +63,6 @@ class ChatModel:
         image: Optional["NDArray"] = None,
         **input_kwargs,
     ) -> Generator[str, None, None]:
-        system = system or self.system_message
         generator = self.astream_chat(messages, system, tools, image, **input_kwargs)
         while True:
             try:
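Since the removed lines drop the fallback to a stored `self.system_message`, callers that relied on `generating_args.system_message` would now supply the system prompt per call. A minimal usage sketch under that assumption; the `llamafactory.chat` import path, constructor arguments, and message-dict format are assumptions and not part of this diff (only the `stream_chat` argument order is taken from the hunk above):

# Minimal sketch (assumption): stream_chat() no longer falls back to a stored
# system message, so the system prompt is passed explicitly on each call.
from llamafactory.chat import ChatModel  # import path is an assumption

chat_model = ChatModel({"model_name_or_path": "path/to/model", "template": "default"})

messages = [{"role": "user", "content": "Hello!"}]
# Argument order (messages, system, tools, image) mirrors the astream_chat call in the diff.
for new_text in chat_model.stream_chat(messages, system="You are a helpful assistant."):
    print(new_text, end="", flush=True)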