Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-10-14 23:58:11 +08:00)
Merge pull request #3449 from hiyouga/mllm

add webui backend option

Former-commit-id: 372fcedef40b79fe8bd3932c06c720f2a03db6e6
Commit: d64c87f928
@@ -31,7 +31,10 @@ class WebChatModel(ChatModel):
         if demo_mode and os.environ.get("DEMO_MODEL") and os.environ.get("DEMO_TEMPLATE"):  # load demo model
             model_name_or_path = os.environ.get("DEMO_MODEL")
             template = os.environ.get("DEMO_TEMPLATE")
-            super().__init__(dict(model_name_or_path=model_name_or_path, template=template))
+            infer_backend = os.environ.get("DEMO_BACKEND", "huggingface")
+            super().__init__(
+                dict(model_name_or_path=model_name_or_path, template=template, infer_backend=infer_backend)
+            )

     @property
     def loaded(self) -> bool:
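The change reads a new DEMO_BACKEND environment variable (defaulting to "huggingface") and forwards it to ChatModel as infer_backend, so the demo-mode web UI can run on a different inference backend. Below is a minimal sketch, not part of the commit, of how the demo could be configured from the environment; the model path and template name are placeholders, and using "vllm" as an alternative backend value is an assumption.

    # Sketch only: configure the demo-mode web chat model via environment variables.
    import os

    os.environ["DEMO_MODEL"] = "path/to/your/model"   # placeholder model path
    os.environ["DEMO_TEMPLATE"] = "default"           # placeholder template name
    os.environ["DEMO_BACKEND"] = "vllm"               # assumed value; falls back to "huggingface" if unset

    # Mirrors the lookup the patched __init__ performs before calling ChatModel:
    init_args = dict(
        model_name_or_path=os.environ.get("DEMO_MODEL"),
        template=os.environ.get("DEMO_TEMPLATE"),
        infer_backend=os.environ.get("DEMO_BACKEND", "huggingface"),
    )
    print(init_args)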