fix mm inference

Former-commit-id: 60fc6b926e
hiyouga
2024-09-02 01:47:40 +08:00
parent f13e974930
commit b2a5f49a24
6 changed files with 19 additions and 23 deletions

View File

@@ -14,9 +14,7 @@
 import json
 import os
-from typing import TYPE_CHECKING, Dict, Generator, List, Optional, Sequence, Tuple
-from numpy.typing import NDArray
+from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Tuple
 from ..chat import ChatModel
 from ..data import Role
@@ -134,7 +132,7 @@ class WebChatModel(ChatModel):
         messages: Sequence[Dict[str, str]],
         system: str,
         tools: str,
-        image: Optional[NDArray],
+        image: Optional[Any],
         max_new_tokens: int,
         top_p: float,
         temperature: float,
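
Note on this file: the hint for `image` is relaxed from `Optional[NDArray]` to `Optional[Any]` because the web UI now passes a PIL image to the handler (see the second file below), so the runtime numpy import can be dropped. If a precise hint were still wanted without a runtime dependency, a `TYPE_CHECKING`-guarded alias is one common pattern; the sketch below is illustrative only and not part of this commit (the `ImageObject` alias and the `predict` stub are hypothetical):

from typing import TYPE_CHECKING, Dict, Optional, Sequence

if TYPE_CHECKING:
    # Imported only for static type checking; PIL is not needed at runtime.
    from PIL.Image import Image as ImageObject


def predict(
    messages: Sequence[Dict[str, str]],
    image: Optional["ImageObject"] = None,  # string forward reference, never evaluated at runtime
    max_new_tokens: int = 512,
) -> str:
    # Placeholder body: only reports whether an image was attached to the request.
    return "image attached" if image is not None else "text only"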

View File

@@ -44,7 +44,7 @@ def create_chat_box(
         tools = gr.Textbox(show_label=False, lines=3)
         with gr.Column() as image_box:
-            image = gr.Image(sources=["upload"], type="numpy")
+            image = gr.Image(sources=["upload"], type="pil")
         query = gr.Textbox(show_label=False, lines=8)
         submit_btn = gr.Button(variant="primary")