mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-04 20:52:59 +08:00)

# Copyright 2024 the LlamaFactory team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, AsyncGenerator, Dict, List, Literal, Optional, Sequence, Union


if TYPE_CHECKING:
    from numpy.typing import NDArray
    from transformers import PreTrainedModel, PreTrainedTokenizer
    from vllm import AsyncLLMEngine

    from ..data import Template
    from ..hparams import DataArguments, FinetuningArguments, GeneratingArguments, ModelArguments


@dataclass
class Response:
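    r"""A single generation result returned by `BaseEngine.chat`."""
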
    response_text: str
    response_length: int
    prompt_length: int
    finish_reason: Literal["stop", "length"]


class BaseEngine(ABC):
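    r"""Abstract interface that every inference backend (e.g. Transformers or vLLM) must implement."""
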
    model: Union["PreTrainedModel", "AsyncLLMEngine"]
    tokenizer: "PreTrainedTokenizer"
    can_generate: bool
    template: "Template"
    generating_args: Dict[str, Any]

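    # Builds the engine from the parsed model, data, finetuning and generating arguments.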
    @abstractmethod
    def __init__(
        self,
        model_args: "ModelArguments",
        data_args: "DataArguments",
        finetuning_args: "FinetuningArguments",
        generating_args: "GeneratingArguments",
    ) -> None: ...

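    # Performs any asynchronous setup needed before the engine can serve requests.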
    @abstractmethod
    async def start(
        self,
    ) -> None: ...

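    # Generates complete responses for a conversation and returns them as `Response` objects.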
    @abstractmethod
    async def chat(
        self,
        messages: Sequence[Dict[str, str]],
        system: Optional[str] = None,
        tools: Optional[str] = None,
        image: Optional["NDArray"] = None,
        **input_kwargs,
    ) -> List["Response"]: ...

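    # Streams a response for a conversation, yielding the generated text piece by piece.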
    @abstractmethod
    async def stream_chat(
        self,
        messages: Sequence[Dict[str, str]],
        system: Optional[str] = None,
        tools: Optional[str] = None,
        image: Optional["NDArray"] = None,
        **input_kwargs,
    ) -> AsyncGenerator[str, None]: ...

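    # Scores a batch of input texts, returning one float per input (e.g. reward-model scoring).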
    @abstractmethod
    async def get_scores(
        self,
        batch_input: List[str],
        **input_kwargs,
    ) -> List[float]: ...
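

# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the upstream file: a minimal concrete
# subclass showing how the interface above is meant to be filled in. The name
# `EchoEngine` and its echo behaviour are hypothetical stand-ins for a real
# backend built on Transformers or vLLM.
# ---------------------------------------------------------------------------
class EchoEngine(BaseEngine):
    def __init__(
        self,
        model_args: "ModelArguments",
        data_args: "DataArguments",
        finetuning_args: "FinetuningArguments",
        generating_args: "GeneratingArguments",
    ) -> None:
        # A real engine would load `model`, `tokenizer` and `template` here.
        self.can_generate = True
        self.generating_args = {}

    async def start(self) -> None:
        pass  # e.g. create semaphores or background workers

    async def chat(self, messages, system=None, tools=None, image=None, **input_kwargs):
        # Echo the last message back as a single finished response.
        text = messages[-1]["content"]
        return [Response(response_text=text, response_length=len(text), prompt_length=0, finish_reason="stop")]

    async def stream_chat(self, messages, system=None, tools=None, image=None, **input_kwargs):
        # Yield the reply word by word, as a streaming backend would.
        for piece in messages[-1]["content"].split():
            yield piece

    async def get_scores(self, batch_input, **input_kwargs):
        # A scoring backend would return one score per input text.
        return [0.0 for _ in batch_input]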