fix qwen tokenizer #361

Former-commit-id: 78a2fa95c8ab669254a6c8fce8138c4395fb0a09
This commit is contained in:
hiyouga 2023-08-05 17:06:05 +08:00
parent fdbb2c5378
commit a70d56864e

View File

@@ -98,12 +98,16 @@ class Template:
r"""
Converts context to token ids.
"""
if hasattr(tokenizer, "tokenizer"): # for tiktoken tokenizer (Qwen)
kwargs = dict(allowed_special="all")
else:
kwargs = dict(add_special_tokens=False)
token_ids = []
for elem in context:
if isinstance(elem, str):
elem = elem.replace("{{query}}", query, 1)
elem = elem.replace("<mask>", "[MASK]")
token_ids = token_ids + tokenizer.encode(elem, add_special_tokens=False)
token_ids = token_ids + tokenizer.encode(elem, **kwargs)
elif isinstance(elem, dict):
token_ids = token_ids + [tokenizer.convert_tokens_to_ids(elem.get("token"))]
else: