Former-commit-id: 4a871e80e205466262534cdc710b0495954b153e
This commit is contained in:
hiyouga 2024-02-29 17:28:50 +08:00
parent 8487b66532
commit 1853b5c172

View File

@@ -169,11 +169,14 @@ def _configure_quantization(
         quantization_config["use_exllama"] = False  # disable exllama
     if quantization_config.get("quant_method", None) == "aqlm":
+        require_version(
+            "transformers>=4.39.0.dev0", "To fix: pip install git+https://github.com/huggingface/transformers.git"
+        )
         quantization_config["bits"] = 2
     logger.info(
         "Loading {}-bit {}-quantized model.".format(
-            quantization_config.get("bits", "?"), quantization_config.get("quant_method", None)
+            quantization_config.get("bits", "?"), str(quantization_config.get("quant_method", "")).upper()
         )
     )