Mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-08-04 12:42:51 +08:00)

commit 1853b5c172
parent 8487b66532

    tiny fix

    Former-commit-id: 4a871e80e205466262534cdc710b0495954b153e
@@ -169,11 +169,14 @@ def _configure_quantization(
             quantization_config["use_exllama"] = False  # disable exllama

         if quantization_config.get("quant_method", None) == "aqlm":
+            require_version(
+                "transformers>=4.39.0.dev0", "To fix: pip install git+https://github.com/huggingface/transformers.git"
+            )
             quantization_config["bits"] = 2

         logger.info(
             "Loading {}-bit {}-quantized model.".format(
-                quantization_config.get("bits", "?"), quantization_config.get("quant_method", None)
+                quantization_config.get("bits", "?"), str(quantization_config.get("quant_method", "")).upper()
             )
         )
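For reference, the added guard relies on require_version from transformers.utils.versions, which raises an ImportError carrying the hint string when the installed package does not satisfy the requirement. Below is a minimal sketch of the AQLM branch after this change; the wrapper function, the imports, and the print stand-in for logger.info are assumptions for illustration, not code taken from this commit.

# Minimal sketch of the AQLM branch after this commit; everything outside the
# diff hunk (the function name, imports, and print in place of logger.info)
# is an assumption for illustration.
from typing import Any, Dict

from transformers.utils.versions import require_version


def describe_quantization(quantization_config: Dict[str, Any]) -> None:
    if quantization_config.get("quant_method", None) == "aqlm":
        # Raises ImportError with the given hint if the installed transformers
        # is older than 4.39.0.dev0, which at the time meant a source install.
        require_version(
            "transformers>=4.39.0.dev0", "To fix: pip install git+https://github.com/huggingface/transformers.git"
        )
        # The AQLM config here carries no "bits" entry, so it is pinned to 2
        # purely so the log line below prints something meaningful.
        quantization_config["bits"] = 2

    # str(...).upper() keeps the message readable even if quant_method is missing.
    print(
        "Loading {}-bit {}-quantized model.".format(
            quantization_config.get("bits", "?"), str(quantization_config.get("quant_method", "")).upper()
        )
    )


# Example: an AQLM checkpoint's quantization_config without a "bits" field.
describe_quantization({"quant_method": "aqlm"})  # prints "Loading 2-bit AQLM-quantized model."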