Mirror of https://github.com/hiyouga/LLaMA-Factory.git
fix mod stuff
Former-commit-id: cf3988226e6398c67bb2955578e436fc505aa5c5
@@ -83,6 +83,8 @@ def count_parameters(model: torch.nn.Module) -> Tuple[int, int]:
         if param.__class__.__name__ == "Params4bit":
             if hasattr(param, "quant_storage") and hasattr(param.quant_storage, "itemsize"):
                 num_bytes = param.quant_storage.itemsize
             elif hasattr(param, "element_size"):  # for older pytorch version
                 num_bytes = param.element_size()
             else:
                 num_bytes = 1
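For context, this branch decides how many bytes bitsandbytes uses to store each packed 4-bit parameter: newer versions expose the storage dtype via quant_storage.itemsize, older PyTorch falls back to element_size(), and 1 byte is assumed otherwise. Below is a minimal sketch of how the surrounding count_parameters function (named in the hunk header) plausibly uses num_bytes; only the Params4bit branch is taken verbatim from this hunk, while the 2 * num_bytes scaling and the trainable/total accounting are assumptions about code outside the diff.

    from typing import Tuple

    import torch


    def count_parameters(model: torch.nn.Module) -> Tuple[int, int]:
        """Return (trainable parameter count, total parameter count).

        Sketch for illustration; parts outside the Params4bit branch are
        assumed, not shown in this commit's hunk.
        """
        trainable_params, all_param = 0, 0
        for param in model.parameters():
            num_params = param.numel()

            # bitsandbytes packs two 4-bit values into each storage element,
            # so numel() undercounts; rescale by 2 * bytes-per-element
            # (assumed correction factor, not part of this hunk).
            if param.__class__.__name__ == "Params4bit":
                if hasattr(param, "quant_storage") and hasattr(param.quant_storage, "itemsize"):
                    num_bytes = param.quant_storage.itemsize
                elif hasattr(param, "element_size"):  # for older pytorch version
                    num_bytes = param.element_size()
                else:
                    num_bytes = 1

                num_params = num_params * 2 * num_bytes

            all_param += num_params
            if param.requires_grad:
                trainable_params += num_params

        return trainable_params, all_param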