From 63e2bf2dba551607dcf8b7b133fa1bd73d012d5d Mon Sep 17 00:00:00 2001
From: "He, Xin3"
Date: Thu, 18 Dec 2025 21:54:25 -0500
Subject: [PATCH] rewrite fill_default_value logic for robustness

Signed-off-by: He, Xin3
---
 auto_round/compressors/utils.py | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/auto_round/compressors/utils.py b/auto_round/compressors/utils.py
index dd85f4ebd..29d01bf9e 100644
--- a/auto_round/compressors/utils.py
+++ b/auto_round/compressors/utils.py
@@ -334,13 +334,15 @@ def normalize_item(item: Union[str, dict, "QuantizationScheme"], layer_name: str
 
     # In AutoScheme with mixed gguf:q4_k_m, the super_group_size of gguf:q8_0 layer is None,
     # which should not be filled by default q4km again
-    if fill_default_value:
-        tmp_scheme_keys = scheme_keys
-    else:
-        tmp_scheme_keys = extra_scheme_keys
     for cfg in layer_config.values():
-        for key in tmp_scheme_keys:
-            cfg.setdefault(key, copy.deepcopy(default_dict.get(key)))
+        for key in scheme_keys:
+            if fill_default_value:
+                cfg.setdefault(key, copy.deepcopy(default_dict.get(key)))
+            else:
+                if key in extra_scheme_keys:
+                    cfg.setdefault(key, copy.deepcopy(default_dict.get(key)))
+                else:
+                    cfg.setdefault(key, None)
 
     # 5. collect supported modules
     embedding_types = (torch.nn.Embedding,)
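
Note (not part of the patch): below is a minimal standalone sketch of the rewritten loop, using hypothetical values for scheme_keys, extra_scheme_keys, default_dict, and layer_config (the real definitions live elsewhere in auto_round/compressors/utils.py). It illustrates the two properties the rewrite relies on: dict.setdefault() never overwrites an existing key, so an explicit None (the gguf:q8_0 case from the comment) is preserved, and with fill_default_value=False every non-extra key is now explicitly set to None instead of being left absent.

    # Hypothetical inputs chosen only to demonstrate the loop's behavior.
    import copy

    scheme_keys = ["bits", "group_size", "super_group_size"]
    extra_scheme_keys = ["super_group_size"]  # keys still backfilled from defaults
    default_dict = {"bits": 4, "group_size": 32, "super_group_size": 8}

    # A gguf:q8_0-style layer whose super_group_size is intentionally None.
    layer_config = {"model.layers.0.mlp": {"bits": 8, "super_group_size": None}}

    fill_default_value = False
    for cfg in layer_config.values():
        for key in scheme_keys:
            if fill_default_value:
                cfg.setdefault(key, copy.deepcopy(default_dict.get(key)))
            else:
                if key in extra_scheme_keys:
                    cfg.setdefault(key, copy.deepcopy(default_dict.get(key)))
                else:
                    cfg.setdefault(key, None)

    # setdefault() keeps the explicit None, so the q8_0 layer's super_group_size
    # is not refilled with the q4_k_m default, while group_size is added as None:
    print(layer_config)
    # {'model.layers.0.mlp': {'bits': 8, 'super_group_size': None, 'group_size': None}}

The practical effect versus the removed tmp_scheme_keys version: downstream code can now rely on every scheme key being present in each layer config, with None marking "deliberately unset" rather than the key simply being missing.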