diff --git a/nemo/collections/nlp/parts/peft_config.py b/nemo/collections/nlp/parts/peft_config.py
index 50c97e349885..726ca33611d7 100644
--- a/nemo/collections/nlp/parts/peft_config.py
+++ b/nemo/collections/nlp/parts/peft_config.py
@@ -170,7 +170,7 @@ def __init__(self, cfg):
             elif module == PEFT_MODULE_MAP["dense_module"]:
                 adapter_cfg = self._create_lora_config(
-                    cfg, lora_cfg, cfg.hidden_size, cfg.hidden_size, LoraDenseAttentionAdapterConfig
+                    cfg, lora_cfg, projection_size, cfg.hidden_size, LoraDenseAttentionAdapterConfig
                 )
                 name_key_to_cfg[AdapterName.LORA_DENSE_ATTENTION_ADAPTER] = adapter_cfg
                 name_key_to_mcore_mixins[AdapterName.LORA_DENSE_ATTENTION_ADAPTER] = [
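
Context for the one-argument change: in this config class, `projection_size` is derived from `kv_channels * num_attention_heads`, which equals `hidden_size` only when `kv_channels` takes its default of `hidden_size // num_attention_heads`. The attention output (dense) projection consumes the concatenated per-head outputs, so a LoRA adapter wrapping it must accept `projection_size`-dimensional input; hard-coding `cfg.hidden_size` breaks any model that sets `kv_channels` explicitly. The sketch below is illustrative only, not NeMo's adapter code: the config values and the plain `nn.Linear` LoRA pair are assumptions chosen to show the shape constraint.

```python
# Minimal sketch (not the NeMo implementation) of why the LoRA adapter on the
# attention output projection must use projection_size, not hidden_size, as its
# input dimension. All concrete numbers here are hypothetical.
import torch
import torch.nn as nn

hidden_size = 1024
num_attention_heads = 16
kv_channels = 80  # per-head dim set independently of hidden_size

# The dense (output) projection takes the concatenated head outputs as input.
projection_size = num_attention_heads * kv_channels  # 1280 != hidden_size

dense = nn.Linear(projection_size, hidden_size)

# LoRA low-rank pair alongside the dense layer: in_features must match the
# dense layer's input, i.e. projection_size (the post-fix behavior).
rank = 8
lora_in = nn.Linear(projection_size, rank, bias=False)
lora_out = nn.Linear(rank, hidden_size, bias=False)

x = torch.randn(2, projection_size)   # attention context, pre-projection
y = dense(x) + lora_out(lora_in(x))   # shapes line up
print(y.shape)                        # torch.Size([2, 1024])
```

With the pre-fix dimension (`hidden_size` in place of `projection_size` for `lora_in`), the `lora_in(x)` call would raise a shape-mismatch error whenever `kv_channels * num_attention_heads != hidden_size`.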