diff --git a/python/llm/src/ipex_llm/transformers/models/minicpmv.py b/python/llm/src/ipex_llm/transformers/models/minicpmv.py
index 7703f9f3158..accfd2dc0a8 100644
--- a/python/llm/src/ipex_llm/transformers/models/minicpmv.py
+++ b/python/llm/src/ipex_llm/transformers/models/minicpmv.py
@@ -62,7 +62,7 @@ def siglip_attention_forward(
         import xe_addons
         attn_weights = None
         attn_output = xe_addons.siglip_sdp_non_causal(query_states, key_states,
-                                                      value_states, attention_softmax)
+                                                      value_states, attention_mask)
     else:
         query_states, key_states, value_states = padding_qkv_hd(
             query_states, key_states, value_states,
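
Note: the fix passes the function's attention_mask argument to the fused kernel instead of the erroneous attention_softmax name. For context, a minimal PyTorch sketch of what the fused xe_addons kernel is assumed to compute, i.e. non-causal scaled-dot-product attention with an additive mask; siglip_sdp_non_causal_reference is a hypothetical name for illustration, not the real xe_addons implementation:

import torch.nn.functional as F

def siglip_sdp_non_causal_reference(query_states, key_states, value_states,
                                    attention_mask=None):
    # query/key/value: (batch, num_heads, seq_len, head_dim)
    # attention_mask: additive mask broadcastable to
    # (batch, num_heads, seq_len, seq_len), or None
    return F.scaled_dot_product_attention(
        query_states, key_states, value_states,
        attn_mask=attention_mask,  # the fix: pass the mask, not the
                                   # out-of-scope `attention_softmax` name
        is_causal=False,           # SigLIP vision attention is non-causal
    )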