From 6db3637da6781cdb64a7d7c462b8f148aa8be28a Mon Sep 17 00:00:00 2001
From: huanghaian
Date: Tue, 9 Jan 2024 19:10:35 +0800
Subject: [PATCH] fix bug

---
 mmdet/models/layers/transformer/grounding_dino_layers.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/mmdet/models/layers/transformer/grounding_dino_layers.py b/mmdet/models/layers/transformer/grounding_dino_layers.py
index e1344793c51..50e858c63d3 100644
--- a/mmdet/models/layers/transformer/grounding_dino_layers.py
+++ b/mmdet/models/layers/transformer/grounding_dino_layers.py
@@ -238,17 +238,17 @@ def forward(self,
                     layer_id].self_attn_cfg.num_heads
                 if text_self_attention_masks is None:
                     # rec
-                    key_padding_mask = text_attention_mask
+                    l_key_padding_mask = text_attention_mask
                 else:
                     # phrase grounding
-                    key_padding_mask = None
+                    l_key_padding_mask = None
                 memory_text = self.text_layers[layer_id](
                     query=memory_text,
                     query_pos=(pos_text if pos_text is not None else None),
                     attn_mask=~text_self_attention_masks.repeat(
                         text_num_heads, 1, 1)
                     if text_self_attention_masks is not None else None,
                     # note we use ~ for mask here
-                    key_padding_mask=key_padding_mask,
+                    key_padding_mask=l_key_padding_mask,
                 )
                 output = layer(
                     query=output,
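
Why the rename fixes the bug: inside forward, the text branch previously rebound the name key_padding_mask, which is also the forward argument holding the visual padding mask consumed by the image layer call (output = layer(...)) later in the same loop iteration. Renaming the text-side local to l_key_padding_mask keeps the language mask and the visual mask separate. Below is a minimal sketch of the shadowing pattern and the fix; run_text_layer, run_image_layer, and forward_step are hypothetical stand-ins, not the actual mmdet code — only the variable naming mirrors the patch.

# Minimal sketch of the shadowing bug this patch fixes.
# The layer functions are hypothetical placeholders.

def run_text_layer(key_padding_mask):
    print('text layer received:', key_padding_mask)

def run_image_layer(key_padding_mask):
    print('image layer received:', key_padding_mask)

def forward_step(key_padding_mask, text_attention_mask,
                 text_self_attention_masks):
    # Text branch: choose the language-side padding mask.
    if text_self_attention_masks is None:  # rec
        l_key_padding_mask = text_attention_mask  # fixed: distinct local name
    else:  # phrase grounding
        l_key_padding_mask = None
    run_text_layer(key_padding_mask=l_key_padding_mask)

    # Image branch: key_padding_mask still holds the visual mask.
    # Before the fix, the assignment above reused the name
    # key_padding_mask, so this call received the text mask instead.
    run_image_layer(key_padding_mask=key_padding_mask)

forward_step(key_padding_mask='visual_mask',
             text_attention_mask='text_mask',
             text_self_attention_masks=None)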