Commit

Remove padding_masks from gpt_bigcode. (huggingface#27348)
Update modeling_gpt_bigcode.py
susnato authored Nov 7, 2023
1 parent 8c91f15 commit cc9f27b
Showing 1 changed file with 0 additions and 15 deletions.
15 changes: 0 additions & 15 deletions src/transformers/models/gpt_bigcode/modeling_gpt_bigcode.py
@@ -235,16 +235,10 @@ def forward(
         encoder_attention_mask: Optional[torch.Tensor] = None,
         use_cache: Optional[bool] = False,
         output_attentions: Optional[bool] = False,
-        **kwargs,
     ) -> Union[
         Tuple[torch.Tensor, Optional[torch.Tensor]],
         Tuple[torch.Tensor, Optional[torch.Tensor], Tuple[torch.Tensor, ...]],
     ]:
-        if "padding_mask" in kwargs:
-            logger.warning_once(
-                "Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure use `attention_mask` instead.`"
-            )
-
         if encoder_hidden_states is not None:
             if not hasattr(self, "q_attn") or not self.is_cross_attention:
                 raise ValueError(
@@ -308,19 +302,10 @@ def forward(
         encoder_attention_mask: Optional[torch.Tensor] = None,
         use_cache: Optional[bool] = False,
         output_attentions: Optional[bool] = False,
-        **kwargs,
     ) -> Union[
         Tuple[torch.Tensor, Optional[torch.Tensor]],
         Tuple[torch.Tensor, Optional[torch.Tensor], Tuple[torch.Tensor, ...]],
     ]:
-        if "padding_mask" in kwargs:
-            logger.warning_once(
-                "Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure use `attention_mask` instead.`"
-            )
-
-            # overwrite attention_mask with padding_mask
-            attention_mask = kwargs.pop("padding_mask")
-
         if encoder_hidden_states is not None:
             if not hasattr(self, "q_attn") or not self.is_cross_attention:
                 raise ValueError(
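For downstream users, the change does not affect the public API: batched inputs are masked through the standard `attention_mask` argument, which is what the removed deprecation warning pointed to. A minimal usage sketch follows; the checkpoint name and the pad-token assignment are illustrative assumptions, not part of this commit.

# Minimal sketch: mask padded tokens via `attention_mask` (any GPTBigCode checkpoint
# works the same way; "bigcode/gpt_bigcode-santacoder" is just an example).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bigcode/gpt_bigcode-santacoder")
tokenizer.pad_token = tokenizer.eos_token  # assumption: the checkpoint defines no pad token
model = AutoModelForCausalLM.from_pretrained("bigcode/gpt_bigcode-santacoder")

batch = tokenizer(["def add(a, b):", "print('hello')"], return_tensors="pt", padding=True)
with torch.no_grad():
    # `attention_mask` (1 for real tokens, 0 for padding) is the supported way to mask
    # padding; the internal `padding_mask` shim removed here is no longer involved.
    out = model(input_ids=batch["input_ids"], attention_mask=batch["attention_mask"])
print(out.logits.shape)  # (batch_size, seq_len, vocab_size)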
