This repository was archived by the owner on Sep 10, 2025. It is now read-only.

Commit b061086

committed: merge changes
2 parents: 16123f0 + d857e4f

File tree

1 file changed (+0, -1 lines)


torchtext/models/roberta/modules.py

Lines changed: 0 additions & 1 deletion
@@ -286,7 +286,6 @@ def __init__(
     def forward(self, tokens: torch.Tensor, attn_mask: Optional[torch.Tensor] = None) -> Union[torch.Tensor, List[torch.Tensor]]:
         if attn_mask is not None:
             torch._assert(attn_mask.is_floating_point() or attn_mask.dtype == torch.bool, f"Only float or bool types are supported for attn_mask not {attn_mask.dtype}")
-
         padding_mask = tokens.eq(self.padding_idx)
 
         token_embeddings = self.token_embedding(tokens)
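
For context, a minimal runnable sketch of the masking logic this hunk touches, assuming a padding_idx of 1 (the usual RoBERTa convention); the token values and shapes here are illustrative, not taken from the commit:

import torch

padding_idx = 1  # assumed: RoBERTa conventionally reserves index 1 for padding

tokens = torch.tensor([[5, 8, 3, padding_idx, padding_idx]])  # (batch, seq_len)
attn_mask = torch.zeros(5, 5, dtype=torch.bool)               # boolean attention mask

# The dtype guard kept by this hunk: only float or bool masks are accepted.
torch._assert(
    attn_mask.is_floating_point() or attn_mask.dtype == torch.bool,
    f"Only float or bool types are supported for attn_mask not {attn_mask.dtype}",
)

# Padding positions are recovered directly from the token ids; this is the
# line that now follows the assert, with the intervening blank line removed.
padding_mask = tokens.eq(padding_idx)
print(padding_mask)  # tensor([[False, False, False,  True,  True]])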
