diff --git a/fastNLP/embeddings/bert_embedding.py b/fastNLP/embeddings/bert_embedding.py
index 80a5b45f..9bedd983 100644
--- a/fastNLP/embeddings/bert_embedding.py
+++ b/fastNLP/embeddings/bert_embedding.py
@@ -294,7 +294,7 @@ class _WordBertModel(nn.Module):
         word_mask = words.ne(self._pad_index)
         seq_len = word_mask.sum(dim=-1)
         batch_word_pieces_length = self.word_pieces_lengths[words]  # batch_size x max_len
-        word_pieces_lengths = batch_word_pieces_length.masked_fill(word_mask, 0).sum(dim=-1)
+        word_pieces_lengths = batch_word_pieces_length.masked_fill(word_mask.eq(0), 0).sum(dim=-1)
         max_word_piece_length = word_pieces_lengths.max().item()
         real_max_word_piece_length = max_word_piece_length  # the word-piece length before any truncation
         if max_word_piece_length+2>self._max_position_embeddings:
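
The change corrects inverted mask semantics: Tensor.masked_fill(mask, value) writes value where the mask is True, so passing word_mask (True at real tokens) zeroed the real word-piece lengths and summed only the padding entries. Below is a minimal standalone sketch, not fastNLP code, with illustrative tensor values chosen here as an assumption, showing how the buggy and fixed calls differ.

import torch

pad_index = 0
words = torch.tensor([[3, 5, 0, 0]])            # one sequence, last two positions are padding
word_mask = words.ne(pad_index)                  # True at real tokens: [[True, True, False, False]]
batch_word_pieces_length = torch.tensor([[2, 3, 1, 1]])  # word pieces per token (pads carry junk)

# Buggy: fills where word_mask is True, i.e. zeroes the REAL tokens,
# so the sum counts only the padding entries -> tensor([2])
buggy = batch_word_pieces_length.masked_fill(word_mask, 0).sum(dim=-1)

# Fixed: fills where word_mask is False, i.e. zeroes the PADDING,
# so the sum counts the real word-piece lengths -> tensor([5])
fixed = batch_word_pieces_length.masked_fill(word_mask.eq(0), 0).sum(dim=-1)

print(buggy, fixed)  # tensor([2]) tensor([5])

With the fix, word_pieces_lengths holds the true per-sequence word-piece count, so the downstream max_word_piece_length and the position-embedding bound check operate on correct lengths.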