
Update attn.py

main
BBing 2 years ago
parent
commit ce2a6b1082
1 changed file with 0 additions and 1 deletion:
  +0  -1   models/attn.py

models/attn.py   +0  -1

@@ -4,7 +4,6 @@ import mindspore.ops as ops
 from mindspore import Tensor
 from mindspore.common import dtype as mstype
 
-1
 class FullAttention(nn.Module):
     def __init__(self, mask_flag=True, factor=5, scale=None, attention_dropout=0.1, output_attention=False):
         super(FullAttention, self).__init__()
