import mindspore.nn as nn


class DecoderLayer(nn.Cell):
    """One decoder block: self-attention, encoder-decoder cross-attention, and a
    position-wise feed-forward network, each followed by a residual connection
    and layer normalization."""

    def __init__(self, self_attention, cross_attention, d_model, d_ff=None,
                 dropout=0.1, activation="relu"):
        super(DecoderLayer, self).__init__()
        d_ff = d_ff or 4 * d_model
        self.self_attention = self_attention
        self.cross_attention = cross_attention
        # Feed-forward network implemented as two pointwise (kernel_size=1) convolutions.
        self.conv1 = nn.Conv1d(d_model, d_ff, kernel_size=1)
        self.conv2 = nn.Conv1d(d_ff, d_model, kernel_size=1)
        # MindSpore's LayerNorm takes the normalized shape as a tuple or list.
        self.norm1 = nn.LayerNorm((d_model,))
        self.norm2 = nn.LayerNorm((d_model,))
        self.norm3 = nn.LayerNorm((d_model,))
        # MindSpore 2.x: `p` is the drop probability (1.x used keep_prob = 1 - dropout).
        self.dropout = nn.Dropout(p=dropout)
        self.activation = nn.ReLU() if activation == "relu" else nn.GELU()

    def construct(self, x, cross, x_mask=None, cross_mask=None):
        # Self-attention over the decoder input.
        attn_output, _ = self.self_attention(x, x, x, x_mask)
        x = x + self.dropout(attn_output)
        x = self.norm1(x)

        # Cross-attention over the encoder output.
        attn_output, _ = self.cross_attention(x, cross, cross, cross_mask)
        x = x + self.dropout(attn_output)
        x = self.norm2(x)

        # Feed-forward sublayer: Conv1d expects (batch, channels, length), so swap
        # the sequence and feature axes before and after the convolutions.
        y = x.transpose(0, 2, 1)
        y = self.dropout(self.activation(self.conv1(y)))
        y = self.conv2(y).transpose(0, 2, 1)
        y = self.dropout(y)

        x = x + y
        x = self.norm3(x)

        return x


class Decoder(nn.Cell):
    """Stack of decoder layers with an optional final normalization layer."""

    def __init__(self, layers, norm_layer=None):
        super(Decoder, self).__init__()
        self.layers = nn.CellList(layers)  # CellList (not LayerList) registers the sub-layers
        self.norm = norm_layer

    def construct(self, x, cross, x_mask=None, cross_mask=None):
        for layer in self.layers:
            x = layer(x, cross, x_mask=x_mask, cross_mask=cross_mask)

        if self.norm is not None:
            x = self.norm(x)

        return x
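As a rough usage sketch: the layer expects attention cells that take `(query, key, value, mask)` and return an `(output, weights)` pair, and inputs shaped `(batch, length, d_model)`. The `SimpleAttention` cell below is a hypothetical placeholder used only to make the example runnable (it ignores the mask and is not part of the model above), and the snippet assumes the MindSpore 2.x API.

```python
import numpy as np
import mindspore as ms
import mindspore.nn as nn
import mindspore.ops as ops


class SimpleAttention(nn.Cell):
    """Hypothetical stand-in for the attention cells assumed by DecoderLayer:
    accepts (query, key, value, mask) and returns (output, attention weights)."""

    def __init__(self, d_model):
        super(SimpleAttention, self).__init__()
        self.scale = ms.Tensor(d_model ** -0.5, ms.float32)
        self.softmax = nn.Softmax(axis=-1)

    def construct(self, q, k, v, mask=None):
        scores = ops.matmul(q, k.transpose(0, 2, 1)) * self.scale
        attn = self.softmax(scores)  # mask handling omitted in this sketch
        return ops.matmul(attn, v), attn


d_model = 32
layers = [DecoderLayer(SimpleAttention(d_model), SimpleAttention(d_model), d_model)
          for _ in range(2)]
decoder = Decoder(layers, norm_layer=nn.LayerNorm((d_model,)))

x = ms.Tensor(np.random.randn(4, 24, d_model), ms.float32)      # decoder input
cross = ms.Tensor(np.random.randn(4, 48, d_model), ms.float32)  # encoder output
out = decoder(x, cross)
print(out.shape)  # (4, 24, 32)
```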