
add dropout in MLP layers

tags/v0.2.0
xuyige 6 years ago
commit a6ecc8be83
1 changed file with 6 additions and 3 deletions
  1. fastNLP/modules/decoder/MLP.py  +6  -3

@@ -4,12 +4,13 @@ from fastNLP.modules.utils import initial_parameter
 
 
 class MLP(nn.Module):
-    def __init__(self, size_layer, activation='relu', initial_method=None):
+    def __init__(self, size_layer, activation='relu', initial_method=None, dropout=0.0):
         """Multilayer Perceptrons as a decoder
 
         :param size_layer: list of int, define the size of MLP layers.
         :param activation: str or function, the activation function for hidden layers.
         :param initial_method: str, the name of init method.
+        :param dropout: float, the probability of dropout.
 
         .. note::
             There is no activation function applying on output layer.

@@ -24,6 +25,8 @@ class MLP(nn.Module):
             else:
                 self.hiddens.append(nn.Linear(size_layer[i-1], size_layer[i]))
 
+        self.dropout = nn.Dropout(p=dropout)
+
         actives = {
             'relu': nn.ReLU(),
             'tanh': nn.Tanh(),

@@ -38,8 +41,8 @@ class MLP(nn.Module):
 
 
     def forward(self, x):
         for layer in self.hiddens:
-            x = self.hidden_active(layer(x))
-        x = self.output(x)
+            x = self.dropout(self.hidden_active(layer(x)))
+        x = self.dropout(self.output(x))
         return x
 
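
For reference, a minimal usage sketch of the decoder after this change, based on the constructor signature shown in the diff. The import path, layer sizes, batch size, and dropout rate below are illustrative assumptions, not part of the commit:

import torch
from fastNLP.modules.decoder.MLP import MLP  # assumed import path, mirrors the changed file

# 256 -> 128 -> 10 MLP with ReLU on the hidden layer; with this commit,
# dropout (p=0.5 here) is applied after each hidden activation and after the output layer.
mlp = MLP([256, 128, 10], activation='relu', dropout=0.5)

x = torch.randn(32, 256)   # a batch of 32 input vectors (sizes chosen for illustration)
y = mlp(x)                 # shape (32, 10); no activation on the output layer
print(y.shape)

Since the new forward pass applies self.dropout to the output layer as well as to the hidden activations, call mlp.eval() at inference time so the nn.Dropout modules are disabled.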





