From 81bcb513141c9a67587fa94cf94a0533e488f3e5 Mon Sep 17 00:00:00 2001
From: "w.g" <32560866+pptt168@users.noreply.github.com>
Date: Thu, 22 Oct 2020 09:41:22 +0800
Subject: [PATCH] Fix error when activation=lambda x: x (#330)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: 路人咦 <1417954729@qq.com>
---
 fastNLP/modules/decoder/mlp.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/fastNLP/modules/decoder/mlp.py b/fastNLP/modules/decoder/mlp.py
index 10348324..e4df542d 100644
--- a/fastNLP/modules/decoder/mlp.py
+++ b/fastNLP/modules/decoder/mlp.py
@@ -71,8 +71,8 @@ class MLP(nn.Module):
                 f"the length of activation function list except {len(size_layer) - 2} but got {len(activation)}!")
         self.hidden_active = []
         for func in activation:
-            if callable(activation):
-                self.hidden_active.append(activation)
+            if callable(func):
+                self.hidden_active.append(func)
             elif func.lower() in actives:
                 self.hidden_active.append(actives[func])
             else:
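
For context, here is a minimal standalone sketch of the loop this commit fixes (not part of the patch; the helper name resolve_activations and the exact contents of actives are illustrative assumptions mirroring the hunk above). Before the change, callable(activation) tested the whole activation list instead of the current element func, so passing activation=lambda x: x fell through to func.lower() and raised AttributeError; after the change, plain callables are accepted alongside the named activations.

    import torch.nn as nn

    # Assumed subset of the string-to-module mapping used by MLP for named activations.
    actives = {'relu': nn.ReLU(), 'tanh': nn.Tanh(), 'sigmoid': nn.Sigmoid()}

    def resolve_activations(activation):
        """Map each entry of `activation` to a module/callable, as in the patched loop."""
        hidden_active = []
        for func in activation:
            if callable(func):  # fixed: check the current element, not the enclosing list
                hidden_active.append(func)
            elif func.lower() in actives:
                hidden_active.append(actives[func])
            else:
                raise ValueError(f"should set activation correctly: {activation}")
        return hidden_active

    # With the fix, an identity lambda works next to a named activation:
    print(resolve_activations(['relu', lambda x: x]))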