
1. Fix a bug in ClipGradientCallback; remove the print in LRSchedulerCallback (a pbar should be passed in later to handle printing). 2. Add a docstring note to MLP.

tags/v0.4.10
yh, 5 years ago
commit 3ea7de1673
2 changed files with 5 additions and 3 deletions
  1. fastNLP/core/callback.py (+4 -2)
  2. fastNLP/modules/decoder/MLP.py (+1 -1)

fastNLP/core/callback.py (+4 -2)

@@ -248,7 +248,10 @@ class GradientClipCallback(Callback):
         self.clip_value = clip_value
     
     def on_backward_end(self, model):
-        self.clip_fun(model.parameters(), self.clip_value)
+        if self.parameters is None:
+            self.clip_fun(model.parameters(), self.clip_value)
+        else:
+            self.clip_fun(self.parameters, self.clip_value)
 
 
 class CallbackException(BaseException):
@@ -306,7 +309,6 @@ class LRScheduler(Callback):
     
     def on_epoch_begin(self, cur_epoch, total_epoch):
         self.scheduler.step()
-        print("scheduler step ", "lr=", self.trainer.optimizer.param_groups[0]["lr"])
 
 
 class ControlC(Callback):
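For context, a minimal usage sketch of the two callbacks touched here, assuming the constructor arguments suggested by the attributes in the diff (parameters and clip_value for GradientClipCallback, a wrapped torch lr_scheduler for LRScheduler); the model and optimizer are hypothetical and only for illustration:

    import torch
    from fastNLP.core.callback import GradientClipCallback, LRScheduler

    # Hypothetical model and optimizer, for illustration only.
    model = torch.nn.Linear(10, 2)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

    # With the fix above, an explicit parameter list now takes effect:
    # only these parameters are clipped instead of model.parameters().
    clip_cb = GradientClipCallback(parameters=[model.weight], clip_value=5.0)

    # LRScheduler steps the wrapped torch scheduler at the beginning of each
    # epoch; after this commit it no longer prints the current learning rate.
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.5)
    lr_cb = LRScheduler(scheduler)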


fastNLP/modules/decoder/MLP.py (+1 -1)

@@ -7,7 +7,7 @@ from fastNLP.modules.utils import initial_parameter
 class MLP(nn.Module):
     """Multilayer Perceptrons as a decoder
 
-    :param list size_layer: list of int, define the size of MLP layers.
+    :param list size_layer: list of int, define the size of MLP layers. The number of layers is (len(size_layer)-1)//2 + 1.
     :param str activation: str or function, the activation function for hidden layers.
     :param str initial_method: the name of initialization method.
     :param float dropout: the probability of dropout.
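As a usage note, a small sketch of the documented constructor, assuming the parameters listed in the docstring above (size_layer, activation, dropout), that 'relu' is an accepted activation string, and the usual nn.Module forward call; the sizes are illustrative:

    import torch
    from fastNLP.modules.decoder.MLP import MLP

    # size_layer gives the width of each layer from input to output.
    mlp = MLP([64, 32, 10], activation='relu', dropout=0.1)

    x = torch.randn(8, 64)   # a batch of 8 feature vectors
    out = mlp(x)             # expected shape: (8, 10)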

