diff --git a/fastNLP/core/metrics.py b/fastNLP/core/metrics.py
index ec1a1864..72380fd6 100644
--- a/fastNLP/core/metrics.py
+++ b/fastNLP/core/metrics.py
@@ -152,6 +152,7 @@ class MetricBase(object):
     def get_metric_name(self):
         """
         返回metric的名称
+        :return:
         """
         return self._metric_name
 
diff --git a/fastNLP/core/optimizer.py b/fastNLP/core/optimizer.py
index 5e7c1cba..b782cfa6 100644
--- a/fastNLP/core/optimizer.py
+++ b/fastNLP/core/optimizer.py
@@ -120,12 +120,11 @@ class AdamW(TorchOptimizer):
     The original Adam algorithm was proposed in `Adam: A Method for Stochastic Optimization`_.
     The AdamW variant was proposed in `Decoupled Weight Decay Regularization`_.
 
-    .. _Adam\: A Method for Stochastic Optimization:
-        https://arxiv.org/abs/1412.6980
-    .. _Decoupled Weight Decay Regularization:
-        https://arxiv.org/abs/1711.05101
-    .. _On the Convergence of Adam and Beyond:
-        https://openreview.net/forum?id=ryQu7f-RZ
+    .. _Adam\: A Method for Stochastic Optimization: https://arxiv.org/abs/1412.6980
+
+    .. _Decoupled Weight Decay Regularization: https://arxiv.org/abs/1711.05101
+
+    .. _On the Convergence of Adam and Beyond: https://openreview.net/forum?id=ryQu7f-RZ
     """
 
     def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8,
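
For reviewers, a minimal usage sketch of the AdamW class whose docstring is touched above. This is an illustration, not part of the patch: the import path is inferred from the file name in the diff, the toy model and tensor shapes are made up, and only the constructor arguments visible in the hunk (params, lr, betas, eps) are used.

import torch
from fastNLP.core.optimizer import AdamW   # import path inferred from the diff; assumption

model = torch.nn.Linear(10, 2)                    # toy model, illustration only
optimizer = AdamW(model.parameters(), lr=1e-3,    # arguments visible in the hunk
                  betas=(0.9, 0.999), eps=1e-8)

loss = model(torch.randn(4, 10)).sum()            # dummy forward pass and loss
loss.backward()
optimizer.step()                                  # AdamW update with decoupled weight decay
optimizer.zero_grad()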