From 60a535db08be4621e8b2f52bb83caad81c693075 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Wed, 4 Sep 2019 17:15:21 +0800 Subject: [PATCH] Fix a minor documentation error. TODO: fix the broken docstrings of classes that inherit from classes outside this package --- fastNLP/core/metrics.py | 1 + fastNLP/core/optimizer.py | 11 +++++------ 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/fastNLP/core/metrics.py b/fastNLP/core/metrics.py index ec1a1864..72380fd6 100644 --- a/fastNLP/core/metrics.py +++ b/fastNLP/core/metrics.py @@ -152,6 +152,7 @@ class MetricBase(object): def get_metric_name(self): """ 返回metric的名称 + :return: """ return self._metric_name diff --git a/fastNLP/core/optimizer.py b/fastNLP/core/optimizer.py index 5e7c1cba..b782cfa6 100644 --- a/fastNLP/core/optimizer.py +++ b/fastNLP/core/optimizer.py @@ -120,12 +120,11 @@ class AdamW(TorchOptimizer): The original Adam algorithm was proposed in `Adam: A Method for Stochastic Optimization`_. The AdamW variant was proposed in `Decoupled Weight Decay Regularization`_. - .. _Adam\: A Method for Stochastic Optimization: - https://arxiv.org/abs/1412.6980 - .. _Decoupled Weight Decay Regularization: - https://arxiv.org/abs/1711.05101 - .. _On the Convergence of Adam and Beyond: - https://openreview.net/forum?id=ryQu7f-RZ + .. _Adam\: A Method for Stochastic Optimization: https://arxiv.org/abs/1412.6980 + + .. _Decoupled Weight Decay Regularization: https://arxiv.org/abs/1711.05101 + + .. _On the Convergence of Adam and Beyond: https://openreview.net/forum?id=ryQu7f-RZ """ def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8,