Commit 5502abb

refine docstrings
1 parent 9580c45 commit 5502abb

5 files changed: +10, -27 lines

paddle/gserver/layers/CudnnBatchNormLayer.cpp (+4, -18)

@@ -21,8 +21,6 @@ namespace paddle {
 
 REGISTER_LAYER(cudnn_batch_norm, CudnnBatchNormLayer);
 
-const double CudnnBatchNormLayer::MIN_EPS = 1E-5;
-
 bool CudnnBatchNormLayer::init(const LayerMap& layerMap,
                                const ParameterMap& parameterMap) {
   /* Initialize the basic parent class */
@@ -61,14 +59,8 @@ void CudnnBatchNormLayer::forward(PassType passType) {
   real* movingMean = movingMean_->getW()->getData();
   real* movingVar = movingVar_->getW()->getData();
 
-  /**
-   * If epsilon_ equals to 1e-5 and eps_ is assigned the value of
-   * static_cast<double>(epsilon_), The CUDNN_STATUS_BAD_PARAM error
-   * will occur due to eps_ value is less than
-   * CUDNN_BN_MIN_EPSILON.
-   * The following code is to ensure that the eps_ meets requirement.
-   */
-  eps_ = std::max(MIN_EPS, static_cast<double>(epsilon_));
+  // cuDNN does not allow an epsilon value less than CUDNN_BN_MIN_EPSILON.
+  eps_ = std::max(CUDNN_BN_MIN_EPSILON, static_cast<double>(epsilon_));
 
   if (!useGlobalStats_) {
     REGISTER_TIMER_INFO("CudnnBatchFwTimer", getName().c_str());
@@ -137,14 +129,8 @@ void CudnnBatchNormLayer::backward(const UpdateCallback& callback) {
   real* savedMean = savedMean_->getData();
   real* savedInvVar = savedInvVar_->getData();
 
-  /**
-   * If epsilon_ equals to 1e-5 and eps_ is assigned the value of
-   * static_cast<double>(epsilon_), The CUDNN_STATUS_BAD_PARAM error
-   * will occur due to eps_ value is less than
-   * CUDNN_BN_MIN_EPSILON.
-   * The following code is to ensure that the eps_ meets requirement.
-   */
-  eps_ = std::max(MIN_EPS, static_cast<double>(epsilon_));
+  // cuDNN does not allow an epsilon value less than CUDNN_BN_MIN_EPSILON.
+  eps_ = std::max(CUDNN_BN_MIN_EPSILON, static_cast<double>(epsilon_));
 
   auto create = [](MatrixPtr& m, size_t h, size_t w, real** p) {
     Matrix::resizeOrCreate(m, h, w, false, true);
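The change above drops the layer's private MIN_EPS constant (1E-5) and clamps directly against CUDNN_BN_MIN_EPSILON from cudnn.h, since cuDNN reports CUDNN_STATUS_BAD_PARAM when batch normalization is invoked with a smaller epsilon. As a rough illustration only (not PaddlePaddle code), the guard amounts to the following Python sketch; the constant's value is an assumption taken from the removed MIN_EPS:

    # Illustrative sketch of the clamp now done in forward()/backward().
    # Assumption: CUDNN_BN_MIN_EPSILON is 1e-5, matching the removed MIN_EPS.
    CUDNN_BN_MIN_EPSILON = 1e-5

    def resolve_eps(configured_epsilon):
        """Return an epsilon value that is safe to hand to cuDNN."""
        return max(CUDNN_BN_MIN_EPSILON, float(configured_epsilon))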

paddle/gserver/layers/CudnnBatchNormLayer.h (+1, -3)

@@ -14,6 +14,7 @@ limitations under the License. */
 
 #pragma once
 
+#include <cudnn.h>
 #include "BatchNormBaseLayer.h"
 #include "Layer.h"
 #include "paddle/utils/Stat.h"
@@ -46,9 +47,6 @@ class CudnnBatchNormLayer : public BatchNormBaseLayer {
   void backward(const UpdateCallback& callback = nullptr) override;
 
 protected:
-  /// Minimum allowed value is CUDNN_BN_MIN_EPSILON defined in cudnn.h.
-  static const double MIN_EPS;
-
   /// Epsilon value used in the batch normalization formula.
   /// Same epsilon value should be used in forward and backward functions.
   double eps_;

proto/ModelConfig.proto (+1, -1)

@@ -542,7 +542,7 @@ message LayerConfig {
   optional ReshapeConfig reshape_conf = 59;
 
   // for batch normalization layer
-  // small constant added to the variance to avoid numerical problems.
+  // The small constant added to the variance to improve numeric stability.
   optional double epsilon = 60 [ default = 0.00001 ];
 }
 

python/paddle/trainer/config_parser.py (+3, -2)

@@ -2483,8 +2483,9 @@ def __init__(self,
         self.config.use_global_stats = use_global_stats
         if moving_average_fraction is not None:
             self.config.moving_average_fraction = moving_average_fraction
-
-        self.config.epsilon = epsilon
+        if epsilon is not None:
+            assert epsilon >= 1e-5, "epsilon must be no less than 1e-5."
+            self.config.epsilon = epsilon
 
         input_layer = self.get_input_layer(0)
         image_conf = self.config.inputs[0].image_conf
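With this change, the epsilon range check is enforced during config parsing, and the proto field is only written when the caller actually supplies a value, so the proto default of 0.00001 applies otherwise. In isolation, the added guard behaves like this simplified sketch (a hypothetical stand-in, not the actual layer class):

    # Simplified stand-in for the config-parsing guard added above.
    def apply_epsilon(config, epsilon):
        # Leave the proto default (1e-5) in place when nothing was supplied.
        if epsilon is not None:
            assert epsilon >= 1e-5, "epsilon must be no less than 1e-5."
            config.epsilon = epsilon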

python/paddle/trainer_config_helpers/layers.py (+1, -3)

@@ -3107,7 +3107,7 @@ def batch_norm_layer(input,
                              will use the mean and variance of the current batch
                              of test data.
     :type use_global_stats: bool | None.
-    :param epsilon: Small constant added to the variance to avoid numerical problems.
+    :param epsilon: The small constant added to the variance to improve numeric stability.
     :type epsilon: float.
     :param moving_average_fraction: Factor used in the moving average computation.
                                     :math:`runningMean = newMean*(1-factor) + runningMean*factor`
@@ -3127,8 +3127,6 @@ def batch_norm_layer(input,
           (batch_norm_type == "mkldnn_batch_norm") or \
           (batch_norm_type == "cudnn_batch_norm")
 
-    assert epsilon >= 1e-5, "epsilon must be no less than 1e-5."
-
     l = Layer(
         name=name,
         img3D=img3D,
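Taken together, epsilon remains documented and exposed on batch_norm_layer, while the range check now lives in config_parser.py. A hedged usage sketch follows; it assumes a v1 trainer config where the helpers are star-imported and `conv` is an existing image convolution layer output (both the import style and `conv` are illustrative assumptions):

    # Illustrative v1 config snippet; `conv` and the import style are assumptions.
    from paddle.trainer_config_helpers import *

    bn = batch_norm_layer(
        input=conv,
        batch_norm_type="cudnn_batch_norm",  # or "batch_norm" / "mkldnn_batch_norm"
        epsilon=1e-5,  # values below 1e-5 now fail the assert during config parsing
        moving_average_fraction=0.9)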
