forked from mindspore-Ecosystem/mindspore
Fix the BatchNorm1d and BatchNorm2d documentation bug, and fix slog printing the source path
twice in the log
This commit is contained in:
parent
7595c91730
commit
19a5e14b31
|
@@ -96,6 +96,13 @@ static int GetGlogLevel(MsLogLevel level) {
  }
}
#else

#undef Dlog
#define Dlog(module_id, level, format, ...) \
  do { \
    DlogInner((module_id), (level), (format), ##__VA_ARGS__); \
  } while (0)

// convert MsLogLevel to corresponding slog level
static int GetSlogLevel(MsLogLevel level) {
  switch (level) {
@@ -136,6 +136,7 @@ class BatchNorm1d(_BatchNorm):
        eps (float): A value added to the denominator for numerical stability. Default: 1e-5.
        momentum (float): A floating hyperparameter of the momentum for the
            running_mean and running_var computation. Default: 0.9.
        affine (bool): A bool value when set to True, gamma and beta can be learnable. Default: True.
        gamma_init (Union[Tensor, str, Initializer, numbers.Number]): Initializer for the gamma weight.
            The values of str refer to the function `initializer` including 'zeros', 'ones', 'xavier_uniform',
            'he_uniform', etc. Default: 'ones'.
@@ -187,6 +188,7 @@ class BatchNorm2d(_BatchNorm):
        eps (float): A value added to the denominator for numerical stability. Default: 1e-5.
        momentum (float): A floating hyperparameter of the momentum for the
            running_mean and running_var computation. Default: 0.9.
        affine (bool): A bool value when set to True, gamma and beta can be learnable. Default: True.
        gamma_init (Union[Tensor, str, Initializer, numbers.Number]): Initializer for the gamma weight.
            The values of str refer to the function `initializer` including 'zeros', 'ones', 'xavier_uniform',
            'he_uniform', etc. Default: 'ones'.
|
Loading…
Reference in New Issue