From 2b182633e9989fe857eeb482c321585ac72a5b27 Mon Sep 17 00:00:00 2001
From: "wangnan39@huawei.com" <wangnan39@huawei.com>
Date: Wed, 22 Jul 2020 12:18:33 +0800
Subject: [PATCH] delete annotation of decay filter in optimizers

---
 mindspore/nn/optim/adam.py | 2 --
 mindspore/nn/optim/lamb.py | 2 --
 2 files changed, 4 deletions(-)

diff --git a/mindspore/nn/optim/adam.py b/mindspore/nn/optim/adam.py
index ad7096a93f7..b823bf69f30 100755
--- a/mindspore/nn/optim/adam.py
+++ b/mindspore/nn/optim/adam.py
@@ -398,8 +398,6 @@ class AdamWeightDecay(Optimizer):
         eps (float): Term added to the denominator to improve numerical stability. Default: 1e-6.
             Should be greater than 0.
         weight_decay (float): Weight decay (L2 penalty). It should be in range [0.0, 1.0]. Default: 0.0.
-        decay_filter (Function): A function to determine whether to apply weight decay on parameters. Default:
-            lambda x: 'LayerNorm' not in x.name and 'bias' not in x.name.
 
     Inputs:
         - **gradients** (tuple[Tensor]) - The gradients of `params`, the shape is the same as `params`.
diff --git a/mindspore/nn/optim/lamb.py b/mindspore/nn/optim/lamb.py
index d80facfbb18..0d2552b8c14 100755
--- a/mindspore/nn/optim/lamb.py
+++ b/mindspore/nn/optim/lamb.py
@@ -228,8 +228,6 @@ class Lamb(Optimizer):
         eps (float): Term added to the denominator to improve numerical stability. Default: 1e-6.
             Should be greater than 0.
         weight_decay (float): Weight decay (L2 penalty). Default: 0.0. Should be in range [0.0, 1.0].
-        decay_filter (Function): A function to determine whether to apply weight decay on parameters. Default:
-            lambda x: 'LayerNorm' not in x.name and 'bias' not in x.name.
 
     Inputs:
         - **gradients** (tuple[Tensor]) - The gradients of `params`, the shape is the same as `params`.
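
Note (not part of the patch): the deleted docstring lines described a `decay_filter` argument whose default skipped weight decay for `LayerNorm` and bias parameters. The sketch below is a minimal, hypothetical illustration of how the same selective weight decay could be expressed through MindSpore's group-parameter format instead; the `SimpleNet` class, the decay value of 0.01, and the assumption that `AdamWeightDecay` accepts grouped parameters are all illustrative assumptions, not something this patch specifies.

```python
# Hypothetical sketch: selective weight decay via grouped parameters,
# mirroring the behaviour of the removed default decay_filter
# (no decay on LayerNorm or bias parameters).
import mindspore.nn as nn


class SimpleNet(nn.Cell):
    """Toy network used only for illustration."""

    def __init__(self):
        super(SimpleNet, self).__init__()
        self.dense = nn.Dense(16, 16)
        self.layernorm = nn.LayerNorm((16,))

    def construct(self, x):
        return self.layernorm(self.dense(x))


net = SimpleNet()
all_params = net.trainable_params()

# Split parameters the way the old default filter did: decay everything
# except LayerNorm weights and biases.
decay_params = [p for p in all_params
                if 'layernorm' not in p.name.lower() and 'bias' not in p.name.lower()]
no_decay_params = [p for p in all_params if p not in decay_params]

group_params = [{'params': decay_params, 'weight_decay': 0.01},
                {'params': no_decay_params, 'weight_decay': 0.0}]

optimizer = nn.AdamWeightDecay(group_params, learning_rate=1e-3, eps=1e-6)
```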