!8775 remove "is_grad" in SoftmaxCrossEntropyWithLogits and correct its comments

From: @wanyiming
Reviewed-by: @kingxian,@zh_qh
Signed-off-by: @kingxian
mindspore-ci-bot 2020-12-01 16:26:16 +08:00 committed by Gitee
commit ca66aef549
10 changed files with 23 additions and 29 deletions
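
The change is mechanical across all ten files: the removed `is_grad` keyword argument is dropped from every `SoftmaxCrossEntropyWithLogits` constructor call, and the surrounding comments and docstring examples are updated to match. A minimal before/after sketch of the migration (editorial illustration, not part of the commit; the alias `nn` follows the examples below):

```python
import mindspore.nn as nn

# Before this commit, callers passed the flag explicitly:
#   criterion = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean")
# After this commit, the parameter no longer exists, so it is simply dropped:
criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
```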

View File

@@ -100,7 +100,7 @@ The loss function `SoftmaxCrossEntropyWithLogits` and the optimizer `AdamWeightD
 if __name__ == "__main__":
 ...
 # define the loss function
-criterion = SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean")
+criterion = SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
 optimizer = AdamWeightDecay(params=network.trainable_params(), learning_rate=0.0001)
 ...
 ```
@@ -320,7 +320,7 @@ from mindspore.nn import WithLossCell, TrainOneStepCell
 if __name__ == "__main__":
 network = LeNet5()
-criterion = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean")
+criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
 optimizer = nn.AdamWeightDecay(params=network.trainable_params(), learning_rate=0.0001)
 net_with_loss = WithLossCell(network, criterion)
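
The hunk header above also imports `TrainOneStepCell`; in the tutorial, the loss-wrapped network is then bundled with the optimizer into a single trainable cell. A hedged sketch of that continuation under the new loss signature, assuming `LeNet5`, `data`, and `label` are defined as in the tutorial:

```python
import mindspore.nn as nn
from mindspore.nn import WithLossCell, TrainOneStepCell

network = LeNet5()  # network class defined earlier in the tutorial
criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
optimizer = nn.AdamWeightDecay(params=network.trainable_params(), learning_rate=0.0001)

net_with_loss = WithLossCell(network, criterion)            # fuse forward pass and loss
train_network = TrainOneStepCell(net_with_loss, optimizer)  # add backward pass and update
train_network.set_train()
loss = train_network(data, label)  # one training step; data/label are placeholder tensors
```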

View File

@@ -53,7 +53,7 @@ class TransformToBNN:
 >>> return out
 >>>
 >>> net = Net()
->>> criterion = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
 >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
 >>> net_with_loss = WithLossCell(network, criterion)
 >>> train_network = TrainOneStepCell(net_with_loss, optim)
@@ -107,7 +107,7 @@ class TransformToBNN:
 Examples:
 >>> net = Net()
->>> criterion = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
 >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
 >>> net_with_loss = WithLossCell(network, criterion)
 >>> train_network = TrainOneStepCell(net_with_loss, optim)
@@ -149,7 +149,7 @@ class TransformToBNN:
 Examples:
 >>> net = Net()
->>> criterion = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
 >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
 >>> net_with_loss = WithLossCell(network, criterion)
 >>> train_network = TrainOneStepCell(net_with_loss, optim)
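
The class under edit converts a trained DNN cell into a Bayesian network. As a rough sketch of how the docstring pieces above fit together; the constructor arguments and the `transform_to_bnn_model` call are assumptions based on this module's documented API around this release, not shown in the hunks:

```python
from mindspore.nn.probability.transforms import TransformToBNN

# Wrap the deterministic training cell, then convert it;
# dnn_factor/bnn_factor weight the task loss against the KL term.
bnn_transformer = TransformToBNN(train_network, dnn_factor=60000, bnn_factor=0.000001)
train_bnn_network = bnn_transformer.transform_to_bnn_model()
```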

View File

@@ -425,8 +425,7 @@ If you need to use the trained model to perform inference on multiple hardware p
 net = GoogleNet(num_classes=cfg.num_classes)
 opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01,
 cfg.momentum, weight_decay=cfg.weight_decay)
-loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean',
-is_grad=False)
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
 model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'})
 # Load pre-trained model
@@ -452,8 +451,7 @@ If you need to use the trained model to perform inference on multiple hardware p
 net = GoogleNet(num_classes=cfg.num_classes)
 opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01,
 cfg.momentum, weight_decay=cfg.weight_decay)
-loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean',
-is_grad=False)
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
 model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'})
 # Load pre-trained model
@@ -486,7 +484,7 @@ If you need to use the trained model to perform inference on multiple hardware p
 steps_per_epoch=batch_num)
 opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()),
 Tensor(lr), cfg.momentum, weight_decay=cfg.weight_decay)
-loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', is_grad=False)
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
 model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'},
 amp_level="O2", keep_batchnorm_fp32=False, loss_scale_manager=None)
@@ -520,7 +518,7 @@ If you need to use the trained model to perform inference on multiple hardware p
 steps_per_epoch=batch_num)
 opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()),
 Tensor(lr), cfg.momentum, weight_decay=cfg.weight_decay)
-loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', is_grad=False)
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
 model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'},
 amp_level="O2", keep_batchnorm_fp32=False, loss_scale_manager=None)
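
Beyond the `is_grad` removal, these hunks show the mixed-precision configuration the GoogleNet tutorial uses: `amp_level="O2"` casts the backbone to float16, `keep_batchnorm_fp32=False` keeps BatchNorm in float16 as well, and `loss_scale_manager=None` applies no loss scaling. A hedged sketch of that pattern, assuming `net`, `lr`, and `cfg` are defined as in the surrounding script:

```python
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.train import Model

opt = nn.Momentum(filter(lambda x: x.requires_grad, net.get_parameters()),
                  Tensor(lr), cfg.momentum, weight_decay=cfg.weight_decay)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'},
              amp_level="O2",             # run the network in float16
              keep_batchnorm_fp32=False,  # BatchNorm in float16 too
              loss_scale_manager=None)    # no loss scaling
```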

View File

@@ -135,7 +135,7 @@ class Model:
 >>> return out
 >>>
 >>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
 >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
 >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None)
 >>> dataset = get_dataset()
@@ -307,7 +307,7 @@ class Model:
 >>> train_dataset = get_train_dataset()
 >>> valid_dataset = get_valid_dataset()
 >>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
 >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
 >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics={'acc'})
 >>> model.init(train_dataset, valid_dataset)
@@ -597,7 +597,7 @@ class Model:
 Examples:
 >>> dataset = get_dataset()
 >>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
 >>> loss_scale_manager = FixedLossScaleManager()
 >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
 >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None, loss_scale_manager=loss_scale_manager)
@@ -714,7 +714,7 @@ class Model:
 Examples:
 >>> dataset = get_dataset()
 >>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
 >>> model = Model(net, loss_fn=loss, optimizer=None, metrics={'acc'})
 >>> model.eval(dataset)
 """

View File

@@ -243,8 +243,7 @@ https://www.mindspore.cn/tutorial/zh-CN/master/use/multi_platform_inference.html
 net = GoogleNet(num_classes=cfg.num_classes)
 opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01,
 cfg.momentum, weight_decay=cfg.weight_decay)
-loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean',
-is_grad=False)
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
 model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'})
 # Load pre-trained model
@@ -275,7 +274,7 @@ https://www.mindspore.cn/tutorial/zh-CN/master/use/multi_platform_inference.html
 steps_per_epoch=batch_num)
 opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()),
 Tensor(lr), cfg.momentum, weight_decay=cfg.weight_decay)
-loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', is_grad=False)
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
 model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'},
 amp_level="O2", keep_batchnorm_fp32=False, loss_scale_manager=None)

View File

@@ -199,7 +199,7 @@ class NetWithLossClass(nn.Cell):
 """
 def __init__(self, network):
 super(NetWithLossClass, self).__init__(auto_prefix=False)
-#self.loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+#self.loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
 self.loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
 self.network = network
 self.reducesum = P.ReduceSum(keep_dims=False)
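
Only the `__init__` of this wrapper appears in the hunk. A hypothetical `construct` following the usual fuse-network-and-loss pattern is sketched below; the real method in this file may differ, for instance by also using the `ReduceSum` primitive created above:

```python
def construct(self, data, label):
    # Forward pass through the wrapped network, then score the
    # logits against the sparse labels with the fused loss.
    predict = self.network(data)
    return self.loss(predict, label)
```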

View File

@@ -50,8 +50,7 @@ if __name__ == '__main__':
 else:
 raise ValueError("Unsupport platform.")
-loss = nn.SoftmaxCrossEntropyWithLogits(
-is_grad=False, sparse=True, reduction='mean')
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
 if args_opt.model == 'ghostnet':
 net = ghostnet_1x(num_classes=config_platform.num_classes)

View File

@@ -49,8 +49,7 @@ if __name__ == '__main__':
 else:
 raise ValueError("Unsupport platform.")
-loss = nn.SoftmaxCrossEntropyWithLogits(
-is_grad=False, sparse=True, reduction='mean')
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
 net = ghostnet_1x(num_classes=config_platform.num_classes)

View File

@@ -53,8 +53,7 @@ if __name__ == '__main__':
 else:
 raise ValueError("Unsupport platform.")
-loss = nn.SoftmaxCrossEntropyWithLogits(
-is_grad=False, sparse=True, reduction='mean')
+loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
 if args_opt.platform == "Ascend":
 net.to_float(mstype.float16)
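
This evaluation script casts the whole backbone to float16 when the target is Ascend, while the loss cell built just above stays in float32. A hedged sketch of the surrounding pattern, assuming `ghostnet_1x`, `config_platform`, and `args_opt` are defined as in the script:

```python
import mindspore.nn as nn
from mindspore.common import dtype as mstype

loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
net = ghostnet_1x(num_classes=config_platform.num_classes)
if args_opt.platform == "Ascend":
    # Ascend favors float16 kernels; cast the network's compute dtype down.
    net.to_float(mstype.float16)
```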

View File

@@ -128,7 +128,7 @@ class Model:
 >>> return out
 >>>
 >>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
 >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
 >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None)
 >>> dataset = get_dataset()
@@ -295,7 +295,7 @@ class Model:
 >>> train_dataset = get_train_dataset()
 >>> valid_dataset = get_valid_dataset()
 >>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
 >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
 >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics={'acc'})
 >>> model.init(train_dataset, valid_dataset)
@@ -566,7 +566,7 @@ class Model:
 Examples:
 >>> dataset = get_dataset()
 >>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
 >>> loss_scale_manager = FixedLossScaleManager()
 >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
 >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None, loss_scale_manager=loss_scale_manager)
@@ -678,7 +678,7 @@ class Model:
 Examples:
 >>> dataset = get_dataset()
 >>> net = Net()
->>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
 >>> model = Model(net, loss_fn=loss, optimizer=None, metrics={'acc'})
 >>> model.eval(dataset)
 """