forked from mindspore-Ecosystem/mindspore
fix bug of freeze_bn default value
parent e9eee16e46
commit 9a12fab404
@@ -476,7 +476,7 @@ def export(network, *inputs, file_name, mean=127.5, std_dev=127.5, file_format='
 def convert_quant_network(network,
                           bn_fold=True,
-                          freeze_bn=1e7,
+                          freeze_bn=10000000,
                           quant_delay=(0, 0),
                           num_bits=(8, 8),
                           per_channel=(False, False),
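
Why the change matters: `1e7` is a float literal in Python, while `freeze_bn` is a global-step threshold and is expected to be an integer, so the float default could fail integer validation or behave inconsistently in step comparisons. A minimal sketch of the failure mode, using a hypothetical validator (not MindSpore's actual check):

    def check_freeze_bn(freeze_bn):
        # Hypothetical strict check: a step threshold must be an int.
        if not isinstance(freeze_bn, int):
            raise TypeError("freeze_bn should be an int, got %s"
                            % type(freeze_bn).__name__)
        return freeze_bn

    check_freeze_bn(10000000)  # ok: int
    check_freeze_bn(1e7)       # TypeError: 1e7 evaluates to a float
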
@@ -50,7 +50,8 @@ if __name__ == "__main__":
     # define fusion network
     network = LeNet5Fusion(cfg.num_classes)
     # convert fusion network to quantization aware network
-    network = quant.convert_quant_network(network, quant_delay=0, bn_fold=False, freeze_bn=10000)
+    network = quant.convert_quant_network(network, quant_delay=0, bn_fold=False, freeze_bn=10000,
+                                          per_channel=[True, False])
 
     # define loss
     net_loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean")
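
The example now passes `per_channel=[True, False]`. Going by the parameter order in the signature above, the first flag presumably applies to weights and the second to activations; per-channel scales mainly help weights, whose ranges can differ sharply across output channels. A toy illustration (NumPy, symmetric 8-bit scales; not MindSpore code) of why a per-channel scale preserves resolution for small-magnitude channels:

    import numpy as np

    # Two output channels with very different weight ranges.
    w = np.array([[0.01, -0.02, 0.015],
                  [1.50, -2.00, 1.800]])

    def scale(x, num_bits=8):
        # Symmetric quantization: map the max magnitude to the int8 range.
        return np.max(np.abs(x)) / (2 ** (num_bits - 1) - 1)

    per_layer = scale(w)                            # one scale for the whole tensor
    per_chan = np.array([scale(row) for row in w])  # one scale per output channel

    print(per_layer)  # ~0.01575: the small channel uses only ~1% of the int8 range
    print(per_chan)   # [~0.00016, ~0.01575]: each channel spans the full range
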