From a9e552949606dcd32af9a6cc206f0936d0202a47 Mon Sep 17 00:00:00 2001
From: zhengjun10
Date: Wed, 21 Apr 2021 16:50:35 +0800
Subject: [PATCH] fix activation grad bug

---
 .../runtime/kernel/arm/fp32_grad/activation_grad.cc |  2 +-
 mindspore/lite/tools/anf_exporter/anf_exporter.cc   | 11 ++++++-----
 2 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/mindspore/lite/src/runtime/kernel/arm/fp32_grad/activation_grad.cc b/mindspore/lite/src/runtime/kernel/arm/fp32_grad/activation_grad.cc
index 2f9dd9b7b6c..5c1ff35886f 100644
--- a/mindspore/lite/src/runtime/kernel/arm/fp32_grad/activation_grad.cc
+++ b/mindspore/lite/src/runtime/kernel/arm/fp32_grad/activation_grad.cc
@@ -66,7 +66,7 @@ int ActivationGradCPUKernel::DoActivation(int task_id) {
     // Sigmoid gets the input tensors in reverse order!
     error_code = SigmoidGrad(input_addr + start, yt_addr + start, count, output_addr + start);
   } else if (param_act_grad_->type_ == schema::ActivationType_TANH) {
-    error_code = TanhGrad(yt_addr + start, input_addr + start, count, output_addr + start);
+    error_code = TanhGrad(input_addr + start, yt_addr + start, count, output_addr + start);
   } else if (param_act_grad_->type_ == schema::ActivationType_HSWISH) {
     error_code = HSwishGrad(yt_addr + start, input_addr + start, count, output_addr + start);
   } else if (param_act_grad_->type_ == schema::ActivationType_HSIGMOID) {
diff --git a/mindspore/lite/tools/anf_exporter/anf_exporter.cc b/mindspore/lite/tools/anf_exporter/anf_exporter.cc
index 4576a286ed7..117ead818f7 100644
--- a/mindspore/lite/tools/anf_exporter/anf_exporter.cc
+++ b/mindspore/lite/tools/anf_exporter/anf_exporter.cc
@@ -854,11 +854,12 @@ int AnfExporter::ProcessValueSequence(const ValueNodePtr &value_node, std::uniqu
   (*schema_tensor)->dims = {static_cast<int>(shape.size())};
   (*schema_tensor)->nodeType = NodeType_ValueNode;
   (*schema_tensor)->data.resize(shape.size() * sizeof(int));
-  ret = memcpy_s((*schema_tensor)->data.data(), shape.size() * sizeof(int32_t), shape.data(),
-                 shape.size() * sizeof(int32_t));
-  if (ret != RET_OK) {
-    MS_LOG(ERROR) << "memcpy_s data into schema_tensor failed.";
-    return RET_ERROR;
+  if (!shape.empty()) {
+    if (EOK != memcpy_s((*schema_tensor)->data.data(), shape.size() * sizeof(int32_t), shape.data(),
+                        shape.size() * sizeof(int32_t))) {
+      MS_LOG(ERROR) << "memcpy_s data into schema_tensor failed.";
+      return RET_MEMORY_FAILED;
+    }
   }
   node_id_map_[value_node->fullname_with_scope()] = meta_graphT->allTensors.size();
   output_cnode->inputIndex.emplace_back(meta_graphT->allTensors.size());