forked from mindspore-Ecosystem/mindspore
!15111 [MSLITE] refix train tanhgrad bug
From: @zhengjun10
Reviewed-by: @HilbertDavid, @hangangqiang
Signed-off-by: @HilbertDavid
Commit: e30fe568e7
@@ -66,7 +66,7 @@ int ActivationGradCPUKernel::DoActivation(int task_id) {
     // Sigmoid gets the input tensors in reverse order!
     error_code = SigmoidGrad(input_addr + start, yt_addr + start, count, output_addr + start);
   } else if (param_act_grad_->type_ == schema::ActivationType_TANH) {
-    error_code = TanhGrad(yt_addr + start, input_addr + start, count, output_addr + start);
+    error_code = TanhGrad(input_addr + start, yt_addr + start, count, output_addr + start);
   } else if (param_act_grad_->type_ == schema::ActivationType_HSWISH) {
     error_code = HSwishGrad(yt_addr + start, input_addr + start, count, output_addr + start);
   } else if (param_act_grad_->type_ == schema::ActivationType_HSIGMOID) {
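The hunk above swaps the first two arguments passed to TanhGrad. For tanh, the backward pass computes dx = dy * (1 - y^2), so the forward output y and the incoming gradient dy are not interchangeable; passing them in the wrong order silently produces wrong training gradients. The sketch below is a minimal, self-contained illustration of that formula with hypothetical names; it is not the MindSpore Lite NNACL implementation, and the real kernel's parameter convention is an assumption here, not verified against the source.

#include <cstddef>

// Hypothetical helper mirroring a TanhGrad(y, dy, count, dx)-style call; the
// actual parameter order of the real kernel is assumed, not confirmed.
int TanhGradSketch(const float *y, const float *dy, size_t count, float *dx) {
  for (size_t i = 0; i < count; ++i) {
    dx[i] = dy[i] * (1.0f - y[i] * y[i]);  // d(tanh(x))/dx = 1 - tanh(x)^2, with y = tanh(x)
  }
  return 0;
}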
@@ -856,11 +856,12 @@ int AnfExporter::ProcessValueSequence(const ValueNodePtr &value_node, std::uniqu
   (*schema_tensor)->dims = {static_cast<int32_t>(shape.size())};
   (*schema_tensor)->nodeType = NodeType_ValueNode;
   (*schema_tensor)->data.resize(shape.size() * sizeof(int));
-  ret = memcpy_s((*schema_tensor)->data.data(), shape.size() * sizeof(int32_t), shape.data(),
-                 shape.size() * sizeof(int32_t));
-  if (ret != RET_OK) {
-    MS_LOG(ERROR) << "memcpy_s data into schema_tensor failed.";
-    return RET_ERROR;
+  if (!shape.empty()) {
+    if (EOK != memcpy_s((*schema_tensor)->data.data(), shape.size() * sizeof(int32_t), shape.data(),
+                        shape.size() * sizeof(int32_t))) {
+      MS_LOG(ERROR) << "memcpy_s data into schema_tensor failed.";
+      return RET_MEMORY_FAILED;
+    }
   }
   node_id_map_[value_node->fullname_with_scope()] = meta_graphT->allTensors.size();
   output_cnode->inputIndex.emplace_back(meta_graphT->allTensors.size());
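The second hunk guards the bounds-checked copy with an emptiness test and reports a failed copy as RET_MEMORY_FAILED instead of the generic RET_ERROR. The sketch below shows the same pattern with hypothetical names (SafeCopy stands in for securec's memcpy_s, and the constants stand in for RET_OK / RET_MEMORY_FAILED); it is an illustration of the guard, not the AnfExporter source. The point of the check is that for an empty shape vector shape.data() may be null and the copy length is zero, so skipping the call avoids a spurious copy error.

#include <cstdint>
#include <cstring>
#include <vector>

// Imitates memcpy_s semantics: nonzero return on bad arguments or overflow.
static int SafeCopy(void *dest, size_t dest_max, const void *src, size_t count) {
  if (dest == nullptr || src == nullptr || count > dest_max) {
    return 1;
  }
  std::memcpy(dest, src, count);
  return 0;  // securec calls this EOK
}

constexpr int kRetOk = 0;             // stand-in for RET_OK
constexpr int kRetMemoryFailed = -2;  // stand-in for RET_MEMORY_FAILED

int PackShape(const std::vector<int32_t> &shape, std::vector<uint8_t> *data) {
  data->resize(shape.size() * sizeof(int32_t));
  // For an empty shape there is nothing to copy and shape.data() may be null,
  // so skip the copy entirely instead of letting it report an error.
  if (!shape.empty()) {
    if (SafeCopy(data->data(), data->size(), shape.data(), shape.size() * sizeof(int32_t)) != 0) {
      return kRetMemoryFailed;
    }
  }
  return kRetOk;
}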