fix fp16 bug: inverted nullptr check in BiasCPUFp16Kernel and missing data_size_ in TransposeFp16CPUKernel::Run

lzk 2021-02-02 18:35:48 -08:00
parent 6d764fe66c
commit 9c5fad32f9
2 changed files with 2 additions and 1 deletion


@@ -82,7 +82,7 @@ int BiasCPUFp16Kernel::Init() {
     return RET_NULL_PTR;
   }
   auto *bias = reinterpret_cast<float *>(bias_tensor->MutableData());
-  if (bias != nullptr) {
+  if (bias == nullptr) {
     MS_LOG(ERROR) << "bias is nullptr!";
     return RET_NULL_PTR;
   }
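The first hunk flips an inverted null check in BiasCPUFp16Kernel::Init(): the old code entered the error branch when bias was non-null, so a valid bias pointer triggered RET_NULL_PTR while an actual null pointer slipped past the guard. Below is a minimal standalone sketch of the corrected pattern; Tensor, MutableData(), and the RET_* values here are stand-ins modeled on the diff, not the real MindSpore Lite API.

// Sketch only: Tensor, MutableData(), and the RET_* codes are stand-ins
// for the MindSpore Lite types that appear in the diff.
#include <cstdio>

constexpr int RET_OK = 0;
constexpr int RET_NULL_PTR = -1;

struct Tensor {
  float *data = nullptr;
  void *MutableData() { return data; }  // may be nullptr if never allocated
};

int InitBias(Tensor *bias_tensor) {
  if (bias_tensor == nullptr) {
    return RET_NULL_PTR;
  }
  auto *bias = reinterpret_cast<float *>(bias_tensor->MutableData());
  if (bias == nullptr) {  // the fix: error out when the pointer IS null
    std::fprintf(stderr, "bias is nullptr!\n");
    return RET_NULL_PTR;
  }
  return RET_OK;
}

int main() {
  Tensor empty;                // no buffer attached
  float buf[4] = {0};
  Tensor filled{buf};          // buffer attached
  std::printf("%d %d\n", InitBias(&empty), InitBias(&filled));  // prints: -1 0
}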


@@ -40,6 +40,7 @@ int TransposeFp16CPUKernel::Init() {
 int TransposeFp16CPUKernel::Run() {
   MS_ASSERT(in_tensors_.size() == 1 || in_tensors_.size() == 2);
   TransposeParameter *param = reinterpret_cast<TransposeParameter *>(this->op_parameter_);
+  param->data_size_ = in_tensors_[0]->Size();
   if (in_tensors_.size() == 2) {
     auto input_perm = in_tensors_.at(1);
     MS_ASSERT(input_perm != nullptr);
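The second hunk makes TransposeFp16CPUKernel::Run() refresh param->data_size_ from the current input tensor on every invocation, presumably so the transpose uses the input's actual byte size even if the tensor is resized between runs (the diff alone does not show where data_size_ was set before). A minimal sketch of the idea, with TransposeParameter and Tensor again as stand-ins rather than the real classes:

// Sketch only: TransposeParameter and Tensor mimic the shapes in the diff;
// the real MindSpore Lite classes have more fields and different methods.
#include <cstddef>
#include <cstdio>
#include <vector>

struct TransposeParameter {
  size_t data_size_ = 0;  // bytes the transpose routine will process
};

struct Tensor {
  std::vector<float> buf;
  size_t Size() const { return buf.size() * sizeof(float); }  // size in bytes
};

int Run(TransposeParameter *param, const std::vector<Tensor *> &in_tensors) {
  // The fix: derive data_size_ from the input at Run() time instead of
  // relying on a value captured earlier, so a reshaped input is handled.
  param->data_size_ = in_tensors[0]->Size();
  std::printf("transposing %zu bytes\n", param->data_size_);
  return 0;
}

int main() {
  TransposeParameter param;
  Tensor input{std::vector<float>(16)};  // 16 floats = 64 bytes
  std::vector<Tensor *> inputs{&input};
  Run(&param, inputs);                   // prints: transposing 64 bytes
  input.buf.resize(32);                  // input grows between runs
  Run(&param, inputs);                   // prints: transposing 128 bytes
}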