diff --git a/mindspore/lite/nnacl/fp32/activation.h b/mindspore/lite/nnacl/fp32/activation.h index 24f32a54c2a..999b04eb7a3 100644 --- a/mindspore/lite/nnacl/fp32/activation.h +++ b/mindspore/lite/nnacl/fp32/activation.h @@ -40,6 +40,8 @@ int HSigmoid(const float *src, int length, float *dst); int Swish(const float *src, int length, float *dst); int HSwish(const float *src, int length, float *dst); int HardTanh(const float *src, int length, float *dst, float min_val, float max_val); + +float TanhOpt(float src); #ifdef __cplusplus } #endif diff --git a/mindspore/lite/nnacl/int8/tanh_int8.c b/mindspore/lite/nnacl/int8/tanh_int8.c new file mode 100644 index 00000000000..354b5f629ab --- /dev/null +++ b/mindspore/lite/nnacl/int8/tanh_int8.c @@ -0,0 +1,30 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "nnacl/int8/tanh_int8.h" +#ifdef ENABLE_NEON +#include <arm_neon.h> +#endif + +void TanhInt8(const int8_t *input_ptr, int8_t *output_ptr, int size, TanhQuantParameter *quant) { + for (int i = 0; i < size; ++i) { + float fp32_src = (input_ptr[i] - quant->in_zp_) * quant->in_scale_; + float fp32_dst = TanhOpt(fp32_src); + int32_t int32_dst = (int32_t)round(fp32_dst * 1.0 / quant->out_scale_ + quant->out_zp_); + output_ptr[i] = (int8_t)MSMAX(MSMIN(int32_dst, 127), -128); + } + return; +} diff --git a/mindspore/lite/nnacl/int8/tanh_int8.h b/mindspore/lite/nnacl/int8/tanh_int8.h new file mode 100644 index 00000000000..17c8ab3edd0 --- /dev/null +++ b/mindspore/lite/nnacl/int8/tanh_int8.h @@ -0,0 +1,43 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef MINDSPORE_LITE_NNACL_INT8_TANH_INT8_H_ +#define MINDSPORE_LITE_NNACL_INT8_TANH_INT8_H_ + +#include "nnacl/op_base.h" +#include "nnacl/quantization/quantize.h" +#include "nnacl/quantization/fixed_point.h" +#include "nnacl/int8/quant_dtype_cast_int8.h" +#include "nnacl/fp32/activation.h" + +typedef struct TanhQuantParameter { + int32_t in_zp_; + int32_t out_zp_; + double in_scale_; + double out_scale_; +} TanhQuantParameter; + +#ifdef __cplusplus +extern "C" { +#endif + +void TanhInt8(const int8_t *input_ptr, int8_t *output_ptr, int size, TanhQuantParameter *quant); + +#ifdef __cplusplus +} +#endif + +#endif // MINDSPORE_LITE_NNACL_INT8_TANH_INT8_H_ diff --git a/mindspore/lite/src/ops/transpose.cc b/mindspore/lite/src/ops/transpose.cc index a228dda6f3f..400173a9a90 100644 --- a/mindspore/lite/src/ops/transpose.cc +++ b/mindspore/lite/src/ops/transpose.cc @@ -124,7 +124,7 @@ int Transpose::InferShape(std::vector<lite::Tensor *> inputs_, std::vector<lite::Tensor *> o if (!GetInferFlag()) { return RET_OK; } - MS_ASSERT(inputs_.size() == kSingleNum); + MS_ASSERT(inputs_.size() == kDoubleNum); MS_ASSERT(outputs_.size() == kSingleNum); int conjugate = GetConjugate(); diff --git a/mindspore/lite/src/runtime/kernel/arm/fp32/transpose_fp32.cc b/mindspore/lite/src/runtime/kernel/arm/fp32/transpose_fp32.cc index 89ba77f9ecb..afb733849a9 100644 --- a/mindspore/lite/src/runtime/kernel/arm/fp32/transpose_fp32.cc +++ b/mindspore/lite/src/runtime/kernel/arm/fp32/transpose_fp32.cc @@ -116,7 +116,7 @@ int TransposeFp32Run(void *cdata, int task_id) { } int TransposeCPUKernel::Run() { - MS_ASSERT(in_tensors_.size() == 1); + MS_ASSERT(in_tensors_.size() == 2); MS_ASSERT(out_tensors_.size() == 1); auto &in_tensor = in_tensors_.front(); auto &out_tensor = out_tensors_.front(); diff --git a/mindspore/lite/src/runtime/kernel/arm/int8/activation_int8.cc b/mindspore/lite/src/runtime/kernel/arm/int8/activation_int8.cc index 588cd37334a..c21672094cb 100644 --- 
a/mindspore/lite/src/runtime/kernel/arm/int8/activation_int8.cc +++ b/mindspore/lite/src/runtime/kernel/arm/int8/activation_int8.cc @@ -17,6 +17,7 @@ #include "src/runtime/kernel/arm/int8/relux_int8.h" #include "src/runtime/kernel/arm/int8/hswish_int8.h" #include "src/runtime/kernel/arm/int8/sigmoid_int8.h" +#include "src/runtime/kernel/arm/int8/tanh_int8.h" #include "src/runtime/kernel/arm/int8/leaky_relu_int8.h" #include "schema/model_generated.h" #include "src/kernel_registry.h" @@ -57,6 +58,9 @@ kernel::LiteKernel *CpuActivationInt8KernelCreator(const std::vectorGetQuantParams().front().scale; + tanh_quant_.in_zp_ = input->GetQuantParams().front().zeroPoint; + tanh_quant_.out_scale_ = output->GetQuantParams().front().scale; + tanh_quant_.out_zp_ = output->GetQuantParams().front().zeroPoint; + + if (!InferShapeDone()) { + return RET_OK; + } + return ReSize(); +} + +int TanhInt8CPUKernel::ReSize() { + element_size_ = in_tensors_.at(0)->ElementsNum(); + thread_count_ = MSMIN(element_size_, op_parameter_->thread_num_); + thread_stride_ = UP_DIV(element_size_, thread_count_); + return RET_OK; +} + +int TanhInt8CPUKernel::DoActivation(int task_id) { + int current_size = element_size_ - task_id * thread_stride_; + current_size = MSMIN(thread_stride_, current_size); + if (current_size <= 0) { + return RET_OK; + } + + int8_t *cur_input = in_ptr_ + task_id * thread_stride_; + int8_t *cur_output = out_ptr_ + task_id * thread_stride_; + + TanhInt8(cur_input, cur_output, current_size, &tanh_quant_); + return RET_OK; +} + +int TanhInt8Run(void *cdata, int task_id) { + auto activation_kernel = reinterpret_cast<TanhInt8CPUKernel *>(cdata); + auto error_code = activation_kernel->DoActivation(task_id); + if (error_code != RET_OK) { + MS_LOG(ERROR) << "TanhInt8Run error task_id[" << task_id << "] error_code[" << error_code << "]"; + return RET_ERROR; + } + return RET_OK; +} + +int TanhInt8CPUKernel::Run() { + in_ptr_ = reinterpret_cast<int8_t *>(in_tensors_.at(0)->data_c()); + out_ptr_ = 
reinterpret_cast<int8_t *>(out_tensors_.at(0)->data_c()); + + ParallelLaunch(this->context_->thread_pool_, TanhInt8Run, this, thread_count_); + return RET_OK; +} +} // namespace mindspore::kernel diff --git a/mindspore/lite/src/runtime/kernel/arm/int8/tanh_int8.h b/mindspore/lite/src/runtime/kernel/arm/int8/tanh_int8.h new file mode 100644 index 00000000000..54495b88a44 --- /dev/null +++ b/mindspore/lite/src/runtime/kernel/arm/int8/tanh_int8.h @@ -0,0 +1,54 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef MINDSPORE_LITE_SRC_BACKEND_ARM_INT8_TANH_INT8_H_ +#define MINDSPORE_LITE_SRC_BACKEND_ARM_INT8_TANH_INT8_H_ + +#include <vector> +#include <limits> +#include <algorithm> +#include "src/lite_kernel.h" +#include "nnacl/int8/tanh_int8.h" +#include "nnacl/quantization/quantize.h" +#include "include/errorcode.h" + +namespace mindspore::kernel { +class TanhInt8CPUKernel : public LiteKernel { + public: + TanhInt8CPUKernel(OpParameter *parameter, const std::vector<lite::Tensor *> &inputs, + const std::vector<lite::Tensor *> &outputs, const lite::InnerContext *ctx, + const mindspore::lite::PrimitiveC *primitive) + : LiteKernel(parameter, inputs, outputs, ctx, primitive) {} + ~TanhInt8CPUKernel() override = default; + + int Init() override; + int ReSize() override; + int Run() override; + + public: + int DoActivation(int task_id); + + private: + int8_t *in_ptr_; + int8_t *out_ptr_; + int element_size_; + int thread_count_; + int thread_stride_; + TanhQuantParameter tanh_quant_; +}; +} // namespace mindspore::kernel + +#endif // MINDSPORE_LITE_SRC_BACKEND_ARM_INT8_TANH_INT8_H_