From 3fbcd29bfd1cba75bd975a36d73ab830be392b07 Mon Sep 17 00:00:00 2001 From: lzk Date: Mon, 4 Jan 2021 01:48:19 -0800 Subject: [PATCH] code clean --- mindspore/lite/nnacl/fp32/adder_fp32.c | 1 - mindspore/lite/nnacl/fp32/arithmetic_fp32.c | 4 ++-- mindspore/lite/nnacl/fp32/gelu_fp32.c | 2 +- mindspore/lite/nnacl/fp32/gelu_fp32.h | 2 +- mindspore/lite/src/ops/partial.h | 6 ++---- mindspore/lite/src/ops/switch.h | 6 ++---- mindspore/lite/src/runtime/kernel/arm/fp32/upsample_fp32.h | 2 +- .../lite/src/runtime/kernel/arm/int8/layer_norm_int8.cc | 5 +++++ mindspore/lite/src/runtime/kernel/arm/int8/transpose_int8.h | 4 ++-- 9 files changed, 16 insertions(+), 16 deletions(-) diff --git a/mindspore/lite/nnacl/fp32/adder_fp32.c b/mindspore/lite/nnacl/fp32/adder_fp32.c index 8060b8ce179..142d933e468 100644 --- a/mindspore/lite/nnacl/fp32/adder_fp32.c +++ b/mindspore/lite/nnacl/fp32/adder_fp32.c @@ -17,7 +17,6 @@ #include "nnacl/fp32/adder_fp32.h" #include <math.h> #include <string.h> -#include "nnacl/fp32/common_func_fp32.h" #include "nnacl/fp32/matmul_fp32.h" void Adder12x4(const float *a, const float *b, float *dst, const float *bias, ActType act_type, int deep, int row, diff --git a/mindspore/lite/nnacl/fp32/arithmetic_fp32.c b/mindspore/lite/nnacl/fp32/arithmetic_fp32.c index 56e4ee5c132..839f40b2a7f 100644 --- a/mindspore/lite/nnacl/fp32/arithmetic_fp32.c +++ b/mindspore/lite/nnacl/fp32/arithmetic_fp32.c @@ -917,7 +917,7 @@ int ElementLogicalAnd(const float *in0, const float *in1, float *out, int size) int ElementLogicalAndInt(const int *in0, const int *in1, int *out, int size) { int index = 0; for (; index < size; index++) { - out[index] = (int)((int)(in0[index]) & (int)(in1[index])); + out[index] = (int)((unsigned int)(in0[index]) & (unsigned int)(in1[index])); } return NNACL_OK; } @@ -925,7 +925,7 @@ int ElementLogicalAndInt(const int *in0, const int *in1, int *out, int size) { int ElementLogicalAndBool(const bool *in0, const bool *in1, bool *out, int size) { int index = 0; for (; index 
< size; index++) { - out[index] = (bool)((bool)(in0[index]) & (bool)(in1[index])); + out[index] = (bool)((unsigned int)(in0[index]) & (unsigned int)(in1[index])); } return NNACL_OK; } diff --git a/mindspore/lite/nnacl/fp32/gelu_fp32.c b/mindspore/lite/nnacl/fp32/gelu_fp32.c index 8eee72cc9e9..3340208dfdd 100644 --- a/mindspore/lite/nnacl/fp32/gelu_fp32.c +++ b/mindspore/lite/nnacl/fp32/gelu_fp32.c @@ -20,7 +20,7 @@ #include <math.h> #include "nnacl/errorcode.h" -int DoGeLU(float *src, float *out, int64_t real_dst_count, const GeLUParameter *param) { +int DoGeLU(const float *src, float *out, int64_t real_dst_count, const GeLUParameter *param) { if (src == NULL || out == NULL) { return NNACL_ERR; } diff --git a/mindspore/lite/nnacl/fp32/gelu_fp32.h b/mindspore/lite/nnacl/fp32/gelu_fp32.h index 9a236196352..e37f0593822 100644 --- a/mindspore/lite/nnacl/fp32/gelu_fp32.h +++ b/mindspore/lite/nnacl/fp32/gelu_fp32.h @@ -23,7 +23,7 @@ extern "C" { #endif -int DoGeLU(float *src, float *out, int64_t real_dst_count, const GeLUParameter *param); +int DoGeLU(const float *src, float *out, int64_t real_dst_count, const GeLUParameter *param); #ifdef __cplusplus } #endif diff --git a/mindspore/lite/src/ops/partial.h b/mindspore/lite/src/ops/partial.h index 6ef3e702558..66c680c8459 100644 --- a/mindspore/lite/src/ops/partial.h +++ b/mindspore/lite/src/ops/partial.h @@ -28,15 +28,13 @@ namespace mindspore { namespace lite { class Partial : public PrimitiveC { public: + Partial() = default; + ~Partial() = default; #ifdef PRIMITIVE_WRITEABLE MS_DECLARE_PARENT(Partial, PrimitiveC); - Partial() = default; explicit Partial(schema::PrimitiveT *primitive) : PrimitiveC(primitive) {} int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) override; - #else - Partial() = default; - int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override; #endif int InferShape(std::vector<lite::Tensor *> inputs_, std::vector<lite::Tensor *> outputs_) override; diff --git 
a/mindspore/lite/src/ops/switch.h b/mindspore/lite/src/ops/switch.h index c52d43c7d38..80e6d4fed72 100644 --- a/mindspore/lite/src/ops/switch.h +++ b/mindspore/lite/src/ops/switch.h @@ -28,15 +28,13 @@ namespace mindspore { namespace lite { class Switch : public PrimitiveC { public: + Switch() = default; + ~Switch() = default; #ifdef PRIMITIVE_WRITEABLE MS_DECLARE_PARENT(Switch, PrimitiveC); - Switch() = default; explicit Switch(schema::PrimitiveT *primitive) : PrimitiveC(primitive) {} int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) override; - #else - Switch() = default; - int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override; #endif int InferShape(std::vector<lite::Tensor *> inputs_, std::vector<lite::Tensor *> outputs_) override; diff --git a/mindspore/lite/src/runtime/kernel/arm/fp32/upsample_fp32.h b/mindspore/lite/src/runtime/kernel/arm/fp32/upsample_fp32.h index c31c5845239..22bd7660cbd 100644 --- a/mindspore/lite/src/runtime/kernel/arm/fp32/upsample_fp32.h +++ b/mindspore/lite/src/runtime/kernel/arm/fp32/upsample_fp32.h @@ -36,7 +36,7 @@ class UpsampleCPUKernel : public ResizeCPUKernel { int RunImpl(int task_id) override; private: - UpsampleParameter *param_; + UpsampleParameter *param_ = nullptr; }; } // namespace mindspore::kernel diff --git a/mindspore/lite/src/runtime/kernel/arm/int8/layer_norm_int8.cc b/mindspore/lite/src/runtime/kernel/arm/int8/layer_norm_int8.cc index fa108646b18..23ae0624b92 100644 --- a/mindspore/lite/src/runtime/kernel/arm/int8/layer_norm_int8.cc +++ b/mindspore/lite/src/runtime/kernel/arm/int8/layer_norm_int8.cc @@ -19,6 +19,7 @@ using mindspore::lite::KernelRegistrar; using mindspore::lite::RET_ERROR; +using mindspore::lite::RET_NULL_PTR; using mindspore::lite::RET_OK; using mindspore::schema::PrimitiveType_LayerNorm; @@ -90,6 +91,10 @@ int LayerNormInt8CPUKernel::ReSize() { op_parameter_ = nullptr; } op_parameter_ = PopulateLayerNormParameter(primitive_); + if (op_parameter_ == nullptr) { + 
MS_LOG(ERROR) << "op_parameter_ is nullptr!"; + return RET_NULL_PTR; + } op_parameter_->thread_num_ = context_->thread_num_; param_ = reinterpret_cast<LayerNormParameter *>(op_parameter_); auto shape = in_tensors_.front()->shape(); diff --git a/mindspore/lite/src/runtime/kernel/arm/int8/transpose_int8.h b/mindspore/lite/src/runtime/kernel/arm/int8/transpose_int8.h index 2f71cec7575..b8acf56773a 100644 --- a/mindspore/lite/src/runtime/kernel/arm/int8/transpose_int8.h +++ b/mindspore/lite/src/runtime/kernel/arm/int8/transpose_int8.h @@ -55,8 +55,8 @@ class TransposeInt8CPUKernel : public LiteKernel { int thread_h_stride_ = 0; int thread_h_num_ = 0; int num_unit_ = 0; - int in_shape_[8]; - int out_shape_[8]; + int in_shape_[8] = {0}; + int out_shape_[8] = {0}; }; } // namespace mindspore::kernel