forked from mindspore-Ecosystem/mindspore

code clean

parent efd22c96ad
commit 3fbcd29bfd

@@ -17,7 +17,6 @@
 #include "nnacl/fp32/adder_fp32.h"
 #include <string.h>
 #include <math.h>
 #include "nnacl/fp32/common_func_fp32.h"
 #include "nnacl/fp32/matmul_fp32.h"

 void Adder12x4(const float *a, const float *b, float *dst, const float *bias, ActType act_type, int deep, int row,

@@ -917,7 +917,7 @@ int ElementLogicalAnd(const float *in0, const float *in1, float *out, int size)
 int ElementLogicalAndInt(const int *in0, const int *in1, int *out, int size) {
   int index = 0;
   for (; index < size; index++) {
-    out[index] = (int)((int)(in0[index]) & (int)(in1[index]));
+    out[index] = (int)((unsigned int)(in0[index]) & (unsigned int)(in1[index]));
   }
   return NNACL_OK;
 }

@@ -925,7 +925,7 @@ int ElementLogicalAndInt(const int *in0, const int *in1, int *out, int size) {
 int ElementLogicalAndBool(const bool *in0, const bool *in1, bool *out, int size) {
   int index = 0;
   for (; index < size; index++) {
-    out[index] = (bool)((bool)(in0[index]) & (bool)(in1[index]));
+    out[index] = (bool)((unsigned int)(in0[index]) & (unsigned int)(in1[index]));
   }
   return NNACL_OK;
 }

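Note on the two hunks above: the element-wise logical-AND kernels now cast both operands to unsigned int before applying `&`, presumably to silence compiler warnings about bitwise operations on signed or bool operands and to make the bit-pattern semantics explicit for negative inputs. The standalone sketch below shows the same pattern in isolation; ElementAndInt is a hypothetical helper written for illustration, not the nnacl function.

#include <cstdio>

// Sketch of the cast-to-unsigned AND used above: AND the unsigned bit
// patterns, then convert the result back to the signed element type.
static int ElementAndInt(const int *in0, const int *in1, int *out, int size) {
  for (int index = 0; index < size; index++) {
    out[index] = (int)((unsigned int)(in0[index]) & (unsigned int)(in1[index]));
  }
  return 0;
}

int main() {
  int a[2] = {6, -1};
  int b[2] = {3, 5};
  int r[2] = {0};
  ElementAndInt(a, b, r, 2);
  std::printf("%d %d\n", r[0], r[1]);  // prints "2 5"
  return 0;
}
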
@@ -20,7 +20,7 @@
 #include <math.h>
 #include "nnacl/errorcode.h"

-int DoGeLU(float *src, float *out, int64_t real_dst_count, const GeLUParameter *param) {
+int DoGeLU(const float *src, float *out, int64_t real_dst_count, const GeLUParameter *param) {
   if (src == NULL || out == NULL) {
     return NNACL_ERR;
   }

@@ -23,7 +23,7 @@
 extern "C" {
 #endif

-int DoGeLU(float *src, float *out, int64_t real_dst_count, const GeLUParameter *param);
+int DoGeLU(const float *src, float *out, int64_t real_dst_count, const GeLUParameter *param);
 #ifdef __cplusplus
 }
 #endif

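The two GeLU hunks change the definition and the declaration together, so the header and the source file keep matching prototypes; `src` becomes `const float *` since the input buffer is only read. Below is a minimal const-correct sketch with the same NULL-guard shape, assuming nothing about the real kernel: ApproxGelu and the tanh-based formula are illustrative only, not the nnacl implementation.

#include <cmath>
#include <cstdio>

// Hypothetical element-wise kernel with a read-only input, mirroring the
// const-qualified DoGeLU signature above.
static int ApproxGelu(const float *src, float *out, int count) {
  if (src == nullptr || out == nullptr) {
    return -1;  // analogous to returning NNACL_ERR on bad pointers
  }
  for (int i = 0; i < count; i++) {
    const float x = src[i];
    // Common tanh approximation of GELU, used here purely as an example.
    out[i] = 0.5f * x * (1.0f + std::tanh(0.7978845608f * (x + 0.044715f * x * x * x)));
  }
  return 0;
}

int main() {
  const float in[3] = {-1.0f, 0.0f, 1.0f};
  float res[3] = {0};
  ApproxGelu(in, res, 3);
  std::printf("%f %f %f\n", res[0], res[1], res[2]);
  return 0;
}
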
@@ -28,15 +28,13 @@ namespace mindspore {
 namespace lite {
 class Partial : public PrimitiveC {
  public:
+  Partial() = default;
+  ~Partial() = default;
 #ifdef PRIMITIVE_WRITEABLE
   MS_DECLARE_PARENT(Partial, PrimitiveC);
-  Partial() = default;
   explicit Partial(schema::PrimitiveT *primitive) : PrimitiveC(primitive) {}
   int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) override;
-
 #else
-  Partial() = default;
-
   int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override;
 #endif
   int InferShape(std::vector<lite::Tensor *> inputs_, std::vector<lite::Tensor *> outputs_) override;

@@ -28,15 +28,13 @@ namespace mindspore {
 namespace lite {
 class Switch : public PrimitiveC {
  public:
+  Switch() = default;
+  ~Switch() = default;
 #ifdef PRIMITIVE_WRITEABLE
   MS_DECLARE_PARENT(Switch, PrimitiveC);
-  Switch() = default;
   explicit Switch(schema::PrimitiveT *primitive) : PrimitiveC(primitive) {}
   int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) override;
-
 #else
-  Switch() = default;
-
   int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override;
 #endif
   int InferShape(std::vector<lite::Tensor *> inputs_, std::vector<lite::Tensor *> outputs_) override;

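In both Partial and Switch the default constructor was previously declared separately inside the PRIMITIVE_WRITEABLE branch and the #else branch; the clean-up declares the constructor (plus a default destructor) once, above the #ifdef, so both build variants share one set of special members. A stripped-down sketch of the resulting layout, with placeholder names (Widget, WRITEABLE) that are not part of MindSpore:

// Special members declared once; only the variant-specific API stays
// inside the build-variant #ifdef.
class Widget {
 public:
  Widget() = default;
  ~Widget() = default;
#ifdef WRITEABLE
  void SetValue(int v) { value_ = v; }   // writeable build only
#else
  int GetValue() const { return value_; }  // read-only build
#endif

 private:
  int value_ = 0;
};

int main() {
  Widget w;
  (void)w;
  return 0;
}
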
@@ -36,7 +36,7 @@ class UpsampleCPUKernel : public ResizeCPUKernel {
   int RunImpl(int task_id) override;

  private:
-  UpsampleParameter *param_;
+  UpsampleParameter *param_ = nullptr;
 };
 }  // namespace mindspore::kernel

@@ -19,6 +19,7 @@

 using mindspore::lite::KernelRegistrar;
 using mindspore::lite::RET_ERROR;
+using mindspore::lite::RET_NULL_PTR;
 using mindspore::lite::RET_OK;
 using mindspore::schema::PrimitiveType_LayerNorm;

@@ -90,6 +91,10 @@ int LayerNormInt8CPUKernel::ReSize() {
     op_parameter_ = nullptr;
   }
   op_parameter_ = PopulateLayerNormParameter(primitive_);
+  if (op_parameter_ == nullptr) {
+    MS_LOG(ERROR) << "op_parameter_ is nullptr!";
+    return RET_NULL_PTR;
+  }
   op_parameter_->thread_num_ = context_->thread_num_;
   param_ = reinterpret_cast<LayerNormParameter *>(op_parameter_);
   auto shape = in_tensors_.front()->shape();

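The ReSize hunk adds a guard: the pointer returned by PopulateLayerNormParameter is now checked before `thread_num_` is written through it, and the method returns RET_NULL_PTR (hence the new using-declaration in the previous hunk) instead of dereferencing a null parameter. A self-contained sketch of the same check-before-use pattern follows; Param, MakeParam, and the error value are placeholders, not MindSpore Lite APIs.

#include <cstdio>

// Placeholder standing in for the populated parameter struct.
struct Param {
  int thread_num = 1;
};

// May fail and return nullptr, like a populate/factory call.
Param *MakeParam(bool fail) { return fail ? nullptr : new Param(); }

int Configure(bool fail, int threads) {
  Param *param = MakeParam(fail);
  if (param == nullptr) {
    std::fprintf(stderr, "param is nullptr!\n");
    return -1;  // mirrors returning RET_NULL_PTR above
  }
  param->thread_num = threads;  // safe to dereference only after the check
  delete param;
  return 0;
}

int main() {
  std::printf("%d %d\n", Configure(false, 4), Configure(true, 4));  // 0 -1
  return 0;
}
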
@@ -55,8 +55,8 @@ class TransposeInt8CPUKernel : public LiteKernel {
   int thread_h_stride_ = 0;
   int thread_h_num_ = 0;
   int num_unit_ = 0;
-  int in_shape_[8];
-  int out_shape_[8];
+  int in_shape_[8] = {0};
+  int out_shape_[8] = {0};
 };
 }  // namespace mindspore::kernel

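The Upsample and Transpose header hunks give member variables defined initial values at the point of declaration: the parameter pointer starts as nullptr and the shape arrays start zeroed, so a kernel object has a predictable state even before Init/ReSize fills them in. A minimal sketch of the in-class initialization idiom, using a placeholder struct rather than the real kernel classes:

#include <cstdio>

// KernelState is a made-up type mirroring the initializers added above;
// it is not a MindSpore Lite class.
struct KernelState {
  int *param = nullptr;    // pointer member defaults to nullptr
  int in_shape[8] = {0};   // shape buffers start zero-filled
  int out_shape[8] = {0};
};

int main() {
  KernelState s;
  std::printf("%d %d %d\n", s.param == nullptr, s.in_shape[0], s.out_shape[7]);  // 1 0 0
  return 0;
}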