forked from mindspore-Ecosystem/mindspore
hard tanh fp32
This commit is contained in:
parent a9fcc3e3eb
commit 9b5400bb6e
@@ -116,3 +116,21 @@ int HSwish(const float *src, int length, float *dst) {
   }
   return NNACL_OK;
 }
+
+int HardTanh(const float *src, int length, float *dst, float min_val, float max_val) {
+  if (max_val <= min_val) {
+    return NNACL_ERR;
+  }
+  int i = 0;
+  for (i = 0; i < length; ++i) {
+    float in = src[i];
+    if (in < min_val) {
+      dst[i] = min_val;
+    } else if (in > max_val) {
+      dst[i] = max_val;
+    } else {
+      dst[i] = in;
+    }
+  }
+  return NNACL_OK;
+}
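The new kernel is an element-wise clamp to [min_val, max_val]. A self-contained sketch of the same semantics (a sketch only: NNACL_OK/NNACL_ERR are stubbed with illustrative constants because the nnacl headers are not part of this diff):

#include <algorithm>
#include <cstdio>

constexpr int kOk = 0;   // stands in for NNACL_OK
constexpr int kErr = 1;  // stands in for NNACL_ERR

// Mirrors HardTanh above: reject an empty range, then clamp every element.
int HardTanhSketch(const float *src, int length, float *dst, float min_val, float max_val) {
  if (max_val <= min_val) {
    return kErr;
  }
  for (int i = 0; i < length; ++i) {
    dst[i] = std::min(std::max(src[i], min_val), max_val);
  }
  return kOk;
}

int main() {
  const float in[8] = {-2.0f, -1.0f, -0.5f, 0.0f, 0.5f, 1.0f, 5.0f, 6.0f};
  float out[8] = {};
  if (HardTanhSketch(in, 8, out, -1.0f, 1.0f) == kOk) {
    for (float v : out) std::printf("%g ", v);  // prints: -1 -1 -0.5 0 0.5 1 1 1
    std::printf("\n");
  }
  return 0;
}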
@@ -24,6 +24,8 @@ typedef struct ActivationParameter {
   OpParameter op_parameter_;
   int type_;
   float alpha_;
+  float min_val_;
+  float max_val_;
 } ActivationParameter;
 
 #ifdef __cplusplus
@@ -35,6 +37,7 @@ int LRelu(const float *src, int length, float *dst, float alpha);
 int Sigmoid(const float *src, int length, float *dst);
 int Tanh(const float *src, int length, float *dst);
 int HSwish(const float *src, int length, float *dst);
+int HardTanh(const float *src, int length, float *dst, float min_val, float max_val);
 #ifdef __cplusplus
 }
 #endif
@@ -57,7 +57,8 @@ enum ActivationType : byte {
     HSIGMOID = 13,
     THRESHOLDRELU = 14,
     LINEAR = 15,
-    UNKNOW = 16
+    HARD_TANH = 16,
+    UNKNOW = 17
 }
 enum ActivationGradType : byte {
     NO_ACTIVATION = 0,
@@ -155,6 +156,8 @@ table SoftMax {
 table Activation {
     type: ActivationType = 0;
    alpha: float = 0.2;
+    min_val: float = -1.0;
+    max_val: float = 1.0;
 }
 table ActivationGrad {
     type: ActivationType = 0;
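Two schema notes. Appending min_val/max_val at the end of table Activation is backward compatible: FlatBuffers returns the declared defaults for fields a buffer never wrote. Renumbering UNKNOW from 16 to 17, however, changes its wire value, so a model serialized with the old UNKNOW = 16 would now decode as HARD_TANH. A hedged read-path sketch, assuming the header generated from this schema:

// Build an Activation record without the two new fields, then read them back;
// the generated accessors fall back to the schema defaults declared above.
flatbuffers::FlatBufferBuilder fbb;
fbb.Finish(schema::CreateActivation(fbb, schema::ActivationType_HARD_TANH));
auto *attr = flatbuffers::GetRoot<schema::Activation>(fbb.GetBufferPointer());
// attr->min_val() == -1.0f, attr->max_val() == 1.0f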
@@ -22,9 +22,13 @@ namespace lite {
 #ifdef PRIMITIVE_WRITEABLE
 int Activation::GetType() const { return this->primitive_->value.AsActivation()->type; }
 float Activation::GetAlpha() const { return this->primitive_->value.AsActivation()->alpha; }
+float Activation::GetMinVal() const { return this->primitive_->value.AsActivation()->min_val; }
+float Activation::GetMaxVal() const { return this->primitive_->value.AsActivation()->max_val; }
 
 void Activation::SetType(int type) { this->primitive_->value.AsActivation()->type = (schema::ActivationType)type; }
 void Activation::SetAlpha(float alpha) { this->primitive_->value.AsActivation()->alpha = alpha; }
+void Activation::SetMinVal(float min_val) { this->primitive_->value.AsActivation()->min_val = min_val; }
+void Activation::SetMaxVal(float max_val) { this->primitive_->value.AsActivation()->max_val = max_val; }
 
 int Activation::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) {
   if (this->primitive_ == nullptr) {
@@ -63,13 +67,15 @@ int Activation::UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuff
     MS_LOG(ERROR) << "value_as_Activation return nullptr";
     return RET_ERROR;
   }
-  auto val_offset = schema::CreateActivation(*fbb, attr->type(), attr->alpha());
+  auto val_offset = schema::CreateActivation(*fbb, attr->type(), attr->alpha(), attr->min_val(), attr->max_val());
   auto prim_offset = schema::CreatePrimitive(*fbb, schema::PrimitiveType_Activation, val_offset.o);
   fbb->Finish(prim_offset);
   return RET_OK;
 }
 int Activation::GetType() const { return this->primitive_->value_as_Activation()->type(); }
 float Activation::GetAlpha() const { return this->primitive_->value_as_Activation()->alpha(); }
+float Activation::GetMinVal() const { return this->primitive_->value_as_Activation()->min_val(); }
+float Activation::GetMaxVal() const { return this->primitive_->value_as_Activation()->max_val(); }
 #endif
 }  // namespace lite
 }  // namespace mindspore
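The generated CreateActivation takes its arguments in schema field order, so the two new values slot in directly after alpha. A minimal round-trip sketch (hypothetical, using the standard FlatBuffers API) showing that the re-packed attributes survive:

flatbuffers::FlatBufferBuilder fbb;
fbb.Finish(schema::CreateActivation(fbb, schema::ActivationType_HARD_TANH, /*alpha=*/0.2f,
                                    /*min_val=*/-2.0f, /*max_val=*/2.0f));
auto *attr = flatbuffers::GetRoot<schema::Activation>(fbb.GetBufferPointer());
// attr->min_val() == -2.0f, attr->max_val() == 2.0f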
@@ -33,6 +33,8 @@ class Activation : public PrimitiveC {
   int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) override;
   void SetType(int type);
   void SetAlpha(float alpha);
+  void SetMinVal(float minVal);
+  void SetMaxVal(float maxVal);
 #else
   Activation() = default;
 
@@ -40,6 +42,8 @@ class Activation : public PrimitiveC {
 #endif
   int GetType() const;
   float GetAlpha() const;
+  float GetMinVal() const;
+  float GetMaxVal() const;
 };
 }  // namespace lite
 }  // namespace mindspore
@@ -636,6 +636,8 @@ OpParameter *PopulateActivationParameter(const mindspore::lite::PrimitiveC *prim
       reinterpret_cast<mindspore::lite::Activation *>(const_cast<mindspore::lite::PrimitiveC *>(primitive));
   act_param->type_ = static_cast<int>(activation->GetType());
   act_param->alpha_ = activation->GetAlpha();
+  act_param->min_val_ = activation->GetMinVal();
+  act_param->max_val_ = activation->GetMaxVal();
   return reinterpret_cast<OpParameter *>(act_param);
 }
 
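Taken together, the new attributes travel from the serialized model down to the nnacl kernel; the flow below is inferred from the hunks in this commit:

//   schema Activation.min_val / max_val           (ops schema, above)
//   -> Activation::GetMinVal() / GetMaxVal()      (PrimitiveC wrapper, above)
//   -> act_param->min_val_ / max_val_             (PopulateActivationParameter, above)
//   -> ActivationCPUKernel::min_val_ / max_val_   (kernel constructor, below)
//   -> HardTanh(..., min_val_, max_val_)          (nnacl fp32 implementation, top of diff)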
@@ -57,6 +57,8 @@ int ActivationCPUKernel::DoActivation(int task_id) {
     error_code = Tanh(input_addr + stride * task_id, count, output_addr + stride * task_id);
   } else if (type_ == schema::ActivationType_HSWISH) {
     error_code = HSwish(input_addr + stride * task_id, count, output_addr + stride * task_id);
+  } else if (type_ == schema::ActivationType_HARD_TANH) {
+    error_code = HardTanh(input_addr + stride * task_id, count, output_addr + stride * task_id, min_val_, max_val_);
   } else {
     MS_LOG(ERROR) << "Activation type error";
     return RET_ERROR;
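DoActivation runs once per task_id, each task covering a stride-sized slice of the tensor. The slice computation sits outside this hunk; the sketch below is an assumption following the usual nnacl pattern (UP_DIV/MSMIN macros), not the file's actual code:

// Hypothetical per-task slicing (assumption; not shown in this diff):
int stride = UP_DIV(length, thread_count_);            // ceil(length / threads)
int count = MSMIN(stride, length - stride * task_id);  // the last slice may be short
// HardTanh then processes count elements starting at input_addr + stride * task_id.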
@@ -30,6 +30,8 @@ class ActivationCPUKernel : public LiteKernel {
       : LiteKernel(param, inputs, outputs, ctx, primitive), thread_count_(ctx->thread_num_) {
     type_ = (reinterpret_cast<ActivationParameter *>(param))->type_;
     alpha_ = (reinterpret_cast<ActivationParameter *>(param))->alpha_;
+    min_val_ = (reinterpret_cast<ActivationParameter *>(param))->min_val_;
+    max_val_ = (reinterpret_cast<ActivationParameter *>(param))->max_val_;
   }
   ~ActivationCPUKernel() override = default;
 
@@ -42,6 +44,8 @@ class ActivationCPUKernel : public LiteKernel {
   int thread_count_;
   int type_;
   float alpha_;
+  float min_val_;
+  float max_val_;
 };
 }  // namespace mindspore::kernel
 
@@ -126,4 +126,93 @@ TEST_F(TestActivationFp32, HSwishFp32) {
   input0_tensor.SetData(nullptr);
   output0_tensor.SetData(nullptr);
 }
+
+TEST_F(TestActivationFp32, HardTanh1) {
+  std::vector<lite::Tensor *> inputs_tensor;
+  std::vector<lite::Tensor *> outputs_tensor;
+
+  ActivationParameter op_param;
+  op_param.op_parameter_.type_ = schema::PrimitiveType_Activation;
+  op_param.type_ = schema::ActivationType_HARD_TANH;
+  op_param.min_val_ = -1.0f;
+  op_param.max_val_ = 1.0f;
+
+  std::vector<float> input = {-2.0, -1.0, -0.5, 0.0, 0.5, 1.0, 5.0, 6.0};
+  std::vector<int> in_shape = {8};
+
+  lite::Tensor input0_tensor;
+  inputs_tensor.push_back(&input0_tensor);
+  input0_tensor.SetData(input.data());
+  input0_tensor.set_shape(in_shape);
+
+  std::vector<float> output(8);
+  std::vector<int> output_shape = {8};
+
+  lite::Tensor output0_tensor;
+  outputs_tensor.push_back(&output0_tensor);
+  output0_tensor.SetData(output.data());
+
+  kernel::KernelKey desc = {kernel::KERNEL_ARCH::kCPU, kNumberTypeFloat32, schema::PrimitiveType_Activation};
+  auto creator = lite::KernelRegistry::GetInstance()->GetCreator(desc);
+  ASSERT_NE(creator, nullptr);
+  lite::InnerContext ctx;
+  ctx.thread_num_ = 2;
+  ASSERT_EQ(lite::RET_OK, ctx.Init());
+  kernel::LiteKernel *kernel =
+      creator(inputs_tensor, outputs_tensor, reinterpret_cast<OpParameter *>(&op_param), &ctx, desc, nullptr);
+  ASSERT_NE(kernel, nullptr);
+  auto output_tensor_shape = output0_tensor.shape();
+  kernel->Run();
+
+  std::vector<float> expect_output = {-1.0, -1.0, -0.5, 0.0, 0.5, 1.0, 1.0, 1.0};
+  CompareOutputData(output.data(), expect_output.data(), 8, 0.00001);
+
+  input0_tensor.SetData(nullptr);
+  output0_tensor.SetData(nullptr);
+}
+
+TEST_F(TestActivationFp32, HardTanh2) {
+  std::vector<lite::Tensor *> inputs_tensor;
+  std::vector<lite::Tensor *> outputs_tensor;
+
+  ActivationParameter op_param;
+  op_param.op_parameter_.type_ = schema::PrimitiveType_Activation;
+  op_param.type_ = schema::ActivationType_HARD_TANH;
+  op_param.min_val_ = -2.0f;
+  op_param.max_val_ = 2.0f;
+
+  std::vector<float> input = {-3.0, -2.0, -1.0, 0.0, 1.0, 5.0, 6.0, 7.0};
+  std::vector<int> in_shape = {8};
+
+  lite::Tensor input0_tensor;
+  inputs_tensor.push_back(&input0_tensor);
+  input0_tensor.SetData(input.data());
+  input0_tensor.set_shape(in_shape);
+
+  std::vector<float> output(8);
+  std::vector<int> output_shape = {8};
+
+  lite::Tensor output0_tensor;
+  outputs_tensor.push_back(&output0_tensor);
+  output0_tensor.SetData(output.data());
+
+  kernel::KernelKey desc = {kernel::KERNEL_ARCH::kCPU, kNumberTypeFloat32, schema::PrimitiveType_Activation};
+  auto creator = lite::KernelRegistry::GetInstance()->GetCreator(desc);
+  ASSERT_NE(creator, nullptr);
+  lite::InnerContext ctx;
+  ctx.thread_num_ = 2;
+  ASSERT_EQ(lite::RET_OK, ctx.Init());
+  kernel::LiteKernel *kernel =
+      creator(inputs_tensor, outputs_tensor, reinterpret_cast<OpParameter *>(&op_param), &ctx, desc, nullptr);
+  ASSERT_NE(kernel, nullptr);
+  auto output_tensor_shape = output0_tensor.shape();
+  kernel->Run();
+
+  std::vector<float> expect_output = {-2.0, -2.0, -1.0, 0.0, 1.0, 2.0, 2.0, 2.0};
+  CompareOutputData(output.data(), expect_output.data(), 8, 0.00001);
+
+  input0_tensor.SetData(nullptr);
+  output0_tensor.SetData(nullptr);
+}
+
 }  // namespace mindspore
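With gtest filtering, passing --gtest_filter=TestActivationFp32.HardTanh* to the test binary (binary name varies by build) runs only the two new cases: HardTanh1 covers the default [-1, 1] range and HardTanh2 a custom [-2, 2] range, together exercising both clamp bounds and the pass-through path.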