!12508 [MS][LITE][CPU]fix bug of pad_fp16 op

From: @fuzhiye
Reviewed-by: @zhang_xue_tong,@zhanghaibo5
Signed-off-by: @zhang_xue_tong
Committed by: mindspore-ci-bot, 2021-03-01 14:49:41 +08:00 (via Gitee)
Commit: b16fcc8037
2 changed files with 7 additions and 1 deletion


@@ -26,6 +26,9 @@ using mindspore::lite::RET_OK;
 using mindspore::schema::PrimitiveType_Pad;
 namespace mindspore::kernel {
+namespace {
+constexpr size_t kPadMaxInputSize = 2;
+}  // namespace
 int PadFp16CPUKernel::RunImpl(int task_id) {
   PadFp16(input_, output_, in_, out_, pad_param_->paddings_, task_id, context_->thread_num_);
   return RET_OK;
@@ -48,6 +51,9 @@ int PadFp16CPUKernel::Run() {
   int ret = 0;
   if (pad_param_->pad_mode_ == static_cast<int>(schema::PaddingMode_CONSTANT)) {
+    if (in_tensors_.size() == kPadMaxInputSize) {
+      CopyPaddingFromInput();
+    }
     if (pad_param_->constant_value_ - 0.0f < 1e-5) {
       memset(output_, 0, output_tensor->ElementsNum() * sizeof(float16_t));
     } else {
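For context, the guard added above handles the case where the Pad op receives its paddings as a second input tensor rather than as attributes: when two inputs are present (kPadMaxInputSize), the fp16 kernel now copies the paddings from that tensor via CopyPaddingFromInput before running the pad; the helper itself lives in the base PadCPUKernel, as the second file's change shows. Below is a minimal standalone sketch of the same pattern; Tensor and PreparePaddings are hypothetical names used for illustration, not the MindSpore Lite API.

#include <cstddef>
#include <vector>

constexpr size_t kPadMaxInputSize = 2;  // data tensor + optional paddings tensor

struct Tensor {             // hypothetical stand-in for the lite tensor type
  std::vector<int> values;
};

// Refresh the paddings from the optional second input before padding, so
// padding values produced at run time by upstream ops are honored.
void PreparePaddings(const std::vector<Tensor> &inputs, std::vector<int> *paddings) {
  if (inputs.size() == kPadMaxInputSize) {
    *paddings = inputs[1].values;
  }
}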


@@ -47,7 +47,6 @@ class PadCPUKernel : public LiteKernel {
  private:
   int CheckPaddings(int *paddings, int length, int *input_shape, int mode);
-  int CopyPaddingFromInput();
   void CalculateStrides();
   int ExtendShape(int *shape, int length, const int *ori_shape, int rank);
   int ExtendPaddings(int *paddings, int length, const int *ori_paddings, int ori_length);
@@ -55,6 +54,7 @@ class PadCPUKernel : public LiteKernel {
  protected:
   int HandleMirrorPad();
+  int CopyPaddingFromInput();
   PadParameter *pad_param_ = nullptr;
   int in_[4] = {0};
   int out_[4] = {0};
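The header change above moves the CopyPaddingFromInput() declaration from the private section to the protected section of PadCPUKernel, so the fp16 subclass can call it from its own Run(), as the first file's hunk does. A minimal sketch of why the access level matters (abbreviated names and stub bodies, not the real MindSpore classes):

class PadCPUKernel {
 protected:
  int CopyPaddingFromInput() { return 0; }  // stub body for illustration
};

class PadFp16CPUKernel : public PadCPUKernel {
 public:
  // Compiles only because the inherited member is protected; a private
  // member would not be accessible from the derived class.
  int Run() { return CopyPaddingFromInput(); }
};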