Fix the memory leak caused by the unreleased split_sizes_ buffer in OpParameter: add a destroy_func_ callback so heap members owned by a parameter are freed before the parameter itself.

This commit is contained in:
zengxianglong 2021-09-23 17:17:07 +08:00
parent a19291f051
commit a03470f24c
8 changed files with 61 additions and 0 deletions

View File

@ -262,6 +262,7 @@ typedef struct OpParameter {
int thread_num_;
int quant_type_;
bool is_train_session_;
void (*destroy_func_)(struct OpParameter *param);
} OpParameter;
typedef struct QuantArg {

View File

@ -20,6 +20,15 @@ using mindspore::schema::PrimitiveType_Split;
namespace mindspore {
namespace lite {
// Releases the heap-allocated split_sizes_ array owned by a SplitParameter.
// Installed as OpParameter::destroy_func_ so the owner can free nested
// allocations before free()-ing the parameter struct itself.
void DestroySplitParameter(OpParameter *parameter) {
  MS_CHECK_PTR_IF_NULL(parameter);
  auto *split_param = reinterpret_cast<SplitParameter *>(parameter);
  if (split_param->split_sizes_ == nullptr) {
    return;  // nothing to release
  }
  free(split_param->split_sizes_);
  split_param->split_sizes_ = nullptr;  // avoid double-free on repeated calls
}
OpParameter *PopulateSplitParameter(const void *prim) {
MS_CHECK_TRUE_RET(prim != nullptr, nullptr);
auto primitive = static_cast<const schema::Primitive *>(prim);
@ -55,6 +64,7 @@ OpParameter *PopulateSplitParameter(const void *prim) {
free(param);
return nullptr;
}
param->op_parameter_.destroy_func_ = DestroySplitParameter;
memset(param->split_sizes_, 0, static_cast<size_t>(param->num_split_) * sizeof(int));
auto split_sizes_vector_ = value->size_splits();
if (split_sizes_vector_ != nullptr && split_sizes_vector_->size() <= static_cast<uint32_t>(param->num_split_)) {

View File

@ -22,6 +22,15 @@
namespace mindspore {
namespace lite {
namespace {
// Frees the split_sizes_ buffer nested inside a SplitParameter.
// Registered as the parameter's destroy_func_ callback; the caller still
// free()s the SplitParameter struct afterwards.
void DestroySplitParameter(OpParameter *parameter) {
  MS_CHECK_PTR_IF_NULL(parameter);
  auto *split_param = reinterpret_cast<SplitParameter *>(parameter);
  if (split_param->split_sizes_ == nullptr) {
    return;  // already released or never allocated
  }
  free(split_param->split_sizes_);
  split_param->split_sizes_ = nullptr;  // guard against a second invocation
}
OpParameter *PopulateSplitParameter(const void *prim) {
MS_CHECK_TRUE_RET(prim != nullptr, nullptr);
auto *primitive = static_cast<const schema::v0::Primitive *>(prim);
@ -54,6 +63,7 @@ OpParameter *PopulateSplitParameter(const void *prim) {
free(split_param);
return nullptr;
}
split_param->op_parameter_.destroy_func_ = DestroySplitParameter;
memset(split_sizes, 0, static_cast<size_t>(split_param->num_split_) * sizeof(int));
split_param->split_sizes_ = split_sizes;
auto split_sizes_vector_ = split_prim->sizeSplits();

View File

@ -45,6 +45,7 @@ OpParameter *PopulateSmoothL1LossParameter(const void *prim) {
MS_LOG(ERROR) << "malloc SmoothL1LossParameter failed.";
return nullptr;
}
memset(p, 0, sizeof(SmoothL1LossParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
auto value = primitive->value_as_SmoothL1Loss();
MS_ASSERT(value != nullptr);
@ -59,6 +60,7 @@ OpParameter *PopulateSmoothL1LossGradParameter(const void *prim) {
MS_LOG(ERROR) << "malloc SmoothL1LossParameter failed.";
return nullptr;
}
memset(p, 0, sizeof(SmoothL1LossParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
auto value = primitive->value_as_SmoothL1LossGrad();
MS_ASSERT(value != nullptr);
@ -73,6 +75,7 @@ OpParameter *PopulateApplyMomentumParameter(const void *prim) {
MS_LOG(ERROR) << "malloc ApplyMomentumParameter failed.";
return nullptr;
}
memset(p, 0, sizeof(ApplyMomentumParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
auto value = primitive->value_as_ApplyMomentum();
p->op_parameter_.type_ = primitive->value_type();
@ -113,6 +116,7 @@ OpParameter *PopulateAdamParameter(const void *prim) {
MS_LOG(ERROR) << "new AdamParameter failed.";
return nullptr;
}
memset(p, 0, sizeof(AdamParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
auto value = primitive->value_as_Adam();
MS_ASSERT(value != nullptr);
@ -127,6 +131,7 @@ OpParameter *PopulateSgdParameter(const void *prim) {
MS_LOG(ERROR) << "malloc SgdParameter failed.";
return nullptr;
}
memset(p, 0, sizeof(SgdParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
auto value = primitive->value_as_SGD();
MS_ASSERT(value != nullptr);
@ -145,6 +150,7 @@ OpParameter *PopulateSparseSoftmaxCrossEntropyWithLogitsParameter(const void *pr
MS_LOG(ERROR) << "malloc SoftmaxCrossEntropyParameter failed.";
return nullptr;
}
memset(sce_param, 0, sizeof(SoftmaxCrossEntropyParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
auto value = primitive->value_as_SparseSoftmaxCrossEntropyWithLogits();
MS_ASSERT(value != nullptr);
@ -160,6 +166,7 @@ OpParameter *PopulateSoftmaxCrossEntropyParameter(const void *prim) {
MS_LOG(ERROR) << "malloc SoftmaxCrossEntropyParameter failed.";
return nullptr;
}
memset(sce_param, 0, sizeof(SoftmaxCrossEntropyParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
sce_param->op_parameter_.type_ = primitive->value_type();
sce_param->is_grad_ = 0;
@ -172,6 +179,7 @@ OpParameter *PopulateMaxPoolGradParameter(const void *prim) {
MS_LOG(ERROR) << "malloc PoolingParameter failed.";
return nullptr;
}
memset(pooling_param, 0, sizeof(PoolingParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
auto value = primitive->value_as_MaxPoolGrad();
MS_ASSERT(value != nullptr);
@ -210,6 +218,7 @@ OpParameter *PopulateAvgPoolGradParameter(const void *prim) {
MS_LOG(ERROR) << "malloc PoolingParameter failed.";
return nullptr;
}
memset(pooling_param, 0, sizeof(PoolingParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
auto value = primitive->value_as_AvgPoolGrad();
MS_ASSERT(value != nullptr);
@ -259,6 +268,7 @@ OpParameter *PopulateActivationGradParameter(const void *prim) {
MS_LOG(ERROR) << "malloc ActivationParameter failed.";
return nullptr;
}
memset(act_param, 0, sizeof(ActivationParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
auto value = primitive->value_as_ActivationGrad();
MS_ASSERT(value != nullptr);
@ -312,6 +322,7 @@ OpParameter *PopulateConvolutionGradInputParameter(const void *prim) {
MS_LOG(ERROR) << "malloc Param for conv grad filter failed.";
return nullptr;
}
memset(param, 0, sizeof(ConvParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
auto value = primitive->value_as_Conv2DBackpropInputFusion();
MS_ASSERT(value != nullptr);
@ -349,6 +360,7 @@ OpParameter *PopulatePowerGradParameter(const void *prim) {
MS_LOG(ERROR) << "malloc PowerParameter failed.";
return nullptr;
}
memset(power_param, 0, sizeof(PowerParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
auto value = primitive->value_as_PowerGrad();
MS_ASSERT(value != nullptr);
@ -365,6 +377,7 @@ OpParameter *PopulateBiasGradParameter(const void *prim) {
MS_LOG(ERROR) << "malloc ArithmeticParameter failed.";
return nullptr;
}
memset(arithmetic_param, 0, sizeof(ArithmeticParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
arithmetic_param->op_parameter_.type_ = primitive->value_type();
return reinterpret_cast<OpParameter *>(arithmetic_param);
@ -376,6 +389,7 @@ OpParameter *PopulateBNGradParameter(const void *prim) {
MS_LOG(ERROR) << "malloc BNGradParameter failed.";
return nullptr;
}
memset(bnGrad_param, 0, sizeof(BNGradParameter));
auto primitive = static_cast<const schema::Primitive *>(prim);
auto value = primitive->value_as_BatchNormGrad();
MS_ASSERT(value != nullptr);

View File

@ -46,6 +46,7 @@ OpParameter *DefaultPopulateParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc Param for primitive failed.";
return nullptr;
}
memset(param, 0, sizeof(OpParameter));
auto type = prim->value_type();
switch (prim->value_type()) {
case schema::v0::PrimitiveType_Depend:
@ -101,6 +102,7 @@ OpParameter *PopulateSmoothL1LossParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc SmoothL1LossParameter failed.";
return nullptr;
}
memset(p, 0, sizeof(SmoothL1LossParameter));
p->op_parameter_.type_ = schema::PrimitiveType_SmoothL1Loss;
auto smoothL1Loss_prim = prim->value_as_SmoothL1Loss();
@ -120,6 +122,7 @@ OpParameter *PopulateSmoothL1LossGradParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc SmoothL1LossParameter failed.";
return nullptr;
}
memset(p, 0, sizeof(SmoothL1LossParameter));
p->op_parameter_.type_ = schema::PrimitiveType_SmoothL1LossGrad;
auto smoothL1LossGrad_prim = prim->value_as_SmoothL1LossGrad();
@ -139,6 +142,7 @@ OpParameter *PopulateApplyMomentumParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc ApplyMomentumParameter failed.";
return nullptr;
}
memset(p, 0, sizeof(ApplyMomentumParameter));
p->op_parameter_.type_ = schema::PrimitiveType_ApplyMomentum;
auto applyMomentum_prim = prim->value_as_ApplyMomentum();
@ -186,6 +190,7 @@ OpParameter *PopulateAdamParameter(const void *primitive) {
MS_LOG(ERROR) << "new AdamParameter failed.";
return nullptr;
}
memset(p, 0, sizeof(AdamParameter));
p->op_parameter_.type_ = schema::PrimitiveType_Adam;
auto adam_prim = prim->value_as_Adam();
@ -205,6 +210,7 @@ OpParameter *PopulateSgdParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc SgdParameter failed.";
return nullptr;
}
memset(p, 0, sizeof(SgdParameter));
p->op_parameter_.type_ = schema::PrimitiveType_SGD;
auto sgd_prim = prim->value_as_Sgd();
@ -228,6 +234,7 @@ OpParameter *PopulateSparseSoftmaxCrossEntropyParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc SoftmaxCrossEntropyParameter failed.";
return nullptr;
}
memset(sce_param, 0, sizeof(SoftmaxCrossEntropyParameter));
auto sparseSoftmaxCrossEntropy_prim = prim->value_as_SparseSoftmaxCrossEntropy();
MS_ASSERT(sparseSoftmaxCrossEntropy_prim != nullptr);
sce_param->is_grad_ = sparseSoftmaxCrossEntropy_prim->isGrad();
@ -247,6 +254,7 @@ OpParameter *PopulateSoftmaxCrossEntropyParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc SoftmaxCrossEntropyParameter failed.";
return nullptr;
}
memset(sce_param, 0, sizeof(SoftmaxCrossEntropyParameter));
sce_param->is_grad_ = 0;
sce_param->op_parameter_.type_ = schema::PrimitiveType_SoftmaxCrossEntropyWithLogits;
return reinterpret_cast<OpParameter *>(sce_param);
@ -263,6 +271,7 @@ OpParameter *PopulatePoolingGradParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc PoolingParameter failed.";
return nullptr;
}
memset(pooling_param, 0, sizeof(PoolingParameter));
auto poolingGrad_prim = prim->value_as_PoolingGrad();
MS_ASSERT(poolingGrad_prim != nullptr);
@ -320,6 +329,7 @@ OpParameter *PopulateActivationGradParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc ActivationParameter failed.";
return nullptr;
}
memset(act_param, 0, sizeof(ActivationParameter));
act_param->op_parameter_.type_ = schema::PrimitiveType_ActivationGrad;
auto activationGrad_prim = prim->value_as_ActivationGrad();
MS_ASSERT(activationGrad_prim != nullptr);
@ -340,6 +350,7 @@ OpParameter *PopulateConvolutionGradFilterParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc Param for conv grad filter failed.";
return nullptr;
}
memset(param, 0, sizeof(ConvParameter));
param->op_parameter_.type_ = schema::PrimitiveType_Conv2DBackpropFilterFusion;
auto convolutionGradFilter_prim = prim->value_as_Conv2DGradFilter();
@ -390,6 +401,7 @@ OpParameter *PopulateConvolutionGradInputParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc Param for conv grad filter failed.";
return nullptr;
}
memset(param, 0, sizeof(ConvParameter));
param->op_parameter_.type_ = schema::PrimitiveType_Conv2DBackpropInputFusion;
auto convolutionGradInput_prim = prim->value_as_Conv2DGradInput();
@ -440,6 +452,7 @@ OpParameter *PopulateGroupConvolutionGradInputParameter(const void *primitive) {
MS_LOG(ERROR) << "new Param for conv grad filter failed.";
return nullptr;
}
memset(param, 0, sizeof(ConvParameter));
param->op_parameter_.type_ = schema::PrimitiveType_Conv2DBackpropInputFusion;
auto groupConvolutionGradInput_prim = prim->value_as_GroupConv2DGradInput();
@ -490,6 +503,7 @@ OpParameter *PopulatePowerGradParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc PowerParameter failed.";
return nullptr;
}
memset(power_param, 0, sizeof(PowerParameter));
power_param->op_parameter_.type_ = schema::PrimitiveType_PowerGrad;
auto powerGrad_prim = prim->value_as_PowerGrad();
MS_ASSERT(powerGrad_prim != nullptr);
@ -510,6 +524,7 @@ OpParameter *PopulateBiasGradParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc ArithmeticParameter failed.";
return nullptr;
}
memset(arithmetic_param, 0, sizeof(ArithmeticParameter));
arithmetic_param->op_parameter_.type_ = schema::PrimitiveType_BiasAddGrad;
return reinterpret_cast<OpParameter *>(arithmetic_param);
}
@ -526,6 +541,7 @@ OpParameter *PopulateBNGradParameter(const void *primitive) {
MS_LOG(ERROR) << "malloc BNGradParameter failed.";
return nullptr;
}
memset(bnGrad_param, 0, sizeof(BNGradParameter));
bnGrad_param->op_parameter_.type_ = schema::PrimitiveType_BatchNormGrad;
auto bNGrad_prim = prim->value_as_BNGrad();
MS_ASSERT(bNGrad_prim != nullptr);

View File

@ -256,6 +256,9 @@ STATUS NodeInferShpae(const schema::CNodeT &node, const std::vector<Tensor *> &i
}
auto ret = KernelInferShape(inputs, *outputs, parameter);
fbb.Clear();
if (parameter->destroy_func_ != nullptr) {
parameter->destroy_func_(parameter);
}
free(parameter);
parameter = nullptr;
return ret;

View File

@ -233,7 +233,11 @@ STATUS NodeInferShape(const std::unique_ptr<schema::CNodeT> &node, const std::ve
}
parameter->quant_type_ = node->quantType;
ret = KernelInferShape(inputs, *outputs, parameter);
if (parameter->destroy_func_ != nullptr) {
parameter->destroy_func_(parameter);
}
free(parameter);
parameter = nullptr;
}
fbb.Clear();

View File

@ -143,6 +143,9 @@ STATUS NodeInferShape::InferShape(const CNodePtr &cnode) {
}
RectifyFormat(cnode, inputs, fmk_type_);
ret = KernelInferShape(inputs, outputs, parameter);
if (parameter->destroy_func_ != nullptr) {
parameter->destroy_func_(parameter);
}
free(parameter);
parameter = nullptr;
}