forked from mindspore-Ecosystem/mindspore
fix the bug that subgraph outputs are omitted in special case
This commit is contained in:
parent
0ff27ef3b4
commit
2ef5d4a0f2
|
@ -315,23 +315,24 @@ std::vector<lite::Tensor *> LiteKernelUtil::SubgraphOutputTensors(const std::vec
|
|||
for (const auto &output_kernel : output_nodes) {
|
||||
auto &outer_out_kernels = output_kernel->out_kernels();
|
||||
auto &out_kernel_out_tensors = output_kernel->out_tensors();
|
||||
if (outer_out_kernels.empty()) {
|
||||
for (auto out_kernel_out_tensor : out_kernel_out_tensors) {
|
||||
for (auto out_kernel_out_tensor : out_kernel_out_tensors) {
|
||||
if (out_kernel_out_tensor->IsGraphOutput()) {
|
||||
output_tensors.insert(out_kernel_out_tensor);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
for (auto outer_out_kernel : outer_out_kernels) {
|
||||
auto iter = std::find(kernels.begin(), kernels.end(), outer_out_kernel);
|
||||
if (iter != kernels.end()) {
|
||||
continue;
|
||||
}
|
||||
auto &outer_out_kernel_in_tensors = outer_out_kernel->in_tensors();
|
||||
for (auto out_kernel_out_tensor : out_kernel_out_tensors) {
|
||||
auto outer_out_kernel_in_tensors_iter =
|
||||
std::find(outer_out_kernel_in_tensors.begin(), outer_out_kernel_in_tensors.end(), out_kernel_out_tensor);
|
||||
if (outer_out_kernel_in_tensors_iter != outer_out_kernel_in_tensors.end()) {
|
||||
output_tensors.insert(out_kernel_out_tensor);
|
||||
if (!outer_out_kernels.empty()) {
|
||||
for (auto outer_out_kernel : outer_out_kernels) {
|
||||
auto iter = std::find(kernels.begin(), kernels.end(), outer_out_kernel);
|
||||
if (iter != kernels.end()) {
|
||||
continue;
|
||||
}
|
||||
auto &outer_out_kernel_in_tensors = outer_out_kernel->in_tensors();
|
||||
for (auto out_kernel_out_tensor : out_kernel_out_tensors) {
|
||||
auto outer_out_kernel_in_tensors_iter =
|
||||
std::find(outer_out_kernel_in_tensors.begin(), outer_out_kernel_in_tensors.end(), out_kernel_out_tensor);
|
||||
if (outer_out_kernel_in_tensors_iter != outer_out_kernel_in_tensors.end()) {
|
||||
output_tensors.insert(out_kernel_out_tensor);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -176,6 +176,7 @@ int LiteSession::ConvertTensors(const lite::Model *model) {
|
|||
uint32_t tensor_count = model->all_tensors_.size();
|
||||
MS_ASSERT(!model->sub_graphs_.empty());
|
||||
auto model_input_indices = model->sub_graphs_.front()->input_indices_;
|
||||
auto model_output_indices = model->sub_graphs_.front()->output_indices_;
|
||||
for (uint32_t i = 0; i < tensor_count; ++i) {
|
||||
auto *src_tensor = model->all_tensors_[i];
|
||||
if (src_tensor == nullptr) {
|
||||
|
@ -197,6 +198,9 @@ int LiteSession::ConvertTensors(const lite::Model *model) {
|
|||
if (IsContain(model_input_indices, i)) {
|
||||
dst_tensor->set_category(Tensor::GRAPH_INPUT);
|
||||
}
|
||||
if (IsContain(model_output_indices, i)) {
|
||||
dst_tensor->set_category(Tensor::GRAPH_OUTPUT);
|
||||
}
|
||||
if (src_tensor->name() != nullptr) {
|
||||
dst_tensor->set_tensor_name(src_tensor->name()->str());
|
||||
}
|
||||
|
|
|
@ -50,6 +50,7 @@ class Tensor : public mindspore::tensor::MSTensor {
|
|||
CONST_SCALAR, // weight scalar
|
||||
VAR, // activation tensor
|
||||
GRAPH_INPUT,
|
||||
GRAPH_OUTPUT,
|
||||
};
|
||||
Tensor() = default;
|
||||
|
||||
|
@ -163,6 +164,8 @@ class Tensor : public mindspore::tensor::MSTensor {
|
|||
|
||||
bool IsGraphInput() const { return this->category_ == GRAPH_INPUT; }
|
||||
|
||||
bool IsGraphOutput() const { return this->category_ == GRAPH_OUTPUT; }
|
||||
|
||||
void Prepare() {
|
||||
if (allocator_ != nullptr) {
|
||||
data_ = allocator_->Prepare(data_);
|
||||
|
|
|
@ -27,4 +27,3 @@ quant_aware_bank_card_detection_inception.onnx
|
|||
quant_aware_bank_card_recognition_fcny.onnx
|
||||
quant_aware_identify_card_detect.onnx
|
||||
tiny-yolov3-11.onnx;2;1,416,416,3:1,2
|
||||
ml_video_edit_person_divison_pic;2
|
||||
|
|
|
@ -9,3 +9,4 @@ ml_video_edit_video_segment_gauss_adaptis_part2.pb;2
|
|||
ml_video_edit_video_segment_gauss_adaptis_part2_pb2tflite.tflite;2
|
||||
decoder.onnx;2;1,7,512:1,7
|
||||
fasterrcnn_crop.pb;1;420,630,3
|
||||
ml_video_edit_person_divison_pic;2
|
||||
|
|
|
@ -294,20 +294,26 @@ function Run_Converter() {
|
|||
tflite)
|
||||
model_fmk="TFLITE"
|
||||
;;
|
||||
caffemodel)
|
||||
model_name=${model_name%.*}
|
||||
model_fmk="CAFFE"
|
||||
;;
|
||||
onnx)
|
||||
model_fmk="ONNX"
|
||||
;;
|
||||
mindir)
|
||||
model_fmk="MINDIR"
|
||||
;;
|
||||
*)
|
||||
model_type="caffe"
|
||||
model_fmk="CAFFE"
|
||||
;;
|
||||
esac
|
||||
echo ${model_name} >> "${run_converter_log_file}"
|
||||
echo './converter_lite --fmk='${model_fmk}' --modelFile='${models_path}'/'${model_name}' --outputFile='${ms_models_path}'/'${model_name} >> "${run_converter_log_file}"
|
||||
./converter_lite --fmk=${model_fmk} --modelFile=${models_path}/${model_name} --outputFile=${ms_models_path}/${model_name}
|
||||
if [[ $model_fmk == "CAFFE" ]]; then
|
||||
echo ${model_name} >> "${run_converter_log_file}"
|
||||
echo './converter_lite --fmk='${model_fmk}' --modelFile='$models_path/${model_name}'.prototxt --weightFile='$models_path'/'${model_name}'.caffemodel --outputFile='${ms_models_path}'/'${model_name} >> "${run_converter_log_file}"
|
||||
./converter_lite --fmk=${model_fmk} --modelFile=${models_path}/${model_name}.prototxt --weightFile=${models_path}/${model_name}.caffemodel --outputFile=${ms_models_path}/${model_name}
|
||||
else
|
||||
echo ${model_name} >> "${run_converter_log_file}"
|
||||
echo './converter_lite --fmk='${model_fmk}' --modelFile='${models_path}'/'${model_name}' --outputFile='${ms_models_path}'/'${model_name} >> "${run_converter_log_file}"
|
||||
./converter_lite --fmk=${model_fmk} --modelFile=${models_path}/${model_name} --outputFile=${ms_models_path}/${model_name}
|
||||
fi
|
||||
if [ $? = 0 ]; then
|
||||
converter_result='converter '${model_type}' '${model_name}' pass';echo ${converter_result} >> ${run_converter_result_file}
|
||||
else
|
||||
|
|
Loading…
Reference in New Issue