Add examples for micro ARM64 and ARM32M

This commit is contained in:
lz 2022-03-01 09:48:26 +08:00
parent b91472822e
commit 78f98ad3f5
53 changed files with 5758 additions and 39 deletions

View File

@ -48,3 +48,4 @@
"mindspore/mindspore/lite/tools/converter/quantizer/quantize_util.cc" "useStlAlgorithm"
"mindspore/mindspore/lite/src/runtime/kernel/opencl/kernel/" "unreadVariable"
"mindspore/mindspore/lite/src/runtime/kernel/opencl/cl/" "unreadVariable"
"mindspore/mindspore/lite/examples/quick_start_micro/" "syntaxError"

View File

@ -196,4 +196,4 @@ mindspore/mindspore/ccsrc/frontend/parallel/auto_parallel/rec_core/rec_partition
mindspore/mindspore/ccsrc/plugin/device/cpu/kernel/nnacl/fp16/instance_norm_fp16.c:InstanceNormNC8HW8Fp16
mindspore/mindspore/lite/src/runtime/kernel/arm/fp32/matmul_fp32_base.cc:mindspore::kernel::MatmulFp32BaseCPUKernel::init_global_variable
mindspore/mindspore/ccsrc/plugin/device/cpu/kernel/nnacl/fp32/conv_winograd_fp32.c:ConvWinogardFp32
mindspore/mindspore/ccsrc/plugin/device/ascend/optimizer/ir_fusion/lamb_next_mv_with_decay_v1_rule.cc:mindspore::opt::MatchAdd5Pattern
mindspore/mindspore/ccsrc/plugin/device/ascend/optimizer/ir_fusion/lamb_next_mv_with_decay_v1_rule.cc:mindspore::opt::MatchAdd5Pattern

View File

@ -0,0 +1,51 @@
#include "include/errorcode.h"
#include "include/lite_session.h"
#include "include/ms_tensor.h"
#include "mnist_input_data.h"
using namespace mindspore;
// MNIST micro example entry: runs inference in an endless loop and prints
// results over SEGGER RTT (which has no %f, so floats are scaled to ints).
int main(void) {
  while (1) {
    /* USER CODE END WHILE */
    SEGGER_RTT_printf(0, "***********mnist test start***********\n");
    // Sanity check that FP arithmetic works on the target: print pi * 1e4.
    float a = 3.1415926;
    SEGGER_RTT_printf(0, "output: [%d] \n", (int)(a * 10000));
    // For micro builds the model is linked into the generated code, so a
    // null buffer / zero size are acceptable here.
    const char *model_buffer = nullptr;
    int model_size = 0;
    session::LiteSession *session = mindspore::session::LiteSession::CreateSession(model_buffer, model_size, nullptr);
    if (session == nullptr) {  // creation can fail (e.g. OOM) — don't dereference
      return lite::RET_ERROR;
    }
    Vector<tensor::MSTensor *> inputs = session->GetInputs();
    size_t inputs_num = inputs.size();
    void *inputs_binbuf[inputs_num];
    int inputs_size[inputs_num];
    for (size_t i = 0; i < inputs_num; ++i) {
      inputs_size[i] = inputs[i]->Size();
    }
    // here mnist only have one input data,just hard code to it's array;
    inputs_binbuf[0] = mnist_inputs_data;
    for (size_t i = 0; i < inputs_num; ++i) {
      void *input_data = inputs[i]->MutableData();
      memcpy(input_data, inputs_binbuf[i], inputs_size[i]);
    }
    int ret = session->RunGraph();
    if (ret != lite::RET_OK) {
      delete session;  // don't leak the session on failure
      return lite::RET_ERROR;
    }
    Vector<String> outputs_name = session->GetOutputTensorNames();
    for (size_t i = 0; i < outputs_name.size(); ++i) {  // size_t avoids signed/unsigned mismatch
      tensor::MSTensor *output_tensor = session->GetOutputByTensorName(outputs_name[i]);
      if (output_tensor == nullptr) {
        delete session;
        return -1;
      }
      float *casted_data = static_cast<float *>(output_tensor->MutableData());
      if (casted_data == nullptr) {
        delete session;
        return -1;
      }
      // Print at most the first 10 output values, scaled to ints for RTT.
      for (size_t j = 0; j < 10 && j < static_cast<size_t>(output_tensor->ElementsNum()); j++) {
        SEGGER_RTT_printf(0, "output[%d]: [%d]\n", (int)j, (int)(casted_data[j] * 10000));
      }
    }
    delete session;
    SEGGER_RTT_printf(0, "***********mnist test end***********\n");
    /* USER CODE BEGIN 3 */
  }
  /* USER CODE END 3 */
}

View File

@ -0,0 +1,58 @@
# Standalone CMake project for the generated micro benchmark example.
cmake_minimum_required(VERSION 3.14)
project(benchmark)
# PKG_PATH must point at the extracted MindSpore Lite release package.
if(NOT DEFINED PKG_PATH)
message(FATAL_ERROR "PKG_PATH not set")
endif()
get_filename_component(PKG_PATH ${PKG_PATH} ABSOLUTE BASE_DIR ${CMAKE_CURRENT_BINARY_DIR})
set(HEADER_PATH ${PKG_PATH}/runtime)
# Cross-compile switches; a host build is produced when both stay OFF.
option(MICRO_BUILD_ARM64 "build android arm64" OFF)
option(MICRO_BUILD_ARM32A "build android arm32" OFF)
# Micro runtime is built without the STL.
add_compile_definitions(NOT_USE_STL)
if(MICRO_BUILD_ARM64 OR MICRO_BUILD_ARM32A)
add_compile_definitions(ENABLE_NEON)
add_compile_definitions(ENABLE_ARM)
endif()
if(MICRO_BUILD_ARM64)
add_compile_definitions(ENABLE_ARM64)
endif()
if(MICRO_BUILD_ARM32A)
add_compile_definitions(ENABLE_ARM32)
# softfp + NEON to match the arm32 runtime ABI.
add_definitions(-mfloat-abi=softfp -mfpu=neon)
endif()
set(CMAKE_C_FLAGS "${CMAKE_ENABLE_C99} ${CMAKE_C_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17")
if("${CMAKE_BUILD_TYPE}" STREQUAL "Debug")
message(STATUS "build benchmark with debug info")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DDebug -g")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DDebug -g")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fvisibility=default")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fvisibility=default")
else()
message(STATUS "build benchmark release version")
# Hardened release flags; any inherited -g is stripped below.
set(CMAKE_C_FLAGS "-fPIC -fPIE -D_FORTIFY_SOURCE=2 -O3 -Wall -Werror -fstack-protector-strong -Wno-attributes \
-Wno-deprecated-declarations -Wno-missing-braces ${CMAKE_C_FLAGS}")
set(CMAKE_CXX_FLAGS "-fPIC -fPIE -D_FORTIFY_SOURCE=2 -O3 -Wall -Werror -fstack-protector-strong -Wno-attributes \
-Wno-deprecated-declarations -Wno-missing-braces -Wno-overloaded-virtual ${CMAKE_CXX_FLAGS}")
string(REPLACE "-g" "" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
string(REPLACE "-g" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
endif()
# src/ is expected to provide the `net` target linked below.
add_subdirectory(src)
include_directories(${CMAKE_CURRENT_SOURCE_DIR})
include_directories(${HEADER_PATH})
set(SRC_FILES
benchmark/benchmark.cc
benchmark/load_input.c
)
add_executable(benchmark ${SRC_FILES})
target_link_libraries(benchmark net -lm -pthread)

View File

@ -0,0 +1,207 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <iostream>
#include <string>
#include <cstring>
#include "include/lite_session.h"
#include "include/ms_tensor.h"
#include "include/errorcode.h"
#include "load_input.h"
#include "calib_output.h"
using namespace mindspore;
// Print the command-line argument layout expected by main().
void usage() {
  const char *kHelpText =
    "-- mindspore benchmark params usage:\n"
    "args[0]: executable file\n"
    "args[1]: inputs binary file\n"
    "args[2]: model weight binary file\n"
    "args[3]: loop count for performance test\n"
    "args[4]: calibration file\n"
    "args[5]: runtime thread num\n"
    "args[6]: runtime thread bind mode\n\n";
  printf("%s", kHelpText);
}
// Monotonic timestamp in microseconds; returns 0 if the clock read fails.
uint64_t GetTimeUs() {
  const uint64_t kUsPerSec = 1000000;  // seconds  -> microseconds
  const uint64_t kNsPerUs = 1000;      // nanoseconds -> microseconds
  struct timespec ts = {0, 0};
  if (clock_gettime(CLOCK_MONOTONIC, &ts) != 0) {
    return 0;
  }
  // Widen tv_sec BEFORE multiplying: the original `ts.tv_sec * USEC` is
  // evaluated in time_t/int and overflows on 32-bit targets (~35 min uptime).
  return (uint64_t)ts.tv_sec * kUsPerSec + (uint64_t)ts.tv_nsec / kNsPerUs;
}
// Print up to the first 10 elements of `data`, interpreted as T, followed
// by a newline. No-op on a null pointer.
template <typename T>
void PrintData(void *data, size_t data_number) {
  if (data == nullptr) {
    return;
  }
  const T *values = static_cast<T *>(data);
  const size_t limit = (data_number < 10) ? data_number : 10;
  for (size_t idx = 0; idx < limit; ++idx) {
    printf("%s, ", std::to_string(values[idx]).c_str());
  }
  printf("\n");
}
// Dump a tensor's name, type, element count, shape and (up to) its first
// ten values to stdout.
void TensorToString(tensor::MSTensor *tensor) {
  printf("name: %s, ", tensor->tensor_name().c_str());
  printf("DataType: %d, ", tensor->data_type());
  printf("Elements: %d, ", tensor->ElementsNum());
  printf("Shape: [");
  auto dims = tensor->shape();
  for (size_t k = 0; k < dims.size(); ++k) {
    printf("%d ", dims[k]);
  }
  printf("], Data: \n");
  switch (tensor->data_type()) {
    case kNumberTypeFloat32:
      PrintData<float>(tensor->MutableData(), tensor->ElementsNum());
      break;
    case kNumberTypeFloat16:  // fp16 payload is shown as its raw int16 bits
    case kNumberTypeInt16:
      PrintData<int16_t>(tensor->MutableData(), tensor->ElementsNum());
      break;
    case kNumberTypeInt32:
      PrintData<int32_t>(tensor->MutableData(), tensor->ElementsNum());
      break;
    case kNumberTypeInt8:
      PrintData<int8_t>(tensor->MutableData(), tensor->ElementsNum());
      break;
    case kNumberTypeUInt8:
      PrintData<uint8_t>(tensor->MutableData(), tensor->ElementsNum());
      break;
    default:
      std::cout << "Unsupported data type to print" << std::endl;
      break;
  }
}
// Benchmark entry point. Argument layout is documented in usage():
// argv[1] inputs file list, argv[2] model weights, argv[3] loop count,
// argv[4] calibration file, argv[5] thread num, argv[6] bind mode.
int main(int argc, const char **argv) {
  if (argc < 2) {
    printf("input command is invalid\n");
    usage();
    return lite::RET_ERROR;
  }
  printf("=======run benchmark======\n");
  const char *model_buffer = nullptr;
  int model_size = 0;
  // read .bin file by ReadBinaryFile;
  if (argc >= 3) {
    model_buffer = static_cast<const char *>(ReadInputData(argv[2], &model_size));
  }
  lite::Context *context = nullptr;
  if (argc >= 7) {
    // config benchmark context
    context = new (std::nothrow) lite::Context();
    if (context == nullptr) {
      free(const_cast<char *>(model_buffer));
      return lite::RET_ERROR;
    }
    context->thread_num_ = atoi(argv[5]);
    context->device_list_.resize(1);
    context->device_list_[0] = {lite::DT_CPU, {{false, static_cast<lite::CpuBindMode>(atoi(argv[6]))}}};
    printf("context: ThreadNum: %d, BindMode: %d\n", context->thread_num_,
           context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_);
  }
  session::LiteSession *session = mindspore::session::LiteSession::CreateSession(model_buffer, model_size, context);
  // The buffer came from malloc() inside ReadInputData, so it must be
  // released with free() — the original `delete[]` mismatched the allocator
  // (UB). Freeing before the null-check also covers the failure path, which
  // previously leaked the buffer.
  free(const_cast<char *>(model_buffer));
  model_buffer = nullptr;
  if (session == nullptr) {
    printf("create lite session failed\n");
    delete context;
    return lite::RET_ERROR;
  }
  // set model inputs tensor data
  Vector<tensor::MSTensor *> inputs = session->GetInputs();
  size_t inputs_num = inputs.size();
  void *inputs_binbuf[inputs_num];
  int inputs_size[inputs_num];
  for (size_t i = 0; i < inputs_num; ++i) {
    inputs_size[i] = inputs[i]->Size();
    inputs_binbuf[i] = nullptr;  // so cleanup may free() unconditionally
  }
  // Release everything main() still owns; used on every exit path so the
  // early error returns no longer leak session/context/input buffers.
  auto release_all = [&]() {
    for (size_t i = 0; i < inputs_num; ++i) {
      free(inputs_binbuf[i]);
      inputs_binbuf[i] = nullptr;
    }
    delete session;
    session = nullptr;
    delete context;
    context = nullptr;
  };
  int ret = ReadInputsFile(const_cast<char *>(argv[1]), inputs_binbuf, inputs_size, inputs_num);
  if (ret != lite::RET_OK) {
    release_all();
    return lite::RET_ERROR;
  }
  for (size_t i = 0; i < inputs_num; ++i) {
    void *input_data = inputs[i]->MutableData();
    memcpy(input_data, inputs_binbuf[i], inputs_size[i]);
  }
  if (argc >= 4) {
    // Timed loop for the performance measurement.
    int loop_count = atoi(argv[3]);
    printf("\nloop count: %d\n", loop_count);
    uint64_t start_time = GetTimeUs();
    for (int i = 0; i < loop_count; ++i) {
      ret = session->RunGraph();
      if (ret != lite::RET_OK) {
        release_all();
        return lite::RET_ERROR;
      }
    }
    uint64_t end_time = GetTimeUs();
    float total_time = (float)(end_time - start_time) / 1000.0f;
    printf("total time: %.5fms, per time: %.5fms\n", total_time, total_time / loop_count);
  }
  // One final untimed run whose outputs are printed and optionally compared.
  ret = session->RunGraph();
  if (ret != lite::RET_OK) {
    release_all();
    return lite::RET_ERROR;
  }
  printf("\noutputs: \n");
  Vector<String> outputs_name = session->GetOutputTensorNames();
  Vector<tensor::MSTensor *> outputs;
  for (const auto &name : outputs_name) {
    auto output = session->GetOutputByTensorName(name);
    outputs.push_back(output);
    TensorToString(output);
  }
  if (argc >= 5) {
    // Compare against the calibration file. The calibrator is deleted on all
    // paths — the original leaked it on both error returns.
    lite::Calibrator *calibrator = new (std::nothrow) lite::Calibrator();
    if (calibrator == nullptr) {
      release_all();
      return lite::RET_NULL_PTR;
    }
    ret = calibrator->ReadCalibData(argv[4]);
    if (ret == lite::RET_OK) {
      ret = calibrator->CompareOutputs(outputs);
    }
    delete calibrator;
    if (ret != lite::RET_OK) {
      release_all();
      return lite::RET_ERROR;
    }
  }
  printf("========run success=======\n");
  release_all();
  return lite::RET_OK;
}

View File

@ -0,0 +1,148 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "calib_output.h"
#include <fstream>
#include <sstream>
#include <iostream>
#include <stdio.h>
#include <cmath>
namespace mindspore {
namespace lite {
// Maximum tolerated sum of absolute output-vs-calibration differences.
constexpr float kToleranceVal = 0.0001;
// Returns RET_ERROR from the enclosing function when `ptr` is null.
#define MS_ERROR_IF_NULL(ptr) \
do { \
if ((ptr) == nullptr) { \
return mindspore::lite::RET_ERROR; \
} \
} while (0)
int Calibrator::ReadCalibData(const char *calib_data_path) {
std::ifstream in_file(calib_data_path);
if (!in_file.good()) {
printf("file is not exist, %s\n", calib_data_path);
return RET_ERROR;
}
if (!in_file.is_open()) {
printf("open file failed, %s\n", calib_data_path);
in_file.close();
return RET_ERROR;
}
while (!in_file.eof()) {
std::string line;
getline(in_file, line);
if (line.empty()) {
continue;
}
std::stringstream name_line(line);
std::string tensor_name;
size_t dim = 0;
name_line >> tensor_name >> dim;
size_t elements = 1;
for (size_t i = 0; i < dim; i++) {
size_t tmp_dim;
name_line >> tmp_dim;
elements *= tmp_dim;
}
getline(in_file, line);
std::stringstream data_line(line);
String name(tensor_name.c_str());
CalibTensor *output = new (std::nothrow) CalibTensor(name, elements);
MS_ERROR_IF_NULL(output);
float *data = output->MutableData();
MS_ERROR_IF_NULL(data);
for (size_t i = 0; i < elements; i++) {
data_line >> data[i];
}
calib_outputs_.push_back(output);
}
in_file.close();
return RET_OK;
}
// Sum of absolute element-wise differences between `output` and `calib`.
// Null inputs yield 0; on the first NaN/Inf the error accumulated so far
// is returned (matching the streaming comparison contract).
template <typename T>
float CompareData(const T *output, const float *calib, size_t elements_num) {
  float total = 0.f;
  if (output == nullptr || calib == nullptr) {
    printf("output or calib is nullptr\n");
    return total;
  }
  for (size_t idx = 0; idx < elements_num; ++idx) {
    const bool non_finite = std::isnan(output[idx]) || std::isinf(output[idx]) ||
                            std::isnan(calib[idx]) || std::isinf(calib[idx]);
    if (non_finite) {
      printf("error, output data is nan or inf\n");
      return total;
    }
    total += std::abs(output[idx] - calib[idx]);
  }
  return total;
}
// Compare each session output against the calibration tensor at the same
// index: names and element counts must match exactly, and the accumulated
// absolute value error must stay within kToleranceVal.
int Calibrator::CompareOutputs(const Vector<tensor::MSTensor *> &outputs) const {
  if (outputs.size() != calib_outputs_.size()) {
    printf("error, outputs and calibs size is mismatch\n");
    return RET_ERROR;
  }
  float total_error = 0;
  size_t outputs_num = outputs.size();
  for (size_t i = 0; i < outputs_num; ++i) {
    tensor::MSTensor *output = outputs[i];
    MS_ERROR_IF_NULL(output);
    CalibTensor *calib = calib_outputs_[i];
    MS_ERROR_IF_NULL(calib);
    if (output->tensor_name() != calib->tensor_name()) {
      printf("error, output tensor name is not equal to calib\n");
      return RET_ERROR;
    }
    if (output->ElementsNum() != calib->ElementsNum()) {
      printf("error, output elements num is not equal to calib\n");
      return RET_ERROR;
    }
    switch (output->data_type()) {
      case TypeId::kNumberTypeFloat:
      case TypeId::kNumberTypeFloat32: {
        total_error += CompareData(static_cast<float *>(output->data()), calib->MutableData(), output->ElementsNum());
        break;
      }
      case TypeId::kNumberTypeInt8: {
        total_error += CompareData(static_cast<int8_t *>(output->data()), calib->MutableData(), output->ElementsNum());
        break;
      }
      case TypeId::kNumberTypeUInt8: {
        total_error += CompareData(static_cast<uint8_t *>(output->data()), calib->MutableData(), output->ElementsNum());
        break;
      }
      case TypeId::kNumberTypeUInt:
      case TypeId::kNumberTypeUInt32: {
        // Fix: unsigned tensors were reinterpreted as int32_t, which
        // mis-compares any value above INT32_MAX.
        total_error += CompareData(static_cast<uint32_t *>(output->data()), calib->MutableData(), output->ElementsNum());
        break;
      }
      default: {
        printf("unsupported tensor data type\n");
      }
    }
  }
  if (total_error > kToleranceVal) {
    printf("compare outputs failed, total error: %f\n", total_error);
    return RET_ERROR;
  }
  printf("compare outputs success, total error: %f\n", total_error);
  return RET_OK;
}
} // namespace lite
} // namespace mindspore

View File

@ -0,0 +1,73 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_MICRO_CALIB_OUTPUT_H_
#define MINDSPORE_LITE_MICRO_CALIB_OUTPUT_H_
#include "include/lite_utils.h"
#include "include/ms_tensor.h"
#include "include/errorcode.h"
namespace mindspore {
namespace lite {
// One expected ("calibration") output tensor: a name plus a float buffer
// that is lazily allocated on the first MutableData() call.
class CalibTensor {
 public:
  CalibTensor(String name, size_t elements_num) : tensor_name_(name), elements_num_(elements_num) {}
  ~CalibTensor() {
    free(data_);
    data_ = nullptr;
  }
  // data_ is an owning raw buffer: copying would double-free it, so the
  // copy operations are deleted (the original class was implicitly copyable).
  CalibTensor(const CalibTensor &) = delete;
  CalibTensor &operator=(const CalibTensor &) = delete;
  String tensor_name() const { return tensor_name_; }
  int ElementsNum() const { return elements_num_; }
  // Lazily allocate and return the element buffer. Returns nullptr when the
  // element count is 0, exceeds the micro-target cap (INT16_MAX), or when
  // malloc fails.
  float *MutableData() {
    if (data_ == nullptr) {
      if (elements_num_ == 0 || elements_num_ > INT16_MAX) {
        return nullptr;
      }
      data_ = static_cast<float *>(malloc(elements_num_ * sizeof(float)));
    }
    return data_;
  }

 private:
  String tensor_name_;
  int elements_num_{0};
  float *data_{nullptr};
};
// Loads calibration tensors from a text file and compares session outputs
// against them (see calib_output.cc for the file format).
class Calibrator {
public:
Calibrator() = default;
~Calibrator() {
// Owns every parsed CalibTensor.
for (auto &calib : calib_outputs_) {
delete calib;
calib = nullptr;
}
calib_outputs_.clear();
}
// Parse "<name> <dims...>" / data line pairs from calib_data_path.
int ReadCalibData(const char *calib_data_path);
// RET_OK when the summed absolute error stays within kToleranceVal.
int CompareOutputs(const Vector<tensor::MSTensor *> &outputs) const;
private:
Vector<CalibTensor *> calib_outputs_;
};
} // namespace lite
} // namespace mindspore
#endif // MINDSPORE_LITE_MICRO_CALIB_OUTPUT_H_

View File

@ -0,0 +1,94 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "load_input.h"
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
/* Read a whole .bin/.net file into a malloc'ed buffer. On success returns
 * the buffer (caller frees) and stores the byte count into *size; returns
 * NULL on a null path, unsupported extension, open/alloc/read failure. */
void *ReadInputData(const char *real_input_path, int *size) {
  if (real_input_path == NULL) {
    return NULL;
  }
  if (strstr(real_input_path, ".bin") || strstr(real_input_path, ".net")) {
    FILE *file = fopen(real_input_path, "rb+");
    if (!file) {
      printf("Can't find %s\n", real_input_path);
      return NULL;
    }
    int curr_file_posi = ftell(file);
    fseek(file, 0, SEEK_END);
    *size = ftell(file);
    /* Cast keeps this valid C++ too; check the result — the original
     * memset'd a potentially NULL buffer. */
    unsigned char *buf = (unsigned char *)malloc((size_t)(*size));
    if (buf == NULL) {
      printf("malloc failed, size: %d\n", *size);
      fclose(file);
      return NULL;
    }
    (void)memset(buf, 0, (size_t)(*size));
    fseek(file, curr_file_posi, SEEK_SET);
    int read_size = (int)(fread(buf, 1, (size_t)(*size), file));
    if (read_size != (*size)) {
      printf("read file failed, total file size: %d, read_size: %d\n", (*size), read_size);
      fclose(file);
      free(buf);
      return NULL;
    }
    fclose(file);
    return (void *)buf;
  } else {
    printf("input data file should be .bin , .net");
    return NULL;
  }
}
/* Write out_size bytes of output_data to final_name as tab-separated
 * decimal values. Errors opening the file are reported and ignored. */
void SaveOutputData(char *final_name, unsigned char *output_data, unsigned int out_size) {
  FILE *output_file = fopen(final_name, "w");
  if (output_file == NULL) {
    printf("fopen output file: %s failed\n", final_name);
    return;
  }
  /* Write bytes directly: the original staged them through a variable-length
   * array, which is non-standard in C++ and risks unbounded stack use on
   * micro targets. */
  for (unsigned int i = 0; i < out_size; ++i) {
    fprintf(output_file, "%d\t", output_data[i]);
  }
  fclose(output_file);
}
/* Split the comma-separated `path` list and load each file into buffers[i],
 * checking every file's size against inputs_size[i]. Returns 0 on success,
 * -1 on any mismatch/error. `path` is modified in place by strtok_r. */
int ReadInputsFile(char *path, void **buffers, const int *inputs_size, int inputs_num) {
  const char *delim = ",";
  char *token;
  /* Heap array instead of a VLA (non-standard in C++, stack hazard on MCU). */
  char **inputs_path = (char **)malloc(sizeof(char *) * (size_t)inputs_num);
  if (inputs_path == NULL) {
    printf("inputs num is error, need: %d\n", inputs_num);
    return -1;
  }
  int i = 0;
  while ((token = strtok_r(path, delim, &path))) {
    if (i >= inputs_num) {
      printf("inputs num is error, need: %d\n", inputs_num);
      free(inputs_path);
      return -1;
    }
    inputs_path[i] = token;
    printf("input %d: %s\n", i, inputs_path[i]);
    i++;
  }
  /* Fix: with fewer paths than inputs the original read uninitialized
   * inputs_path entries below. */
  if (i != inputs_num) {
    printf("inputs num is error, need: %d\n", inputs_num);
    free(inputs_path);
    return -1;
  }
  for (i = 0; i < inputs_num; ++i) {
    int size = 0;
    buffers[i] = ReadInputData(inputs_path[i], &size);
    if (size != inputs_size[i] || buffers[i] == NULL) {
      printf("size mismatch, %s, input: %d, needed: %d\n", inputs_path[i], size, inputs_size[i]);
      free(inputs_path);
      return -1;
    }
  }
  free(inputs_path);
  return 0;
}

View File

@ -0,0 +1,35 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MICRO_EXAMPLE_LOAD_INPUT_LOAD_INPUT_H_
#define MICRO_EXAMPLE_LOAD_INPUT_LOAD_INPUT_H_
#ifdef __cplusplus
extern "C" {
#endif
/* Read a .bin/.net file into a malloc'ed buffer (caller frees); *size receives the byte count. NULL on failure. */
void *ReadInputData(const char *real_input_path, int *size);
/* Write out_size bytes as tab-separated decimal values into final_name. */
void SaveOutputData(char *final_name, unsigned char *output_data, unsigned int out_size);
/* Load each file from the comma-separated `path` list into buffers[i]; returns 0 on success, -1 on error. */
int ReadInputsFile(char *path, void **buffers, const int *inputs_size, int inputs_num);
#ifdef __cplusplus
}
#endif
#endif // MICRO_EXAMPLE_LOAD_INPUT_LOAD_INPUT_H_

View File

@ -0,0 +1,134 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_CELL_H
#define MINDSPORE_INCLUDE_API_CELL_H
#include <string>
#include <vector>
#include <map>
#include <memory>
#include "include/api/status.h"
#include "include/api/types.h"
#include "include/api/graph.h"
namespace mindspore {
class InputAndOutput;
// Graph-construction aliases: a cell's inputs and outputs share one edge type.
using Input = InputAndOutput;
using Output = InputAndOutput;
// Abstract base of all computational cells in the functional graph API.
class MS_API CellBase {
public:
CellBase() = default;
virtual ~CellBase() = default;
// Build output edges from input edges; the base implementation yields none.
virtual std::vector<Output> Construct(const std::vector<Input> &inputs) { return {}; }
// Deep-copy the concrete cell.
virtual std::shared_ptr<CellBase> Clone() const = 0;
// Execute the cell; the base implementation is a no-op returning success.
virtual Status Run(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs) { return kSuccess; }
// Invocation operator (defined out of line).
std::vector<Output> operator()(const std::vector<Input> &inputs) const;
};
// CRTP helper: implements Clone() by copy-constructing the derived type T.
template <class T>
class MS_API Cell : public CellBase {
public:
virtual ~Cell() = default;
std::shared_ptr<CellBase> Clone() const override { return std::make_shared<T>(static_cast<const T &>(*this)); }
};
// Leaf cell that wraps a single tensor value.
class MS_API ParameterCell final : public Cell<ParameterCell> {
public:
ParameterCell() = default;
~ParameterCell() override = default;
ParameterCell(const ParameterCell &);
ParameterCell &operator=(const ParameterCell &);
ParameterCell(ParameterCell &&);
ParameterCell &operator=(ParameterCell &&);
// Construct/assign directly from a tensor (copy or move).
explicit ParameterCell(const MSTensor &);
ParameterCell &operator=(const MSTensor &);
explicit ParameterCell(MSTensor &&);
ParameterCell &operator=(MSTensor &&);
MSTensor GetTensor() const { return tensor_; }
private:
MSTensor tensor_;
};
// Base for operator cells: stores the operator's type name.
class MS_API OpCellBase : public CellBase {
public:
explicit OpCellBase(const std::string &name) : name_(name) {}
~OpCellBase() override = default;
const std::string &GetOpType() const { return name_; }
protected:
std::string name_;
};
// CRTP operator cell: clones by copy-constructing the derived op type T.
template <class T>
class MS_API OpCell : public OpCellBase, public std::enable_shared_from_this<T> {
public:
explicit OpCell(const std::string &name) : OpCellBase(name) {}
~OpCell() override = default;
std::shared_ptr<CellBase> Clone() const override { return std::make_shared<T>(static_cast<const T &>(*this)); }
};
// Cell that wraps a loaded Graph and can execute it via Run().
class MS_API GraphCell final : public Cell<GraphCell> {
public:
class GraphImpl;
GraphCell() = default;
~GraphCell() override = default;
// Wrap an existing graph (copied, moved, or shared).
explicit GraphCell(const Graph &);
explicit GraphCell(Graph &&);
explicit GraphCell(const std::shared_ptr<Graph> &);
const std::shared_ptr<Graph> &GetGraph() const { return graph_; }
Status Run(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs) override;
std::vector<MSTensor> GetInputs();
std::vector<MSTensor> GetOutputs();
private:
friend class Model;
friend class ModelImpl;
// Load the graph onto a device; reserved for Model/ModelImpl.
Status Load(uint32_t device_id);
std::shared_ptr<Graph> graph_;
std::shared_ptr<GraphImpl> executor_;
};
// Graph edge node: holds a producing cell, its predecessor edges, and an
// output index within that cell.
class MS_API InputAndOutput {
public:
InputAndOutput();
~InputAndOutput() = default;
// no explicit
InputAndOutput(const MSTensor &); // NOLINT(runtime/explicit)
InputAndOutput(MSTensor &&); // NOLINT(runtime/explicit)
InputAndOutput(const std::shared_ptr<CellBase> &, const std::vector<InputAndOutput> &, int32_t index);
int32_t GetIndex() const { return index_; }
void SetIndex(int32_t index) { index_ = index; }
private:
std::shared_ptr<CellBase> cell_;
std::vector<InputAndOutput> prev_;
int32_t index_;
};
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_CELL_H

View File

@ -0,0 +1,455 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_CONTEXT_H
#define MINDSPORE_INCLUDE_API_CONTEXT_H
#include <string>
#include <memory>
#include <vector>
#include <map>
#include "include/api/types.h"
#include "include/api/dual_abi_helper.h"
namespace mindspore {
// Target hardware on which a model may execute.
enum DeviceType {
kCPU = 0,
kGPU,
kKirinNPU,
kAscend,
kAscend910,
kAscend310,
// add new type here
kInvalidDeviceType = 100,
};
class Allocator;
class Delegate;
class DeviceInfoContext;
/// \brief Context is used to store environment variables during execution.
class MS_API Context {
public:
// Opaque implementation data (pimpl); defined out of line.
struct Data;
Context();
~Context() = default;
/// \brief Set the number of threads at runtime. Only valid for Lite.
///
/// \param[in] thread_num the number of threads at runtime.
void SetThreadNum(int32_t thread_num);
/// \brief Get the current thread number setting. Only valid for Lite.
///
/// \return The current thread number setting.
int32_t GetThreadNum() const;
/// \brief Set the thread affinity to CPU cores. Only valid for Lite.
///
/// \param[in] mode: 0: no affinities, 1: big cores first, 2: little cores first
void SetThreadAffinity(int mode);
/// \brief Get the thread affinity of CPU cores. Only valid for Lite.
///
/// \return Thread affinity to CPU cores. 0: no affinities, 1: big cores first, 2: little cores first
int GetThreadAffinityMode() const;
/// \brief Set the thread lists to CPU cores. Only valid for Lite.
///
/// \note If core_list and mode are set by SetThreadAffinity at the same time, the core_list is effective, but the
/// mode is not effective.
///
/// \param[in] core_list: a vector of thread core lists.
void SetThreadAffinity(const std::vector<int> &core_list);
/// \brief Get the thread lists of CPU cores. Only valid for Lite.
///
/// \return core_list: a vector of thread core lists.
std::vector<int32_t> GetThreadAffinityCoreList() const;
/// \brief Set the status whether to perform model inference or training in parallel. Only valid for Lite.
///
/// \param[in] is_parallel: true, parallel; false, not in parallel.
void SetEnableParallel(bool is_parallel);
/// \brief Get the status whether to perform model inference or training in parallel. Only valid for Lite.
///
/// \return Bool value that indicates whether in parallel.
bool GetEnableParallel() const;
/// \brief Set Delegate to access third-party AI framework. Only valid for Lite.
///
/// \param[in] Pointer to the custom delegate.
void SetDelegate(const std::shared_ptr<Delegate> &delegate);
/// \brief Get the delegate of the third-party AI framework. Only valid for Lite.
///
/// \return Pointer to the custom delegate.
std::shared_ptr<Delegate> GetDelegate() const;
/// \brief Get a mutable reference of DeviceInfoContext vector in this context. Only MindSpore Lite supports
/// heterogeneous scenarios with multiple members in the vector.
///
/// \return Mutable reference of DeviceInfoContext vector in this context.
std::vector<std::shared_ptr<DeviceInfoContext>> &MutableDeviceInfo();
private:
// Shared pimpl storage so Context copies share one configuration.
std::shared_ptr<Data> data_;
};
/// \brief DeviceInfoContext defines different device contexts.
class MS_API DeviceInfoContext : public std::enable_shared_from_this<DeviceInfoContext> {
public:
// Opaque implementation data (pimpl); defined out of line.
struct Data;
DeviceInfoContext();
virtual ~DeviceInfoContext() = default;
/// \brief Get the type of this DeviceInfoContext.
///
/// \return Type of this DeviceInfoContext.
virtual enum DeviceType GetDeviceType() const = 0;
/// \brief A similar function to RTTI is provided when the -fno-rtti compilation option is turned on, which converts
/// DeviceInfoContext to a shared pointer of type T, and returns nullptr if the conversion fails.
///
/// \param T Type
/// \return A pointer of type T after conversion. If the conversion fails, it will be nullptr.
template <class T>
std::shared_ptr<T> Cast() {
static_assert(std::is_base_of<DeviceInfoContext, T>::value, "Wrong cast type.");
if (GetDeviceType() != T().GetDeviceType()) {
return nullptr;
}
return std::static_pointer_cast<T>(shared_from_this());
}
/// \brief obtain provider's name
///
/// \return provider's name.
std::string GetProvider() const;
/// \brief set provider's name.
///
/// \param[in] provider define the provider's name.
void SetProvider(const std::string &provider);
/// \brief obtain provider's device type.
///
/// \return provider's device type.
std::string GetProviderDevice() const;
/// \brief set provider's device type.
///
/// \param[in] device define the provider's device type.EG: CPU.
void SetProviderDevice(const std::string &device);
/// \brief set memory allocator.
///
/// \param[in] allocator define the memory allocator which can be defined by user.
void SetAllocator(const std::shared_ptr<Allocator> &allocator);
/// \brief obtain memory allocator.
///
/// \return memory allocator.
std::shared_ptr<Allocator> GetAllocator() const;
protected:
std::shared_ptr<Data> data_;
};
/// \brief Derived from DeviceInfoContext, The configuration of the model running on the CPU. This option is only valid
/// for MindSpore Lite.
class MS_API CPUDeviceInfo : public DeviceInfoContext {
public:
/// \brief Get the type of this DeviceInfoContext.
///
/// \return Type of this DeviceInfoContext.
enum DeviceType GetDeviceType() const override { return DeviceType::kCPU; };
/// \brief Set enables to perform the float16 inference
///
/// \param[in] is_fp16 Enable float16 inference or not.
void SetEnableFP16(bool is_fp16);
/// \brief Get enables to perform the float16 inference
///
/// \return Whether enable float16 inference.
bool GetEnableFP16() const;
};
/// \brief Derived from DeviceInfoContext, The configuration of the model running on the NPU. This option is only valid
/// for MindSpore Lite.
class MS_API KirinNPUDeviceInfo : public DeviceInfoContext {
public:
/// \brief Get the type of this DeviceInfoContext.
///
/// \return Type of this DeviceInfoContext.
enum DeviceType GetDeviceType() const override { return DeviceType::kKirinNPU; };
/// \brief Set the NPU frequency.
///
/// \param[in] frequency Can be set to 1 (low power consumption), 2 (balanced), 3 (high performance), 4 (extreme
/// performance), default as 3.
void SetFrequency(int frequency);
/// \brief Get the NPU frequency.
///
/// \return NPU frequency
int GetFrequency() const;
};
/// \brief Derived from DeviceInfoContext, The configuration of the model running on the GPU.
class MS_API GPUDeviceInfo : public DeviceInfoContext {
public:
/// \brief Get the type of this DeviceInfoContext.
///
/// \return Type of this DeviceInfoContext.
enum DeviceType GetDeviceType() const override { return DeviceType::kGPU; };
/// \brief Set device id.
///
/// \param[in] device_id The device id.
void SetDeviceID(uint32_t device_id);
/// \brief Get the device id.
///
/// \return The device id.
uint32_t GetDeviceID() const;
/// \brief Get the distribution rank id.
///
/// \return The rank id.
int GetRankID() const;
/// \brief Get the distribution group size.
///
/// \return The group size.
int GetGroupSize() const;
/// \brief Set the precision mode.
///
/// \param[in] precision_mode Optional "origin", "fp16". "origin" is set as default.
inline void SetPrecisionMode(const std::string &precision_mode);
/// \brief Get the precision mode.
///
/// \return The precision mode.
inline std::string GetPrecisionMode() const;
/// \brief Set enables to perform the float16 inference
///
/// \param[in] is_fp16 Enable float16 inference or not.
void SetEnableFP16(bool is_fp16);
/// \brief Get enables to perform the float16 inference
///
/// \return Whether enable float16 inference.
bool GetEnableFP16() const;
private:
// Dual-ABI backing functions: the inline std::string wrappers below convert
// through std::vector<char> to keep the exported ABI string-type free.
void SetPrecisionMode(const std::vector<char> &precision_mode);
std::vector<char> GetPrecisionModeChar() const;
};
// Dual-ABI bridge: convert the std::string to std::vector<char> before crossing the library boundary.
void GPUDeviceInfo::SetPrecisionMode(const std::string &precision_mode) {
SetPrecisionMode(StringToChar(precision_mode));
}
// Dual-ABI bridge: rebuild the std::string from the std::vector<char> returned by the library.
std::string GPUDeviceInfo::GetPrecisionMode() const { return CharToString(GetPrecisionModeChar()); }
/// \brief Derived from DeviceInfoContext, The configuration of the model running on the Ascend310. This option is
/// invalid for MindSpore Lite.
class MS_API AscendDeviceInfo : public DeviceInfoContext {
 public:
  /// \brief Get the type of this DeviceInfoContext.
  ///
  /// \return Type of this DeviceInfoContext.
  enum DeviceType GetDeviceType() const override { return DeviceType::kAscend; }  // no stray ';' after body
  /// \brief Set device id.
  ///
  /// \param[in] device_id The device id.
  void SetDeviceID(uint32_t device_id);
  /// \brief Get the device id.
  ///
  /// \return The device id.
  uint32_t GetDeviceID() const;
  /// \brief Set AIPP configuration file path.
  ///
  /// \param[in] cfg_path AIPP configuration file path.
  inline void SetInsertOpConfigPath(const std::string &cfg_path);
  /// \brief Get AIPP configuration file path.
  ///
  /// \return AIPP configuration file path.
  inline std::string GetInsertOpConfigPath() const;
  /// \brief Set format of model inputs.
  ///
  /// \param[in] format Optional "NCHW", "NHWC", etc.
  inline void SetInputFormat(const std::string &format);
  /// \brief Get format of model inputs.
  ///
  /// \return The format of model inputs.
  inline std::string GetInputFormat() const;
  /// \brief Set shape of model inputs.
  ///
  /// \param[in] shape e.g. "input_op_name1: 1,2,3,4;input_op_name2: 4,3,2,1".
  inline void SetInputShape(const std::string &shape);
  /// \brief Get shape of model inputs.
  ///
  /// \return The shape of model inputs.
  inline std::string GetInputShape() const;
  /// \brief Set shape of model inputs.
  ///
  /// \param[in] shape e.g. {{1, {1,2,3,4}}, {2, {4,3,2,1}}} means the first input shape 1,2,3,4 and the second input
  /// shape 4,3,2,1.
  void SetInputShapeMap(const std::map<int, std::vector<int>> &shape);
  /// \brief Get shape of model inputs.
  ///
  /// \return The shape of model inputs.
  std::map<int, std::vector<int>> GetInputShapeMap() const;
  /// \brief Set the dynamic batch sizes of model inputs.
  ///
  /// \param[in] dynamic_batch_size The candidate batch sizes, e.g. {1, 2, 4, 8}.
  void SetDynamicBatchSize(const std::vector<size_t> &dynamic_batch_size);
  /// \brief Get the dynamic batch sizes of model inputs.
  ///
  /// \return The candidate batch sizes rendered as a string.
  inline std::string GetDynamicBatchSize() const;
  /// \brief Set the dynamic image size of model inputs.
  ///
  /// \param[in] dynamic_image_size Image sizes hw, e.g. "66,88;32,64" means h1:66,w1:88; h2:32,w2:64.
  inline void SetDynamicImageSize(const std::string &dynamic_image_size);
  /// \brief Get dynamic image size of model inputs.
  ///
  /// \return The image size of model inputs.
  inline std::string GetDynamicImageSize() const;
  /// \brief Set type of model outputs.
  ///
  /// \param[in] output_type FP32, UINT8 or FP16, default as FP32.
  void SetOutputType(enum DataType output_type);
  /// \brief Get type of model outputs.
  ///
  /// \return The set type of model outputs.
  enum DataType GetOutputType() const;
  /// \brief Set precision mode of model.
  ///
  /// \param[in] precision_mode Optional "force_fp16", "allow_fp32_to_fp16", "must_keep_origin_dtype" and
  /// "allow_mix_precision", "force_fp16" is set as default
  inline void SetPrecisionMode(const std::string &precision_mode);
  /// \brief Get precision mode of model.
  ///
  /// \return The set precision mode.
  inline std::string GetPrecisionMode() const;
  /// \brief Set op select implementation mode.
  ///
  /// \param[in] op_select_impl_mode Optional "high_performance" and "high_precision", "high_performance" is set as
  /// default.
  inline void SetOpSelectImplMode(const std::string &op_select_impl_mode);
  /// \brief Get op select implementation mode.
  ///
  /// \return The set op select implementation mode.
  inline std::string GetOpSelectImplMode() const;
  /// \brief Set the fusion switch configuration file path.
  ///
  /// \param[in] cfg_path Fusion switch configuration file path.
  inline void SetFusionSwitchConfigPath(const std::string &cfg_path);
  /// \brief Get the fusion switch configuration file path.
  ///
  /// \return The fusion switch configuration file path.
  inline std::string GetFusionSwitchConfigPath() const;
  /// \brief Set the buffer optimize mode.
  ///
  /// \param[in] buffer_optimize_mode Optional "l1_optimize", "l2_optimize", "off_optimize" or "l1_and_l2_optimize",
  /// default as "l2_optimize".
  inline void SetBufferOptimizeMode(const std::string &buffer_optimize_mode);
  /// \brief Get the buffer optimize mode.
  ///
  /// \return The buffer optimize mode.
  inline std::string GetBufferOptimizeMode() const;

 private:
  // Dual-ABI implementations: the inline std::string accessors above forward to these
  // std::vector<char> versions so std::string never crosses the exported ABI boundary.
  void SetInsertOpConfigPath(const std::vector<char> &cfg_path);
  std::vector<char> GetInsertOpConfigPathChar() const;
  void SetInputFormat(const std::vector<char> &format);
  std::vector<char> GetInputFormatChar() const;
  void SetInputShape(const std::vector<char> &shape);
  std::vector<char> GetInputShapeChar() const;
  std::vector<char> GetDynamicBatchSizeChar() const;
  void SetDynamicImageSize(const std::vector<char> &dynamic_image_size);
  std::vector<char> GetDynamicImageSizeChar() const;
  void SetPrecisionMode(const std::vector<char> &precision_mode);
  std::vector<char> GetPrecisionModeChar() const;
  void SetOpSelectImplMode(const std::vector<char> &op_select_impl_mode);
  std::vector<char> GetOpSelectImplModeChar() const;
  void SetFusionSwitchConfigPath(const std::vector<char> &cfg_path);
  std::vector<char> GetFusionSwitchConfigPathChar() const;
  void SetBufferOptimizeMode(const std::vector<char> &buffer_optimize_mode);
  std::vector<char> GetBufferOptimizeModeChar() const;
};
// Backward-compatible aliases: Ascend310 and Ascend910 share the same configuration class.
using Ascend310DeviceInfo = AscendDeviceInfo;
using Ascend910DeviceInfo = AscendDeviceInfo;
// Inline dual-ABI bridges for AscendDeviceInfo: each std::string accessor converts to/from
// std::vector<char> (see dual_abi_helper.h) so std::string never crosses the exported boundary.
void AscendDeviceInfo::SetInsertOpConfigPath(const std::string &cfg_path) {
SetInsertOpConfigPath(StringToChar(cfg_path));
}
std::string AscendDeviceInfo::GetInsertOpConfigPath() const { return CharToString(GetInsertOpConfigPathChar()); }
void AscendDeviceInfo::SetInputFormat(const std::string &format) { SetInputFormat(StringToChar(format)); }
std::string AscendDeviceInfo::GetInputFormat() const { return CharToString(GetInputFormatChar()); }
void AscendDeviceInfo::SetInputShape(const std::string &shape) { SetInputShape(StringToChar(shape)); }
std::string AscendDeviceInfo::GetInputShape() const { return CharToString(GetInputShapeChar()); }
std::string AscendDeviceInfo::GetDynamicBatchSize() const { return CharToString(GetDynamicBatchSizeChar()); }
void AscendDeviceInfo::SetDynamicImageSize(const std::string &dynamic_image_size) {
SetDynamicImageSize(StringToChar(dynamic_image_size));
}
std::string AscendDeviceInfo::GetDynamicImageSize() const { return CharToString(GetDynamicImageSizeChar()); }
void AscendDeviceInfo::SetPrecisionMode(const std::string &precision_mode) {
SetPrecisionMode(StringToChar(precision_mode));
}
std::string AscendDeviceInfo::GetPrecisionMode() const { return CharToString(GetPrecisionModeChar()); }
void AscendDeviceInfo::SetOpSelectImplMode(const std::string &op_select_impl_mode) {
SetOpSelectImplMode(StringToChar(op_select_impl_mode));
}
std::string AscendDeviceInfo::GetOpSelectImplMode() const { return CharToString(GetOpSelectImplModeChar()); }
void AscendDeviceInfo::SetFusionSwitchConfigPath(const std::string &cfg_path) {
SetFusionSwitchConfigPath(StringToChar(cfg_path));
}
std::string AscendDeviceInfo::GetFusionSwitchConfigPath() const {
return CharToString(GetFusionSwitchConfigPathChar());
}
void AscendDeviceInfo::SetBufferOptimizeMode(const std::string &buffer_optimize_mode) {
SetBufferOptimizeMode(StringToChar(buffer_optimize_mode));
}
std::string AscendDeviceInfo::GetBufferOptimizeMode() const { return CharToString(GetBufferOptimizeModeChar()); }
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_CONTEXT_H

View File

@ -0,0 +1,43 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_DATA_TYPE_H_
#define MINDSPORE_INCLUDE_API_DATA_TYPE_H_
namespace mindspore {
// Element data types supported by the MindSpore C++ API.
// NOTE(review): the gaps in the numbering suggest these values mirror an internal TypeId
// enum — confirm before renumbering or inserting values.
enum class DataType : int {
kTypeUnknown = 0,
kObjectTypeString = 12,
kObjectTypeList = 13,
kObjectTypeTuple = 14,
kObjectTypeTensorType = 17,
kNumberTypeBool = 30,
kNumberTypeInt8 = 32,
kNumberTypeInt16 = 33,
kNumberTypeInt32 = 34,
kNumberTypeInt64 = 35,
kNumberTypeUInt8 = 37,
kNumberTypeUInt16 = 38,
kNumberTypeUInt32 = 39,
kNumberTypeUInt64 = 40,
kNumberTypeFloat16 = 42,
kNumberTypeFloat32 = 43,
kNumberTypeFloat64 = 44,
kNumberTypeEnd = 46,
// add new enum here
kInvalidType = INT32_MAX,
};
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_DATA_TYPE_H_

View File

@ -0,0 +1,164 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_DUAL_ABI_HELPER_H_
#define MINDSPORE_INCLUDE_API_DUAL_ABI_HELPER_H_
#include <algorithm>
#include <map>
#include <memory>
#include <optional>
#include <string>
#include <set>
#include <unordered_map>
#include <utility>
#include <vector>
namespace mindspore {
// Dual-ABI helpers: a std::string is passed across the library boundary as a std::vector<char>
// (no NUL terminator is appended; the length is carried by the vector).
inline std::vector<char> StringToChar(const std::string &s) { return {s.begin(), s.end()}; }
// Inverse conversion: rebuild the std::string from its std::vector<char> form.
inline std::string CharToString(const std::vector<char> &c) { return {c.begin(), c.end()}; }
// Convert an optional std::string to its dual-ABI form; an empty optional stays empty.
inline std::optional<std::vector<char>> OptionalStringToChar(const std::optional<std::string> &s) {
  if (!s.has_value()) {
    return std::nullopt;
  }
  return std::make_optional<std::vector<char>>(s->begin(), s->end());
}
// Convert an optional dual-ABI std::vector<char> back to an optional std::string.
inline std::optional<std::string> OptionalCharToString(const std::optional<std::vector<char>> &c) {
  if (!c.has_value()) {
    return std::nullopt;
  }
  return std::make_optional<std::string>(c->begin(), c->end());
}
// Convert a (string, int32) pair to its dual-ABI (vector<char>, int32) form.
inline std::pair<std::vector<char>, int32_t> PairStringToChar(const std::pair<std::string, int32_t> &s) {
  std::vector<char> chars(s.first.begin(), s.first.end());
  return std::make_pair(std::move(chars), s.second);
}
// Convert a dual-ABI (vector<char>, int32) pair back to a (string, int32) pair.
inline std::pair<std::string, int32_t> PairCharToString(const std::pair<std::vector<char>, int32_t> &c) {
  std::string text(c.first.begin(), c.first.end());
  return std::make_pair(std::move(text), c.second);
}
/// \brief Convert a vector of std::string into the dual-ABI std::vector<char> representation.
///
/// \param[in] s Strings to convert.
/// \return One std::vector<char> per input string, in the same order.
inline std::vector<std::vector<char>> VectorStringToChar(const std::vector<std::string> &s) {
  std::vector<std::vector<char>> ret;
  ret.reserve(s.size());  // exactly one output element per input; avoids reallocations
  // Take the element by const reference: the original generic by-value lambda copied every string.
  std::transform(s.begin(), s.end(), std::back_inserter(ret),
                 [](const std::string &str) { return std::vector<char>(str.begin(), str.end()); });
  return ret;
}
/// \brief Convert a vector of dual-ABI std::vector<char> back into std::string values.
///
/// \param[in] c Character vectors to convert.
/// \return One std::string per input element, in the same order.
inline std::vector<std::string> VectorCharToString(const std::vector<std::vector<char>> &c) {
  std::vector<std::string> ret;
  ret.reserve(c.size());  // exactly one output element per input; avoids reallocations
  // const reference: the original generic by-value lambda copied each char vector first.
  std::transform(c.begin(), c.end(), std::back_inserter(ret),
                 [](const std::vector<char> &ch) { return std::string(ch.begin(), ch.end()); });
  return ret;
}
/// \brief Convert a set of std::string into a set of dual-ABI std::vector<char>.
///
/// \param[in] s Strings to convert.
/// \return Converted set; ordering follows std::vector<char>'s lexicographic compare.
inline std::set<std::vector<char>> SetStringToChar(const std::set<std::string> &s) {
  std::set<std::vector<char>> ret;
  // const reference: the original generic by-value lambda copied each string before converting.
  std::transform(s.begin(), s.end(), std::inserter(ret, ret.begin()),
                 [](const std::string &str) { return std::vector<char>(str.begin(), str.end()); });
  return ret;
}
/// \brief Convert a set of dual-ABI std::vector<char> back into a set of std::string.
///
/// \param[in] c Character vectors to convert.
/// \return Converted set of strings.
inline std::set<std::string> SetCharToString(const std::set<std::vector<char>> &c) {
  std::set<std::string> ret;
  // const reference: the original generic by-value lambda copied each char vector first.
  std::transform(c.begin(), c.end(), std::inserter(ret, ret.begin()),
                 [](const std::vector<char> &ch) { return std::string(ch.begin(), ch.end()); });
  return ret;
}
/// \brief Convert a string-keyed map into a dual-ABI std::vector<char>-keyed map.
///
/// \param[in] s Map to convert; values are copied unchanged.
/// \return Converted map.
inline std::map<std::vector<char>, int32_t> MapStringToChar(const std::map<std::string, int32_t> &s) {
  std::map<std::vector<char>, int32_t> ret;
  // const auto &: the original by-value generic lambda copied each pair<const string, int32_t>.
  std::transform(s.begin(), s.end(), std::inserter(ret, ret.begin()), [](const auto &str) {
    return std::pair<std::vector<char>, int32_t>(std::vector<char>(str.first.begin(), str.first.end()), str.second);
  });
  return ret;
}
/// \brief Convert a dual-ABI std::vector<char>-keyed map back into a string-keyed map.
///
/// \param[in] c Map to convert; values are copied unchanged.
/// \return Converted map.
inline std::map<std::string, int32_t> MapCharToString(const std::map<std::vector<char>, int32_t> &c) {
  std::map<std::string, int32_t> ret;
  // const auto &: the original by-value generic lambda copied each pair before converting.
  std::transform(c.begin(), c.end(), std::inserter(ret, ret.begin()), [](const auto &ch) {
    return std::pair<std::string, int32_t>(std::string(ch.first.begin(), ch.first.end()), ch.second);
  });
  return ret;
}
/// \brief Convert an unordered string->string map into a dual-ABI map.
///
/// Despite the name, the result is a std::map: std::vector<char> has no std::hash
/// specialization, so an ordered map is used on the char side.
///
/// \param[in] s Map to convert.
/// \return Converted (ordered) map.
inline std::map<std::vector<char>, std::vector<char>> UnorderedMapStringToChar(
  const std::unordered_map<std::string, std::string> &s) {
  std::map<std::vector<char>, std::vector<char>> ret;
  // const auto &: the original by-value generic lambda copied each pair (two string copies).
  std::transform(s.begin(), s.end(), std::inserter(ret, ret.begin()), [](const auto &str) {
    return std::pair<std::vector<char>, std::vector<char>>(std::vector<char>(str.first.begin(), str.first.end()),
                                                           std::vector<char>(str.second.begin(), str.second.end()));
  });
  return ret;
}
/// \brief Convert a dual-ABI char map back into an unordered string->string map.
///
/// \param[in] c Map to convert.
/// \return Converted unordered map.
inline std::unordered_map<std::string, std::string> UnorderedMapCharToString(
  const std::map<std::vector<char>, std::vector<char>> &c) {
  std::unordered_map<std::string, std::string> ret;
  // const auto &: the original by-value generic lambda copied each pair (two vector copies).
  std::transform(c.begin(), c.end(), std::inserter(ret, ret.begin()), [](const auto &ch) {
    return std::pair<std::string, std::string>(std::string(ch.first.begin(), ch.first.end()),
                                               std::string(ch.second.begin(), ch.second.end()));
  });
  return ret;
}
/// \brief Convert (class name, indices) pairs into the dual-ABI representation.
///
/// \param[in] s Pairs to convert; index vectors are copied unchanged.
/// \return Converted pairs, in the same order.
inline std::vector<std::pair<std::vector<char>, std::vector<int32_t>>> ClassIndexStringToChar(
  const std::vector<std::pair<std::string, std::vector<int32_t>>> &s) {
  std::vector<std::pair<std::vector<char>, std::vector<int32_t>>> ret;
  ret.reserve(s.size());  // one output pair per input pair
  // const auto &: the original by-value generic lambda copied each pair (string + index vector).
  std::transform(s.begin(), s.end(), std::back_inserter(ret), [](const auto &str) {
    return std::pair<std::vector<char>, std::vector<int32_t>>(std::vector<char>(str.first.begin(), str.first.end()),
                                                              str.second);
  });
  return ret;
}
/// \brief Convert dual-ABI (class name, indices) pairs back to the std::string form.
///
/// \param[in] c Pairs to convert; index vectors are copied unchanged.
/// \return Converted pairs, in the same order.
inline std::vector<std::pair<std::string, std::vector<int32_t>>> ClassIndexCharToString(
  const std::vector<std::pair<std::vector<char>, std::vector<int32_t>>> &c) {
  std::vector<std::pair<std::string, std::vector<int32_t>>> ret;
  ret.reserve(c.size());  // one output pair per input pair
  // const auto &: the original by-value generic lambda copied each pair first.
  std::transform(c.begin(), c.end(), std::back_inserter(ret), [](const auto &ch) {
    return std::pair<std::string, std::vector<int32_t>>(std::string(ch.first.begin(), ch.first.end()), ch.second);
  });
  return ret;
}
/// \brief Convert a string-keyed pad-info map into the dual-ABI std::vector<char>-keyed form.
///
/// \tparam T Mapped pad-info value type (copied unchanged).
/// \param[in] s_pad_info Map to convert.
/// \return Converted map.
template <class T>
inline std::map<std::vector<char>, T> PadInfoStringToChar(const std::map<std::string, T> &s_pad_info) {
  std::map<std::vector<char>, T> ret;
  // const auto &: the original by-value generic lambda copied each pair<const string, T>.
  std::transform(s_pad_info.begin(), s_pad_info.end(), std::inserter(ret, ret.begin()), [](const auto &str) {
    return std::pair<std::vector<char>, T>(std::vector<char>(str.first.begin(), str.first.end()), str.second);
  });
  return ret;
}
/// \brief Convert a dual-ABI std::vector<char>-keyed pad-info map back to the string-keyed form.
///
/// \tparam T Mapped pad-info value type (copied unchanged).
/// \param[in] c_pad_info Map to convert.
/// \return Converted map.
template <class T>
inline std::map<std::string, T> PadInfoCharToString(const std::map<std::vector<char>, T> &c_pad_info) {
  std::map<std::string, T> ret;
  // const auto &: the original by-value generic lambda copied each pair first.
  std::transform(c_pad_info.begin(), c_pad_info.end(), std::inserter(ret, ret.begin()), [](const auto &ch) {
    return std::pair<std::string, T>(std::string(ch.first.begin(), ch.first.end()), ch.second);
  });
  return ret;
}
/// \brief Copy a dual-ABI char-keyed tensor map into a string-keyed unordered map.
///
/// Existing keys in *s are kept (same no-overwrite semantics as the original insert).
///
/// \tparam T Mapped tensor value type (copied unchanged).
/// \param[in] c Source map; ignored if null.
/// \param[out] s Destination map; ignored if null.
template <class T>
inline void TensorMapCharToString(const std::map<std::vector<char>, T> *c, std::unordered_map<std::string, T> *s) {
  if (c == nullptr || s == nullptr) {
    return;  // tolerate null pointers instead of dereferencing them
  }
  // const auto &: the original by-value range-for copied every (vector<char>, T) pair.
  for (const auto &ch : *c) {
    s->emplace(std::string(ch.first.begin(), ch.first.end()), ch.second);
  }
}
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_DUAL_ABI_HELPER_H_

View File

@ -0,0 +1,46 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_GRAPH_H
#define MINDSPORE_INCLUDE_API_GRAPH_H
#include <cstddef>
#include <vector>
#include <map>
#include <memory>
#include "include/api/status.h"
#include "include/api/types.h"
namespace mindspore {
/// \brief A computation graph loaded from a serialized model; a thin handle around an
/// opaque, shared GraphData implementation.
class MS_API Graph {
 public:
 // Opaque implementation type; its definition lives inside the library.
 class GraphData;
 Graph();
 // Construct from an existing (possibly shared) graph data object.
 explicit Graph(const std::shared_ptr<GraphData> &graph_data);
 explicit Graph(std::shared_ptr<GraphData> &&graph_data);
 // Construct an empty graph handle.
 explicit Graph(std::nullptr_t);
 ~Graph();
 // NOTE(review): presumably the serialized format this graph was loaded from (e.g. kMindIR) — confirm.
 enum ModelType ModelType() const;
 // Null-state comparisons against nullptr.
 bool operator==(std::nullptr_t) const;
 bool operator!=(std::nullptr_t) const;
 private:
 friend class GraphCell;
 friend class ModelImpl;
 std::shared_ptr<GraphData> graph_data_;
};
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_GRAPH_H

View File

@ -0,0 +1,80 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_MODEL_H
#define MINDSPORE_INCLUDE_API_MODEL_H
#include <string>
#include <vector>
#include <map>
#include <memory>
#include <utility>
#include "include/api/status.h"
#include "include/api/types.h"
#include "include/api/graph.h"
#include "include/api/context.h"
#include "include/api/cell.h"
#include "include/api/dual_abi_helper.h"
namespace mindspore {
class ModelImpl;
/// \brief A runnable model; non-copyable handle over a shared ModelImpl.
class MS_API Model {
 public:
 Model();
 ~Model();
 // A model owns its runtime resources; copying is disabled.
 Model(const Model &) = delete;
 void operator=(const Model &) = delete;
 // Build the model from a graph with an optional device context.
 Status Build(GraphCell graph, const std::shared_ptr<Context> &model_context = nullptr);
 // Resize model inputs to new dimensions (one dims entry per input tensor).
 Status Resize(const std::vector<MSTensor> &inputs, const std::vector<std::vector<int64_t>> &dims);
 // Run inference: fills *outputs from the given inputs.
 Status Predict(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs);
 std::vector<MSTensor> GetInputs();
 inline MSTensor GetInputByTensorName(const std::string &tensor_name);
 std::vector<MSTensor> GetOutputs();
 inline std::vector<std::string> GetOutputTensorNames();
 inline MSTensor GetOutputByTensorName(const std::string &tensor_name);
 inline std::vector<MSTensor> GetOutputsByNodeName(const std::string &tensor_name);
 // Whether this build supports the given device type / model type combination.
 static bool CheckModelSupport(enum DeviceType device_type, ModelType model_type);
 private:
 // api without std::string
 MSTensor GetInputByTensorName(const std::vector<char> &tensor_name);
 std::vector<std::vector<char>> GetOutputTensorNamesChar();
 MSTensor GetOutputByTensorName(const std::vector<char> &tensor_name);
 std::vector<MSTensor> GetOutputsByNodeName(const std::vector<char> &node_name);
 std::shared_ptr<ModelImpl> impl_;
};
// Inline dual-ABI bridges: std::string overloads forward to the std::vector<char> API.
MSTensor Model::GetInputByTensorName(const std::string &tensor_name) {
return GetInputByTensorName(StringToChar(tensor_name));
}
std::vector<std::string> Model::GetOutputTensorNames() { return VectorCharToString(GetOutputTensorNamesChar()); }
MSTensor Model::GetOutputByTensorName(const std::string &tensor_name) {
return GetOutputByTensorName(StringToChar(tensor_name));
}
std::vector<MSTensor> Model::GetOutputsByNodeName(const std::string &tensor_name) {
return GetOutputsByNodeName(StringToChar(tensor_name));
}
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_MODEL_H

View File

@ -0,0 +1,47 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_SERIALIZATION_H
#define MINDSPORE_INCLUDE_API_SERIALIZATION_H
#include <string>
#include <vector>
#include <map>
#include <memory>
#include "include/api/status.h"
#include "include/api/types.h"
#include "include/api/model.h"
#include "include/api/graph.h"
#include "include/api/dual_abi_helper.h"
namespace mindspore {
/// \brief Static helpers for loading/exporting models and checkpoint parameters.
class MS_API Serialization {
 public:
 // Load a serialized model from an in-memory buffer into *graph.
 static Status Load(const void *model_data, size_t data_size, ModelType model_type, Graph *graph);
 // Load a serialized model from a file path into *graph.
 inline static Status Load(const std::string &file, ModelType model_type, Graph *graph);
 // Load checkpoint parameters (name -> raw buffer) from a checkpoint file.
 static Status LoadCheckPoint(const std::string &ckpt_file, std::map<std::string, Buffer> *parameters);
 static Status SetParameters(const std::map<std::string, Buffer> &parameters, Model *model);
 // Export a model either into an in-memory buffer or to a file.
 static Status ExportModel(const Model &model, ModelType model_type, Buffer *model_data);
 static Status ExportModel(const Model &model, ModelType model_type, const std::string &model_file);
 private:
 // api without std::string (dual-ABI)
 static Status Load(const std::vector<char> &file, ModelType model_type, Graph *graph);
};
// Inline dual-ABI bridge: forward the std::string path as std::vector<char>.
Status Serialization::Load(const std::string &file, ModelType model_type, Graph *graph) {
return Load(StringToChar(file), model_type, graph);
}
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_SERIALIZATION_H

View File

@ -0,0 +1,164 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_STATUS_H
#define MINDSPORE_INCLUDE_API_STATUS_H
#include <memory>
#include <string>
#include <vector>
#include <ostream>
#include <climits>
#include "include/api/dual_abi_helper.h"
#include "include/api/types.h"
namespace mindspore {
// Component codes occupy the high nibble; each subsystem's status codes below are
// ORed with its component code (e.g. kMDOutOfMemory = kMD | 1).
enum CompCode : uint32_t {
kCore = 0x00000000u,
kMD = 0x10000000u,
kME = 0x20000000u,
kMC = 0x30000000u,
kLite = 0xF0000000u,
};
// Status codes, partitioned by component (see CompCode). Lite codes embed small
// negative values in the low 28 bits via (0x0FFFFFFF & -n).
enum StatusCode : uint32_t {
kSuccess = 0,
// Core
kCoreFailed = kCore | 0x1,
// MD
kMDOutOfMemory = kMD | 1,
kMDShapeMisMatch = kMD | 2,
kMDInterrupted = kMD | 3,
kMDNoSpace = kMD | 4,
kMDPyFuncException = kMD | 5,
kMDDuplicateKey = kMD | 6,
kMDPythonInterpreterFailure = kMD | 7,
kMDTDTPushFailure = kMD | 8,
kMDFileNotExist = kMD | 9,
kMDProfilingError = kMD | 10,
kMDBoundingBoxOutOfBounds = kMD | 11,
kMDBoundingBoxInvalidShape = kMD | 12,
kMDSyntaxError = kMD | 13,
kMDTimeOut = kMD | 14,
kMDBuddySpaceFull = kMD | 15,
kMDNetWorkError = kMD | 16,
kMDNotImplementedYet = kMD | 17,
// Make this error code the last one. Add new error code above it.
kMDUnexpectedError = kMD | 127,
// ME
kMEFailed = kME | 0x1,
kMEInvalidInput = kME | 0x2,
// MC
kMCFailed = kMC | 0x1,
kMCDeviceError = kMC | 0x2,
kMCInvalidInput = kMC | 0x3,
kMCInvalidArgs = kMC | 0x4,
// Lite: common error code, range: [-1, -100)
kLiteError = kLite | (0x0FFFFFFF & -1), /**< Common error code. */
kLiteNullptr = kLite | (0x0FFFFFFF & -2), /**< NULL pointer returned.*/
kLiteParamInvalid = kLite | (0x0FFFFFFF & -3), /**< Invalid parameter.*/
kLiteNoChange = kLite | (0x0FFFFFFF & -4), /**< No change. */
kLiteSuccessExit = kLite | (0x0FFFFFFF & -5), /**< No error but exit. */
kLiteMemoryFailed = kLite | (0x0FFFFFFF & -6), /**< Fail to create memory. */
kLiteNotSupport = kLite | (0x0FFFFFFF & -7), /**< Fail to support. */
kLiteThreadPoolError = kLite | (0x0FFFFFFF & -8), /**< Error occur in thread pool. */
// Executor error code, range: [-100,-200)
kLiteOutOfTensorRange = kLite | (0x0FFFFFFF & -100), /**< Failed to check range. */
kLiteInputTensorError = kLite | (0x0FFFFFFF & -101), /**< Failed to check input tensor. */
kLiteReentrantError = kLite | (0x0FFFFFFF & -102), /**< Exist executor running. */
// Graph error code, range: [-200,-300)
kLiteGraphFileError = kLite | (0x0FFFFFFF & -200), /**< Failed to verify graph file. */
// Node error code, range: [-300,-400)
kLiteNotFindOp = kLite | (0x0FFFFFFF & -300), /**< Failed to find operator. */
kLiteInvalidOpName = kLite | (0x0FFFFFFF & -301), /**< Invalid operator name. */
kLiteInvalidOpAttr = kLite | (0x0FFFFFFF & -302), /**< Invalid operator attr. */
kLiteOpExecuteFailure = kLite | (0x0FFFFFFF & -303), /**< Failed to execution operator. */
// Tensor error code, range: [-400,-500)
kLiteFormatError = kLite | (0x0FFFFFFF & -400), /**< Failed to checking tensor format. */
// InferShape error code, range: [-500,-600)
kLiteInferError = kLite | (0x0FFFFFFF & -500), /**< Failed to infer shape. */
kLiteInferInvalid = kLite | (0x0FFFFFFF & -501), /**< Invalid infer shape before runtime. */
// User input param error code, range: [-600,-700)
kLiteInputParamInvalid = kLite | (0x0FFFFFFF & -600), /**< Invalid input param by user. */
};
/// \brief Carries a status code plus optional message and source location; cheap to
/// copy (state lives in a shared Data block).
class MS_API Status {
 public:
 Status();
 inline Status(enum StatusCode status_code, const std::string &status_msg = ""); // NOLINT(runtime/explicit)
 inline Status(const StatusCode code, int line_of_code, const char *file_name, const std::string &extra = "");
 ~Status() = default;
 enum StatusCode StatusCode() const;
 inline std::string ToString() const;
 int GetLineOfCode() const;
 inline std::string GetErrDescription() const;
 // Sets the error description and returns the resulting description string.
 inline std::string SetErrDescription(const std::string &err_description);
 friend std::ostream &operator<<(std::ostream &os, const Status &s);
 bool operator==(const Status &other) const;
 bool operator==(enum StatusCode other_code) const;
 bool operator!=(const Status &other) const;
 bool operator!=(enum StatusCode other_code) const;
 // NOTE(review): operator bool presumably mirrors IsOk() and operator int the raw code — confirm in the impl.
 explicit operator bool() const;
 explicit operator int() const;
 static Status OK();
 bool IsOk() const;
 bool IsError() const;
 static inline std::string CodeAsString(enum StatusCode c);
 private:
 // api without std::string
 explicit Status(enum StatusCode status_code, const std::vector<char> &status_msg);
 Status(const enum StatusCode code, int line_of_code, const char *file_name, const std::vector<char> &extra);
 std::vector<char> ToCString() const;
 std::vector<char> GetErrDescriptionChar() const;
 std::vector<char> SetErrDescription(const std::vector<char> &err_description);
 static std::vector<char> CodeAsCString(enum StatusCode c);
 struct Data;
 std::shared_ptr<Data> data_;
};
// Inline dual-ABI bridges between the std::string and std::vector<char> overloads.
Status::Status(enum StatusCode status_code, const std::string &status_msg)
: Status(status_code, StringToChar(status_msg)) {}
Status::Status(const enum StatusCode code, int line_of_code, const char *file_name, const std::string &extra)
: Status(code, line_of_code, file_name, StringToChar(extra)) {}
std::string Status::ToString() const { return CharToString(ToCString()); }
std::string Status::GetErrDescription() const { return CharToString(GetErrDescriptionChar()); }
std::string Status::SetErrDescription(const std::string &err_description) {
return CharToString(SetErrDescription(StringToChar(err_description)));
}
std::string Status::CodeAsString(enum StatusCode c) { return CharToString(CodeAsCString(c)); }
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_STATUS_H

View File

@ -0,0 +1,137 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_TYPES_H
#define MINDSPORE_INCLUDE_API_TYPES_H
#include <cstddef>
#include <string>
#include <vector>
#include <memory>
#include "include/api/data_type.h"
#include "include/api/dual_abi_helper.h"
#ifdef _WIN32
#define MS_API __declspec(dllexport)
#else
#define MS_API __attribute__((visibility("default")))
#endif
namespace mindspore {
// Serialized model formats accepted by Serialization::Load / Model building.
enum ModelType : uint32_t {
kMindIR = 0,
kAIR = 1,
kOM = 2,
kONNX = 3,
// insert new data type here
kUnknownType = 0xFFFFFFFF
};
/// \brief A tensor handle (pimpl over a shared Impl). Pointers returned by the static
/// factory methods must be released with DestroyTensorPtr.
class MS_API MSTensor {
 public:
 class Impl;
 // Factories: create a tensor with the given name/type/shape and data.
 // NOTE(review): CreateTensor presumably copies `data` while CreateRefTensor references the
 // caller's buffer without copying — confirm the lifetime contract in the implementation.
 static inline MSTensor *CreateTensor(const std::string &name, DataType type, const std::vector<int64_t> &shape,
 const void *data, size_t data_len) noexcept;
 static inline MSTensor *CreateRefTensor(const std::string &name, DataType type, const std::vector<int64_t> &shape,
 const void *data, size_t data_len) noexcept;
 // Pack a list of strings into a tensor / unpack a tensor back into strings.
 static inline MSTensor *StringsToTensor(const std::string &name, const std::vector<std::string> &str);
 static inline std::vector<std::string> TensorToStrings(const MSTensor &tensor);
 // Release a tensor returned by the factory methods above.
 static void DestroyTensorPtr(MSTensor *tensor) noexcept;
 MSTensor();
 explicit MSTensor(const std::shared_ptr<Impl> &impl);
 inline MSTensor(const std::string &name, DataType type, const std::vector<int64_t> &shape, const void *data,
 size_t data_len);
 explicit MSTensor(std::nullptr_t);
 ~MSTensor();
 inline std::string Name() const;
 enum DataType DataType() const;
 const std::vector<int64_t> &Shape() const;
 int64_t ElementNum() const;
 std::shared_ptr<const void> Data() const;
 void *MutableData();
 size_t DataSize() const;
 // NOTE(review): presumably true when the tensor memory resides on a device rather than host — confirm.
 bool IsDevice() const;
 // Returns a heap-allocated copy; release with DestroyTensorPtr.
 MSTensor *Clone() const;
 bool operator==(std::nullptr_t) const;
 bool operator!=(std::nullptr_t) const;
 private:
 // api without std::string
 static MSTensor *CreateTensor(const std::vector<char> &name, enum DataType type, const std::vector<int64_t> &shape,
 const void *data, size_t data_len) noexcept;
 static MSTensor *CreateRefTensor(const std::vector<char> &name, enum DataType type, const std::vector<int64_t> &shape,
 const void *data, size_t data_len) noexcept;
 static MSTensor *CharStringsToTensor(const std::vector<char> &name, const std::vector<std::vector<char>> &str);
 static std::vector<std::vector<char>> TensorToStringChars(const MSTensor &tensor);
 MSTensor(const std::vector<char> &name, enum DataType type, const std::vector<int64_t> &shape, const void *data,
 size_t data_len);
 std::vector<char> CharName() const;
 friend class ModelImpl;
 std::shared_ptr<Impl> impl_;
};
/// \brief A block of raw bytes (pimpl over a shared Impl), used for model data and
/// checkpoint parameters.
class MS_API Buffer {
 public:
 Buffer();
 // Construct from raw bytes. NOTE(review): presumably copies the data — confirm in the impl.
 Buffer(const void *data, size_t data_len);
 ~Buffer();
 const void *Data() const;
 void *MutableData();
 size_t DataSize() const;
 // Resize/replace the contents; both return false on failure.
 bool ResizeData(size_t data_len);
 bool SetData(const void *data, size_t data_len);
 // Clone this buffer into a new, independent Buffer.
 Buffer Clone() const;
 private:
 class Impl;
 std::shared_ptr<Impl> impl_;
};
// Inline dual-ABI bridges for MSTensor: std::string overloads forward to the
// std::vector<char> implementations so std::string never crosses the exported boundary.
MSTensor *MSTensor::CreateTensor(const std::string &name, enum DataType type, const std::vector<int64_t> &shape,
const void *data, size_t data_len) noexcept {
return CreateTensor(StringToChar(name), type, shape, data, data_len);
}
MSTensor *MSTensor::CreateRefTensor(const std::string &name, enum DataType type, const std::vector<int64_t> &shape,
const void *data, size_t data_len) noexcept {
return CreateRefTensor(StringToChar(name), type, shape, data, data_len);
}
MSTensor *MSTensor::StringsToTensor(const std::string &name, const std::vector<std::string> &str) {
return CharStringsToTensor(StringToChar(name), VectorStringToChar(str));
}
std::vector<std::string> MSTensor::TensorToStrings(const MSTensor &tensor) {
return VectorCharToString(TensorToStringChars(tensor));
}
MSTensor::MSTensor(const std::string &name, enum DataType type, const std::vector<int64_t> &shape, const void *data,
size_t data_len)
: MSTensor(StringToChar(name), type, shape, data, data_len) {}
std::string MSTensor::Name() const { return CharToString(CharName()); }
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_TYPES_H

View File

@ -0,0 +1,74 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
 * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INCLUDE_CONTEXT_H_
#define MINDSPORE_LITE_INCLUDE_CONTEXT_H_
#include "include/ms_tensor.h"
#include "include/lite_utils.h"
#include "include/lite_types.h"
namespace mindspore::lite {
/// \brief CpuDeviceInfo defined for CPU's configuration information.
typedef struct {
  bool enable_float16_ = false; /**< prior enable float16 inference */
  CpuBindMode cpu_bind_mode_ = MID_CPU; /**< core-binding strategy, see lite_types.h */
} CpuDeviceInfo;
/// \brief GpuDeviceInfo defined for GPU's configuration information.
typedef struct {
  bool enable_float16_ = false; /**< prior enable float16 inference */
} GpuDeviceInfo;
/// \brief NpuDeviceInfo defined for NPU's configuration information.
typedef struct {
  int frequency_ = 3; /**< npu frequency inference */
} NpuDeviceInfo;
/// \brief DeviceInfo defined for backend's configuration information.
#ifdef NOT_USE_STL
// STL-free (micro) builds support only the CPU backend, so a plain struct is
// used instead of the union below.
// DeviceInfo() is implicitly deleted because
// the default definition of union struct would be ill-formed
struct DeviceInfo {
  CpuDeviceInfo cpu_device_info_;
};
#else
// Exactly one member is meaningful; it is selected by DeviceContext::device_type_.
union DeviceInfo {
  CpuDeviceInfo cpu_device_info_;
  GpuDeviceInfo gpu_device_info_;
  NpuDeviceInfo npu_device_info_;
};
#endif  // NOT_USE_STL
/// \brief DeviceContext defined for holding backend's configuration information.
struct DeviceContext {
  DeviceType device_type_ = DT_CPU; /**< which backend this entry configures */
  DeviceInfo device_info_; /**< backend-specific settings; must match device_type_ */
};
/// \brief Context defined for holding environment variables during runtime.
struct Context {
  String vendor_name_;
  int thread_num_ = 2; /**< thread number config for thread pool */
  AllocatorPtr allocator = nullptr; /**< custom allocator; nullptr selects the default */
#ifndef NOT_USE_STL
  DeviceContextVector device_list_ = {{DT_CPU, {false, MID_CPU}}}; /**< defaults to one CPU entry */
#else
  DeviceContextVector device_list_;
#endif  // NOT_USE_STL
};
} // namespace mindspore::lite
#endif // MINDSPORE_LITE_INCLUDE_CONTEXT_H_

View File

@ -0,0 +1,74 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INCLUDE_ERRORCODE_H_
#define MINDSPORE_LITE_INCLUDE_ERRORCODE_H_
#include "include/lite_utils.h"
namespace mindspore {
namespace lite {
/// \brief STATUS defined for holding error code in MindSpore Lite.
using STATUS = int;
/* Success */
constexpr int RET_OK = 0; /**< No error occurs. */
/* Common error code, range: [-1, -100) */
constexpr int RET_ERROR = -1; /**< Common error code. */
constexpr int RET_NULL_PTR = -2; /**< NULL pointer returned.*/
constexpr int RET_PARAM_INVALID = -3; /**< Invalid parameter.*/
constexpr int RET_NO_CHANGE = -4; /**< No change. */
constexpr int RET_SUCCESS_EXIT = -5; /**< No error but exit. */
constexpr int RET_MEMORY_FAILED = -6; /**< Fail to create memory. */
constexpr int RET_NOT_SUPPORT = -7; /**< Fail to support. */
constexpr int RET_THREAD_POOL_ERROR = -8; /**< Error occur in thread pool. */
/* Executor error code, range: [-100,-200) */
constexpr int RET_OUT_OF_TENSOR_RANGE = -100; /**< Failed to check range. */
constexpr int RET_INPUT_TENSOR_ERROR = -101; /**< Failed to check input tensor. */
constexpr int RET_REENTRANT_ERROR = -102; /**< Exist executor running. */
/* Graph error code, range: [-200,-300) */
constexpr int RET_GRAPH_FILE_ERR = -200; /**< Failed to verify graph file. */
/* Node error code, range: [-300,-400) */
constexpr int RET_NOT_FIND_OP = -300; /**< Failed to find operator. */
constexpr int RET_INVALID_OP_NAME = -301; /**< Invalid operator name. */
constexpr int RET_INVALID_OP_ATTR = -302; /**< Invalid operator attr. */
constexpr int RET_OP_EXECUTE_FAILURE = -303; /**< Failed to execution operator. */
/* Tensor error code, range: [-400,-500) */
constexpr int RET_FORMAT_ERR = -400; /**< Failed to checking tensor format. */
/* InferShape error code, range: [-500,-600) */
constexpr int RET_INFER_ERR = -500; /**< Failed to infer shape. */
constexpr int RET_INFER_INVALID = -501; /**< Invalid infer shape before runtime. */
/* User input param error code, range: [-600, -700) */
constexpr int RET_INPUT_PARAM_INVALID = -600; /**< Invalid input param by user. */
/// \brief Print description of errorcode.
///
/// \param[in] error_code define return status of procedure.
///
/// \return String of errorcode info.
String GetErrorInfo(STATUS error_code);
} // namespace lite
} // namespace mindspore
#endif // MINDSPORE_LITE_INCLUDE_ERRORCODE_H_

View File

@ -0,0 +1,95 @@
/**
* This is the C++ adaptation and derivative work of Myia (https://github.com/mila-iqia/myia/).
*
* Copyright 2019-2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CORE_IR_DTYPE_TYPE_ID_H_
#define MINDSPORE_CORE_IR_DTYPE_TYPE_ID_H_
namespace mindspore {
//
// Supported meta type
//
enum TypeId : int {
kTypeUnknown = 0,
kMetaTypeBegin = kTypeUnknown,
kMetaTypeType, // Type
kMetaTypeAnything,
kMetaTypeObject,
kMetaTypeTypeType, // TypeType
kMetaTypeProblem,
kMetaTypeExternal,
kMetaTypeNone,
kMetaTypeNull,
kMetaTypeEllipsis,
kMetaTypeEnd,
//
// Object types
//
kObjectTypeBegin = kMetaTypeEnd,
kObjectTypeNumber,
kObjectTypeString,
kObjectTypeList,
kObjectTypeTuple,
kObjectTypeSlice,
kObjectTypeKeyword,
kObjectTypeTensorType,
kObjectTypeRowTensorType,
kObjectTypeCOOTensorType,
kObjectTypeUndeterminedType,
kObjectTypeClass,
kObjectTypeDictionary,
kObjectTypeFunction,
kObjectTypeJTagged,
kObjectTypeSymbolicKeyType,
kObjectTypeEnvType,
kObjectTypeRefKey,
kObjectTypeRef,
kObjectTypeEnd,
//
// Number Types
//
kNumberTypeBegin = kObjectTypeEnd,
kNumberTypeBool,
kNumberTypeInt,
kNumberTypeInt8,
kNumberTypeInt16,
kNumberTypeInt32,
kNumberTypeInt64,
kNumberTypeUInt,
kNumberTypeUInt8,
kNumberTypeUInt16,
kNumberTypeUInt32,
kNumberTypeUInt64,
kNumberTypeFloat,
kNumberTypeFloat16,
kNumberTypeFloat32,
kNumberTypeFloat64,
kNumberTypeComplex64,
kNumberTypeEnd,
//
// Monad Types
//
// Monad types is placed at the end of enum,
// in order to keep fit with the type of existing model on the lite side.
kMonadTypeBegin = kNumberTypeEnd,
kObjectTypeMonad,
kObjectTypeUMonad,
kObjectTypeIOMonad,
kMonadTypeEnd
};
} // namespace mindspore
#endif // MINDSPORE_CORE_IR_DTYPE_TYPE_ID_H_

View File

@ -0,0 +1,46 @@
/**
* This is the C++ adaptation and derivative work of Myia (https://github.com/mila-iqia/myia/).
*
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CORE_IR_FORMAT_H_
#define MINDSPORE_CORE_IR_FORMAT_H_
#include <cstdint>
namespace mindspore {
// Tensor memory-layout identifiers (N=batch, C=channel, H=height, W=width,
// D=depth, K=kernel; a trailing 4 denotes channel blocks of 4). Values are
// part of the serialized model format, so existing entries must never be
// renumbered — note NUM_OF_FORMAT was frozen at 14 before NCDHW/NWC/NCW
// were appended after it.
enum Format : int64_t {
  NCHW = 0,
  NHWC = 1,
  NHWC4 = 2,
  HWKC = 3,
  HWCK = 4,
  KCHW = 5,
  CKHW = 6,
  KHWC = 7,
  CHWK = 8,
  HW = 9,
  HW4 = 10,
  NC = 11,
  NC4 = 12,
  NC4HW4 = 13,
  NUM_OF_FORMAT = 14,
  NCDHW = 15,
  NWC = 16,
  NCW = 17
};
} // namespace mindspore
#endif // MINDSPORE_CORE_IR_FORMAT_H_

View File

@ -0,0 +1,125 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INCLUDE_LITE_SESSION_H
#define MINDSPORE_LITE_INCLUDE_LITE_SESSION_H
#ifndef NOT_USE_STL
#include <unordered_map>
#endif // NOT_USE_STL
#include "include/ms_tensor.h"
#include "include/model.h"
#include "include/context.h"
namespace mindspore {
namespace session {
/// \brief LiteSession defined session in MindSpore Lite for compiling Model and forwarding model.
class MS_API LiteSession {
 public:
  /// \brief Static method to create a LiteSession pointer.
  ///
  /// \param[in] context Define the context of session to be created.
  ///
  /// \return Pointer of MindSpore Lite LiteSession.
  static LiteSession *CreateSession(const lite::Context *context);
  /// \brief Static method to create a LiteSession pointer which has already compiled a model.
  ///
  /// \param[in] model_buf Define the buffer read from a model file.
  /// \param[in] size Define bytes number of model buffer.
  /// \param[in] context Define the context of session to be created.
  ///
  /// \return Pointer of MindSpore Lite LiteSession.
  static LiteSession *CreateSession(const char *model_buf, size_t size, const lite::Context *context);
  /// \brief Destructor of MindSpore Lite LiteSession.
  virtual ~LiteSession() = default;
  /// \brief Attempt to bind or unbind threads in the thread pool to or from the specified cpu core.
  ///
  /// \param[in] if_bind Define whether to bind or unbind threads.
  virtual void BindThread(bool if_bind) = 0;
  /// \brief Compile MindSpore Lite model.
  ///
  /// \note CompileGraph should be called before RunGraph.
  ///
  /// \param[in] model Define the model to be compiled.
  ///
  /// \return STATUS as an error code of compiling graph, STATUS is defined in errorcode.h.
  virtual int CompileGraph(lite::Model *model) = 0;
  /// \brief Get input MindSpore Lite MSTensors of model.
  ///
  /// \return The vector of MindSpore Lite MSTensor.
  virtual Vector<tensor::MSTensor *> GetInputs() const = 0;
  /// \brief Get input MindSpore Lite MSTensor of model by tensor name.
  ///
  /// \param[in] tensor_name Define tensor name.
  ///
  /// \return Pointer of MindSpore Lite MSTensor.
  virtual mindspore::tensor::MSTensor *GetInputsByTensorName(const String &tensor_name) const = 0;
  /// \brief Run session with callback.
  ///
  /// \param[in] before Define a call_back_function to be called before running each node.
  /// \param[in] after Define a call_back_function called after running each node.
  ///
  /// \note RunGraph should be called after CompileGraph.
  ///
  /// \return STATUS as an error code of running graph, STATUS is defined in errorcode.h.
  virtual int RunGraph(const KernelCallBack &before = nullptr, const KernelCallBack &after = nullptr) = 0;
  /// \brief Get output MindSpore Lite MSTensors of model by node name.
  ///
  /// \param[in] node_name Define node name.
  ///
  /// \note Deprecated, replace with GetOutputByTensorName
  ///
  /// \return The vector of MindSpore Lite MSTensor.
  virtual Vector<tensor::MSTensor *> GetOutputsByNodeName(const String &node_name) const = 0;
#ifndef NOT_USE_STL
  /// \brief Get output MindSpore Lite MSTensors of model mapped by tensor name.
  ///
  /// \return The map of output tensor name and MindSpore Lite MSTensor.
  virtual std::unordered_map<String, mindspore::tensor::MSTensor *> GetOutputs() const = 0;
#endif
  /// \brief Get name of output tensors of model compiled by this session.
  ///
  /// \return The vector of string as output tensor names in order.
  virtual Vector<String> GetOutputTensorNames() const = 0;
  /// \brief Get output MindSpore Lite MSTensors of model by tensor name.
  ///
  /// \param[in] tensor_name Define tensor name.
  ///
  /// \return Pointer of MindSpore Lite MSTensor.
  virtual mindspore::tensor::MSTensor *GetOutputByTensorName(const String &tensor_name) const = 0;
  /// \brief Resize inputs shape.
  ///
  /// \param[in] inputs Define the inputs of the model.
  /// \param[in] dims Define the inputs new shape.
  ///
  /// \return STATUS as an error code of resize inputs, STATUS is defined in errorcode.h.
  virtual int Resize(const Vector<tensor::MSTensor *> &inputs, const Vector<Vector<int>> &dims) = 0;
};
} // namespace session
} // namespace mindspore
#endif // MINDSPORE_LITE_INCLUDE_LITE_SESSION_H

View File

@ -0,0 +1,36 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
 * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INCLUDE_LITE_TYPES_H_
#define MINDSPORE_LITE_INCLUDE_LITE_TYPES_H_
namespace mindspore::lite {
/// \brief CpuBindMode defined for holding bind cpu strategy argument.
/// Controls how worker threads are pinned to cores (big/little scheduling).
typedef enum {
  NO_BIND,    /**< no bind */
  HIGHER_CPU, /**< bind higher cpu first */
  MID_CPU     /**< bind middle cpu first */
} CpuBindMode;
/// \brief DeviceType defined for holding user's preferred backend.
/// Selects which DeviceInfo union member of DeviceContext is active.
typedef enum {
  DT_CPU, /**< CPU device type */
  DT_GPU, /**< GPU device type */
  DT_NPU  /**< NPU device type */
} DeviceType;
} // namespace mindspore::lite
#endif // MINDSPORE_LITE_INCLUDE_LITE_TYPES_H_

View File

@ -0,0 +1,666 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INCLUDE_LITE_UTILS_H_
#define MINDSPORE_LITE_INCLUDE_LITE_UTILS_H_
#ifndef NOT_USE_STL
#include <vector>
#include <string>
#include <memory>
#include <functional>
#else
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <stddef.h>
#include <stdio.h>
#include <float.h>
#include <new>
#endif // NOT_USE_STL
#ifndef MS_API
#ifdef _WIN32
#define MS_API __declspec(dllexport)
#else
#define MS_API __attribute__((visibility("default")))
#endif
#endif
namespace mindspore {
namespace schema {
struct Tensor;
} // namespace schema
namespace tensor {
class MSTensor;
} // namespace tensor
namespace lite {
struct DeviceContext;
} // namespace lite
#ifdef NOT_USE_STL
#define MS_C_EXCEPTION(...) exit(1)
// Minimal std::string replacement for STL-free (NOT_USE_STL) builds, e.g.
// bare-metal micro targets. Owns a heap-allocated, NUL-terminated buffer.
// Allocation failure aborts via MS_C_EXCEPTION rather than throwing.
//
// Fixes over the previous revision:
//  * The non-standard (C11 Annex K) strncpy_s/strncat_s calls are replaced
//    with memcpy: Annex K is optional and absent from the GCC/Clang
//    toolchains used for micro targets, so those lines did not compile there
//    (the rest of the class already used plain strncpy/strncat).
//  * operator=(const char *) allocated after freeing buffer_, which was a
//    use-after-free when the argument aliased this string's own buffer.
//  * append(count, ch) ignored `count` and appended a single character.
class String {
 public:
  /// \brief Construct an empty string ("").
  String() {
    buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * 1));
    if (buffer_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    buffer_[0] = '\0';
    size_ = 0;
  }
  /// \brief Construct a string of `count` copies of `ch`.
  String(size_t count, char ch) {
    buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * (count + 1)));
    if (buffer_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    memset(buffer_, ch, count);
    buffer_[count] = '\0';
    size_ = count;
  }
  /// \brief Construct from at most `count` leading characters of `s`
  /// (nullptr yields "").
  String(const char *s, size_t count) {
    if (s == nullptr) {
      buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * 1));
      if (buffer_ == nullptr) {
        MS_C_EXCEPTION("malloc data failed");
      }
      buffer_[0] = '\0';
      size_ = 0;
      return;
    }
    size_t size_s = strlen(s);
    if (size_s <= count) {
      size_ = size_s;
    } else {
      size_ = count;
    }
    buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * (size_ + 1)));
    if (buffer_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    memcpy(buffer_, s, size_);  // size_ <= strlen(s), so no NUL is copied here
    buffer_[size_] = '\0';
  }
  /// \brief Construct from a NUL-terminated C string (nullptr yields "").
  explicit String(const char *s) {
    if (s == nullptr) {
      buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * 1));
      if (buffer_ == nullptr) {
        MS_C_EXCEPTION("malloc data failed");
      }
      buffer_[0] = '\0';
      size_ = 0;
      return;
    }
    size_ = strlen(s);
    buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * (size_ + 1)));
    if (buffer_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    memcpy(buffer_, s, size_ + 1);  // +1 copies the terminator too
  }
  /// \brief Copy constructor.
  String(const String &other) {
    buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * (other.size_ + 1)));
    if (buffer_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    size_ = other.size_;
    memcpy(buffer_, other.buffer_, size_ + 1);
  }
  /// \brief Substring copy: characters [pos, pos + count) of `other`.
  /// `pos` past the end yields ""; count == npos (or overlong) means
  /// "to the end of other".
  String(const String &other, size_t pos, size_t count = npos) {
    if (pos >= other.size_) {
      buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * 1));
      if (buffer_ == nullptr) {
        MS_C_EXCEPTION("malloc data failed");
      }
      buffer_[0] = '\0';
      size_ = 0;
    } else {
      if (count == npos) {
        count = other.size_ - pos;
      }
      if (pos + count > other.size_) {
        size_ = other.size_ - pos;
      } else {
        size_ = count;
      }
      buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * (size_ + 1)));
      if (buffer_ == nullptr) {
        MS_C_EXCEPTION("malloc data failed");
      }
      // memcpy replaces the non-standard strncpy_s used previously.
      memcpy(buffer_, other.buffer_ + pos, size_);
      buffer_[size_] = '\0';
    }
  }
  ~String() { free(buffer_); }
  /// \brief Copy assignment (self-assignment safe).
  String &operator=(const String &str) {
    if (this == &str) {
      return *this;
    }
    free(buffer_);
    buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * (str.size_ + 1)));
    if (buffer_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    size_ = str.size_;
    memcpy(buffer_, str.buffer_, size_ + 1);
    return *this;
  }
  /// \brief Assign from a C string; nullptr clears to "". The new buffer is
  /// filled before the old one is freed, so assigning from a pointer into
  /// this string's own buffer (e.g. its c_str()) is safe.
  String &operator=(const char *str) {
    if (str == nullptr) {
      free(buffer_);
      buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * 1));
      if (buffer_ == nullptr) {
        MS_C_EXCEPTION("malloc data failed");
      }
      buffer_[0] = '\0';
      size_ = 0;
      return *this;
    }
    size_t size_s = strlen(str);
    char *tmp = reinterpret_cast<char *>(malloc(sizeof(char) * (size_s + 1)));
    if (tmp == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    memcpy(tmp, str, size_s + 1);
    free(buffer_);
    buffer_ = tmp;
    size_ = size_s;
    return *this;
  }
  /// \brief Bounds-checked element access; aborts when pos is out of range.
  char &at(size_t pos) {
    if (pos >= size_) {
      MS_C_EXCEPTION("pos out of range");
    }
    return buffer_[pos];
  }
  const char &at(size_t pos) const {
    if (pos >= size_) {
      MS_C_EXCEPTION("pos out of range");
    }
    return buffer_[pos];
  }
  // operator[] is also bounds-checked here (unlike std::string).
  inline char &operator[](size_t pos) { return this->at(pos); }
  inline const char &operator[](size_t pos) const { return this->at(pos); }
  char *data() noexcept { return buffer_; }
  const char *data() const noexcept { return buffer_; }
  const char *c_str() const noexcept { return buffer_; }
  // capacity
  bool empty() const noexcept { return size_ == 0; }
  size_t size() const noexcept { return size_; }
  size_t length() const noexcept { return size_; }
  // operations
  /// \brief Reset to ""; the buffer is reallocated to minimal size.
  void clear() noexcept {
    free(buffer_);
    buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * 1));
    if (buffer_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    buffer_[0] = '\0';
    size_ = 0;
  }
  /// \brief Append `count` copies of `ch` (the old code appended only one).
  String &append(size_t count, const char ch) {
    for (size_t i = 0; i < count; ++i) {
      (*this) += ch;
    }
    return *this;
  }
  String &append(const String &str) {
    (*this) += str;
    return *this;
  }
  String &append(const char *str) {
    if (str == nullptr) {
      return *this;
    }
    (*this) += str;
    return *this;
  }
  // NOTE(review): this member mutates *this and returns it, so `a + b`
  // modifies `a` (unlike std::string). Kept as-is because existing callers
  // may depend on it; prefer the free operator+ overloads declared below.
  String &operator+(const String &str) {
    (*this) += str;
    return *this;
  }
  /// \brief Append another String. Safe for self-append: the old buffer is
  /// read before it is freed.
  String &operator+=(const String &str) {
    size_t new_size = size_ + str.size_;
    char *tmp = reinterpret_cast<char *>(malloc(sizeof(char) * (new_size + 1)));
    if (tmp == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    memcpy(tmp, this->buffer_, size_);
    // memcpy replaces the non-standard strncat_s used previously.
    memcpy(tmp + size_, str.buffer_, str.size_);
    tmp[new_size] = '\0';
    free(buffer_);
    buffer_ = tmp;
    size_ = new_size;
    return *this;
  }
  /// \brief Append a C string; nullptr is a no-op.
  String &operator+=(const char *str) {
    if (str == nullptr) {
      return *this;
    }
    size_t str_size = strlen(str);
    size_t new_size = size_ + str_size;
    char *tmp = reinterpret_cast<char *>(malloc(sizeof(char) * (new_size + 1)));
    if (tmp == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    memcpy(tmp, this->buffer_, size_);
    memcpy(tmp + size_, str, str_size);
    tmp[new_size] = '\0';
    free(buffer_);
    buffer_ = tmp;
    size_ = new_size;
    return *this;
  }
  /// \brief Append a single character.
  String &operator+=(const char ch) {
    char *tmp = reinterpret_cast<char *>(malloc(sizeof(char) * (size_ + 2)));
    if (tmp == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    memcpy(tmp, this->buffer_, size_);
    tmp[size_] = ch;
    tmp[size_ + 1] = '\0';
    free(buffer_);
    buffer_ = tmp;
    size_ += 1;
    return *this;
  }
  // strcmp semantics: <0, 0, >0; comparison stops at embedded NULs.
  int compare(const String &str) const { return strcmp(buffer_, str.buffer_); }
  int compare(const char *str) const { return strcmp(buffer_, str); }
  /// \brief Substring [pos, pos + count), clamped to the string's end.
  String substr(size_t pos = 0, size_t count = npos) const { return String(*this, pos, count); }
  static const size_t npos = -1;
 private:
  size_t size_;
  char *buffer_;
};
// Concatenation: a new String holding lhs followed by rhs. Unlike the member
// operator+, neither operand is modified.
inline String operator+(const String &lhs, const char *rhs) {
  String result(lhs);
  result += rhs;
  return result;
}
// Concatenation: a new String holding lhs followed by rhs.
// Fix: the previous revision copied rhs and then appended lhs, producing the
// operands in reverse order ("ab" + String("cd") yielded "cdab").
inline String operator+(const char *lhs, const String &rhs) {
  String str(lhs);
  str += rhs;
  return str;
}
// strcmp-based equality/inequality; case-sensitive, and comparison stops at
// the first embedded NUL because compare() works on C strings.
inline bool operator!=(const String &lhs, const String &rhs) { return lhs.compare(rhs) != 0; }
inline bool operator==(const String &lhs, const String &rhs) { return lhs.compare(rhs) == 0; }
inline bool operator==(const String &lhs, const char *rhs) { return lhs.compare(rhs) == 0; }
inline bool operator==(const char *lhs, const String &rhs) { return rhs.compare(lhs) == 0; }
// Decimal formatting of an int32; 16 bytes comfortably holds the widest
// value ("-2147483648") plus the terminator.
inline String to_String(int32_t value) {
  char tmp[sizeof(int32_t) * 4];
  snprintf(tmp, sizeof(int32_t) * 4, "%d", value);
  return String(tmp, strlen(tmp));
}
// "%f" formatting of a float; sized for the largest finite magnitude
// (FLT_MAX_10_EXP integral digits) plus sign, point, 6 fraction digits and
// the terminator.
inline String to_String(float value) {
  char tmp[FLT_MAX_10_EXP + 20];
  snprintf(tmp, FLT_MAX_10_EXP + 20, "%f", value);
  return String(tmp, strlen(tmp));
}
// Initial Vector capacity used when no size hint is given.
#define DEFAULT_CAPACITY 4
// Fix: arguments are now fully parenthesized. The previous definition
// expanded `x < y` unparenthesized, so compound argument expressions with
// lower-precedence operators (e.g. MIN(a, b == c)) parsed incorrectly.
#define MIN(x, y) (((x) < (y)) ? (x) : (y))
// Minimal std::vector replacement for STL-free (NOT_USE_STL) builds, e.g.
// bare-metal micro targets. Contiguous storage with doubling growth;
// out-of-range access and allocation failure abort via MS_C_EXCEPTION.
//
// Fixes over the previous revision:
//  * insert(): incremented size_ up front and then either called push_back()
//    (incrementing it a second time) or ran a shift loop that wrote one slot
//    past the used range. Elements are now shifted correctly.
//  * reserve(): only bumped capacity_ without reallocating, so a later
//    push_back() wrote past the real allocation. It now reallocates.
//  * operator=(): leaked the previously owned buffer.
//  * erase(): read data_[size_] (one past the last element) while shifting.
//  * push_back(T&&): copied its argument; it now moves.
template <typename T>
class Vector {
 public:
  // Empty vector; the buffer is allocated lazily on first insertion.
  Vector() {
    size_ = 0;
    capacity_ = DEFAULT_CAPACITY;
    elem_size_ = sizeof(T);
    data_ = nullptr;
  }
  // `size` default-initialized elements (indeterminate for scalar T).
  explicit Vector(size_t size) {
    size_ = size;
    elem_size_ = sizeof(T);
    capacity_ = (size == 0 ? DEFAULT_CAPACITY : size);
    data_ = new (std::nothrow) T[capacity_];
    if (data_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
  }
  // `size` copies of `value`.
  Vector(size_t size, const T &value) {
    size_ = size;
    elem_size_ = sizeof(T);
    capacity_ = (size == 0 ? DEFAULT_CAPACITY : size);
    data_ = new (std::nothrow) T[capacity_];
    if (data_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    for (size_t i = 0; i < size_; ++i) {
      data_[i] = value;
    }
  }
  Vector(const Vector<T> &vec) {
    size_ = vec.size_;
    elem_size_ = sizeof(T);
    capacity_ = vec.capacity_;
    data_ = new (std::nothrow) T[capacity_];
    if (data_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    for (size_t i = 0; i < size_; ++i) {
      data_[i] = vec.data_[i];
    }
  }
  ~Vector() { delete[] data_; }  // delete[] on nullptr is a no-op
  // Drops all elements and releases the buffer; capacity_ keeps its value
  // and is used for the next lazy allocation.
  void clear() {
    size_ = 0;
    delete[] data_;
    data_ = nullptr;
  }
  void push_back(const T &elem) {
    ensure_room_for_one();
    data_[size_] = elem;
    ++size_;
  }
  void push_back(T &&elem) {
    ensure_room_for_one();
    data_[size_] = static_cast<T &&>(elem);  // move; the old code copied
    ++size_;
  }
  void pop_back() {
    if (size_ > 0) {
      --size_;
    } else {
      MS_C_EXCEPTION("Index is out of range!");
    }
  }
  // Inserts `elem` before position `index` (index == size() appends).
  void insert(const T &elem, size_t index) {
    if (index > size_) {
      MS_C_EXCEPTION("Input index is out of range!");
    }
    ensure_room_for_one();
    for (size_t i = size_; i > index; --i) {
      data_[i] = data_[i - 1];
    }
    data_[index] = elem;
    ++size_;
  }
  T *begin() { return data_; }
  const T *begin() const { return data_; }
  T *end() { return data_ + size_; }
  const T *end() const { return data_ + size_; }
  T &front() {
    if (size_ > 0) {
      return data_[0];
    }
    MS_C_EXCEPTION("Index is out of range!");
  }
  const T &front() const {
    if (size_ > 0) {
      return data_[0];
    }
    MS_C_EXCEPTION("Index is out of range!");
  }
  T &back() {
    if (size_ > 0) {
      return data_[size_ - 1];
    }
    MS_C_EXCEPTION("Index is out of range!");
  }
  const T &back() const {
    if (size_ > 0) {
      return data_[size_ - 1];
    }
    MS_C_EXCEPTION("Index is out of range!");
  }
  // at() and operator[] are both bounds-checked (unlike std::vector).
  T &at(size_t index) {
    if (index < size_) {
      return data_[index];
    }
    MS_C_EXCEPTION("Input index is out of range!");
  }
  const T &at(size_t index) const {
    if (index < size_) {
      return data_[index];
    }
    MS_C_EXCEPTION("Input index is out of range!");
  }
  T &operator[](size_t index) {
    if (index < size_) {
      return data_[index];
    }
    MS_C_EXCEPTION("Input index is out of range!");
  }
  const T &operator[](size_t index) const {
    if (index < size_) {
      return data_[index];
    }
    MS_C_EXCEPTION("Input index is out of range!");
  }
  T *data() { return data_; }
  const T *data() const { return data_; }
  size_t size() const { return size_; }
  size_t capacity() const { return capacity_; }
  bool empty() const { return size_ == 0; }
  // Removes the element at `index`, shifting later elements down by one.
  void erase(size_t index) {
    if (index >= size_) {
      MS_C_EXCEPTION("Input index is out of range!");
    }
    for (size_t i = index; i + 1 < size_; ++i) {  // stops before the last slot
      data_[i] = data_[i + 1];
    }
    --size_;
  }
  // Sets the logical size, growing capacity by doubling when needed and
  // always reallocating. New elements are default-initialized by new[]
  // (indeterminate for scalar T).
  void resize(size_t size) {
    while (size > capacity_) {
      capacity_ *= 2;
    }
    T *tmp = data_;
    data_ = new (std::nothrow) T[capacity_];
    if (data_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    size_t keep = (size < size_ ? size : size_);
    for (size_t i = 0; i < keep; ++i) {
      data_[i] = tmp[i];
    }
    size_ = size;
    delete[] tmp;
  }
  // Guarantees storage for at least `capacity` elements. The buffer is
  // actually reallocated (the previous revision only bumped capacity_, so a
  // following push_back wrote past the real allocation).
  void reserve(size_t capacity) {
    if (capacity <= capacity_) {
      return;
    }
    capacity_ = capacity;
    if (data_ == nullptr) {
      return;  // nothing stored yet; the first insertion allocates at the new capacity
    }
    T *tmp = data_;
    data_ = new (std::nothrow) T[capacity_];
    if (data_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    for (size_t i = 0; i < size_; ++i) {
      data_[i] = tmp[i];
    }
    delete[] tmp;
  }
  // Copy assignment (self-assignment safe). The new buffer is allocated
  // before the old one is released, which also fixes the leak in the
  // previous revision (it never freed the old buffer).
  Vector<T> &operator=(const Vector<T> &vec) {
    if (this == &vec) {
      return *this;
    }
    T *tmp = new (std::nothrow) T[vec.capacity_];
    if (tmp == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    delete[] data_;
    data_ = tmp;
    size_ = vec.size_;
    elem_size_ = sizeof(T);
    capacity_ = vec.capacity_;
    for (size_t i = 0; i < size_; ++i) {
      data_[i] = vec.data_[i];
    }
    return *this;
  }
 private:
  // Ensures data_ is allocated and data_[size_] is a valid free slot.
  void ensure_room_for_one() {
    if (data_ == nullptr) {
      data_ = new (std::nothrow) T[capacity_];
      if (data_ == nullptr) {
        MS_C_EXCEPTION("malloc data failed");
      }
    } else if (size_ == capacity_) {
      size_t old_size = size_;
      resize(size_ + 1);  // grows capacity and reallocates
      size_ = old_size;   // restore logical size; caller performs the append
    }
  }
  size_t size_;
  size_t elem_size_;
  size_t capacity_;
  T *data_;
};
// STL-free aliases for the NOT_USE_STL (micro / bare-metal) build. They
// mirror the std-based aliases defined in the #else branch of this header.
using TensorPtrVector = Vector<mindspore::schema::Tensor *>;
using Uint32Vector = Vector<uint32_t>;
using AllocatorPtr = void *;  // no Allocator class in this configuration; an opaque pointer suffices
using DeviceContextVector = Vector<lite::DeviceContext>;
using KernelCallBack = void (*)(void *, void *);  // plain function pointer stands in for std::function
#else
/// \brief Allocator defined a memory pool for malloc memory and free memory dynamically.
///
/// \note List public class and interface for reference.
class Allocator;
using AllocatorPtr = std::shared_ptr<Allocator>;
using TensorPtrVector = std::vector<mindspore::schema::Tensor *>;
using Uint32Vector = std::vector<uint32_t>;
template <typename T>
using Vector = std::vector<T>;
template <typename T>
inline std::string to_string(T t) {
return std::to_string(t);
}
namespace tensor {
using String = std::string;
} // namespace tensor
namespace session {
using String = std::string;
} // namespace session
/// \brief CallBackParam defined input arguments for callBack function.
struct CallBackParam {
  session::String node_name; /**< node name argument */
  session::String node_type; /**< node type argument */
};
/// \brief GPUCallBackParam extends CallBackParam with the kernel execute time.
struct GPUCallBackParam : CallBackParam {
  double execute_time{-1.f}; /**< execute time; -1 means not measured */
};
/// \brief KernelCallBack defined the function pointer for callBack.
/// Returning false from the callback aborts the run (presumably — confirm
/// against the LiteSession::RunGraph implementation).
using KernelCallBack = std::function<bool(Vector<tensor::MSTensor *> inputs, Vector<tensor::MSTensor *> outputs,
                                          const CallBackParam &opInfo)>;
namespace lite {
using String = std::string;
using DeviceContextVector = std::vector<DeviceContext>;
/// \brief Set data of MSTensor from string vector.
///
/// \param[in] inputs String vector to encode into the tensor.
/// \param[out] tensor Destination MSTensor.
///
/// \return STATUS as an error code of this interface, STATUS is defined in errorcode.h.
int MS_API StringsToMSTensor(const Vector<String> &inputs, tensor::MSTensor *tensor);
/// \brief Get string vector from MSTensor.
/// \param[in] tensor Source MSTensor.
/// \return String vector decoded from the tensor.
Vector<String> MS_API MSTensorToStrings(const tensor::MSTensor *tensor);
}  // namespace lite
#endif // NOT_USE_STL
} // namespace mindspore
#endif // MINDSPORE_LITE_INCLUDE_LITE_UTILS_H_

View File

@ -0,0 +1,66 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INCLUDE_MODEL_H_
#define MINDSPORE_LITE_INCLUDE_MODEL_H_
#include "include/lite_utils.h"
namespace mindspore::lite {
/// \brief Model holds a deserialized lite graph: its tensors, operator nodes
/// and subgraph partitioning.
struct MS_API Model {
  /// \brief Node describes one operator instance of the graph.
  struct Node {
    String name_;
    int node_type_;          // numeric operator type; value semantics defined by the schema (not visible here)
    const void *primitive_;  // opaque pointer to the operator's serialized attributes
    Uint32Vector input_indices_;   // tensor indices (presumably into all_tensors_ — confirm)
    Uint32Vector output_indices_;  // tensor indices (presumably into all_tensors_ — confirm)
    int quant_type_;
  };
  using NodePtrVector = Vector<Node *>;
  /// \brief SubGraph groups a subset of nodes and tensors that execute together.
  struct SubGraph {
    String name_;
    Uint32Vector input_indices_;
    Uint32Vector output_indices_;
    Uint32Vector node_indices_;
    Uint32Vector tensor_indices_;
  };
  using SubGraphPtrVector = Vector<SubGraph *>;
  String name_;
  String version_;
  TensorPtrVector all_tensors_;
  NodePtrVector all_nodes_;
  char *buf;  // NOTE(review): raw model buffer; ownership/lifetime appears tied to Free()/Destroy() — confirm
  SubGraphPtrVector sub_graphs_;
  /// \brief Static method to create a Model pointer.
  ///
  /// \param[in] model_buf Define the buffer read from a model file.
  /// \param[in] size Define bytes number of model buffer.
  ///
  /// \return Pointer of MindSpore Lite Model.
  static Model *Import(const char *model_buf, size_t size);
  /// \brief Free meta graph temporary buffer
  virtual void Free() = 0;
  /// \brief Free all temporary buffer.EG: nodes in the model.
  virtual void Destroy() = 0;
  /// \brief Model destruct, free all memory
  virtual ~Model() = default;
};
} // namespace mindspore::lite
#endif // MINDSPORE_LITE_INCLUDE_MODEL_H_

View File

@ -0,0 +1,118 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INCLUDE_MS_TENSOR_H_
#define MINDSPORE_LITE_INCLUDE_MS_TENSOR_H_
#include "include/lite_utils.h"
#include "ir/dtype/type_id.h"
namespace mindspore {
enum Format : int64_t;
namespace tensor {
/// \brief MSTensor defined tensor in MindSpore Lite.
class MS_API MSTensor {
 public:
  /// \brief Constructor of MindSpore Lite MSTensor.
  ///
  /// \return Instance of MindSpore Lite MSTensor.
  MSTensor() = default;
  /// \brief Destructor of MindSpore Lite MSTensor.
  virtual ~MSTensor() = default;
  /// \brief Create a MSTensor.
  ///
  /// \param[in] name Define the name of the tensor.
  /// \param[in] type Define the data type of the tensor.
  /// \param[in] shape Define the shape of the tensor.
  /// \param[in] data Define the pointer to the initial data of the tensor.
  /// \param[in] data_len Define the byte length of data.
  ///
  /// \return Pointer to an instance of MindSpore Lite MSTensor.
  static MSTensor *CreateTensor(const String &name, TypeId type, const Vector<int> &shape, const void *data,
                                size_t data_len);
  /// \brief Get memory allocator of current MSTensor.
  ///
  /// \return Pointer of memory allocator class.
  virtual AllocatorPtr allocator() const = 0;
  /// \brief Get data type of the MindSpore Lite MSTensor.
  ///
  /// \note TypeId is defined in mindspore/mindspore/include/api/type_id.h. Only number types in TypeId enum are
  /// suitable for MSTensor.
  ///
  /// \return MindSpore Lite TypeId of the MindSpore Lite MSTensor.
  virtual TypeId data_type() const = 0;
  /// \brief Set data type of current MSTensor.
  ///
  /// \param[in] data_type Define data type, which is shown in type_id.h.
  virtual void set_data_type(TypeId data_type) = 0;
  /// \brief Set format of current MSTensor.
  ///
  /// \param[in] format Define format of data, which is shown in format.h
  virtual void set_format(mindspore::Format format) = 0;
  /// \brief Get format of current MSTensor.
  ///
  /// \return format, which is shown in format.h
  virtual mindspore::Format format() const = 0;
  /// \brief Get shape of the MindSpore Lite MSTensor.
  ///
  /// \return A vector of int as the shape of the MindSpore Lite MSTensor.
  virtual Vector<int> shape() const = 0;
  /// \brief Set the shape of MSTensor.
  ///
  /// \param[in] shape Define the new shape of the tensor.
  virtual void set_shape(const Vector<int> &shape) = 0;
  /// \brief Get number of element in MSTensor.
  ///
  /// \return Number of element in MSTensor.
  virtual int ElementsNum() const = 0;
  /// \brief Get byte size of data in MSTensor.
  ///
  /// \return Byte size of data in MSTensor.
  virtual size_t Size() const = 0;
  /// \brief Get the name of MSTensor.
  ///
  /// \return the name of MSTensor.
  virtual String tensor_name() const = 0;
  /// \brief Set the name of MSTensor.
  ///
  /// \param[in] name Define the new name of the tensor.
  virtual void set_tensor_name(const String &name) = 0;
  /// \brief Get the pointer of data in MSTensor.
  ///
  /// \note The data pointer can be used to both write and read data in MSTensor. The memory buffer will be
  /// automatically allocated.
  ///
  /// \return the pointer points to data in MSTensor.
  virtual void *MutableData() = 0;
  /// \brief Get the pointer of data in MSTensor.
  ///
  /// \note The data pointer can be used to both write and read data in MSTensor. No memory buffer will be
  /// allocated.
  ///
  /// \return the pointer points to data in MSTensor.
  virtual void *data() = 0;
  /// \brief Set the data of MSTensor.
  ///
  /// \param[in] data Define the pointer the tensor should refer to (ownership semantics not visible here — see
  /// the implementation).
  virtual void set_data(void *data) = 0;
};
} // namespace tensor
} // namespace mindspore
#endif // MINDSPORE_LITE_INCLUDE_MS_TENSOR_H_

View File

@ -0,0 +1,38 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INCLUDE_VERSION_H_
#define MINDSPORE_LITE_INCLUDE_VERSION_H_
#include "include/lite_utils.h"
namespace mindspore {
namespace lite {
const int ms_version_major = 1;
const int ms_version_minor = 2;
const int ms_version_revision = 0;
/// \brief Global method to get a version string.
///
/// \return The version string of MindSpore Lite, e.g. "MindSpore Lite 1.2.0".
inline String Version() {
  const String dot = ".";
  const String prefix = "MindSpore Lite ";
  // Assemble "<prefix><major>.<minor>.<revision>" from the version constants above.
  return prefix + to_string(ms_version_major) + dot + to_string(ms_version_minor) + dot +
         to_string(ms_version_revision);
}
} // namespace lite
} // namespace mindspore
#endif // MINDSPORE_LITE_INCLUDE_VERSION_H_

View File

@ -0,0 +1,196 @@
[-1.5255959e+00, -7.5023180e-01, -6.5398091e-01, -1.6094848e+00,
-1.0016718e-01, -6.0918891e-01, -9.7977227e-01, -1.6090963e+00,
-7.1214461e-01, 3.0372199e-01, -7.7731431e-01, -2.5145525e-01,
-2.2227049e-01, 1.6871134e+00, 2.2842517e-01, 4.6763551e-01,
-6.9697243e-01, -1.1607615e+00, 6.9954240e-01, 1.9908163e-01,
8.6569238e-01, 2.4440390e-01, -6.6291136e-01, 8.0730826e-01,
1.1016806e+00, -1.7593604e-01, -2.2455578e+00, -1.4464580e+00,
6.1155282e-02, -6.1774445e-01, -7.9806983e-01, -1.3162321e-01,
1.8793458e+00, -7.2131783e-02, 1.5777060e-01, -7.7345490e-01,
1.9905651e-01, 4.5702778e-02, 1.5295692e-01, -4.7567880e-01,
-1.1101983e-01, 2.9273525e-01, -1.5784515e-01, -2.8787140e-02,
2.3571110e+00, -1.0373387e+00, 1.5747981e+00, -6.2984723e-01,
-9.2739171e-01, 5.4514152e-01, 6.6280261e-02, -4.3704012e-01,
7.6260060e-01, 4.4151092e-01, 1.1651385e+00, 2.0153918e+00,
1.3741246e-01, 9.3864471e-01, -1.8600109e-01, -6.4463931e-01,
1.5392458e+00, -8.6958760e-01, -3.3311536e+00, -7.4787223e-01,
-2.5502462e-02, -1.0233306e+00, -5.9618515e-01, -1.0055307e+00,
-2.1060631e-01, -7.5475276e-03, 1.6734272e+00, 1.0342831e-02,
-7.0395666e-01, -1.8526579e-01, -9.9623507e-01, -8.3125526e-01,
-4.6102202e-01, -5.6008244e-01, 3.9557618e-01, -9.8227710e-01,
-5.0648659e-01, 9.9775404e-02, -6.5397340e-01, 7.3169369e-01,
-1.4343859e+00, -5.0081307e-01, 1.7163314e-01, -1.5999313e-01,
2.5463349e-01, -5.0195730e-01, -1.0412000e+00, 7.3226720e-01,
-1.0483401e+00, -4.7087720e-01, 2.9113635e-01, 1.9907043e+00,
6.6144532e-01, 1.1899205e+00, 8.1653392e-01, -9.1352361e-01,
1.3851457e+00, -8.1384623e-01, -9.2757654e-01, 1.1119633e+00,
1.3352057e+00, 6.0427362e-01, -1.0344208e-01, -1.5121692e-01,
-2.1020830e+00, -6.2002194e-01, -1.4782310e+00, -1.1334175e+00,
8.7379628e-01, -5.6025940e-01, 1.2857845e+00, 8.1682384e-01,
2.0530410e-01, 3.0510718e-01, 5.3568703e-01, -4.3118501e-01,
2.5581384e+00, -2.3336388e-01, -1.3472130e-02, 1.8606348e+00,
-1.9804063e+00, 1.7985829e+00, 1.0181159e-01, 3.4000599e-01,
7.1236455e-01, -1.7765073e+00, 3.5386458e-01, 1.1996132e+00,
-3.0299741e-01, -1.7618417e+00, 6.3484460e-01, -8.0435908e-01,
-1.6111118e+00, -1.8716129e+00, 5.4308361e-01, 6.6067863e-01,
2.2952116e+00, 6.7490596e-01, 1.7133216e+00, -1.7942734e+00,
-1.3632672e+00, -9.8321962e-01, 1.5112667e+00, 6.4187074e-01,
4.7296381e-01, -4.2859009e-01, 5.5137074e-01, -1.5473709e+00,
5.1811212e-01, 1.0653535e-01, 2.6924077e-01, 1.3247679e+00,
1.7460191e+00, 1.8549690e+00, -7.0636910e-01, 2.5570862e+00,
4.1753429e-01, -2.1271861e-01, -8.3995801e-01, -4.2001787e-01,
-6.2403631e-01, -9.7729611e-01, 8.7484282e-01, 9.8728138e-01,
3.0957633e-01, 1.5206900e+00, 1.2052339e+00, -1.8155910e+00,
-4.0346155e-01, -9.5914519e-01, -5.2077039e-03, -7.8863136e-02,
8.4365427e-01, 1.1657013e+00, 5.2693218e-01, 1.6192533e+00,
-9.6397626e-01, 1.4152038e-01, -1.6366096e-01, -3.5822257e-01,
1.7222793e+00, -3.0357561e-01, 2.3887420e-01, 1.3440012e+00,
1.0322569e-01, 1.1003542e+00, -3.4168020e-01, 9.4733888e-01,
-5.6851596e-01, 8.3759618e-01, 1.7836607e+00, -1.9542466e-01,
5.1491612e-01, -1.8474776e+00, -2.9167426e+00, -5.6732988e-01,
-5.4128021e-01, 8.9517403e-01, -8.8250703e-01, 5.3181124e-01,
-1.5457772e+00, -1.7329982e-01, 7.2824633e-01, 5.7061020e-02,
9.0551722e-01, 1.0462948e+00, -5.2059698e-01, 1.3547838e+00,
2.3519313e-01, 1.9142433e+00, 1.8364111e+00, 1.3245324e+00,
-9.6900916e-01, 1.2516364e+00, 1.2103242e+00, -5.2792060e-01,
2.1856615e-01, -5.7430726e-01, 1.4571251e+00, 1.7709557e+00,
1.6499138e+00, -4.3200457e-01, -2.7102691e-01, -1.4391626e+00,
1.2470404e+00, 1.2738512e+00, 3.9094925e-01, 3.8721049e-01,
-7.9828717e-02, 3.4172431e-01, 9.4882733e-01, -1.3839359e+00,
1.7240863e+00, -2.3647652e+00, -9.2949092e-01, 2.9362530e-01,
2.1513203e-01, 9.3846369e-01, 1.4657077e+00, -5.5647439e-01,
-7.4484080e-01, -2.0215721e-01, -2.2966790e-01, 1.3313366e-03,
3.7527591e-01, -5.8106792e-01, -5.7230884e-01, 1.0097175e+00,
-1.0564939e-01, -1.1796960e+00, -9.0779595e-02, 5.6311435e-01,
-1.2560141e+00, 8.9555502e-01, 1.6747737e-01, 7.5142086e-01,
2.4142299e+00, 1.0205840e+00, -4.4048381e-01, -1.7341677e+00,
-1.2362250e+00, 1.5785813e+00, -1.1160507e+00, 7.6777023e-01,
-5.8820677e-01, 2.1188903e+00, -5.4219025e-01, -2.4592547e+00,
-1.1108288e+00, -1.1187209e+00, 7.5799555e-01, -4.9565765e-01,
-1.9700006e-01, -3.3396218e-02, 7.1929151e-01, 1.0644146e+00,
8.3402544e-01, -1.9162164e+00, -3.4202927e-01, -6.6049206e-01,
3.1508535e-01, 1.1422518e+00, 3.0550566e-01, -5.7888174e-01,
-2.3828252e-01, -1.3541743e+00, 2.6868939e-01, 1.1455697e-01,
-1.5562972e+00, -1.0757437e+00, -8.7519461e-01, -4.7281876e-01,
9.9123681e-01, -5.8622282e-02, 1.1787646e+00, 6.2218499e-01,
7.8785008e-01, 1.3685523e+00, -8.5068983e-01, 5.1260746e-01,
1.0476325e+00, -3.1758463e-01, 1.3948506e-01, 2.3402624e+00,
-6.1160916e-01, 8.1602710e-01, 2.4772300e-01, -3.8672671e-01,
1.9948451e-01, 7.9926956e-01, -2.6190341e-01, 1.5132962e-01,
1.1981666e+00, -2.2832582e+00, -1.0129594e+00, -8.8789088e-01,
6.5221924e-01, -8.7262028e-01, 3.5253752e-02, -3.3653030e-01,
1.4023319e+00, 4.8412141e-01, -7.0304507e-01, -8.2676607e-01,
7.7439600e-01, 6.9199395e-01, -1.0184799e+00, -8.0337167e-01,
-7.0711321e-01, 7.5211829e-01, -1.9208279e-02, 1.1033330e+00,
-6.0679215e-01, -5.2522349e-01, -5.6618774e-01, 6.6039857e-04,
7.2245878e-01, 1.5263520e-01, 1.4495978e-01, -2.3442194e+00,
3.6000299e-01, 4.6668175e-01, 1.2830665e+00, 1.2678007e+00,
1.9883296e-01, 5.4408771e-01, -3.9781693e-01, -1.9291055e+00,
2.3236869e-01, 8.6146563e-01, 6.2175733e-01, -1.7811896e+00,
-7.8206092e-01, -1.4236701e+00, 1.6090765e+00, -3.2787595e-02,
8.5323340e-01, 5.5063650e-02, -1.7425371e+00, 8.7500376e-01,
-2.7188172e+00, -2.2192061e-01, 3.4208494e-01, 1.1093477e+00,
-5.7314759e-01, 9.5778459e-01, 9.8202319e-04, -1.3847686e+00,
-9.9650228e-01, 8.0734813e-01, 1.1738863e+00, -9.3984646e-01,
1.3109189e+00, -3.1670693e-01, -1.8610410e-01, -5.7646018e-01,
6.8665183e-01, 4.2086706e-01, -1.0213808e+00, 9.8856664e-01,
-5.6187165e-01, -1.5792575e-01, 1.5042593e+00, -1.3950295e+00,
8.0079097e-01, -6.6194439e-01, 1.2563107e+00, 4.9999446e-01,
-2.7133808e-01, 1.8469073e+00, -3.1249959e-02, -9.3872704e-02,
-6.1907429e-01, -6.3632655e-01, -4.2415860e-01, -2.0271668e+00,
4.0962908e-01, -1.5421267e+00, -1.0128618e+00, -2.9737514e-02,
-2.8895226e-01, 1.5219319e-01, -2.9803404e-01, -1.3135384e-01,
-6.2809873e-01, 1.1968799e+00, 6.1099350e-01, -4.5477438e-01,
-9.6037018e-01, 2.7690458e-01, -6.8010890e-01, -5.4578751e-01,
-4.5518342e-01, 3.1859580e-01, -3.5494208e-01, 6.8589437e-01,
-3.7613729e-01, -2.4106996e+00, -1.2778088e+00, -6.2887415e-02,
-9.4712764e-02, -2.3144305e+00, 5.5653399e-01, 5.0569206e-01,
-2.0759584e-01, 6.9363183e-01, 4.1949040e-01, 2.2523544e+00,
9.3852311e-01, 1.4252927e+00, 1.5083258e+00, 1.0539497e-01,
-1.6049961e+00, -1.0644839e-01, 2.4656655e-01, 6.1250836e-01,
7.3980182e-01, -1.7860015e-01, 7.8490011e-02, -4.3981805e-01,
-3.6079338e-01, -1.2617406e+00, 1.9146918e+00, -1.8612741e+00,
-9.6749123e-03, 2.6038763e-01, 2.8203353e-01, 2.5829947e-01,
-4.2654869e-01, 9.8075122e-01, 1.8588890e+00, -1.0920147e+00,
7.6300204e-01, 2.2761525e-01, -1.4569789e+00, 1.7043737e+00,
-3.2686386e+00, 4.7498712e-01, -2.1142473e+00, -1.5002301e+00,
1.0692973e+00, 1.4393831e+00, 5.0645941e-01, 8.3597529e-01,
1.1752968e+00, -3.4211743e-01, -3.8716367e-01, 5.4765379e-01,
-1.5891987e-01, -7.3604894e-01, -2.3351878e-01, -5.4039150e-01,
1.5708433e-01, -5.9762299e-01, -8.8390934e-01, 6.0767305e-01,
-3.8843614e-01, -3.1578582e-02, -5.6058836e-01, -6.5552413e-01,
7.2615027e-01, 6.7892069e-01, -4.3017429e-01, -3.8485083e-01,
-1.5082921e+00, -7.1995616e-01, -1.1909670e+00, 1.3271062e+00,
-2.1984124e+00, 2.8614265e-01, -2.0104712e-01, -2.5348804e+00,
-1.5848289e+00, 2.1679449e-01, -1.4276333e-01, 1.4274154e+00,
1.6425379e-01, -3.1606898e-01, 1.2852281e-01, -5.2765143e-01,
1.0834497e+00, 7.2746372e-01, 5.7725620e-01, 5.3688127e-01,
-4.3616110e-01, 2.7676934e-01, 2.9459488e-01, -5.6314898e-01,
5.1899290e-01, 1.3394899e+00, -2.3876244e-01, -6.7961216e-02,
-1.5035529e-01, 5.2330041e-01, -2.1156418e-01, -1.2541972e+00,
1.8176029e-02, 1.4141930e+00, -1.7437581e+00, 1.1289321e-01,
4.5267120e-01, 3.1554270e-01, -6.9010293e-01, -2.8289640e-01,
3.5618150e-01, -6.5616649e-01, 6.7499673e-01, 1.2909728e+00,
2.8768075e-01, 1.1313233e+00, -1.9227705e-03, -2.3545134e-01,
-7.7834469e-01, 1.7674841e-02, 1.1869689e+00, -5.9568787e-01,
-1.5738513e+00, 9.0094990e-01, 1.0499262e+00, 4.2925611e-01,
3.4665063e-01, 1.1960464e+00, 5.0744399e-02, -2.4047236e+00,
6.6365647e-01, -3.9687249e-01, 4.0486488e-01, 3.4154087e-01,
-5.9558362e-01, 1.1019011e+00, 5.5386519e-01, -9.5087808e-01,
-5.0393552e-01, 1.7358937e+00, 1.1365190e+00, 7.3528785e-01,
-6.3713288e-01, -8.8953024e-01, 5.9735751e-01, -6.1928016e-01,
1.2089928e+00, 8.0966818e-01, -3.7273017e-01, -5.3331411e-01,
-4.9985203e-01, 3.9947726e-02, -7.8146380e-01, 3.1946027e-01,
8.2106584e-01, 8.6431539e-01, 4.9166805e-01, 4.4538009e-01,
-8.8726664e-01, 5.2979738e-01, 2.6839951e-01, 3.5011527e-01,
-2.7225810e-01, 1.0665658e+00, -8.9532214e-01, 1.4147978e+00,
-9.1728181e-01, 8.3720893e-01, 1.4950181e+00, -8.3034581e-01,
-1.9900607e+00, -8.7786657e-01, 2.2035673e-01, -1.9547749e+00,
8.5329479e-01, -1.4188342e+00, 9.8297036e-01, -5.3868419e-01,
1.3784917e-01, 9.2474985e-01, 2.9384881e-01, 3.0301414e+00,
-1.4259109e+00, 3.3642095e-01, -6.0710046e-02, -2.7827954e+00,
1.3488874e+00, 2.6844734e-01, -1.1277022e+00, -5.9944046e-01,
-2.7945054e-01, -2.1999671e-01, 1.1315615e+00, -5.5813056e-01,
-8.4985018e-01, -5.9133893e-01, 9.1871524e-01, -1.7054160e+00,
-6.2452555e-01, -1.5477768e+00, -4.3917063e-01, -8.2900178e-01,
-4.2779538e-01, 1.2994735e+00, -1.0199753e+00, -8.5336286e-01,
-1.8470149e+00, -5.6316632e-01, -2.9311785e-01, -1.5726203e+00,
-1.0079967e+00, -1.1254747e+00, 2.0839548e+00, 2.8445369e-01,
-2.0898786e-01, 2.7948596e+00, 9.4693983e-01, 1.1613066e+00,
2.1592824e-02, 2.1849406e+00, 3.7046966e-01, 8.3229375e-01,
1.0294781e+00, -4.6743554e-01, 1.2099822e+00, -9.2927051e-01,
1.5964565e+00, -3.5177864e-02, 1.9276363e-01, 9.4458717e-01,
4.0307879e-01, 7.8339100e-01, 1.6240975e+00, -1.9683785e+00,
9.2987645e-01, 1.5981036e+00, 4.2616895e-01, 2.5072601e+00,
4.4090030e-01, -2.0394561e+00, 1.0628663e+00, 7.7601296e-01,
8.3457164e-02, 1.7073935e+00, -2.0758156e-01, -2.7201766e-01,
-6.5246433e-01, 2.3190866e+00, -3.1556660e-01, 1.2293459e+00,
1.9086858e-02, 1.6939967e+00, -9.7426087e-01, 1.0000985e-01,
1.6331865e-01, 1.1104544e+00, 6.5858930e-01, -1.8446711e-01,
-6.9782162e-01, 5.4673910e-01, -1.0919048e+00, -2.0058967e-01,
-2.1976221e-01, -7.5056171e-01, 9.1047740e-01, 1.4996040e+00,
-2.7725294e-01, 9.9202655e-02, -1.5756993e+00, 7.4856669e-01,
-2.4229655e-01, -1.8000333e-01, 9.5837879e-01, 3.7814003e-01,
1.9289158e-01, 2.4711327e-01, -3.1152922e-01, 4.4534847e-02,
-7.7046400e-01, 4.5658717e-01, -1.3150460e+00, -5.0721991e-01,
4.1748023e-01, 9.2643857e-01, 6.3569260e-01, -1.6128796e-01,
1.0286627e+00, 4.7581047e-02, 4.1486391e-01, -2.7009306e+00,
-1.5045499e+00, -1.8634710e-01, -9.3207240e-01, 3.0545831e-01,
-5.1035285e-01, 8.7927073e-01, 1.7738712e+00, -1.3286506e-01,
1.3458737e+00, -4.6432903e-01, -3.7430039e-01, 9.7058731e-01,
-1.9518436e+00, -6.4998013e-01, 1.3482264e+00, 3.0995172e-01,
-1.5216483e+00, 9.7610706e-01, 3.9083481e-01, 2.7913565e-02,
-4.1744223e-01, 1.7064806e+00, -2.5080970e-01, -3.3612009e-02,
5.8338016e-01, 1.6178854e+00, -1.3733586e+00, -8.5550433e-01,
1.5778065e+00, 1.0752751e-01, 1.1045673e+00, 5.9758538e-01,
7.1269102e-02, -5.0374931e-01, 8.0341589e-01, 1.1834451e+00,
6.3811505e-01, -5.0269210e-01, -9.9724096e-01, -5.6425828e-01,
-3.4610125e-01, 2.7074468e-01, -1.3578615e+00, -9.6113062e-01,
1.1768451e+00, 1.1981529e-01, 6.6130060e-01, 1.7996032e+00,
-1.4726470e+00, -1.4529139e+00, 2.5632006e-01, -7.5283742e-01,
1.2143371e+00, 5.3680718e-01, -5.9180927e-01, 1.1358957e+00,
1.4462845e+00, -1.1436753e+00, 7.8876835e-01, -6.7686230e-01,
-9.3259799e-01, 7.4118137e-02, 2.1128911e-01, 2.6312185e-02,
-2.2259822e-02, -1.5083861e+00, -2.7273307e+00, -8.5954350e-01,
-4.6734902e-01, 1.5499024e+00, 4.5016751e-01, 1.2971551e+00,
2.9964414e-01, -1.0238653e+00, 1.0269226e+00, -1.9246057e-01]

View File

@ -0,0 +1,132 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INPUT_DATA_H_
#define MINDSPORE_LITE_INPUT_DATA_H_
// Pre-generated sample input for the MNIST example: 784 float values, i.e. one
// 28 x 28 image-sized tensor. The values look standard-normal, so this is
// presumably synthetic test data rather than a real digit — confirm against the
// example's data generator.
float mnist_inputs_data[] = {
  -1.5255959e+00, -7.5023180e-01, -6.5398091e-01, -1.6094848e+00, -1.0016718e-01, -6.0918891e-01, -9.7977227e-01,
  -1.6090963e+00, -7.1214461e-01, 3.0372199e-01,  -7.7731431e-01, -2.5145525e-01, -2.2227049e-01, 1.6871134e+00,
  2.2842517e-01,  4.6763551e-01,  -6.9697243e-01, -1.1607615e+00, 6.9954240e-01,  1.9908163e-01,  8.6569238e-01,
  2.4440390e-01,  -6.6291136e-01, 8.0730826e-01,  1.1016806e+00,  -1.7593604e-01, -2.2455578e+00, -1.4464580e+00,
  6.1155282e-02,  -6.1774445e-01, -7.9806983e-01, -1.3162321e-01, 1.8793458e+00,  -7.2131783e-02, 1.5777060e-01,
  -7.7345490e-01, 1.9905651e-01,  4.5702778e-02,  1.5295692e-01,  -4.7567880e-01, -1.1101983e-01, 2.9273525e-01,
  -1.5784515e-01, -2.8787140e-02, 2.3571110e+00,  -1.0373387e+00, 1.5747981e+00,  -6.2984723e-01, -9.2739171e-01,
  5.4514152e-01,  6.6280261e-02,  -4.3704012e-01, 7.6260060e-01,  4.4151092e-01,  1.1651385e+00,  2.0153918e+00,
  1.3741246e-01,  9.3864471e-01,  -1.8600109e-01, -6.4463931e-01, 1.5392458e+00,  -8.6958760e-01, -3.3311536e+00,
  -7.4787223e-01, -2.5502462e-02, -1.0233306e+00, -5.9618515e-01, -1.0055307e+00, -2.1060631e-01, -7.5475276e-03,
  1.6734272e+00,  1.0342831e-02,  -7.0395666e-01, -1.8526579e-01, -9.9623507e-01, -8.3125526e-01, -4.6102202e-01,
  -5.6008244e-01, 3.9557618e-01,  -9.8227710e-01, -5.0648659e-01, 9.9775404e-02,  -6.5397340e-01, 7.3169369e-01,
  -1.4343859e+00, -5.0081307e-01, 1.7163314e-01,  -1.5999313e-01, 2.5463349e-01,  -5.0195730e-01, -1.0412000e+00,
  7.3226720e-01,  -1.0483401e+00, -4.7087720e-01, 2.9113635e-01,  1.9907043e+00,  6.6144532e-01,  1.1899205e+00,
  8.1653392e-01,  -9.1352361e-01, 1.3851457e+00,  -8.1384623e-01, -9.2757654e-01, 1.1119633e+00,  1.3352057e+00,
  6.0427362e-01,  -1.0344208e-01, -1.5121692e-01, -2.1020830e+00, -6.2002194e-01, -1.4782310e+00, -1.1334175e+00,
  8.7379628e-01,  -5.6025940e-01, 1.2857845e+00,  8.1682384e-01,  2.0530410e-01,  3.0510718e-01,  5.3568703e-01,
  -4.3118501e-01, 2.5581384e+00,  -2.3336388e-01, -1.3472130e-02, 1.8606348e+00,  -1.9804063e+00, 1.7985829e+00,
  1.0181159e-01,  3.4000599e-01,  7.1236455e-01,  -1.7765073e+00, 3.5386458e-01,  1.1996132e+00,  -3.0299741e-01,
  -1.7618417e+00, 6.3484460e-01,  -8.0435908e-01, -1.6111118e+00, -1.8716129e+00, 5.4308361e-01,  6.6067863e-01,
  2.2952116e+00,  6.7490596e-01,  1.7133216e+00,  -1.7942734e+00, -1.3632672e+00, -9.8321962e-01, 1.5112667e+00,
  6.4187074e-01,  4.7296381e-01,  -4.2859009e-01, 5.5137074e-01,  -1.5473709e+00, 5.1811212e-01,  1.0653535e-01,
  2.6924077e-01,  1.3247679e+00,  1.7460191e+00,  1.8549690e+00,  -7.0636910e-01, 2.5570862e+00,  4.1753429e-01,
  -2.1271861e-01, -8.3995801e-01, -4.2001787e-01, -6.2403631e-01, -9.7729611e-01, 8.7484282e-01,  9.8728138e-01,
  3.0957633e-01,  1.5206900e+00,  1.2052339e+00,  -1.8155910e+00, -4.0346155e-01, -9.5914519e-01, -5.2077039e-03,
  -7.8863136e-02, 8.4365427e-01,  1.1657013e+00,  5.2693218e-01,  1.6192533e+00,  -9.6397626e-01, 1.4152038e-01,
  -1.6366096e-01, -3.5822257e-01, 1.7222793e+00,  -3.0357561e-01, 2.3887420e-01,  1.3440012e+00,  1.0322569e-01,
  1.1003542e+00,  -3.4168020e-01, 9.4733888e-01,  -5.6851596e-01, 8.3759618e-01,  1.7836607e+00,  -1.9542466e-01,
  5.1491612e-01,  -1.8474776e+00, -2.9167426e+00, -5.6732988e-01, -5.4128021e-01, 8.9517403e-01,  -8.8250703e-01,
  5.3181124e-01,  -1.5457772e+00, -1.7329982e-01, 7.2824633e-01,  5.7061020e-02,  9.0551722e-01,  1.0462948e+00,
  -5.2059698e-01, 1.3547838e+00,  2.3519313e-01,  1.9142433e+00,  1.8364111e+00,  1.3245324e+00,  -9.6900916e-01,
  1.2516364e+00,  1.2103242e+00,  -5.2792060e-01, 2.1856615e-01,  -5.7430726e-01, 1.4571251e+00,  1.7709557e+00,
  1.6499138e+00,  -4.3200457e-01, -2.7102691e-01, -1.4391626e+00, 1.2470404e+00,  1.2738512e+00,  3.9094925e-01,
  3.8721049e-01,  -7.9828717e-02, 3.4172431e-01,  9.4882733e-01,  -1.3839359e+00, 1.7240863e+00,  -2.3647652e+00,
  -9.2949092e-01, 2.9362530e-01,  2.1513203e-01,  9.3846369e-01,  1.4657077e+00,  -5.5647439e-01, -7.4484080e-01,
  -2.0215721e-01, -2.2966790e-01, 1.3313366e-03,  3.7527591e-01,  -5.8106792e-01, -5.7230884e-01, 1.0097175e+00,
  -1.0564939e-01, -1.1796960e+00, -9.0779595e-02, 5.6311435e-01,  -1.2560141e+00, 8.9555502e-01,  1.6747737e-01,
  7.5142086e-01,  2.4142299e+00,  1.0205840e+00,  -4.4048381e-01, -1.7341677e+00, -1.2362250e+00, 1.5785813e+00,
  -1.1160507e+00, 7.6777023e-01,  -5.8820677e-01, 2.1188903e+00,  -5.4219025e-01, -2.4592547e+00, -1.1108288e+00,
  -1.1187209e+00, 7.5799555e-01,  -4.9565765e-01, -1.9700006e-01, -3.3396218e-02, 7.1929151e-01,  1.0644146e+00,
  8.3402544e-01,  -1.9162164e+00, -3.4202927e-01, -6.6049206e-01, 3.1508535e-01,  1.1422518e+00,  3.0550566e-01,
  -5.7888174e-01, -2.3828252e-01, -1.3541743e+00, 2.6868939e-01,  1.1455697e-01,  -1.5562972e+00, -1.0757437e+00,
  -8.7519461e-01, -4.7281876e-01, 9.9123681e-01,  -5.8622282e-02, 1.1787646e+00,  6.2218499e-01,  7.8785008e-01,
  1.3685523e+00,  -8.5068983e-01, 5.1260746e-01,  1.0476325e+00,  -3.1758463e-01, 1.3948506e-01,  2.3402624e+00,
  -6.1160916e-01, 8.1602710e-01,  2.4772300e-01,  -3.8672671e-01, 1.9948451e-01,  7.9926956e-01,  -2.6190341e-01,
  1.5132962e-01,  1.1981666e+00,  -2.2832582e+00, -1.0129594e+00, -8.8789088e-01, 6.5221924e-01,  -8.7262028e-01,
  3.5253752e-02,  -3.3653030e-01, 1.4023319e+00,  4.8412141e-01,  -7.0304507e-01, -8.2676607e-01, 7.7439600e-01,
  6.9199395e-01,  -1.0184799e+00, -8.0337167e-01, -7.0711321e-01, 7.5211829e-01,  -1.9208279e-02, 1.1033330e+00,
  -6.0679215e-01, -5.2522349e-01, -5.6618774e-01, 6.6039857e-04,  7.2245878e-01,  1.5263520e-01,  1.4495978e-01,
  -2.3442194e+00, 3.6000299e-01,  4.6668175e-01,  1.2830665e+00,  1.2678007e+00,  1.9883296e-01,  5.4408771e-01,
  -3.9781693e-01, -1.9291055e+00, 2.3236869e-01,  8.6146563e-01,  6.2175733e-01,  -1.7811896e+00, -7.8206092e-01,
  -1.4236701e+00, 1.6090765e+00,  -3.2787595e-02, 8.5323340e-01,  5.5063650e-02,  -1.7425371e+00, 8.7500376e-01,
  -2.7188172e+00, -2.2192061e-01, 3.4208494e-01,  1.1093477e+00,  -5.7314759e-01, 9.5778459e-01,  9.8202319e-04,
  -1.3847686e+00, -9.9650228e-01, 8.0734813e-01,  1.1738863e+00,  -9.3984646e-01, 1.3109189e+00,  -3.1670693e-01,
  -1.8610410e-01, -5.7646018e-01, 6.8665183e-01,  4.2086706e-01,  -1.0213808e+00, 9.8856664e-01,  -5.6187165e-01,
  -1.5792575e-01, 1.5042593e+00,  -1.3950295e+00, 8.0079097e-01,  -6.6194439e-01, 1.2563107e+00,  4.9999446e-01,
  -2.7133808e-01, 1.8469073e+00,  -3.1249959e-02, -9.3872704e-02, -6.1907429e-01, -6.3632655e-01, -4.2415860e-01,
  -2.0271668e+00, 4.0962908e-01,  -1.5421267e+00, -1.0128618e+00, -2.9737514e-02, -2.8895226e-01, 1.5219319e-01,
  -2.9803404e-01, -1.3135384e-01, -6.2809873e-01, 1.1968799e+00,  6.1099350e-01,  -4.5477438e-01, -9.6037018e-01,
  2.7690458e-01,  -6.8010890e-01, -5.4578751e-01, -4.5518342e-01, 3.1859580e-01,  -3.5494208e-01, 6.8589437e-01,
  -3.7613729e-01, -2.4106996e+00, -1.2778088e+00, -6.2887415e-02, -9.4712764e-02, -2.3144305e+00, 5.5653399e-01,
  5.0569206e-01,  -2.0759584e-01, 6.9363183e-01,  4.1949040e-01,  2.2523544e+00,  9.3852311e-01,  1.4252927e+00,
  1.5083258e+00,  1.0539497e-01,  -1.6049961e+00, -1.0644839e-01, 2.4656655e-01,  6.1250836e-01,  7.3980182e-01,
  -1.7860015e-01, 7.8490011e-02,  -4.3981805e-01, -3.6079338e-01, -1.2617406e+00, 1.9146918e+00,  -1.8612741e+00,
  -9.6749123e-03, 2.6038763e-01,  2.8203353e-01,  2.5829947e-01,  -4.2654869e-01, 9.8075122e-01,  1.8588890e+00,
  -1.0920147e+00, 7.6300204e-01,  2.2761525e-01,  -1.4569789e+00, 1.7043737e+00,  -3.2686386e+00, 4.7498712e-01,
  -2.1142473e+00, -1.5002301e+00, 1.0692973e+00,  1.4393831e+00,  5.0645941e-01,  8.3597529e-01,  1.1752968e+00,
  -3.4211743e-01, -3.8716367e-01, 5.4765379e-01,  -1.5891987e-01, -7.3604894e-01, -2.3351878e-01, -5.4039150e-01,
  1.5708433e-01,  -5.9762299e-01, -8.8390934e-01, 6.0767305e-01,  -3.8843614e-01, -3.1578582e-02, -5.6058836e-01,
  -6.5552413e-01, 7.2615027e-01,  6.7892069e-01,  -4.3017429e-01, -3.8485083e-01, -1.5082921e+00, -7.1995616e-01,
  -1.1909670e+00, 1.3271062e+00,  -2.1984124e+00, 2.8614265e-01,  -2.0104712e-01, -2.5348804e+00, -1.5848289e+00,
  2.1679449e-01,  -1.4276333e-01, 1.4274154e+00,  1.6425379e-01,  -3.1606898e-01, 1.2852281e-01,  -5.2765143e-01,
  1.0834497e+00,  7.2746372e-01,  5.7725620e-01,  5.3688127e-01,  -4.3616110e-01, 2.7676934e-01,  2.9459488e-01,
  -5.6314898e-01, 5.1899290e-01,  1.3394899e+00,  -2.3876244e-01, -6.7961216e-02, -1.5035529e-01, 5.2330041e-01,
  -2.1156418e-01, -1.2541972e+00, 1.8176029e-02,  1.4141930e+00,  -1.7437581e+00, 1.1289321e-01,  4.5267120e-01,
  3.1554270e-01,  -6.9010293e-01, -2.8289640e-01, 3.5618150e-01,  -6.5616649e-01, 6.7499673e-01,  1.2909728e+00,
  2.8768075e-01,  1.1313233e+00,  -1.9227705e-03, -2.3545134e-01, -7.7834469e-01, 1.7674841e-02,  1.1869689e+00,
  -5.9568787e-01, -1.5738513e+00, 9.0094990e-01,  1.0499262e+00,  4.2925611e-01,  3.4665063e-01,  1.1960464e+00,
  5.0744399e-02,  -2.4047236e+00, 6.6365647e-01,  -3.9687249e-01, 4.0486488e-01,  3.4154087e-01,  -5.9558362e-01,
  1.1019011e+00,  5.5386519e-01,  -9.5087808e-01, -5.0393552e-01, 1.7358937e+00,  1.1365190e+00,  7.3528785e-01,
  -6.3713288e-01, -8.8953024e-01, 5.9735751e-01,  -6.1928016e-01, 1.2089928e+00,  8.0966818e-01,  -3.7273017e-01,
  -5.3331411e-01, -4.9985203e-01, 3.9947726e-02,  -7.8146380e-01, 3.1946027e-01,  8.2106584e-01,  8.6431539e-01,
  4.9166805e-01,  4.4538009e-01,  -8.8726664e-01, 5.2979738e-01,  2.6839951e-01,  3.5011527e-01,  -2.7225810e-01,
  1.0665658e+00,  -8.9532214e-01, 1.4147978e+00,  -9.1728181e-01, 8.3720893e-01,  1.4950181e+00,  -8.3034581e-01,
  -1.9900607e+00, -8.7786657e-01, 2.2035673e-01,  -1.9547749e+00, 8.5329479e-01,  -1.4188342e+00, 9.8297036e-01,
  -5.3868419e-01, 1.3784917e-01,  9.2474985e-01,  2.9384881e-01,  3.0301414e+00,  -1.4259109e+00, 3.3642095e-01,
  -6.0710046e-02, -2.7827954e+00, 1.3488874e+00,  2.6844734e-01,  -1.1277022e+00, -5.9944046e-01, -2.7945054e-01,
  -2.1999671e-01, 1.1315615e+00,  -5.5813056e-01, -8.4985018e-01, -5.9133893e-01, 9.1871524e-01,  -1.7054160e+00,
  -6.2452555e-01, -1.5477768e+00, -4.3917063e-01, -8.2900178e-01, -4.2779538e-01, 1.2994735e+00,  -1.0199753e+00,
  -8.5336286e-01, -1.8470149e+00, -5.6316632e-01, -2.9311785e-01, -1.5726203e+00, -1.0079967e+00, -1.1254747e+00,
  2.0839548e+00,  2.8445369e-01,  -2.0898786e-01, 2.7948596e+00,  9.4693983e-01,  1.1613066e+00,  2.1592824e-02,
  2.1849406e+00,  3.7046966e-01,  8.3229375e-01,  1.0294781e+00,  -4.6743554e-01, 1.2099822e+00,  -9.2927051e-01,
  1.5964565e+00,  -3.5177864e-02, 1.9276363e-01,  9.4458717e-01,  4.0307879e-01,  7.8339100e-01,  1.6240975e+00,
  -1.9683785e+00, 9.2987645e-01,  1.5981036e+00,  4.2616895e-01,  2.5072601e+00,  4.4090030e-01,  -2.0394561e+00,
  1.0628663e+00,  7.7601296e-01,  8.3457164e-02,  1.7073935e+00,  -2.0758156e-01, -2.7201766e-01, -6.5246433e-01,
  2.3190866e+00,  -3.1556660e-01, 1.2293459e+00,  1.9086858e-02,  1.6939967e+00,  -9.7426087e-01, 1.0000985e-01,
  1.6331865e-01,  1.1104544e+00,  6.5858930e-01,  -1.8446711e-01, -6.9782162e-01, 5.4673910e-01,  -1.0919048e+00,
  -2.0058967e-01, -2.1976221e-01, -7.5056171e-01, 9.1047740e-01,  1.4996040e+00,  -2.7725294e-01, 9.9202655e-02,
  -1.5756993e+00, 7.4856669e-01,  -2.4229655e-01, -1.8000333e-01, 9.5837879e-01,  3.7814003e-01,  1.9289158e-01,
  2.4711327e-01,  -3.1152922e-01, 4.4534847e-02,  -7.7046400e-01, 4.5658717e-01,  -1.3150460e+00, -5.0721991e-01,
  4.1748023e-01,  9.2643857e-01,  6.3569260e-01,  -1.6128796e-01, 1.0286627e+00,  4.7581047e-02,  4.1486391e-01,
  -2.7009306e+00, -1.5045499e+00, -1.8634710e-01, -9.3207240e-01, 3.0545831e-01,  -5.1035285e-01, 8.7927073e-01,
  1.7738712e+00,  -1.3286506e-01, 1.3458737e+00,  -4.6432903e-01, -3.7430039e-01, 9.7058731e-01,  -1.9518436e+00,
  -6.4998013e-01, 1.3482264e+00,  3.0995172e-01,  -1.5216483e+00, 9.7610706e-01,  3.9083481e-01,  2.7913565e-02,
  -4.1744223e-01, 1.7064806e+00,  -2.5080970e-01, -3.3612009e-02, 5.8338016e-01,  1.6178854e+00,  -1.3733586e+00,
  -8.5550433e-01, 1.5778065e+00,  1.0752751e-01,  1.1045673e+00,  5.9758538e-01,  7.1269102e-02,  -5.0374931e-01,
  8.0341589e-01,  1.1834451e+00,  6.3811505e-01,  -5.0269210e-01, -9.9724096e-01, -5.6425828e-01, -3.4610125e-01,
  2.7074468e-01,  -1.3578615e+00, -9.6113062e-01, 1.1768451e+00,  1.1981529e-01,  6.6130060e-01,  1.7996032e+00,
  -1.4726470e+00, -1.4529139e+00, 2.5632006e-01,  -7.5283742e-01, 1.2143371e+00,  5.3680718e-01,  -5.9180927e-01,
  1.1358957e+00,  1.4462845e+00,  -1.1436753e+00, 7.8876835e-01,  -6.7686230e-01, -9.3259799e-01, 7.4118137e-02,
  2.1128911e-01,  2.6312185e-02,  -2.2259822e-02, -1.5083861e+00, -2.7273307e+00, -8.5954350e-01, -4.6734902e-01,
  1.5499024e+00,  4.5016751e-01,  1.2971551e+00,  2.9964414e-01,  -1.0238653e+00, 1.0269226e+00,  -1.9246057e-01};
#endif // MINDSPORE_LITE_INPUT_DATA_H_

View File

@ -0,0 +1,5 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

View File

@ -0,0 +1,5 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

View File

@ -0,0 +1,51 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "nnacl/fp32/exp_fp32.h"
#include <math.h>
#include <string.h>
#include "nnacl/errorcode.h"
int Exp(const float *input_data, float *output_data, const ExpParameter *parameter, int task_id) {
if (parameter->scale_ == 1) {
for (size_t i = task_id; i < parameter->element_num_; i += parameter->thread_num_) {
output_data[i] = expf(input_data[i]);
}
} else {
for (size_t i = task_id; i < parameter->element_num_; i += parameter->thread_num_) {
output_data[i] = expf(input_data[i] * parameter->in_scale_);
}
}
if (parameter->out_scale_ != 1) {
for (size_t i = task_id; i < parameter->element_num_; i += parameter->thread_num_) {
output_data[i] = output_data[i] * parameter->out_scale_;
}
}
return NNACL_OK;
}
/* Element-wise exponential over `num` floats: dst[i] = exp(src[i]).
 * On ARM64 the bulk of the range is processed four lanes at a time with the
 * vectorized simd_exp128; the remainder (or everything, elsewhere) goes
 * through the scalar simd_exp32 helper. */
void ExpFp32(const float *src, float *dst, int num) {
  int index = 0;
#ifdef ENABLE_ARM64
  /* Largest multiple of the vector width not exceeding num. */
  const int vector_end = num - (num % C4NUM);
  while (index < vector_end) {
    simd_exp128(vld1q_f32(src + index), dst + index);
    index += C4NUM;
  }
#endif
  while (index < num) {
    simd_exp32(src[index], dst + index);
    ++index;
  }
}

View File

@ -0,0 +1,146 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "nnacl/fp32/softmax_fp32.h"
#include <math.h>
#include <float.h>
#include "nnacl/fp32/exp_fp32.h"
/* Numerical-stability pass of softmax: for each of `batch` rows of `channel`
 * floats, writes dst[i] = src[i] - max(row). NEON builds vectorize both the
 * max reduction and the subtraction; the scalar tail handles channel % 4. */
void SoftmaxNorm(const float *src, float *dst, int batch, int channel) {
  int row_start = 0;
  for (int b = 0; b < batch; ++b, row_start += channel) {
    const float *row_in = src + row_start;
    float *row_out = dst + row_start;
    /* Pass 1: find the row maximum. */
    int j = 0;
#ifdef ENABLE_NEON
    float32x4_t max4 = vdupq_n_f32(-FLT_MAX);
    int count = (channel / C4NUM) * C4NUM;
    for (; j < count; j += C4NUM) {
      max4 = vmaxq_f32(max4, vld1q_f32(row_in + j));
    }
#ifdef ENABLE_ARM64
    float row_max = vmaxvq_f32(max4); /* horizontal max in one instruction */
#else
    float row_max = max4[0];
    for (int m = 1; m < 4; ++m) {
      row_max = MSMAX(row_max, max4[m]);
    }
#endif
#else
    float row_max = -FLT_MAX;
#endif
    for (; j < channel; ++j) {
      if (row_in[j] > row_max) {
        row_max = row_in[j];
      }
    }
    /* Pass 2: subtract the maximum from every element of the row. */
    int k = 0;
#ifdef ENABLE_NEON
    const float32x4_t max_vec = vdupq_n_f32(row_max);
    for (; k < count; k += C4NUM) {
      vst1q_f32(row_out + k, vsubq_f32(vld1q_f32(row_in + k), max_vec));
    }
#endif
    for (; k < channel; ++k) {
      row_out[k] = row_in[k] - row_max;
    }
  }
}
/* Softmax post-normalization: for each batch row, dst[i] = src[i] / sum(row).
 * The NEON path multiplies by the reciprocal instead of dividing. */
void SumAndDiv(const float *src, float *dst, int batch, int channel) {
  int row_offset = 0;
  for (int b = 0; b < batch; ++b, row_offset += channel) {
    float sum = 0;
    /* Pass 1: row sum (SIMD body + scalar tail). */
    int j = 0;
#ifdef ENABLE_NEON
    float32x4_t sum4 = vdupq_n_f32(0);
    int count = (channel / C4NUM) * C4NUM;
    for (; j < count; j += C4NUM) {
      sum4 = vaddq_f32(sum4, vld1q_f32(src + row_offset + j));
    }
#ifdef ENABLE_ARM64
    sum = vaddvq_f32(sum4);
#else
    sum = sum4[0] + sum4[1] + sum4[2] + sum4[3];
#endif
#endif
    for (; j < channel; ++j) {
      sum += src[row_offset + j];
    }
    /* Pass 2: normalize the row by the sum. */
    int k = 0;
#ifdef ENABLE_NEON
    const float div = 1.0f / sum;
    for (; k < count; k += C4NUM) {
      vst1q_f32(dst + row_offset + k, vmulq_n_f32(vld1q_f32(src + row_offset + k), div));
    }
#endif
    for (; k < channel; ++k) {
      dst[row_offset + k] = src[row_offset + k] / sum;
    }
  }
}
/* Softmax over the last axis, treating the input as (batch, channel):
 * subtract the per-row max, exponentiate, then divide by the per-row sum. */
void SoftmaxLastAxis(const float *src, float *dst, int batch, int channel) {
  SoftmaxNorm(src, dst, batch, channel);
  ExpFp32(dst, dst, batch * channel);
  SumAndDiv(dst, dst, batch, channel);
}
// output = exp(input) / reduce_sum(exp(input), axis)
// Generic softmax along an arbitrary axis. `sum_data` is scratch of size
// outter_size * inner_size. NOTE(review): it is accumulated with `+=` and
// never cleared here, so the caller must zero it beforehand -- confirm
// against the kernels that invoke Softmax().
void Softmax(const float *input_ptr, float *output_ptr, float *sum_data, const SoftmaxParameter *parameter) {
  int axis = parameter->axis_;
  int n_dim = parameter->n_dim_;
  const int *input_shape = parameter->input_shape_;
  int inner_size = 1;
  int outter_size = 1;
  // Collapse the shape into [outter, axis, inner].
  for (int i = 0; i < axis; i++) {
    outter_size *= input_shape[i];
  }
  for (int i = axis + 1; i < n_dim; i++) {
    inner_size *= input_shape[i];
  }
  // Pass 1: per-slice max subtraction (overflow guard), exponentiation, and
  // accumulation of the per-slice sums.
  for (int i = 0; i < outter_size; i++) {
    int outter_offset = i * input_shape[axis] * inner_size;
    int sum_outter_offset = i * inner_size;
    for (int k = 0; k < inner_size; k++) {
      int inner_offset = outter_offset + k;
      float max_data = input_ptr[inner_offset];
      for (int j = 0; j < input_shape[axis]; j++) {
        int axis_offset = inner_offset + j * inner_size;
        max_data = max_data > input_ptr[axis_offset] ? max_data : input_ptr[axis_offset];
      }
      for (int j = 0; j < input_shape[axis]; j++) {
        int axis_offset = inner_offset + j * inner_size;
        // expf instead of exp: the data is float, so the double-precision exp
        // only added a float->double->float round trip per element.
        output_ptr[axis_offset] = expf(input_ptr[axis_offset] - max_data);
        sum_data[k + sum_outter_offset] += output_ptr[axis_offset];
      }
    }
  }
  // Pass 2: divide each element by its slice sum.
  for (int i = 0; i < outter_size; i++) {
    int outter_offset = i * input_shape[axis] * inner_size;
    int sum_outter_offset = i * inner_size;
    for (int j = 0; j < input_shape[axis]; j++) {
      int axis_offset = outter_offset + j * inner_size;
      for (int k = 0; k < inner_size; k++) {
        int inner_offset = axis_offset + k;
        output_ptr[inner_offset] = output_ptr[inner_offset] / sum_data[k + sum_outter_offset];
      }
    }
  }
}

View File

@ -0,0 +1,125 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <math.h>
#include "nnacl/int8/quant_dtype_cast_int8.h"
#include "nnacl/errorcode.h"
/* Dequantize int8 to float32: real = (q - zero_point) * scale. */
int DoDequantizeInt8ToFp32(const int8_t *quant_values, float *real_values, float scale, int32_t zp, int size) {
  if (real_values == NULL || quant_values == NULL) {
    return NNACL_PARAM_INVALID;
  }
  for (int idx = 0; idx < size; ++idx) {
    const int shifted = quant_values[idx] - zp;
    real_values[idx] = shifted * scale;
  }
  return NNACL_OK;
}
/* Quantize float32 to int8: q = clamp(round(real / scale + zp), -128, 127).
 * With uint8_flag set, the result is biased so a later +128 shift yields the
 * uint8 encoding. Infinities saturate to the matching end of the range;
 * the original mapped -inf to +127, inverting its sign. */
int DoQuantizeFp32ToInt8(const float *real_values, int8_t *quant_values, float scale, int32_t zp, int size,
                         bool uint8_flag) {
  if (quant_values == NULL || real_values == NULL) {
    return NNACL_PARAM_INVALID;
  }
  if (uint8_flag) {
    zp += 128;
  }
  const float inverse_scale = 1.0f / scale;
  for (int i = 0; i < size; ++i) {
    if (isinf(real_values[i])) {
      /* Saturate by sign: +inf -> 127, -inf -> -128. */
      quant_values[i] = real_values[i] > 0 ? 127 : -128;
    } else {
      int temp = round(real_values[i] * inverse_scale + zp);
      if (uint8_flag) {
        temp -= 128;
      }
      temp = temp < 127 ? temp : 127;
      temp = temp > -128 ? temp : -128;
      quant_values[i] = (int8_t)temp;
    }
  }
  return NNACL_OK;
}
/* Dequantize uint8 to float32: real = (q - zero_point) * scale. */
int DoDequantizeUInt8ToFp32(const uint8_t *quant_values, float *real_values, float scale, int32_t zp, int size) {
  if (real_values == NULL || quant_values == NULL) {
    return NNACL_PARAM_INVALID;
  }
  for (int idx = 0; idx < size; ++idx) {
    const int shifted = (int)quant_values[idx] - zp;
    real_values[idx] = (float)shifted * scale;
  }
  return NNACL_OK;
}
/* Quantize float32 to uint8: q = clamp(round(real / scale + zp), 0, 255).
 * Infinities saturate to the matching end of the range; the original mapped
 * -inf to 255, the opposite end. */
int DoQuantizeFp32ToUInt8(const float *real_values, uint8_t *quant_values, float scale, int32_t zp, int size) {
  if (quant_values == NULL || real_values == NULL) {
    return NNACL_PARAM_INVALID;
  }
  for (int i = 0; i < size; ++i) {
    if (isinf(real_values[i])) {
      /* Saturate by sign: +inf -> 255, -inf -> 0. */
      quant_values[i] = real_values[i] > 0 ? 255 : 0;
    } else {
      float temp = (float)round(real_values[i] * 1.0 / scale + zp);
      if (temp > 255) {
        quant_values[i] = 255;
      } else if (temp < 0) {
        quant_values[i] = 0;
      } else {
        quant_values[i] = (uint8_t)temp;
      }
    }
  }
  return NNACL_OK;
}
/* Re-encode int8 [-128,127] as uint8 [0,255] by adding 128.
 * The clamp branches are kept for safety although the shift cannot
 * leave the [0,255] range for valid int8 input. */
int Int8ToUInt8(const int8_t *quant_values, uint8_t *real_values, int size) {
  if (quant_values == NULL || real_values == NULL) {
    return NNACL_PARAM_INVALID;
  }
  for (int idx = 0; idx < size; ++idx) {
    const int shifted = (int)quant_values[idx] + 128;
    if (shifted > 255) {
      real_values[idx] = (uint8_t)255;
    } else if (shifted < 0) {
      real_values[idx] = 0;
    } else {
      real_values[idx] = (uint8_t)shifted;
    }
  }
  return NNACL_OK;
}
/* Re-encode uint8 [0,255] as int8 [-128,127] by subtracting 128.
 * The clamp branches are kept for safety although the shift cannot
 * leave the [-128,127] range for valid uint8 input. */
int UInt8ToInt8(const uint8_t *real_values, int8_t *quant_values, int size) {
  if (quant_values == NULL || real_values == NULL) {
    return NNACL_PARAM_INVALID;
  }
  for (int idx = 0; idx < size; ++idx) {
    const int shifted = (int)real_values[idx] - 128;
    if (shifted > 127) {
      quant_values[idx] = 127;
    } else if (shifted < -128) {
      quant_values[idx] = -128;
    } else {
      quant_values[idx] = (int8_t)shifted;
    }
  }
  return NNACL_OK;
}

View File

@ -0,0 +1,5 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

View File

@ -0,0 +1,85 @@
# Build script for the generated micro "net" static library. It links the
# generated sources against the prebuilt codegen operator library and then
# repackages both into a single libnet.a.
cmake_minimum_required(VERSION 3.14)
project(net)
if(NOT DEFINED PKG_PATH)
    message(FATAL_ERROR "PKG_PATH not set")
endif()
get_filename_component(PKG_PATH ${PKG_PATH} ABSOLUTE BASE_DIR ${CMAKE_CURRENT_BINARY_DIR})
set(OP_LIB ${PKG_PATH}/tools/codegen/operator_library/lib/libops.a)
set(OP_HEADER_PATH ${PKG_PATH}/tools/codegen/operator_library/include)
set(HEADER_PATH ${PKG_PATH}/runtime)
message("operator lib path: ${OP_LIB}")
message("operator header path: ${OP_HEADER_PATH}")
# The micro runtime ships its own String/Vector replacements.
add_compile_definitions(NOT_USE_STL)
include_directories(${OP_HEADER_PATH})
include_directories(${HEADER_PATH})
include(net.cmake)
option(MICRO_BUILD_ARM64 "build android arm64" OFF)
option(MICRO_BUILD_ARM32A "build android arm32" OFF)
if(MICRO_BUILD_ARM64 OR MICRO_BUILD_ARM32A)
    add_compile_definitions(ENABLE_NEON)
    add_compile_definitions(ENABLE_ARM)
endif()
if(MICRO_BUILD_ARM64)
    add_compile_definitions(ENABLE_ARM64)
endif()
if(MICRO_BUILD_ARM32A)
    add_compile_definitions(ENABLE_ARM32)
    add_definitions(-mfloat-abi=softfp -mfpu=neon)
endif()
set(CMAKE_C_FLAGS "${CMAKE_ENABLE_C99} ${CMAKE_C_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17")
if("${CMAKE_BUILD_TYPE}" STREQUAL "Debug")
    message(STATUS "build net library with debug info")
    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DDebug -g")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DDebug -g")
    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fvisibility=default")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fvisibility=default")
else()
    message(STATUS "build net library release version")
    set(CMAKE_C_FLAGS "-fPIC -fPIE -D_FORTIFY_SOURCE=2 -O3 -Wall -Werror -fstack-protector-strong -Wno-attributes \
    -Wno-deprecated-declarations -Wno-missing-braces ${CMAKE_C_FLAGS}")
    set(CMAKE_CXX_FLAGS "-fPIC -fPIE -D_FORTIFY_SOURCE=2 -O3 -Wall -Werror -fstack-protector-strong -Wno-attributes \
    -Wno-deprecated-declarations -Wno-missing-braces -Wno-overloaded-virtual ${CMAKE_CXX_FLAGS}")
    string(REPLACE "-g" "" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
    string(REPLACE "-g" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
endif()
# Merge the required objects from libops.a into libnet.a after linking:
# unpack both archives, keep only the OP_SRC objects, re-archive everything.
function(create_library)
    add_custom_command(TARGET net
        POST_BUILD
        COMMAND rm -rf tmp
        COMMAND mkdir tmp
        COMMAND cd tmp && ar -x ${OP_LIB}
        COMMAND echo "raw static library ${library_name} size:"
        COMMAND ls -lh ${library_name}
        COMMAND mv ${library_name} ./tmp && cd tmp && ar -x ${library_name}
        COMMENT "unzip raw static library ${library_name}"
    )
    foreach(object_file ${OP_SRC})
        add_custom_command(TARGET net POST_BUILD COMMAND mv ./tmp/${object_file} .)
    endforeach()
    add_custom_command(TARGET net
        POST_BUILD
        COMMAND ar cr ${library_name} *.o
        COMMAND ranlib ${library_name}
        COMMAND echo "new static library ${library_name} size:"
        COMMAND ls -lh ${library_name}
        COMMAND rm -rf tmp && rm -rf *.o
        COMMENT "generate specified static library ${library_name}"
    )
endfunction(create_library)
string(CONCAT library_name "lib" net ".a")
create_library()

View File

@ -0,0 +1,64 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_LIBRARY_SOURCE_MODEL_H_
#define MINDSPORE_LITE_LIBRARY_SOURCE_MODEL_H_
#include "include/model.h"
#include "session.h"
#include <new>
#include <string.h>
namespace mindspore::lite {
// Minimal micro-runtime Model implementation: owns a malloc'ed copy of the
// serialized model buffer and knows how to release it.
class MModel : public Model {
 public:
  // Release the owned model buffer (safe to call repeatedly).
  void Free() override {
    if (this->buf != nullptr) {
      free(this->buf);
      this->buf = nullptr;
      this->buf_size_ = 0;
    }
  }
  void Destroy() override { Free(); }
  ~MModel() override { Destroy(); }
  void set_buf_size(size_t size) { buf_size_ = size; }
  size_t buf_size() const { return buf_size_; }

 private:
  size_t buf_size_{0};  // size in bytes of the buffer held in Model::buf
};
// Create a model that owns a deep copy of `model_buf`.
// Returns nullptr on null input, zero size, or allocation failure.
Model *Model::Import(const char *model_buf, size_t size) {
  MS_NULLPTR_IF_NULL(model_buf);
  if (size == 0) {
    return nullptr;
  }
  auto *model = new (std::nothrow) MModel();
  MS_NULLPTR_IF_NULL(model);
  char *copy = reinterpret_cast<char *>(malloc(size));
  if (copy == nullptr) {
    delete model;
    return nullptr;
  }
  memcpy(copy, model_buf, size);
  model->buf = copy;
  model->set_buf_size(size);
  return model;
}
} // namespace mindspore::lite
#endif // MINDSPORE_LITE_LIBRARY_SOURCE_MODEL_H_

View File

@ -0,0 +1,98 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "weight.h"
#include "net.h"
/* Pointer to the single graph input; set by SetInputs, consumed by Inference. */
static const unsigned char *g_Input0 = 0;
/* Record the model input pointers. This generated net has exactly one input. */
int SetInputs(const void **inputs, int num) {
  if (inputs == NULL || num != 1) {
    return RET_ERROR;
  }
  g_Input0 = inputs[0];
  return RET_OK;
}
/* Copy the single graph output out of the workspace. Inference() leaves the
 * dequantized result (10 float32 values = 40 bytes) at g_Buffer + 32. */
int CopyOutputsData(void **outputs, int num) {
  if (outputs == NULL) {
    return RET_ERROR;
  }
  if (num != 1) {
    return RET_ERROR;
  }
  memcpy(outputs[0], g_Buffer + 32, 40);
  return RET_OK;
}
/* Workspace size in bytes required by Inference(); fixed at code generation. */
int GetBufferSize() { return 10576; }
/* Register the caller-allocated workspace used for all intermediate tensors. */
int SetBuffer(void *buffer) {
  if (buffer == NULL) {
    return RET_ERROR;
  }
  g_Buffer = buffer;
  return RET_OK;
}
/* Drop the workspace and input pointers. The allocated-weights list is empty
 * for this model, so the loop below is an (empty) codegen placeholder. */
void FreeResource() {
  g_Buffer = NULL;
  g_Input0 = NULL;
  void *allocated[] = {};
  for (int i = 0; i < 0; ++i) {
    free(allocated[i]);
    allocated[i] = NULL;
  }
}
/* Generated inference body (int8 MNIST-style net):
 * conv -> maxpool -> conv -> maxpool -> reshape -> fc -> fc -> softmax
 * -> dequantize to float32. All intermediates live at fixed offsets in
 * g_Buffer; the per-layer shift/mult tables are codegen-fixed requantization
 * parameters for the CMSIS-NN kernels. */
void Inference() {
  {  /* conv1: 28x28x1 -> 26x26x12, 3x3 kernel; g_Buffer+10144 is the im2col scratch */
    memset((int16_t *)(g_Buffer + 10144), 0, 36);
    const int output_shift[12] = {-9, -9, -9, -9, -9, -9, -9, -10, -10, -9, -9, -9};
    const int output_mult[12] = {1354133566, 1485574432, 1737792646, 1225484872, 1221530746, 1184403831,
                                 1344308820, 1080459089, 1432168625, 1245831715, 1804167149, 1092395059};
    arm_convolve_s8((int8_t *)(g_Input0), 28, 28, 1, 1, g_Weight1, 12, 3, 3, 0, 0, 1, 1, g_Weight2,
                    (int8_t *)(g_Buffer + 0), output_shift, output_mult, 17, 128, -128, 127, 26, 26,
                    (int16_t *)(g_Buffer + 10144));
  }
  {  /* maxpool1: 26x26x12 -> 13x13x12, 2x2/stride 2 */
    arm_max_pool_s8(26, 26, 13, 13, 2, 2, 2, 2, 0, 0, -128, 127, 12, (int8_t *)(g_Buffer + 0), NULL,
                    (int8_t *)(g_Buffer + 8112));
  }
  {  /* conv2: 13x13x12 -> 11x11x12, 3x3 kernel */
    memset((int16_t *)(g_Buffer + 10144), 0, 432);
    const int output_shift[12] = {-10, -10, -10, -9, -10, -10, -10, -10, -10, -9, -9, -10};
    const int output_mult[12] = {2143437276, 1710269977, 1664140445, 1275314678, 2121906679, 1591651427,
                                 1589631258, 1721320620, 1939131746, 1186858310, 1223164752, 1583392613};
    arm_convolve_s8((int8_t *)(g_Buffer + 8112), 13, 13, 12, 1, g_Weight3, 12, 3, 3, 0, 0, 1, 1, g_Weight4,
                    (int8_t *)(g_Buffer + 0), output_shift, output_mult, 31, -17, -128, 127, 11, 11,
                    (int16_t *)(g_Buffer + 10144));
  }
  {  /* maxpool2: 11x11x12 -> 5x5x12 */
    arm_max_pool_s8(11, 11, 5, 5, 2, 2, 2, 2, 0, 0, -128, 127, 12, (int8_t *)(g_Buffer + 0), NULL,
                    (int8_t *)(g_Buffer + 1456));
  }
  /* flatten 5x5x12 = 300 bytes for the fully-connected layers */
  { memcpy((int8_t *)(g_Buffer + 0), (int8_t *)(g_Buffer + 1456), 300); }
  {  /* fc1: 300 -> 20 */
    arm_fully_connected_s8((int8_t *)(g_Buffer + 0), g_Weight6, 300, 20, 1, -31, 0, 1379728884, -8, 11, g_Weight7,
                           (int8_t *)(g_Buffer + 304), -128, 127, NULL);
  }
  {  /* fc2: 20 -> 10 */
    arm_fully_connected_s8((int8_t *)(g_Buffer + 304), g_Weight8, 20, 10, 1, -11, 0, 1282256809, -8, -20, g_Weight9,
                           (int8_t *)(g_Buffer + 0), -128, 127, NULL);
  }
  /* softmax over the 10 logits, then dequantize (scale 1/256, zp -128) to float */
  { arm_softmax_s8((int8_t *)(g_Buffer + 0), 1, 10, 1152553088, 27, -15, (int8_t *)(g_Buffer + 16)); }
  { DoDequantizeInt8ToFp32((int8_t *)(g_Buffer + 16), (float *)(g_Buffer + 32), 0.00390625, -128, 10); }
}

View File

@ -0,0 +1,24 @@
# Sources for the generated net library, plus the CMSIS-NN objects that must
# be extracted from libops.a and merged into the final archive (see OP_SRC use
# in the top-level create_library()).
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../include/)
include_directories(${OP_HEADER_PATH}/CMSIS/NN/Include)
include_directories(${OP_HEADER_PATH}/CMSIS/DSP/Include)
include_directories(${OP_HEADER_PATH}/CMSIS/Core/Include)
set(OP_SRC
    arm_convolve_s8.c.o
    arm_fully_connected_s8.c.o
    arm_max_pool_s8.c.o
    arm_nn_mat_mult_kernel_s8_s16.c.o
    arm_nn_vec_mat_mult_t_s8.c.o
    arm_q7_to_q15_with_offset.c.o
    arm_softmax_s8.c.o
    quant_dtype_cast_int8.c.o
    weight.c.o
    net.c.o
    session.cc.o
    tensor.cc.o
    string.cc.o
)
file(GLOB NET_SRC
    ${CMAKE_CURRENT_SOURCE_DIR}/*.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/*.c
)
add_library(net STATIC ${NET_SRC})

View File

@ -0,0 +1,50 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifdef __cplusplus
extern "C" {
#endif
/**
 * Set the model's input tensors.
 * @param inputs array of input data pointers; a model may have more than one input.
 * @param num number of entries in `inputs`.
 **/
int SetInputs(const void **inputs, int num);
/**
 * Copy the inference results into the caller-provided output buffers.
 * @param outputs array of output buffers to fill.
 * @param num number of entries in `outputs`.
 **/
int CopyOutputsData(void **outputs, int num);
/**
 * Get the size in bytes of the workspace the inference needs.
 **/
int GetBufferSize();
/**
 * Hand the inference a caller-allocated workspace of GetBufferSize() bytes.
 **/
int SetBuffer(void *buffer);
/**
 * Free the memory of packed weights, and set the workspace and input pointers to NULL.
 **/
void FreeResource();
/**
 * Run the network inference.
 **/
void Inference();
#ifdef __cplusplus
}
#endif

View File

@ -0,0 +1,147 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "session.h"
#include "mmodel.h"
#include "net.h"
#include <new>
namespace mindspore {
namespace lite {
// Build the fixed tensor descriptions for this generated model:
// one int8 input of shape 1x28x28x1, one float32 output of shape 1x10.
// The `model` argument is unused -- the graph itself was turned into C code
// at generation time.
int LiteSession::CompileGraph(lite::Model *model) {
  inputs_.resize(1);
  Vector<int> in_shape_0;
  in_shape_0.resize(4);
  in_shape_0[0] = 1;
  in_shape_0[1] = 28;
  in_shape_0[2] = 28;
  in_shape_0[3] = 1;
  inputs_[0] = new (std::nothrow) MTensor(String("graph_input-0"), kNumberTypeInt8, in_shape_0);
  MS_ERROR_IF_NULL(inputs_[0]);
  outputs_.resize(1);
  Vector<int> out_shape_0;
  out_shape_0.resize(2);
  out_shape_0[0] = 1;
  out_shape_0[1] = 10;
  outputs_[0] =
    new (std::nothrow) MTensor(String("int8toft32_Softmax-7_post0/output-0"), kNumberTypeFloat32, out_shape_0);
  MS_ERROR_IF_NULL(outputs_[0]);
  return RET_OK;
}
// Run one inference: feed the input tensors' buffers to the generated code,
// execute it, then copy the results into the output tensors.
// The before/after callbacks are ignored in the micro runtime.
// NOTE(review): the arrays below are variable-length arrays -- a GCC/Clang
// extension, not standard C++.
int LiteSession::RunGraph(const KernelCallBack &before, const KernelCallBack &after) {
  const void *inputs_data[inputs_.size()];
  for (size_t i = 0; i < inputs_.size(); ++i) {
    inputs_data[i] = inputs_[i]->MutableData();
  }
  SetInputs(inputs_data, inputs_.size());
  Inference();
  void *outputs_data[outputs_.size()];
  for (size_t i = 0; i < outputs_.size(); ++i) {
    outputs_data[i] = outputs_[i]->MutableData();
  }
  CopyOutputsData(outputs_data, outputs_.size());
  return RET_OK;
}
// Release generated-code state, the runtime workspace, and all tensors.
LiteSession::~LiteSession() {
  FreeResource();
  if (runtime_buffer_ != nullptr) {
    free(runtime_buffer_);
    runtime_buffer_ = nullptr;
  }
  for (auto &tensor : inputs_) {
    delete tensor;  // delete on nullptr is a no-op
    tensor = nullptr;
  }
  for (auto &tensor : outputs_) {
    delete tensor;
    tensor = nullptr;
  }
}
// Allocate the workspace the generated code needs and register it with
// the generated net. The buffer is owned by the session (freed in the dtor).
int LiteSession::InitRuntimeBuffer() {
  const int buffer_size = GetBufferSize();
  runtime_buffer_ = malloc(buffer_size);
  if (runtime_buffer_ == nullptr) {
    return RET_ERROR;
  }
  return SetBuffer(runtime_buffer_) == RET_OK ? RET_OK : RET_ERROR;
}
// Expose the concrete MTensor inputs through the abstract MSTensor interface.
Vector<tensor::MSTensor *> LiteSession::GetInputs() const {
  Vector<tensor::MSTensor *> result;
  for (const auto &in : inputs_) {
    result.push_back(in);
  }
  return result;
}
// Lookup by node name is not supported in the micro runtime; this stub
// always returns an empty vector.
Vector<tensor::MSTensor *> LiteSession::GetOutputsByNodeName(const String &node_name) const {
  Vector<tensor::MSTensor *> outputs;
  return outputs;
}
// Collect the names of all output tensors, in output order.
Vector<String> LiteSession::GetOutputTensorNames() const {
  Vector<String> names;
  for (const auto &out : outputs_) {
    names.push_back(out->tensor_name());
  }
  return names;
}
// Linear search over the (small) output list; nullptr if no name matches.
mindspore::tensor::MSTensor *LiteSession::GetOutputByTensorName(const String &tensor_name) const {
  for (const auto &out : outputs_) {
    if (out->tensor_name() == tensor_name) {
      return out;
    }
  }
  return nullptr;
}
} // namespace lite
// Create a session and allocate its runtime workspace.
// Returns nullptr on failure. The original returned nullptr through
// MS_NULLPTR_IF_ERROR without deleting the half-initialized session,
// leaking it; the explicit cleanup below fixes that.
session::LiteSession *session::LiteSession::CreateSession(const lite::Context *context) {
  auto *session = new (std::nothrow) lite::LiteSession();
  MS_NULLPTR_IF_NULL(session);
  int ret = session->InitRuntimeBuffer();
  if (ret != lite::RET_OK) {
    delete session;
    return nullptr;
  }
  return session;
}
// Create a session from a serialized model buffer.
// Returns nullptr on failure. The original leaked both `session` and `model`
// when Model::Import or CompileGraph failed; every error path below cleans
// up what was already allocated.
session::LiteSession *session::LiteSession::CreateSession(const char *model_buf, size_t size,
                                                          const lite::Context *context) {
  session::LiteSession *session = CreateSession(context);
  MS_NULLPTR_IF_NULL(session);
  lite::Model *model = lite::Model::Import(model_buf, size);
  if (model == nullptr) {
    delete session;
    return nullptr;
  }
  int ret = session->CompileGraph(model);
  delete model;  // only needed during graph compilation
  if (ret != lite::RET_OK) {
    delete session;
    return nullptr;
  }
  return session;
}
} // namespace mindspore

View File

@ -0,0 +1,85 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_MICRO_LIBRARY_SOURCE_SESSION_H_
#define MINDSPORE_LITE_MICRO_LIBRARY_SOURCE_SESSION_H_
#include "include/errorcode.h"
#include "include/lite_session.h"
#include "tensor.h"
namespace mindspore {
namespace lite {
// Return lite::RET_ERROR from the current function when `ptr` is null.
#define MS_ERROR_IF_NULL(ptr)            \
  do {                                   \
    if ((ptr) == nullptr) {              \
      return mindspore::lite::RET_ERROR; \
    }                                    \
  } while (0)
// Return nullptr from the current function when `ptr` is null.
#define MS_NULLPTR_IF_NULL(ptr) \
  do {                          \
    if ((ptr) == nullptr) {     \
      return nullptr;           \
    }                           \
  } while (0)
// Return nullptr from the current function when a status code is not RET_OK.
#define MS_NULLPTR_IF_ERROR(ptr)          \
  do {                                    \
    if ((ptr) != mindspore::lite::RET_OK) { \
      return nullptr;                     \
    }                                     \
  } while (0)
// Micro-runtime session: fixed tensor descriptions plus a malloc'ed workspace
// shared with the generated inference code. Create via CreateSession(); the
// destructor releases the workspace and all tensors.
class LiteSession : public session::LiteSession {
 public:
  LiteSession() = default;
  ~LiteSession() override;
  void BindThread(bool if_bind) override {}
  int CompileGraph(lite::Model *model) override;
  Vector<tensor::MSTensor *> GetInputs() const override;
  mindspore::tensor::MSTensor *GetInputsByTensorName(const String &tensor_name) const override { return nullptr; }
  int RunGraph(const KernelCallBack &before = nullptr, const KernelCallBack &after = nullptr) override;
  Vector<tensor::MSTensor *> GetOutputsByNodeName(const String &node_name) const override;
  Vector<String> GetOutputTensorNames() const override;
  mindspore::tensor::MSTensor *GetOutputByTensorName(const String &tensor_name) const override;
  // Dynamic resize is not supported by generated code.
  int Resize(const Vector<tensor::MSTensor *> &inputs, const Vector<Vector<int>> &dims) override { return RET_ERROR; }
  // Allocate the workspace and hand it to the generated net (SetBuffer).
  int InitRuntimeBuffer();

 private:
  Vector<MTensor *> inputs_;
  Vector<MTensor *> outputs_;
  // Initialized to nullptr so the destructor's free() is well-defined even
  // when InitRuntimeBuffer() was never called or failed (it was previously
  // left uninitialized).
  void *runtime_buffer_ = nullptr;
};
} // namespace lite
} // namespace mindspore
#endif // MINDSPORE_LITE_MICRO_LIBRARY_SOURCE_SESSION_H_

View File

@ -0,0 +1,307 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifdef NOT_USE_STL
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <float.h>
#include <stdint.h>
#include "include/lite_utils.h"
namespace mindspore {
// Default constructor: even an empty String owns a 1-byte buffer holding '\0',
// so buffer_ is never null after construction.
String::String() {
  buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * 1));
  if (buffer_ == nullptr) {
    MS_C_EXCEPTION("malloc data failed");
  }
  buffer_[0] = '\0';
  size_ = 0;
}
// Fill constructor: `count` copies of `ch`.
String::String(size_t count, char ch) {
  buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * (count + 1)));
  if (buffer_ == nullptr) {
    MS_C_EXCEPTION("malloc data failed");
  }
  memset(buffer_, ch, count);
  buffer_[count] = '\0';
  size_ = count;
}
// From at most `count` characters of the C string `s` (nullptr -> empty).
String::String(const char *s, size_t count) {
  if (s == nullptr) {
    buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * 1));
    if (buffer_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    buffer_[0] = '\0';
    size_ = 0;
    return;
  }
  size_t size_s = strlen(s);
  if (size_s <= count) {
    size_ = size_s;
  } else {
    size_ = count;
  }
  buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * (size_ + 1)));
  if (buffer_ == nullptr) {
    MS_C_EXCEPTION("malloc data failed");
  }
  strncpy(buffer_, s, size_);
  buffer_[size_] = '\0';
}
// From a whole NUL-terminated C string (nullptr -> empty).
String::String(const char *s) {
  if (s == nullptr) {
    buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * 1));
    if (buffer_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    buffer_[0] = '\0';
    size_ = 0;
    return;
  }
  size_ = strlen(s);
  buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * (size_ + 1)));
  if (buffer_ == nullptr) {
    MS_C_EXCEPTION("malloc data failed");
  }
  memcpy(buffer_, s, size_ + 1);
}
// Copy constructor: deep-copies the other string's buffer.
String::String(const String &other) {
  buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * (other.size_ + 1)));
  if (buffer_ == nullptr) {
    MS_C_EXCEPTION("malloc data failed");
  }
  size_ = other.size_;
  memcpy(buffer_, other.buffer_, size_ + 1);
}
// Substring constructor: copies up to `count` characters starting at `pos`
// (std::string-like semantics: pos past the end yields an empty string,
// count == npos or overlong counts are clipped to the end).
String::String(const String &other, size_t pos, size_t count) {
  if (pos >= other.size_) {
    buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * 1));
    if (buffer_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    buffer_[0] = '\0';
    size_ = 0;
  } else {
    if (count == npos) {
      count = other.size_ - pos;
    }
    if (pos + count > other.size_) {
      size_ = other.size_ - pos;
    } else {
      size_ = count;
    }
    buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * (size_ + 1)));
    if (buffer_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    strncpy(buffer_, other.buffer_ + pos, size_);
    buffer_[size_] = '\0';
  }
}
// buffer_ is always a valid allocation (ctors throw on failure), so free is safe.
String::~String() { free(buffer_); }
// Copy assignment with self-assignment guard; deep-copies the source buffer.
String &String::operator=(const String &str) {
  if (this == &str) {
    return *this;
  }
  free(buffer_);
  buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * (str.size_ + 1)));
  if (buffer_ == nullptr) {
    MS_C_EXCEPTION("malloc data failed");
  }
  size_ = str.size_;
  memcpy(buffer_, str.buffer_, size_ + 1);
  return *this;
}
// Assignment from a C string (nullptr -> empty).
// NOTE(review): the old buffer is freed before `str` is read, so assigning a
// pointer into this string's own buffer (e.g. s = s.c_str()) would read freed
// memory -- callers must not do that.
String &String::operator=(const char *str) {
  free(buffer_);
  if (str == nullptr) {
    buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * 1));
    if (buffer_ == nullptr) {
      MS_C_EXCEPTION("malloc data failed");
    }
    buffer_[0] = '\0';
    size_ = 0;
    return *this;
  }
  size_t size_s = strlen(str);
  buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * (size_s + 1)));
  if (buffer_ == nullptr) {
    MS_C_EXCEPTION("malloc data failed");
  }
  size_ = size_s;
  memcpy(buffer_, str, size_ + 1);
  return *this;
}
// Bounds-checked element access; raises MS_C_EXCEPTION when out of range
// (unlike std::string, operator[] below is bounds-checked too).
char &String::at(size_t pos) {
  if (pos >= size_) {
    MS_C_EXCEPTION("pos out of range");
  }
  return buffer_[pos];
}
const char &String::at(size_t pos) const {
  if (pos >= size_) {
    MS_C_EXCEPTION("pos out of range");
  }
  return buffer_[pos];
}
char &String::operator[](size_t pos) {
  if (pos >= size_) {
    MS_C_EXCEPTION("pos out of range");
  }
  return this->at(pos);
}
const char &String::operator[](size_t pos) const {
  if (pos >= size_) {
    MS_C_EXCEPTION("pos out of range");
  }
  return this->at(pos);
}
// Raw buffer access; the buffer is always NUL-terminated.
char *String::data() noexcept { return buffer_; };
const char *String::data() const noexcept { return buffer_; }
const char *String::c_str() const noexcept { return buffer_; }
// capacity
bool String::empty() const noexcept { return size_ == 0; }
size_t String::size() const noexcept { return size_; }
size_t String::length() const noexcept { return size_; }
// operations
// Reset to the empty string; reallocates the minimal 1-byte buffer.
void String::clear() noexcept {
  free(buffer_);
  buffer_ = reinterpret_cast<char *>(malloc(sizeof(char) * 1));
  if (buffer_ == nullptr) {
    MS_C_EXCEPTION("malloc data failed");
  }
  buffer_[0] = '\0';
  size_ = 0;
}
// NOTE(review): unlike std::string, this member operator+ MUTATES the left
// operand and returns a reference to it (a + b modifies a). The signature is
// declared this way in lite_utils.h, so it cannot be changed here -- callers
// should prefer the free operator+ overloads for value semantics.
String &String::operator+(const String &str) {
  (*this) += str;
  return *this;
}
// Append another String; grows into a freshly malloc'ed buffer.
String &String::operator+=(const String &str) {
  size_t new_size = size_ + str.size_;
  char *tmp = reinterpret_cast<char *>(malloc(sizeof(char) * (new_size + 1)));
  if (tmp == nullptr) {
    MS_C_EXCEPTION("malloc data failed");
  }
  memcpy(tmp, this->buffer_, size_ + 1);
  strncat(tmp, str.buffer_, str.size_);
  tmp[new_size] = '\0';
  free(buffer_);
  buffer_ = tmp;
  size_ = new_size;
  return *this;
}
// Append a C string (nullptr is a no-op).
String &String::operator+=(const char *str) {
  if (str == nullptr) {
    return *this;
  }
  size_t str_size = strlen(str);
  size_t new_size = size_ + str_size;
  char *tmp = reinterpret_cast<char *>(malloc(sizeof(char) * (new_size + 1)));
  if (tmp == nullptr) {
    MS_C_EXCEPTION("malloc data failed");
  }
  memcpy(tmp, this->buffer_, size_ + 1);
  strncat(tmp, str, str_size);
  tmp[new_size] = '\0';
  free(buffer_);
  buffer_ = tmp;
  size_ = new_size;
  return *this;
}
// Append a single character.
String &String::operator+=(const char ch) {
  char *tmp = reinterpret_cast<char *>(malloc(sizeof(char) * (size_ + 2)));
  if (tmp == nullptr) {
    MS_C_EXCEPTION("malloc data failed");
  }
  memcpy(tmp, this->buffer_, size_ + 1);
  tmp[size_] = ch;
  tmp[size_ + 1] = '\0';
  free(buffer_);
  buffer_ = tmp;
  size_ += 1;
  return *this;
}
// Append `count` copies of `ch` (std::string::append(count, ch) semantics).
// The original appended exactly one character and ignored `count`.
String &String::append(size_t count, const char ch) {
  for (size_t i = 0; i < count; ++i) {
    (*this) += ch;
  }
  return *this;
}
// Append another String; delegates to operator+=.
String &String::append(const String &str) {
  (*this) += str;
  return *this;
}
// Append a C string; nullptr is a no-op (also handled by operator+=).
String &String::append(const char *str) {
  if (str == nullptr) {
    return *this;
  }
  (*this) += str;
  return *this;
}
// Lexicographic comparison via strcmp: <0, 0, >0 like std::string::compare.
int String::compare(const String &str) const { return strcmp(buffer_, str.buffer_); }
int String::compare(const char *str) const { return strcmp(buffer_, str); }
// Substring copy; clipping semantics live in the (String, pos, count) ctor.
String String::substr(size_t pos, size_t count) const { return String(*this, pos, count); }
// Value-semantics concatenation: returns lhs followed by rhs.
String operator+(const String &lhs, const char *rhs) {
  String str = lhs;
  str += rhs;
  return str;
}
// Value-semantics concatenation: returns lhs followed by rhs.
// The original copied rhs and appended lhs, producing "rhs + lhs" --
// the operands were concatenated in the wrong order.
String operator+(const char *lhs, const String &rhs) {
  String str = lhs;
  str += rhs;
  return str;
}
// Equality/inequality in terms of the three-way compare() above.
bool operator!=(const String &lhs, const String &rhs) { return lhs.compare(rhs) != 0; }
bool operator==(const String &lhs, const String &rhs) { return lhs.compare(rhs) == 0; }
bool operator==(const String &lhs, const char *rhs) { return lhs.compare(rhs) == 0; }
bool operator==(const char *lhs, const String &rhs) { return rhs.compare(lhs) == 0; }
// Decimal formatting. 16 bytes comfortably holds "-2147483648" plus NUL.
String to_String(int32_t value) {
  char tmp[sizeof(int32_t) * 4];
  snprintf(tmp, sizeof(int32_t) * 4, "%d", value);
  return String(tmp, strlen(tmp));
}
// "%f" formatting; the buffer is sized for the largest finite float
// (FLT_MAX_10_EXP integer digits plus sign, point, 6 fraction digits, NUL).
String to_String(float value) {
  char tmp[FLT_MAX_10_EXP + 20];
  snprintf(tmp, FLT_MAX_10_EXP + 20, "%f", value);
  return String(tmp, strlen(tmp));
}
} // namespace mindspore
#endif // NOT_USE_STL

View File

@ -0,0 +1,83 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "tensor.h"
namespace mindspore {
namespace lite {
// Size in bytes of one element of the given type; 0 for composite/unknown
// types. Note kNumberTypeFloat16 shares the 2-byte int16 storage size.
size_t DataTypeSize(const TypeId type) {
  switch (type) {
    case kNumberTypeFloat64:
      return sizeof(double);
    case kNumberTypeFloat:
    case kNumberTypeFloat32:
      return sizeof(float);
    case kNumberTypeInt8:
      return sizeof(int8_t);
    case kNumberTypeUInt8:
      return sizeof(uint8_t);
    case kNumberTypeFloat16:
    case kNumberTypeInt16:
      return sizeof(int16_t);
    case kNumberTypeInt32:
      return sizeof(int32_t);
    case kNumberTypeInt64:
      return sizeof(int64_t);
    case kNumberTypeUInt16:
      return sizeof(uint16_t);
    case kNumberTypeUInt32:
      return sizeof(uint32_t);
    case kNumberTypeUInt64:
      return sizeof(uint64_t);
    case kNumberTypeBool:
      return sizeof(bool);
    case kObjectTypeString:
      return sizeof(char);  // per-character size
    case kObjectTypeTensorType:
    default:
      return 0;  // no meaningful element size
  }
}
// Release the lazily-allocated data buffer (see MutableData below).
MTensor::~MTensor() {
  if (data_ != nullptr) {
    free(data_);
    data_ = nullptr;
  }
}
// Product of all dimensions; an empty shape yields 1 (scalar).
int MTensor::ElementsNum() const {
  int elements = 1;
  for (int i : shape_) {
    elements *= i;
  }
  return elements;
}
// Total byte size: element size (0 for composite types) times element count.
size_t MTensor::Size() const {
  size_t element_size = DataTypeSize(data_type_);
  return element_size * ElementsNum();
}
// Lazily allocate the backing buffer on first access.
// NOTE(review): the malloc result is not checked, so this can return nullptr
// on allocation failure -- callers must handle that.
void *MTensor::MutableData() {
  if (data_ == nullptr) {
    data_ = malloc(this->Size());
  }
  return data_;
}
} // namespace lite
} // namespace mindspore

View File

@ -0,0 +1,73 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_MICRO_LIBRARY_SOURCE_TENSOR_H_
#define MINDSPORE_LITE_MICRO_LIBRARY_SOURCE_TENSOR_H_
#include "include/ms_tensor.h"
#include "ir/format.h"
namespace mindspore {
namespace lite {
// Quantization parameters attached to one tensor.
// All scalar members carry explicit default initializers so a
// default-constructed LiteQuantParam holds well-defined values; previously
// scale/zeroPoint/inited/bitNum/roundType/multiplier/dstDtype were left
// indeterminate on default-initialization. Zero defaults match what a
// value-initialized (`LiteQuantParam{}`) instance already produced, so
// existing aggregate/brace usage is unaffected (C++14+ aggregates permit
// default member initializers).
struct LiteQuantParam {
  double scale{0.0};      // quantization scale factor
  int32_t zeroPoint{0};   // quantization zero point
  float var_corr{1};      // variance correction factor
  float mean_corr{0};     // mean correction factor
  bool inited{false};     // true once the parameters have been populated
  Vector<float> clusters{};
  int bitNum{0};          // quantization bit width
  int roundType{0};       // NOTE(review): semantics defined by the converter — confirm
  int multiplier{0};
  int dstDtype{0};        // target data type id
};
// Minimal MSTensor implementation for the micro (code-generation) runtime.
// Compared with the full lite runtime tensor it supports no custom allocator
// and is fixed to NHWC format; the data buffer is lazily allocated via
// MutableData() and released by the destructor (both defined out of line).
class MTensor : public mindspore::tensor::MSTensor {
 public:
  MTensor() = default;
  // Builds a tensor descriptor from name, element type and dimensions.
  // No data buffer is allocated here.
  MTensor(String name, TypeId type, Vector<int> shape) : tensor_name_(name), data_type_(type), shape_(shape) {}
  ~MTensor() override;
  // Allocators are not supported in the micro runtime; intentionally a no-op.
  void set_allocator(mindspore::Allocator *allocator) override {}
  mindspore::Allocator *allocator() const override { return nullptr; }
  TypeId data_type() const override { return data_type_; }
  void set_data_type(TypeId data_type) override { data_type_ = data_type; }
  // Format is fixed to NHWC; the setter is intentionally a no-op.
  void set_format(mindspore::Format format) override {}
  mindspore::Format format() const override { return mindspore::NHWC; }
  Vector<int> shape() const override { return shape_; }
  void set_shape(const Vector<int> &shape) override { shape_ = shape; }
  // Product of all dimensions (defined out of line).
  int ElementsNum() const override;
  // Total size in bytes: element width times element count (defined out of line).
  size_t Size() const override;
  String tensor_name() const override { return tensor_name_; }
  void set_tensor_name(const String &name) override { tensor_name_ = name; }
  // Lazily allocates the data buffer on first call (defined out of line).
  void *MutableData() override;
  void *data() override { return data_; }
  // NOTE(review): replaces the held pointer without freeing a previously
  // owned buffer; the destructor will free whatever pointer is held last.
  // Confirm intended ownership semantics at call sites to avoid leaks or
  // double frees.
  void set_data(void *data) override { data_ = data; }
  Vector<LiteQuantParam> quant_params() const override { return this->quant_params_; }
  void set_quant_params(const Vector<LiteQuantParam> quant_params) override { this->quant_params_ = quant_params; }
 private:
  String tensor_name_;
  TypeId data_type_;
  Vector<int> shape_;
  void *data_ = nullptr;  // lazily allocated; owned and freed by this tensor
  Vector<LiteQuantParam> quant_params_;
};
} // namespace lite
} // namespace mindspore
#endif // MINDSPORE_LITE_MICRO_LIBRARY_SOURCE_TENSOR_H_

View File

@ -0,0 +1,399 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "weight.h"
// Number of worker threads the generated kernels may use (micro default: 1).
int g_thread_num = 1;
// Shared scratch buffer used by the generated kernels; presumably allocated
// by the runtime before inference — verify against the generated net code.
unsigned char *g_Buffer = 0;
const int8_t g_Weight1[] = {
-101, -12, 52, 40, 127, 91, 23, 66, -109, 13, -61, -7, -15, -127, -36, -16, -67, 6,
30, -83, -127, 97, 62, -82, 25, 33, 27, -120, -11, -73, -46, -26, -127, 45, 5, -60,
-20, 68, 101, -94, 26, 123, -127, -115, 78, -127, -39, 42, -106, 49, 105, -12, 24, 80,
-19, -127, -106, -34, 5, -10, 55, 96, 106, -7, -13, 106, 22, -12, 108, 127, 27, 73,
7, 71, 16, -6, 96, 23, 37, 127, 75, -57, -80, -127, -96, -47, -19, -100, 30, 69,
53, 86, 66, 75, 20, -40, -63, -71, -127, -52, -127, -39, 57, -41, 45, 22, 93, 62,
};
const int32_t g_Weight2[] = {
-3147, 16811, -3738, 18629, -147, -9883, -7741, -22579, -16827, 3940, -6678, -16145,
};
const int8_t g_Weight3[] = {
-28, -29, 54, -29, -16, -13, -6, 44, 34, -41, -11, 49, 7, -22, 21, -44, -47, -6, 85, 45, 43,
16, 17, 53, 71, -14, -22, -12, -11, 51, 102, 39, 39, 6, 64, 92, -52, -3, -43, -1, 32, -23,
-70, -29, -21, -39, 47, -16, 11, -69, -12, -33, 49, -59, 25, 6, 40, -32, 87, -12, 20, -70, -86,
-20, -45, -48, -7, -13, -22, -4, 108, 17, -31, 9, -13, 94, -41, -60, 30, -59, -75, 33, -38, 8,
-58, 37, -19, 90, -35, -53, 84, -39, -56, 64, 36, 38, -63, 13, -73, 35, -91, -41, 72, -31, -20,
127, -13, -3, 45, 13, 3, -21, -37, 28, -47, 27, 29, -3, 56, -8, -53, -6, -36, 11, -31, -72,
-17, -39, -37, 49, 30, -25, -127, -33, -48, 4, -75, -8, 101, -40, -57, 48, -38, 60, -42, 98, -77,
66, -51, -34, -15, -31, -10, 78, 116, 24, -8, 1, -102, -48, 21, 38, 44, 4, -46, 33, -55, 36,
79, -19, -30, -9, 15, 95, 27, 64, 63, 7, 44, 62, -84, 17, -15, 85, 5, 0, 6, -57, -50,
65, 5, 17, -17, 4, 5, 1, 55, 61, -12, -38, -21, 25, -13, 39, -15, -44, 4, -68, -17, -9,
-25, 36, 0, -3, 0, 22, -28, 43, 45, 16, -57, -45, -32, 32, -2, 7, 55, -25, -63, 2, -8,
5, -76, 15, 13, -62, -28, 74, -61, -41, 36, -74, -45, -77, 5, 42, 13, 12, -51, -27, -16, 56,
-28, -25, -14, -37, -71, 13, -9, 4, 5, 48, 30, 3, -52, -67, -21, -24, -16, 41, 25, -54, -58,
-15, -81, 63, 60, -13, -120, -87, -47, 93, -73, 16, 7, -1, 27, 65, -46, -55, -72, -31, -40, 51,
18, 4, -4, 21, -44, 50, 53, -52, -85, -75, -92, 79, 9, 67, 44, -15, -43, 57, 69, -69, -59,
-59, -74, 75, -127, 61, 29, 13, 82, 8, 7, 12, 30, 11, -43, 14, 40, -10, 4, -10, 3, 63,
36, 21, -23, 13, -45, 52, 22, -3, -18, 71, 63, 44, 14, -25, -45, -3, 12, 29, 4, 14, -5,
28, 52, 3, -1, -28, 12, -41, -19, -17, 40, -15, 10, -22, -28, 9, 6, -3, 59, 13, -20, -6,
102, 8, -12, 7, -32, 89, -4, 59, 2, 41, 1, 24, -21, -43, -61, 64, 17, -16, -37, -12, -34,
-6, 71, -25, -57, -29, -20, -7, -18, -84, -49, -50, 107, -56, 102, -28, 24, -15, 18, -38, -48, 22,
-25, 0, 127, 5, -60, -36, 90, -29, -5, -34, -71, 77, 8, -50, 14, -13, -6, 25, -38, 56, 26,
-50, 14, 13, 3, -20, 36, -9, 7, -8, -91, -3, -1, -6, 16, 1, -62, 29, 42, 14, -55, -25,
-62, -11, -5, -4, -43, 24, -13, -36, 6, -43, -3, 17, -99, 37, 38, -38, 30, -24, -55, -30, -8,
-7, -27, -11, -98, -46, -8, 22, 36, -35, -51, 68, -39, 38, -15, 7, -41, 9, 5, 57, -36, -2,
-9, -70, -63, -37, -29, -16, -127, 26, 27, -2, 17, -49, -57, 31, -30, 37, -23, -18, -80, -17, 3,
50, -7, -38, -21, 42, -14, 25, 10, 3, -2, -39, -26, 57, 5, -42, 20, -49, -21, -29, -46, -5,
7, -38, -70, 66, 29, -34, -127, 31, -26, 7, -30, -24, -11, -33, -55, 62, 22, -7, 23, 56, -2,
38, 101, 29, -2, 37, 10, 4, -10, 12, 23, -43, 103, -39, -37, -47, 86, -40, -15, -32, -52, 67,
3, -60, 37, -49, 19, 26, 88, -38, -85, -19, -72, 19, -63, 9, 99, 29, 5, -34, 46, -71, -1,
-4, -57, 33, -20, -7, 96, -37, 49, 2, 19, 23, 24, -50, 36, -8, 42, -56, 47, -28, 25, 27,
72, 3, -29, -50, 5, 44, -8, -62, -6, -5, -44, 6, 60, 57, 8, -7, 11, 17, -100, 19, -84,
62, -37, 28, -54, -66, -97, 91, -31, 4, -11, -80, -33, -38, 38, 22, -4, -18, -3, -38, -31, 10,
-4, 23, -8, 24, -102, -125, -33, -4, 11, 2, 65, -13, -113, 53, -81, 27, 45, 36, -64, -31, -97,
112, -62, -18, 47, -81, -16, -89, 51, 67, -20, 18, 48, -59, -22, 79, -29, -1, 90, 50, -127, -48,
-16, -31, -1, -19, 81, -26, -80, 15, -20, 41, 84, 75, -55, -11, -52, 45, -59, 12, 22, -37, 41,
-35, 46, 81, -78, 64, 80, -36, -64, 16, -64, -40, 102, 8, -86, -54, -17, -54, -30, -15, 11, -52,
-68, 14, 21, 42, 47, -60, -98, -65, -58, 24, -50, -43, -15, -105, 22, -77, 58, 0, -47, 27, -31,
14, -16, -9, -6, -58, -69, -62, 62, 73, -33, 66, 62, -43, -12, 24, -105, 46, 30, 106, 39, -57,
-27, -61, -75, 62, -92, -43, -127, 8, 44, 6, 31, -79, -115, -84, -24, -24, -68, -61, -35, -15, -51,
-10, 64, -22, -99, -1, 14, -49, 0, -49, -16, -74, 40, -17, -32, 3, -16, 4, -21, 2, -46, -1,
-43, -10, 39, 59, -21, -68, 15, 0, -53, -24, -40, 42, -87, 24, 13, 39, -17, -89, -44, -50, -69,
-2, -65, -13, -20, 27, -24, -18, -83, -23, 68, -40, -94, 66, -15, -14, -15, 17, -51, -11, -22, -21,
86, -64, -72, 50, 46, 20, -39, -4, 31, 45, -93, -2, 127, -27, -68, 51, -33, 0, 40, 7, 53,
-62, -44, 25, 116, 21, 0, -3, -8, 90, 47, -44, 17, -22, -44, 54, 72, 1, -11, -19, -4, 15,
3, -40, -26, -14, -10, 17, 15, 10, 11, -13, 23, 11, -8, -11, -23, -5, 13, -61, -23, -27, -27,
-47, 96, -30, -17, -45, -39, -38, -4, 1, 28, -17, -3, -32, 67, -21, 28, -27, -36, 5, -30, 12,
25, 9, 3, -21, 23, 5, -53, -5, -82, 3, 29, -3, -92, -47, -42, -1, -16, -60, -24, -8, -33,
-53, 41, -3, -37, 34, -2, 0, 36, 18, 32, -61, -89, -62, 17, -6, -104, 26, 6, -40, 127, -19,
-66, -9, -3, 38, -42, -50, 89, -59, -51, 31, -26, 4, -78, 47, 27, 74, -37, -44, 74, -38, -35,
77, 21, 32, -74, 74, -41, 69, -80, -79, 65, -74, -33, 66, -33, 13, 45, -3, -7, -41, -4, 21,
49, -1, 4, -30, 4, 30, -10, -21, -15, -23, -13, 7, 25, 35, 18, 16, -21, 9, -6, -66, 64,
-17, 22, 16, 75, 7, 12, 1, -58, 52, 9, -43, 50, -22, 44, 44, 40, 23, 21, -44, 23, 53,
29, 17, 11, -11, -4, 31, -49, 16, 20, 20, 70, 0, -36, 16, -38, 1, -67, -51, -23, -55, -59,
53, -2, -23, 10, -17, -1, 30, -35, -38, -14, -6, -23, 15, 61, -1, -35, 33, -5, 68, -57, 32,
10, -7, -44, 122, 36, 15, -18, -22, -127, -60, -72, 30, 54, 3, -8, 27, -54, 11, -35, 7, -63,
14, -62, -2, -49, -25, -21, 54, -11, -11, 10, 4, -39, -21, -33, 61, 25, -14, -3, 49, -65, 29,
11, -34, -45, -60, -26, 20, 10, 3, 29, -9, 27, 17, -54, -10, 64, -3, -22, -71, 76, -57, -50,
-8, -63, -3, -87, -2, 29, 65, -57, -36, 32, -46, -85, 30, -97, 8, -82, -32, -69, 9, -46, -75,
-33, -63, -16, -39, -42, -36, 17, -56, -7, -84, -36, -27, -14, -18, 0, -66, -24, 1, -17, -127, 50,
-108, -27, -79, -29, -5, -6, -116, -109, -32, -49, -115, 16, -56, -66, -38, -39, 5, -24, -100, -114, 6,
1, -10, -53, 24, -44, 47, 86, 36, 37, -23, 10, -4, -6, -45, -49, -4, -36, -16, -6, 29, -8,
-44, -56, 7, -5, -70, 1, -19, 39, 22, 34, 52, 57, -34, -27, 3,
};
const int32_t g_Weight4[] = {
-1881, -1752, -3047, -3475, -1192, -6212, -2717, -535, -2109, -1355, -969, -828,
};
const int8_t g_Weight6[] = {
-8, 22, 6, -9, -14, 4, -6, -1, 17, 29, 33, 5, 12, 3, 29, 8, 24, 8, 3, 7, 33, 26,
28, 6, 3, 31, -12, 26, -15, -1, -18, -4, 9, 0, -17, 28, -25, -7, -26, 29, -37, -18, 22, -4,
-23, -11, -9, 25, -34, 3, -32, 18, -7, 10, 26, -10, 17, -21, -51, 23, 25, 20, 2, 62, -13, 23,
-16, 3, 20, 39, -5, 46, -24, -7, -14, 51, -16, 3, -9, -2, -4, -19, 6, 11, -14, 4, -3, 59,
-12, 9, 30, -14, -14, -33, -20, -27, -39, -1, 34, -9, 5, -6, 38, -6, 10, -72, 10, -22, -52, 13,
0, 33, 16, 0, 69, -10, -22, -7, -17, 18, -4, 14, 17, -12, 10, 4, -23, 17, 49, 16, -1, 18,
-21, -15, -20, 56, -8, 20, 2, 26, 6, 33, 8, 34, -13, 1, 33, 21, -53, 54, 14, 21, 6, -21,
-15, 13, -8, 23, 68, 1, 10, 26, 13, -66, 31, -14, 29, -40, 5, 38, 59, 21, 26, 3, 9, -69,
24, -45, 31, -59, -4, 16, 26, -22, 12, -38, 26, -24, 27, -40, 10, -22, -21, 43, 87, -29, 39, -41,
30, -75, 2, -26, 42, -35, -2, 33, -11, 9, -4, -15, -23, -16, -41, -25, 14, 27, 25, -28, -9, 6,
14, -11, -3, -9, 30, 15, 12, -9, 48, 14, -22, 18, 5, 33, 9, -12, 54, 27, 25, 28, -36, -25,
25, -26, -23, -3, 27, -31, -29, -76, 0, -30, -15, -66, 2, 6, -5, -60, 29, -20, -22, -40, -33, -43,
3, -11, -51, -12, -44, -6, 6, -19, -22, -9, -36, -80, -6, 13, 0, 15, -14, 34, -21, -15, 15, -27,
-41, -33, 11, -20, -9, 2, -9, 7, -9, 26, 20, 17, -11, 3, -35, 10, 5, 16, -6, 0, 20, -14,
0, -1, -22, -34, -19, 7, 10, 0, 27, 5, 15, -17, -15, -10, 10, 8, 41, -14, 4, -48, -42, -2,
-18, -8, 12, -1, 6, -29, 5, -22, 34, -26, -2, -10, 4, 10, 21, 1, 11, -13, 5, -13, 59, 1,
35, -3, -11, -26, 22, 36, 35, 28, -25, -6, 16, 20, -21, 1, -14, 28, 1, -29, -32, 12, 10, -24,
-7, 15, -40, 30, 11, -11, 18, 2, -40, 50, -16, -32, -36, -2, 5, -21, -18, 16, 26, 43, 4, 53,
28, -28, -2, 14, -33, 30, -14, -14, 34, 39, -22, 10, 66, -35, 8, 9, -8, 33, -41, -5, 67, 52,
12, 39, -13, 5, -23, -7, -4, -8, 7, 18, -24, -29, -27, 7, -34, 11, 23, -22, 25, 4, 20, -12,
-19, -24, 15, 21, -23, 14, -17, 5, 70, -57, 34, -19, -11, -19, -5, -73, 59, 13, -56, 47, -41, -69,
-26, 23, -35, 13, 19, -27, 9, -18, -37, 50, -70, 6, -24, 24, 3, 26, -43, 32, -17, 7, 25, 17,
7, 26, 26, 12, -4, -7, 1, 27, -16, 31, 17, 6, 22, -16, 16, 50, -14, -34, 6, -21, -48, 6,
-36, 74, 11, -26, 17, -3, -7, -41, 9, 9, 1, -29, -24, 62, -49, 38, -31, -17, 14, 40, -58, -38,
11, 28, -14, 45, -9, 32, -6, 53, -15, -40, 16, -34, -21, 9, -12, 38, -36, -2, 13, 11, -18, -11,
-23, 26, -29, -37, 6, 34, -16, 28, -17, 49, -27, -45, -54, 27, -18, -39, -15, 22, -32, 54, 8, -32,
14, -13, -32, 23, -15, 23, 82, -39, 0, 54, -4, -55, 26, 1, 46, 9, 3, 91, 51, -88, 70, -110,
9, -27, -48, -77, 109, -44, -1, 9, -5, 0, 14, -8, -50, -17, -17, 6, -2, -16, -5, 25, -22, 56,
-2, -3, 3, -14, 0, 21, 7, 33, 27, -14, 3, -3, -64, 2, 23, -20, 20, -15, -36, -28, -12, -4,
3, -7, -14, 10, 73, 7, 29, -23, -23, -17, -4, 16, 56, -27, 21, 0, 20, 3, 43, 23, 21, -1,
-24, -25, -7, -1, -37, -8, -46, 23, -11, -22, -41, 8, -7, -5, -15, 34, -46, 15, -11, 29, 1, 33,
-40, 12, -14, -62, -21, -25, -32, 3, 6, 2, 25, 0, -36, 8, 11, -45, 23, -16, 21, 8, 26, -32,
41, -14, -31, 9, 41, 18, 96, -41, 32, 0, -26, -55, 33, -11, 27, -12, 32, -29, -44, 3, -54, 11,
-53, 35, -21, 1, -8, 45, 20, -1, -7, 41, 22, 24, 5, 38, 14, 4, -31, 17, -20, -28, -30, 31,
35, -28, 12, 3, -1, 5, -17, -19, 39, -14, 14, 8, -15, -26, -6, -14, -13, 8, -12, -21, 100, -14,
-60, 42, 17, 19, -54, -9, 10, 38, 4, 1, 66, 26, -20, -18, 43, 77, -23, 21, 27, 80, 42, 37,
52, -1, -33, 14, 5, 18, -22, 75, 7, 57, 38, -17, 34, -11, -44, 32, -30, 4, -27, 70, 18, 33,
-23, 14, 22, 1, -31, 28, -23, 29, -13, -5, -15, 57, -15, -23, -28, -9, -33, 33, -4, 39, -2, 9,
3, 3, -3, 28, 5, 14, 10, 4, 2, 2, -8, -3, 17, 44, 3, 28, -32, 31, 1, 17, -6, 29,
-58, 4, 17, 28, 31, 46, -14, 20, 44, -47, 20, 12, 20, -47, -12, 8, 10, -11, -51, 28, 37, -51,
0, -6, 56, -32, -53, -33, -3, -10, -51, 18, 33, -70, 12, -39, 45, -19, -64, -60, 31, -77, -9, -18,
-29, 46, 19, -1, -4, 8, -6, 7, -24, -8, 39, 1, -44, 106, 8, 31, 32, 37, -22, 15, -42, 1,
13, 11, -47, 0, -38, -8, 61, 10, -17, -20, -36, -48, -18, -10, -15, -42, 53, -13, 82, 8, -3, -15,
-11, -23, -39, 11, 3, -12, 43, -14, -13, -4, -11, -12, 27, -5, 37, -25, -33, 21, 4, 1, -64, 23,
-52, 38, -61, -13, 48, -39, -49, -8, -36, 70, -9, 59, -4, 24, -30, 52, 32, -29, 27, -65, 16, 7,
4, 51, 18, 16, 30, 18, -16, 18, 44, -13, 59, 21, 19, 14, 36, -12, 12, 10, -32, 9, 96, -5,
71, -2, 28, -12, -5, -39, 39, -34, 33, -30, -35, -38, 37, 18, -34, 13, 31, 57, 3, 45, 67, 75,
-13, -67, 60, 0, -27, 9, 34, 50, 70, -30, -12, 85, 25, -31, 52, -20, 12, 4, -21, 13, 22, 9,
29, -13, 13, 20, 6, -38, 18, -6, -14, 8, -21, -6, -18, 6, -45, 18, 7, 5, -33, -25, -10, 22,
-17, -23, 10, 31, 2, -43, 59, 4, -8, 8, 36, 16, 21, 51, 11, 30, -43, -8, 35, -6, -6, 68,
29, 20, 53, -4, -11, 0, -4, -2, 48, -29, 3, 59, -19, -6, -20, 41, -1, -28, -22, 40, -21, 32,
-9, 9, -4, 39, -2, 6, 46, -35, -35, 31, -1, 13, -7, -12, 46, 18, -7, 5, 20, 21, 10, -2,
21, 36, -31, -2, 16, 69, 13, 38, -19, 29, 2, -28, 45, 39, -23, 5, 11, -23, 30, 40, -36, 6,
32, -48, 17, -38, 37, 13, -43, -5, -13, 3, -9, 0, -22, 13, -33, -33, -6, 8, -53, -1, -16, -20,
11, -17, -24, 1, 25, -24, -25, -31, -23, -22, 57, -50, -25, -29, -32, -21, 18, -1, 5, 9, -20, -10,
4, -10, -33, -21, -3, -1, -3, -20, 16, -35, -18, 8, -14, 16, 7, 18, 27, -30, 14, 8, 29, -23,
8, -18, 32, -12, 5, 16, 44, 2, 24, -19, -1, -11, 18, -14, 41, -29, 12, -1, 59, -17, 26, 15,
53, 13, 17, 36, 77, -14, -62, 13, 9, -9, 14, -13, 30, 13, -23, -26, -6, -52, -35, -15, 9, 42,
-24, -27, 14, -5, -27, -10, -62, -37, 5, 3, -9, 16, 8, 14, 2, 24, 9, 43, -25, 30, 49, 9,
-13, 1, 1, 32, -45, 9, 20, 39, 29, 34, 97, 17, -7, -47, -40, 29, -114, -10, 53, 23, 56, 1,
-7, -21, -20, 10, 1, -9, 13, 13, -68, -37, -35, 13, -12, -52, -38, 41, 0, 51, -6, 16, -11, 3,
-73, -9, -28, 8, 1, -16, 15, 12, 32, 24, -7, -49, -21, 41, -10, -54, -53, -2, 24, -16, -3, 42,
-14, -42, 5, 23, 22, -28, -52, -14, -17, -38, -35, 69, -35, 39, -27, 21, 52, 22, -27, -8, -5, 22,
-30, 43, -23, 46, 42, 16, 30, -39, -19, 6, 7, 24, -22, 11, 30, 34, 28, -7, -4, 32, -3, -44,
42, -16, 59, 31, -13, -45, 0, -10, -29, 9, 5, -11, 3, -21, 0, 0, -63, -7, 5, -21, -65, 27,
-13, 1, -17, -32, 16, 20, -47, 50, -30, -7, 24, 48, 12, -6, 10, -12, 6, 18, -5, 21, 36, 7,
35, 48, 4, -35, -20, 7, -13, 5, 38, 0, 14, 32, -2, 11, 12, 1, 1, 25, 40, 13, -25, 6,
-20, 23, -1, -57, -41, -24, 10, 24, 29, -13, -13, 11, -27, 3, -23, 15, -21, 11, 6, 0, -19, -6,
19, 4, -30, -28, 16, 21, 41, 29, -29, 23, -30, -36, 14, 4, 20, 30, -14, -2, 7, -13, -12, 28,
-6, -53, -4, 14, 42, -4, 6, 12, -17, -16, -21, 2, -1, -2, -19, -2, 34, 8, -3, 11, 5, 29,
-17, -3, -11, 37, 8, 32, 14, 24, 29, 39, 9, 28, -11, 58, 8, -7, 25, -14, -34, 23, -10, 4,
24, 45, -43, 37, 8, -31, 20, -25, 10, 62, 3, 22, 47, 51, 6, 12, 32, -24, 7, -2, -1, 2,
-14, 4, 5, 41, -13, 1, 13, -36, -1, -10, -14, 4, -26, 12, 28, 2, -22, 17, 42, -35, 1, -19,
11, -28, -25, -32, -29, 63, -34, 16, 47, -3, 18, 43, -41, 43, -45, -7, 27, -30, -21, -20, 10, -23,
-6, -58, -56, 0, -37, -20, 9, 70, 28, 28, 38, -10, -36, -48, -35, 21, -9, -68, 26, 15, -3, 36,
-46, 4, 4, -21, -32, 8, 4, 1, 18, -2, -5, 31, -5, -37, -3, -13, 10, -31, -25, 15, 43, 6,
-4, 15, 45, -28, 40, -81, 6, -4, -38, -11, 7, -21, 16, -72, 14, 6, -53, -66, -40, -63, 11, 13,
-7, -2, 17, -44, 32, -31, 18, -28, 9, -46, -35, 38, -24, -8, -16, 52, 3, -51, 23, 16, 17, -17,
24, -27, 6, -57, -2, -24, 3, 11, 15, -1, -17, -22, 54, 16, 47, -14, 25, -46, -4, 2, -13, -33,
40, -28, -52, -33, 21, -7, -52, -18, -19, -3, -19, -17, -5, 3, -21, 0, 14, -2, -14, -38, 32, -20,
-68, -15, -21, -78, -32, 12, -8, 17, -30, -62, 34, 29, -23, -23, 12, -78, -41, 33, -1, -15, 16, 14,
50, 69, -22, 20, -33, -51, -34, 34, -18, -54, -6, -67, 8, -25, -69, -47, 51, -40, 5, 7, 15, -33,
18, 2, -14, -10, 19, -3, -11, -13, 16, -30, 11, -16, -2, -16, -12, 13, -8, -6, 7, 13, 6, -41,
-7, 9, 24, 24, -22, 41, 14, -8, -24, 38, 1, -18, -8, 17, -1, 29, -32, -3, -3, 2, -33, -3,
-11, -32, -47, -18, -37, -17, -43, -14, -14, 4, -34, -7, 24, -7, -14, -76, 11, -27, 14, -28, 22, 19,
30, -45, 19, 33, 9, -53, -6, -35, -31, -22, -2, 15, 31, 8, 9, -16, -8, -55, -18, -29, -35, -35,
6, -9, -4, -25, -9, -8, 2, 19, -5, -37, -6, -9, -43, 23, -27, 5, -17, -24, -19, -4, 4, -38,
18, -1, -64, -41, 0, -4, 20, 26, 7, 23, -24, -5, -20, -22, 3, 13, 68, -32, 7, 52, 27, -27,
-37, -40, -26, -5, 13, -3, 17, 5, 26, -40, -49, 11, -29, -16, -3, -11, -6, 31, -1, -45, -10, 12,
-11, -23, -14, 13, -18, 9, -3, 41, -16, -1, -12, -10, -33, -11, 9, 5, 2, 16, 2, 39, 10, 13,
-13, -16, -15, 25, -31, 27, -25, -26, 17, 10, -24, 53, -29, -50, -39, -8, -48, 35, -20, 22, -24, 40,
-60, 43, 45, -30, -7, 45, -60, 16, -23, 29, 30, 55, -18, -11, -2, 30, 8, 15, 8, 28, 32, 5,
14, -13, 1, 34, -35, -7, 21, -42, 2, 3, 12, -9, -35, -27, -7, 16, 11, -12, -9, 24, 31, 0,
-14, 27, 44, 19, -26, 31, 37, 18, -25, 25, -13, 66, -42, 64, 44, 47, 25, 54, 53, -4, 34, -8,
16, 20, -35, -43, 32, 22, 43, 92, 17, 34, 33, -1, -16, -17, -24, -8, 13, -13, 40, 28, 4, 5,
32, 11, 2, 46, -2, 17, 28, 30, -34, 22, -57, 4, -8, 6, 7, 13, 79, 25, 26, -1, -18, 25,
-30, 11, -29, 64, 56, -15, 68, 6, -51, -25, -20, -25, -47, 62, -17, 0, 35, -24, 94, 0, -22, -24,
-4, -64, -44, 91, -16, -28, 42, -21, 69, -29, -26, -41, 29, -36, -29, 76, 16, -19, 28, -42, 64, -14,
-31, -1, 39, 9, 11, 1, -22, -15, -3, -22, 21, 4, -35, 38, -23, -16, -31, 22, 4, -18, -15, 7,
18, 17, -19, -56, -5, -6, -57, -7, 29, -55, 65, -1, 8, 32, -55, -52, 29, -38, -37, 22, 6, -19,
32, -21, 25, 16, -47, -31, -24, -12, 15, 64, 55, -9, 3, -23, 31, 34, -31, -1, 51, -42, -28, 11,
43, 6, -22, -16, 5, 8, 28, -17, 29, 15, -11, 20, 42, -46, -5, -24, -16, -44, 13, -4, 45, -2,
7, 1, -42, 43, 2, 3, -14, -27, 23, -8, 1, 6, -37, 16, 12, -17, -27, 4, -2, -1, 1, 6,
-44, -1, -7, -16, -12, -1, -7, 33, 10, -38, 18, -1, -15, 21, 0, 16, 22, -10, -16, 4, 7, -1,
23, -30, -36, -22, -9, 15, 34, -27, 15, -5, 4, -10, -16, -22, -34, 24, -26, -29, 16, 4, 26, 12,
7, -24, -3, 26, -21, 14, 45, -15, 11, 40, -7, 17, 13, 0, 32, 9, -4, 36, 44, -29, 25, 31,
-3, 53, -6, -10, 65, 13, -30, -4, 36, -3, -20, -8, 22, -36, 24, 5, -13, 16, -24, -20, -24, -32,
-13, -3, 21, 8, 24, -8, -21, -20, -17, -19, 4, -49, 12, 23, 5, 14, -30, -5, 15, 5, -35, 2,
22, -9, 13, 32, -18, -16, -27, 18, 45, -3, -1, 4, 74, -26, -9, 28, -11, 13, -27, -6, 51, 34,
13, 5, 1, -20, 31, 7, -13, 0, -10, -40, 19, -1, -22, 24, -39, -13, -10, -19, -20, -12, 13, -26,
13, -18, -22, 14, -36, 40, 18, -11, 5, -8, 70, -15, 10, -36, 37, -38, -5, 74, 38, 7, 48, 15,
30, 24, 12, 5, 58, -17, 8, 45, 77, 25, 25, 4, 72, 1, 20, 18, 65, 3, -49, -1, 24, -20,
-31, -16, 5, -31, 31, -42, 32, 2, -32, 8, 34, 13, 10, -43, 27, -40, -11, -42, 16, -56, -26, 21,
22, -24, -45, -10, 17, -70, -19, -48, 11, -46, 7, -5, 30, 29, 3, -16, -12, -8, -32, 9, 3, 2,
34, 0, -9, -2, 1, -17, -10, 6, -26, -10, 47, -22, -32, 10, 15, 73, -40, -5, 23, -30, -37, -71,
19, -27, -33, -22, 20, 58, -28, -26, 7, -62, -41, -77, -27, -21, -2, -39, -31, 4, -8, 12, -11, -14,
30, -34, -30, -5, -9, -15, -23, -25, 30, 31, -38, -8, 23, 0, -10, 58, 21, 4, -11, -5, 16, 6,
-17, 19, 6, 12, 3, 34, 10, -41, -6, 27, -16, 16, -37, -8, -27, 14, -5, -3, 19, -57, -77, 14,
-2, 16, -21, 30, -47, 38, -70, 25, 51, -27, -25, -12, -35, 13, 17, 46, 41, 36, -43, -19, 25, 21,
-4, -4, 1, -18, 13, 2, -14, -18, -21, 10, -74, 4, 25, -33, -22, -29, 1, 6, -20, -49, -32, -10,
22, -9, -27, -4, -31, -19, -34, 21, 10, 49, -1, 23, 35, 31, -16, 23, -32, 54, -15, 19, 43, 34,
7, 4, 19, 25, 13, 5, 10, 42, 30, 22, 27, 9, 13, 18, -31, -12, -6, -21, 27, 5, 47, 39,
-18, -9, 21, 41, -49, 25, 21, 6, -19, 26, 20, -15, -23, -48, -24, -19, 26, -30, -21, 45, 1, 19,
-26, 9, -15, 2, -5, -1, 15, -4, -10, 8, 6, -3, -31, 44, -26, 30, -11, -26, -18, -14, -22, 44,
-17, 11, -5, 4, 18, 16, -25, 12, 8, -46, -22, 15, 11, 10, 4, 12, 14, -26, -21, 58, -67, -41,
-57, 18, -53, -11, -14, -25, 1, -37, -71, 19, 74, -15, -50, 30, -23, 26, -42, 12, 12, 30, -2, 39,
76, -22, -15, -45, -28, 43, -43, 32, 53, 59, 16, 57, 14, -17, 11, -15, -31, 33, -11, 32, 45, -11,
-6, 48, -83, -18, 28, -5, 21, -1, 29, -4, 8, -62, -55, 27, -79, -13, 32, 42, 47, -1, 84, -40,
-36, -72, -58, -7, 18, 17, -11, -64, 25, -2, -24, -14, 73, 55, 41, 15, 38, 49, 12, -42, 25, -7,
-39, -6, 5, 22, 51, -50, -33, 41, 3, -45, 10, -53, 18, 45, -23, -29, 32, -34, -7, -10, 45, -30,
24, -19, 27, -24, -68, -73, -2, -41, 0, 33, 40, -13, 17, -5, 34, -20, -11, -79, 45, -69, -23, -7,
19, -10, 30, -59, -7, -19, -14, -37, -5, 2, -5, 5, -19, 14, -7, -9, -8, 24, -14, -21, 42, 1,
-6, -31, 2, -13, 27, -24, -4, 59, -47, -2, -8, 5, 6, 14, 16, -34, 4, 22, 2, -1, -30, 15,
29, 1, 38, 2, 41, -36, 2, -39, 19, 15, -20, -18, 47, -20, -11, 5, 12, 23, -30, 45, -1, -15,
38, 7, 14, 42, -18, -5, -8, 2, 40, 52, -21, -44, 25, 6, -21, 28, -20, 20, -9, -3, 11, -9,
15, 3, 10, 2, 32, -26, 28, -14, -5, 2, -8, 6, -22, 27, -9, -12, -13, 1, 24, -27, -10, -50,
1, -49, 11, 15, -50, 5, 35, 15, 31, -25, -4, 5, 6, -16, -50, -24, -12, 2, 18, -13, 35, -9,
-13, -36, -27, 4, -38, -14, -6, 3, 11, 15, 30, -45, 15, -27, -20, 25, -54, 14, 3, 36, -8, 20,
37, -40, 32, 4, -41, 42, -45, 10, 28, 23, -17, -8, 34, -54, 25, -1, -14, 21, -52, 27, 7, -4,
2, -18, -37, -1, 12, -27, -22, -19, 15, -12, -2, -52, 14, -3, 16, -14, 15, -8, 8, 19, 32, -4,
27, 15, -9, 10, 36, -12, -12, 19, 0, -3, -35, -17, 41, 31, -3, 21, 58, -44, -26, 11, -38, 10,
-36, 2, 41, 54, -14, 17, 54, -55, -32, 10, -7, 38, -58, 12, 44, 23, -24, 25, 9, 3, 0, 7,
28, -16, 12, -26, -11, -21, -43, -8, -2, -12, -1, -19, 32, -7, 18, -4, -25, -7, -27, -17, 10, -40,
-4, -50, 17, -50, 4, -6, -8, 22, -27, -30, 34, 8, -42, -36, -16, -44, -17, 28, -36, 25, -2, 2,
-3, -20, -27, 6, -18, 3, 22, 52, -38, 39, -44, 56, -13, -21, -36, 3, 14, -17, 9, 12, -28, 6,
-2, 22, 33, -22, -72, 40, -19, 14, -24, 56, 3, 15, 22, 6, 3, 14, -6, 3, -20, 15, -27, 16,
35, 27, -4, -36, -18, 21, 12, -59, -8, -16, -8, 7, -32, -21, 11, 1, -20, 12, 33, -24, 29, -53,
30, 17, -64, -38, 28, -20, -16, 7, -9, 6, 21, 14, -64, -11, 4, 42, 25, 35, 8, 45, -13, 21,
-4, 31, -37, 19, 1, 59, 35, 47, 4, 34, 19, 8, 18, -11, 8, 44, -2, -3, 63, 12, -5, 38,
38, -33, 20, -11, 37, -2, -55, 29, 1, -5, -45, 58, 30, -72, -3, -25, 9, -6, -43, -34, 9, -17,
2, -2, -41, -15, 28, 1, -7, 42, -26, -4, 0, -17, -16, 17, -12, -18, 8, 5, -29, 13, -4, 24,
0, 19, 6, 9, 11, -9, 13, -7, -20, -19, 10, 9, -2, -8, 29, -19, 4, 5, -5, 3, -20, 6,
19, 12, 14, 10, 19, 15, 8, -3, -20, -4, -27, 22, 25, 4, 14, 0, -13, -21, -4, 39, -10, -10,
9, 1, -7, 14, 11, 24, 3, -14, 12, 25, 6, -4, -6, 22, -15, 19, -53, 31, 33, 16, -30, 59,
41, 36, 16, 60, 21, 18, -7, 50, 44, -9, -43, 15, -8, 18, 1, 21, 37, 17, -1, 25, 3, 9,
-1, 7, -36, 14, -17, -10, 51, 35, -17, 19, 30, -34, -33, -45, 31, -5, 7, 34, 15, 4, -19, 23,
39, -50, -7, 13, 18, 49, -13, 6, 28, 50, -21, -24, -22, -13, -16, -15, -5, 3, 7, 26, -20, -14,
-23, 31, -21, -26, 0, -61, 15, -36, 31, 61, -61, -49, -11, -15, -19, -4, 25, -31, -17, -42, 34, 28,
-28, 3, -27, -47, 12, 48, -2, 11, 36, -15, 34, -13, 7, -4, 52, -8, 15, 25, 33, 6, 19, -20,
-6, -34, 33, -7, 31, -19, -42, 34, 31, -43, 84, -10, 36, -55, -26, -58, 51, -4, -43, 4, 12, -50,
25, -21, 4, 29, -56, -1, 27, -21, 29, 16, -22, 13, 14, -20, -11, -5, 16, 62, -7, -1, -10, 17,
33, -18, -8, 7, 22, -14, 4, 3, 43, -19, 2, 37, 11, -16, 29, -58, 9, -50, 4, 2, 20, 19,
-16, 14, -27, 13, 26, -28, 33, -17, -25, -22, 7, -28, 2, -36, -22, 13, -9, 5, -7, 10, 8, 13,
-55, 5, 33, -19, -23, 22, -3, 34, 20, 31, 26, 61, -19, -2, 12, 21, 48, -13, -10, -5, 9, -13,
9, -22, 21, -3, 65, -1, 21, -40, -3, -9, -22, 6, 45, -39, 17, -1, 57, -40, -16, -2, 10, 19,
-84, 25, 28, 0, -21, 50, 33, -79, -40, -18, -29, 29, -54, -14, 9, 13, -52, 31, 39, -80, -54, -28,
8, -10, -86, 19, -11, 13, -54, -2, 22, 13, -3, -37, 75, -15, 11, -18, 18, 14, 10, -43, 42, 5,
-19, -37, 36, -16, -17, -36, 11, 34, 34, -25, 60, 14, -11, 20, -19, -30, -37, -23, 1, 63, 32, -14,
40, -2, -11, 14, -19, -35, -46, 11, 2, 75, 18, 15, -11, -57, -33, -6, 20, -12, -11, 33, -27, 30,
-18, 38, 48, 10, -21, 14, 59, 9, 3, 5, 7, 39, 2, -55, -20, 22, -18, -52, -28, -2, 10, 52,
-29, 34, 5, 7, 48, 30, 52, -38, -27, -5, -24, -17, 34, 109, 17, 8, 39, 31, -27, 79, -30, 16,
-42, -25, 62, 74, 26, 35, -31, -16, 1, 5, -11, 6, -15, 5, 14, 40, -17, 53, -29, -5, -6, 31,
-2, 30, 17, -1, 25, 11, 6, 39, -29, 7, -2, -12, -32, 7, -20, 31, 1, 13, -12, -19, 7, -3,
-18, 44, -63, -5, -13, -3, 23, -6, -19, 23, -46, -10, -15, 21, 9, 7, -12, -41, 8, -36, -8, 16,
-21, -3, -7, 9, 8, 14, -4, -28, -29, -21, -10, 6, -38, -38, 8, 9, 47, 4, 9, 17, -38, -33,
-4, 13, 14, -5, -1, 36, -11, 20, -19, 67, -10, 19, 16, -16, 10, 10, -12, -17, 6, 6, -61, -9,
14, -7, 19, 30, -5, 24, 21, -7, 11, 0, -14, -28, 22, 14, 57, 14, -5, 14, -3, 9, -6, 30,
-10, 16, -13, 5, 16, 23, 61, -4, 25, 21, 2, 32, -52, -51, 26, -22, -2, 20, 38, 15, -4, 42,
-14, 7, 26, -2, 31, 29, 1, -3, 13, 34, -13, 27, -17, 14, 34, -2, -1, -17, -16, -1, 8, 25,
12, 4, 27, 19, 41, 8, -13, 3, -9, 25, 30, -20, 23, -2, 37, 6, 28, 32, 23, 0, -10, -4,
47, -20, 19, 9, -16, 39, -23, 9, 33, -5, -23, 47, -1, 6, -2, 46, -12, 8, 12, -16, 21, 31,
-14, -61, -13, -13, 41, -1, -48, 12, 30, -41, 4, -15, -3, -31, 10, 4, 43, -39, 29, -3, 45, -46,
7, -51, 47, -22, -40, 20, 4, -15, 39, -2, 29, -14, -11, -9, 37, -17, -17, -10, -13, -2, -2, 13,
-37, -20, 12, -25, -24, 11, 24, 4, -11, 59, 24, 6, 1, 32, -4, -20, -10, 2, 23, -10, 21, -20,
-27, 49, -16, 35, 33, -6, 1, 55, 5, -7, 60, 0, 9, 36, -11, -57, 57, -8, 3, 4, 21, 25,
-8, 16, 44, -15, 6, -29, 25, -33, 29, 6, 12, -32, -27, -61, 45, 10, -28, 7, -27, -3, -11, -15,
53, -3, -56, 30, 18, -10, -18, 35, 15, 55, 18, 12, 50, 12, 24, -21, -34, 37, -37, 45, 40, 46,
-41, 25, 54, -16, -1, -70, 16, -35, 15, 10, 5, 24, 28, 14, -2, -14, -17, -4, 27, -21, -4, -60,
-1, -8, 26, 27, -10, -19, 27, -1, 37, 26, -60, -23, -3, 35, 45, 14, -15, 72, -14, -21, 9, -28,
14, -23, -3, 24, 52, -26, -30, 61, 37, -64, 9, -112, 86, 52, -35, -26, 18, -80, -65, 12, -40, 10,
-15, -54, 98, 67, -74, -22, -30, -28, -79, -30, -59, 3, -34, 22, 0, -57, -72, -68, -14, -11, 29, 16,
20, -3, 4, 2, -6, -28, 2, -3, 9, -1, -20, -16, 11, -51, -19, -35, -30, -25, 13, -31, 17, 5,
-3, 15, 30, -4, 5, -12, -45, -1, 21, 0, 37, 21, -9, -39, 22, 11, 10, 4, -39, 4, 17, -10,
3, 7, -4, -58, -28, 42, 47, 2, 26, -12, 15, -17, -26, -18, -6, 20, 18, 7, 8, 4, 9, -4,
22, -10, 22, 7, -4, -13, -10, -18, -27, -28, -10, -55, 10, 17, 17, -37, -3, 29, 31, 10, -91, -37,
-29, -62, 26, 21, -31, -2, -20, -5, 76, 9, -19, 12, -53, -33, -2, -8, 39, 16, 31, -26, -10, -4,
9, -9, -36, -34, -45, -18, -5, 36, -24, 3, 1, 44, 5, -18, 20, 4, -47, -58, -7, -72, -52, -3,
-12, 63, -38, 6, 6, 5, -30, -39, -13, -14, -25, 55, 83, 18, -14, -18, 26, -26, 1, -48, -7, -11,
27, -35, 2, 86, 48, 9, -52, -94, 86, -13, 20, 55, 43, 9, 11, 14, 22, -43, -31, -30, 14, -41,
3, -3, -18, -21, -6, 21, -33, 10, -25, 15, -51, 2, 0, 4, -40, -38, 11, 21, 4, -31, -1, 38,
-48, -20, -8, -20, 24, 4, -36, 46, -39, -53, 13, 39, -15, -32, -29, -3, -19, -15, -59, 59, 8, -16,
-29, -12, -27, 14, -10, -28, -105, 24, -32, -7, 3, -1, -2, 4, -56, -46, -5, -9, -15, -4, 10, -10,
-32, -24, -21, 5, 12, -15, -24, 32, 7, -27, 3, 14, -56, 62, -33, 34, 31, -7, -28, 10, -1, 0,
5, -34, -68, 31, -13, -13, 14, -5, -52, -4, -8, -10, 25, -36, 6, 32, 18, -36, 9, -21, 11, 25,
-81, 15, 42, -8, 14, -36, 11, -21, -49, -67, 5, -1, 51, -14, 21, -22, -21, 17, -50, -35, 0, -8,
5, -16, 41, -12, -18, -31, -8, -29, -45, 54, 5, 14, -23, -8, 3, -11, -19, 28, -8, 18, -45, 34,
21, 23, -37, 14, 9, -36, 7, 20, 1, 34, -12, 23, 8, 20, -9, 18, -13, -69, -36, -15, -12, -25,
-42, 22, -10, -24, -65, -21, 32, -18, -19, -49, 10, 3, -23, -12, 6, 25, 13, -24, 73, -5, 9, -72,
16, -25, -17, -23, 37, 56, 20, -8, 29, 22, 14, -28, -43, -1, -25, -12, 2, 0, 19, 2, -14, -28,
16, 20, 5, -26, -12, -4, -17, -20, -5, 10, -68, -16, -22, 1, 21, -33, -2, -6, -63, -50, -16, -27,
33, 11, -14, 0, 15, -4, 6, -34, 19, 20, 23, -4, 25, 31, -5, -1, 21, -20, -2, 19, -2, 14,
-5, -51, 28, 25, -5, -46, -34, -8, -19, 27, 23, 19, 22, -8, 7, 1, 13, -21, 27, -1, -23, -17,
8, -5, -8, 18, -15, 26, -17, -41, 48, -22, 12, 3, -34, -32, 24, -17, -27, -12, -37, 4, 10, 9,
-24, 14, -28, 28, -7, 24, 14, -59, -72, 20, -22, 39, -20, 49, -9, 52, -39, 24, 41, -32, 4, -6,
-41, -16, -24, 88, 15, 49, -19, -6, -1, 14, 29, -35, 27, -3, 4, 26, -35, -46, 41, 17, -45, -14,
28, -33, 9, -42, 7, -7, -21, -25, 24, 24, 17, -6, -16, 28, 18, 48, -59, 9, 53, 27, 31, 41,
53, 62, -6, 2, 5, 37, -33, 15, 44, 67, 36, 28, 19, 43, 29, -27, 34, -25, 10, 22, 13, 29,
69, 53, 0, 12, -4, -1, 1, -40, 20, 41, -25, -14, -18, 26, -30, -31, 17, 10, -11, 52, 0, 6,
-34, 2, -28, 35, -22, 17, 35, -25, 32, -41, 26, 31, 12, -9, 16, 5, 11, 26, 20, 26, -8, 17,
43, 14, 21, -12, 28, 12, 27, -19, 25, 16, -24, 14, 11, 7, -3, 2, -16, 17, -9, 4, -32, -16,
-6, -17, -6, -38, 26, -9, -34, -10, -19, -17, 8, -26, 41, -18, 26, -22, 9, 4, 10, 0, -35, 7,
4, -12, 48, -41, 5, -2, -26, -2, 4, -37, -29, 5, -39, 18, -18, -37, -25, 27, -37, 1, -16, -32,
-18, -23, -24, 24, -20, -35, -10, 9, -22, -13, -59, -34, 15, -11, 12, -21, 0, 2, -9, -20, -11, -17,
30, -27, 36, 42, 47, -33, 32, 8, -5, -18, -2, 3, 19, -29, 40, -29, -18, 43, -10, 31, -43, 39,
-28, 61, 35, 7, 13, -13, -30, 34, -52, 16, -33, 32, 9, 46, -10, 22, 4, 25, 31, 45, -12, 19,
42, -26, 31, 40, -24, 31, 1, -31, 24, 47, 1, 28, -29, -26, 78, 32, 39, -1, 68, -8, -26, 24,
28, -7, -58, -29, 32, 38, 29, -29, 35, 32, -21, -5, -18, 87, -12, 23, 91, 94, 41, 48, 10, 30,
43, -42, 7, -13, -29, 8, -4, 4, 6, -15, 30, -14, 2, 45, -37, -41, 19, 27, -9, -19, 7, 27,
-10, -27, -57, 110, -4, -5, -15, -12, 1, 22, 5, -1, -30, -48, -70, 58, -22, 85, -1, -10, 5, 21,
-41, 59, 9, 9, 22, -36, 19, -56, 24, 3, -18, -12, 16, -32, 10, -5, 0, -4, -14, -6, 14, 32,
34, -5, 3, 20, 25, 3, -4, -47, -36, 33, -37, -57, 24, 0, -43, 19, -24, 35, 17, 4, -36, -4,
-27, -107, 9, -52, 13, 24, -28, -13, -3, 56, 7, 23, 17, -1, 31, 5, -56, 10, -33, 15, -1, -41,
-3, -1, 7, 57, -6, 1, 44, -15, -14, -19, 37, -61, 12, -18, -34, 14, -16, -15, 22, 20, -12, -15,
19, -4, 42, -7, -65, -18, -8, 32, -7, 50, -12, 6, -52, 11, -13, -12, -51, -35, 9, 0, -12, 27,
-35, -11, -48, 5, -30, -2, -25, -19, 9, 19, -24, 24, -6, 3, 19, -16, 5, -33, 47, 2, 0, 31,
42, -7, -7, 7, -13, -17, 1, -33, -18, -6, -9, -15, 8, 21, 10, 11, -30, 24, 3, 16, -3, 22,
-6, 2, -28, 5, -5, -3, -38, 34, -8, -16, -22, 38, -13, 6, -43, -5, -16, -9, -34, 13, -28, -2,
-3, 9, -30, -15, -64, 15, -3, 30, 16, 10, 2, 21, 27, 17, 30, 27, 40, -30, -26, -27, -6, 5,
-50, 20, -41, -3, 19, 50, 5, 0, 2, -25, -31, 35, -50, 34, 16, -1, -29, 5, -15, -9, -39, 19,
18, -24, 15, -14, 5, 0, -12, -23, 21, -6, -8, 32, 34, -11, 16, -28, 9, -11, -4, 6, -5, -31,
-26, 16, 25, 48, -33, 3, 7, -6, 47, -13, 7, -2, -41, -9, 68, -11, -13, 8, 36, -81, -11, -54,
-21, -4, -23, 21, 22, 21, 13, -19, 14, -71, -35, -49, 33, -9, -42, 36, 7, 56, 16, 11, -4, -6,
12, -30, 22, -12, 6, 10, 4, -12, 5, 31, 10, 22, 12, 47, -33, 4, 1, 34, 25, -15, -40, -27,
67, -15, -18, -68, -36, -13, -6, -35, 23, 23, -29, -12, 37, -2, -11, -23, -25, 9, 31, -43, -53, 77,
-6, 61, -55, -47, 51, 6, -1, 28, 37, -14, 1, 14, 2, 53, -74, -48, 84, -1, 20, 51, 61, 4,
25, 32, -3, 31, -9, 48, 64, 74, -52, 43, -32, -13, -25, 34, -15, -14, 72, -14, 3, -6, -28, 1,
-52, -17, -15, 49, 33, -18, 54, -26, -33, -65, -39, -40, -39, 42, 18, -42, 58, -40, 82, 7, -16, -62,
36, -70, 2, 87, 32, -15, 72, -33, 73, -5, -36, -23, 51, -52, 24, 54, 52, -10, 53, -36, 58, -13,
-34, 12, 53, 5, -26, -8, 10, 1, 8, 0, 10, -4, -38, -7, -42, -19, -49, -6, 17, -21, 35, 21,
37, -14, -33, -64, -5, 7, -39, 41, 48, -66, 48, -23, 23, -12, -54, -46, 65, 9, 20, 40, -22, -25,
50, -3, -31, 32, -21, 37, 40, -27, 51, 30, 9, 13, 9, -3, -26, 59, 7, 35, 55, -9, -80, 14,
19, 12, -25, -38, 32, -25, -43, -59, -6, 8, -15, 27, 38, -27, 14, -18, 10, -79, -10, -51, 21, 14,
14, 31, -11, -2, 53, 1, 9, -30, 28, -38, 9, 4, -12, 1, -36, 2, -17, -7, -21, 41, -10, 11,
-32, 18, -27, -8, -55, -6, -40, 41, -4, 23, 23, 34, -40, 60, 27, -22, 8, -21, 3, -19, -24, 0,
-33, -18, -70, -18, -8, -29, -39, -9, 11, 3, 3, 23, -45, -18, -34, 39, 2, -26, -6, -40, 23, 26,
41, 7, -15, 10, -1, 1, 3, -28, 8, -26, -11, 32, 7, -20, 29, -8, -28, 21, 23, -15, 34, -7,
-27, 7, -12, -11, 3, -33, -13, -22, 20, -7, -32, 8, 10, -17, -17, 6, 23, 52, -9, 7, 15, -9,
0, -2, 0, 3, 3, 6, -13, -21, -16, -6, -12, -28, 18, 19, 20, -5, 13, 47, -10, 5, -8, 30,
29, -36, -15, -5, 18, -13, -18, 20, 8, 12, 6, 7, 27, 5, -17, 20, -6, -13, -34, -20, 33, -29,
28, -11, -1, 3, 3, 11, 7, 15, 72, 8, 5, -21, -9, -28, 31, -4, 7, 9, 22, -15, -8, 40,
-27, -43, -25, -42, -27, -4, -32, 27, 7, -5, -42, 15, 4, -9, 1, -2, -4, -54, -29, -1, 20, -7,
-33, -16, -24, -19, -42, 10, -35, -49, -64, -15, 2, -37, -59, 19, -35, -30, -56, -3, 48, 9, -43, -6,
39, -16, -5, 4, -30, 64, -5, -18, 60, -23, -48, -61, -4, 19, -33, 23, 8, 49, 35, 30, 31, 14,
10, -12, 34, -3, -25, 38, 15, 40, 44, 42, -7, 24, -9, -7, 19, 6, -21, 30, 5, 42, 41, 24,
-50, -15, -18, 20, 22, -10, 18, 12, -51, 0, -10, 6, 51, 5, -1, -47, 56, -7, 16, -16, 34, 58,
21, 2, 11, 53, -24, -127, 15, -35, -3, 10, 18, 57, 100, -3, -18, 94, 28, -62, -1, -38, 7, -19,
13, 32, 35, 14, 12, 10, -18, 17, -14, -8, 13, -1, 21, 5, -1, -21, -71, 19, 25, 3, -16, 15,
10, -28, -3, -10, 3, -14, -52, 5, 8, -14, 14, -32, 38, 18, 16, -26, -41, 12, -34, 31, 34, -36,
20, 0, 18, 0, 2, -33, 0, 8, -35, -25, 37, -20, 47, -26, -2, -19, -61, -33, 3, 8, -43, -23,
25, 36, 0, 16, -9, -31, -15, -40, 17, 55, 25, -14, 12, 1, -1, 13, 19, -13, 30, 2, 4, -1,
-14, -5, -16, -7, 5, 6, 0, -2, 12, -28, 1, -4, 7, -32, 0, -1, 16, 10, 3, 19, -4, -46,
-30, 0, 13, 0, -32, 13, 22, 2, -46, 10, -6, 5, -25, 25, 35, -18, -18, 45, -15, 1, -42, -18,
11, -5, 43, 22, 57, -27, -36, 40, -15, 25, -50, 10, 18, 24, 29, 56,
};
// Quantized weight blob emitted by the MindSpore Lite Micro code generator.
// NOTE(review): 20 int32 entries - presumably the bias vector of a layer with
// 20 output channels (paired with the preceding int8 kernel data); confirm
// against the generated net source.
const int32_t g_Weight7[] = {
51, 6, -132, 36, 151, -241, -110, 192, -196, -23, -218, 283, -121, -351, -168, -267, 139, 331, 49, 173,
};
// Quantized weight blob emitted by the MindSpore Lite Micro code generator.
// NOTE(review): 200 int8 entries (10 rows of 20) - presumably the kernel of
// the final fully-connected layer mapping 20 features to 10 classes; confirm
// against the generated net source.
const int8_t g_Weight8[] = {
-11, 87, -61, -70, -49, 39, 21, 37, -18, 75, 15, 28, -67, -113, -58, -41, -96, 81, -17, -2,
-26, -64, -16, 108, 102, -84, -50, 29, -1, 91, -78, 93, -121, -38, 28, 40, 18, -4, 14, 54,
53, -86, -53, 20, -53, -35, 5, 26, -106, 24, -67, 58, 40, -46, -107, 9, 61, 75, -76, 60,
-27, 44, 75, 75, -52, -55, 64, -12, -13, 54, 15, -62, 79, -28, 76, 84, 74, -41, -70, 28,
36, -89, 45, 12, -35, 58, -22, 84, 81, -18, -78, -32, -127, 38, -60, -5, 13, -90, 30, -96,
-100, 87, 90, -7, 84, -57, 19, -5, 29, -98, 74, 28, 18, -58, -18, -27, 72, -24, -12, -63,
-13, 65, -109, -31, -15, 38, -36, 64, 25, -99, -51, -45, 59, -57, 53, -85, 0, -16, 92, 45,
26, 25, 40, 108, -12, 74, -90, -29, -90, 68, 42, 79, -24, 19, -78, 1, -38, -101, -62, 8,
81, 55, 38, -41, 13, 30, -44, -21, -24, -17, -50, 15, -10, -38, 109, -66, 95, 38, -89, -42,
-11, -85, -9, -84, 14, 62, -2, -54, 55, -2, 61, 26, 5, 21, 41, 89, -26, -45, -67, -63,
};
// Quantized weight blob emitted by the MindSpore Lite Micro code generator.
// NOTE(review): 10 int32 entries - presumably the bias of the final
// 10-class output layer (pairs with g_Weight8); confirm against the
// generated net source.
const int32_t g_Weight9[] = {
30, 58, 83, -64, -12, 29, -45, -28, 48, -79,
};

View File

@ -0,0 +1,36 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "CMSIS/NN/Include/arm_nnfunctions.h"
#include "nnacl/int8/quant_dtype_cast_int8.h"
#include <stdlib.h>
#include <string.h>
// Shared scratch/workspace buffer used by the generated kernels; defined in
// the generated net source file.
extern unsigned char *g_Buffer;
// Minimal status codes returned by the generated inference entry points.
enum STATUS {
RET_OK = 0,
RET_ERROR = 1,
};
// Worker thread count for the runtime; defined by the benchmark/runtime glue.
extern int g_thread_num;
// Quantized weight/bias blobs emitted by the Micro code generator and defined
// in the generated weight source file.
// NOTE(review): g_Weight5 is absent from this list - presumably folded away
// by the converter; confirm against the generated weight file.
extern const int8_t g_Weight1[];
extern const int32_t g_Weight2[];
extern const int8_t g_Weight3[];
extern const int32_t g_Weight4[];
extern const int8_t g_Weight6[];
extern const int32_t g_Weight7[];
extern const int8_t g_Weight8[];
extern const int32_t g_Weight9[];

View File

@ -21,17 +21,27 @@
## 概述
本教程以MNIST分类模型推理代码为例帮助用户了解codegen生成代码、编译构建、部署等流程
本教程以MNIST分类模型为例介绍面向边缘侧设备超轻量AI推理引擎Micro包括模型自动生成代码、编译构建、部署等三步
## 模型编译体验
用户可以使用脚本一键式编译生成MNIST分类模型的推理代码并执行推理得到单次推理输出。下载[MindSpore源码](https://gitee.com/mindspore/mindspore),进入[`mindspore/mindspore/lite/micro/examples/mnist_x86`](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/micro/example/mnist_x86)目录,执行脚本`mnist.sh`自动生成模型推理代码并编译工程目录,即可得到单次推理输出。
用户可以使用脚本一键式编译生成MNIST分类模型的推理代码并执行推理得到单次推理输出。
第一步下载MindSpore源码(https://gitee.com/mindspore/mindspore),在项目根目录使用命令
```bash
bash mnist.sh
bash build.sh -I x86_64 -j128
```
推理结果如下:
编译MindSpore在项目根目录的output目录会生成MindSpore tar 包。
第二步进入mindspore/mindspore/lite/examples/quick_start_micro/mnist_x86 目录,执行脚本
```bash
bash mnist.sh -g on -r ${dir}/mindspore-lite-${VERSION_STR}-linux-x64.tar.gz
```
自动生成模型推理代码并编译工程目录,即可得到单次推理输出。推理结果如下:
```text
======run benchmark======
@ -47,17 +57,12 @@ name: Softmax-7, DataType: 43, Size: 40, Shape: [1 10], Data:
## 详细步骤
在编译此工程之前需要预先获取Ubuntu-x64 CPU平台的[Release包](https://www.mindspore.cn/lite/docs/zh-CN/master/use/downloads.html),解压后得到`mindspore-lite-{version}-linux-x64`,将其拷贝到当前目录下。
> `{version}`为版本号字符串,如`1.2.0`。
以本教程为例预置x86平台的Release包目录如下
**模型编译体验**第一步中编译的x86平台tar包目录如下
```text
mindspore-lite-{version}-linux-x64
└── tools
└── codegen # 代码生成工具
├── codegen # 可执行程序
├── include # 推理框架头文件
│ ├── nnacl # nnacl 算子头文件
│ └── wrapper
@ -72,18 +77,19 @@ name: Softmax-7, DataType: 43, Size: 40, Shape: [1 10], Data:
### 生成代码
下载[MNIST分类网络](https://download.mindspore.cn/model_zoo/official/lite/mnist_lite/mnist.ms)。使用Release包中的codegen编译MNIST分类模型生成对应的x86平台推理代码。生成代码的具体命令如下
**模型编译体验**第二步中会先下载[MNIST分类网络](https://download.mindspore.cn/model_zoo/official/lite/quick_start/micro/mnist.tar.gz) 模型及输入输出标杆数据解压在 quick_start_micro/models 目录下。
使用 Release 包中的 codegen 编译 MNIST 分类模型,生成对应的 x86 平台推理代码,具体命令如下:
```bash
./codegen --codePath=. --modelPath=mnist.ms --target=x86
```shell
./converter_lite --fmk=TFLITE --modelFile=${model_dir}/mnist.tflite --outputFile=${SOURCE_CODE_DIR} --configFile=${COFIG_FILE}
```
codegen在当前目录下将生成mnist目录其中包含了可编译构建的mnist分类模型的代码。
outputFile 指定micro代码生成目录当前脚本目录下将生成source_code目录其中包含了可编译构建的mnist分类模型的代码src和benchmark用例
> 关于codegen的更多使用命令说明可参见[codegen使用说明](https://www.mindspore.cn/lite/docs/zh-CN/master/use/micro.html#id4)。
### 部署应用
接下来介绍如何构建MindSpore Lite CodeGen生成的模型推理代码工程并在x86平台完成部署。上文中codegen生成的代码与`mindspore/mindspore/lite/micro/example/mnist_x86`相同本章节编译、构建步骤将对该目录展开用户也可参照相同操作编译上文codegen生成mnist目录代码。
接下来介绍如何构建MindSpore Lite Micro生成的模型推理代码工程并在x86平台完成部署。
#### 编译依赖
@ -94,28 +100,23 @@ codegen在当前目录下将生成mnist目录其中包含了可编译构建
1. **生成代码工程说明**
进入`mindspore/mindspore/lite/micro/example/mnist_x86`目录中。
生成代码工程目录说明:
当前目录下预置了MNIST分类网络生成的代码。
进入`mindspore/mindspore/lite/examples/quick_start_micro/mnist_x86`目录中,生成代码工程目录说明:
```text
mnist_x86/ # 生成代码的根目录
source_code/ # 生成代码的根目录
├── benchmark # 生成代码的benchmark目录
└── src # 模型推理代码目录
```
2. **代码编译**
组织模型生成的推理代码以及算子静态库编译生成模型推理静态库并编译生成benchmark可执行文件,
进入代码工程目录下新建并进入build目录
组织模型生成的推理代码以及算子静态库编译生成模型推理静态库并编译生成benchmark可执行文件, 进入代码工程目录下新建并进入build目录
```bash
cd source_code
mkdir build && cd build
```
开始编译:
```bash
@ -147,17 +148,17 @@ codegen在当前目录下将生成mnist目录其中包含了可编译构建
[100%] Linking CXX executable benchmark
[100%] Built target benchmark
```
此时在`mnist_x86/build/src/`目录下生成了`libnet.a`,推理执行库,在`mnist_x86/build`目录下生成了`benchmark`可执行文件。
此时在`mnist_x86/source_code/build/src/`目录下生成了`libnet.a`,推理执行库,在`mnist_x86/source_code/build`目录下生成了`benchmark`可执行文件。
3. **代码部署**
本示例部署于x86平台。由代码工程编译成功以后的产物为`benchmark`可执行文件将其拷贝到用户的目标Linux服务器中即可执行。
在目标Linux服务上执行编译成功的二进制文件
```bash
./benchmark mnist_input.bin net.bin
./benchmark mnist.tflite.ms.bin net.bin mnist.tflite.ms.out
```
> mnist_input.bin在`example/mnist_x86`目录下,`net.bin`为模型参数文件,在`example/mnist_x86/src`目录下。
@ -176,7 +177,8 @@ codegen在当前目录下将生成mnist目录其中包含了可编译构建
#### 编写推理代码示例
本教程中的`benchmark`内部实现主要用于指导用户如何编写以及调用codegen编译的模型推理代码接口。以下为接口调用的详细介绍详情代码可以参见[examples/mnist_x86](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/micro/example/mnist_x86)下的示例代码示例:
本教程中的`benchmark`内部实现主要用于指导用户如何编写以及调用codegen编译的模型推理代码接口。以下为接口调用的详细介绍
详情代码可以参见[examples/quick_start_micro/mnist_x86](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/examples/quick_start_micro/mnist_x86)下的示例代码示例:
1. **构建推理的上下文以及会话**
@ -262,7 +264,6 @@ codegen在当前目录下将生成mnist目录其中包含了可编译构建
```
## 更多详情
### [Android平台编译部署](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/micro/example/mobilenetv2/README.md#)
### [Android平台编译部署](https://gitee.com/mindspore/mindspore/blob/master/mindspore/lite/examples/quick_start_micro/mobilenetv2_arm64/README.md)
### [Arm&nbsp;Cortex-M平台编译部署](https://www.mindspore.cn/lite/docs/zh-CN/master/use/micro.html)

View File

@ -37,6 +37,7 @@ DEMO_DIR=${ROOT_DIR}/mindspore/lite/examples/quick_start_micro/mnist_x86
MODEL_DIR=${ROOT_DIR}/mindspore/lite/examples/quick_start_micro/models
PKG_DIR=${ROOT_DIR}/mindspore/lite/examples/quick_start_micro/pkgs
COFIG_FILE=${DEMO_DIR}/micro.cfg
SOURCE_CODE_DIR=${ROOT_DIR}/mindspore/lite/examples/quick_start_micro/mnist_x86/source_code
echo "root dir is: ${ROOT_DIR}"
echo "current dir is: ${BASEPATH}"
echo "demo dir is: ${DEMO_DIR}"
@ -73,7 +74,7 @@ DownloadModel() {
CodeGeneration() {
tar xzvf ${PKG_DIR}/${MINDSPORE_FILE} -C ${PKG_DIR} || exit 1
export LD_LIBRARY_PATH=${PKG_DIR}/${MINDSPORE_FILE_NAME}/tools/converter/lib:${LD_LIBRARY_PATH}
${PKG_DIR}/${MINDSPORE_FILE_NAME}/tools/converter/converter/converter_lite --fmk=TFLITE --modelFile=${MODEL} --outputFile=${DEMO_DIR} --configFile=${COFIG_FILE}
${PKG_DIR}/${MINDSPORE_FILE_NAME}/tools/converter/converter/converter_lite --fmk=TFLITE --modelFile=${MODEL} --outputFile=${SOURCE_CODE_DIR} --configFile=${COFIG_FILE}
}
GetVersion
@ -84,8 +85,8 @@ PKG_PATH=${PKG_DIR}/${MINDSPORE_FILE_NAME}
echo "tar ball is: ${TARBALL}"
if [ -n "$TARBALL" ]; then
echo "cp file"
rm -rf ${PKG_DIR}
mkdir -p ${PKG_DIR}
rm -rf ${PKG_PATH}
mkdir -p ${PKG_PATH}
cp ${TARBALL} ${PKG_DIR}
fi
@ -98,7 +99,7 @@ if [[ "${GEN}" == "ON" ]] || [[ "${GEN}" == "on" ]]; then
fi
# 2. build benchmark
mkdir -p ${DEMO_DIR}/build && cd ${DEMO_DIR}/build || exit 1
mkdir -p ${SOURCE_CODE_DIR}/build && cd ${SOURCE_CODE_DIR}/build || exit 1
cmake -DPKG_PATH=${PKG_PATH} ..
make

View File

@ -0,0 +1,145 @@
# Android编译部署
`Linux` `Android` `IOT` `C/C++` `全流程` `模型编译` `模型代码生成` `模型部署` `推理应用` `初级` `中级` `高级`
<!-- TOC -->
- Android编译部署
- [编译依赖](#编译依赖)
- [工程构建](#工程构建)
- [工程部署](#工程部署)
- [更多详情](#更多详情)
- [Linux_x86_64编译部署](#Linux_x86_64编译部署)
- [STM32F746编译部署](#STM32F746编译部署)
<!-- /TOC -->
## Android编译部署
本教程以MobileNetv2在安卓手机编译部署为例帮助用户快速了解codegen在安卓平台生成代码、工程构建以及部署的一系列流程。关于converter、codegen的获取以及详细参数介绍可参考mindspore的[编译构建介绍](https://www.mindspore.cn/lite/docs/zh-CN/master/use/build.html)。
### 编译依赖
安卓平台的编译部署需要提前配置ANDROID_NDK到环境变量。
- NDK 21.3
- [GCC](https://gcc.gnu.org/releases.html) >= 7.3.0
- [CMake](https://cmake.org/download/) >= 3.18.3
### 工程构建
#### 快速使用
进入`mindspore/mindspore/lite/examples/quick_start_micro/mobilenetv2_arm64`目录执行脚本`mobilenetv2.sh`自动生成模型推理代码并编译工程目录
```
bash mobilenetv2.sh -r ${dir}/mindspore-lite-${VERSION_STR}-linux-x64.tar.gz
```
codegen编译[MobileNetv2模型](https://download.mindspore.cn/model_zoo/official/lite/quick_start/micro/mobilenetv2.tar.gz),生成对应的模型推理代码。具体命令如下:
```shell
./converter_lite --fmk=TFLITE --modelFile=${model_dir}/mobilenet_v2_1.0_224.tflite --outputFile=${SOURCE_CODE_DIR} --configFile=${COFIG_FILE}
```
关于codegen的更多使用命令说明可参见[codegen工具的详细介绍](https://www.mindspore.cn/lite/docs/zh-CN/master/use/micro.html)
#### 生成代码工程说明
```bash
├── mobilenetv2
└── operator_library
```
##### 算子静态库目录说明
在编译此工程之前需要预先获取安卓平台对应的[Release包](https://www.mindspore.cn/lite/docs/zh-CN/master/use/downloads.html)。
安卓平台对应的Release包的目录如下:
```text
mindspore-lite-{version}-inference-android-{arch}
├── inference
│ ├── include # 推理框架头文件
│ ├── lib # 推理框架库
│ │ ├── libmindspore-lite.a # MindSpore Lite推理框架的静态库
│ │ └── libmindspore-lite.so # MindSpore Lite推理框架的动态库
│ ├── minddata # 图像处理库
│ │ ├── include
│ │ └── lib
│ │ └── libminddata-lite.so # 图像处理动态库文件
│ └── third_party # NPU库
│ └── hiai_ddk
└── tools
├── benchmark # 基准测试工具
│ └── benchmark
└── codegen # 代码生成工具
├── include # 算子头文件
└── lib # 算子静态库
```
生成代码工程目录如下:
```bash
├── mobilenetv2 # 生成代码的根目录
├── benchmark # 生成代码的benchmark目录
└── src # 模型推理代码目录
```
#### 代码工程编译
组织生成的模型推理代码以及安卓平台算子静态库编译模型推理静态库
进入代码工程目录新建并进入build目录
```bash
mkdir mobilenetv2/build && cd mobilenetv2/build
```
开始编译
```bash
cmake -DCMAKE_BUILD_TYPE=Release \
-DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" \
-DANDROID_ABI="arm64-v8a" \
-DANDROID_TOOLCHAIN_NAME="aarch64-linux-android-clang" \
-DANDROID_NATIVE_API_LEVEL="19" \
-DPLATFORM_ARM64=ON \
-DPKG_PATH={path to}/mindspore-lite-{version}-inference-android-{arch} ..
make
```
`{path to}`和`{version}`需要用户根据实际情况填写。若用户需要编译安卓arm32环境则使用:
```bash
cmake -DCMAKE_BUILD_TYPE=Release \
-DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" \
-DANDROID_ABI="armeabi-v7a" \
-DANDROID_TOOLCHAIN_NAME="clang" \
-DANDROID_NATIVE_API_LEVEL="19" \
-DPLATFORM_ARM32=ON \
-DPKG_PATH={path to}/mindspore-lite-{version}-inference-android-{arch} ..
make
```
此时在`mobilenetv2/build/src/`目录下生成了`libnet.a`,推理执行库,在`mobilenetv2/build`目录下生成了`benchmark`可执行文件。而对应的模型参数文件net.bin在生成的代码src目录下。
### 工程部署
adb将生成的可执行二进制文件benchmark、mobilenetv2_input.bin以及模型参数文件net.bin拷贝到目标安卓服务器执行以下命令即可
```bash
./benchmark mobilenetv2_input.bin net.bin 100
```
#### 执行结果
```bash
=========run benchmark========
input 0: mobilenetv2_input.bin
name: Softmax-65, ,DataType: 43, Size: 4004, Shape:1 1001, Data:
0.000010,0.000010,0.000014,0.000091,0.000080,0.000717,0.000112,0.000738,0.000008,0.000003
=========run success========
```
## 更多详情
### [Linux_x86_64编译部署](https://www.mindspore.cn/lite/docs/zh-CN/master/use/micro.html)
### [STM32F746编译部署](https://gitee.com/mindspore/mindspore/tree/master/mindspore/lite/examples/quick_start_micro/mnist_stm32f746)

View File

@ -0,0 +1,27 @@
[common_quant_param]
# Supports WEIGHT_QUANT or FULL_QUANT
#quant_type=WEIGHT_QUANT
# Weight quantization support the number of bits [0,16], Set to 0 is mixed bit quantization, otherwise it is fixed bit quantization
# Full quantization support the number of bits [1,8]
#bit_num=8
# Layers with size of weights exceeds threshold `min_quant_weight_size` will be quantized.
#min_quant_weight_size=0
# Layers with channel size of weights exceeds threshold `min_quant_weight_channel` will be quantized.
#min_quant_weight_channel=16
[micro_param]
# enable code-generation for MCU HW
enable_micro=true
# specify HW target, support x86, ARM32M, ARM32A, ARM64 only.
target=ARM64
# code generation for Inference or Train
codegen_mode=Inference
# enable parallel inference or not
support_parallel=false
# enable debug
debug_mode=false

View File

@ -0,0 +1,154 @@
#!/bin/bash
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
set -e
# Print help reflecting the options this script actually parses (see the
# getopts loop: -g toggles code generation, -r supplies a release tarball).
# The previous text advertised "-I arm64|arm32" for "build.sh", neither of
# which matches this script.
usage()
{
echo "Usage:"
echo "bash mobilenetv2.sh [-g on|off] [-r release.tar.gz]"
echo "Options:"
echo "    -g enable micro code generation for the model, default off"
echo "    -r path to a prebuilt mindspore-lite-{version}-linux-x64.tar.gz"
}
# Defaults: build for arm64, code generation off, no prebuilt tarball.
LITE_PLATFORM="arm64"
GEN=OFF
TARBALL=""
# Parse command-line options:
#   -g on|off  enable micro code generation for the model
#   -r PATH    use a prebuilt mindspore-lite linux-x64 release tarball
# NOTE(review): LITE_PLATFORM can never be changed from the command line even
# though the usage text mentions arm64/arm32 - confirm whether a -I option
# was intended here.
while getopts 'r:g:' OPT
do
case "${OPT}" in
g)
GEN=$OPTARG
;;
r)
TARBALL=$OPTARG
;;
?)
echo "Usage: add -g on , -r specific release.tar.gz"
esac
done
# Resolve the script's own directory and derive the repository root by
# stripping the known example path suffix.
BASEPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
ROOT_DIR=${BASEPATH%%/mindspore/lite/examples/quick_start_micro/mobilenetv2_arm64}
# Working directories: demo sources, downloaded models, release packages,
# converter config, and the destination for generated inference code.
# NOTE(review): "COFIG_FILE" is a typo for CONFIG_FILE, but it is used
# consistently across the quick_start_micro scripts - rename everywhere or
# not at all.
DEMO_DIR=${ROOT_DIR}/mindspore/lite/examples/quick_start_micro/mobilenetv2_arm64
MODEL_DIR=${ROOT_DIR}/mindspore/lite/examples/quick_start_micro/models
PKG_DIR=${ROOT_DIR}/mindspore/lite/examples/quick_start_micro/pkgs
COFIG_FILE=${DEMO_DIR}/micro.cfg
SOURCE_CODE_DIR=${ROOT_DIR}/mindspore/lite/examples/quick_start_micro/mobilenetv2_arm64/source_code
echo "root dir is: ${ROOT_DIR}"
echo "current dir is: ${BASEPATH}"
echo "demo dir is: ${DEMO_DIR}"
echo "model dir is: ${MODEL_DIR}"
# Model identifiers; the downloaded archive is ${MODEL_NAME}.tar.gz.
MODEL_NAME=mobilenetv2
#INPUT_BIN=${MODEL_DIR}/${MODEL_NAME}/mobilenetv2.tflite.ms.bin
#VALICATION_DATA=${MODEL_DIR}/${MODEL_NAME}/mobilenetv2.tflite.ms.out
MODEL=${MODEL_DIR}/${MODEL_NAME}/mobilenet_v2_1.0_224.tflite
MODEL_FILE=${MODEL_NAME}.tar.gz
echo "current dir is: ${BASEPATH}"
# Read the MindSpore Lite version out of include/version.h and publish it as
# the global VERSION_STR in "major.minor.revision" form.
get_version() {
local header=${ROOT_DIR}/mindspore/lite/include/version.h
local major=$(grep "const int ms_version_major =" ${header} | tr -dc "[0-9]")
local minor=$(grep "const int ms_version_minor =" ${header} | tr -dc "[0-9]")
local revision=$(grep "const int ms_version_revision =" ${header} | tr -dc "[0-9]")
VERSION_STR=${major}.${minor}.${revision}
}
# Download the prebuilt MindSpore Lite Android inference package for
# LITE_PLATFORM (arm64 -> aarch64, otherwise aarch32) into PKG_DIR, reusing a
# cached tarball when present, then unpack it. Sets the globals
# MINDSPORE_FILE_NAME (Android package name) and PKG_PATH (extracted dir).
download_inference() {
if [[ "${LITE_PLATFORM}" == "arm64" ]]; then
local ARM_NAME=aarch64
local DEVICE=cpu
else
local ARM_NAME=aarch32
local DEVICE=cpu
fi
# NOTE(review): this removes the package name under BASEPATH, but downloads
# and extraction happen under PKG_DIR - confirm this cleanup targets the
# intended directory.
rm -rf ${BASEPATH:?}/${MINDSPORE_FILE_NAME} || exit 1
MINDSPORE_FILE_NAME="mindspore-lite-${VERSION_STR}-android-${ARM_NAME}"
local MINDSPORE_FILE="${MINDSPORE_FILE_NAME}.tar.gz"
local MINDSPORE_LITE_DOWNLOAD_URL="https://ms-release.obs.cn-north-4.myhuaweicloud.com/${VERSION_STR}/MindSpore/lite/release/android/${DEVICE}/${MINDSPORE_FILE}"
# Skip the download when the tarball is already cached in PKG_DIR.
if [ ! -e ${PKG_DIR}/${MINDSPORE_FILE} ]; then
wget -c -O ${PKG_DIR}/${MINDSPORE_FILE} --no-check-certificate ${MINDSPORE_LITE_DOWNLOAD_URL}
fi
tar xzvf ${PKG_DIR}/${MINDSPORE_FILE} -C ${PKG_DIR} || exit 1
PKG_PATH=${PKG_DIR}/${MINDSPORE_FILE_NAME}
}
# Fetch the model archive (MODEL_FILE) from the MindSpore model zoo into
# MODEL_DIR, reusing a previously downloaded archive, and unpack it into
# MODEL_DIR/MODEL_NAME. Any stale unpacked copy is removed first.
DownloadModel() {
rm -rf ${MODEL_DIR:?}/${MODEL_NAME}
mkdir -p ${MODEL_DIR}/${MODEL_NAME}
local DOWNLOAD_URL=https://download.mindspore.cn/model_zoo/official/lite/quick_start/micro/${MODEL_FILE}
if [ ! -e ${MODEL_DIR}/${MODEL_FILE} ]; then
echo "download models ..."
wget -c -O ${MODEL_DIR}/${MODEL_FILE} --no-check-certificate ${DOWNLOAD_URL}
fi
echo "unpack models ..."
tar xzvf ${MODEL_DIR}/${MODEL_FILE} -C ${MODEL_DIR} || exit 1
}
# Unpack the converter package and run converter_lite with the micro config
# (COFIG_FILE) to generate C inference code for MODEL into SOURCE_CODE_DIR.
CodeGeneration() {
tar xzvf ${PKG_DIR}/${MINDSPORE_FILE} -C ${PKG_DIR} || exit 1
# converter_lite needs its bundled shared libraries on the loader path.
export LD_LIBRARY_PATH=${PKG_DIR}/${MINDSPORE_FILE_NAME}/tools/converter/lib:${LD_LIBRARY_PATH}
${PKG_DIR}/${MINDSPORE_FILE_NAME}/tools/converter/converter/converter_lite --fmk=TFLITE --modelFile=${MODEL} --outputFile=${SOURCE_CODE_DIR} --configFile=${COFIG_FILE}
}
# --- main driver ---
# Resolve the linux-x64 converter package name for the detected version.
get_version
MINDSPORE_FILE_NAME="mindspore-lite-${VERSION_STR}-linux-x64"
MINDSPORE_FILE="${MINDSPORE_FILE_NAME}.tar.gz"
PKG_PATH=${PKG_DIR}/${MINDSPORE_FILE_NAME}
echo "tar ball is: ${TARBALL}"
# When the user supplies a prebuilt tarball via -r, stage it into PKG_DIR
# (clearing any stale extracted package first).
if [ -n "$TARBALL" ]; then
echo "cp file"
rm -rf ${PKG_PATH}
mkdir -p ${PKG_PATH}
cp ${TARBALL} ${PKG_DIR}
fi
# 1. code-generation
if [[ "${GEN}" == "ON" ]] || [[ "${GEN}" == "on" ]]; then
echo "downloading ${MODEL_FILE}!"
DownloadModel
echo "micro code-generation"
CodeGeneration
fi
# 2. build benchmark
mkdir -p ${SOURCE_CODE_DIR}/build && cd ${SOURCE_CODE_DIR}/build || exit 1
# Fetch the Android inference package, then cross-compile the generated code
# with the Android NDK toolchain for the selected ABI.
download_inference
if [[ "${LITE_PLATFORM}" == "arm64" ]]; then
echo "making arm64"
cmake -DCMAKE_BUILD_TYPE=Release \
-DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" \
-DANDROID_ABI="arm64-v8a" \
-DANDROID_TOOLCHAIN_NAME="aarch64-linux-android-clang" \
-DANDROID_NATIVE_API_LEVEL="19" \
-DPLATFORM_ARM64=ON \
-DPKG_PATH=${PKG_PATH} ..
else
cmake -DCMAKE_BUILD_TYPE=Release \
-DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" \
-DANDROID_ABI="armeabi-v7a" \
-DANDROID_TOOLCHAIN_NAME="clang" \
-DANDROID_NATIVE_API_LEVEL="19" \
-DPLATFORM_ARM32=ON \
-DPKG_PATH=${PKG_PATH} ..
fi
make