/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_INCLUDE_API_MODEL_H
#define MINDSPORE_INCLUDE_API_MODEL_H

#include <string>
#include <vector>
#include <map>
#include <memory>
#include <utility>
#include "include/api/status.h"
#include "include/api/types.h"
#include "include/api/graph.h"
#include "include/api/context.h"
#include "include/api/callback/callback.h"
#include "include/api/cell.h"
#include "include/api/cfg.h"
#include "include/api/dual_abi_helper.h"

namespace mindspore {
class ModelImpl;
class Metrics;
class Net;
class Node;
class Expr;

namespace dataset {
class Dataset;
}  // namespace dataset

/// \brief The Model class is used to define a MindSpore model, facilitating computational graph management.
class MS_API Model {
 public:
  Model();
  ~Model();
  Model(const Model &) = delete;
  void operator=(const Model &) = delete;

  /// \brief Build a model from a model buffer so that it can run on a device.
  ///
  /// \param[in] model_data Define the buffer read from a model file.
  /// \param[in] data_size Define the number of bytes in the model buffer.
  /// \param[in] model_type Define the type of model file. Options: ModelType::kMindIR, ModelType::kOM. Only
  /// ModelType::kMindIR is valid for Lite.
  /// \param[in] model_context Define the context used to store options during execution.
  ///
  /// \return Status.
  Status Build(const void *model_data, size_t data_size, ModelType model_type,
               const std::shared_ptr<Context> &model_context = nullptr);
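
  // Usage sketch (illustrative, not part of the API): read a MindIR file into a buffer and
  // build it on CPU. The Context/CPUDeviceInfo setup is assumed from include/api/context.h;
  // "net.mindir" is a placeholder path.
  //
  //   std::ifstream ifs("net.mindir", std::ifstream::binary);
  //   std::vector<char> buf((std::istreambuf_iterator<char>(ifs)), std::istreambuf_iterator<char>());
  //   auto ctx = std::make_shared<mindspore::Context>();
  //   ctx->MutableDeviceInfo().push_back(std::make_shared<mindspore::CPUDeviceInfo>());
  //   mindspore::Model model;
  //   mindspore::Status ret = model.Build(buf.data(), buf.size(), mindspore::ModelType::kMindIR, ctx);
  //   if (ret != mindspore::kSuccess) { /* handle the build failure */ }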

  /// \brief Load and build a model from a model file so that it can run on a device.
  ///
  /// \param[in] model_path Define the model path.
  /// \param[in] model_type Define the type of model file. Options: ModelType::kMindIR, ModelType::kOM. Only
  /// ModelType::kMindIR is valid for Lite.
  /// \param[in] model_context Define the context used to store options during execution.
  ///
  /// \return Status.
  inline Status Build(const std::string &model_path, ModelType model_type,
                      const std::shared_ptr<Context> &model_context = nullptr);

  /// \brief Build a model from a model buffer so that it can run on a device.
  ///
  /// \param[in] model_data Define the buffer read from a model file.
  /// \param[in] data_size Define the number of bytes in the model buffer.
  /// \param[in] model_type Define the type of model file. Options: ModelType::kMindIR, ModelType::kOM. Only
  /// ModelType::kMindIR is valid for Lite.
  /// \param[in] model_context Define the context used to store options during execution.
  /// \param[in] dec_key Define the key used to decrypt the ciphertext model. The key length is 16.
  /// \param[in] dec_mode Define the decryption mode. Options: AES-GCM.
  /// \param[in] cropto_lib_path Define the OpenSSL library path.
  ///
  /// \return Status.
  inline Status Build(const void *model_data, size_t data_size, ModelType model_type,
                      const std::shared_ptr<Context> &model_context, const Key &dec_key, const std::string &dec_mode,
                      const std::string &cropto_lib_path);

  /// \brief Load and build a model from a model file so that it can run on a device.
  ///
  /// \param[in] model_path Define the model path.
  /// \param[in] model_type Define the type of model file. Options: ModelType::kMindIR, ModelType::kOM. Only
  /// ModelType::kMindIR is valid for Lite.
  /// \param[in] model_context Define the context used to store options during execution.
  /// \param[in] dec_key Define the key used to decrypt the ciphertext model. The key length is 16.
  /// \param[in] dec_mode Define the decryption mode. Options: AES-GCM.
  /// \param[in] cropto_lib_path Define the OpenSSL library path.
  ///
  /// \return Status.
  inline Status Build(const std::string &model_path, ModelType model_type,
                      const std::shared_ptr<Context> &model_context, const Key &dec_key, const std::string &dec_mode,
                      const std::string &cropto_lib_path);
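
  // Usage sketch (illustrative): build an encrypted MindIR file from a path with AES-GCM
  // decryption. The Key constructor is assumed from include/api/types.h, and the key bytes,
  // model path, and crypto library path below are placeholders.
  //
  //   auto ctx = std::make_shared<mindspore::Context>();
  //   ctx->MutableDeviceInfo().push_back(std::make_shared<mindspore::CPUDeviceInfo>());
  //   mindspore::Key key("0123456789ABCDEF", 16);  // 16-byte decryption key (placeholder)
  //   mindspore::Model model;
  //   mindspore::Status ret = model.Build("net_enc.mindir", mindspore::ModelType::kMindIR, ctx,
  //                                       key, "AES-GCM", "/usr/lib/x86_64-linux-gnu/libcrypto.so");
  //   if (ret != mindspore::kSuccess) { /* handle the build/decryption failure */ }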

  /// \brief Build a model from a GraphCell so that it can run on a device.
  ///
  /// \param[in] graph GraphCell is a derivative of Cell. Cell is not available currently. GraphCell can be constructed
  /// from Graph, for example, model.Build(GraphCell(graph), context).
  /// \param[in] model_context A context used to store options during execution.
  /// \param[in] train_cfg A config used by training.
  ///
  /// \return Status.
  Status Build(GraphCell graph, const std::shared_ptr<Context> &model_context = nullptr,
               const std::shared_ptr<TrainCfg> &train_cfg = nullptr);
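
  // Usage sketch (illustrative): load a Graph with Serialization and wrap it in a GraphCell
  // before building. Serialization::Load is assumed from include/api/serialization.h.
  //
  //   mindspore::Graph graph;
  //   if (mindspore::Serialization::Load("net.mindir", mindspore::ModelType::kMindIR, &graph) !=
  //       mindspore::kSuccess) { /* handle the load failure */ }
  //   auto ctx = std::make_shared<mindspore::Context>();
  //   ctx->MutableDeviceInfo().push_back(std::make_shared<mindspore::CPUDeviceInfo>());
  //   mindspore::Model model;
  //   mindspore::Status ret = model.Build(mindspore::GraphCell(graph), ctx);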

  /// \brief Build a train model.
  ///
  /// \param[in] graph A forward network.
  /// \param[in] optimizer An optimizer node.
  /// \param[in] inputs Input expressions for the trained network (e.g., input, label).
  /// \param[in] model_context A context used to store options during execution.
  /// \param[in] train_cfg A config used by training.
  ///
  /// \return Status.
  Status Build(GraphCell graph, Node *optimizer, std::vector<Expr *> inputs,
               const std::shared_ptr<Context> &model_context, const std::shared_ptr<TrainCfg> &train_cfg);

  /// \brief Build a transfer-learning model, where the backbone weights are fixed and the head weights are trainable.
  ///
  /// \param[in] backbone The static, non-learnable part of the graph.
  /// \param[in] head The trainable part of the graph.
  /// \param[in] context A context used to store options during execution.
  /// \param[in] train_cfg A config used by training.
  ///
  /// \return Status.
  Status BuildTransferLearning(GraphCell backbone, GraphCell head, const std::shared_ptr<Context> &context,
                               const std::shared_ptr<TrainCfg> &train_cfg = nullptr);

  /// \brief Resize the shapes of inputs.
  ///
  /// \param[in] inputs A vector that includes all input tensors in order.
  /// \param[in] dims Defines the new shapes of the inputs; must be consistent with inputs.
  ///
  /// \return Status.
  Status Resize(const std::vector<MSTensor> &inputs, const std::vector<std::vector<int64_t>> &dims);
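
  // Usage sketch (illustrative): resize the first input to batch size 2, assuming a model
  // that was built with a 1x224x224x3 input.
  //
  //   std::vector<mindspore::MSTensor> inputs = model.GetInputs();
  //   std::vector<std::vector<int64_t>> new_shapes = {{2, 224, 224, 3}};
  //   if (model.Resize(inputs, new_shapes) != mindspore::kSuccess) { /* handle the resize failure */ }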

  /// \brief Change the size and/or content of weight tensors.
  ///
  /// \param[in] new_weights A vector of tensors with new shapes and data to use in the model.
  /// If the data pointer is null, the data of the original tensors is copied to the new ones.
  ///
  /// \return Status.
  Status UpdateWeights(const std::vector<MSTensor> &new_weights);

  /// \brief Run model inference. If this API is used in train mode, it is equivalent to RunStep.
  ///
  /// \param[in] inputs A vector where model inputs are arranged in sequence.
  /// \param[out] outputs A pointer to a vector; the model outputs are filled into the container in sequence.
  /// \param[in] before CallBack before predict.
  /// \param[in] after CallBack after predict.
  ///
  /// \return Status.
  Status Predict(const std::vector<MSTensor> &inputs, std::vector<MSTensor> *outputs,
                 const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr);
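
  // Usage sketch (illustrative): copy host data into the model inputs and run one inference.
  // MSTensor::MutableData()/DataSize() are assumed from include/api/types.h, and host_data is
  // a placeholder pointer holding at least DataSize() bytes for the first input.
  //
  //   std::vector<mindspore::MSTensor> inputs = model.GetInputs();
  //   std::memcpy(inputs[0].MutableData(), host_data, inputs[0].DataSize());
  //   std::vector<mindspore::MSTensor> outputs;
  //   if (model.Predict(inputs, &outputs) != mindspore::kSuccess) { /* handle the inference failure */ }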

  /// \brief Run model inference. If this API is used in train mode, it is equivalent to RunStep.
  ///
  /// \param[in] before CallBack before predict.
  /// \param[in] after CallBack after predict.
  ///
  /// \return Status.
  Status Predict(const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr);

  /// \brief Training API. Run the model for a single step.
  ///
  /// \param[in] before CallBack before RunStep.
  /// \param[in] after CallBack after RunStep.
  ///
  /// \return Status.
  Status RunStep(const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr);
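
  // Usage sketch (illustrative): a hand-written training loop that feeds each batch into the
  // input tensors and runs one step at a time. FillBatch is a hypothetical helper that copies
  // the next batch into the input tensors; num_steps is a placeholder.
  //
  //   model.SetTrainMode(true);
  //   std::vector<mindspore::MSTensor> inputs = model.GetInputs();
  //   for (int step = 0; step < num_steps; ++step) {
  //     FillBatch(&inputs, step);                         // hypothetical data-feeding helper
  //     if (model.RunStep() != mindspore::kSuccess) { break; }
  //   }
  //   model.SetTrainMode(false);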

  /// \brief Run model inference with the data preprocess embedded in the model.
  ///
  /// \param[in] inputs A vector where model inputs are arranged in sequence.
  /// \param[out] outputs A pointer to a vector; the model outputs are filled into the container in sequence.
  /// \param[in] before CallBack before predict.
  /// \param[in] after CallBack after predict.
  ///
  /// \return Status.
  Status PredictWithPreprocess(const std::vector<std::vector<MSTensor>> &inputs, std::vector<MSTensor> *outputs,
                               const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr);

  /// \brief Apply the data preprocess if it exists in the model.
  ///
  /// \param[in] inputs A vector where model inputs are arranged in sequence.
  /// \param[out] outputs A pointer to a vector; the model outputs are filled into the container in sequence.
  ///
  /// \return Status.
  Status Preprocess(const std::vector<std::vector<MSTensor>> &inputs, std::vector<MSTensor> *outputs);

  /// \brief Check whether the data preprocess exists in the model.
  ///
  /// \return true if the data preprocess exists.
  bool HasPreprocess();

  /// \brief Load a config file.
  ///
  /// \param[in] config_path Define the config file path.
  ///
  /// \return Status.
  inline Status LoadConfig(const std::string &config_path);

  /// \brief Update the config.
  ///
  /// \param[in] section Define the config section.
  /// \param[in] config Define the config key-value pair to be updated.
  ///
  /// \return Status.
  inline Status UpdateConfig(const std::string &section, const std::pair<std::string, std::string> &config);
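
  // Usage sketch (illustrative): load an execution config file and then override a single
  // key. The file name, section, and key-value pair below are placeholders, not a documented
  // config schema.
  //
  //   model.LoadConfig("model_config.ini");
  //   model.UpdateConfig("execution_plan", {"op_name", "data_type:float16"});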

  /// \brief Obtains all input tensors of the model.
  ///
  /// \return The vector that includes all input tensors.
  std::vector<MSTensor> GetInputs();

  /// \brief Obtains the input tensor of the model by name.
  ///
  /// \param[in] tensor_name Define the tensor name.
  ///
  /// \return The input tensor with the given name; if the name is not found, an invalid tensor is returned.
  inline MSTensor GetInputByTensorName(const std::string &tensor_name);
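
  // Usage sketch (illustrative): look up one input by name and inspect its shape. The
  // MSTensor nullptr comparison and Shape() accessor are assumed from include/api/types.h;
  // "input_ids" is a placeholder tensor name.
  //
  //   mindspore::MSTensor in = model.GetInputByTensorName("input_ids");
  //   if (in == nullptr) { /* name not found: an invalid tensor was returned */ }
  //   for (int64_t dim : in.Shape()) { std::cout << dim << " "; }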

  /// \brief Obtain all gradient tensors of the model.
  ///
  /// \return The vector that includes all gradient tensors.
  std::vector<MSTensor> GetGradients() const;

  /// \brief Update the gradient tensors of the model.
  ///
  /// \param[in] gradients A vector of new gradients.
  ///
  /// \return Status of operation.
  Status ApplyGradients(const std::vector<MSTensor> &gradients);

  /// \brief Obtain all weight tensors of the model.
  ///
  /// \return The vector that includes all weight tensors.
  std::vector<MSTensor> GetFeatureMaps() const;

  /// \brief Obtain all trainable parameters of the model optimizers.
  ///
  /// \return The vector that includes all trainable parameters.
  std::vector<MSTensor> GetTrainableParams() const;

  /// \brief Update the weight tensors of the model.
  ///
  /// \param[in] new_weights A vector of new weight tensors.
  ///
  /// \return Status of operation.
  Status UpdateFeatureMaps(const std::vector<MSTensor> &new_weights);

  /// \brief Obtain the optimizer parameter tensors of the model.
  ///
  /// \return The vector that includes all optimizer parameter tensors.
  std::vector<MSTensor> GetOptimizerParams() const;

  /// \brief Update the optimizer parameters.
  ///
  /// \param[in] params A vector of new optimizer parameters.
  ///
  /// \return Status of operation.
  Status SetOptimizerParams(const std::vector<MSTensor> &params);

  /// \brief Set up training with virtual batches.
  ///
  /// \param[in] virtual_batch_multiplier Virtual batch multiplier; use any number < 1 to disable.
  /// \param[in] lr Learning rate to use for the virtual batch, -1 for internal configuration.
  /// \param[in] momentum Batch norm momentum to use for the virtual batch, -1 for internal configuration.
  ///
  /// \return Status of operation.
  Status SetupVirtualBatch(int virtual_batch_multiplier, float lr = -1.0f, float momentum = -1.0f);
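
  // Usage sketch (illustrative): accumulate gradients over 4 virtual sub-batches and adjust
  // the learning rate afterwards; the numbers are arbitrary examples.
  //
  //   model.SetupVirtualBatch(4);          // 4 virtual batches per optimizer step
  //   model.SetLearningRate(1e-3f);
  //   float lr = model.GetLearningRate();  // 0.0f if no optimizer was found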

  /// \brief Set the learning rate of the training.
  ///
  /// \param[in] learning_rate The learning rate to set.
  ///
  /// \return Status of operation.
  Status SetLearningRate(float learning_rate);

  /// \brief Get the learning rate of the optimizer.
  ///
  /// \return The learning rate; 0.0 if no optimizer was found.
  float GetLearningRate();

  /// \brief Initialize the object with metrics.
  ///
  /// \param[in] metrics A vector of metrics objects.
  ///
  /// \return Status of operation.
  Status InitMetrics(std::vector<Metrics *> metrics);

  /// \brief Accessor for the TrainLoop metrics objects.
  ///
  /// \return A vector of metrics.
  std::vector<Metrics *> GetMetrics();

  /// \brief Obtains all output tensors of the model.
  ///
  /// \return The vector that includes all output tensors.
  std::vector<MSTensor> GetOutputs();

  /// \brief Obtains names of all output tensors of the model.
  ///
  /// \return A vector that includes names of all output tensors.
  inline std::vector<std::string> GetOutputTensorNames();

  /// \brief Obtains the output tensor of the model by name.
  ///
  /// \param[in] tensor_name Define the tensor name.
  ///
  /// \return The output tensor with the given name; if the name is not found, an invalid tensor is returned.
  inline MSTensor GetOutputByTensorName(const std::string &tensor_name);

  /// \brief Get the output MSTensors of the model by node name.
  ///
  /// \param[in] node_name Define the node name.
  ///
  /// \note Deprecated, replace with GetOutputByTensorName.
  ///
  /// \return The vector of output MSTensor.
  inline std::vector<MSTensor> GetOutputsByNodeName(const std::string &node_name);
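
  // Usage sketch (illustrative): after Predict, print each output's name and byte size.
  // MSTensor::Name()/DataSize() are assumed from include/api/types.h.
  //
  //   for (const auto &name : model.GetOutputTensorNames()) {
  //     mindspore::MSTensor out = model.GetOutputByTensorName(name);
  //     std::cout << out.Name() << ": " << out.DataSize() << " bytes" << std::endl;
  //   }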

  /// \brief Bind GLTexture2D objects to OpenCL memory.
  ///
  /// \param[in] inputGLTexture The input GLTexture id map for the Model.
  /// \param[out] outputGLTexture The output GLTexture id map for the Model.
  ///
  /// \return Status of operation.
  Status BindGLTexture2DMemory(const std::map<std::string, unsigned int> &inputGLTexture,
                               std::map<std::string, unsigned int> *outputGLTexture);

  /// \brief Check whether the given device type supports the given model type.
  ///
  /// \param[in] device_type Device type; options are kGPU, kAscend, etc.
  /// \param[in] model_type The type of model file, options are ModelType::kMindIR, ModelType::kOM.
  ///
  /// \return Is supported or not.
  static bool CheckModelSupport(enum DeviceType device_type, ModelType model_type);
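
  // Usage sketch (illustrative): fall back to CPU when the current package cannot run MindIR
  // models on Ascend. AscendDeviceInfo/CPUDeviceInfo are assumed from include/api/context.h.
  //
  //   auto ctx = std::make_shared<mindspore::Context>();
  //   auto &device_list = ctx->MutableDeviceInfo();
  //   if (mindspore::Model::CheckModelSupport(mindspore::kAscend, mindspore::ModelType::kMindIR)) {
  //     device_list.push_back(std::make_shared<mindspore::AscendDeviceInfo>());
  //   } else {
  //     device_list.push_back(std::make_shared<mindspore::CPUDeviceInfo>());
  //   }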

  /// \brief Set the model running mode.
  ///
  /// \param[in] train True means the model runs in Train Mode; otherwise Eval Mode.
  ///
  /// \return Status of operation.
  Status SetTrainMode(bool train);

  /// \brief Get the model running mode.
  ///
  /// \return Whether the model is in Train Mode.
  bool GetTrainMode() const;

  /// \brief Performs the training loop in Train Mode.
  ///
  /// \param[in] epochs The number of epochs to run.
  /// \param[in] ds A smart pointer to a MindData Dataset object.
  /// \param[in] cbs A vector of TrainCallBack objects.
  ///
  /// \return Status of operation.
  Status Train(int epochs, std::shared_ptr<dataset::Dataset> ds, std::vector<TrainCallBack *> cbs);

  /// \brief Performs the evaluation loop over all data in Eval Mode.
  ///
  /// \param[in] ds A smart pointer to a MindData Dataset object.
  /// \param[in] cbs A vector of TrainCallBack objects.
  ///
  /// \return Status of operation.
  Status Evaluate(std::shared_ptr<dataset::Dataset> ds, std::vector<TrainCallBack *> cbs);
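
  // Usage sketch (illustrative): train for 5 epochs with a loss-monitor callback and then
  // evaluate with an accuracy metric. The dataset handles, AccuracyMetrics, and LossMonitor
  // are assumed from the MindData, include/api/metrics, and include/api/callback headers.
  //
  //   std::shared_ptr<mindspore::dataset::Dataset> train_ds = /* created with the MindData API */;
  //   std::shared_ptr<mindspore::dataset::Dataset> eval_ds = /* created with the MindData API */;
  //   mindspore::AccuracyMetrics acc;
  //   model.InitMetrics({&acc});
  //   mindspore::LossMonitor monitor(/*per_print_steps=*/100);
  //   model.Train(5, train_ds, {&monitor});
  //   model.Evaluate(eval_ds, {});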

 private:
  friend class Serialization;
  // api without std::string
  MSTensor GetInputByTensorName(const std::vector<char> &tensor_name);
  std::vector<std::vector<char>> GetOutputTensorNamesChar();
  MSTensor GetOutputByTensorName(const std::vector<char> &tensor_name);
  std::vector<MSTensor> GetOutputsByNodeName(const std::vector<char> &node_name);
  Status LoadConfig(const std::vector<char> &config_path);
  Status UpdateConfig(const std::vector<char> &section, const std::pair<std::vector<char>, std::vector<char>> &config);
  Status Build(const std::vector<char> &model_path, ModelType model_type,
               const std::shared_ptr<Context> &model_context);
  Status Build(const void *model_data, size_t data_size, ModelType model_type,
               const std::shared_ptr<Context> &model_context, const Key &dec_key, const std::vector<char> &dec_mode,
               const std::vector<char> &cropto_lib_path);
  Status Build(const std::vector<char> &model_path, ModelType model_type, const std::shared_ptr<Context> &model_context,
               const Key &dec_key, const std::vector<char> &dec_mode, const std::vector<char> &cropto_lib_path);
  std::shared_ptr<ModelImpl> impl_;
};

MSTensor Model::GetInputByTensorName(const std::string &tensor_name) {
  return GetInputByTensorName(StringToChar(tensor_name));
}

std::vector<std::string> Model::GetOutputTensorNames() { return VectorCharToString(GetOutputTensorNamesChar()); }

MSTensor Model::GetOutputByTensorName(const std::string &tensor_name) {
  return GetOutputByTensorName(StringToChar(tensor_name));
}

std::vector<MSTensor> Model::GetOutputsByNodeName(const std::string &node_name) {
  return GetOutputsByNodeName(StringToChar(node_name));
}

Status Model::LoadConfig(const std::string &config_path) { return LoadConfig(StringToChar(config_path)); }

Status Model::UpdateConfig(const std::string &section, const std::pair<std::string, std::string> &config) {
  std::pair<std::vector<char>, std::vector<char>> config_pair = {StringToChar(config.first),
                                                                 StringToChar(config.second)};
  return UpdateConfig(StringToChar(section), config_pair);
}

Status Model::Build(const void *model_data, size_t data_size, ModelType model_type,
                    const std::shared_ptr<Context> &model_context, const Key &dec_key, const std::string &dec_mode,
                    const std::string &cropto_lib_path) {
  return Build(model_data, data_size, model_type, model_context, dec_key, StringToChar(dec_mode),
               StringToChar(cropto_lib_path));
}

Status Model::Build(const std::string &model_path, ModelType model_type, const std::shared_ptr<Context> &model_context,
                    const Key &dec_key, const std::string &dec_mode, const std::string &cropto_lib_path) {
  return Build(StringToChar(model_path), model_type, model_context, dec_key, StringToChar(dec_mode),
               StringToChar(cropto_lib_path));
}

Status Model::Build(const std::string &model_path, ModelType model_type,
                    const std::shared_ptr<Context> &model_context) {
  return Build(StringToChar(model_path), model_type, model_context);
}
}  // namespace mindspore
#endif  // MINDSPORE_INCLUDE_API_MODEL_H