/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_INCLUDE_API_CONTEXT_H
#define MINDSPORE_INCLUDE_API_CONTEXT_H

#include <string>
#include <memory>
#include <vector>
#include <map>
#include <type_traits>
#include "include/api/types.h"
#include "include/api/dual_abi_helper.h"

namespace mindspore {
enum DeviceType {
  kCPU = 0,
  kGPU,
  kKirinNPU,
  kAscend910,
  kAscend310,
  // add new type here
  kInvalidDeviceType = 100,
};

class Allocator;
class Delegate;
class DeviceInfoContext;

/// \brief Context is used to store environment variables during execution.
class MS_API Context {
 public:
  Context();
  ~Context() = default;

  /// \brief Set the number of threads at runtime. Only valid for Lite.
  ///
  /// \param[in] thread_num The number of threads at runtime.
  void SetThreadNum(int32_t thread_num);

  /// \brief Get the current thread number setting. Only valid for Lite.
  ///
  /// \return The current thread number setting.
  int32_t GetThreadNum() const;

  /// \brief Set the thread affinity mode for CPU cores. Only valid for Lite.
  ///
  /// \param[in] mode 0: no affinity, 1: big cores first, 2: little cores first.
  void SetThreadAffinity(int mode);

  /// \brief Get the thread affinity mode for CPU cores. Only valid for Lite.
  ///
  /// \return Thread affinity mode. 0: no affinity, 1: big cores first, 2: little cores first.
  int GetThreadAffinityMode() const;

  /// \brief Set the list of CPU cores that threads are bound to. Only valid for Lite.
  ///
  /// \note If both core_list and mode are set by SetThreadAffinity, core_list takes effect and mode is ignored.
  ///
  /// \param[in] core_list A vector of CPU core IDs to bind threads to.
  void SetThreadAffinity(const std::vector<int> &core_list);

  /// \brief Get the list of CPU cores that threads are bound to. Only valid for Lite.
  ///
  /// \return core_list A vector of CPU core IDs.
  std::vector<int32_t> GetThreadAffinityCoreList() const;

  /// \brief Set whether to perform model inference or training in parallel. Only valid for Lite.
  ///
  /// \param[in] is_parallel true: run in parallel; false: do not run in parallel.
  void SetEnableParallel(bool is_parallel);

  /// \brief Get whether model inference or training is performed in parallel. Only valid for Lite.
  ///
  /// \return Bool value that indicates whether parallelism is enabled.
  bool GetEnableParallel() const;

  /// \brief Set the Delegate used to access a third-party AI framework. Only valid for Lite.
  ///
  /// \param[in] delegate Pointer to the custom delegate.
  void SetDelegate(const std::shared_ptr<Delegate> &delegate);

  /// \brief Get the delegate of the third-party AI framework. Only valid for Lite.
  ///
  /// \return Pointer to the custom delegate.
  std::shared_ptr<Delegate> GetDelegate() const;

  /// \brief Get a mutable reference of the DeviceInfoContext vector in this context. Only MindSpore Lite supports
  /// heterogeneous scenarios with multiple members in the vector.
  ///
  /// \return Mutable reference of the DeviceInfoContext vector in this context.
  std::vector<std::shared_ptr<DeviceInfoContext>> &MutableDeviceInfo();

 private:
  struct Data;
  std::shared_ptr<Data> data_;
};
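
// A minimal usage sketch of Context (illustrative only, not part of the API):
// it shows a typical Lite setup of thread options plus one device entry. The
// variable names and the CPU backend choice are assumptions.
//
//   auto context = std::make_shared<mindspore::Context>();
//   context->SetThreadNum(2);
//   context->SetThreadAffinity(1);  // 1: bind to big cores first
//   auto cpu_info = std::make_shared<mindspore::CPUDeviceInfo>();  // declared below
//   context->MutableDeviceInfo().push_back(cpu_info);
//   // The configured context is then typically passed when building a Model.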

/// \brief DeviceInfoContext defines different device contexts.
class MS_API DeviceInfoContext : public std::enable_shared_from_this<DeviceInfoContext> {
 public:
  struct Data;

  DeviceInfoContext();
  virtual ~DeviceInfoContext() = default;

  /// \brief Get the type of this DeviceInfoContext.
  ///
  /// \return Type of this DeviceInfoContext.
  virtual enum DeviceType GetDeviceType() const = 0;

  /// \brief A similar function to RTTI is provided when the -fno-rtti compilation option is turned on. It converts
  /// this DeviceInfoContext to a shared pointer of type T, and returns nullptr if the conversion fails.
  ///
  /// \param T Type
  /// \return A pointer of type T after conversion. If the conversion fails, it will be nullptr.
  template <class T>
  std::shared_ptr<T> Cast() {
    static_assert(std::is_base_of<DeviceInfoContext, T>::value, "Wrong cast type.");
    if (GetDeviceType() != T().GetDeviceType()) {
      return nullptr;
    }

    return std::static_pointer_cast<T>(shared_from_this());
  }

  /// \brief Obtain the provider's name.
  ///
  /// \return The provider's name.
  std::string GetProvider() const;

  /// \brief Set the provider's name.
  ///
  /// \param[in] provider The provider's name.
  void SetProvider(const std::string &provider);

  /// \brief Obtain the provider's device type.
  ///
  /// \return The provider's device type.
  std::string GetProviderDevice() const;

  /// \brief Set the provider's device type.
  ///
  /// \param[in] device The provider's device type, e.g. CPU.
  void SetProviderDevice(const std::string &device);

  /// \brief Set the memory allocator.
  ///
  /// \param[in] allocator The memory allocator, which can be defined by the user.
  void SetAllocator(const std::shared_ptr<Allocator> &allocator);

  /// \brief Obtain the memory allocator.
  ///
  /// \return The memory allocator.
  std::shared_ptr<Allocator> GetAllocator() const;

 protected:
  std::shared_ptr<Data> data_;
};
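
// A brief sketch of Cast<T>() (illustrative only): it retrieves a concrete
// device info back from the heterogeneous device list. Assumes `context`
// already holds a CPUDeviceInfo entry (CPUDeviceInfo is declared below).
//
//   std::shared_ptr<DeviceInfoContext> info = context->MutableDeviceInfo()[0];
//   auto cpu_info = info->Cast<CPUDeviceInfo>();
//   if (cpu_info != nullptr) {
//     cpu_info->SetEnableFP16(true);
//   }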

/// \brief Derived from DeviceInfoContext, the configuration of the model running on the CPU. This option is only valid
/// for MindSpore Lite.
class MS_API CPUDeviceInfo : public DeviceInfoContext {
 public:
  /// \brief Get the type of this DeviceInfoContext.
  ///
  /// \return Type of this DeviceInfoContext.
  enum DeviceType GetDeviceType() const override { return DeviceType::kCPU; };

  /// \brief Set whether to enable float16 inference.
  ///
  /// \param[in] is_fp16 Whether to enable float16 inference.
  void SetEnableFP16(bool is_fp16);

  /// \brief Get whether float16 inference is enabled.
  ///
  /// \return Whether float16 inference is enabled.
  bool GetEnableFP16() const;
};

/// \brief Derived from DeviceInfoContext, the configuration of the model running on the NPU. This option is only valid
/// for MindSpore Lite.
class MS_API KirinNPUDeviceInfo : public DeviceInfoContext {
 public:
  /// \brief Get the type of this DeviceInfoContext.
  ///
  /// \return Type of this DeviceInfoContext.
  enum DeviceType GetDeviceType() const override { return DeviceType::kKirinNPU; };

  /// \brief Set the NPU frequency.
  ///
  /// \param[in] frequency Can be set to 1 (low power consumption), 2 (balanced), 3 (high performance), 4 (extreme
  /// performance), default as 3.
  void SetFrequency(int frequency);

  /// \brief Get the NPU frequency.
  ///
  /// \return NPU frequency.
  int GetFrequency() const;
};
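
// A minimal sketch of configuring the Kirin NPU backend (illustrative only;
// assumes an existing `context`):
//
//   auto npu_info = std::make_shared<mindspore::KirinNPUDeviceInfo>();
//   npu_info->SetFrequency(3);  // 3: high performance (the default)
//   context->MutableDeviceInfo().push_back(npu_info);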

/// \brief Derived from DeviceInfoContext, the configuration of the model running on the GPU.
class MS_API GPUDeviceInfo : public DeviceInfoContext {
 public:
  /// \brief Get the type of this DeviceInfoContext.
  ///
  /// \return Type of this DeviceInfoContext.
  enum DeviceType GetDeviceType() const override { return DeviceType::kGPU; };

  /// \brief Set the device id.
  ///
  /// \param[in] device_id The device id.
  void SetDeviceID(uint32_t device_id);

  /// \brief Get the device id.
  ///
  /// \return The device id.
  uint32_t GetDeviceID() const;

  /// \brief Set the precision mode.
  ///
  /// \param[in] precision_mode Optional "origin" or "fp16"; "origin" is set as default.
  inline void SetPrecisionMode(const std::string &precision_mode);

  /// \brief Get the precision mode.
  ///
  /// \return The precision mode.
  inline std::string GetPrecisionMode() const;

  /// \brief Set whether to enable float16 inference.
  ///
  /// \param[in] is_fp16 Whether to enable float16 inference.
  void SetEnableFP16(bool is_fp16);

  /// \brief Get whether float16 inference is enabled.
  ///
  /// \return Whether float16 inference is enabled.
  bool GetEnableFP16() const;

 private:
  void SetPrecisionMode(const std::vector<char> &precision_mode);
  std::vector<char> GetPrecisionModeChar() const;
};
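
// A minimal sketch of configuring the GPU backend (illustrative only; the
// device id and the choice of float16 are assumptions, and `context` is an
// existing Context):
//
//   auto gpu_info = std::make_shared<mindspore::GPUDeviceInfo>();
//   gpu_info->SetDeviceID(0);
//   gpu_info->SetEnableFP16(true);
//   context->MutableDeviceInfo().push_back(gpu_info);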

void GPUDeviceInfo::SetPrecisionMode(const std::string &precision_mode) {
  SetPrecisionMode(StringToChar(precision_mode));
}
std::string GPUDeviceInfo::GetPrecisionMode() const { return CharToString(GetPrecisionModeChar()); }

/// \brief Derived from DeviceInfoContext, the configuration of the model running on the Ascend910. This option is
/// invalid for MindSpore Lite.
class MS_API Ascend910DeviceInfo : public DeviceInfoContext {
 public:
  /// \brief Get the type of this DeviceInfoContext.
  ///
  /// \return Type of this DeviceInfoContext.
  enum DeviceType GetDeviceType() const override { return DeviceType::kAscend910; };

  /// \brief Set the device id.
  ///
  /// \param[in] device_id The device id.
  void SetDeviceID(uint32_t device_id);

  /// \brief Get the device id.
  ///
  /// \return The device id.
  uint32_t GetDeviceID() const;
};

/// \brief Derived from DeviceInfoContext, the configuration of the model running on the Ascend310. This option is
/// invalid for MindSpore Lite.
class MS_API Ascend310DeviceInfo : public DeviceInfoContext {
 public:
  /// \brief Get the type of this DeviceInfoContext.
  ///
  /// \return Type of this DeviceInfoContext.
  enum DeviceType GetDeviceType() const override { return DeviceType::kAscend310; };

  /// \brief Set the device id.
  ///
  /// \param[in] device_id The device id.
  void SetDeviceID(uint32_t device_id);

  /// \brief Get the device id.
  ///
  /// \return The device id.
  uint32_t GetDeviceID() const;

  /// \brief Set the AIPP configuration file path.
  ///
  /// \param[in] cfg_path AIPP configuration file path.
  inline void SetInsertOpConfigPath(const std::string &cfg_path);

  /// \brief Get the AIPP configuration file path.
  ///
  /// \return AIPP configuration file path.
  inline std::string GetInsertOpConfigPath() const;

  /// \brief Set the format of model inputs.
  ///
  /// \param[in] format Optional "NCHW", "NHWC", etc.
  inline void SetInputFormat(const std::string &format);

  /// \brief Get the format of model inputs.
  ///
  /// \return The format of model inputs.
  inline std::string GetInputFormat() const;

  /// \brief Set the shape of model inputs.
  ///
  /// \param[in] shape e.g. "input_op_name1: 1,2,3,4;input_op_name2: 4,3,2,1".
  inline void SetInputShape(const std::string &shape);

  /// \brief Get the shape of model inputs.
  ///
  /// \return The shape of model inputs.
  inline std::string GetInputShape() const;

  /// \brief Set the shape of model inputs.
  ///
  /// \param[in] shape e.g. {{1, {1,2,3,4}}, {2, {4,3,2,1}}} means the first input shape is 1,2,3,4 and the second
  /// input shape is 4,3,2,1.
  void SetInputShapeMap(const std::map<int, std::vector<int>> &shape);

  /// \brief Get the shape of model inputs.
  ///
  /// \return The shape of model inputs.
  std::map<int, std::vector<int>> GetInputShapeMap() const;

  /// \brief Set the dynamic batch sizes of model inputs.
  ///
  /// \param[in] dynamic_batch_size A vector of supported batch sizes, e.g. {1, 2}.
  void SetDynamicBatchSize(const std::vector<size_t> &dynamic_batch_size);

  /// \brief Get the dynamic batch sizes of model inputs.
  ///
  /// \return The dynamic batch sizes of model inputs.
  inline std::string GetDynamicBatchSize() const;

  /// \brief Set the type of model outputs.
  ///
  /// \param[in] output_type FP32, UINT8 or FP16, default as FP32.
  void SetOutputType(enum DataType output_type);

  /// \brief Get the type of model outputs.
  ///
  /// \return The set type of model outputs.
  enum DataType GetOutputType() const;

  /// \brief Set the precision mode of the model.
  ///
  /// \param[in] precision_mode Optional "force_fp16", "allow_fp32_to_fp16", "must_keep_origin_dtype" and
  /// "allow_mix_precision", "force_fp16" is set as default.
  inline void SetPrecisionMode(const std::string &precision_mode);

  /// \brief Get the precision mode of the model.
  ///
  /// \return The precision mode.
  inline std::string GetPrecisionMode() const;

  /// \brief Set the op select implementation mode.
  ///
  /// \param[in] op_select_impl_mode Optional "high_performance" and "high_precision", "high_performance" is set as
  /// default.
  inline void SetOpSelectImplMode(const std::string &op_select_impl_mode);

  /// \brief Get the op select implementation mode.
  ///
  /// \return The set op select implementation mode.
  inline std::string GetOpSelectImplMode() const;

  /// \brief Set the fusion switch configuration file path.
  ///
  /// \param[in] cfg_path Fusion switch configuration file path.
  inline void SetFusionSwitchConfigPath(const std::string &cfg_path);

  /// \brief Get the fusion switch configuration file path.
  ///
  /// \return Fusion switch configuration file path.
  inline std::string GetFusionSwitchConfigPath() const;

  /// \brief Set the buffer optimize mode.
  ///
  /// \param[in] buffer_optimize_mode Optional "l1_optimize", "l2_optimize", "off_optimize" or "l1_and_l2_optimize",
  /// default as "l2_optimize".
  inline void SetBufferOptimizeMode(const std::string &buffer_optimize_mode);

  /// \brief Get the buffer optimize mode.
  ///
  /// \return The buffer optimize mode.
  inline std::string GetBufferOptimizeMode() const;

 private:
  void SetInsertOpConfigPath(const std::vector<char> &cfg_path);
  std::vector<char> GetInsertOpConfigPathChar() const;

  void SetInputFormat(const std::vector<char> &format);
  std::vector<char> GetInputFormatChar() const;

  void SetInputShape(const std::vector<char> &shape);
  std::vector<char> GetInputShapeChar() const;

  std::vector<char> GetDynamicBatchSizeChar() const;

  void SetPrecisionMode(const std::vector<char> &precision_mode);
  std::vector<char> GetPrecisionModeChar() const;

  void SetOpSelectImplMode(const std::vector<char> &op_select_impl_mode);
  std::vector<char> GetOpSelectImplModeChar() const;

  void SetFusionSwitchConfigPath(const std::vector<char> &cfg_path);
  std::vector<char> GetFusionSwitchConfigPathChar() const;

  void SetBufferOptimizeMode(const std::vector<char> &buffer_optimize_mode);
  std::vector<char> GetBufferOptimizeModeChar() const;
};
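
// A minimal sketch of configuring the Ascend310 backend (illustrative only;
// the device id, input format, and shape string are assumptions, and
// `context` is an existing Context):
//
//   auto ascend_info = std::make_shared<mindspore::Ascend310DeviceInfo>();
//   ascend_info->SetDeviceID(0);
//   ascend_info->SetInputFormat("NCHW");
//   ascend_info->SetInputShape("input_op_name1: 1,3,224,224");
//   ascend_info->SetPrecisionMode("allow_fp32_to_fp16");
//   context->MutableDeviceInfo().push_back(ascend_info);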

void Ascend310DeviceInfo::SetInsertOpConfigPath(const std::string &cfg_path) {
  SetInsertOpConfigPath(StringToChar(cfg_path));
}
std::string Ascend310DeviceInfo::GetInsertOpConfigPath() const { return CharToString(GetInsertOpConfigPathChar()); }

void Ascend310DeviceInfo::SetInputFormat(const std::string &format) { SetInputFormat(StringToChar(format)); }
std::string Ascend310DeviceInfo::GetInputFormat() const { return CharToString(GetInputFormatChar()); }

void Ascend310DeviceInfo::SetInputShape(const std::string &shape) { SetInputShape(StringToChar(shape)); }
std::string Ascend310DeviceInfo::GetInputShape() const { return CharToString(GetInputShapeChar()); }

std::string Ascend310DeviceInfo::GetDynamicBatchSize() const { return CharToString(GetDynamicBatchSizeChar()); }

void Ascend310DeviceInfo::SetPrecisionMode(const std::string &precision_mode) {
  SetPrecisionMode(StringToChar(precision_mode));
}
std::string Ascend310DeviceInfo::GetPrecisionMode() const { return CharToString(GetPrecisionModeChar()); }

void Ascend310DeviceInfo::SetOpSelectImplMode(const std::string &op_select_impl_mode) {
  SetOpSelectImplMode(StringToChar(op_select_impl_mode));
}
std::string Ascend310DeviceInfo::GetOpSelectImplMode() const { return CharToString(GetOpSelectImplModeChar()); }

void Ascend310DeviceInfo::SetFusionSwitchConfigPath(const std::string &cfg_path) {
  SetFusionSwitchConfigPath(StringToChar(cfg_path));
}
std::string Ascend310DeviceInfo::GetFusionSwitchConfigPath() const {
  return CharToString(GetFusionSwitchConfigPathChar());
}

void Ascend310DeviceInfo::SetBufferOptimizeMode(const std::string &buffer_optimize_mode) {
  SetBufferOptimizeMode(StringToChar(buffer_optimize_mode));
}
std::string Ascend310DeviceInfo::GetBufferOptimizeMode() const { return CharToString(GetBufferOptimizeModeChar()); }
}  // namespace mindspore
#endif  // MINDSPORE_INCLUDE_API_CONTEXT_H