!46653 [MS][LITE][parallel predict] Add python ST:Gpu cloud

Merge pull request !46653 from yefeng/478-add_python_ST
This commit is contained in:
i-robot 2023-01-03 12:31:39 +00:00 committed by Gitee
commit 562254073a
No known key found for this signature in database
GPG Key ID: 173E9B9CA92EEF8F
5 changed files with 27 additions and 9 deletions

View File

@@ -22,10 +22,10 @@ DelegateRegistry &DelegateRegistry::GetInstance() {
} }
void DelegateRegistry::RegDelegate(const mindspore::DeviceType &device_type, const std::string &provider, void DelegateRegistry::RegDelegate(const mindspore::DeviceType &device_type, const std::string &provider,
DelegateCreator creator) { DelegateCreator *creator) {
auto it = creator_map_.find(device_type); auto it = creator_map_.find(device_type);
if (it == creator_map_.end()) { if (it == creator_map_.end()) {
HashMap<std::string, DelegateCreator> map; HashMap<std::string, DelegateCreator *> map;
map[provider] = creator; map[provider] = creator;
creator_map_[device_type] = map; creator_map_[device_type] = map;
return; return;
@@ -53,6 +53,6 @@ std::shared_ptr<GraphExecutor> DelegateRegistry::GetDelegate(const mindspore::De
if (creator_it == it->second.end()) { if (creator_it == it->second.end()) {
return nullptr; return nullptr;
} }
return creator_it->second(ctx, config_infos); return (*(creator_it->second))(ctx, config_infos);
} }
} // namespace mindspore } // namespace mindspore

View File

@@ -40,25 +40,28 @@ class MS_API DelegateRegistry {
static DelegateRegistry &GetInstance(); static DelegateRegistry &GetInstance();
void RegDelegate(const mindspore::DeviceType &device_type, const std::string &provider, DelegateCreator creator); void RegDelegate(const mindspore::DeviceType &device_type, const std::string &provider, DelegateCreator *creator);
void UnRegDelegate(const mindspore::DeviceType &device_type, const std::string &provider); void UnRegDelegate(const mindspore::DeviceType &device_type, const std::string &provider);
std::shared_ptr<GraphExecutor> GetDelegate(const mindspore::DeviceType &device_type, const std::string &provider, std::shared_ptr<GraphExecutor> GetDelegate(const mindspore::DeviceType &device_type, const std::string &provider,
const std::shared_ptr<Context> &ctx, const ConfigInfos &config_infos); const std::shared_ptr<Context> &ctx, const ConfigInfos &config_infos);
private: private:
mindspore::HashMap<DeviceType, mindspore::HashMap<std::string, DelegateCreator>> creator_map_; mindspore::HashMap<DeviceType, mindspore::HashMap<std::string, DelegateCreator *>> creator_map_;
}; };
class DelegateRegistrar { class DelegateRegistrar {
public: public:
DelegateRegistrar(const mindspore::DeviceType &device_type, const std::string &provider, DelegateCreator creator) { DelegateRegistrar(const mindspore::DeviceType &device_type, const std::string &provider, DelegateCreator *creator) {
DelegateRegistry::GetInstance().RegDelegate(device_type, provider, creator); DelegateRegistry::GetInstance().RegDelegate(device_type, provider, creator);
} }
~DelegateRegistrar() = default; ~DelegateRegistrar() = default;
}; };
#define REG_DELEGATE(device_type, provider, creator) \ #define REG_DELEGATE(device_type, provider, creator) \
static DelegateRegistrar g_##device_type##provider##Delegate(device_type, provider, creator); static DelegateCreator func = [=](const std::shared_ptr<Context> &context, const ConfigInfos &config_infos) { \
return creator(context, config_infos); \
}; \
static DelegateRegistrar g_##device_type##provider##Delegate(device_type, provider, &func);
} // namespace mindspore } // namespace mindspore
#endif // MINDSPORE_LITE_SRC_EXTENDRT_DELEGATE_FACTORY_H_ #endif // MINDSPORE_LITE_SRC_EXTENDRT_DELEGATE_FACTORY_H_

View File

@@ -0,0 +1 @@
yolox.onnx;1:input;1,3,640,640;;

View File

@@ -10,7 +10,7 @@ function Run_python_ST() {
backend=$6 backend=$6
mindspore_lite_whl=`ls ${whl_path}/*.whl` mindspore_lite_whl=`ls ${whl_path}/*.whl`
if [[ -f "${mindspore_lite_whl}" ]]; then if [[ -f "${mindspore_lite_whl}" ]]; then
pip install ${mindspore_lite_whl} --force-reinstall || exit 1 pip install ${mindspore_lite_whl} --force-reinstall --user || exit 1
echo "install python whl success." echo "install python whl success."
else else
echo "not find python whl.." echo "not find python whl.."

View File

@@ -1,5 +1,6 @@
#!/bin/bash #!/bin/bash
source ./scripts/base_functions.sh source ./scripts/base_functions.sh
source ./scripts/run_benchmark_python.sh
# Run converter on x86 platform: # Run converter on x86 platform:
function Run_Converter() { function Run_Converter() {
@@ -250,5 +251,18 @@ if [[ $backend == "all" || $backend == "server_inference_x86_cloud_gpu" ]]; then
fi fi
Print_Benchmark_Result ${run_benchmark_result_file} Print_Benchmark_Result ${run_benchmark_result_file}
# run python ST
if [[ $backend == "all" || $backend == "server_inference_x86_cloud_gpu" ]]; then
models_python_config=${basepath}/../config_level0/models_python_gpu.cfg
models_python_cfg_file_list=("$models_python_config")
Run_python_ST ${basepath} ${x86_path} ${ms_models_path} ${models_path} "${models_python_cfg_file_list[*]}" "GPU"
Run_python_status=$?
if [[ ${Run_python_status} != 0 ]];then
echo "Run_python_status failed"
isFailed=1
fi
fi
echo "run x86_gpu_server_inference is ended" echo "run x86_gpu_server_inference is ended"
exit ${isFailed} exit ${isFailed}