!46653 [MS][LITE][parallel predict] Add python ST: GPU cloud
Merge pull request !46653 from yefeng/478-add_python_ST
commit 562254073a
@@ -22,10 +22,10 @@ DelegateRegistry &DelegateRegistry::GetInstance() {
 }
 
 void DelegateRegistry::RegDelegate(const mindspore::DeviceType &device_type, const std::string &provider,
-                                   DelegateCreator creator) {
+                                   DelegateCreator *creator) {
   auto it = creator_map_.find(device_type);
   if (it == creator_map_.end()) {
-    HashMap<std::string, DelegateCreator> map;
+    HashMap<std::string, DelegateCreator *> map;
     map[provider] = creator;
     creator_map_[device_type] = map;
     return;
@@ -53,6 +53,6 @@ std::shared_ptr<GraphExecutor> DelegateRegistry::GetDelegate(const mindspore::De
   if (creator_it == it->second.end()) {
     return nullptr;
   }
-  return creator_it->second(ctx, config_infos);
+  return (*(creator_it->second))(ctx, config_infos);
 }
 }  // namespace mindspore
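The two hunks above switch DelegateRegistry from storing DelegateCreator by value to storing a DelegateCreator *, so GetDelegate now has to dereference the stored pointer before invoking it. A minimal, self-contained sketch of the same pattern; GraphExecutor, the creator signature, and the single-key map here are simplified stand-ins, not the real MindSpore Lite API:

#include <functional>
#include <iostream>
#include <memory>
#include <string>
#include <unordered_map>

struct GraphExecutor {};  // stand-in for the real executor type
using DelegateCreator = std::function<std::shared_ptr<GraphExecutor>()>;

class DelegateRegistry {
 public:
  // Store the address of a long-lived creator instead of copying it.
  void RegDelegate(const std::string &provider, DelegateCreator *creator) { map_[provider] = creator; }

  std::shared_ptr<GraphExecutor> GetDelegate(const std::string &provider) {
    auto it = map_.find(provider);
    if (it == map_.end()) {
      return nullptr;
    }
    return (*(it->second))();  // dereference the stored pointer, then invoke
  }

 private:
  std::unordered_map<std::string, DelegateCreator *> map_;
};

int main() {
  // The creator must outlive the registry entry, hence static storage duration.
  static DelegateCreator func = [] { return std::make_shared<GraphExecutor>(); };
  DelegateRegistry registry;
  registry.RegDelegate("gpu", &func);
  std::cout << (registry.GetDelegate("gpu") != nullptr) << std::endl;  // prints 1
}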
@@ -40,25 +40,28 @@ class MS_API DelegateRegistry {
 
   static DelegateRegistry &GetInstance();
 
-  void RegDelegate(const mindspore::DeviceType &device_type, const std::string &provider, DelegateCreator creator);
+  void RegDelegate(const mindspore::DeviceType &device_type, const std::string &provider, DelegateCreator *creator);
   void UnRegDelegate(const mindspore::DeviceType &device_type, const std::string &provider);
   std::shared_ptr<GraphExecutor> GetDelegate(const mindspore::DeviceType &device_type, const std::string &provider,
                                              const std::shared_ptr<Context> &ctx, const ConfigInfos &config_infos);
 
  private:
-  mindspore::HashMap<DeviceType, mindspore::HashMap<std::string, DelegateCreator>> creator_map_;
+  mindspore::HashMap<DeviceType, mindspore::HashMap<std::string, DelegateCreator *>> creator_map_;
 };
 
 class DelegateRegistrar {
  public:
-  DelegateRegistrar(const mindspore::DeviceType &device_type, const std::string &provider, DelegateCreator creator) {
+  DelegateRegistrar(const mindspore::DeviceType &device_type, const std::string &provider, DelegateCreator *creator) {
     DelegateRegistry::GetInstance().RegDelegate(device_type, provider, creator);
   }
   ~DelegateRegistrar() = default;
 };
 
 #define REG_DELEGATE(device_type, provider, creator) \
-  static DelegateRegistrar g_##device_type##provider##Delegate(device_type, provider, creator);
+  static DelegateCreator func = [=](const std::shared_ptr<Context> &context, const ConfigInfos &config_infos) { \
+    return creator(context, config_infos); \
+  }; \
+  static DelegateRegistrar g_##device_type##provider##Delegate(device_type, provider, &func);
 }  // namespace mindspore
 
 #endif  // MINDSPORE_LITE_SRC_EXTENDRT_DELEGATE_FACTORY_H_
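The interesting part of this header hunk is REG_DELEGATE: instead of passing the creator by value, the macro now materializes a file-scope static DelegateCreator wrapper and hands its address to DelegateRegistrar, so the pointer the registry stores can never dangle. A simplified, compilable sketch of that mechanism; the int-based creator signature and all names are illustrative, not the real header:

#include <functional>
#include <iostream>
#include <string>

using DelegateCreator = std::function<int(int)>;  // simplified creator signature

struct DelegateRegistrar {
  DelegateRegistrar(const std::string &provider, DelegateCreator *creator) {
    std::cout << "registered " << provider << ": " << (*creator)(21) << std::endl;
  }
};

// Mirrors the new REG_DELEGATE shape: wrap `creator` in a static
// std::function so its address stays valid for the program's lifetime.
#define REG_DELEGATE(provider, creator)                                    \
  static DelegateCreator g_##provider##Func = [=](int x) {                 \
    return creator(x);                                                     \
  };                                                                       \
  static DelegateRegistrar g_##provider##Delegate(#provider, &g_##provider##Func);

int DoubleIt(int x) { return x + x; }
REG_DELEGATE(demo, DoubleIt)  // runs during static initialization; prints "registered demo: 42"

int main() { return 0; }

(The sketch pastes the provider into the wrapper's name so several registrations can share a file; the real macro uses the single name func, which works when each translation unit registers once.)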
@@ -0,0 +1 @@
+yolox.onnx;1:input;1,3,640,640;;
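The new config file carries a single benchmark entry. Reading it against the field pattern of these test configs, the semicolon-separated fields appear to be: model file (yolox.onnx), input count and input name (1:input), and input shape (1,3,640,640, i.e. NCHW for a 640x640 image), with the trailing fields left empty. This breakdown is inferred from the entry's shape, not documented in the diff.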
@@ -10,7 +10,7 @@ function Run_python_ST() {
   backend=$6
   mindspore_lite_whl=`ls ${whl_path}/*.whl`
   if [[ -f "${mindspore_lite_whl}" ]]; then
-    pip install ${mindspore_lite_whl} --force-reinstall || exit 1
+    pip install ${mindspore_lite_whl} --force-reinstall --user || exit 1
     echo "install python whl success."
   else
     echo "not find python whl.."
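The only functional change in Run_python_ST is the added --user flag, which makes pip install the wheel into the user's site-packages rather than the system location; presumably this lets the CI job install without root permissions.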
@@ -1,5 +1,6 @@
 #!/bin/bash
 source ./scripts/base_functions.sh
+source ./scripts/run_benchmark_python.sh
 
 # Run converter on x86 platform:
 function Run_Converter() {
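The newly sourced run_benchmark_python.sh presumably provides the Run_python_ST helper that this script invokes below.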
@@ -250,5 +251,18 @@ if [[ $backend == "all" || $backend == "server_inference_x86_cloud_gpu" ]]; then
 fi
 
 Print_Benchmark_Result ${run_benchmark_result_file}
+
+# run python ST
+if [[ $backend == "all" || $backend == "server_inference_x86_cloud_gpu" ]]; then
+    models_python_config=${basepath}/../config_level0/models_python_gpu.cfg
+    models_python_cfg_file_list=("$models_python_config")
+    Run_python_ST ${basepath} ${x86_path} ${ms_models_path} ${models_path} "${models_python_cfg_file_list[*]}" "GPU"
+    Run_python_status=$?
+    if [[ ${Run_python_status} != 0 ]];then
+        echo "Run_python_status failed"
+        isFailed=1
+    fi
+fi
+
 echo "run x86_gpu_server_inference is ended"
 exit ${isFailed}
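The added block wires the Python ST into the existing GPU server-inference run: for the "all" or "server_inference_x86_cloud_gpu" backends it points Run_python_ST at the new models_python_gpu.cfg, passes "GPU" as the target backend, and folds a non-zero exit status into isFailed so the job still reports the failure at exit.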