forked from mindspore-Ecosystem/mindspore
!48157 [MS][LITE][parallel predict] runner java st
Merge pull request !48157 from yefeng/511-runner_java_ST
commit 6dea14ddd5
@@ -0,0 +1 @@
yolox.onnx;1:input;1,3,640,640;;
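Each entry in this config appears to be semicolon-separated: model file, input count with the input tensor name, then the input shape, with the trailing fields left empty. This reading is inferred from the values; a second, purely hypothetical entry following the same pattern would be resnet50.onnx;1:images;1,3,224,224;;.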
@@ -18,7 +18,11 @@ import com.mindspore.MSTensor;
import com.mindspore.config.DeviceType;
import com.mindspore.config.MSContext;
import com.mindspore.config.DataType;
import com.mindspore.config.RunnerConfig;
import com.mindspore.Model;
import com.mindspore.ModelParallelRunner;
import java.util.List;
import java.util.ArrayList;

import java.io.*;

@@ -112,6 +116,61 @@ public class Benchmark {
        String[] inDataFile = args[1].split(",");
        String benchmarkDataFile = args[2];
        float accuracy = Float.parseFloat(args[3]);
        if (args.length == 5 && args[4].equals("Runner")) {
            // init context with default parameters
            MSContext context = new MSContext();
            context.init(1, 0);
            boolean ret = context.addDeviceInfo(DeviceType.DT_CPU, false, 0);
            if (!ret) {
                System.err.println("init context failed");
                context.free();
                return;
            }
            // init runner config
            RunnerConfig config = new RunnerConfig();
            config.init(context);
            config.setWorkersNum(2);
            // init ModelParallelRunner
            ModelParallelRunner runner = new ModelParallelRunner();
            ret = runner.init(modelPath, config);
            if (!ret) {
                System.err.println("ModelParallelRunner init failed.");
                runner.free();
                return;
            }
            List<MSTensor> inputs = runner.getInputs();
            for (int index = 0; index < inputs.size(); index++) {
                MSTensor msTensor = inputs.get(index);
                if (msTensor.getDataType() != DataType.kNumberTypeFloat32) {
                    System.err.println("Input tensor data type is not float, the data type is " + msTensor.getDataType());
                    runner.free();
                    return;
                }
                // set input data
                byte[] data = readBinFile(inDataFile[index], (int) msTensor.size());
                msTensor.setData(data);
            }
            // init output
            List<MSTensor> outputs = new ArrayList<>();

            // run predict with the parallel runner
            ret = runner.predict(inputs, outputs);
            if (!ret) {
                System.err.println("MindSpore Lite predict failed.");
                runner.free();
                return;
            }
            System.out.println("========== model parallel runner predict success ==========");
            config.free();
            for (int i = 0; i < inputs.size(); i++) {
                inputs.get(i).free();
            }
            for (int i = 0; i < outputs.size(); i++) {
                outputs.get(i).free();
            }
            runner.free();
            return;
        }

        MSContext context = new MSContext();
        context.init(1, 0);
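The readBinFile helper called above is defined elsewhere in Benchmark.java and is not part of this hunk. A minimal sketch of what such a helper might look like, assuming it reads exactly size bytes of raw input data from the given path (the body below is illustrative, not taken from the patch):

    // Illustrative sketch only: read `size` bytes of raw tensor data from a file,
    // or return null if the file cannot be read in full.
    private static byte[] readBinFile(String fileName, int size) {
        try (DataInputStream in = new DataInputStream(new FileInputStream(fileName))) {
            byte[] buf = new byte[size];
            in.readFully(buf);
            return buf;
        } catch (IOException e) {
            System.err.println("read bin file failed: " + e.getMessage());
            return null;
        }
    }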
@@ -2,6 +2,41 @@
source ./scripts/base_functions.sh
source ./scripts/run_benchmark_python.sh

# Run on x86 java platform:
function Run_x86_java() {
    cd ${x86_path} || exit 1
    mkdir java || exit 1
    cp ${x86_path}/mindspore-lite-${version}-linux-x64.tar.gz ./java/ || exit 1
    cd ./java || exit 1
    tar -zxf mindspore-lite-${version}-linux-x64.tar.gz || exit 1
    # compile benchmark
    cd mindspore-lite-${version}-linux-x64 || exit 1
    export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./runtime/lib:./runtime/third_party/glog
    export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:./tools/converter/lib/:./runtime/third_party/glog
    echo "javac -cp ${x86_path}/java/mindspore-lite-${version}-linux-x64/runtime/lib/mindspore-lite-java.jar ${basepath}/java/src/main/java/Benchmark.java -d ."
    javac -cp ${x86_path}/java/mindspore-lite-${version}-linux-x64/runtime/lib/mindspore-lite-java.jar ${basepath}/java/src/main/java/Benchmark.java -d .

    # Run converted models:
    while read line; do
        model_name=`echo ${line} | awk -F ';' '{print $1}'`
        if [[ $model_name == \#* ]]; then
            continue
        fi
        echo ${model_name} >> "${run_x86_java_log_file}"
        echo "java -classpath .:${x86_path}/java/mindspore-lite-${version}-linux-x64/runtime/lib/mindspore-lite-java.jar Benchmark ${ms_models_path}/${model_name}.mindir ${models_path}/input_output/input/${model_name}.bin ${models_path}/input_output/output/${model_name}.out 1 Runner" >> "${run_x86_java_log_file}"
        java -classpath .:${x86_path}/java/mindspore-lite-${version}-linux-x64/runtime/lib/mindspore-lite-java.jar Benchmark ${ms_models_path}/${model_name}.mindir ${models_path}/input_output/input/${model_name}.bin ${models_path}/input_output/output/${model_name}.out 1 "Runner"
        if [ $? = 0 ]; then
            run_result='x86_java: '${model_name}' pass'; echo ${run_result} >> ${run_benchmark_result_file}
        else
            run_result='x86_java: '${model_name}' failed'; echo ${run_result} >> ${run_benchmark_result_file}
            cat ${run_x86_java_log_file}
            cat ${run_benchmark_result_file}
            exit 1
        fi
    done < ${models_java_config}
}
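Note that Run_x86_java takes no arguments: x86_path, version, basepath, ms_models_path, models_path, run_x86_java_log_file, run_benchmark_result_file, and models_java_config are all expected to be set by the surrounding script before the function is called.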

# Run converter on x86 platform:
function Run_Converter() {
    # Unzip x86 runtime and converter
@@ -168,4 +203,19 @@ if [[ $backend == "all" || $backend == "x86_cloud_onnx" ]]; then
    fi
fi

# run Java ST
if [[ $backend == "all" || $backend == "x86_cloud_onnx" ]]; then
    run_x86_java_log_file=${basepath}/run_x86_java_log.txt
    echo 'run x86 java logs: ' > ${run_x86_java_log_file}
    models_java_config=${basepath}/../config_level0/models_java_cpu_cloud.cfg
    Run_x86_java
    Run_java_status=$?
    if [[ ${Run_java_status} != 0 ]]; then
        cat ${run_x86_java_log_file}
        echo "Run x86 java failed"
        isFailed=1
    fi
fi

exit ${isFailed}