Adapt to version 1.0.1 and add HiMindSpore demo

This commit is contained in:
gongdaguo 2020-10-29 15:50:52 +08:00
parent 25c388e01e
commit 6245fdd584
99 changed files with 6537 additions and 25 deletions


@@ -7,6 +7,12 @@ efficientnet-lite4-11.onnx
mobilenetv2-7.onnx
shufflenet-v2-10.onnx
squeezenet1.1-7.onnx
densenet-9.onnx
googlenet-9.onnx
inception-v1-9.onnx
inception-v2-9.onnx
#shufflenet-9.onnx
squeezenet1.0-9.onnx
ml_face_3d.onnx
gts_version-RFB-320_simplified.onnx
mnist-8.onnx


@@ -0,0 +1,83 @@
# MindSpore
build/
app/src/main/cpp/mindspore-lite*
app/src/main/assets/model/
mindspore/lib
output
*.ir
mindspore/ccsrc/schema/inner/*
# Cmake files
CMakeFiles/
cmake_install.cmake
CMakeCache.txt
Makefile
cmake-build-debug
# Dynamic libraries
*.so
*.so.*
*.dylib
# Static libraries
*.la
*.lai
*.a
*.lib
# Protocol buffers
*_pb2.py
*.pb.h
*.pb.cc
# Object files
*.o
# Editor
.vscode
.idea/
# Cquery
.cquery_cached_index/
compile_commands.json
# Ctags and cscope
tags
TAGS
CTAGS
GTAGS
GRTAGS
GSYMS
GPATH
cscope.*
# Python files
*__pycache__*
.pytest_cache
# Mac files
*.DS_Store
# Test results
test_temp_summary_event_file/
*.dot
*.dat
*.svg
*.perf
*.info
*.ckpt
*.shp
*.pkl
.clangd
mindspore/version.py
mindspore/default_config.py
mindspore/.commit_id
onnx.proto
mindspore/ccsrc/onnx.proto
# Android
local.properties
.gradle
sdk/build
sdk/.cxx
app/.cxx


@@ -0,0 +1,2 @@
/build
/src/main/cpp/mindspore-lite-1.0.0-minddata-arm64-cpu/


@@ -0,0 +1,86 @@
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html
# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)
set(CMAKE_VERBOSE_MAKEFILE on)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/libs/${ANDROID_ABI})
set(MINDSPORELITE_VERSION mindspore-lite-1.0.1-runtime-arm64-cpu)
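# Note: this directory name must match the package unpacked into src/main/cpp by the
# downloadMindSporeLibrary / unzipMindSporeInclude tasks in download.gradle.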
# ============== Set MindSpore Dependencies. =============
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/third_party/flatbuffers/include)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION})
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/ir/dtype)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/schema)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/include)
add_library(mindspore-lite SHARED IMPORTED )
add_library(minddata-lite SHARED IMPORTED )
add_library(libmindspore-lite-fp16 SHARED IMPORTED )
set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite.so)
set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/lib/libminddata-lite.so)
set_target_properties(libmindspore-lite-fp16 PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite-fp16.so)
# --------------- MindSpore Lite set End. --------------------
# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
file(GLOB_RECURSE cpp_src "src/main/cpp/*.cpp" "src/main/cpp/*.h")
add_library( # Sets the name of the library.
mlkit-label-MS
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
${cpp_src})
# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.
find_library( # Sets the name of the path variable.
log-lib
# Specifies the name of the NDK library that
# you want CMake to locate.
log )
find_library( jnigraphics-lib jnigraphics )
# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.
add_definitions(-DMNN_USE_LOGCAT)
target_link_libraries( # Specifies the target library.
mlkit-label-MS
# --- mindspore ---
minddata-lite
mindspore-lite
libmindspore-lite-fp16
# --- other dependencies.---
-ljnigraphics
android
# Links the target library to the log library
${log-lib}
)


@@ -0,0 +1,72 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion 30
buildToolsVersion "30.0.1"
defaultConfig {
applicationId "com.mindspore.himindspore"
minSdkVersion 21
targetSdkVersion 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
arguments "-DANDROID_STL=c++_shared"
cppFlags "-std=c++17"
}
}
ndk {
abiFilters 'arm64-v8a'
}
}
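// Keep the native .so libraries and .ms model files uncompressed in the APK so they can
// be loaded directly at runtime.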
aaptOptions {
noCompress '.so', 'ms'
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
customDebugType {
debuggable true
}
}
externalNativeBuild {
cmake {
path file('CMakeLists.txt')
}
}
ndkVersion '21.3.6528147'
sourceSets{
main {
jniLibs.srcDirs = ['libs']
}
}
packagingOptions{
pickFirst 'lib/arm64-v8a/libmlkit-label-MS.so'
}
}
// Runs before the Gradle build to download the necessary libraries.
apply from:'download.gradle'
dependencies {
implementation fileTree(dir: "libs", include: ["*.jar"])
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'androidx.constraintlayout:constraintlayout:2.0.2'
implementation 'androidx.cardview:cardview:1.0.0'
testImplementation 'junit:junit:4.13.1'
androidTestImplementation 'androidx.test.ext:junit:1.1.2'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
implementation 'com.sun.mail:android-mail:1.6.5'
implementation 'com.sun.mail:android-activation:1.6.5'
}


@@ -0,0 +1,115 @@
/**
* Downloads the necessary libraries from the Huawei server, including the
* mindspore-lite .so file, the minddata-lite .so file, and the model files.
* The libraries can also be downloaded manually.
*/
def targetMindSporeInclude = "src/main/cpp/"
def mindsporeLite_Version = "mindspore-lite-1.0.1-runtime-arm64-cpu"
def targetModelFile = "src/main/assets/model/mobilenetv2.ms"
def mindSporeLibrary_arm64 = "src/main/cpp/${mindsporeLite_Version}.tar.gz"
def modelDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms"
//def mindsporeLiteDownloadUrl = "https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/${mindsporeLite_Version}.tar.gz"
def mindsporeLiteDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/lib/mindspore%20version%201.0.1/${mindsporeLite_Version}.tar.gz"
def targetObjectModelFile = "src/main/assets/model/ssd.ms"
def targetGarbageModelFile = "src/main/assets/model/garbage_mobilenetv2.ms"
def modelObjectDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/ssd_mobilenetv2_lite/ssd.ms"
def modelGarbageDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/garbage_mobilenetv2_lite/garbage_mobilenetv2.ms"
def cleantargetMindSporeInclude = "src/main/cpp"
task cleanCmakeCache(type: Delete) {
delete '.cxx/cmake/debug'
delete '.cxx/cmake/release'
}
task downloadModelFile(type: DownloadUrlTask) {
doFirst {
println "Downloading ${modelDownloadUrl}"
}
sourceUrl = "${modelDownloadUrl}"
target = file("${targetModelFile}")
}
task downloadObjectModelFile(type: DownloadUrlTask) {
doFirst {
println "Downloading ${modelObjectDownloadUrl}"
}
sourceUrl = "${modelObjectDownloadUrl}"
target = file("${targetObjectModelFile}")
}
task downloadGarbageModelFile(type: DownloadUrlTask) {
doFirst {
println "Downloading ${modelGarbageDownloadUrl}"
}
sourceUrl = "${modelGarbageDownloadUrl}"
target = file("${targetGarbageModelFile}")
}
task downloadMindSporeLibrary(type: DownloadUrlTask) {
doFirst {
println "Downloading ${mindsporeLiteDownloadUrl}"
}
sourceUrl = "${mindsporeLiteDownloadUrl}"
target = file("${mindSporeLibrary_arm64}")
}
task unzipMindSporeInclude(type: Copy, dependsOn: 'downloadMindSporeLibrary') {
doFirst {
println "Unzipping ${mindSporeLibrary_arm64}"
}
from tarTree(resources.gzip("${mindSporeLibrary_arm64}"))
into "${targetMindSporeInclude}"
}
task cleanUnusedmindsporeFiles(type: Delete, dependsOn: ['unzipMindSporeInclude']) {
delete fileTree("${cleantargetMindSporeInclude}").matching {
include "*.tar.gz"
}
}
/*
* Use preBuild to download the MindSpore library and model files.
* These tasks run before the Gradle build.
*/
if (file("src/main/cpp/${mindsporeLite_Version}/lib/libmindspore-lite.so").exists()){
downloadMindSporeLibrary.enabled = false
unzipMindSporeInclude.enabled = false
cleanUnusedmindsporeFiles.enabled = false
}
if (file("src/main/assets/model/garbage_mobilenetv2.ms").exists()){
downloadGarbageModelFile.enabled = false
}
if (file("src/main/assets/model/mobilenetv2.ms").exists()){
downloadModelFile.enabled = false
}
if (file("src/main/assets/model/ssd.ms").exists()){
downloadObjectModelFile.enabled = false
}
preBuild.dependsOn cleanCmakeCache
preBuild.dependsOn downloadModelFile
preBuild.dependsOn downloadObjectModelFile
preBuild.dependsOn downloadMindSporeLibrary
preBuild.dependsOn downloadGarbageModelFile
preBuild.dependsOn unzipMindSporeInclude
preBuild.dependsOn cleanUnusedmindsporeFiles
class DownloadUrlTask extends DefaultTask {
@Input
String sourceUrl
@OutputFile
File target
@TaskAction
void download() {
ant.get(src: sourceUrl, dest: target)
}
}


@@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile


@@ -0,0 +1,26 @@
package com.mindspore.himindspore;
import android.content.Context;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
@Test
public void useAppContext() {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
assertEquals("com.mindspore.himindspore", appContext.getPackageName());
}
}


@@ -0,0 +1,58 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.mindspore.himindspore">
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_INTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_INTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.MOUNT_UNMOUNT_FILESYSTEM" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.INTERNET" />
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:requestLegacyExternalStorage="true"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity
android:name=".SplashActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity
android:name=".imageclassification.ui.ImageMainActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar"/>
<activity
android:name=".contract.ContractActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar" />
<activity
android:name=".objectdetection.ui.ObjectDetectionMainActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar" />
<activity
android:name=".objectdetection.ui.ObjectCameraActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar" />
<activity
android:name=".objectdetection.ui.PhotoActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar" />
<activity
android:name=".imageclassification.ui.ImageCameraActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar" />
</application>
</manifest>


@@ -0,0 +1,338 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <android/bitmap.h>
#include <android/asset_manager_jni.h>
#include <android/log.h>
#include <utility>
#include <cstring>
#include <vector>
#include <string>
#include <unordered_map>
#include <set>
#include "include/errorcode.h"
#include "include/ms_tensor.h"
#include "GarbageMindSporeNetnative.h"
#include "MSNetWork.h"
#include "lite_cv/lite_mat.h"
#include "lite_cv/image_process.h"
using mindspore::dataset::LiteMat;
using mindspore::dataset::LPixelType;
using mindspore::dataset::LDataType;
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
static const int RET_GARBAGE_SORT_SUM = 4;
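// The four top-level garbage categories below are, in order:
// recyclable waste (可回收物), residual/dry waste (干垃圾),
// hazardous waste (有害垃圾), and food/wet waste (湿垃圾).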
static const char *labels_name_grbage_sort_map[RET_GARBAGE_SORT_SUM] = {{"可回收物"},
{"干垃圾"},
{"有害垃圾"},
{"湿垃圾"}};
static const int RET_GARBAGE_DETAILED_SUM = 26;
static const char *labels_name_grbage_detailed_map[RET_GARBAGE_DETAILED_SUM] = {
{"塑料瓶"},
{"帽子"},
{"报纸"},
{"易拉罐"},
{"玻璃制品"},
{"玻璃瓶"},
{"硬纸板"},
{"篮球"},
{"纸张"},
{"金属制品"},
{"一次性筷子"},
{"打火机"},
{"扫把"},
{"旧镜子"},
{"牙刷"},
{"脏污衣服"},
{"贝壳"},
{"陶瓷碗"},
{"油漆桶"},
{"电池"},
{"荧光灯"},
{"药片胶囊"},
{"橙皮"},
{"菜叶"},
{"蛋壳"},
{"香蕉皮"}};
char *CreateLocalModelBuffer(JNIEnv *env, jobject modelBuffer) {
jbyte *modelAddr = static_cast<jbyte *>(env->GetDirectBufferAddress(modelBuffer));
int modelLen = static_cast<int>(env->GetDirectBufferCapacity(modelBuffer));
char *buffer(new char[modelLen]);
memcpy(buffer, modelAddr, modelLen);
return buffer;
}
/**
* To process the result of mindspore inference.
* @param msOutputs
* @return
*/
std::string GarbageProcessRunnetResult(const int RET_CATEGORY_SUM, const char *const labels_name_map[],
std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs) {
// Get the branch of the model output.
// Use iterators to get map elements.
std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
iter = msOutputs.begin();
// The garbage classification model outputs only one branch.
auto outputTensor = iter->second;
// Get a pointer to the first score.
float *temp_scores = static_cast<float *>(outputTensor->MutableData());
float max = 0.0;
unsigned int maxIndex = 0;
for (unsigned int i = 0; i < RET_CATEGORY_SUM; ++i) {
if (temp_scores[i] > max) {
max = temp_scores[i];
maxIndex = i;
}
}
// Map the best detailed label to one of the four top-level garbage categories
// and build the text to be displayed in the app.
std::string categoryScore = "";
if (maxIndex <= 9) {
categoryScore += labels_name_grbage_sort_map[0];
categoryScore += ":";
} else if (maxIndex > 9 && maxIndex <= 17) {
categoryScore += labels_name_grbage_sort_map[1];
categoryScore += ":";
} else if (maxIndex > 17 && maxIndex <= 21) {
categoryScore += labels_name_grbage_sort_map[2];
categoryScore += ":";
} else if (maxIndex > 21 && maxIndex <= 25) {
categoryScore += labels_name_grbage_sort_map[3];
categoryScore += ":";
}
categoryScore += labels_name_map[maxIndex];
return categoryScore;
}
bool BitmapToLiteMat(JNIEnv *env, const jobject &srcBitmap, LiteMat *lite_mat) {
bool ret = false;
AndroidBitmapInfo info;
void *pixels = nullptr;
LiteMat &lite_mat_bgr = *lite_mat;
AndroidBitmap_getInfo(env, srcBitmap, &info);
if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
MS_PRINT("Image Err, Request RGBA");
return false;
}
AndroidBitmap_lockPixels(env, srcBitmap, &pixels);
if (info.stride == info.width * 4) {
ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels),
LPixelType::RGBA2RGB, LDataType::UINT8,
info.width, info.height, lite_mat_bgr);
if (!ret) {
MS_PRINT("Init From RGBA error");
}
} else {
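// The bitmap rows are padded (stride > width * 4), so copy them row by row
// into a tightly packed RGBA buffer before converting.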
unsigned char *pixels_ptr = new unsigned char[info.width * info.height * 4];
unsigned char *ptr = pixels_ptr;
unsigned char *data = reinterpret_cast<unsigned char *>(pixels);
for (int i = 0; i < info.height; i++) {
memcpy(ptr, data, info.width * 4);
ptr += info.width * 4;
data += info.stride;
}
ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels_ptr),
LPixelType::RGBA2RGB, LDataType::UINT8,
info.width, info.height, lite_mat_bgr);
if (!ret) {
MS_PRINT("Init From RGBA error");
}
delete[] (pixels_ptr);
}
AndroidBitmap_unlockPixels(env, srcBitmap);
return ret;
}
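// Standard ImageNet-style preprocessing for the MobileNetV2 classifier: resize to 256x256,
// scale pixel values to [0, 1], crop a 224x224 window at offset (16, 16), then normalize
// with the ImageNet channel means and standard deviations.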
bool PreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr) {
bool ret = false;
LiteMat lite_mat_resize;
LiteMat &lite_norm_mat_cut = *lite_norm_mat_ptr;
ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 256, 256);
if (!ret) {
MS_PRINT("ResizeBilinear error");
return false;
}
LiteMat lite_mat_convert_float;
ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0);
if (!ret) {
MS_PRINT("ConvertTo error");
return false;
}
LiteMat lite_mat_cut;
ret = Crop(lite_mat_convert_float, lite_mat_cut, 16, 16, 224, 224);
if (!ret) {
MS_PRINT("Crop error");
return false;
}
std::vector<float> means = {0.485, 0.456, 0.406};
std::vector<float> stds = {0.229, 0.224, 0.225};
SubStractMeanNormalize(lite_mat_cut, lite_norm_mat_cut, means, stds);
return true;
}
/**
* The Java layer reads the model into MappedByteBuffer or ByteBuffer to load the model.
*/
extern "C"
JNIEXPORT jlong JNICALL
Java_com_mindspore_himindspore_imageclassification_help_GarbageTrackingMobile_loadModel(JNIEnv *env,
jobject thiz,
jobject model_buffer,
jint num_thread) {
if (nullptr == model_buffer) {
MS_PRINT("error, buffer is nullptr!");
return (jlong) nullptr;
}
jlong bufferLen = env->GetDirectBufferCapacity(model_buffer);
if (0 == bufferLen) {
MS_PRINT("error, bufferLen is 0!");
return (jlong) nullptr;
}
char *modelBuffer = CreateLocalModelBuffer(env, model_buffer);
if (modelBuffer == nullptr) {
MS_PRINT("modelBuffer create failed!");
return (jlong) nullptr;
}
// To create a mindspore network inference environment.
void **labelEnv = new void *;
MSNetWork *labelNet = new MSNetWork;
*labelEnv = labelNet;
mindspore::lite::Context *context = new mindspore::lite::Context;
context->thread_num_ = num_thread;
labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
delete context;
if (labelNet->session() == nullptr) {
MS_PRINT("MindSpore create session failed!.");
delete labelNet;
delete labelEnv;
return (jlong) nullptr;
}
if (model_buffer != nullptr) {
env->DeleteLocalRef(model_buffer);
}
return (jlong) labelEnv;
}
/**
* After the inference environment is successfully created,
* send a picture to the model and run inference.
*/
extern "C" JNIEXPORT jstring JNICALL
Java_com_mindspore_himindspore_imageclassification_help_GarbageTrackingMobile_runNet(JNIEnv *env, jclass type,
jlong netEnv,
jobject srcBitmap) {
LiteMat lite_mat_bgr, lite_norm_mat_cut;
if (!BitmapToLiteMat(env, srcBitmap, &lite_mat_bgr)) {
MS_PRINT("BitmapToLiteMat error");
return NULL;
}
if (!PreProcessImageData(lite_mat_bgr, &lite_norm_mat_cut)) {
MS_PRINT("PreProcessImageData error");
return NULL;
}
ImgDims inputDims;
inputDims.channel = lite_norm_mat_cut.channel_;
inputDims.width = lite_norm_mat_cut.width_;
inputDims.height = lite_norm_mat_cut.height_;
// Get the MindSpore inference environment that was created in loadModel().
void **labelEnv = reinterpret_cast<void **>(netEnv);
if (labelEnv == nullptr) {
MS_PRINT("MindSpore error, labelEnv is a nullptr.");
return NULL;
}
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
auto mSession = labelNet->session();
if (mSession == nullptr) {
MS_PRINT("MindSpore error, Session is a nullptr.");
return NULL;
}
MS_PRINT("MindSpore get session.");
auto msInputs = mSession->GetInputs();
if (msInputs.size() == 0) {
MS_PRINT("MindSpore error, msInputs.size() equals 0.");
return NULL;
}
auto inTensor = msInputs.front();
float *dataHWC = reinterpret_cast<float *>(lite_norm_mat_cut.data_ptr_);
// Copy dataHWC to the model input tensor.
memcpy(inTensor->MutableData(), dataHWC,
inputDims.channel * inputDims.width * inputDims.height * sizeof(float));
// After the model and image tensor data is loaded, run inference.
auto status = mSession->RunGraph();
if (status != mindspore::lite::RET_OK) {
MS_PRINT("MindSpore run net error.");
return NULL;
}
/**
* Get the mindspore inference results.
* Return the map of output node name and MindSpore Lite MSTensor.
*/
auto names = mSession->GetOutputTensorNames();
std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs;
for (const auto &name : names) {
auto temp_dat = mSession->GetOutputByTensorName(name);
msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *>{name, temp_dat});
}
std::string resultStr = GarbageProcessRunnetResult(::RET_GARBAGE_DETAILED_SUM,
::labels_name_grbage_detailed_map, msOutputs);
const char *resultCharData = resultStr.c_str();
return (env)->NewStringUTF(resultCharData);
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_mindspore_himindspore_imageclassification_help_GarbageTrackingMobile_unloadModel(JNIEnv *env,
jclass type,
jlong netEnv) {
MS_PRINT("MindSpore release net.");
void **labelEnv = reinterpret_cast<void **>(netEnv);
if (labelEnv == nullptr) {
MS_PRINT("MindSpore error, labelEnv is a nullptr.");
}
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
labelNet->ReleaseNets();
return (jboolean) true;
}
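A minimal Java-side sketch of how these natives might be wrapped and invoked. The declarations are implied by the JNI symbol names above; the class body, the library-loading block, and the asset-reading helper are illustrative assumptions rather than the actual GarbageTrackingMobile source, which is not part of this excerpt.

package com.mindspore.himindspore.imageclassification.help;

import android.content.Context;
import android.graphics.Bitmap;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;

public class GarbageTrackingMobile {
    static {
        // Matches the add_library() target in CMakeLists.txt (libmlkit-label-MS.so).
        System.loadLibrary("mlkit-label-MS");
    }

    // Declarations implied by the JNI symbols in this file.
    public native long loadModel(ByteBuffer buffer, int numThread);

    public static native String runNet(long netEnv, Bitmap img);

    public static native boolean unloadModel(long netEnv);

    // Illustrative helper (assumed): read the .ms model from assets into a direct buffer,
    // because loadModel() uses GetDirectBufferAddress()/GetDirectBufferCapacity().
    public static ByteBuffer readModelFromAssets(Context context, String assetPath) throws IOException {
        try (InputStream is = context.getAssets().open(assetPath)) {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            byte[] chunk = new byte[8192];
            int n;
            while ((n = is.read(chunk)) > 0) {
                out.write(chunk, 0, n);
            }
            byte[] bytes = out.toByteArray();
            ByteBuffer buffer = ByteBuffer.allocateDirect(bytes.length);
            buffer.put(bytes);
            buffer.rewind();
            return buffer;
        }
    }
}

Typical flow (assumed): create the wrapper, call loadModel() with the direct buffer and a thread count, pass the returned handle together with a Bitmap to runNet(), and release everything with unloadModel().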


@@ -0,0 +1,21 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef GARBAGE_MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H
#define GARBAGE_MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H
#endif  // GARBAGE_MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H


@@ -0,0 +1,811 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <android/bitmap.h>
#include <android/asset_manager_jni.h>
#include <android/log.h>
#include <utility>
#include <cstring>
#include <vector>
#include <string>
#include <unordered_map>
#include <set>
#include "include/errorcode.h"
#include "include/ms_tensor.h"
#include "ImageMindSporeNetnative.h"
#include "MSNetWork.h"
#include "lite_cv/lite_mat.h"
#include "lite_cv/image_process.h"
using mindspore::dataset::LiteMat;
using mindspore::dataset::LPixelType;
using mindspore::dataset::LDataType;
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
static const int RET_CATEGORY_SUM = 410;
static const char *labels_name_map[RET_CATEGORY_SUM] = {
{"Herd"},
{"Safari"},
{"Bangle"},
{"Cushion"},
{"Countertop"},
{"Prom"},
{"Branch"},
{"Sports"},
{"Sky"},
{"Community"},
{"Wheel"},
{"Cola"},
{"Tuxedo"},
{"Flowerpot"},
{"Team"},
{"Computer"},
{"Unicycle"},
{"Brig"},
{"Aerospace engineering"},
{"Scuba diving"},
{"Goggles"},
{"Fruit"},
{"Badminton"},
{"Horse"},
{"Sunglasses"},
{"Fun"},
{"Prairie"},
{"Poster"},
{"Flag"},
{"Speedboat"},
{"Eyelash"},
{"Veil"},
{"Mobile phone"},
{"Wheelbarrow"},
{"Saucer"},
{"Leather"},
{"Drawer"},
{"Paper"},
{"Pier"},
{"Waterfowl"},
{"Tights"},
{"Rickshaw"},
{"Vegetable"},
{"Handrail"},
{"Ice"},
{"Metal"},
{"Flower"},
{"Wing"},
{"Silverware"},
{"Event"},
{"Skyline"},
{"Money"},
{"Comics"},
{"Handbag"},
{"Porcelain"},
{"Rodeo"},
{"Curtain"},
{"Tile"},
{"Human mouth"},
{"Army"},
{"Menu"},
{"Boat"},
{"Snowboarding"},
{"Cairn terrier"},
{"Net"},
{"Pasteles"},
{"Cup"},
{"Rugby"},
{"Pho"},
{"Cap"},
{"Human hair"},
{"Surfing"},
{"Loveseat"},
{"Museum"},
{"Shipwreck"},
{"Trunk (Tree)"},
{"Plush"},
{"Monochrome"},
{"Volcano"},
{"Rock"},
{"Pillow"},
{"Presentation"},
{"Nebula"},
{"Subwoofer"},
{"Lake"},
{"Sledding"},
{"Bangs"},
{"Tablecloth"},
{"Necklace"},
{"Swimwear"},
{"Standing"},
{"Jeans"},
{"Carnival"},
{"Softball"},
{"Centrepiece"},
{"Skateboarder"},
{"Cake"},
{"Dragon"},
{"Aurora"},
{"Skiing"},
{"Bathroom"},
{"Dog"},
{"Needlework"},
{"Umbrella"},
{"Church"},
{"Fire"},
{"Piano"},
{"Denim"},
{"Bridle"},
{"Cabinetry"},
{"Lipstick"},
{"Ring"},
{"Television"},
{"Roller"},
{"Seal"},
{"Concert"},
{"Product"},
{"News"},
{"Fast food"},
{"Horn (Animal)"},
{"Tattoo"},
{"Bird"},
{"Bridegroom"},
{"Love"},
{"Helmet"},
{"Dinosaur"},
{"Icing"},
{"Miniature"},
{"Tire"},
{"Toy"},
{"Icicle"},
{"Jacket"},
{"Coffee"},
{"Mosque"},
{"Rowing"},
{"Wetsuit"},
{"Camping"},
{"Underwater"},
{"Christmas"},
{"Gelato"},
{"Whiteboard"},
{"Field"},
{"Ragdoll"},
{"Construction"},
{"Lampshade"},
{"Palace"},
{"Meal"},
{"Factory"},
{"Cage"},
{"Clipper (Boat)"},
{"Gymnastics"},
{"Turtle"},
{"Human foot"},
{"Marriage"},
{"Web page"},
{"Human beard"},
{"Fog"},
{"Wool"},
{"Cappuccino"},
{"Lighthouse"},
{"Lego"},
{"Sparkler"},
{"Sari"},
{"Model"},
{"Temple"},
{"Beanie"},
{"Building"},
{"Waterfall"},
{"Penguin"},
{"Cave"},
{"Stadium"},
{"Smile"},
{"Human hand"},
{"Park"},
{"Desk"},
{"Shetland sheepdog"},
{"Bar"},
{"Eating"},
{"Neon"},
{"Dalmatian"},
{"Crocodile"},
{"Wakeboarding"},
{"Longboard"},
{"Road"},
{"Race"},
{"Kitchen"},
{"Odometer"},
{"Cliff"},
{"Fiction"},
{"School"},
{"Interaction"},
{"Bullfighting"},
{"Boxer"},
{"Gown"},
{"Aquarium"},
{"Superhero"},
{"Pie"},
{"Asphalt"},
{"Surfboard"},
{"Cheeseburger"},
{"Screenshot"},
{"Supper"},
{"Laugh"},
{"Lunch"},
{"Party "},
{"Glacier"},
{"Bench"},
{"Grandparent"},
{"Sink"},
{"Pomacentridae"},
{"Blazer"},
{"Brick"},
{"Space"},
{"Backpacking"},
{"Stuffed toy"},
{"Sushi"},
{"Glitter"},
{"Bonfire"},
{"Castle"},
{"Marathon"},
{"Pizza"},
{"Beach"},
{"Human ear"},
{"Racing"},
{"Sitting"},
{"Iceberg"},
{"Shelf"},
{"Vehicle"},
{"Pop music"},
{"Playground"},
{"Clown"},
{"Car"},
{"Rein"},
{"Fur"},
{"Musician"},
{"Casino"},
{"Baby"},
{"Alcohol"},
{"Strap"},
{"Reef"},
{"Balloon"},
{"Outerwear"},
{"Cathedral"},
{"Competition"},
{"Joker"},
{"Blackboard"},
{"Bunk bed"},
{"Bear"},
{"Moon"},
{"Archery"},
{"Polo"},
{"River"},
{"Fishing"},
{"Ferris wheel"},
{"Mortarboard"},
{"Bracelet"},
{"Flesh"},
{"Statue"},
{"Farm"},
{"Desert"},
{"Chain"},
{"Aircraft"},
{"Textile"},
{"Hot dog"},
{"Knitting"},
{"Singer"},
{"Juice"},
{"Circus"},
{"Chair"},
{"Musical instrument"},
{"Room"},
{"Crochet"},
{"Sailboat"},
{"Newspaper"},
{"Santa claus"},
{"Swamp"},
{"Skyscraper"},
{"Skin"},
{"Rocket"},
{"Aviation"},
{"Airliner"},
{"Garden"},
{"Ruins"},
{"Storm"},
{"Glasses"},
{"Balance"},
{"Nail (Body part)"},
{"Rainbow"},
{"Soil "},
{"Vacation "},
{"Moustache"},
{"Doily"},
{"Food"},
{"Bride "},
{"Cattle"},
{"Pocket"},
{"Infrastructure"},
{"Train"},
{"Gerbil"},
{"Fireworks"},
{"Pet"},
{"Dam"},
{"Crew"},
{"Couch"},
{"Bathing"},
{"Quilting"},
{"Motorcycle"},
{"Butterfly"},
{"Sled"},
{"Watercolor paint"},
{"Rafting"},
{"Monument"},
{"Lightning"},
{"Sunset"},
{"Bumper"},
{"Shoe"},
{"Waterskiing"},
{"Sneakers"},
{"Tower"},
{"Insect"},
{"Pool"},
{"Placemat"},
{"Airplane"},
{"Plant"},
{"Jungle"},
{"Armrest"},
{"Duck"},
{"Dress"},
{"Tableware"},
{"Petal"},
{"Bus"},
{"Hanukkah"},
{"Forest"},
{"Hat"},
{"Barn"},
{"Tubing"},
{"Snorkeling"},
{"Cool"},
{"Cookware and bakeware"},
{"Cycling"},
{"Swing (Seat)"},
{"Muscle"},
{"Cat"},
{"Skateboard"},
{"Star"},
{"Toe"},
{"Junk"},
{"Bicycle"},
{"Bedroom"},
{"Person"},
{"Sand"},
{"Canyon"},
{"Tie"},
{"Twig"},
{"Sphynx"},
{"Supervillain"},
{"Nightclub"},
{"Ranch"},
{"Pattern"},
{"Shorts"},
{"Himalayan"},
{"Wall"},
{"Leggings"},
{"Windsurfing"},
{"Deejay"},
{"Dance"},
{"Van"},
{"Bento"},
{"Sleep"},
{"Wine"},
{"Picnic"},
{"Leisure"},
{"Dune"},
{"Crowd"},
{"Kayak"},
{"Ballroom"},
{"Selfie"},
{"Graduation"},
{"Frigate"},
{"Mountain"},
{"Dude"},
{"Windshield"},
{"Skiff"},
{"Class"},
{"Scarf"},
{"Bull"},
{"Soccer"},
{"Bag"},
{"Basset hound"},
{"Tractor"},
{"Swimming"},
{"Running"},
{"Track"},
{"Helicopter"},
{"Pitch"},
{"Clock"},
{"Song"},
{"Jersey"},
{"Stairs"},
{"Flap"},
{"Jewellery"},
{"Bridge"},
{"Cuisine"},
{"Bread"},
{"Caving"},
{"Shell"},
{"Wreath"},
{"Roof"},
{"Cookie"},
{"Canoe"}};
static float g_thres_map[RET_CATEGORY_SUM] = {
0.23, 0.03, 0.10, 0.13, 0.03,
0.10, 0.06, 0.09, 0.09, 0.05,
0.01, 0.04, 0.01, 0.27, 0.05,
0.16, 0.01, 0.16, 0.04, 0.13,
0.09, 0.18, 0.10, 0.65, 0.08,
0.04, 0.08, 0.01, 0.05, 0.20,
0.01, 0.16, 0.10, 0.10, 0.10,
0.02, 0.24, 0.08, 0.10, 0.53,
0.07, 0.05, 0.07, 0.27, 0.02,
0.01, 0.71, 0.01, 0.06, 0.06,
0.03, 0.96, 0.03, 0.94, 0.05,
0.03, 0.14, 0.09, 0.03, 0.11,
0.50, 0.16, 0.07, 0.07, 0.06,
0.07, 0.08, 0.10, 0.29, 0.03,
0.05, 0.11, 0.03, 0.03, 0.03,
0.01, 0.11, 0.07, 0.03, 0.49,
0.12, 0.30, 0.10, 0.15, 0.02,
0.06, 0.17, 0.01, 0.04, 0.07,
0.06, 0.02, 0.19, 0.20, 0.14,
0.35, 0.15, 0.01, 0.10, 0.13,
0.43, 0.11, 0.12, 0.32, 0.01,
0.22, 0.51, 0.02, 0.04, 0.14,
0.04, 0.35, 0.35, 0.01, 0.54,
0.04, 0.02, 0.03, 0.02, 0.38,
0.13, 0.19, 0.06, 0.01, 0.02,
0.06, 0.03, 0.04, 0.01, 0.10,
0.01, 0.07, 0.07, 0.07, 0.33,
0.08, 0.04, 0.06, 0.07, 0.07,
0.11, 0.02, 0.32, 0.48, 0.14,
0.01, 0.01, 0.04, 0.05, 0.04,
0.16, 0.50, 0.11, 0.03, 0.04,
0.02, 0.55, 0.17, 0.13, 0.84,
0.18, 0.03, 0.16, 0.02, 0.06,
0.03, 0.11, 0.96, 0.36, 0.68,
0.02, 0.08, 0.02, 0.01, 0.03,
0.05, 0.14, 0.09, 0.06, 0.03,
0.20, 0.15, 0.62, 0.03, 0.10,
0.08, 0.02, 0.02, 0.06, 0.03,
0.04, 0.01, 0.10, 0.05, 0.04,
0.02, 0.07, 0.03, 0.32, 0.11,
0.03, 0.02, 0.03, 0.01, 0.03,
0.03, 0.25, 0.20, 0.19, 0.03,
0.11, 0.03, 0.02, 0.03, 0.15,
0.14, 0.06, 0.11, 0.03, 0.02,
0.02, 0.52, 0.03, 0.02, 0.02,
0.02, 0.09, 0.56, 0.01, 0.22,
0.01, 0.48, 0.14, 0.10, 0.08,
0.73, 0.39, 0.09, 0.10, 0.85,
0.31, 0.03, 0.05, 0.01, 0.01,
0.01, 0.10, 0.28, 0.02, 0.03,
0.04, 0.03, 0.07, 0.14, 0.20,
0.10, 0.01, 0.05, 0.37, 0.12,
0.04, 0.44, 0.04, 0.26, 0.08,
0.07, 0.27, 0.10, 0.03, 0.01,
0.03, 0.16, 0.41, 0.16, 0.34,
0.04, 0.30, 0.04, 0.05, 0.18,
0.33, 0.03, 0.21, 0.03, 0.04,
0.22, 0.01, 0.04, 0.02, 0.01,
0.06, 0.02, 0.08, 0.87, 0.11,
0.15, 0.05, 0.14, 0.09, 0.08,
0.22, 0.09, 0.07, 0.06, 0.06,
0.05, 0.43, 0.70, 0.03, 0.07,
0.06, 0.07, 0.14, 0.04, 0.01,
0.03, 0.05, 0.65, 0.06, 0.04,
0.23, 0.06, 0.75, 0.10, 0.01,
0.63, 0.41, 0.09, 0.01, 0.01,
0.18, 0.10, 0.03, 0.01, 0.05,
0.13, 0.18, 0.03, 0.23, 0.01,
0.04, 0.03, 0.38, 0.90, 0.21,
0.18, 0.10, 0.48, 0.08, 0.46,
0.03, 0.01, 0.02, 0.03, 0.10,
0.01, 0.09, 0.01, 0.01, 0.01,
0.10, 0.41, 0.01, 0.06, 0.75,
0.08, 0.01, 0.01, 0.08, 0.21,
0.06, 0.02, 0.05, 0.02, 0.05,
0.09, 0.12, 0.03, 0.06, 0.11,
0.03, 0.01, 0.01, 0.06, 0.84,
0.04, 0.81, 0.39, 0.02, 0.29,
0.77, 0.07, 0.06, 0.22, 0.23,
0.23, 0.01, 0.02, 0.13, 0.04,
0.19, 0.04, 0.08, 0.27, 0.09,
0.06, 0.01, 0.03, 0.21, 0.04,
};
char *ImageCreateLocalModelBuffer(JNIEnv *env, jobject modelBuffer) {
jbyte *modelAddr = static_cast<jbyte *>(env->GetDirectBufferAddress(modelBuffer));
int modelLen = static_cast<int>(env->GetDirectBufferCapacity(modelBuffer));
char *buffer(new char[modelLen]);
memcpy(buffer, modelAddr, modelLen);
return buffer;
}
/**
* To process the result of mindspore inference.
* @param msOutputs
* @return
*/
std::string ProcessRunnetResult(const int RET_CATEGORY_SUM, const char *const labels_name_map[],
std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs) {
// Get the branch of the model output.
// Use iterators to get map elements.
std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
iter = msOutputs.begin();
// The mobilenetv2.ms model outputs only one branch.
auto outputTensor = iter->second;
int tensorNum = outputTensor->ElementsNum();
MS_PRINT("Number of tensor elements:%d", tensorNum);
// Get a pointer to the first score.
float *temp_scores = static_cast<float *>(outputTensor->MutableData());
float scores[RET_CATEGORY_SUM];
for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
scores[i] = temp_scores[i];
}
float unifiedThre = 0.5;
float probMax = 1.0;
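// Rescale each raw score against its per-class threshold from g_thres_map so that 0.5
// becomes a unified decision boundary: scores below the threshold map linearly into
// [0, 0.5) and scores above it into [0.5, 1.0].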
for (size_t i = 0; i < RET_CATEGORY_SUM; ++i) {
float threshold = g_thres_map[i];
float tmpProb = scores[i];
if (tmpProb < threshold) {
tmpProb = tmpProb / threshold * unifiedThre;
} else {
tmpProb = (tmpProb - threshold) / (probMax - threshold) * unifiedThre + unifiedThre;
}
scores[i] = tmpProb;
}
for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
if (scores[i] > 0.5) {
MS_PRINT("MindSpore scores[%d] : [%f]", i, scores[i]);
}
}
// Score for each category.
// Converted to text information that needs to be displayed in the APP.
std::string categoryScore = "";
for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
categoryScore += labels_name_map[i];
categoryScore += ":";
std::string score_str = std::to_string(scores[i]);
categoryScore += score_str;
categoryScore += ";";
}
return categoryScore;
}
bool ImageBitmapToLiteMat(JNIEnv *env, const jobject &srcBitmap, LiteMat *lite_mat) {
bool ret = false;
AndroidBitmapInfo info;
void *pixels = nullptr;
LiteMat &lite_mat_bgr = *lite_mat;
AndroidBitmap_getInfo(env, srcBitmap, &info);
if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
MS_PRINT("Image Err, Request RGBA");
return false;
}
AndroidBitmap_lockPixels(env, srcBitmap, &pixels);
if (info.stride == info.width * 4) {
ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels),
LPixelType::RGBA2RGB, LDataType::UINT8,
info.width, info.height, lite_mat_bgr);
if (!ret) {
MS_PRINT("Init From RGBA error");
}
} else {
unsigned char *pixels_ptr = new unsigned char[info.width * info.height * 4];
unsigned char *ptr = pixels_ptr;
unsigned char *data = reinterpret_cast<unsigned char *>(pixels);
for (int i = 0; i < info.height; i++) {
memcpy(ptr, data, info.width * 4);
ptr += info.width * 4;
data += info.stride;
}
ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels_ptr),
LPixelType::RGBA2RGB, LDataType::UINT8,
info.width, info.height, lite_mat_bgr);
if (!ret) {
MS_PRINT("Init From RGBA error");
}
delete[] (pixels_ptr);
}
AndroidBitmap_unlockPixels(env, srcBitmap);
return ret;
}
bool ImagePreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr) {
bool ret = false;
LiteMat lite_mat_resize;
LiteMat &lite_norm_mat_cut = *lite_norm_mat_ptr;
ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 256, 256);
if (!ret) {
MS_PRINT("ResizeBilinear error");
return false;
}
LiteMat lite_mat_convert_float;
ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0);
if (!ret) {
MS_PRINT("ConvertTo error");
return false;
}
LiteMat lite_mat_cut;
ret = Crop(lite_mat_convert_float, lite_mat_cut, 16, 16, 224, 224);
if (!ret) {
MS_PRINT("Crop error");
return false;
}
std::vector<float> means = {0.485, 0.456, 0.406};
std::vector<float> stds = {0.229, 0.224, 0.225};
SubStractMeanNormalize(lite_mat_cut, lite_norm_mat_cut, means, stds);
return true;
}
/**
* The Java layer reads the model into MappedByteBuffer or ByteBuffer to load the model.
*/
extern "C"
JNIEXPORT jlong JNICALL
Java_com_mindspore_himindspore_imageclassification_help_ImageTrackingMobile_loadModel(JNIEnv *env,
jobject thiz,
jobject model_buffer,
jint num_thread) {
if (nullptr == model_buffer) {
MS_PRINT("error, buffer is nullptr!");
return (jlong) nullptr;
}
jlong bufferLen = env->GetDirectBufferCapacity(model_buffer);
if (0 == bufferLen) {
MS_PRINT("error, bufferLen is 0!");
return (jlong) nullptr;
}
char *modelBuffer = ImageCreateLocalModelBuffer(env, model_buffer);
if (modelBuffer == nullptr) {
MS_PRINT("modelBuffer create failed!");
return (jlong) nullptr;
}
// To create a mindspore network inference environment.
void **labelEnv = new void *;
MSNetWork *labelNet = new MSNetWork;
*labelEnv = labelNet;
mindspore::lite::Context *context = new mindspore::lite::Context;
context->thread_num_ = num_thread;
labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
delete context;
if (labelNet->session() == nullptr) {
MS_PRINT("MindSpore create session failed!.");
delete labelNet;
delete labelEnv;
return (jlong) nullptr;
}
if (model_buffer != nullptr) {
env->DeleteLocalRef(model_buffer);
}
return (jlong) labelEnv;
}
/**
* After the inference environment is successfully created,
* send a picture to the model and run inference.
*/
extern "C" JNIEXPORT jstring JNICALL
Java_com_mindspore_himindspore_imageclassification_help_ImageTrackingMobile_runNet(JNIEnv *env,
jclass type,
jlong netEnv,
jobject srcBitmap) {
LiteMat lite_mat_bgr, lite_norm_mat_cut;
if (!ImageBitmapToLiteMat(env, srcBitmap, &lite_mat_bgr)) {
MS_PRINT("ImageBitmapToLiteMat error");
return NULL;
}
if (!ImagePreProcessImageData(lite_mat_bgr, &lite_norm_mat_cut)) {
MS_PRINT("ImagePreProcessImageData error");
return NULL;
}
ImgDims inputDims;
inputDims.channel = lite_norm_mat_cut.channel_;
inputDims.width = lite_norm_mat_cut.width_;
inputDims.height = lite_norm_mat_cut.height_;
// Get the MindSpore inference environment that was created in loadModel().
void **labelEnv = reinterpret_cast<void **>(netEnv);
if (labelEnv == nullptr) {
MS_PRINT("MindSpore error, labelEnv is a nullptr.");
return NULL;
}
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
auto mSession = labelNet->session();
if (mSession == nullptr) {
MS_PRINT("MindSpore error, Session is a nullptr.");
return NULL;
}
MS_PRINT("MindSpore get session.");
auto msInputs = mSession->GetInputs();
if (msInputs.size() == 0) {
MS_PRINT("MindSpore error, msInputs.size() equals 0.");
return NULL;
}
auto inTensor = msInputs.front();
float *dataHWC = reinterpret_cast<float *>(lite_norm_mat_cut.data_ptr_);
// Copy dataHWC to the model input tensor.
memcpy(inTensor->MutableData(), dataHWC,
inputDims.channel * inputDims.width * inputDims.height * sizeof(float));
// After the model and image tensor data is loaded, run inference.
auto status = mSession->RunGraph();
if (status != mindspore::lite::RET_OK) {
MS_PRINT("MindSpore run net error.");
return NULL;
}
/**
* Get the mindspore inference results.
* Return the map of output node name and MindSpore Lite MSTensor.
*/
auto names = mSession->GetOutputTensorNames();
std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs;
for (const auto &name : names) {
auto temp_dat = mSession->GetOutputByTensorName(name);
msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *>{name, temp_dat});
}
std::string resultStr = ProcessRunnetResult(::RET_CATEGORY_SUM,
::labels_name_map, msOutputs);
const char *resultCharData = resultStr.c_str();
return (env)->NewStringUTF(resultCharData);
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_mindspore_himindspore_imageclassification_help_ImageTrackingMobile_unloadModel(JNIEnv *env,
jclass type,
jlong netEnv) {
MS_PRINT("MindSpore release net.");
void **labelEnv = reinterpret_cast<void **>(netEnv);
if (labelEnv == nullptr) {
MS_PRINT("MindSpore error, labelEnv is a nullptr.");
}
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
labelNet->ReleaseNets();
return (jboolean) true;
}


@@ -0,0 +1,21 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMAGE_MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H
#define IMAGE_MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H
#endif  // IMAGE_MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H


@@ -0,0 +1,52 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "MSNetWork.h"
#include <android/log.h>
#include <iostream>
#include <string>
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
MSNetWork::MSNetWork(void) : session_(nullptr) {}
MSNetWork::~MSNetWork(void) {}
void MSNetWork::CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx) {
session_ = mindspore::session::LiteSession::CreateSession(ctx);
if (session_ == nullptr) {
MS_PRINT("Create Session failed.");
return;
}
// Compile model.
auto model = mindspore::lite::Model::Import(modelBuffer, bufferLen);
if (model == nullptr) {
MS_PRINT("Import model failed.");
return;
}
int ret = session_->CompileGraph(model);
if (ret != mindspore::lite::RET_OK) {
MS_PRINT("CompileGraph failed.");
return;
}
}
int MSNetWork::ReleaseNets(void) {
delete session_;
return 0;
}


@@ -0,0 +1,59 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MSNETWORK_H
#define MSNETWORK_H
#include <context.h>
#include <lite_session.h>
#include <model.h>
#include <errorcode.h>
#include <cstdio>
#include <algorithm>
#include <fstream>
#include <functional>
#include <sstream>
#include <vector>
#include <map>
#include <string>
#include <memory>
#include <utility>
struct ImgDims {
int channel = 0;
int width = 0;
int height = 0;
};
/*struct SessIterm {
std::shared_ptr<mindspore::session::LiteSession> sess = nullptr;
};*/
class MSNetWork {
public:
MSNetWork();
~MSNetWork();
void CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx);
int ReleaseNets(void);
mindspore::session::LiteSession *session() const { return session_; }
private:
mindspore::session::LiteSession *session_;
};
#endif


@@ -0,0 +1,267 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <android/bitmap.h>
#include <android/asset_manager_jni.h>
#include <android/log.h>
#include <sstream>
#include <cstring>
#include <set>
#include <utility>
#include "include/errorcode.h"
#include "include/ms_tensor.h"
#include "MSNetWork.h"
#include "ssd_util/ssd_util.h"
#include "lite_cv/lite_mat.h"
#include "lite_cv/image_process.h"
using mindspore::dataset::LiteMat;
using mindspore::dataset::LPixelType;
using mindspore::dataset::LDataType;
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
bool ObjectBitmapToLiteMat(JNIEnv *env, const jobject &srcBitmap, LiteMat *lite_mat) {
bool ret = false;
AndroidBitmapInfo info;
void *pixels = nullptr;
LiteMat &lite_mat_bgr = *lite_mat;
AndroidBitmap_getInfo(env, srcBitmap, &info);
if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
MS_PRINT("Image Err, Request RGBA");
return false;
}
AndroidBitmap_lockPixels(env, srcBitmap, &pixels);
if (info.stride == info.width * 4) {
ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels),
LPixelType::RGBA2RGB, LDataType::UINT8,
info.width, info.height, lite_mat_bgr);
if (!ret) {
MS_PRINT("Init From RGBA error");
}
} else {
unsigned char *pixels_ptr = new unsigned char[info.width * info.height * 4];
unsigned char *ptr = pixels_ptr;
unsigned char *data = reinterpret_cast<unsigned char *>(pixels);
for (int i = 0; i < info.height; i++) {
memcpy(ptr, data, info.width * 4);
ptr += info.width * 4;
data += info.stride;
}
ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels_ptr),
LPixelType::RGBA2RGB, LDataType::UINT8,
info.width, info.height, lite_mat_bgr);
if (!ret) {
MS_PRINT("Init From RGBA error");
}
delete[] (pixels_ptr);
}
AndroidBitmap_unlockPixels(env, srcBitmap);
return ret;
}
bool ObjectPreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr) {
bool ret = false;
LiteMat lite_mat_resize;
LiteMat &lite_norm_mat_cut = *lite_norm_mat_ptr;
ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 300, 300);
if (!ret) {
MS_PRINT("ResizeBilinear error");
return false;
}
LiteMat lite_mat_convert_float;
ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0);
if (!ret) {
MS_PRINT("ConvertTo error");
return false;
}
std::vector<float> means = {0.485, 0.456, 0.406};
std::vector<float> stds = {0.229, 0.224, 0.225};
SubStractMeanNormalize(lite_mat_convert_float, lite_norm_mat_cut, means, stds);
return true;
}
char *ObjectCreateLocalModelBuffer(JNIEnv *env, jobject modelBuffer) {
jbyte *modelAddr = static_cast<jbyte *>(env->GetDirectBufferAddress(modelBuffer));
int modelLen = static_cast<int>(env->GetDirectBufferCapacity(modelBuffer));
char *buffer(new char[modelLen]);
memcpy(buffer, modelAddr, modelLen);
return buffer;
}
/**
*
* @param msOutputs Model output, the mindspore inferencing result.
* @param srcImageWidth The width of the original input image.
* @param srcImageHeight The height of the original input image.
* @return
*/
std::string ProcessRunnetResult(std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs,
int srcImageWidth, int srcImageHeight) {
std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
iter = msOutputs.begin();
auto branch2_string = iter->first;
auto branch2_tensor = iter->second;
++iter;
auto branch1_string = iter->first;
auto branch1_tensor = iter->second;
MS_PRINT("%s %s", branch1_string.c_str(), branch2_string.c_str());
// ----------- Interface test --------------------------
float *tmpscores2 = reinterpret_cast<float *>(branch1_tensor->MutableData());
float *tmpdata = reinterpret_cast<float *>(branch2_tensor->MutableData());
// Using ssd model util to process model branch outputs.
SSDModelUtil ssdUtil(srcImageWidth, srcImageHeight);
std::string retStr = ssdUtil.getDecodeResult(tmpscores2, tmpdata);
MS_PRINT("retStr %s", retStr.c_str());
return retStr;
}
extern "C" JNIEXPORT jlong JNICALL
Java_com_mindspore_himindspore_objectdetection_help_ObjectTrackingMobile_loadModel(JNIEnv *env, jobject thiz,
jobject assetManager,
jobject buffer,
jint numThread) {
MS_PRINT("MindSpore so version 20200730");
if (nullptr == buffer) {
MS_PRINT("error, buffer is nullptr!");
return (jlong) nullptr;
}
jlong bufferLen = env->GetDirectBufferCapacity(buffer);
MS_PRINT("MindSpore get bufferLen:%d", static_cast<int>(bufferLen));
if (0 == bufferLen) {
MS_PRINT("error, bufferLen is 0!");
return (jlong) nullptr;
}
char *modelBuffer = ObjectCreateLocalModelBuffer(env, buffer);
if (modelBuffer == nullptr) {
MS_PRINT("modelBuffer create failed!");
return (jlong) nullptr;
}
MS_PRINT("MindSpore loading Model.");
void **labelEnv = new void *;
MSNetWork *labelNet = new MSNetWork;
*labelEnv = labelNet;
mindspore::lite::Context *context = new mindspore::lite::Context;
context->thread_num_ = numThread;
labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
delete context;
if (labelNet->session() == nullptr) {
delete labelNet;
delete labelEnv;
MS_PRINT("MindSpore create session failed!.");
return (jlong) nullptr;
}
MS_PRINT("MindSpore create session successfully.");
if (buffer != nullptr) {
env->DeleteLocalRef(buffer);
}
if (assetManager != nullptr) {
env->DeleteLocalRef(assetManager);
}
MS_PRINT("ptr released successfully.");
return (jlong) labelEnv;
}
extern "C" JNIEXPORT jstring JNICALL
Java_com_mindspore_himindspore_objectdetection_help_ObjectTrackingMobile_runNet(JNIEnv *env, jobject thiz,
jlong netEnv,
jobject srcBitmap) {
LiteMat lite_mat_bgr, lite_norm_mat_cut;
if (!ObjectBitmapToLiteMat(env, srcBitmap, &lite_mat_bgr)) {
MS_PRINT("ObjectBitmapToLiteMat error");
return NULL;
}
int srcImageWidth = lite_mat_bgr.width_;
int srcImageHeight = lite_mat_bgr.height_;
if (!ObjectPreProcessImageData(lite_mat_bgr, &lite_norm_mat_cut)) {
MS_PRINT("ObjectPreProcessImageData error");
return NULL;
}
ImgDims inputDims;
inputDims.channel = lite_norm_mat_cut.channel_;
inputDims.width = lite_norm_mat_cut.width_;
inputDims.height = lite_norm_mat_cut.height_;
// Get the MindSpore inference environment that was created in loadModel().
void **labelEnv = reinterpret_cast<void **>(netEnv);
if (labelEnv == nullptr) {
MS_PRINT("MindSpore error, labelEnv is a nullptr.");
return NULL;
}
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
auto mSession = labelNet->session();
if (mSession == nullptr) {
MS_PRINT("MindSpore error, Session is a nullptr.");
return NULL;
}
MS_PRINT("MindSpore get session.");
auto msInputs = mSession->GetInputs();
auto inTensor = msInputs.front();
float *dataHWC = reinterpret_cast<float *>(lite_norm_mat_cut.data_ptr_);
// copy input Tensor
memcpy(inTensor->MutableData(), dataHWC,
inputDims.channel * inputDims.width * inputDims.height * sizeof(float));
MS_PRINT("MindSpore get msInputs.");
auto status = mSession->RunGraph();
if (status != mindspore::lite::RET_OK) {
MS_PRINT("MindSpore runnet error.");
return NULL;
}
auto names = mSession->GetOutputTensorNames();
std::unordered_map<std::string,
mindspore::tensor::MSTensor *> msOutputs;
for (const auto &name : names) {
auto temp_dat = mSession->GetOutputByTensorName(name);
msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *> {name, temp_dat});
}
std::string retStr = ProcessRunnetResult(msOutputs, srcImageWidth, srcImageHeight);
const char *resultChardata = retStr.c_str();
return (env)->NewStringUTF(resultChardata);
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_mindspore_himindspore_objectdetection_help_ObjectTrackingMobile_unloadModel(JNIEnv *env,
jobject thiz,
jlong netEnv) {
void **labelEnv = reinterpret_cast<void **>(netEnv);
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
labelNet->ReleaseNets();
return (jboolean) true;
}
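A minimal Java-side sketch (an illustrative assumption, not part of this commit) of parsing the string returned by runNet() above. SSDModelUtil::getDecodeResult() in ssd_util.cpp below serializes each detection as "<index>_<label>_<score>_<xmin>_<ymin>_<xmax>_<ymax>;"; the sketch assumes the label strings contain no underscore.

import java.util.ArrayList;
import java.util.List;

public final class DetectionParser {

    public static final class Detection {
        public final String label;
        public final float score, xmin, ymin, xmax, ymax;

        Detection(String label, float score, float xmin, float ymin, float xmax, float ymax) {
            this.label = label;
            this.score = score;
            this.xmin = xmin;
            this.ymin = ymin;
            this.xmax = xmax;
            this.ymax = ymax;
        }
    }

    // Split the serialized result on ';' per detection and on '_' per field.
    public static List<Detection> parse(String result) {
        List<Detection> detections = new ArrayList<>();
        if (result == null || result.isEmpty()) {
            return detections;
        }
        for (String item : result.split(";")) {
            String[] f = item.split("_");
            if (f.length < 7) {
                continue;
            }
            detections.add(new Detection(f[1],
                    Float.parseFloat(f[2]),
                    Float.parseFloat(f[3]), Float.parseFloat(f[4]),
                    Float.parseFloat(f[5]), Float.parseFloat(f[6])));
        }
        return detections;
    }
}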


@@ -0,0 +1,291 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <android/log.h>
#include <algorithm>
#include "ssd_util/ssd_util.h"
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
SSDModelUtil::~SSDModelUtil(void) {}
/**
* SSD model util constructor.
* @param srcImgWidth The width of the original input image.
* @param srcImgHeight The height of the original input image.
*/
SSDModelUtil::SSDModelUtil(int srcImgWidth, int srcImgHeight) {
inputImageWidth = srcImgWidth;
inputImageHeight = srcImgHeight;
getDefaultBoxes(); // Fill the mDefaultBoxes vector.
}
std::string SSDModelUtil::getDecodeResult(float *branchScores, float *branchBoxData) {
std::string result = "";
NormalBox tmpBox[1917] = {0};
float mScores[1917][81] = {0};
float outBuff[1917][7] = {0};
float scoreWithOneClass[1917] = {0};
int outBoxNum = 0;
YXBoxes decodedBoxes[1917] = {0};
// Copy branch outputs box data to tmpBox.
for (int i = 0; i < 1917; ++i) {
tmpBox[i].y = branchBoxData[i * 4 + 0];
tmpBox[i].x = branchBoxData[i * 4 + 1];
tmpBox[i].h = branchBoxData[i * 4 + 2];
tmpBox[i].w = branchBoxData[i * 4 + 3];
}
// Copy branch outputs score to mScores.
for (int i = 0; i < 1917; ++i) {
for (int j = 0; j < 81; ++j) {
mScores[i][j] = branchScores[i * 81 + j];
}
}
// Decode the boxes, then apply per-class non-maximum suppression (NMS).
ssd_boxes_decode(tmpBox, decodedBoxes, 0.1, 0.2, 1917);
const float nms_threshold = 0.3;
for (int i = 1; i < 81; i++) {
std::vector<int> in_indexes;
for (int j = 0; j < 1917; j++) {
scoreWithOneClass[j] = mScores[j][i];
if (mScores[j][i] > g_thres_map[i]) {
in_indexes.push_back(j);
}
}
if (in_indexes.size() == 0) {
continue;
}
sort(in_indexes.begin(), in_indexes.end(),
[&](int a, int b) { return scoreWithOneClass[a] > scoreWithOneClass[b]; });
std::vector<int> out_indexes;
nonMaximumSuppression(decodedBoxes, scoreWithOneClass, in_indexes, &out_indexes,
nms_threshold);
for (int k = 0; k < out_indexes.size(); k++) {
// index of the decoded box
outBuff[outBoxNum][0] = out_indexes[k];
// labelid
outBuff[outBoxNum][1] = i;
// scores
outBuff[outBoxNum][2] = scoreWithOneClass[out_indexes[k]];
outBuff[outBoxNum][3] =
decodedBoxes[out_indexes[k]].xmin * inputImageWidth / 300;
outBuff[outBoxNum][4] =
decodedBoxes[out_indexes[k]].ymin * inputImageHeight / 300;
outBuff[outBoxNum][5] =
decodedBoxes[out_indexes[k]].xmax * inputImageWidth / 300;
outBuff[outBoxNum][6] =
decodedBoxes[out_indexes[k]].ymax * inputImageHeight / 300;
outBoxNum++;
}
}
MS_PRINT("outBoxNum %d", outBoxNum);
for (int i = 0; i < outBoxNum; ++i) {
std::string tmpid_str = std::to_string(outBuff[i][0]);
result += tmpid_str;
result += "_";
MS_PRINT("label_classes i %d, outBuff %d", i, (int) outBuff[i][1]);
tmpid_str = label_classes[static_cast<int>(outBuff[i][1])];
// label id
result += tmpid_str;
result += "_";
tmpid_str = std::to_string(outBuff[i][2]);
// scores
result += tmpid_str;
result += "_";
tmpid_str = std::to_string(outBuff[i][3]);
// xmin
result += tmpid_str;
result += "_";
tmpid_str = std::to_string(outBuff[i][4]);
// ymin
result += tmpid_str;
result += "_";
tmpid_str = std::to_string(outBuff[i][5]);
// xmax
result += tmpid_str;
result += "_";
tmpid_str = std::to_string(outBuff[i][6]);
// ymax
result += tmpid_str;
result += ";";
}
return result;
}
void SSDModelUtil::getDefaultBoxes() {
float fk[6] = {0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
std::vector<struct WHBox> all_sizes;
struct Product mProductData[19 * 19] = {0};
for (int i = 0; i < 6; i++) {
fk[i] = config.model_input_height / config.steps[i];
}
float scale_rate =
(config.max_scale - config.min_scale) / (sizeof(config.num_default) / sizeof(int) - 1);
float scales[7] = {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0};
for (int i = 0; i < sizeof(config.num_default) / sizeof(int); i++) {
scales[i] = config.min_scale + scale_rate * i;
}
for (int idex = 0; idex < sizeof(config.feature_size) / sizeof(int); idex++) {
float sk1 = scales[idex];
float sk2 = scales[idex + 1];
float sk3 = sqrt(sk1 * sk2);
struct WHBox tempWHBox;
all_sizes.clear();
// When idex == 0, len(all_sizes) = 3.
if (idex == 0) {
float w = sk1 * sqrt(2);
float h = sk1 / sqrt(2);
// all_sizes = [(0.1, 0.1), (w, h), (h, w)]
tempWHBox.boxw = 0.1;
tempWHBox.boxh = 0.1;
all_sizes.push_back(tempWHBox);
tempWHBox.boxw = w;
tempWHBox.boxh = h;
all_sizes.push_back(tempWHBox);
tempWHBox.boxw = h;
tempWHBox.boxh = w;
all_sizes.push_back(tempWHBox);
} else {
// len(all_sizes) = 6.
tempWHBox.boxw = sk1;
tempWHBox.boxh = sk1;
all_sizes.push_back(tempWHBox);
for (int j = 0; j < sizeof(config.aspect_ratios[idex]) / sizeof(int); j++) {
float w = sk1 * sqrt(config.aspect_ratios[idex][j]);
float h = sk1 / sqrt(config.aspect_ratios[idex][j]);
tempWHBox.boxw = w;
tempWHBox.boxh = h;
all_sizes.push_back(tempWHBox);
tempWHBox.boxw = h;
tempWHBox.boxh = w;
all_sizes.push_back(tempWHBox);
}
tempWHBox.boxw = sk3;
tempWHBox.boxh = sk3;
all_sizes.push_back(tempWHBox);
}
for (int i = 0; i < config.feature_size[idex]; i++) {
for (int j = 0; j < config.feature_size[idex]; j++) {
mProductData[i * config.feature_size[idex] + j].x = i;
mProductData[i * config.feature_size[idex] + j].y = j;
}
}
int productLen = config.feature_size[idex] * config.feature_size[idex];
for (int i = 0; i < productLen; i++) {
for (int j = 0; j < all_sizes.size(); j++) {
struct NormalBox tempBox;
float cx = (mProductData[i].y + 0.5) / fk[idex];
float cy = (mProductData[i].x + 0.5) / fk[idex];
tempBox.y = cy;
tempBox.x = cx;
tempBox.h = all_sizes[j].boxh;
tempBox.w = all_sizes[j].boxw;
mDefaultBoxes.push_back(tempBox);
}
}
}
}
void SSDModelUtil::ssd_boxes_decode(const NormalBox *boxes,
YXBoxes *const decoded_boxes, const float scale0,
const float scale1, const int count) {
if (mDefaultBoxes.size() == 0) {
MS_PRINT("get default boxes error.");
return;
}
for (int i = 0; i < count; ++i) {
float cy = boxes[i].y * scale0 * mDefaultBoxes[i].h + mDefaultBoxes[i].y;
float cx = boxes[i].x * scale0 * mDefaultBoxes[i].w + mDefaultBoxes[i].x;
float h = exp(boxes[i].h * scale1) * mDefaultBoxes[i].h;
float w = exp(boxes[i].w * scale1) * mDefaultBoxes[i].w;
decoded_boxes[i].ymin = std::min(1.0f, std::max(0.0f, cy - h / 2)) * config.model_input_height;
decoded_boxes[i].xmin = std::min(1.0f, std::max(0.0f, cx - w / 2)) * config.model_input_width;
decoded_boxes[i].ymax = std::min(1.0f, std::max(0.0f, cy + h / 2)) * config.model_input_height;
decoded_boxes[i].xmax = std::min(1.0f, std::max(0.0f, cx + w / 2)) * config.model_input_width;
}
}
void SSDModelUtil::nonMaximumSuppression(const YXBoxes *const decoded_boxes,
const float *const scores,
const std::vector<int> &in_indexes,
std::vector<int> *out_indexes_p, const float nmsThreshold,
const int count, const int max_results) {
int nR = 0;
std::vector<int> &out_indexes = *out_indexes_p;
std::vector<bool> del(count, false);
for (size_t i = 0; i < in_indexes.size(); i++) {
if (!del[in_indexes[i]]) {
out_indexes.push_back(in_indexes[i]);
if (++nR == max_results) {
break;
}
for (size_t j = i + 1; j < in_indexes.size(); j++) {
const auto boxi = decoded_boxes[in_indexes[i]], boxj = decoded_boxes[in_indexes[j]];
float a[4] = {boxi.xmin, boxi.ymin, boxi.xmax, boxi.ymax};
float b[4] = {boxj.xmin, boxj.ymin, boxj.xmax, boxj.ymax};
if (IOU(a, b) > nmsThreshold) {
del[in_indexes[j]] = true;
}
}
}
}
}
double SSDModelUtil::IOU(float r1[4], float r2[4]) {
float x1 = std::max(r1[0], r2[0]);
float y1 = std::max(r1[1], r2[1]);
float x2 = std::min(r1[2], r2[2]);
float y2 = std::min(r1[3], r2[3]);
// if max(min) > min(max), there is no intersection
if (x2 - x1 + 1 <= 0 || y2 - y1 + 1 <= 0)
return 0;
double insect_area = (x2 - x1 + 1) * (y2 - y1 + 1);
double union_area =
(r1[2] - r1[0] + 1) * (r1[3] - r1[1] + 1) + (r2[2] - r2[0] + 1) * (r2[3] - r2[1] + 1) -
insect_area;
double iou = insect_area / union_area;
return (iou > 0) ? iou : 0;
}
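getDecodeResult serializes every surviving detection as boxIndex_label_score_xmin_ymin_xmax_ymax and joins the entries with ";". As a rough illustration of how a caller on the Java side could split that string back into fields, here is a minimal sketch; DecodeResultParser and DetectionResult are hypothetical helper names for illustration, not classes from this commit.

// Hypothetical helper: parses the ";"-separated, "_"-joined string produced by
// SSDModelUtil::getDecodeResult. Field order follows the C++ code above.
public class DecodeResultParser {
    public static class DetectionResult {
        public String label;
        public float score;
        public float xmin, ymin, xmax, ymax;
    }

    public static java.util.List<DetectionResult> parse(String result) {
        java.util.List<DetectionResult> list = new java.util.ArrayList<>();
        if (result == null || result.isEmpty()) {
            return list;
        }
        for (String item : result.split(";")) {
            String[] fields = item.split("_");
            if (fields.length < 7) {
                continue; // skip malformed entries
            }
            DetectionResult d = new DetectionResult();
            d.label = fields[1];                   // class label text
            d.score = Float.parseFloat(fields[2]); // confidence score
            d.xmin = Float.parseFloat(fields[3]);  // coordinates already scaled to the source image
            d.ymin = Float.parseFloat(fields[4]);
            d.xmax = Float.parseFloat(fields[5]);
            d.ymax = Float.parseFloat(fields[6]);
            list.add(d);
        }
        return list;
    }
}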

View File

@ -0,0 +1,200 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HIMINDSPORE_SSD_UTIL_H
#define HIMINDSPORE_SSD_UTIL_H
#include <string>
#include <vector>
class SSDModelUtil {
public:
// Constructor.
SSDModelUtil(int srcImageWidth, int srcImgHeight);
~SSDModelUtil();
/**
* Return the SSD model post-processing result.
* @param branchScores  Flattened class scores from the score branch (1917 boxes x 81 classes).
* @param branchBoxData Flattened box regression outputs from the location branch (1917 boxes x 4 values).
* @return A ";"-separated string of detections, each formatted as boxIndex_label_score_xmin_ymin_xmax_ymax.
*/
std::string getDecodeResult(float *branchScores, float *branchBoxData);
struct NormalBox {
float y;
float x;
float h;
float w;
};
struct YXBoxes {
float ymin;
float xmin;
float ymax;
float xmax;
};
struct Product {
int x;
int y;
};
struct WHBox {
float boxw;
float boxh;
};
private:
std::vector<struct NormalBox> mDefaultBoxes;
int inputImageHeight;
int inputImageWidth;
void getDefaultBoxes();
void ssd_boxes_decode(const NormalBox *boxes,
YXBoxes *const decoded_boxes,
const float scale0 = 0.1, const float scale1 = 0.2,
const int count = 1917);
void nonMaximumSuppression(const YXBoxes *const decoded_boxes, const float *const scores,
const std::vector<int> &in_indexes, std::vector<int> *out_indexes_p,
const float nmsThreshold = 0.6,
const int count = 1917, const int max_results = 100);
double IOU(float r1[4], float r2[4]);
// ============= variables =============.
struct network {
int model_input_height = 300;
int model_input_width = 300;
int num_default[6] = {3, 6, 6, 6, 6, 6};
int feature_size[6] = {19, 10, 5, 3, 2, 1};
double min_scale = 0.2;
float max_scale = 0.95;
float steps[6] = {16, 32, 64, 100, 150, 300};
float prior_scaling[2] = {0.1, 0.2};
float gamma = 2.0;
float alpha = 0.75;
int aspect_ratios[6][2] = {{2, 0},
{2, 3},
{2, 3},
{2, 3},
{2, 3},
{2, 3}};
} config;
float g_thres_map[81] = {0, 0.635, 0.627, 0.589, 0.585, 0.648, 0.664, 0.655,
0.481, 0.529, 0.611, 0.641, 0.774, 0.549, 0.513, 0.652,
0.552, 0.590, 0.650, 0.575, 0.583, 0.650, 0.656, 0.696,
0.653, 0.438, 0.515, 0.459, 0.561, 0.545, 0.635, 0.540,
0.560, 0.721, 0.544, 0.548, 0.511, 0.611, 0.592, 0.542,
0.512, 0.635, 0.531, 0.437, 0.525, 0.445, 0.484, 0.546,
0.490, 0.581, 0.566, 0.516, 0.445, 0.541, 0.613, 0.560,
0.483, 0.509, 0.464, 0.543, 0.538, 0.490, 0.576, 0.617,
0.577, 0.595, 0.640, 0.585, 0.598, 0.592, 0.514, 0.397,
0.592, 0.504, 0.548, 0.642, 0.581, 0.497, 0.545, 0.154,
0.580,
};
std::string label_classes[81] = {
{"background"},
{"human"},
{"bike"},
{"automobile"},
{"motorbike"},
{"aircraft"},
{"motorbus"},
{"train"},
{"motortruck"},
{"boat"},
{"traffic signal"},
{"fireplug"},
{"stop sign"},
{"parking meter"},
{"seat"},
{"bird"},
{"cat"},
{"dog"},
{"horse"},
{"sheep"},
{"cow"},
{"elephant"},
{"bear"},
{"zebra"},
{"giraffe"},
{"knapsack"},
{"bumbershoot"},
{"purse"},
{"neckwear"},
{"traveling bag"},
{"frisbee"},
{"skis"},
{"snowboard"},
{"sports ball"},
{"kite"},
{"baseball bat"},
{"baseball glove"},
{"skateboard"},
{"surfboard"},
{"tennis racket"},
{"bottle"},
{"wine glass"},
{"cup"},
{"fork"},
{"knife"},
{"spoon"},
{"bowl"},
{"banana"},
{"apple"},
{"sandwich"},
{"orange"},
{"broccoli"},
{"carrot"},
{"hot dog"},
{"pizza"},
{"donut"},
{"cake"},
{"chair"},
{"couch"},
{"houseplant"},
{"bed"},
{"dinner table"},
{"toilet"},
{"television"},
{"notebook computer"},
{"mouse"},
{"remote"},
{"keyboard"},
{"smartphone"},
{"microwave"},
{"oven"},
{"toaster"},
{"water sink"},
{"fridge"},
{"book"},
{"bell"},
{"vase"},
{"shears"},
{"toy bear"},
{"hair drier"},
{"toothbrush"}
};
};
#endif
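A note on the constant 1917 that recurs in ssd_util: it is the total number of default boxes implied by the config block above, i.e. the sum over feature maps of feature_size^2 * num_default = 19^2*3 + 10^2*6 + 5^2*6 + 3^2*6 + 2^2*6 + 1^2*6 = 1083 + 600 + 150 + 54 + 24 + 6 = 1917. A tiny sketch that recomputes it from those arrays:

// Sketch: recompute the 1917 default-box count from the values in ssd_util.h.
public class DefaultBoxCount {
    public static void main(String[] args) {
        int[] featureSize = {19, 10, 5, 3, 2, 1};
        int[] numDefault = {3, 6, 6, 6, 6, 6};
        int total = 0;
        for (int i = 0; i < featureSize.length; i++) {
            total += featureSize[i] * featureSize[i] * numDefault[i];
        }
        System.out.println(total); // prints 1917 (1083 + 600 + 150 + 54 + 24 + 6)
    }
}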

View File

@ -0,0 +1,90 @@
package com.mindspore.himindspore;
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import com.mindspore.himindspore.imageclassification.ui.ImageCameraActivity;
import com.mindspore.himindspore.imageclassification.ui.ImageMainActivity;
import com.mindspore.himindspore.objectdetection.ui.ObjectDetectionMainActivity;
public class SplashActivity extends AppCompatActivity implements View.OnClickListener {
private static final int REQUEST_PERMISSION = 1;
private Button btnImage, btnObject, btnContract,btnAdvice;
private boolean isHasPermssion;
private static final String CODE_URL ="https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite";
private static final String HELP_URL ="https://github.com/mindspore-ai/mindspore/issues";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_splash);
btnImage = findViewById(R.id.btn_image);
btnObject = findViewById(R.id.btn_object);
btnContract = findViewById(R.id.btn_contact);
btnAdvice = findViewById(R.id.btn_advice);
btnImage.setOnClickListener(this);
btnObject.setOnClickListener(this);
btnContract.setOnClickListener(this);
btnAdvice.setOnClickListener(this);
requestPermissions();
}
private void requestPermissions() {
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.READ_PHONE_STATE, Manifest.permission.CAMERA}, REQUEST_PERMISSION);
}
/**
* Callback for the permission request result.
*/
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
if (REQUEST_PERMISSION == requestCode) {
// Only treat the request as successful when every requested permission was granted.
isHasPermssion = grantResults.length > 0;
for (int grantResult : grantResults) {
if (grantResult != PackageManager.PERMISSION_GRANTED) {
isHasPermssion = false;
break;
}
}
}
}
@Override
public void onClick(View view) {
if (R.id.btn_image == view.getId()) {
if (isHasPermssion) {
startActivity(new Intent(SplashActivity.this, ImageMainActivity.class));
} else {
requestPermissions();
}
} else if (R.id.btn_object == view.getId()) {
if (isHasPermssion) {
startActivity(new Intent(SplashActivity.this, ObjectDetectionMainActivity.class));
} else {
requestPermissions();
}
} else if (R.id.btn_contact == view.getId()) {
openBrowser(CODE_URL);
}else if (R.id.btn_advice == view.getId()) {
openBrowser(HELP_URL);
}
}
public void openBrowser(String url) {
Intent intent = new Intent();
intent.setAction("android.intent.action.VIEW");
Uri uri = Uri.parse(url.trim());
intent.setData(uri);
startActivity(intent);
}
}

View File

@ -0,0 +1,843 @@
package com.mindspore.himindspore.camera;
import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Surface;
import android.view.TextureView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.app.ActivityCompat;
import com.mindspore.himindspore.imageclassification.help.GarbageTrackingMobile;
import com.mindspore.himindspore.track.TrackListener;
import com.mindspore.himindspore.imageclassification.help.ImageTrackingMobile;
import com.mindspore.himindspore.objectdetection.help.ObjectTrackingMobile;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
public class CameraPreview extends TextureView {
private static final String TAG = "CameraPreview";
private static final SparseIntArray ORIENTATIONS = new SparseIntArray(); // Maps screen rotation to JPEG orientation.
private static final int MAX_PREVIEW_WIDTH = 1920; // Maximum preview width guaranteed by the Camera2 API.
private static final int MAX_PREVIEW_HEIGHT = 1080; // Maximum preview height guaranteed by the Camera2 API.
private static final int STATE_PREVIEW = 0; // Showing the camera preview.
private static final int STATE_WAITING_LOCK = 1; // Waiting for the focus to be locked.
private static final int STATE_WAITING_PRE_CAPTURE = 2; // Waiting for the pre-capture sequence (taking a picture).
private static final int STATE_WAITING_NON_PRE_CAPTURE = 3; // Waiting for a state other than pre-capture.
private static final int STATE_PICTURE_TAKEN = 4; // Picture has been taken.
public static final int OPEN_TYPE_IMAGE = 1;
public static final int OPEN_TYPE_IMAGE_CUSTOM = 11;
public static final int OPEN_TYPE_OBJECT = 2;
private int openType;
private int mState = STATE_PREVIEW;
private int mRatioWidth = 0, mRatioHeight = 0;
private int mSensorOrientation;
private boolean mFlashSupported;
private Semaphore mCameraOpenCloseLock = new Semaphore(1); // Semaphore used to coordinate camera open/close across threads.
private Activity activity;
private File mFile;
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private HandlerThread mImageBackgroundThread;
private Handler mImageBackgroundHandler;
private Size mPreviewSize;
private String mCameraId;
private CameraDevice mCameraDevice;
private CaptureRequest.Builder mPreviewRequestBuilder;
private CaptureRequest mPreviewRequest;
private CameraCaptureSession mCaptureSession;
private ImageReader mImageReader;
private ICameraDataCallBack iCameraDataCallBack;
private ImageTrackingMobile imageTrackingMobile;
private GarbageTrackingMobile garbageTrackingMobile;
private ObjectTrackingMobile objectTrackingMobile;
private boolean isPreBackgroundThreadPause;
private boolean isAlive;
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
public CameraPreview(@NonNull Context context) {
this(context, null);
}
public CameraPreview(@NonNull Context context, @Nullable AttributeSet attrs) {
this(context, attrs, 0);
}
public CameraPreview(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
this(context, attrs, defStyleAttr, 0);
}
public CameraPreview(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int width = MeasureSpec.getSize(widthMeasureSpec);
int height = MeasureSpec.getSize(heightMeasureSpec);
if (0 == mRatioWidth || 0 == mRatioHeight) {
setMeasuredDimension(width, height);
} else {
if (width < height * mRatioWidth / mRatioHeight) {
setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
} else {
setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
}
}
}
public void setAspectRatio(int width, int height) {
if (width < 0 || height < 0) {
throw new IllegalArgumentException("Size can't be negative");
}
mRatioWidth = width;
mRatioHeight = height;
requestLayout();
}
/**
* Handles SurfaceTexture lifecycle callback events.
*/
private final SurfaceTextureListener mSurfaceTextureListener = new SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
}
};
/**
* Callback for camera device state changes.
*/
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
Log.d(TAG, "相机已打开");
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
if (null != activity) {
activity.finish();
}
}
};
/**
* Handles events related to still-image capture.
*/
private CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result) {
switch (mState) {
case STATE_PREVIEW: {
break;
}
case STATE_WAITING_LOCK: {
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
if (afState == null) {
captureStillPicture();
} else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
mState = STATE_PICTURE_TAKEN;
captureStillPicture();
} else {
runPreCaptureSequence();
}
}
break;
}
case STATE_WAITING_PRE_CAPTURE: {
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null ||
aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ||
aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
mState = STATE_WAITING_NON_PRE_CAPTURE;
}
break;
}
case STATE_WAITING_NON_PRE_CAPTURE: {
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
mState = STATE_PICTURE_TAKEN;
captureStillPicture();
}
break;
}
}
}
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
process(partialResult);
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
process(result);
}
};
public void onResume(Activity activity, int openType, TrackListener track) {
isAlive = true;
this.activity = activity;
this.openType = openType;
if (OPEN_TYPE_IMAGE == openType) {
if (null != track) {
imageTrackingMobile = (ImageTrackingMobile) track;
}
} else if (OPEN_TYPE_IMAGE_CUSTOM == openType) {
if (null != track) {
garbageTrackingMobile = (GarbageTrackingMobile) track;
}
}else if (OPEN_TYPE_OBJECT == openType) {
if (null != track) {
objectTrackingMobile = (ObjectTrackingMobile) track;
}
}
startBackgroundThread();
// When the Activity or Fragment resumes and the surface is already available, reopen the camera and start the preview here; otherwise wait for the SurfaceTextureListener callback.
if (this.isAvailable()) {
openCamera(this.getWidth(), this.getHeight());
} else {
this.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
public void onPause() {
isAlive = false;
closeCamera();
stopBackgroundThread();
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
if (OPEN_TYPE_IMAGE == openType || OPEN_TYPE_OBJECT == openType || OPEN_TYPE_IMAGE_CUSTOM == openType ) {
mImageBackgroundThread = new HandlerThread("MINDSPORE");
mImageBackgroundThread.start();
mImageBackgroundHandler = new Handler(mImageBackgroundThread.getLooper());
mImageBackgroundHandler.post(classifyRunnable);
}
}
public boolean isAlive() {
return isAlive;
}
/**
* Runnable that performs the time-consuming detection work on a background thread.
*/
private Runnable classifyRunnable = new Runnable() {
public void run() {
synchronized (this) {
Bitmap bitmap = getBitmap();
if (bitmap != null) {
long startTime = System.currentTimeMillis();
// Run inference on the current preview bitmap.
String ret ="";
if (OPEN_TYPE_IMAGE == openType){
ret = null == imageTrackingMobile ? "" : imageTrackingMobile.MindSpore_runnet(bitmap);
}else if(OPEN_TYPE_IMAGE_CUSTOM == openType){
ret = null == garbageTrackingMobile ? "" : garbageTrackingMobile.MindSpore_runnet(bitmap);
}else if(OPEN_TYPE_OBJECT == openType){
ret = null == objectTrackingMobile ? "" : objectTrackingMobile.MindSpore_runnet(bitmap);
}
long endTime = System.currentTimeMillis();
if (mRecognitionDataCallBack != null) {
// Deliver the result through the callback interface.
mRecognitionDataCallBack.onRecognitionDataCallBack(ret, (endTime - startTime) + "ms ");
}
if (!bitmap.isRecycled()) {
bitmap.recycle();
}
}
if (mImageBackgroundHandler != null && !isPreBackgroundThreadPause) {
mImageBackgroundHandler.postDelayed(classifyRunnable,1000);
}
}
}
};
private void stopBackgroundThread() {
isPreBackgroundThreadPause = true;
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
if (OPEN_TYPE_IMAGE == openType || OPEN_TYPE_IMAGE_CUSTOM == openType || OPEN_TYPE_OBJECT == openType) {
mImageBackgroundThread.quitSafely();
mImageBackgroundThread.join();
mImageBackgroundThread = null;
mImageBackgroundHandler = null;
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/**
* Callback interface for delivering recognition results.
*/
public interface RecognitionDataCallBack {
/**
* Called when a recognition result is available.
*
* @param result Recognition result
* @param time Response time
*/
void onRecognitionDataCallBack(String result, String time);
}
private RecognitionDataCallBack mRecognitionDataCallBack;
public void addImageRecognitionDataCallBack(RecognitionDataCallBack recognitionDataCallBack) {
this.mRecognitionDataCallBack = recognitionDataCallBack;
}
/**
* Opens the camera specified by mCameraId.
*/
private void openCamera(int width, int height) {
setUpCameraOutputs(width, height);
configureTransform(width, height);
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
if (ActivityCompat.checkSelfPermission(activity, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
// TODO: Consider calling
// ActivityCompat#requestPermissions
return;
}
manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
/**
* Sets up member variables related to the camera.
*
* @param width  Available width of the camera preview.
* @param height Available height of the camera preview.
*/
@SuppressWarnings("SuspiciousNameCombination")
private void setUpCameraOutputs(int width, int height) {
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// The front-facing camera is not used in this sample.
Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
continue;
}
Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
new CompareSizesByArea());
mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
ImageFormat.JPEG, /*maxImages*/2);
mImageReader.setOnImageAvailableListener(
mOnImageAvailableListener, mBackgroundHandler);
int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
// noinspection ConstantConditions
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
boolean swappedDimensions = false;
switch (displayRotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
if (mSensorOrientation == 90 || mSensorOrientation == 270) {
swappedDimensions = true;
}
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
if (mSensorOrientation == 0 || mSensorOrientation == 180) {
swappedDimensions = true;
}
break;
default:
Log.e(TAG, "Display rotation is invalid: " + displayRotation);
}
Point displaySize = new Point();
activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
int rotatedPreviewWidth = width;
int rotatedPreviewHeight = height;
int maxPreviewWidth = displaySize.x;
int maxPreviewHeight = displaySize.y;
if (swappedDimensions) {
rotatedPreviewWidth = height;
rotatedPreviewHeight = width;
maxPreviewWidth = displaySize.y;
maxPreviewHeight = displaySize.x;
}
if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
maxPreviewWidth = MAX_PREVIEW_WIDTH;
}
if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
maxPreviewHeight = MAX_PREVIEW_HEIGHT;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
maxPreviewHeight, largest);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
mFlashSupported = available == null ? false : available;
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (NullPointerException e) {
Log.e(TAG, "设备不支持Camera2");
}
}
/**
* This method should be called after the camera preview size is determined.
*
* @param viewWidth  Width of the TextureView.
* @param viewHeight Height of the TextureView.
*/
private void configureTransform(int viewWidth, int viewHeight) {
if (null == mPreviewSize || null == activity) {
return;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
this.setTransform(matrix);
}
/**
* Chooses a suitable camera preview size.
*
* @param choices           List of supported preview sizes.
* @param textureViewWidth  Width of the texture view.
* @param textureViewHeight Height of the texture view.
* @param maxWidth          Maximum selectable width.
* @param maxHeight         Maximum selectable height.
* @param aspectRatio       Target aspect ratio.
* @return The optimal preview size.
*/
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth, int textureViewHeight,
int maxWidth, int maxHeight, Size aspectRatio) {
List<Size> bigEnough = new ArrayList<>();
List<Size> notBigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight &&
option.getHeight() == option.getWidth() * h / w) {
if (option.getWidth() >= textureViewWidth &&
option.getHeight() >= textureViewHeight) {
bigEnough.add(option);
} else {
notBigEnough.add(option);
}
}
}
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else if (notBigEnough.size() > 0) {
return Collections.max(notBigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
/**
* Creates a new CameraCaptureSession for the camera preview.
*/
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = this.getSurfaceTexture();
assert texture != null;
// Configure the default buffer size to the desired camera preview size.
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
// Create a CameraCaptureSession for the camera preview.
mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
if (null == mCameraDevice) {
return;
}
// The session is ready; start displaying the preview.
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
setAutoFlash(mPreviewRequestBuilder);
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Retrieves the JPEG orientation for the specified screen rotation.
*
* @param rotation Screen rotation.
* @return JPEG orientation (0, 90, 180, or 270).
*/
private int getOrientation(int rotation) {
return (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360;
}
/**
* Locks the focus.
*/
private void lockFocus() {
try {
// Tell the camera to lock focus.
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
// Tell mCaptureCallback to wait for the focus lock.
mState = STATE_WAITING_LOCK;
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Unlocks the focus.
*/
private void unlockFocus() {
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
setAutoFlash(mPreviewRequestBuilder);
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
mBackgroundHandler);
mState = STATE_PREVIEW;
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback,
mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Captures a still picture.
*/
private void captureStillPicture() {
try {
if (null == activity || null == mCameraDevice) {
return;
}
final CaptureRequest.Builder captureBuilder =
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(mImageReader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
setAutoFlash(captureBuilder);
// Set the JPEG orientation.
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));
CameraCaptureSession.CaptureCallback captureCallback
= new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
Toast.makeText(getContext(), "Saved: " + mFile, Toast.LENGTH_SHORT).show();
Log.d(TAG, mFile.toString());
unlockFocus();
}
};
mCaptureSession.stopRepeating();
mCaptureSession.abortCaptures();
mCaptureSession.capture(captureBuilder.build(), captureCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
public void setAutoFlash(CaptureRequest.Builder requestBuilder) {
if (mFlashSupported) {
requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
}
}
public void takePicture() {
// SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
// String date = simpleDateFormat.format(new Date());
mFile = new File(getContext().getExternalFilesDir(null), "temp.jpg");
if (mFile.length() > 0) {
mFile.delete();
}
lockFocus();
}
/**
* Runs the pre-capture sequence before capturing a still image.
*/
private void runPreCaptureSequence() {
try {
// Set the request to trigger the pre-capture sequence.
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
mState = STATE_WAITING_PRE_CAPTURE;
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Compares two sizes by area.
*/
private static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
/**
* Callback of the ImageReader; invoked when a captured image is available.
*/
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
= new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage(), mFile, iCameraDataCallBack));
}
};
/**
* Saves a captured image to the specified file.
*/
private static class ImageSaver implements Runnable {
private final Image mImage;
private final File mFile;
private final ICameraDataCallBack mICameraDataCallBack;
ImageSaver(Image image, File file, ICameraDataCallBack iCameraDataCallBack) {
mImage = image;
mFile = file;
mICameraDataCallBack = iCameraDataCallBack;
}
@Override
public void run() {
if (mFile.length() > 0) {
mFile.delete();
}
ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
FileOutputStream output = null;
try {
output = new FileOutputStream(mFile);
output.write(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
if (null != output) {
try {
output.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (mICameraDataCallBack != null) {
mICameraDataCallBack.takeCameraDataCallBack(mFile);
}
}
}
}
/**
* Closes the camera.
*/
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
public interface ICameraDataCallBack {
void takeCameraDataCallBack(File file);
}
public void addCameraDataCallBack(ICameraDataCallBack iCameraDataCallBack) {
this.iCameraDataCallBack = iCameraDataCallBack;
}
}

View File

@ -0,0 +1,55 @@
package com.mindspore.himindspore.contract;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.himindspore.R;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class ContractActivity extends AppCompatActivity implements View.OnClickListener {
private EditText emailEdit;
private Button submitBtn;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_contract);
emailEdit = findViewById(R.id.emailEditText);
submitBtn = findViewById(R.id.submitBtn);
submitBtn.setOnClickListener(this);
}
@Override
public void onClick(View view) {
if (R.id.submitBtn == view.getId()) {
String email = emailEdit.getText().toString();
if (TextUtils.isEmpty(email)) {
Toast.makeText(ContractActivity.this,"Please input your email!",Toast.LENGTH_LONG).show();
return;
}
if (!isEmailFormat(email)) {
Toast.makeText(ContractActivity.this, "The email address you entered is not in the correct format", Toast.LENGTH_LONG).show();
return;
}
}
}
private boolean isEmailFormat(String emailAdd) {
Pattern p = Pattern.compile("^([a-zA-Z0-9_-])+@([a-zA-Z0-9_-])+(\\.([a-zA-Z0-9_-])+)+$");
Matcher m = p.matcher(emailAdd);
return m.matches();
}
}
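The branch for a correctly formatted address in onClick is currently empty. Presumably the validated address is meant to feed the feedback mail flow added in this commit (SendMailUtil / MailSender); a hedged sketch of what that hand-off could look like follows. The submitFeedback method is hypothetical, and SendMailUtil.send only does useful work once the commented-out SMTP settings in SendMailUtil are filled in.

// Hypothetical hand-off from ContractActivity to the mail utilities in this commit.
// This is a sketch, not the actual wiring; the commit leaves the valid-email branch empty.
private void submitFeedback(String email) {
    // SendMailUtil.send(String) exists in this commit but builds an empty MailInfo
    // until its SMTP configuration is provided.
    SendMailUtil.send(email);
    Toast.makeText(this, "Feedback submitted for " + email, Toast.LENGTH_LONG).show();
}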

View File

@ -0,0 +1,109 @@
package com.mindspore.himindspore.contract.email;
import java.util.Properties;
public class MailInfo {
private String mailServerHost; // IP address of the outgoing mail server.
private String mailServerPort; // Port of the outgoing mail server.
private String fromAddress; // Sender's email address.
private String toAddress; // Recipient's email address.
private String userName; // User name for logging in to the mail server.
private String password; // Password for logging in to the mail server.
private boolean validate = true; // Whether authentication is required.
private String subject; // Mail subject.
private String content; // Mail body text.
private String[] attachFileNames; // File names of the mail attachments.
/**
* Gets the mail session properties.
*/
public Properties getProperties() {
Properties p = new Properties();
p.put("mail.smtp.host", this.mailServerHost);
p.put("mail.smtp.port", this.mailServerPort);
p.put("mail.smtp.auth", validate ? "true" : "false");
return p;
}
public String getMailServerHost() {
return mailServerHost;
}
public void setMailServerHost(String mailServerHost) {
this.mailServerHost = mailServerHost;
}
public String getMailServerPort() {
return mailServerPort;
}
public void setMailServerPort(String mailServerPort) {
this.mailServerPort = mailServerPort;
}
public boolean isValidate() {
return validate;
}
public void setValidate(boolean validate) {
this.validate = validate;
}
public String[] getAttachFileNames() {
return attachFileNames;
}
public void setAttachFileNames(String[] fileNames) {
this.attachFileNames = fileNames;
}
public String getFromAddress() {
return fromAddress;
}
public void setFromAddress(String fromAddress) {
this.fromAddress = fromAddress;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getToAddress() {
return toAddress;
}
public void setToAddress(String toAddress) {
this.toAddress = toAddress;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public String getSubject() {
return subject;
}
public void setSubject(String subject) {
this.subject = subject;
}
public String getContent() {
return content;
}
public void setContent(String textContent) {
this.content = textContent;
}
}

View File

@ -0,0 +1,202 @@
package com.mindspore.himindspore.contract.email;
import android.util.Log;
import java.io.File;
import java.util.Date;
import java.util.Properties;
import javax.activation.DataHandler;
import javax.activation.FileDataSource;
import javax.mail.Address;
import javax.mail.Authenticator;
import javax.mail.BodyPart;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Multipart;
import javax.mail.PasswordAuthentication;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import javax.mail.internet.MimeUtility;
public class MailSender {
/**
* Sends a mail in plain-text format.
*
* @param mailInfo Information of the mail to send.
*/
public boolean sendTextMail(final MailInfo mailInfo) {
// Check whether authentication is required.
MyAuthenticator authenticator = null;
Properties pro = mailInfo.getProperties();
if (mailInfo.isValidate()) {
// If authentication is required, create a password authenticator.
authenticator = new MyAuthenticator(mailInfo.getUserName(), mailInfo.getPassword());
}
// Build a mail-sending session from the session properties and the authenticator.
Session sendMailSession = Session.getDefaultInstance(pro, authenticator);
// Session sendMailSession = Session.getInstance(pro, new Authenticator() {
// @Override
// protected PasswordAuthentication getPasswordAuthentication() {
// return new PasswordAuthentication(mailInfo.getUserName(),mailInfo.getPassword());
// }
// });
try {
// Create a mail message from the session.
Message mailMessage = new MimeMessage(sendMailSession);
// Create the sender address.
Address from = new InternetAddress(mailInfo.getFromAddress());
// Set the sender of the message.
mailMessage.setFrom(from);
// Create the recipient address and set it on the message.
Address to = new InternetAddress(mailInfo.getToAddress());
mailMessage.setRecipient(Message.RecipientType.TO, to);
// Set the subject of the message.
mailMessage.setSubject(mailInfo.getSubject());
// Set the sent date of the message.
mailMessage.setSentDate(new Date());
// Set the main body content of the message.
String mailContent = mailInfo.getContent();
mailMessage.setText(mailContent);
// Send the mail.
Transport.send(mailMessage);
return true;
} catch (MessagingException ex) {
ex.printStackTrace();
}
return false;
}
/**
* Sends a mail in HTML format.
*
* @param mailInfo Information of the mail to send.
*/
public static boolean sendHtmlMail(MailInfo mailInfo) {
// Check whether authentication is required.
MyAuthenticator authenticator = null;
Properties pro = mailInfo.getProperties();
// If authentication is required, create a password authenticator.
if (mailInfo.isValidate()) {
authenticator = new MyAuthenticator(mailInfo.getUserName(), mailInfo.getPassword());
}
// Build a mail-sending session from the session properties and the authenticator.
Session sendMailSession = Session.getDefaultInstance(pro, authenticator);
try {
// Create a mail message from the session.
Message mailMessage = new MimeMessage(sendMailSession);
// Create the sender address.
Address from = new InternetAddress(mailInfo.getFromAddress());
// Set the sender of the message.
mailMessage.setFrom(from);
// Create the recipient address and set it on the message.
Address to = new InternetAddress(mailInfo.getToAddress());
// Message.RecipientType.TO indicates that the recipient type is TO.
mailMessage.setRecipient(Message.RecipientType.TO, to);
// Set the subject of the message.
mailMessage.setSubject(mailInfo.getSubject());
// Set the sent date of the message.
mailMessage.setSentDate(new Date());
// MimeMultipart is a container class that holds MimeBodyPart objects.
Multipart mainPart = new MimeMultipart();
// Create a MimeBodyPart that contains the HTML content.
BodyPart html = new MimeBodyPart();
// Set the HTML content.
html.setContent(mailInfo.getContent(), "text/html; charset=utf-8");
mainPart.addBodyPart(html);
// Set the MimeMultipart object as the mail content.
mailMessage.setContent(mainPart);
// Send the mail.
Transport.send(mailMessage);
return true;
} catch (MessagingException ex) {
ex.printStackTrace();
}
return false;
}
/**
* Sends a mail with an attachment.
*
* @param info Information of the mail to send.
* @param file Attachment file.
* @return Whether the mail was sent successfully.
*/
public boolean sendFileMail(MailInfo info, File file) {
Message attachmentMail = createAttachmentMail(info, file);
try {
Transport.send(attachmentMail);
return true;
} catch (MessagingException e) {
e.printStackTrace();
return false;
}
}
/**
* Creates a mail message with an attachment.
*
* @return The created mail message, or null if creation failed.
*/
private Message createAttachmentMail(final MailInfo info, File file) {
// Create the mail message.
MimeMessage message = null;
Properties pro = info.getProperties();
try {
Session sendMailSession = Session.getInstance(pro, new Authenticator() {
@Override
protected PasswordAuthentication getPasswordAuthentication() {
return new PasswordAuthentication(info.getUserName(), info.getPassword());
}
});
message = new MimeMessage(sendMailSession);
// Set the basic mail information.
// Create the sender address.
Address from = new InternetAddress(info.getFromAddress());
// Set the sender of the message.
message.setFrom(from);
// Create the recipient address and set it on the message.
Address to = new InternetAddress(info.getToAddress());
// Set the recipient; Message.RecipientType.TO indicates that the recipient type is TO.
message.setRecipient(Message.RecipientType.TO, to);
// Mail subject.
message.setSubject(info.getSubject());
// Create the mail body; specify charset UTF-8 to avoid garbled non-ASCII text.
MimeBodyPart text = new MimeBodyPart();
text.setContent(info.getContent(), "text/html;charset=UTF-8");
// Create the multipart container that describes the data relationship.
MimeMultipart mp = new MimeMultipart();
mp.addBodyPart(text);
// Create the mail attachment.
MimeBodyPart attach = new MimeBodyPart();
FileDataSource ds = new FileDataSource(file);
DataHandler dh = new DataHandler(ds);
attach.setDataHandler(dh);
attach.setFileName(MimeUtility.encodeText(dh.getName()));
mp.addBodyPart(attach);
mp.setSubType("mixed");
message.setContent(mp);
message.saveChanges();
} catch (Exception e) {
Log.i("TAG","创建带附件的邮件失败");
e.printStackTrace();
}
// Return the generated mail message.
return message;
}
}

View File

@ -0,0 +1,21 @@
package com.mindspore.himindspore.contract.email;
import javax.mail.Authenticator;
import javax.mail.PasswordAuthentication;
public class MyAuthenticator extends Authenticator {
String userName = null;
String password = null;
public MyAuthenticator() {
}
public MyAuthenticator(String username, String password) {
this.userName = username;
this.password = password;
}
protected PasswordAuthentication getPasswordAuthentication() {
return new PasswordAuthentication(userName, password);
}
}

View File

@ -0,0 +1,51 @@
package com.mindspore.himindspore.contract.email;
import androidx.annotation.NonNull;
import java.io.File;
public class SendMailUtil {
public static void send(final File file, String toAdd) {
final MailInfo mailInfo = creatMail(toAdd);
final MailSender sms = new MailSender();
new Thread(new Runnable() {
@Override
public void run() {
sms.sendFileMail(mailInfo, file);
}
}).start();
}
public static void send(String toAdd) {
final MailInfo mailInfo = creatMail(toAdd);
final MailSender sms = new MailSender();
new Thread(new Runnable() {
@Override
public void run() {
sms.sendTextMail(mailInfo);
}
}).start();
}
@NonNull
private static MailInfo creatMail(String toAdd) {
// String HOST = ShareUtils.getString(MyApplication.getInstance(), "HOST", "");
// String PORT = ShareUtils.getString(MyApplication.getInstance(), "PORT", "");
// String FROM_ADD = ShareUtils.getString(MyApplication.getInstance(), "FROM_ADD", "");
// String FROM_PSW = ShareUtils.getString(MyApplication.getInstance(), "FROM_PSW", "");
final MailInfo mailInfo = new MailInfo();
// mailInfo.setMailServerHost(HOST); // Sender's mail server.
// mailInfo.setMailServerPort(PORT); // Sender's mail server port.
// mailInfo.setValidate(true);
// mailInfo.setUserName(FROM_ADD); // Sender's mail account.
// mailInfo.setPassword(FROM_PSW); // Sender's mail authorization code.
// mailInfo.setFromAddress(FROM_ADD); // Sender's address.
// mailInfo.setToAddress(toAdd); // Recipient's address.
// mailInfo.setSubject("Android app test"); // Mail subject.
// mailInfo.setContent("Haha"); // Mail body text.
return mailInfo;
}
}
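creatMail currently returns an empty MailInfo because all of its configuration lines are commented out. For reference, here is a sketch of what a populated configuration inside creatMail might look like; every host, port, and credential value below is a placeholder, not a value taken from this repository.

// Hypothetical configuration; all literal values are placeholders.
MailInfo mailInfo = new MailInfo();
mailInfo.setMailServerHost("smtp.example.com");  // sender's SMTP server (placeholder)
mailInfo.setMailServerPort("465");               // SMTP port (placeholder)
mailInfo.setValidate(true);                      // authenticate against the server
mailInfo.setUserName("feedback@example.com");    // sender account (placeholder)
mailInfo.setPassword("app-authorization-code");  // authorization code (placeholder)
mailInfo.setFromAddress("feedback@example.com"); // sender address (placeholder)
mailInfo.setToAddress(toAdd);                    // recipient passed in by the caller
mailInfo.setSubject("HiMindSpore feedback");     // placeholder subject
mailInfo.setContent("User feedback body");       // placeholder body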

View File

@ -0,0 +1,46 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.himindspore.imageclassification.bean;
public class RecognitionImageBean {
private String name;
private float score;
public RecognitionImageBean(String name, float score) {
this.name = name;
this.score = score;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public float getScore() {
return score;
}
public void setScore(float score) {
this.score = score;
}
}

View File

@ -0,0 +1,130 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.himindspore.imageclassification.help;
import android.content.Context;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.util.Log;
import com.mindspore.himindspore.track.TrackListener;
import java.io.InputStream;
import java.nio.ByteBuffer;
/**
* Call the MindSpore interface API in the Java layer.
*/
public class GarbageTrackingMobile implements TrackListener {
private final static String TAG = "GarbageTrackingMobile";
static {
try {
System.loadLibrary("mlkit-label-MS");
Log.i(TAG, "load libiMindSpore.so successfully.");
} catch (UnsatisfiedLinkError e) {
Log.e(TAG, "UnsatisfiedLinkError " + e.getMessage());
}
}
// The address of the running inference environment.
private long netEnv = 0;
private final Context mActivity;
public GarbageTrackingMobile(Context activity) {
this.mActivity = activity;
}
/**
* JNI load model and also create model inference environment.
*
* @param modelBuffer Model buffer.
* @param numThread Number of threads.
* @return MindSpore Inference environment address.
*/
public native long loadModel(ByteBuffer modelBuffer, int numThread);
/**
* Running model.
*
* @param netEnv Inference environment address.
* @param img A picture to be inferred.
* @return Inference result
*/
public native String runNet(long netEnv, Bitmap img);
/**
* Unbind model data.
*
* @param netEnv Inference environment address.
* @return Unbound state.
*/
public native boolean unloadModel(long netEnv);
/**
* The C++ side is encapsulated into a method of the MSNetWorks class
*
* @param modelPath Model file location
* @return Load model file status
*/
public boolean loadModelFromBuf(String modelPath) {
ByteBuffer buffer = loadModelFile(modelPath);
netEnv = loadModel(buffer, 2); //numThread's default setting is 2.
if (netEnv == 0){ // Loading model failed.
return false;
}
return true;
}
/**
* Run MindSpore inference.
*/
public String MindSpore_runnet(Bitmap img) {
String ret_str = runNet(netEnv, img);
return ret_str;
}
/**
* Unload model.
* @return true
*/
public boolean unloadModel() {
unloadModel(netEnv);
return true;
}
/**
* Load model file stream.
* @param modelPath Model file path.
* @return Model ByteBuffer.
*/
public ByteBuffer loadModelFile(String modelPath) {
InputStream is = null;
try {
is = mActivity.getAssets().open(modelPath);
byte[] bytes = new byte[is.available()];
is.read(bytes);
return ByteBuffer.allocateDirect(bytes.length).put(bytes);
} catch (Exception e) {
Log.d("loadModelFile", " Exception occur. ");
Log.e(TAG, Log.getStackTraceString(e));
}
return null;
}
}

View File

@ -0,0 +1,129 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.himindspore.imageclassification.help;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import com.mindspore.himindspore.track.TrackListener;
import java.io.InputStream;
import java.nio.ByteBuffer;
/**
* Call the MindSpore interface API in the Java layer.
*/
public class ImageTrackingMobile implements TrackListener {
private final static String TAG = "ImageTrackingMobile";
static {
try {
System.loadLibrary("mlkit-label-MS");
Log.i(TAG, "load libiMindSpore.so successfully.");
} catch (UnsatisfiedLinkError e) {
Log.e(TAG, "UnsatisfiedLinkError " + e.getMessage());
}
}
// The address of the running inference environment.
private long netEnv = 0;
private final Context mActivity;
public ImageTrackingMobile(Context activity) {
this.mActivity = activity;
}
/**
* JNI load model and also create model inference environment.
*
* @param modelBuffer Model buffer.
* @param numThread Number of threads.
* @return MindSpore Inference environment address.
*/
public native long loadModel(ByteBuffer modelBuffer, int numThread);
/**
* Running model.
*
* @param netEnv Inference environment address.
* @param img A picture to be inferred.
* @return Inference result
*/
public native String runNet(long netEnv, Bitmap img);
/**
* Unbind model data.
*
* @param netEnv Inference environment address.
* @return Unbound state.
*/
public native boolean unloadModel(long netEnv);
/**
* The C++ side is encapsulated into a method of the MSNetWorks class
*
* @param modelPath Model file location
* @return Load model file status
*/
public boolean loadModelFromBuf(String modelPath) {
ByteBuffer buffer = loadModelFile(modelPath);
netEnv = loadModel(buffer, 2); //numThread's default setting is 2.
if (netEnv == 0){ // Loading model failed.
return false;
}
return true;
}
/**
* Run MindSpore inference.
*/
public String MindSpore_runnet(Bitmap img) {
String ret_str = runNet(netEnv, img);
return ret_str;
}
/**
* Unload model.
* @return true
*/
public boolean unloadModel() {
unloadModel(netEnv);
return true;
}
/**
* Load model file stream.
* @param modelPath Model file path.
* @return Model ByteBuffer.
*/
public ByteBuffer loadModelFile(String modelPath) {
InputStream is = null;
try {
is = mActivity.getAssets().open(modelPath);
byte[] bytes = new byte[is.available()];
is.read(bytes);
return ByteBuffer.allocateDirect(bytes.length).put(bytes);
} catch (Exception e) {
Log.d("loadModelFile", " Exception occur. ");
Log.e(TAG, Log.getStackTraceString(e));
}
return null;
}
}
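ImageTrackingMobile and GarbageTrackingMobile expose the same load/run/unload pattern around the JNI layer. ImageCameraActivity below drives it indirectly through CameraPreview, but as a compact reference, here is a minimal standalone sketch; "context" and "bitmap" are assumed to be supplied by the caller, and the model path is the asset path used elsewhere in this demo.

// Minimal sketch of the wrapper lifecycle (mirrors what ImageCameraActivity does via CameraPreview).
ImageTrackingMobile tracker = new ImageTrackingMobile(context);
boolean loaded = tracker.loadModelFromBuf("model/mobilenetv2.ms"); // load the .ms model from assets
if (loaded) {
    // runNet returns entries like "label:score;label:score;..." that the demo UI splits and sorts.
    String result = tracker.MindSpore_runnet(bitmap);
    Log.d("Demo", "inference result: " + result);
}
tracker.unloadModel(); // release the native inference environment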

View File

@ -0,0 +1,73 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.himindspore.imageclassification.ui;
import android.content.Context;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.TextView;
import androidx.annotation.Nullable;
import com.mindspore.himindspore.R;
public class HorTextView extends LinearLayout {
private TextView tvLeftTitle;
public TextView getTvRightContent() {
return tvRightContent;
}
private TextView tvRightContent;
private View viewBottomLine;
public HorTextView(Context context) {
this(context, null);
}
public HorTextView(Context context, @Nullable AttributeSet attrs) {
this(context, attrs, 0);
}
public HorTextView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
LayoutInflater.from(context).inflate(R.layout.layout_hor_text_view, this);
tvLeftTitle = findViewById(R.id.tv_left_title);
tvRightContent = findViewById(R.id.tv_right_content);
viewBottomLine = findViewById(R.id.view_bottom_line);
}
public void setLeftTitle(String title) {
tvLeftTitle.setText(title);
}
public void setRightContent(String content) {
tvRightContent.setText(content);
}
public void setBottomLineVisible(int isVisible) {
viewBottomLine.setVisibility(isVisible);
}
public TextView getTvLeftTitle() {
return tvLeftTitle;
}
}

View File

@ -0,0 +1,224 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.himindspore.imageclassification.ui;
import android.graphics.Color;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.CompoundButton;
import android.widget.LinearLayout;
import android.widget.Switch;
import android.widget.TextView;
import androidx.annotation.UiThread;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.himindspore.R;
import com.mindspore.himindspore.camera.CameraPreview;
import com.mindspore.himindspore.imageclassification.bean.RecognitionImageBean;
import com.mindspore.himindspore.imageclassification.help.GarbageTrackingMobile;
import com.mindspore.himindspore.imageclassification.help.ImageTrackingMobile;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* The main interface of camera preview.
* Using Camera 2 API.
*/
public class ImageCameraActivity extends AppCompatActivity implements CameraPreview.RecognitionDataCallBack {
private static final String TAG = "ImageCameraActivity";
public static final String OPEN_TYPE = "OPEN_TYPE";
public static final int TYPE_DEMO = 1;
public static final int TYPE_CUSTOM = 2;
private int enterType;
private LinearLayout bottomLayout;
private List<RecognitionImageBean> recognitionObjectBeanList;
private CameraPreview cameraPreview;
private ImageTrackingMobile mTrackingMobile;
private GarbageTrackingMobile garbageTrackingMobile;
@Override
protected void onCreate( Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_image_camera);
enterType = getIntent().getIntExtra(OPEN_TYPE,TYPE_DEMO);
cameraPreview = findViewById(R.id.image_camera_preview);
bottomLayout = findViewById(R.id.layout_bottom_content);
cameraPreview.setVisibility(View.VISIBLE);
init();
}
private void init() {
if(enterType == TYPE_DEMO) {
mTrackingMobile = new ImageTrackingMobile(this);
String modelPath = "model/mobilenetv2.ms";
boolean ret = mTrackingMobile.loadModelFromBuf(modelPath);
Log.d(TAG, "Loading model return value: " + ret);
}else {
garbageTrackingMobile = new GarbageTrackingMobile(this);
String garbageModelPath = "model/garbage_mobilenetv2.ms";
boolean garbageRet = garbageTrackingMobile.loadModelFromBuf(garbageModelPath);
Log.d(TAG, "Garbage Loading model return value: " + garbageRet);
}
cameraPreview.addImageRecognitionDataCallBack(this);
}
@Override
protected void onResume() {
super.onResume();
if(enterType == TYPE_DEMO) {
cameraPreview.onResume(this, CameraPreview.OPEN_TYPE_IMAGE, mTrackingMobile);
}else{
cameraPreview.onResume(this, CameraPreview.OPEN_TYPE_IMAGE_CUSTOM, garbageTrackingMobile);
}
}
@Override
protected void onPause() {
super.onPause();
cameraPreview.onPause();
}
@Override
protected void onStop() {
super.onStop();
if (mTrackingMobile != null) {
boolean ret = mTrackingMobile.unloadModel();
Log.d(TAG, "Unload model return value: " + ret);
}
if (garbageTrackingMobile != null) {
boolean ret = garbageTrackingMobile.unloadModel();
Log.d(TAG, "garbageTrackingMobile Unload model return value: " + ret);
}
}
@Override
public void onRecognitionDataCallBack(final String result, final String time) {
if(enterType == TYPE_DEMO) {
if (recognitionObjectBeanList != null) {
recognitionObjectBeanList.clear();
} else {
recognitionObjectBeanList = new ArrayList<>();
}
if (!result.equals("")) {
String[] resultArray = result.split(";");
for (String singleRecognitionResult : resultArray) {
String[] singleResult = singleRecognitionResult.split(":");
float score = Float.parseFloat(singleResult[1]);
if (score > 0.5) {
recognitionObjectBeanList.add(new RecognitionImageBean(singleResult[0], score));
}
}
Collections.sort(recognitionObjectBeanList, new Comparator<RecognitionImageBean>() {
@Override
public int compare(RecognitionImageBean t1, RecognitionImageBean t2) {
return Float.compare(t2.getScore(), t1.getScore());
}
});
}
runOnUiThread(new Runnable() {
@Override
public void run() {
showResultsInBottomSheet(recognitionObjectBeanList, time);
}
});
} else {
runOnUiThread(new Runnable() {
@Override
public void run() {
showResultsInBottomSheetGarbage(result, time);
}
});
}
}
@UiThread
protected void showResultsInBottomSheet(List<RecognitionImageBean> list, String time) {
bottomLayout.removeAllViews();
if (list != null && list.size() > 0) {
int classNum = 0;
for (RecognitionImageBean bean : list) {
classNum++;
HorTextView horTextView = new HorTextView(this);
horTextView.setLeftTitle(bean.getName());
horTextView.setRightContent(String.format("%.2f", (100 * bean.getScore())) + "%");
horTextView.setBottomLineVisible(View.VISIBLE);
if (classNum == 1){
horTextView.getTvLeftTitle().setTextColor(getResources().getColor(R.color.text_blue));
horTextView.getTvRightContent().setTextColor(getResources().getColor(R.color.text_blue));
}else{
horTextView.getTvLeftTitle().setTextColor(getResources().getColor(R.color.white));
horTextView.getTvRightContent().setTextColor(getResources().getColor(R.color.white));
}
bottomLayout.addView(horTextView);
if (classNum > 4) { // set maximum display is 5.
break;
}
}
HorTextView horTextView = new HorTextView(this);
horTextView.setLeftTitle(getResources().getString(R.string.title_time));
horTextView.setRightContent(time);
horTextView.setBottomLineVisible(View.INVISIBLE);
horTextView.getTvLeftTitle().setTextColor(getResources().getColor(R.color.text_blue));
horTextView.getTvRightContent().setTextColor(getResources().getColor(R.color.text_blue));
bottomLayout.addView(horTextView);
} else {
showLoadView();
}
}
@UiThread
protected void showResultsInBottomSheetGarbage(String result, String time) {
bottomLayout.removeAllViews();
if (!TextUtils.isEmpty(result)) {
HorTextView horTextView = new HorTextView(this);
horTextView.setLeftTitle(result);
horTextView.setBottomLineVisible(View.VISIBLE);
bottomLayout.addView(horTextView);
} else {
showLoadView();
}
}
private void showLoadView(){
TextView textView = new TextView(this);
textView.setLayoutParams(new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
textView.setText("Keep moving.");
textView.setGravity(Gravity.CENTER);
textView.setTextColor(Color.WHITE);
textView.setTextSize(30);
bottomLayout.addView(textView);
}
}
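Note: onRecognitionDataCallBack above assumes the classification result arrives as a semicolon-separated list of label:score pairs. A minimal plain-Java sketch of that parsing, using a made-up result string and the same 0.5 threshold as the activity:

// Sketch only: the sample string is hypothetical; the split/threshold logic mirrors onRecognitionDataCallBack.
public class ClassificationResultSketch {
    public static void main(String[] args) {
        String result = "cat:0.82;dog:0.11;plant:0.03"; // hypothetical JNI output
        for (String item : result.split(";")) {
            String[] pair = item.split(":");            // pair[0] = label, pair[1] = score
            float score = Float.parseFloat(pair[1]);
            if (score > 0.5) {                          // same threshold as the activity
                System.out.println(pair[0] + " -> " + score);
            }
        }
    }
}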

View File

@ -0,0 +1,37 @@
package com.mindspore.himindspore.imageclassification.ui;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import com.mindspore.himindspore.R;
public class ImageMainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_image_main);
findViewById(R.id.btn_demo).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(ImageMainActivity.this,ImageCameraActivity.class);
intent.putExtra(ImageCameraActivity.OPEN_TYPE,ImageCameraActivity.TYPE_DEMO);
startActivity(intent);
}
});
findViewById(R.id.btn_custom).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(ImageMainActivity.this,ImageCameraActivity.class);
intent.putExtra(ImageCameraActivity.OPEN_TYPE,ImageCameraActivity.TYPE_CUSTOM);
startActivity(intent);
}
});
}
}

View File

@ -0,0 +1,159 @@
package com.mindspore.himindspore.objectdetection.bean;
import android.text.TextUtils;
import java.util.ArrayList;
import java.util.List;
public class RecognitionObjectBean {
private String rectID;
private String imgID;
private String objectName;
private float score;
private float left;
private float top;
private float right;
private float bottom;
private RecognitionObjectBean(Builder builder){
this.rectID = builder.rectID;
this.imgID = builder.imgID;
this.objectName = builder.objectName;
this.score = builder.score;
this.left = builder.left;
this.top = builder.top;
this.right = builder.right;
this.bottom = builder.bottom;
}
public static class Builder {
private String rectID;
private String imgID;
private String objectName;
private float score;
private float left;
private float top;
private float right;
private float bottom;
public RecognitionObjectBean build(){
return new RecognitionObjectBean(this);
}
public Builder setRectID(String rectID) {
this.rectID = rectID;
return this;
}
public Builder setImgID(String imgID) {
this.imgID = imgID;
return this;
}
public Builder setObjectName(String objectName) {
this.objectName = objectName;
return this;
}
public Builder setScore(float score) {
this.score = score;
return this;
}
public Builder setLeft(float left) {
this.left = left;
return this;
}
public Builder setTop(float top) {
this.top = top;
return this;
}
public Builder setRight(float right) {
this.right = right;
return this;
}
public Builder setBottom(float bottom) {
this.bottom = bottom;
return this;
}
}
public String getImgID() {
return imgID;
}
public String getRectID() {
return rectID;
}
public String getObjectName() {
return objectName;
}
public float getScore() {
return score;
}
public float getLeft() {
return left;
}
public float getTop() {
return top;
}
public float getRight() {
return right;
}
public float getBottom() {
return bottom;
}
public static List<RecognitionObjectBean> getRecognitionList(String result) {
if (!TextUtils.isEmpty(result)) {
String[] resultArray = result.split(";");
List<RecognitionObjectBean> list = new ArrayList<>();
for (int i = 0; i < resultArray.length; i++) {
String singleRecognitionResult = resultArray[i];
String[] singleResult = singleRecognitionResult.split("_");
RecognitionObjectBean bean = new RecognitionObjectBean.Builder()
.setRectID(String.valueOf(i + 1))
.setImgID(null != getData(0, singleResult) ? getData(0, singleResult) : "")
.setObjectName(null != getData(1, singleResult) ? getData(1, singleResult) : "")
.setScore(null != getData(2, singleResult) ? Float.parseFloat(getData(2, singleResult)) : 0)
.setLeft(null != getData(3, singleResult) ? Float.parseFloat(getData(3, singleResult)) : 0)
.setTop(null != getData(4, singleResult) ? Float.parseFloat(getData(4, singleResult)) : 0)
.setRight(null != getData(5, singleResult) ? Float.parseFloat(getData(5, singleResult)) : 0)
.setBottom(null != getData(6, singleResult) ? Float.parseFloat(getData(6, singleResult)) : 0)
.build();
list.add(bean);
}
return list;
} else {
return null;
}
}
/**
     * Safely reads one field from the split result of a single detection.
     *
     * @param index        field index
     * @param singleResult fields of a single detection result
     * @return the field value, or null if it is missing or empty
*/
private static String getData(int index, String[] singleResult) {
        if (index >= singleResult.length) {
return null;
} else {
if (!TextUtils.isEmpty(singleResult[index])) {
return singleResult[index];
}
}
return null;
}
}
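Note: getRecognitionList above expects each detected object as underscore-joined fields in the order imgID_objectName_score_left_top_right_bottom, with objects separated by semicolons. A minimal plain-Java sketch of that format, using a hypothetical result string:

// Sketch only: the sample string is made up; the field order follows the parsing in getRecognitionList.
public class DetectionResultSketch {
    public static void main(String[] args) {
        String result = "0_person_0.93_12_40_220_480;0_dog_0.71_230_300_400_470";
        for (String single : result.split(";")) {
            String[] f = single.split("_"); // imgID, objectName, score, left, top, right, bottom
            System.out.printf("%s score=%s box=[%s, %s, %s, %s]%n", f[1], f[2], f[3], f[4], f[5], f[6]);
        }
    }
}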

View File

@ -0,0 +1,185 @@
package com.mindspore.himindspore.objectdetection.help;
import android.annotation.SuppressLint;
import android.content.ContentUris;
import android.content.Context;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.provider.DocumentsContract;
import android.provider.MediaStore;
import java.io.IOException;
public class ImageDegreeHelper {
/**
     * Gets the absolute file path from a Uri; designed for Android 4.4 (KitKat) and above, since the older approach no longer works.
*/
@SuppressLint("NewApi")
public static String getPath(final Context context, final Uri uri) {
final boolean isKitKat = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT;
// DocumentProvider
if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) {
// ExternalStorageProvider
if (isExternalStorageDocument(uri)) {
final String docId = DocumentsContract.getDocumentId(uri);
final String[] split = docId.split(":");
final String type = split[0];
if ("primary".equalsIgnoreCase(type)) {
return Environment.getExternalStorageDirectory() + "/" + split[1];
}
// TODO handle non-primary volumes
}
// DownloadsProvider
else if (isDownloadsDocument(uri)) {
final String id = DocumentsContract.getDocumentId(uri);
final Uri contentUri = ContentUris.withAppendedId(
Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
return getDataColumn(context, contentUri, null, null);
}
// MediaProvider
else if (isMediaDocument(uri)) {
final String docId = DocumentsContract.getDocumentId(uri);
final String[] split = docId.split(":");
final String type = split[0];
Uri contentUri = null;
if ("image".equals(type)) {
contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
} else if ("video".equals(type)) {
contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
} else if ("audio".equals(type)) {
contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
}
final String selection = "_id=?";
final String[] selectionArgs = new String[]{split[1]};
return getDataColumn(context, contentUri, selection, selectionArgs);
}
}
// MediaStore (and general)
else if ("content".equalsIgnoreCase(uri.getScheme())) {
return getDataColumn(context, uri, null, null);
}
// File
else if ("file".equalsIgnoreCase(uri.getScheme())) {
return uri.getPath();
}
return null;
}
/**
* Get the value of the data column for this Uri. This is useful for
* MediaStore Uris, and other file-based ContentProviders.
*
* @param context The context.
* @param uri The Uri to query.
* @param selection (Optional) Filter used in the query.
* @param selectionArgs (Optional) Selection arguments used in the query.
* @return The value of the _data column, which is typically a file path.
*/
public static String getDataColumn(Context context, Uri uri, String selection,
String[] selectionArgs) {
Cursor cursor = null;
final String column = "_data";
final String[] projection = {column};
try {
cursor = context.getContentResolver().query(uri, projection, selection, selectionArgs,
null);
if (cursor != null && cursor.moveToFirst()) {
final int column_index = cursor.getColumnIndexOrThrow(column);
return cursor.getString(column_index);
}
} finally {
if (cursor != null)
cursor.close();
}
return null;
}
/**
* @param uri The Uri to check.
* @return Whether the Uri authority is ExternalStorageProvider.
*/
public static boolean isExternalStorageDocument(Uri uri) {
return "com.android.externalstorage.documents".equals(uri.getAuthority());
}
/**
* @param uri The Uri to check.
* @return Whether the Uri authority is DownloadsProvider.
*/
public static boolean isDownloadsDocument(Uri uri) {
return "com.android.providers.downloads.documents".equals(uri.getAuthority());
}
/**
* @param uri The Uri to check.
* @return Whether the Uri authority is MediaProvider.
*/
public static boolean isMediaDocument(Uri uri) {
return "com.android.providers.media.documents".equals(uri.getAuthority());
}
/**
     * Reads the rotation angle from a photo's EXIF data.
     *
     * @param path path of the photo
     * @return rotation angle in degrees
*/
public static int readPictureDegree(String path) {
int degree = 0;
try {
ExifInterface exifInterface = new ExifInterface(path);
int orientation = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
switch (orientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
degree = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
degree = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
degree = 270;
break;
}
} catch (IOException e) {
e.printStackTrace();
}
return degree;
}
public static Bitmap rotaingImageView(int angle, Bitmap bitmap) {
Bitmap returnBm = null;
        // Build a rotation matrix from the given angle.
Matrix matrix = new Matrix();
matrix.postRotate(angle);
try {
            // Rotate the original bitmap with the matrix to produce a new bitmap.
returnBm = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
} catch (OutOfMemoryError e) {
}
if (returnBm == null) {
returnBm = bitmap;
}
if (bitmap != returnBm) {
bitmap.recycle();
}
return returnBm;
}
}

View File

@ -0,0 +1,120 @@
package com.mindspore.himindspore.objectdetection.help;
import android.content.Context;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.util.Log;
import com.mindspore.himindspore.track.TrackListener;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.HashMap;
public class ObjectTrackingMobile implements TrackListener {
private final static String TAG = "ObjectTrackingMobile";
static {
try {
System.loadLibrary("mlkit-label-MS");
Log.i(TAG, "load libiMindSpore.so successfully.");
} catch (UnsatisfiedLinkError e) {
Log.e(TAG, "UnsatisfiedLinkError " + e.getMessage());
}
}
public static HashMap<Integer, String> synset_words_map = new HashMap<>();
public static float[] threshold = new float[494];
private long netEnv = 0;
private final Context mActivity;
public ObjectTrackingMobile(Context activity) throws FileNotFoundException {
this.mActivity = activity;
}
/**
     * Loads the model through JNI.
     *
     * @param assetManager assetManager
     * @param buffer       model file buffer
     * @param numThread    number of threads
     * @return handle to the loaded model data
*/
public native long loadModel(AssetManager assetManager, ByteBuffer buffer, int numThread);
/**
     * Runs inference through JNI.
     *
     * @param netEnv handle to the loaded model data
     * @param img    current image
     * @return inference result string
*/
public native String runNet(long netEnv, Bitmap img);
/**
     * Unloads the model data through JNI.
     *
     * @param netEnv handle to the loaded model data
     * @return whether unloading succeeded
*/
public native boolean unloadModel(long netEnv);
/**
     * Wrapped on the C++ side as methods of the MSNetWorks class.
     *
     * @param assetManager asset manager used to locate the model file
     * @return whether the model file was loaded
*/
public boolean loadModelFromBuf(AssetManager assetManager) {
// String ModelPath = "model/model_hebing_3branch.ms";
String ModelPath = "model/ssd.ms";
ByteBuffer buffer = loadModelFile(ModelPath);
netEnv = loadModel(assetManager, buffer, 2);
return true;
}
/**
     * Runs MindSpore inference.
     *
     * @param img image to recognize
     * @return recognized result text
*/
public String MindSpore_runnet(Bitmap img) {
String ret_str = runNet(netEnv, img);
return ret_str;
}
/**
     * Unloads the model.
     *
     * @return true
*/
public boolean unloadModel() {
unloadModel(netEnv);
return true;
}
/**
     * Loads the model file into a direct byte buffer.
     *
     * @param modelPath path of the model file
     * @return buffer containing the model file contents
*/
public ByteBuffer loadModelFile(String modelPath) {
InputStream is = null;
try {
is = mActivity.getAssets().open(modelPath);
byte[] bytes = new byte[is.available()];
is.read(bytes);
return ByteBuffer.allocateDirect(bytes.length).put(bytes);
} catch (Exception e) {
Log.d("loadModelFile", " Exception occur ");
e.printStackTrace();
}
return null;
}
}
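Note: a minimal usage sketch of the wrapper above, assuming it runs inside the app where a Context and Bitmap are available; the helper class name and log tag are illustrative.

// Sketch only: mirrors the load -> run -> unload sequence used by PhotoActivity in this commit.
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import java.io.FileNotFoundException;

class ObjectTrackingSketch {
    static String detectOnce(Context context, Bitmap frame) throws FileNotFoundException {
        ObjectTrackingMobile tracker = new ObjectTrackingMobile(context);
        boolean loaded = tracker.loadModelFromBuf(context.getAssets()); // loads model/ssd.ms from assets
        String result = loaded ? tracker.MindSpore_runnet(frame) : "";
        Log.d("ObjectTrackingSketch", "result: " + result);
        tracker.unloadModel();                                          // release the native model handle
        return result;
    }
}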

View File

@ -0,0 +1,87 @@
package com.mindspore.himindspore.objectdetection.ui;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.himindspore.R;
import com.mindspore.himindspore.camera.CameraPreview;
import com.mindspore.himindspore.imageclassification.help.ImageTrackingMobile;
import com.mindspore.himindspore.objectdetection.bean.RecognitionObjectBean;
import com.mindspore.himindspore.objectdetection.help.ObjectTrackingMobile;
import java.io.FileNotFoundException;
import java.util.List;
import static com.mindspore.himindspore.objectdetection.bean.RecognitionObjectBean.getRecognitionList;
/**
 * Main entry page for object detection.
 *
 * Feeds camera images to JNI to exercise MindSpore model loading, inference, and so on.
*/
public class ObjectCameraActivity extends AppCompatActivity implements CameraPreview.RecognitionDataCallBack {
private final String TAG = "ObjectCameraActivity";
private CameraPreview cameraPreview;
private ObjectTrackingMobile mTrackingMobile;
private ObjectRectView mObjectRectView;
private List<RecognitionObjectBean> recognitionObjectBeanList;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_object_camera);
cameraPreview = findViewById(R.id.camera_preview);
mObjectRectView = findViewById(R.id.objRectView);
init();
}
private void init() {
try {
mTrackingMobile = new ObjectTrackingMobile(this);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
boolean ret = mTrackingMobile.loadModelFromBuf(getAssets());
Log.d(TAG, "TrackingMobile loadModelFromBuf: " + ret);
cameraPreview.addImageRecognitionDataCallBack(this);
}
@Override
protected void onResume() {
super.onResume();
cameraPreview.onResume(this,CameraPreview.OPEN_TYPE_OBJECT,mTrackingMobile);
}
@Override
protected void onPause() {
super.onPause();
cameraPreview.onPause();
}
@Override
public void onRecognitionDataCallBack(String result, String time) {
if (TextUtils.isEmpty(result)) {
mObjectRectView.clearCanvas();
return;
}
Log.d(TAG,result);
recognitionObjectBeanList = getRecognitionList(result);
mObjectRectView.setInfo(recognitionObjectBeanList);
}
}

View File

@ -0,0 +1,87 @@
package com.mindspore.himindspore.objectdetection.ui;
import android.Manifest;
import android.content.Intent;
import android.os.Bundle;
import android.provider.MediaStore;
import android.view.View;
import android.widget.Button;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import com.mindspore.himindspore.R;
public class ObjectDetectionMainActivity extends AppCompatActivity implements View.OnClickListener {
private static final int RC_CHOOSE_PHOTO = 1;
private static final int REQUEST_CAMERA_PERMISSION = 2;
private static final int REQUEST_PHOTO_PERMISSION = 3;
private Button btnPhoto, btnCamera;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_object_detection_main);
btnPhoto = findViewById(R.id.btn_photo);
btnCamera = findViewById(R.id.btn_camera);
btnPhoto.setOnClickListener(this);
btnCamera.setOnClickListener(this);
}
@Override
public void onClick(View view) {
if (R.id.btn_photo == view.getId()) {
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.READ_PHONE_STATE}, REQUEST_PHOTO_PERMISSION);
} else if (R.id.btn_camera == view.getId()) {
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.READ_PHONE_STATE, Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
}
}
/**
     * Callback for the permission request result.
*/
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
if (REQUEST_PHOTO_PERMISSION == requestCode) {
choosePhoto();
} else if (REQUEST_CAMERA_PERMISSION == requestCode) {
chooseCamera();
}
}
private void choosePhoto() {
Intent intentToPickPic = new Intent(Intent.ACTION_PICK, null);
intentToPickPic.setDataAndType(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, "image/*");
startActivityForResult(intentToPickPic, RC_CHOOSE_PHOTO);
}
private void chooseCamera() {
Intent intent = new Intent(ObjectDetectionMainActivity.this, ObjectCameraActivity.class);
startActivity(intent);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (RC_CHOOSE_PHOTO == requestCode && null != data && null != data.getData()) {
Intent intent = new Intent(ObjectDetectionMainActivity.this, PhotoActivity.class);
intent.setData(data.getData());
startActivity(intent);
}
}
}

View File

@ -0,0 +1,115 @@
package com.mindspore.himindspore.objectdetection.ui;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import com.mindspore.himindspore.R;
import com.mindspore.himindspore.objectdetection.bean.RecognitionObjectBean;
import com.mindspore.himindspore.utils.DisplayUtil;
import java.util.ArrayList;
import java.util.List;
/**
 * View that draws bounding rectangles for object detection results.
 * <p>
 * APIs used:
 * 1. Canvas: the drawing surface attached to a given View; its methods draw the shapes.
 * 2. Paint: the brush used on the Canvas; it sets the stroke color, width, fill style, and so on.
*/
public class ObjectRectView extends View {
private final String TAG = "ObjectRectView";
private List<RecognitionObjectBean> mRecognitions = new ArrayList<>();
private Paint mPaint = null;
    // Rectangle area for the bounding box.
private RectF mObjRectF;
private Context context;
public ObjectRectView(Context context) {
this(context,null);
}
public ObjectRectView(Context context, AttributeSet attrs) {
this(context, attrs,0);
}
public ObjectRectView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
this.context =context;
initialize();
}
private static final int[] MyColor ={R.color.white,R.color.text_blue,R.color.text_yellow,R.color.text_orange,R.color.text_green};
private void initialize() {
mObjRectF = new RectF();
mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
mPaint.setTextSize(DisplayUtil.sp2px(context,16));
        // Draw only the outline (stroke).
mPaint.setStyle(Style.STROKE);
mPaint.setStrokeWidth(DisplayUtil.dip2px(context,2));
}
/**
     * Sets the recognition results to be drawn.
     *
     * @param recognitions list of detected objects
*/
public void setInfo(List<RecognitionObjectBean> recognitions) {
Log.i(TAG, "setInfo: "+recognitions.size());
mRecognitions.clear();
mRecognitions.addAll(recognitions);
        // Trigger a redraw.
invalidate();
}
public void clearCanvas(){
mRecognitions.clear();
        // Trigger a redraw.
invalidate();
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (mRecognitions == null || mRecognitions.size() == 0) {
return;
}
for (int i = 0;i<mRecognitions.size();i++){
RecognitionObjectBean bean = mRecognitions.get(i);
mPaint.setColor(context.getResources().getColor(MyColor[i % MyColor.length]));
drawRect(bean, canvas);
}
}
public void drawRect(RecognitionObjectBean bean, Canvas canvas) {
StringBuilder sb = new StringBuilder();
sb.append(bean.getRectID()).append("_").append(bean.getObjectName()).append("_").append(String.format("%.2f", (100 * bean.getScore())) + "%");
mObjRectF = new RectF(bean.getLeft(), bean.getTop(), bean.getRight(), bean.getBottom());
canvas.drawRect(mObjRectF, mPaint);
canvas.drawText(sb.toString(), mObjRectF.left, mObjRectF.top - DisplayUtil.dip2px(context,10) , mPaint);
}
}

View File

@ -0,0 +1,115 @@
package com.mindspore.himindspore.objectdetection.ui;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.widget.ImageView;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.himindspore.R;
import com.mindspore.himindspore.objectdetection.help.ObjectTrackingMobile;
import com.mindspore.himindspore.objectdetection.bean.RecognitionObjectBean;
import com.mindspore.himindspore.objectdetection.help.ImageDegreeHelper;
import com.mindspore.himindspore.utils.DisplayUtil;
import java.io.FileNotFoundException;
import java.util.List;
import static com.mindspore.himindspore.objectdetection.bean.RecognitionObjectBean.getRecognitionList;
public class PhotoActivity extends AppCompatActivity {
private static final String TAG = "PhotoActivity";
private static final int[] COLORS ={R.color.white,R.color.text_blue,R.color.text_yellow,R.color.text_orange,R.color.text_green};
private ImageView imgPhoto;
private ObjectTrackingMobile trackingMobile;
private List<RecognitionObjectBean> recognitionObjectBeanList;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_object_photo);
imgPhoto = findViewById(R.id.img_photo);
Uri uri = getIntent().getData();
String imgPath = ImageDegreeHelper.getPath(this,uri);
int degree = ImageDegreeHelper.readPictureDegree(imgPath);
Bitmap originBitmap = BitmapFactory.decodeFile(imgPath);
if (originBitmap != null) {
Bitmap bitmap = ImageDegreeHelper.rotaingImageView(degree, originBitmap.copy(Bitmap.Config.ARGB_8888, true));
if (bitmap != null) {
imgPhoto.setImageBitmap(bitmap);
initMindspore(bitmap);
}
}
}
private void initMindspore(Bitmap bitmap) {
try {
trackingMobile = new ObjectTrackingMobile(this);
} catch (FileNotFoundException e) {
Log.e(TAG, Log.getStackTraceString(e));
e.printStackTrace();
}
        // Load the model.
boolean ret = trackingMobile.loadModelFromBuf(getAssets());
if (!ret) {
Log.e(TAG, "Load model error.");
return;
}
// run net.
long startTime = System.currentTimeMillis();
String result = trackingMobile.MindSpore_runnet(bitmap);
long endTime = System.currentTimeMillis();
Log.d(TAG, "RUNNET 耗时:"+(endTime-startTime)+"ms");
Log.d(TAG, "result"+ result);
recognitionObjectBeanList = getRecognitionList(result);
if (recognitionObjectBeanList != null && recognitionObjectBeanList.size() > 0) {
drawRect(bitmap);
}
}
private void drawRect(Bitmap bitmap) {
Canvas canvas = new Canvas(bitmap);
Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
mPaint.setTextSize(DisplayUtil.sp2px(this,30));
        // Draw only the outline (stroke).
mPaint.setStyle(Paint.Style.STROKE);
mPaint.setStrokeWidth(DisplayUtil.dip2px(this,2));
for (int i = 0; i < recognitionObjectBeanList.size(); i++) {
RecognitionObjectBean objectBean = recognitionObjectBeanList.get(i);
StringBuilder sb = new StringBuilder();
sb.append(objectBean.getRectID()).append("_").append(objectBean.getObjectName()).append("_").append(String.format("%.2f", (100 * objectBean.getScore())) + "%");
int paintColor =getResources().getColor(COLORS[i % COLORS.length]);
mPaint.setColor(paintColor);
RectF rectF = new RectF(objectBean.getLeft(), objectBean.getTop(), objectBean.getRight(), objectBean.getBottom());
canvas.drawRect(rectF, mPaint);
canvas.drawText(sb.toString(),objectBean.getLeft(), objectBean.getTop()-10,mPaint);
}
}
@Override
protected void onDestroy() {
super.onDestroy();
        if (trackingMobile != null) {
            trackingMobile.unloadModel();
        }
}
}

View File

@ -0,0 +1,4 @@
package com.mindspore.himindspore.track;
public interface TrackListener {
}

View File

@ -0,0 +1,47 @@
package com.mindspore.himindspore.utils;
import android.content.Context;
public class DisplayUtil {
private DisplayUtil() {
/* cannot be instantiated */
throw new UnsupportedOperationException("cannot be instantiated");
}
/**
     * Converts a px value to dip/dp so the physical size stays the same.
     * Uses the density property of DisplayMetrics.
*/
public static int px2dip(Context context, float pxValue) {
final float scale = context.getResources().getDisplayMetrics().density;
return (int) (pxValue / scale + 0.5f);
}
/**
     * Converts a dip/dp value to px so the physical size stays the same.
     * Uses the density property of DisplayMetrics.
*/
public static int dip2px(Context context, float dipValue) {
final float scale = context.getResources().getDisplayMetrics().density;
return (int) (dipValue * scale + 0.5f);
}
/**
     * Converts a px value to sp so the text size stays the same.
     * Uses the scaledDensity property of DisplayMetrics.
*/
public static int px2sp(Context context, float pxValue) {
final float fontScale = context.getResources().getDisplayMetrics().scaledDensity;
return (int) (pxValue / fontScale + 0.5f);
}
/**
     * Converts an sp value to px so the text size stays the same.
     * Uses the scaledDensity property of DisplayMetrics.
*/
public static int sp2px(Context context, float spValue) {
final float fontScale = context.getResources().getDisplayMetrics().scaledDensity;
return (int) (spValue * fontScale + 0.5f);
}
}
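Note: all four conversions above follow the same pattern: multiply or divide by the relevant DisplayMetrics scale factor, then round by adding 0.5 before the cast. A plain-Java sketch of the arithmetic; the density value is purely illustrative.

// Sketch only: density 2.75 is an assumed example; on a real device it comes from DisplayMetrics.
public class DensityMathSketch {
    public static void main(String[] args) {
        float density = 2.75f;                     // hypothetical DisplayMetrics.density
        float dip = 2f;
        int px = (int) (dip * density + 0.5f);     // same formula as dip2px -> 6
        int dipBack = (int) (px / density + 0.5f); // same formula as px2dip -> 2
        System.out.println(dip + "dp -> " + px + "px -> " + dipBack + "dp");
    }
}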

View File

@ -0,0 +1,30 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

Binary image files not shown (10 new image assets added in this commit).

View File

@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>

View File

@ -0,0 +1,57 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/colorPrimary"
tools:context=".contract.ContractActivity">
<ImageView
android:id="@+id/logo"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="50dp"
android:paddingTop="50dp"
android:src="@drawable/logo"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent" />
<TextView
android:id="@+id/title"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginStart="20dp"
android:layout_marginTop="30dp"
android:layout_marginEnd="10dp"
android:text="If you are interested in Mindspore,
please enter your email for more product information"
android:textColor="@color/white"
android:textSize="25sp"
app:layout_constraintTop_toBottomOf="@+id/logo"
tools:ignore="MissingConstraints" />
<EditText
android:maxLines="1"
android:id="@+id/emailEditText"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_margin="20dp"
android:background="@color/gray"
android:hint="input your email"
android:padding="10dp"
android:textColor="@color/black"
android:textColorHint="@color/white"
app:layout_constraintTop_toBottomOf="@+id/title" />
<Button
android:id="@+id/submitBtn"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_margin="20dp"
android:gravity="center"
android:text="submit"
app:layout_constraintTop_toBottomOf="@+id/emailEditText" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,50 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".imageclassification.ui.ImageCameraActivity">
<com.mindspore.himindspore.camera.CameraPreview
android:id="@+id/image_camera_preview"
android:layout_width="match_parent"
android:layout_height="match_parent"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<androidx.appcompat.widget.Toolbar
android:id="@+id/toolbar"
android:layout_width="match_parent"
android:layout_height="?attr/actionBarSize"
android:background="@color/mindspore_semi_transparent"
app:layout_constraintTop_toTopOf="parent">
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:drawableStart="@drawable/logo2"
android:drawablePadding="5dp"
android:gravity="center_vertical"
android:text="@string/app_name_title"
android:textColor="@color/white"
android:textSize="20sp" />
</androidx.appcompat.widget.Toolbar>
<LinearLayout
android:id="@+id/layout_bottom_content"
android:layout_width="match_parent"
android:layout_height="200dp"
android:background="@color/colorPrimary"
android:gravity="center"
android:orientation="vertical"
app:layout_constraintBottom_toBottomOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,72 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/colorPrimary"
tools:context=".imageclassification.ui.ImageMainActivity">
<TextView
android:id="@+id/title"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="70dp"
android:drawableTop="@drawable/logo"
android:drawablePadding="35dp"
android:gravity="center_horizontal"
android:text="@string/app_name"
android:textColor="@color/white"
android:textSize="42sp"
app:layout_constraintTop_toTopOf="parent" />
<TextView
android:id="@+id/sub_title"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="18dp"
android:gravity="center_horizontal"
android:text="@string/title_image"
android:textColor="@color/white"
android:textSize="30sp"
app:layout_constraintTop_toBottomOf="@+id/title" />
<Button
android:textColor="@color/white"
android:id="@+id/btn_demo"
android:layout_width="320dp"
android:layout_height="48dp"
android:layout_marginTop="60dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_image"
android:drawablePadding="16dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:text="@string/title_demo"
android:textAllCaps="false"
android:textSize="16sp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/sub_title" />
<Button
android:textColor="@color/white"
android:id="@+id/btn_custom"
android:layout_width="320dp"
android:layout_height="48dp"
android:layout_marginTop="16dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_object"
android:drawablePadding="16dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:text="@string/title_custom"
android:textAllCaps="false"
android:textSize="16sp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/btn_demo" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<TextView
android:id="@+id/sample_text"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Hello World!"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,28 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".objectdetection.ui.ObjectCameraActivity">
<com.mindspore.himindspore.camera.CameraPreview
android:id="@+id/camera_preview"
android:layout_width="match_parent"
android:layout_height="match_parent"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<com.mindspore.himindspore.objectdetection.ui.ObjectRectView
android:id="@+id/objRectView"
android:layout_width="match_parent"
android:layout_height="match_parent"
/>
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,71 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/colorPrimary"
tools:context=".objectdetection.ui.ObjectDetectionMainActivity">
<TextView
android:id="@+id/title"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="70dp"
android:drawableTop="@drawable/logo"
android:drawablePadding="35dp"
android:gravity="center_horizontal"
android:text="@string/app_name"
android:textColor="@color/white"
android:textSize="42sp"
app:layout_constraintTop_toTopOf="parent" />
<TextView
android:id="@+id/sub_title"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="18dp"
android:gravity="center_horizontal"
android:text="@string/title_object"
android:textColor="@color/white"
android:textSize="30sp"
app:layout_constraintTop_toBottomOf="@+id/title" />
<Button
android:id="@+id/btn_photo"
android:layout_width="320dp"
android:layout_height="48dp"
android:layout_marginTop="60dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_image"
android:drawablePadding="16dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:text="@string/title_photo"
android:textAllCaps="false"
android:textSize="16sp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/sub_title" />
<Button
android:id="@+id/btn_camera"
android:layout_width="320dp"
android:layout_height="48dp"
android:layout_marginTop="16dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_object"
android:drawablePadding="16dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:text="@string/title_camera"
android:textAllCaps="false"
android:textSize="16sp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/btn_photo" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".objectdetection.ui.PhotoActivity">
<ImageView
android:id="@+id/img_photo"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:scaleType="fitCenter"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,94 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/colorPrimary"
tools:context=".SplashActivity">
<TextView
android:id="@+id/title"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_marginTop="70dp"
android:drawableTop="@drawable/logo"
android:drawablePadding="30dp"
android:gravity="center_horizontal"
android:maxLines="1"
android:text="@string/app_name"
android:textColor="@color/white"
android:textSize="36sp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<Button
android:id="@+id/btn_image"
android:layout_width="320dp"
android:layout_height="48dp"
android:layout_marginTop="60dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_image"
android:drawablePadding="16dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:text="@string/title_image"
android:textAllCaps="false"
android:textSize="16sp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/title" />
<Button
android:id="@+id/btn_object"
android:layout_width="320dp"
android:layout_height="48dp"
android:layout_marginTop="16dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_object"
android:drawablePadding="16dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:text="@string/title_object"
android:textAllCaps="false"
android:textSize="16sp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/btn_image" />
<Button
android:id="@+id/btn_contact"
android:layout_width="320dp"
android:layout_height="48dp"
android:layout_marginTop="16dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_code"
android:drawablePadding="16dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:text="@string/title_source"
android:textAllCaps="false"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/btn_object" />
<Button
android:id="@+id/btn_advice"
android:layout_width="320dp"
android:layout_height="48dp"
android:layout_marginTop="16dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_help"
android:drawablePadding="16dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:text="@string/title_help"
android:textAllCaps="false"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/btn_contact" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,43 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical">
<TextView
android:textStyle="bold"
android:textColor="@color/white"
android:id="@+id/tv_left_title"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/hor_text_view_text_margin_normal"
android:layout_marginTop="@dimen/hor_text_view_text_margin_small"
android:textSize="@dimen/hor_text_view_text_size"
tools:text="person" />
<TextView
android:textStyle="bold"
android:gravity="end"
android:layout_toRightOf="@+id/tv_left_title"
android:textColor="@color/white"
android:layout_alignParentEnd="true"
android:id="@+id/tv_right_content"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginEnd="@dimen/hor_text_view_text_margin_normal"
android:layout_marginStart="@dimen/hor_text_view_text_margin_normal"
android:layout_marginTop="@dimen/hor_text_view_text_margin_small"
android:textSize="@dimen/hor_text_view_text_size"
tools:text="12.5" />
<View
android:id="@+id/view_bottom_line"
android:layout_marginTop="@dimen/hor_text_view_text_margin_small"
android:layout_alignStart="@+id/tv_left_title"
android:layout_alignEnd="@+id/tv_right_content"
android:background="@color/white"
android:layout_below="@+id/tv_left_title"
android:layout_height="0.5dp"
android:layout_width="match_parent"/>
</RelativeLayout>

Binary image files not shown (10 new image assets added in this commit).

View File

@ -0,0 +1,20 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#303030</color>
<color name="colorPrimaryDark">#3700B3</color>
<color name="colorAccent">#03DAC5</color>
<color name="mindspore_semi_transparent">#66000000</color>
<color name="white">#ffffff</color>
<color name="black">#000000</color>
<color name="gray">#A69D9D</color>
<color name="gray_btn">#424242</color>
<color name="text_blue">#6DA7FF</color>
<color name="text_yellow">#F8E71C</color>
<color name="text_orange">#FF844D</color>
<color name="text_green">#66B50A</color>
</resources>

View File

@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<dimen name="ms_bottom_sheet_corner_radius">15dp</dimen>
<dimen name="ms_bottom_sheet_top_padding">8dp</dimen>
<dimen name="hor_text_view_text_margin_normal">15dp</dimen>
<dimen name="hor_text_view_text_margin_small">6dp</dimen>
<dimen name="hor_text_view_text_size">14sp</dimen>
</resources>

View File

@ -0,0 +1,19 @@
<resources>
<string name="app_name">HiMindSpore</string>
<string name="app_name_title">MindSpore</string>
<string name="switch_custom">custom</string>
<string name="title_image">Image Classification</string>
<string name="title_object">Object Detection</string>
<string name="title_source">Source Code</string>
<string name="title_help">Help And FeedBack</string>
<string name="title_photo">Photo</string>
<string name="title_camera">Camera</string>
<string name="title_demo">Demo</string>
<string name="title_custom">Custom</string>
<string name="title_time">Inference Time</string>
</resources>

View File

@ -0,0 +1,11 @@
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
</style>
</resources>

View File

@ -0,0 +1,17 @@
package com.mindspore.himindspore;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() {
assertEquals(4, 2 + 2);
}
}

View File

@ -0,0 +1,25 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath "com.android.tools.build:gradle:4.0.1"
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
maven { url "https://jitpack.io" }
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}

View File

@ -0,0 +1,19 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true

View File

@ -0,0 +1,6 @@
#Mon Oct 19 10:08:12 CST 2020
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-all.zip

View File

@ -0,0 +1,172 @@
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"

View File

@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -0,0 +1,2 @@
include ':app'
rootProject.name = "HiMindSpore"

View File

@ -78,11 +78,11 @@ app
When MindSpore C++ APIs are called at the Android JNI layer, the related library files are required. You can use MindSpore Lite [source code compilation](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html) to generate the MindSpore Lite package; in this case, use the build command that includes the image preprocessing module.
In this example, the build process automatically downloads the `mindspore-lite-1.0.0-minddata-arm64-cpu` library via the `app/download.gradle` file and saves it in the `app/src/main/cpp` directory.
In this example, the build process automatically downloads the `mindspore-lite-1.0.1-runtime-arm64-cpu` library via the `app/download.gradle` file and saves it in the `app/src/main/cpp` directory.
Note: if the automatic download fails, please manually download the relevant library files and put them in the corresponding location.
mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz [Download link](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz)
mindspore-lite-1.0.1-runtime-arm64-cpu.tar.gz [Download link](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/lite/android_aarch64/mindspore-lite-1.0.1-runtime-arm64-cpu.tar.gz)
```
android{

View File

@ -98,7 +98,7 @@ When the Android JNI layer calls MindSpore C++ APIs, the related library files are required. They can be obtained
* Note: if the automatic download fails, manually download the relevant library files, decompress them, and place them in the corresponding location:
mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz [Download link](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz)
mindspore-lite-1.0.1-runtime-arm64-cpu.tar.gz [Download link](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/lite/android_aarch64/mindspore-lite-1.0.1-runtime-arm64-cpu.tar.gz)
Configure CMake build support and `arm64-v8a` build support in the app's `build.gradle` file, as shown below:
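A minimal sketch of such an `android` block is given below for reference; it assumes only CMake support and the `arm64-v8a` ABI filter are needed, and the argument values are illustrative rather than taken from this commit:
```
android {
    defaultConfig {
        externalNativeBuild {
            cmake {
                // Illustrative flag; the demo may pass additional arguments.
                arguments "-DANDROID_STL=c++_shared"
            }
        }
        ndk {
            // Build only the 64-bit Arm ABI that the downloaded MindSpore Lite package targets.
            abiFilters 'arm64-v8a'
        }
    }
    externalNativeBuild {
        cmake {
            // Assumed location of the CMakeLists.txt shown elsewhere in this commit.
            path "CMakeLists.txt"
        }
    }
}
```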

View File

@ -8,7 +8,7 @@ cmake_minimum_required(VERSION 3.4.1)
set(CMAKE_VERBOSE_MAKEFILE on)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/libs/${ANDROID_ABI})
set(MINDSPORELITE_VERSION mindspore-lite-1.0.0-minddata-arm64-cpu)
set(MINDSPORELITE_VERSION mindspore-lite-1.0.1-runtime-arm64-cpu)
# ============== Set MindSpore Dependencies. =============
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp)
@ -17,14 +17,18 @@ include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION})
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/ir/dtype)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/schema)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/include)
add_library(mindspore-lite SHARED IMPORTED )
add_library(minddata-lite SHARED IMPORTED )
add_library(libmindspore-lite-fp16 SHARED IMPORTED )
set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite.so)
set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libminddata-lite.so)
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/lib/libminddata-lite.so)
set_target_properties(libmindspore-lite-fp16 PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite-fp16.so)
# --------------- MindSpore Lite set End. --------------------
@ -71,6 +75,7 @@ target_link_libraries( # Specifies the target library.
# --- mindspore ---
minddata-lite
mindspore-lite
libmindspore-lite-fp16
# --- other dependencies.---
-ljnigraphics

View File

@ -4,13 +4,14 @@
* The libraries can be downloaded manually.
*/
def targetMindSporeInclude = "src/main/cpp/"
def mindsporeLite_Version = "mindspore-lite-1.0.0-minddata-arm64-cpu"
def mindsporeLite_Version = "mindspore-lite-1.0.1-runtime-arm64-cpu"
def targetModelFile = "src/main/assets/model/mobilenetv2.ms"
def mindSporeLibrary_arm64 = "src/main/cpp/${mindsporeLite_Version}.tar.gz"
def modelDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms"
def mindsporeLiteDownloadUrl = "https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/${mindsporeLite_Version}.tar.gz"
//def mindsporeLiteDownloadUrl = "https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/${mindsporeLite_Version}.tar.gz"
def mindsporeLiteDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/lib/mindspore%20version%201.0.1/${mindsporeLite_Version}.tar.gz"
def cleantargetMindSporeInclude = "src/main/cpp"
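For context, `app/download.gradle` is the script that the READMEs above describe as downloading the MindSpore Lite package automatically. A minimal sketch of such a download-and-unpack step is shown below; it assumes the `de.undercouch.download` plugin is on the build classpath, reuses the variables defined above, and the `Copy`-based extraction is illustrative rather than taken from this commit:
```
task downloadMindsporeLibrary(type: de.undercouch.gradle.tasks.download.Download) {
    // Fetch the tar.gz defined by mindsporeLiteDownloadUrl into src/main/cpp.
    src mindsporeLiteDownloadUrl
    dest file(mindSporeLibrary_arm64)
    overwrite false
}

task unpackMindsporeLibrary(type: Copy, dependsOn: 'downloadMindsporeLibrary') {
    // Unpack headers and .so files so CMakeLists.txt can resolve the MINDSPORELITE_VERSION paths.
    from tarTree(file(mindSporeLibrary_arm64))
    into targetMindSporeInclude
}
```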

View File

@ -370,6 +370,9 @@ Java_com_mindspore_himindsporedemo_gallery_classify_TrackingMobile_loadModel(JNI
mindspore::lite::Context *context = new mindspore::lite::Context;
context->thread_num_ = num_thread;
context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND;
context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = true;
context->device_list_[0].device_type_ = mindspore::lite::DT_CPU;
labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
delete context;

View File

@ -227,7 +227,7 @@ public class CameraFragment extends Fragment {
mMindsporeHandlerThread = new HandlerThread("MINDSPORE");
mMindsporeHandlerThread.start();
mMindsporeHandler = new Handler(mMindsporeHandlerThread.getLooper());
mMindsporeHandler.post(classifyRunnable);
mMindsporeHandler.postDelayed(classifyRunnable, 500);
}
@ -252,7 +252,7 @@ public class CameraFragment extends Fragment {
}
}
if (mMindsporeHandler != null && !isPreBackgroundThreadPause) {
mMindsporeHandler.post(classifyRunnable);
mMindsporeHandler.postDelayed(classifyRunnable, 500);
}
}
}

View File

@ -1,5 +1,5 @@
<resources>
<string name="app_name">MindSporeClassify</string>
<string name="app_name">Classification</string>
<string name="action_settings">设置</string>
<string name="request_permission">This sample needs camera permission.</string>

View File

@ -1,2 +1,2 @@
include ':app'
rootProject.name = "HiMindSporeDemo"
rootProject.name = "Classification"

View File

@ -45,11 +45,11 @@ This object detection sample program on the Android device includes a Java layer
When MindSpore C++ APIs are called at the Android JNI layer, the related library files are required. You can use MindSpore Lite [source code compilation](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html) to generate the MindSpore Lite package; in this case, use the build command that includes the image preprocessing module.
In this example, the build process automatically downloads the `mindspore-lite-1.0.0-minddata-arm64-cpu` library via the `app/download.gradle` file and saves it in the `app/src/main/cpp` directory.
In this example, the build process automatically downloads the `mindspore-lite-1.0.1-runtime-arm64-cpu` library via the `app/download.gradle` file and saves it in the `app/src/main/cpp` directory.
Note: if the automatic download fails, please manually download the relevant library files and put them in the corresponding location.
mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz [Download link](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz)
mindspore-lite-1.0.1-runtime-arm64-cpu.tar.gz [Download link](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/lite/android_aarch64/mindspore-lite-1.0.1-runtime-arm64-cpu.tar.gz)
```
android{
@ -71,7 +71,7 @@ Create a link to the `.so` library file in the `app/CMakeLists.txt` file:
```
# Set MindSpore Lite Dependencies.
set(MINDSPORELITE_VERSION mindspore-lite-1.0.0-minddata-arm64-cpu)
set(MINDSPORELITE_VERSION mindspore-lite-1.0.1-runtime-arm64-cpu)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION})
add_library(mindspore-lite SHARED IMPORTED )
add_library(minddata-lite SHARED IMPORTED )

View File

@ -97,7 +97,7 @@ When the Android JNI layer calls MindSpore C++ APIs, the related library files are required. They can be obtained
* Note: if the automatic download fails, manually download the relevant library files, decompress them, and place them in the corresponding location:
mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz [Download link](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz)
mindspore-lite-1.0.1-runtime-arm64-cpu.tar.gz [Download link](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/lite/android_aarch64/mindspore-lite-1.0.1-runtime-arm64-cpu.tar.gz)
Configure CMake build support and `arm64-v8a` build support in the app's `build.gradle` file, as shown below:
@ -121,7 +121,7 @@ android{
```
# Set MindSpore Lite Dependencies.
set(MINDSPORELITE_VERSION mindspore-lite-1.0.0-minddata-arm64-cpu)
set(MINDSPORELITE_VERSION mindspore-lite-1.0.1-runtime-arm64-cpu)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION})
add_library(mindspore-lite SHARED IMPORTED )
add_library(minddata-lite SHARED IMPORTED )

View File

@ -8,7 +8,7 @@ cmake_minimum_required(VERSION 3.4.1)
set(CMAKE_VERBOSE_MAKEFILE on)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/libs/${ANDROID_ABI})
set(MINDSPORELITE_VERSION mindspore-lite-1.0.0-minddata-arm64-cpu)
set(MINDSPORELITE_VERSION mindspore-lite-1.0.1-runtime-arm64-cpu)
# ============== Set MindSpore Dependencies. =============
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp)
@ -17,16 +17,21 @@ include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION})
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/ir/dtype)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/schema)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/include)
#include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/include/ModelUtil)
add_library(mindspore-lite SHARED IMPORTED )
add_library(minddata-lite SHARED IMPORTED )
add_library(libmindspore-lite-fp16 SHARED IMPORTED )
set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite.so)
set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libminddata-lite.so)
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/lib/libminddata-lite.so)
set_target_properties(libmindspore-lite-fp16 PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite-fp16.so)
# --------------- MindSpore Lite set End. --------------------
# Creates and names a library, sets it as either STATIC
@ -72,6 +77,7 @@ target_link_libraries( # Specifies the target library.
# --- mindspore ---
minddata-lite
mindspore-lite
libmindspore-lite-fp16
# --- other dependencies.---
-ljnigraphics

View File

@ -4,13 +4,14 @@
* The libraries can be downloaded manually.
*/
def targetMindSporeInclude = "src/main/cpp/"
def mindsporeLite_Version = "mindspore-lite-1.0.0-minddata-arm64-cpu"
def mindsporeLite_Version = "mindspore-lite-1.0.1-runtime-arm64-cpu"
def targetModelFile = "src/main/assets/model/ssd.ms"
def mindSporeLibrary_arm64 = "src/main/cpp/${mindsporeLite_Version}.tar.gz"
def modelDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/ssd_mobilenetv2_lite/ssd.ms"
def mindsporeLiteDownloadUrl = "https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/${mindsporeLite_Version}.tar.gz"
//def mindsporeLiteDownloadUrl = "https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/${mindsporeLite_Version}.tar.gz"
def mindsporeLiteDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/lib/mindspore%20version%201.0.1/${mindsporeLite_Version}.tar.gz"
def cleantargetMindSporeInclude = "src/main/cpp"

View File

@ -164,8 +164,10 @@ Java_com_mindspore_hiobject_help_TrackingMobile_loadModel(JNIEnv *env, jobject t
*labelEnv = labelNet;
mindspore::lite::Context *context = new mindspore::lite::Context;
context->device_type_ = mindspore::lite::DT_CPU;
context->thread_num_ = numThread;
context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND;
context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = true;
context->device_list_[0].device_type_ = mindspore::lite::DT_CPU;
labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
delete context;

View File

@ -612,7 +612,7 @@ public class CameraFragment extends Fragment {
mPreBackgroundThread = new HandlerThread("AlgoBackground");
mPreBackgroundThread.start();
mPreBackgroundHandler = new Handler(mPreBackgroundThread.getLooper());
mPreBackgroundHandler.post(periodicClassify);
mPreBackgroundHandler.postDelayed(periodicClassify, 500);
}
@ -627,7 +627,7 @@ public class CameraFragment extends Fragment {
}
// Repeat the request
if (mPreBackgroundHandler != null && !isPreBackgroundThreadPause) {
mPreBackgroundHandler.post(periodicClassify);
mPreBackgroundHandler.postDelayed(periodicClassify, 500);
}
}
}

View File

@ -1,5 +1,5 @@
<resources>
<string name="app_name">HiObject</string>
<string name="app_name">Detection</string>
<string name="action_settings">设置</string>
<string name="request_permission">This sample needs camera permission.</string>

View File

@ -1,2 +1,2 @@
include ':app'
rootProject.name = "HiMSObject"
rootProject.name = "Detection"