Add skeleton (PoseNet) and style transfer modules; format code

himindspore: add skeleton (PoseNet) and style transfer modules

Remove comments
hukang hwx963878 2020-11-20 17:47:37 +08:00
parent 85e4efd706
commit 21075e30da
185 changed files with 4563 additions and 908 deletions

View File

@ -8,22 +8,16 @@ android {
applicationId "com.mindspore.himindspore"
minSdkVersion 21
targetSdkVersion 30
versionCode 1
versionName "1.0"
versionCode 2
versionName "1.1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
arguments "-DANDROID_STL=c++_shared"
cppFlags "-std=c++17"
javaCompileOptions {
annotationProcessorOptions {
arguments = [moduleName: project.getName()]
}
}
ndk {
abiFilters 'arm64-v8a'
}
}
aaptOptions {
noCompress '.so', 'ms'
}
buildTypes {
@ -32,48 +26,28 @@ android {
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
customDebugType {
debuggable true
}
}
externalNativeBuild {
cmake {
path file('CMakeLists.txt')
}
}
ndkVersion '21.3.6528147'
sourceSets{
main {
jniLibs.srcDirs = ['libs']
}
}
packagingOptions{
pickFirst 'lib/arm64-v8a/libmlkit-label-MS.so'
aaptOptions {
noCompress "ms"
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
// Before gradle build.
// To download some necessary libraries.
apply from:'download.gradle'
dependencies {
implementation fileTree(dir: "libs", include: ["*.jar"])
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'androidx.constraintlayout:constraintlayout:2.0.2'
implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
implementation 'androidx.cardview:cardview:1.0.0'
testImplementation 'junit:junit:4.13.1'
androidTestImplementation 'androidx.test.ext:junit:1.1.2'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
implementation 'com.sun.mail:android-mail:1.6.5'
implementation 'com.sun.mail:android-activation:1.6.5'
implementation 'com.trello.rxlifecycle2:rxlifecycle:2.2.2'
implementation 'com.trello.rxlifecycle2:rxlifecycle-components:2.2.2'
@ -85,4 +59,11 @@ dependencies {
implementation 'com.squareup.okhttp3:logging-interceptor:4.9.0'
implementation 'org.greenrobot:eventbus:3.0.0'
implementation 'com.alibaba:arouter-api:1.2.1'
annotationProcessor 'com.alibaba:arouter-compiler:1.1.2'
implementation project(':posenet')
implementation project(':imageObject')
implementation project(':styletransfer')
}

View File

@ -2,13 +2,13 @@ package com.mindspore.himindspore;
import android.content.Context;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.platform.app.InstrumentationRegistry;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
/**
* Instrumented test, which will execute on an Android device.

View File

@ -14,6 +14,7 @@
<uses-permission android:name="android.permission.INTERNET" />
<application
android:name=".base.MyApplication"
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
@ -35,30 +36,6 @@
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity
android:name=".imageclassification.ui.ImageMainActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar" />
<activity
android:name=".contract.ContractActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar" />
<activity
android:name=".objectdetection.ui.ObjectDetectionMainActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar" />
<activity
android:name=".objectdetection.ui.ObjectCameraActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar" />
<activity
android:name=".objectdetection.ui.ObjectPhotoActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar" />
<activity
android:name=".imageclassification.ui.ImageCameraActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar" />
<provider
android:name="androidx.core.content.FileProvider"

View File

@ -1,267 +0,0 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <android/bitmap.h>
#include <android/asset_manager_jni.h>
#include <android/log.h>
#include <sstream>
#include <cstring>
#include <set>
#include <utility>
#include "include/errorcode.h"
#include "include/ms_tensor.h"
#include "MSNetWork.h"
#include "ssd_util/ssd_util.h"
#include "lite_cv/lite_mat.h"
#include "lite_cv/image_process.h"
using mindspore::dataset::LiteMat;
using mindspore::dataset::LPixelType;
using mindspore::dataset::LDataType;
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
bool ObjectBitmapToLiteMat(JNIEnv *env, const jobject &srcBitmap, LiteMat *lite_mat) {
bool ret = false;
AndroidBitmapInfo info;
void *pixels = nullptr;
LiteMat &lite_mat_bgr = *lite_mat;
AndroidBitmap_getInfo(env, srcBitmap, &info);
if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
MS_PRINT("Image Err, Request RGBA");
return false;
}
AndroidBitmap_lockPixels(env, srcBitmap, &pixels);
if (info.stride == info.width * 4) {
ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels),
LPixelType::RGBA2RGB, LDataType::UINT8,
info.width, info.height, lite_mat_bgr);
if (!ret) {
MS_PRINT("Init From RGBA error");
}
} else {
unsigned char *pixels_ptr = new unsigned char[info.width * info.height * 4];
unsigned char *ptr = pixels_ptr;
unsigned char *data = reinterpret_cast<unsigned char *>(pixels);
for (int i = 0; i < info.height; i++) {
memcpy(ptr, data, info.width * 4);
ptr += info.width * 4;
data += info.stride;
}
ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels_ptr),
LPixelType::RGBA2RGB, LDataType::UINT8,
info.width, info.height, lite_mat_bgr);
if (!ret) {
MS_PRINT("Init From RGBA error");
}
delete[] (pixels_ptr);
}
AndroidBitmap_unlockPixels(env, srcBitmap);
return ret;
}
bool ObjectPreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr) {
bool ret = false;
LiteMat lite_mat_resize;
LiteMat &lite_norm_mat_cut = *lite_norm_mat_ptr;
ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 300, 300);
if (!ret) {
MS_PRINT("ResizeBilinear error");
return false;
}
LiteMat lite_mat_convert_float;
ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0);
if (!ret) {
MS_PRINT("ConvertTo error");
return false;
}
std::vector<float> means = {0.485, 0.456, 0.406};
std::vector<float> stds = {0.229, 0.224, 0.225};
SubStractMeanNormalize(lite_mat_convert_float, lite_norm_mat_cut, means, stds);
return true;
}
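For reference, the preprocessing above scales each 8-bit pixel into [0, 1] and then applies per-channel ImageNet normalization, x' = (x/255 - mean)/std. A minimal Java illustration of the same arithmetic (illustrative only; the class and helper names are not part of the project):

package example; // illustrative only

public final class NormalizeExample {
    // ImageNet statistics, as used in the C++ preprocessing above.
    private static final float[] MEANS = {0.485f, 0.456f, 0.406f};
    private static final float[] STDS  = {0.229f, 0.224f, 0.225f};

    /** Normalizes one 8-bit channel value (0..255) for channel 0=R, 1=G, 2=B. */
    static float normalize(int pixel, int channel) {
        float scaled = pixel / 255.0f;                      // ConvertTo(..., 1.0 / 255.0)
        return (scaled - MEANS[channel]) / STDS[channel];   // SubStractMeanNormalize
    }

    public static void main(String[] args) {
        // A red value of 128 becomes roughly (0.502 - 0.485) / 0.229 ≈ 0.074.
        System.out.println(normalize(128, 0));
    }
}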
char *ObjectCreateLocalModelBuffer(JNIEnv *env, jobject modelBuffer) {
jbyte *modelAddr = static_cast<jbyte *>(env->GetDirectBufferAddress(modelBuffer));
int modelLen = static_cast<int>(env->GetDirectBufferCapacity(modelBuffer));
char *buffer(new char[modelLen]);
memcpy(buffer, modelAddr, modelLen);
return buffer;
}
/**
*
* @param msOutputs Model output, the mindspore inferencing result.
* @param srcImageWidth The width of the original input image.
* @param srcImageHeight The height of the original input image.
* @return
*/
std::string ProcessRunnetResult(std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs,
int srcImageWidth, int srcImageHeight) {
std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
iter = msOutputs.begin();
auto branch2_string = iter->first;
auto branch2_tensor = iter->second;
++iter;
auto branch1_string = iter->first;
auto branch1_tensor = iter->second;
MS_PRINT("%s %s", branch1_string.c_str(), branch2_string.c_str());
// ----------- Interface test --------------------------
float *tmpscores2 = reinterpret_cast<float *>(branch1_tensor->MutableData());
float *tmpdata = reinterpret_cast<float *>(branch2_tensor->MutableData());
// Using ssd model util to process model branch outputs.
SSDModelUtil ssdUtil(srcImageWidth, srcImageHeight);
std::string retStr = ssdUtil.getDecodeResult(tmpscores2, tmpdata);
MS_PRINT("retStr %s", retStr.c_str());
return retStr;
}
extern "C" JNIEXPORT jlong JNICALL
Java_com_mindspore_himindspore_objectdetection_help_ObjectTrackingMobile_loadModel(JNIEnv *env, jobject thiz,
jobject assetManager,
jobject buffer,
jint numThread) {
MS_PRINT("MindSpore so version 20200730");
if (nullptr == buffer) {
MS_PRINT("error, buffer is nullptr!");
return (jlong) nullptr;
}
jlong bufferLen = env->GetDirectBufferCapacity(buffer);
MS_PRINT("MindSpore get bufferLen:%d", static_cast<int>(bufferLen));
if (0 == bufferLen) {
MS_PRINT("error, bufferLen is 0!");
return (jlong) nullptr;
}
char *modelBuffer = ObjectCreateLocalModelBuffer(env, buffer);
if (modelBuffer == nullptr) {
MS_PRINT("modelBuffer create failed!");
return (jlong) nullptr;
}
MS_PRINT("MindSpore loading Model.");
void **labelEnv = new void *;
MSNetWork *labelNet = new MSNetWork;
*labelEnv = labelNet;
mindspore::lite::Context *context = new mindspore::lite::Context;
context->thread_num_ = numThread;
labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
delete context;
if (labelNet->session() == nullptr) {
delete labelNet;
delete labelEnv;
MS_PRINT("MindSpore create session failed!.");
return (jlong) nullptr;
}
MS_PRINT("MindSpore create session successfully.");
if (buffer != nullptr) {
env->DeleteLocalRef(buffer);
}
if (assetManager != nullptr) {
env->DeleteLocalRef(assetManager);
}
MS_PRINT("ptr released successfully.");
return (jlong) labelEnv;
}
extern "C" JNIEXPORT jstring JNICALL
Java_com_mindspore_himindspore_objectdetection_help_ObjectTrackingMobile_runNet(JNIEnv *env, jobject thiz,
jlong netEnv,
jobject srcBitmap) {
LiteMat lite_mat_bgr, lite_norm_mat_cut;
if (!ObjectBitmapToLiteMat(env, srcBitmap, &lite_mat_bgr)) {
MS_PRINT("ObjectBitmapToLiteMat error");
return NULL;
}
int srcImageWidth = lite_mat_bgr.width_;
int srcImageHeight = lite_mat_bgr.height_;
if (!ObjectPreProcessImageData(lite_mat_bgr, &lite_norm_mat_cut)) {
MS_PRINT("ObjectPreProcessImageData error");
return NULL;
}
ImgDims inputDims;
inputDims.channel = lite_norm_mat_cut.channel_;
inputDims.width = lite_norm_mat_cut.width_;
inputDims.height = lite_norm_mat_cut.height_;
// Get the mindsore inference environment which created in loadModel().
void **labelEnv = reinterpret_cast<void **>(netEnv);
if (labelEnv == nullptr) {
MS_PRINT("MindSpore error, labelEnv is a nullptr.");
return NULL;
}
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
auto mSession = labelNet->session();
if (mSession == nullptr) {
MS_PRINT("MindSpore error, Session is a nullptr.");
return NULL;
}
MS_PRINT("MindSpore get session.");
auto msInputs = mSession->GetInputs();
auto inTensor = msInputs.front();
float *dataHWC = reinterpret_cast<float *>(lite_norm_mat_cut.data_ptr_);
// copy input Tensor
memcpy(inTensor->MutableData(), dataHWC,
inputDims.channel * inputDims.width * inputDims.height * sizeof(float));
MS_PRINT("MindSpore get msInputs.");
auto status = mSession->RunGraph();
if (status != mindspore::lite::RET_OK) {
MS_PRINT("MindSpore runnet error.");
return NULL;
}
auto names = mSession->GetOutputTensorNames();
std::unordered_map<std::string,
mindspore::tensor::MSTensor *> msOutputs;
for (const auto &name : names) {
auto temp_dat = mSession->GetOutputByTensorName(name);
msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *> {name, temp_dat});
}
std::string retStr = ProcessRunnetResult(msOutputs, srcImageWidth, srcImageHeight);
const char *resultChardata = retStr.c_str();
return (env)->NewStringUTF(resultChardata);
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_mindspore_himindspore_objectdetection_help_ObjectTrackingMobile_unloadModel(JNIEnv *env,
jobject thiz,
jlong netEnv) {
void **labelEnv = reinterpret_cast<void **>(netEnv);
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
labelNet->ReleaseNets();
return (jboolean) true;
}
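The JNI entry points above imply a Java bridge class, com.mindspore.himindspore.objectdetection.help.ObjectTrackingMobile, with matching native methods. A hedged sketch follows, with signatures inferred from the native functions; the shared-library name mirrors the CMake target mlkit-label-MS, and the exact modifiers are assumptions:

package com.mindspore.himindspore.objectdetection.help;

import android.content.res.AssetManager;
import android.graphics.Bitmap;
import java.nio.ByteBuffer;

// Sketch of the Java bridge implied by the JNI functions above; not a verbatim copy of the project class.
public class ObjectTrackingMobile {
    static {
        // Assumed to match the CMake library target packaged as libmlkit-label-MS.so.
        System.loadLibrary("mlkit-label-MS");
    }

    // jlong loadModel(JNIEnv*, jobject, jobject assetManager, jobject buffer, jint numThread)
    public native long loadModel(AssetManager assetManager, ByteBuffer buffer, int numThread);

    // jstring runNet(JNIEnv*, jobject, jlong netEnv, jobject srcBitmap)
    public native String runNet(long netEnv, Bitmap srcBitmap);

    // jboolean unloadModel(JNIEnv*, jobject, jlong netEnv)
    public native boolean unloadModel(long netEnv);
}

Note that the ByteBuffer handed to loadModel has to be a direct buffer, since the native side reads it through GetDirectBufferAddress.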

View File

@ -1,3 +1,18 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.himindspore;
import android.Manifest;
@ -11,7 +26,7 @@ import android.os.Build;
import android.os.Environment;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
@ -19,25 +34,27 @@ import androidx.appcompat.app.AlertDialog;
import androidx.core.app.ActivityCompat;
import androidx.core.content.FileProvider;
import com.alibaba.android.arouter.facade.annotation.Route;
import com.alibaba.android.arouter.launcher.ARouter;
import com.mindspore.himindspore.base.BaseActivity;
import com.mindspore.himindspore.imageclassification.ui.ImageMainActivity;
import com.mindspore.himindspore.mvp.MainContract;
import com.mindspore.himindspore.mvp.MainPresenter;
import com.mindspore.himindspore.net.FileDownLoadObserver;
import com.mindspore.himindspore.net.UpdateInfoBean;
import com.mindspore.himindspore.objectdetection.ui.ObjectDetectionMainActivity;
import java.io.File;
public class SplashActivity extends BaseActivity<MainPresenter> implements MainContract.View, View.OnClickListener {
@Route(path = "/himindspore/SplashActivity")
public class SplashActivity extends BaseActivity<MainPresenter> implements MainContract.View {
private static final String TAG = "SplashActivity";
private static final int REQUEST_PERMISSION = 1;
private Button btnImage, btnObject, btnContract, btnAdvice;
private boolean isHasPermssion;
private int now_version;
private ProgressDialog progressDialog;
private TextView versionText;
private static final String CODE_URL = "https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite";
private static final String HELP_URL = "https://github.com/mindspore-ai/mindspore/issues";
@ -46,17 +63,8 @@ public class SplashActivity extends BaseActivity<MainPresenter> implements MainC
@Override
protected void init() {
presenter = new MainPresenter(this);
btnImage = findViewById(R.id.btn_image);
btnObject = findViewById(R.id.btn_object);
btnContract = findViewById(R.id.btn_contact);
btnAdvice = findViewById(R.id.btn_advice);
btnImage.setOnClickListener(this);
btnObject.setOnClickListener(this);
btnContract.setOnClickListener(this);
btnAdvice.setOnClickListener(this);
versionText = findViewById(R.id.tv_vision);
showPackaeInfo();
requestPermissions();
getUpdateInfo();
}
@ -66,6 +74,18 @@ public class SplashActivity extends BaseActivity<MainPresenter> implements MainC
return R.layout.activity_splash;
}
private void showPackaeInfo() {
try {
PackageManager packageManager = this.getPackageManager();
PackageInfo packageInfo = packageManager.getPackageInfo(this.getPackageName(), 0);
now_version = packageInfo.versionCode;
versionText.setText("Version: " + packageInfo.versionName);
} catch (PackageManager.NameNotFoundException e) {
e.printStackTrace();
}
}
private void requestPermissions() {
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,
@ -87,27 +107,65 @@ public class SplashActivity extends BaseActivity<MainPresenter> implements MainC
}
@Override
public void onClick(View view) {
if (R.id.btn_image == view.getId()) {
if (isHasPermssion) {
startActivity(new Intent(SplashActivity.this, ImageMainActivity.class));
} else {
requestPermissions();
}
} else if (R.id.btn_object == view.getId()) {
if (isHasPermssion) {
startActivity(new Intent(SplashActivity.this, ObjectDetectionMainActivity.class));
} else {
requestPermissions();
}
} else if (R.id.btn_contact == view.getId()) {
openBrowser(CODE_URL);
} else if (R.id.btn_advice == view.getId()) {
openBrowser(HELP_URL);
public void onClickImage(View view) {
if (isHasPermssion) {
ARouter.getInstance().build("/imageobject/ImageCameraActivity")
.withInt("OPEN_TYPE", 1).navigation();
} else {
requestPermissions();
}
}
public void onClickGarbage(View view) {
if (isHasPermssion) {
ARouter.getInstance().build("/imageobject/ImageCameraActivity")
.withInt("OPEN_TYPE", 2).navigation();
} else {
requestPermissions();
}
}
public void onClickPhotoDetection(View view) {
if (isHasPermssion) {
ARouter.getInstance().build("/imageobject/ObjectPhotoActivity").navigation();
} else {
requestPermissions();
}
}
public void onClickCameraDetection(View view) {
if (isHasPermssion) {
ARouter.getInstance().build("/imageobject/ObjectCameraActivity").navigation();
} else {
requestPermissions();
}
}
public void onClickPoseNet(View view) {
if (isHasPermssion) {
ARouter.getInstance().build("/posenet/PosenetMainActivity").navigation(this);
} else {
requestPermissions();
}
}
public void onClickStyleTransfer(View view) {
if (isHasPermssion) {
ARouter.getInstance().build("/styletransfer/StyleMainActivity").navigation(this);
} else {
requestPermissions();
}
}
public void onClickSouceCode(View view) {
openBrowser(CODE_URL);
}
public void onClickHelp(View view) {
openBrowser(HELP_URL);
}
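Each handler above routes to a destination activity through ARouter; the OPEN_TYPE extra distinguishes general image classification (1) from garbage classification (2). A hedged sketch of how the routed destination might read that extra (the route path and key come from the calls above; the receiving code itself is an assumption):

package com.mindspore.imageobject.imageclassification.ui; // assumed module package

import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import com.alibaba.android.arouter.facade.annotation.Route;

// Hypothetical skeleton of the routed destination; only the extra handling is sketched.
@Route(path = "/imageobject/ImageCameraActivity")
public class ImageCameraActivity extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // 1 = image classification demo, 2 = garbage classification (values passed by SplashActivity above).
        int openType = getIntent().getIntExtra("OPEN_TYPE", 1);
        // ... select the model and labels based on openType ...
    }
}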
public void openBrowser(String url) {
Intent intent = new Intent();
intent.setAction("android.intent.action.VIEW");
@ -160,16 +218,7 @@ public class SplashActivity extends BaseActivity<MainPresenter> implements MainC
}
private int now_version;
public void showUpdate(final UpdateInfoBean updateInfo) {
try {
PackageManager packageManager = this.getPackageManager();
PackageInfo packageInfo = packageManager.getPackageInfo(this.getPackageName(), 0);
now_version = packageInfo.versionCode;
} catch (PackageManager.NameNotFoundException e) {
e.printStackTrace();
}
if (now_version == updateInfo.getVersionCode()) {
Toast.makeText(this, "已经是最新版本", Toast.LENGTH_SHORT).show();
@ -245,4 +294,5 @@ public class SplashActivity extends BaseActivity<MainPresenter> implements MainC
return directoryPath;
}
}

View File

@ -1,3 +1,18 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.himindspore.base;
import android.app.Activity;

View File

@ -1,3 +1,18 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.himindspore.base;
public abstract class BasePresenter<T extends BaseActivity> {

View File

@ -0,0 +1,34 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.himindspore.base;
import android.app.Application;
import com.alibaba.android.arouter.BuildConfig;
import com.alibaba.android.arouter.launcher.ARouter;
public class MyApplication extends Application {
@Override
public void onCreate() {
super.onCreate();
if (BuildConfig.DEBUG) {
ARouter.openLog();
ARouter.openDebug();
}
ARouter.init(this);
}
}

View File

@ -1,4 +0,0 @@
package com.mindspore.himindspore.base;
public interface TrackListener {
}

View File

@ -1,37 +0,0 @@
package com.mindspore.himindspore.imageclassification.ui;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.himindspore.R;
public class ImageMainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_image_main);
findViewById(R.id.btn_demo).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(ImageMainActivity.this, ImageCameraActivity.class);
intent.putExtra(ImageCameraActivity.OPEN_TYPE, ImageCameraActivity.TYPE_DEMO);
startActivity(intent);
}
});
findViewById(R.id.btn_custom).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(ImageMainActivity.this, ImageCameraActivity.class);
intent.putExtra(ImageCameraActivity.OPEN_TYPE, ImageCameraActivity.TYPE_CUSTOM);
startActivity(intent);
}
});
}
}

View File

@ -1,3 +1,18 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.himindspore.mvp;
import com.mindspore.himindspore.net.FileDownLoadObserver;

View File

@ -1,4 +1,19 @@
package com.mindspore.himindspore.mvp;
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.util.Log;
@ -40,7 +55,7 @@ public class MainPresenter extends BasePresenter<SplashActivity> implements Main
@Override
public void onFailure(Call<UpdateInfoBean> call, Throwable t) {
Log.e(TAG, "onFailure" + t.toString());
Log.e(TAG, "onFailure>>>" + t.toString());
view.showFail(call.toString());
}
});

View File

@ -1,71 +0,0 @@
package com.mindspore.himindspore.objectdetection.ui;
import android.Manifest;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import com.mindspore.himindspore.R;
public class ObjectDetectionMainActivity extends AppCompatActivity implements View.OnClickListener {
private static final int REQUEST_CAMERA_PERMISSION = 2;
private static final int REQUEST_PHOTO_PERMISSION = 3;
private Button btnPhoto, btnCamera;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_object_detection_main);
btnPhoto = findViewById(R.id.btn_photo);
btnCamera = findViewById(R.id.btn_camera);
btnPhoto.setOnClickListener(this);
btnCamera.setOnClickListener(this);
}
@Override
public void onClick(View view) {
if (R.id.btn_photo == view.getId()) {
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.READ_PHONE_STATE}, REQUEST_PHOTO_PERMISSION);
} else if (R.id.btn_camera == view.getId()) {
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.READ_PHONE_STATE, Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
}
}
/**
* Authority application result callback
*/
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
if (REQUEST_PHOTO_PERMISSION == requestCode) {
choosePhoto();
} else if (REQUEST_CAMERA_PERMISSION == requestCode) {
chooseCamera();
}
}
private void choosePhoto() {
Intent intent = new Intent(ObjectDetectionMainActivity.this, ObjectPhotoActivity.class);
startActivity(intent);
}
private void chooseCamera() {
Intent intent = new Intent(ObjectDetectionMainActivity.this, ObjectCameraActivity.class);
startActivity(intent);
}
}

View File

@ -1,4 +0,0 @@
package com.mindspore.himindspore.track;
public interface TrackListener {
}

[Binary image files changed: 4 added, 4 replaced, 4 removed (1.1 KiB – 4.7 KiB each); previews not shown.]

View File

@ -1,57 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/colorPrimary"
tools:context=".contract.ContractActivity">
<ImageView
android:id="@+id/logo"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="50dp"
android:paddingTop="50dp"
android:src="@drawable/logo"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent" />
<TextView
android:id="@+id/title"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginStart="20dp"
android:layout_marginTop="30dp"
android:layout_marginEnd="10dp"
android:text="If you are interested in Mindspore,
please enter your email for more product information"
android:textColor="@color/white"
android:textSize="25sp"
app:layout_constraintTop_toBottomOf="@+id/logo"
tools:ignore="MissingConstraints" />
<EditText
android:maxLines="1"
android:id="@+id/emailEditText"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_margin="20dp"
android:background="@color/gray"
android:hint="input your email"
android:padding="10dp"
android:textColor="@color/black"
android:textColorHint="@color/white"
app:layout_constraintTop_toBottomOf="@+id/title" />
<Button
android:id="@+id/submitBtn"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_margin="20dp"
android:gravity="center"
android:text="submit"
app:layout_constraintTop_toBottomOf="@+id/emailEditText" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -1,72 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/colorPrimary"
tools:context=".imageclassification.ui.ImageMainActivity">
<TextView
android:id="@+id/title"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="70dp"
android:drawableTop="@drawable/logo"
android:drawablePadding="35dp"
android:gravity="center_horizontal"
android:text="@string/app_name"
android:textColor="@color/white"
android:textSize="42sp"
app:layout_constraintTop_toTopOf="parent" />
<TextView
android:id="@+id/sub_title"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="18dp"
android:gravity="center_horizontal"
android:text="@string/title_image"
android:textColor="@color/white"
android:textSize="30sp"
app:layout_constraintTop_toBottomOf="@+id/title" />
<Button
android:textColor="@color/white"
android:id="@+id/btn_demo"
android:layout_width="320dp"
android:layout_height="48dp"
android:layout_marginTop="60dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_image"
android:drawablePadding="16dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:text="@string/title_demo"
android:textAllCaps="false"
android:textSize="16sp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/sub_title" />
<Button
android:textColor="@color/white"
android:id="@+id/btn_custom"
android:layout_width="320dp"
android:layout_height="48dp"
android:layout_marginTop="16dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_object"
android:drawablePadding="16dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:text="@string/title_custom"
android:textAllCaps="false"
android:textSize="16sp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/btn_demo" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -1,19 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<TextView
android:id="@+id/sample_text"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Hello World!"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -1,71 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/colorPrimary"
tools:context=".objectdetection.ui.ObjectDetectionMainActivity">
<TextView
android:id="@+id/title"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="70dp"
android:drawableTop="@drawable/logo"
android:drawablePadding="35dp"
android:gravity="center_horizontal"
android:text="@string/app_name"
android:textColor="@color/white"
android:textSize="42sp"
app:layout_constraintTop_toTopOf="parent" />
<TextView
android:id="@+id/sub_title"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="18dp"
android:gravity="center_horizontal"
android:text="@string/title_object"
android:textColor="@color/white"
android:textSize="30sp"
app:layout_constraintTop_toBottomOf="@+id/title" />
<Button
android:id="@+id/btn_photo"
android:layout_width="320dp"
android:layout_height="48dp"
android:layout_marginTop="60dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_image"
android:drawablePadding="16dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:text="@string/title_photo"
android:textAllCaps="false"
android:textSize="16sp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/sub_title" />
<Button
android:id="@+id/btn_camera"
android:layout_width="320dp"
android:layout_height="48dp"
android:layout_marginTop="16dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_object"
android:drawablePadding="16dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:text="@string/title_camera"
android:textAllCaps="false"
android:textSize="16sp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/btn_photo" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -7,11 +7,12 @@
android:background="@color/colorPrimary"
tools:context=".SplashActivity">
<TextView
android:id="@+id/title"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_marginTop="70dp"
android:layout_marginTop="68dp"
android:drawableTop="@drawable/logo"
android:drawablePadding="30dp"
android:gravity="center_horizontal"
@ -20,75 +21,163 @@
android:textColor="@color/white"
android:textSize="36sp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintHorizontal_bias="0.0"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<Button
android:id="@+id/btn_image"
android:layout_width="320dp"
android:layout_width="0dp"
android:layout_height="48dp"
android:layout_marginTop="60dp"
android:layout_marginLeft="20dp"
android:layout_marginTop="30dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_image"
android:drawablePadding="16dp"
android:drawablePadding="5dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:onClick="onClickImage"
android:paddingLeft="4dp"
android:text="@string/title_image"
android:textAllCaps="false"
android:textSize="16sp"
app:layout_constraintEnd_toEndOf="parent"
android:textSize="12sp"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/title" />
app:layout_constraintTop_toBottomOf="@+id/title"
app:layout_constraintWidth_percent="0.43" />
<Button
android:id="@+id/btn_image_garbage"
android:layout_width="0dp"
android:layout_height="48dp"
android:layout_marginRight="20dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_other"
android:drawablePadding="5dp"
android:gravity="left|center_vertical"
android:onClick="onClickGarbage"
android:paddingLeft="4dp"
android:text="@string/title_image_garbage"
android:textAllCaps="false"
android:textSize="12sp"
app:layout_constraintBottom_toBottomOf="@+id/btn_image"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintWidth_percent="0.43" />
<Button
android:id="@+id/btn_object"
android:layout_width="320dp"
android:layout_width="0dp"
android:layout_height="48dp"
android:layout_marginTop="16dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_object"
android:drawablePadding="16dp"
android:drawableStart="@drawable/btn_text"
android:drawablePadding="5dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:onClick="onClickPhotoDetection"
android:paddingLeft="4dp"
android:text="@string/title_object"
android:textAllCaps="false"
android:textSize="16sp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/btn_image" />
android:textSize="12sp"
app:layout_constraintStart_toStartOf="@+id/btn_image"
app:layout_constraintTop_toBottomOf="@+id/btn_image"
app:layout_constraintWidth_percent="0.43" />
<Button
android:id="@+id/btn_contact"
android:layout_width="320dp"
android:id="@+id/btn_object_camera"
android:layout_width="0dp"
android:layout_height="48dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_object"
android:drawablePadding="5dp"
android:gravity="left|center_vertical"
android:onClick="onClickCameraDetection"
android:paddingLeft="4dp"
android:text="@string/title_object_camera"
android:textAllCaps="false"
android:textSize="12sp"
app:layout_constraintBottom_toBottomOf="@+id/btn_object"
app:layout_constraintEnd_toEndOf="@+id/btn_image_garbage"
app:layout_constraintWidth_percent="0.43" />
<Button
android:id="@+id/btn_posenet"
android:layout_width="0dp"
android:layout_height="48dp"
android:layout_marginTop="16dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_audio"
android:drawablePadding="5dp"
android:gravity="left|center_vertical"
android:onClick="onClickPoseNet"
android:paddingLeft="4dp"
android:text="@string/title_pose_net"
android:textAllCaps="false"
android:textSize="12sp"
app:layout_constraintStart_toStartOf="@+id/btn_object"
app:layout_constraintTop_toBottomOf="@+id/btn_object"
app:layout_constraintWidth_percent="0.43" />
<Button
android:id="@+id/btn_style_transfer"
android:layout_width="0dp"
android:layout_height="48dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_commend"
android:drawablePadding="5dp"
android:gravity="left|center_vertical"
android:onClick="onClickStyleTransfer"
android:paddingLeft="4dp"
android:text="@string/title_style_transfer"
android:textAllCaps="false"
android:textSize="12sp"
app:layout_constraintBottom_toBottomOf="@+id/btn_posenet"
app:layout_constraintEnd_toEndOf="@+id/btn_image_garbage"
app:layout_constraintWidth_percent="0.43" />
<Button
android:id="@+id/btn_source"
android:layout_width="0dp"
android:layout_height="48dp"
android:layout_marginTop="16dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_code"
android:drawablePadding="16dp"
android:drawablePadding="5dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:onClick="onClickSouceCode"
android:paddingLeft="4dp"
android:text="@string/title_source"
android:textAllCaps="false"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/btn_object" />
android:textSize="12sp"
app:layout_constraintStart_toStartOf="@+id/btn_posenet"
app:layout_constraintTop_toBottomOf="@+id/btn_posenet"
app:layout_constraintWidth_percent="0.43" />
<Button
android:id="@+id/btn_advice"
android:layout_width="320dp"
android:id="@+id/btn_help"
android:layout_width="0dp"
android:layout_height="48dp"
android:layout_marginTop="16dp"
android:background="@color/gray_btn"
android:drawableStart="@drawable/btn_help"
android:drawablePadding="16dp"
android:drawablePadding="5dp"
android:gravity="left|center_vertical"
android:paddingLeft="40dp"
android:onClick="onClickHelp"
android:paddingLeft="4dp"
android:text="@string/title_help"
android:textAllCaps="false"
android:textSize="12sp"
app:layout_constraintBottom_toBottomOf="@+id/btn_source"
app:layout_constraintEnd_toEndOf="@+id/btn_image_garbage"
app:layout_constraintWidth_percent="0.43" />
<TextView
android:id="@+id/tv_vision"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginBottom="30dp"
android:textColor="@color/white"
android:textSize="20sp"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/btn_contact" />
tools:text="Version 1.0.0" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -4,7 +4,11 @@
<string name="switch_custom">custom</string>
<string name="title_image">Image Classification</string>
<string name="title_object">Object Detection</string>
<string name="title_image_garbage">Garbage Classification</string>
<string name="title_object">Photo Detection</string>
<string name="title_object_camera">Camera Detection</string>
<string name="title_pose_net">PoseNet</string>
<string name="title_style_transfer">Style Transfer</string>
<string name="title_source">Source Code</string>
<string name="title_help">Help And FeedBack</string>

View File

@ -1,9 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<paths>
<external-path path="Android/data/com.mindspore.himindspore/" name="files_root" />
<external-path
path="Android/data/com.mindspore.himindspore/"
name="files_root" />
<files-path
name="Android/data/com.mindspore.himindspore/"
path="files_root">
</files-path>
path="files_root"></files-path>
</paths>
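These paths back the androidx.core.content.FileProvider declared in the manifest, which turns files under them into shareable content:// URIs (for example, when handing a downloaded update to the package installer). A hedged sketch of the lookup; the authority string is an assumption and must match android:authorities in the manifest:

package com.mindspore.himindspore; // assumed

import android.content.Context;
import android.net.Uri;
import androidx.core.content.FileProvider;
import java.io.File;

// Hypothetical helper: exposes a file under the declared paths as a content:// URI.
public final class UriHelper {
    public static Uri toContentUri(Context context, File file) {
        // The ".fileprovider" suffix is an assumed convention, not taken from the manifest shown here.
        return FileProvider.getUriForFile(context, context.getPackageName() + ".fileprovider", file);
    }
}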

View File

@ -2,7 +2,7 @@ package com.mindspore.himindspore;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
/**
* Example local unit test, which will execute on the development machine (host).

View File

@ -16,4 +16,5 @@ org.gradle.jvmargs=-Xmx2048m
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true
android.enableJetifier=true
android.injected.testOnly=false

View File

@ -0,0 +1 @@
/build

View File

@ -6,9 +6,9 @@
cmake_minimum_required(VERSION 3.4.1)
set(CMAKE_VERBOSE_MAKEFILE on)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/libs/${ANDROID_ABI})
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/libs/${ANDROID_ABI})
set(MINDSPORELITE_VERSION mindspore-lite-1.0.1-runtime-arm64-cpu)
set(MINDSPORELITE_VERSION mindspore-lite-1.0.1-runtime-arm64-cpu)
# ============== Set MindSpore Dependencies. =============
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp)
@ -19,16 +19,16 @@ include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/in
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/schema)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/include)
add_library(mindspore-lite SHARED IMPORTED )
add_library(minddata-lite SHARED IMPORTED )
add_library(libmindspore-lite-fp16 SHARED IMPORTED )
add_library(mindspore-lite SHARED IMPORTED)
add_library(minddata-lite SHARED IMPORTED)
#add_library(libmindspore-lite-fp16 SHARED IMPORTED )
set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite.so)
set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/lib/libminddata-lite.so)
set_target_properties(libmindspore-lite-fp16 PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite-fp16.so)
#set_target_properties(libmindspore-lite-fp16 PROPERTIES IMPORTED_LOCATION
# ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite-fp16.so)
# --------------- MindSpore Lite set End. --------------------
@ -37,7 +37,7 @@ set_target_properties(libmindspore-lite-fp16 PROPERTIES IMPORTED_LOCATION
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
file(GLOB_RECURSE cpp_src "src/main/cpp/*.cpp" "src/main/cpp/*.h")
file(GLOB_RECURSE cpp_src "src/main/cpp/*.cpp" "src/main/cpp/*.h")
add_library( # Sets the name of the library.
mlkit-label-MS
@ -60,10 +60,10 @@ find_library( # Sets the name of the path variable.
# Specifies the name of the NDK library that
# you want CMake to locate.
log )
log)
find_library( jnigraphics-lib jnigraphics )
find_library(jnigraphics-lib jnigraphics)
# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
@ -75,7 +75,7 @@ target_link_libraries( # Specifies the target library.
# --- mindspore ---
minddata-lite
mindspore-lite
libmindspore-lite-fp16
# libmindspore-lite-fp16
# --- other dependencies.---
-ljnigraphics

View File

@ -0,0 +1,83 @@
plugins {
id 'com.android.library'
}
android {
compileSdkVersion 30
buildToolsVersion "30.0.1"
defaultConfig {
minSdkVersion 21
targetSdkVersion 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
consumerProguardFiles "consumer-rules.pro"
javaCompileOptions {
annotationProcessorOptions {
arguments = [moduleName: project.getName()]
}
}
externalNativeBuild {
cmake {
arguments "-DANDROID_STL=c++_shared"
cppFlags "-std=c++17"
}
}
ndk {
abiFilters 'arm64-v8a'
}
}
aaptOptions {
noCompress '.so', 'ms'
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
externalNativeBuild {
cmake {
path file('CMakeLists.txt')
}
}
ndkVersion '21.3.6528147'
sourceSets {
main {
jniLibs.srcDirs = ['libs']
}
}
packagingOptions {
pickFirst 'lib/arm64-v8a/libmlkit-label-MS.so'
exclude 'lib/arm64-v8a/libmindspore-lite.so'
}
}
// Download default models; if you wish to use your own models then
// place them in the "assets" directory and comment out this line.
apply from: 'download.gradle'
dependencies {
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'com.google.android.material:material:1.2.1'
implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
testImplementation 'junit:junit:4.+'
androidTestImplementation 'androidx.test.ext:junit:1.1.2'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
implementation 'com.alibaba:arouter-api:1.2.1'
annotationProcessor 'com.alibaba:arouter-compiler:1.1.2'
}

View File

@ -74,21 +74,21 @@ task cleanUnusedmindsporeFiles(type: Delete, dependsOn: ['unzipMindSporeInclude'
* Using preBuild to download mindspore library and model file.
* Run before gradle build.
*/
if (file("src/main/cpp/${mindsporeLite_Version}/lib/libmindspore-lite.so").exists()){
if (file("src/main/cpp/${mindsporeLite_Version}/lib/libmindspore-lite.so").exists()) {
downloadMindSporeLibrary.enabled = false
unzipMindSporeInclude.enabled = false
cleanUnusedmindsporeFiles.enabled = false
}
if (file("src/main/assets/model/garbage_mobilenetv2.ms").exists()){
if (file("src/main/assets/model/garbage_mobilenetv2.ms").exists()) {
downloadGarbageModelFile.enabled = false
}
if (file("src/main/assets/model/mobilenetv2.ms").exists()){
if (file("src/main/assets/model/mobilenetv2.ms").exists()) {
downloadModelFile.enabled = false
}
if (file("src/main/assets/model/ssd.ms").exists()){
if (file("src/main/assets/model/ssd.ms").exists()) {
downloadObjectModelFile.enabled = false
}

View File

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@ -0,0 +1,26 @@
package com.mindspore.imageobject;
import android.content.Context;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.platform.app.InstrumentationRegistry;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.assertEquals;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
@Test
public void useAppContext() {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
assertEquals("com.mindspore.imageobject.test", appContext.getPackageName());
}
}

View File

@ -0,0 +1,27 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
package="com.mindspore.imageobject">
<application
android:allowBackup="true"
android:label="@string/app_name"
android:supportsRtl="true">
<activity
android:name=".imageclassification.ui.ImageCameraActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar"
tools:ignore="WrongManifestParent" />
<activity
android:name=".objectdetection.ui.ObjectCameraActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar"
tools:ignore="WrongManifestParent" />
<activity
android:name=".objectdetection.ui.ObjectPhotoActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar"
tools:ignore="WrongManifestParent" />
</application>
</manifest>

View File

@ -85,8 +85,9 @@ char *CreateLocalModelBuffer(JNIEnv *env, jobject modelBuffer) {
* @param msOutputs
* @return
*/
std::string GarbageProcessRunnetResult(const int RET_CATEGORY_SUM, const char *const labels_name_map[],
std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs) {
std::string
GarbageProcessRunnetResult(const int RET_CATEGORY_SUM, const char *const labels_name_map[],
std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs) {
// Get the branch of the model output.
// Use iterators to get map elements.
std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
@ -110,17 +111,17 @@ std::string GarbageProcessRunnetResult(const int RET_CATEGORY_SUM, const char *c
// Converted to text information that needs to be displayed in the APP.
std::string categoryScore = "";
if (maxIndex <= 9) {
categoryScore += labels_name_grbage_sort_map[0];
categoryScore += ":";
categoryScore += labels_name_grbage_sort_map[0];
categoryScore += ":";
} else if (maxIndex > 9 && maxIndex <= 17) {
categoryScore += labels_name_grbage_sort_map[1];
categoryScore += ":";
categoryScore += labels_name_grbage_sort_map[1];
categoryScore += ":";
} else if (maxIndex > 17 && maxIndex <= 21) {
categoryScore += labels_name_grbage_sort_map[2];
categoryScore += ":";
categoryScore += labels_name_grbage_sort_map[2];
categoryScore += ":";
} else if (maxIndex > 21 && maxIndex <= 25) {
categoryScore += labels_name_grbage_sort_map[3];
categoryScore += ":";
categoryScore += labels_name_grbage_sort_map[3];
categoryScore += ":";
}
categoryScore += labels_name_map[maxIndex];
return categoryScore;
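The chain of range checks above folds the fine-grained garbage labels into four coarse categories before the matched label text is appended. A hedged Java restatement of just that mapping (illustrative helper, not part of the project):

package com.mindspore.imageobject.imageclassification.help; // assumed

// Illustrative restatement of the coarse-category ranges used in GarbageProcessRunnetResult above.
public final class GarbageCategoryMapping {
    /** Maps the best-scoring label index to its coarse category index (0..3), or -1 if out of range. */
    public static int coarseCategory(int maxIndex) {
        if (maxIndex <= 9) return 0;      // indices 0..9
        if (maxIndex <= 17) return 1;     // indices 10..17
        if (maxIndex <= 21) return 2;     // indices 18..21
        if (maxIndex <= 25) return 3;     // indices 22..25
        return -1;                        // outside the range this model produces
    }
}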
@ -198,10 +199,10 @@ bool PreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr
*/
extern "C"
JNIEXPORT jlong JNICALL
Java_com_mindspore_himindspore_imageclassification_help_GarbageTrackingMobile_loadModel(JNIEnv *env,
jobject thiz,
jobject model_buffer,
jint num_thread) {
Java_com_mindspore_imageobject_imageclassification_help_GarbageTrackingMobile_loadModel(JNIEnv *env,
jobject thiz,
jobject model_buffer,
jint num_thread) {
if (nullptr == model_buffer) {
MS_PRINT("error, buffer is nullptr!");
return (jlong) nullptr;
@ -248,9 +249,10 @@ Java_com_mindspore_himindspore_imageclassification_help_GarbageTrackingMobile_lo
* sending a picture to the model and run inference.
*/
extern "C" JNIEXPORT jstring JNICALL
Java_com_mindspore_himindspore_imageclassification_help_GarbageTrackingMobile_runNet(JNIEnv *env, jclass type,
jlong netEnv,
jobject srcBitmap) {
Java_com_mindspore_imageobject_imageclassification_help_GarbageTrackingMobile_runNet(JNIEnv *env,
jclass type,
jlong netEnv,
jobject srcBitmap) {
LiteMat lite_mat_bgr, lite_norm_mat_cut;
if (!BitmapToLiteMat(env, srcBitmap, &lite_mat_bgr)) {
@ -315,16 +317,18 @@ Java_com_mindspore_himindspore_imageclassification_help_GarbageTrackingMobile_ru
}
std::string resultStr = GarbageProcessRunnetResult(::RET_GARBAGE_DETAILED_SUM,
::labels_name_grbage_detailed_map, msOutputs);
::labels_name_grbage_detailed_map,
msOutputs);
const char *resultCharData = resultStr.c_str();
return (env)->NewStringUTF(resultCharData);
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_mindspore_himindspore_imageclassification_help_GarbageTrackingMobile_unloadModel(JNIEnv *env,
jclass type,
jlong netEnv) {
Java_com_mindspore_imageobject_imageclassification_help_GarbageTrackingMobile_unloadModel(
JNIEnv *env,
jclass type,
jlong netEnv) {
MS_PRINT("MindSpore release net.");
void **labelEnv = reinterpret_cast<void **>(netEnv);
if (labelEnv == nullptr) {

View File

@ -671,7 +671,7 @@ bool ImagePreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_ma
*/
extern "C"
JNIEXPORT jlong JNICALL
Java_com_mindspore_himindspore_imageclassification_help_ImageTrackingMobile_loadModel(JNIEnv *env,
Java_com_mindspore_imageobject_imageclassification_help_ImageTrackingMobile_loadModel(JNIEnv *env,
jobject thiz,
jobject model_buffer,
jint num_thread) {
@ -721,7 +721,7 @@ Java_com_mindspore_himindspore_imageclassification_help_ImageTrackingMobile_load
* sending a picture to the model and run inference.
*/
extern "C" JNIEXPORT jstring JNICALL
Java_com_mindspore_himindspore_imageclassification_help_ImageTrackingMobile_runNet(JNIEnv *env,
Java_com_mindspore_imageobject_imageclassification_help_ImageTrackingMobile_runNet(JNIEnv *env,
jclass type,
jlong netEnv,
jobject srcBitmap) {
@ -795,7 +795,7 @@ Java_com_mindspore_himindspore_imageclassification_help_ImageTrackingMobile_runN
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_mindspore_himindspore_imageclassification_help_ImageTrackingMobile_unloadModel(JNIEnv *env,
Java_com_mindspore_imageobject_imageclassification_help_ImageTrackingMobile_unloadModel(JNIEnv *env,
jclass type,
jlong netEnv) {
MS_PRINT("MindSpore release net.");

View File

@ -26,37 +26,38 @@ MSNetWork::MSNetWork(void) : session_(nullptr), model_(nullptr) {}
MSNetWork::~MSNetWork(void) {}
void MSNetWork::CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx) {
session_ = mindspore::session::LiteSession::CreateSession(ctx);
if (session_ == nullptr) {
MS_PRINT("Create Session failed.");
return;
}
void
MSNetWork::CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx) {
session_ = mindspore::session::LiteSession::CreateSession(ctx);
if (session_ == nullptr) {
MS_PRINT("Create Session failed.");
return;
}
// Compile model.
model_ = mindspore::lite::Model::Import(modelBuffer, bufferLen);
if (model_ == nullptr) {
ReleaseNets();
MS_PRINT("Import model failed.");
return;
}
// Compile model.
model_ = mindspore::lite::Model::Import(modelBuffer, bufferLen);
if (model_ == nullptr) {
ReleaseNets();
MS_PRINT("Import model failed.");
return;
}
int ret = session_->CompileGraph(model_);
if (ret != mindspore::lite::RET_OK) {
ReleaseNets();
MS_PRINT("CompileGraph failed.");
return;
}
int ret = session_->CompileGraph(model_);
if (ret != mindspore::lite::RET_OK) {
ReleaseNets();
MS_PRINT("CompileGraph failed.");
return;
}
}
void MSNetWork::ReleaseNets(void) {
if (model_ != nullptr) {
model_->Free();
delete model_;
model_ = nullptr;
}
if (session_ != nullptr) {
delete session_;
session_ = nullptr;
}
if (model_ != nullptr) {
model_->Free();
delete model_;
model_ = nullptr;
}
if (session_ != nullptr) {
delete session_;
session_ = nullptr;
}
}

View File

@ -53,8 +53,10 @@ class MSNetWork {
void ReleaseNets(void);
mindspore::session::LiteSession *session() const { return session_; }
private:
mindspore::session::LiteSession *session_;
mindspore::lite::Model *model_;
};
#endif

View File

@ -0,0 +1,270 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <android/bitmap.h>
#include <android/asset_manager_jni.h>
#include <android/log.h>
#include <sstream>
#include <cstring>
#include <set>
#include <utility>
#include "include/errorcode.h"
#include "include/ms_tensor.h"
#include "MSNetWork.h"
#include "ssd_util/ssd_util.h"
#include "lite_cv/lite_mat.h"
#include "lite_cv/image_process.h"
using mindspore::dataset::LiteMat;
using mindspore::dataset::LPixelType;
using mindspore::dataset::LDataType;
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
bool ObjectBitmapToLiteMat(JNIEnv *env, const jobject &srcBitmap, LiteMat *lite_mat) {
bool ret = false;
AndroidBitmapInfo info;
void *pixels = nullptr;
LiteMat &lite_mat_bgr = *lite_mat;
AndroidBitmap_getInfo(env, srcBitmap, &info);
if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
MS_PRINT("Image Err, Request RGBA");
return false;
}
AndroidBitmap_lockPixels(env, srcBitmap, &pixels);
if (info.stride == info.width * 4) {
ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels),
LPixelType::RGBA2RGB, LDataType::UINT8,
info.width, info.height, lite_mat_bgr);
if (!ret) {
MS_PRINT("Init From RGBA error");
}
} else {
unsigned char *pixels_ptr = new unsigned char[info.width * info.height * 4];
unsigned char *ptr = pixels_ptr;
unsigned char *data = reinterpret_cast<unsigned char *>(pixels);
for (int i = 0; i < info.height; i++) {
memcpy(ptr, data, info.width * 4);
ptr += info.width * 4;
data += info.stride;
}
ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels_ptr),
LPixelType::RGBA2RGB, LDataType::UINT8,
info.width, info.height, lite_mat_bgr);
if (!ret) {
MS_PRINT("Init From RGBA error");
}
delete[] (pixels_ptr);
}
AndroidBitmap_unlockPixels(env, srcBitmap);
return ret;
}
bool ObjectPreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr) {
bool ret = false;
LiteMat lite_mat_resize;
LiteMat &lite_norm_mat_cut = *lite_norm_mat_ptr;
ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 300, 300);
if (!ret) {
MS_PRINT("ResizeBilinear error");
return false;
}
LiteMat lite_mat_convert_float;
ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0);
if (!ret) {
MS_PRINT("ConvertTo error");
return false;
}
std::vector<float> means = {0.485, 0.456, 0.406};
std::vector<float> stds = {0.229, 0.224, 0.225};
SubStractMeanNormalize(lite_mat_convert_float, lite_norm_mat_cut, means, stds);
return true;
}
char *ObjectCreateLocalModelBuffer(JNIEnv *env, jobject modelBuffer) {
jbyte *modelAddr = static_cast<jbyte *>(env->GetDirectBufferAddress(modelBuffer));
int modelLen = static_cast<int>(env->GetDirectBufferCapacity(modelBuffer));
char *buffer(new char[modelLen]);
memcpy(buffer, modelAddr, modelLen);
return buffer;
}
/**
*
* @param msOutputs Model output, the mindspore inferencing result.
* @param srcImageWidth The width of the original input image.
* @param srcImageHeight The height of the original input image.
* @return
*/
std::string
ProcessRunnetResult(std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs,
int srcImageWidth, int srcImageHeight) {
std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
iter = msOutputs.begin();
auto branch2_string = iter->first;
auto branch2_tensor = iter->second;
++iter;
auto branch1_string = iter->first;
auto branch1_tensor = iter->second;
MS_PRINT("%s %s", branch1_string.c_str(), branch2_string.c_str());
// ----------- Interface test --------------------------
float *tmpscores2 = reinterpret_cast<float *>(branch1_tensor->MutableData());
float *tmpdata = reinterpret_cast<float *>(branch2_tensor->MutableData());
// Using ssd model util to process model branch outputs.
SSDModelUtil ssdUtil(srcImageWidth, srcImageHeight);
std::string retStr = ssdUtil.getDecodeResult(tmpscores2, tmpdata);
MS_PRINT("retStr %s", retStr.c_str());
return retStr;
}
extern "C" JNIEXPORT jlong JNICALL
Java_com_mindspore_imageobject_objectdetection_help_ObjectTrackingMobile_loadModel(JNIEnv *env,
jobject thiz,
jobject assetManager,
jobject buffer,
jint numThread) {
MS_PRINT("MindSpore so version 20200730");
if (nullptr == buffer) {
MS_PRINT("error, buffer is nullptr!");
return (jlong) nullptr;
}
jlong bufferLen = env->GetDirectBufferCapacity(buffer);
MS_PRINT("MindSpore get bufferLen:%d", static_cast<int>(bufferLen));
if (0 == bufferLen) {
MS_PRINT("error, bufferLen is 0!");
return (jlong) nullptr;
}
char *modelBuffer = ObjectCreateLocalModelBuffer(env, buffer);
if (modelBuffer == nullptr) {
MS_PRINT("modelBuffer create failed!");
return (jlong) nullptr;
}
MS_PRINT("MindSpore loading Model.");
void **labelEnv = new void *;
MSNetWork *labelNet = new MSNetWork;
*labelEnv = labelNet;
mindspore::lite::Context *context = new mindspore::lite::Context;
context->thread_num_ = numThread;
labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
delete context;
if (labelNet->session() == nullptr) {
delete labelNet;
delete labelEnv;
MS_PRINT("MindSpore create session failed!.");
return (jlong) nullptr;
}
MS_PRINT("MindSpore create session successfully.");
if (buffer != nullptr) {
env->DeleteLocalRef(buffer);
}
if (assetManager != nullptr) {
env->DeleteLocalRef(assetManager);
}
MS_PRINT("ptr released successfully.");
return (jlong) labelEnv;
}
extern "C" JNIEXPORT jstring JNICALL
Java_com_mindspore_imageobject_objectdetection_help_ObjectTrackingMobile_runNet(JNIEnv *env,
jobject thiz,
jlong netEnv,
jobject srcBitmap) {
LiteMat lite_mat_bgr, lite_norm_mat_cut;
if (!ObjectBitmapToLiteMat(env, srcBitmap, &lite_mat_bgr)) {
MS_PRINT("ObjectBitmapToLiteMat error");
return NULL;
}
int srcImageWidth = lite_mat_bgr.width_;
int srcImageHeight = lite_mat_bgr.height_;
if (!ObjectPreProcessImageData(lite_mat_bgr, &lite_norm_mat_cut)) {
MS_PRINT("ObjectPreProcessImageData error");
return NULL;
}
ImgDims inputDims;
inputDims.channel = lite_norm_mat_cut.channel_;
inputDims.width = lite_norm_mat_cut.width_;
inputDims.height = lite_norm_mat_cut.height_;
// Get the mindsore inference environment which created in loadModel().
void **labelEnv = reinterpret_cast<void **>(netEnv);
if (labelEnv == nullptr) {
MS_PRINT("MindSpore error, labelEnv is a nullptr.");
return NULL;
}
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
auto mSession = labelNet->session();
if (mSession == nullptr) {
MS_PRINT("MindSpore error, Session is a nullptr.");
return NULL;
}
MS_PRINT("MindSpore get session.");
auto msInputs = mSession->GetInputs();
auto inTensor = msInputs.front();
float *dataHWC = reinterpret_cast<float *>(lite_norm_mat_cut.data_ptr_);
// copy input Tensor
memcpy(inTensor->MutableData(), dataHWC,
inputDims.channel * inputDims.width * inputDims.height * sizeof(float));
MS_PRINT("MindSpore get msInputs.");
auto status = mSession->RunGraph();
if (status != mindspore::lite::RET_OK) {
MS_PRINT("MindSpore runnet error.");
return NULL;
}
auto names = mSession->GetOutputTensorNames();
std::unordered_map<std::string,
mindspore::tensor::MSTensor *> msOutputs;
for (const auto &name : names) {
auto temp_dat = mSession->GetOutputByTensorName(name);
msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *>{name, temp_dat});
}
std::string retStr = ProcessRunnetResult(msOutputs, srcImageWidth, srcImageHeight);
const char *resultChardata = retStr.c_str();
return (env)->NewStringUTF(resultChardata);
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_mindspore_imageobject_objectdetection_help_ObjectTrackingMobile_unloadModel(JNIEnv *env,
jobject thiz,
jlong netEnv) {
void **labelEnv = reinterpret_cast<void **>(netEnv);
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
labelNet->ReleaseNets();
return (jboolean) true;
}
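The three native entry points above are reached from the Java wrapper com.mindspore.imageobject.objectdetection.help.ObjectTrackingMobile (its move into this module appears later in this diff). A rough usage sketch from the app side, assuming the wrapper exposes methods named after the native symbols and a Context-taking constructor, neither of which is shown in this hunk:
    // Illustrative only; the model buffer must be a direct ByteBuffer because
    // the native side reads it with GetDirectBufferAddress.
    ObjectTrackingMobile tracker = new ObjectTrackingMobile(context); // constructor assumed
    ByteBuffer model = ByteBuffer.allocateDirect(modelBytes.length);
    model.put(modelBytes);
    long netEnv = tracker.loadModel(getAssets(), model, 2); // returns the MSNetWork handle
    String boxes = tracker.runNet(netEnv, bitmap);          // decoded SSD result string
    tracker.unloadModel(netEnv);                            // frees session and model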

View File

@ -241,10 +241,14 @@ void SSDModelUtil::ssd_boxes_decode(const NormalBox *boxes,
float cx = boxes[i].x * scale0 * mDefaultBoxes[i].w + mDefaultBoxes[i].x;
float h = exp(boxes[i].h * scale1) * mDefaultBoxes[i].h;
float w = exp(boxes[i].w * scale1) * mDefaultBoxes[i].w;
decoded_boxes[i].ymin = std::min(1.0f, std::max(0.0f, cy - h / 2)) * config.model_input_height;
decoded_boxes[i].xmin = std::min(1.0f, std::max(0.0f, cx - w / 2)) * config.model_input_width;
decoded_boxes[i].ymax = std::min(1.0f, std::max(0.0f, cy + h / 2)) * config.model_input_height;
decoded_boxes[i].xmax = std::min(1.0f, std::max(0.0f, cx + w / 2)) * config.model_input_width;
decoded_boxes[i].ymin =
std::min(1.0f, std::max(0.0f, cy - h / 2)) * config.model_input_height;
decoded_boxes[i].xmin =
std::min(1.0f, std::max(0.0f, cx - w / 2)) * config.model_input_width;
decoded_boxes[i].ymax =
std::min(1.0f, std::max(0.0f, cy + h / 2)) * config.model_input_height;
decoded_boxes[i].xmax =
std::min(1.0f, std::max(0.0f, cx + w / 2)) * config.model_input_width;
}
}
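For intuition: the preprocessing in this commit resizes the input to 300 x 300, so a decoded center cy = 0.5 with height h = 0.4 gives ymin = clamp(0.5 - 0.2) * 300 = 90 and ymax = clamp(0.5 + 0.2) * 300 = 210 pixels; the clamp to [0, 1] only matters for boxes that spill past the image border.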

View File

@ -1,4 +1,19 @@
package com.mindspore.himindspore.camera;
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.imageobject.camera;
import android.Manifest;
import android.app.Activity;
@ -37,10 +52,10 @@ import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.app.ActivityCompat;
import com.mindspore.himindspore.base.TrackListener;
import com.mindspore.himindspore.imageclassification.help.GarbageTrackingMobile;
import com.mindspore.himindspore.imageclassification.help.ImageTrackingMobile;
import com.mindspore.himindspore.objectdetection.help.ObjectTrackingMobile;
import com.mindspore.imageobject.imageclassification.help.GarbageTrackingMobile;
import com.mindspore.imageobject.imageclassification.help.ImageTrackingMobile;
import com.mindspore.imageobject.objectdetection.help.ObjectTrackingMobile;
import com.mindspore.imageobject.track.TrackListener;
import java.io.File;
import java.io.FileOutputStream;

View File

@ -13,8 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.himindspore.imageclassification.bean;
package com.mindspore.imageobject.imageclassification.bean;
public class RecognitionImageBean {

View File

@ -14,13 +14,13 @@
* limitations under the License.
*/
package com.mindspore.himindspore.imageclassification.help;
package com.mindspore.imageobject.imageclassification.help;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import com.mindspore.himindspore.base.TrackListener;
import com.mindspore.imageobject.track.TrackListener;
import java.io.InputStream;
import java.nio.ByteBuffer;
@ -53,7 +53,7 @@ public class GarbageTrackingMobile implements TrackListener {
* JNI load model and also create model inference environment.
*
* @param modelBuffer Model buffer.
* @param numThread The num of thread.
* @param numThread The num of thread.
* @return MindSpore Inference environment address.
*/
public native long loadModel(ByteBuffer modelBuffer, int numThread);
@ -62,7 +62,7 @@ public class GarbageTrackingMobile implements TrackListener {
* Running model.
*
* @param netEnv Inference environment address.
* @param img A picture to be inferred.
* @param img A picture to be inferred.
* @return Inference result
*/
public native String runNet(long netEnv, Bitmap img);
@ -101,6 +101,7 @@ public class GarbageTrackingMobile implements TrackListener {
/**
* Unload model.
*
* @return true
*/
public boolean unloadModel() {
@ -110,6 +111,7 @@ public class GarbageTrackingMobile implements TrackListener {
/**
* Load model file stream.
*
* @param modelPath Model file path.
* @return Model ByteBuffer.
*/

View File

@ -14,13 +14,13 @@
* limitations under the License.
*/
package com.mindspore.himindspore.imageclassification.help;
package com.mindspore.imageobject.imageclassification.help;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import com.mindspore.himindspore.base.TrackListener;
import com.mindspore.imageobject.track.TrackListener;
import java.io.InputStream;
import java.nio.ByteBuffer;
@ -36,7 +36,7 @@ public class ImageTrackingMobile implements TrackListener {
System.loadLibrary("mlkit-label-MS");
Log.i(TAG, "load libiMindSpore.so successfully.");
} catch (UnsatisfiedLinkError e) {
Log.e(TAG, "UnsatisfiedLinkError " + e.getMessage());
Log.e(TAG, "UnsatisfiedLinkError >>>>>>" + e.getMessage());
}
}
@ -53,7 +53,7 @@ public class ImageTrackingMobile implements TrackListener {
* JNI load model and also create model inference environment.
*
* @param modelBuffer Model buffer.
* @param numThread The num of thread.
* @param numThread The num of thread.
* @return MindSpore Inference environment address.
*/
public native long loadModel(ByteBuffer modelBuffer, int numThread);
@ -62,7 +62,7 @@ public class ImageTrackingMobile implements TrackListener {
* Running model.
*
* @param netEnv Inference environment address.
* @param img A picture to be inferred.
* @param img A picture to be inferred.
* @return Inference result
*/
public native String runNet(long netEnv, Bitmap img);
@ -101,6 +101,7 @@ public class ImageTrackingMobile implements TrackListener {
/**
* Unload model.
*
* @return true
*/
public boolean unloadModel() {
@ -110,6 +111,7 @@ public class ImageTrackingMobile implements TrackListener {
/**
* Load model file stream.
*
* @param modelPath Model file path.
* @return Model ByteBuffer.
*/
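Taken together, the wrapper is driven roughly as follows from an Activity. This sketch assumes the ByteBuffer loader hinted at by the last Javadoc is named loadModelFile and that the constructor takes a Context; neither signature is shown in this hunk, and the model path is illustrative:
    ImageTrackingMobile tracker = new ImageTrackingMobile(this);      // constructor assumed
    ByteBuffer model = tracker.loadModelFile("model/mobilenetv2.ms"); // method name and path assumed
    long netEnv = tracker.loadModel(model, 2);      // native: create session, compile graph
    String labels = tracker.runNet(netEnv, bitmap); // native: preprocess + run inference
    boolean ok = tracker.unloadModel();             // native: release session and model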

View File

@ -14,7 +14,7 @@
* limitations under the License.
*/
package com.mindspore.himindspore.imageclassification.ui;
package com.mindspore.imageobject.imageclassification.ui;
import android.content.Context;
import android.util.AttributeSet;
@ -25,7 +25,7 @@ import android.widget.TextView;
import androidx.annotation.Nullable;
import com.mindspore.himindspore.R;
import com.mindspore.imageobject.R;
public class HorTextView extends LinearLayout {

View File

@ -14,7 +14,7 @@
* limitations under the License.
*/
package com.mindspore.himindspore.imageclassification.ui;
package com.mindspore.imageobject.imageclassification.ui;
import android.graphics.Color;
import android.os.Bundle;
@ -29,11 +29,14 @@ import android.widget.TextView;
import androidx.annotation.UiThread;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.himindspore.R;
import com.mindspore.himindspore.camera.CameraPreview;
import com.mindspore.himindspore.imageclassification.bean.RecognitionImageBean;
import com.mindspore.himindspore.imageclassification.help.GarbageTrackingMobile;
import com.mindspore.himindspore.imageclassification.help.ImageTrackingMobile;
import com.alibaba.android.arouter.facade.annotation.Autowired;
import com.alibaba.android.arouter.facade.annotation.Route;
import com.alibaba.android.arouter.launcher.ARouter;
import com.mindspore.imageobject.R;
import com.mindspore.imageobject.camera.CameraPreview;
import com.mindspore.imageobject.imageclassification.bean.RecognitionImageBean;
import com.mindspore.imageobject.imageclassification.help.GarbageTrackingMobile;
import com.mindspore.imageobject.imageclassification.help.ImageTrackingMobile;
import java.util.ArrayList;
import java.util.Collections;
@ -44,13 +47,14 @@ import java.util.List;
* The main interface of camera preview.
* Using Camera 2 API.
*/
@Route(path = "/imageobject/ImageCameraActivity")
public class ImageCameraActivity extends AppCompatActivity implements CameraPreview.RecognitionDataCallBack {
private static final String TAG = "ImageCameraActivity";
public static final String OPEN_TYPE = "OPEN_TYPE";
public static final int TYPE_DEMO = 1;
public static final int TYPE_CUSTOM = 2;
private int enterType;
@Autowired(name = "OPEN_TYPE")
int enterType;
private LinearLayout bottomLayout;
@ -65,8 +69,10 @@ public class ImageCameraActivity extends AppCompatActivity implements CameraPrev
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//inject
ARouter.getInstance().inject(this);
setContentView(R.layout.activity_image_camera);
enterType = getIntent().getIntExtra(OPEN_TYPE, TYPE_DEMO);
cameraPreview = findViewById(R.id.image_camera_preview);
bottomLayout = findViewById(R.id.layout_bottom_content);
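With the @Route path and the @Autowired OPEN_TYPE field above, other modules no longer need an explicit Intent to open this screen. A sketch of the ARouter call they would use instead (the extra key must match the @Autowired name):
    // Illustrative only: navigate to the camera screen from the host app.
    ARouter.getInstance()
            .build("/imageobject/ImageCameraActivity")
            .withInt("OPEN_TYPE", ImageCameraActivity.TYPE_DEMO) // injected into enterType
            .navigation();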

View File

@ -1,4 +1,19 @@
package com.mindspore.himindspore.objectdetection.bean;
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.imageobject.objectdetection.bean;
import android.text.TextUtils;

View File

@ -0,0 +1,151 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.imageobject.objectdetection.help;
import android.app.Activity;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.provider.MediaStore;
import android.util.Log;
import java.io.IOException;
import java.io.InputStream;
public class BitmapUtils {
private static final String TAG = "BitmapUtils";
public static void recycleBitmap(Bitmap... bitmaps) {
for (Bitmap bitmap : bitmaps) {
if (bitmap != null && !bitmap.isRecycled()) {
bitmap.recycle();
bitmap = null;
}
}
}
private static String getImagePath(Activity activity, Uri uri) {
String[] projection = {MediaStore.Images.Media.DATA};
Cursor cursor = activity.managedQuery(uri, projection, null, null, null);
int columnIndex = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
cursor.moveToFirst();
return cursor.getString(columnIndex);
}
public static Bitmap loadFromPath(Activity activity, int id, int width, int height) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
InputStream is = activity.getResources().openRawResource(id);
int sampleSize = calculateInSampleSize(options, width, height);
options.inSampleSize = sampleSize;
options.inJustDecodeBounds = false;
return zoomImage(BitmapFactory.decodeStream(is), width, height);
}
public static Bitmap loadFromPath(Activity activity, Uri uri, int width, int height) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
String path = getImagePath(activity, uri);
BitmapFactory.decodeFile(path, options);
int sampleSize = calculateInSampleSize(options, width, height);
options.inSampleSize = sampleSize;
options.inJustDecodeBounds = false;
Bitmap bitmap = zoomImage(BitmapFactory.decodeFile(path, options), width, height);
return rotateBitmap(bitmap, getRotationAngle(path));
}
private static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) {
final int width = options.outWidth;
final int height = options.outHeight;
int inSampleSize = 1;
if (height > reqHeight || width > reqWidth) {
// Calculate height and required height scale.
final int heightRatio = Math.round((float) height / (float) reqHeight);
// Calculate width and required width scale.
final int widthRatio = Math.round((float) width / (float) reqWidth);
// Take the larger of the values.
inSampleSize = heightRatio > widthRatio ? heightRatio : widthRatio;
}
return inSampleSize;
}
// Scale pictures to screen width.
private static Bitmap zoomImage(Bitmap imageBitmap, int targetWidth, int maxHeight) {
float scaleFactor =
Math.max(
(float) imageBitmap.getWidth() / (float) targetWidth,
(float) imageBitmap.getHeight() / (float) maxHeight);
Bitmap resizedBitmap =
Bitmap.createScaledBitmap(
imageBitmap,
(int) (imageBitmap.getWidth() / scaleFactor),
(int) (imageBitmap.getHeight() / scaleFactor),
true);
return resizedBitmap;
}
/**
* Get the rotation angle of the photo.
*
* @param path photo path.
* @return angle.
*/
public static int getRotationAngle(String path) {
int rotation = 0;
try {
ExifInterface exifInterface = new ExifInterface(path);
int orientation = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
switch (orientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
rotation = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotation = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotation = 270;
break;
default:
break;
}
} catch (IOException e) {
Log.e(TAG, "Failed to get rotation: " + e.getMessage());
}
return rotation;
}
public static Bitmap rotateBitmap(Bitmap bitmap, int angle) {
Matrix matrix = new Matrix();
matrix.postRotate(angle);
Bitmap result = null;
try {
result = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
} catch (OutOfMemoryError e) {
Log.e(TAG, "Failed to rotate bitmap: " + e.getMessage());
}
if (result == null) {
return bitmap;
}
return result;
}
}
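A quick usage sketch for the helpers above, e.g. in onActivityResult after picking a photo from the gallery (the target size is a placeholder):
    // loadFromPath already applies the EXIF rotation via getRotationAngle/rotateBitmap.
    Bitmap photo = BitmapUtils.loadFromPath(this, uri, 800, 800);
    imageView.setImageBitmap(photo);
    // ...once nothing references the bitmap any more:
    BitmapUtils.recycleBitmap(photo);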

View File

@ -1,4 +1,19 @@
package com.mindspore.himindspore.utils;
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.imageobject.objectdetection.help;
import android.content.Context;

View File

@ -1,11 +1,26 @@
package com.mindspore.himindspore.objectdetection.help;
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.imageobject.objectdetection.help;
import android.content.Context;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.util.Log;
import com.mindspore.himindspore.base.TrackListener;
import com.mindspore.imageobject.track.TrackListener;
import java.io.FileNotFoundException;
import java.io.InputStream;
@ -20,7 +35,7 @@ public class ObjectTrackingMobile implements TrackListener {
System.loadLibrary("mlkit-label-MS");
Log.i(TAG, "load libiMindSpore.so successfully.");
} catch (UnsatisfiedLinkError e) {
Log.e(TAG, "UnsatisfiedLinkError " + e.getMessage());
Log.e(TAG, "UnsatisfiedLinkError >>>>>>" + e.getMessage());
}
}

View File

@ -1,4 +1,19 @@
package com.mindspore.himindspore.objectdetection.ui;
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.imageobject.objectdetection.ui;
import android.os.Bundle;
import android.text.TextUtils;
@ -6,15 +21,16 @@ import android.util.Log;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.himindspore.R;
import com.mindspore.himindspore.camera.CameraPreview;
import com.mindspore.himindspore.objectdetection.bean.RecognitionObjectBean;
import com.mindspore.himindspore.objectdetection.help.ObjectTrackingMobile;
import com.alibaba.android.arouter.facade.annotation.Route;
import com.mindspore.imageobject.R;
import com.mindspore.imageobject.camera.CameraPreview;
import com.mindspore.imageobject.objectdetection.bean.RecognitionObjectBean;
import com.mindspore.imageobject.objectdetection.help.ObjectTrackingMobile;
import java.io.FileNotFoundException;
import java.util.List;
import static com.mindspore.himindspore.objectdetection.bean.RecognitionObjectBean.getRecognitionList;
import static com.mindspore.imageobject.objectdetection.bean.RecognitionObjectBean.getRecognitionList;
/**
@ -22,7 +38,7 @@ import static com.mindspore.himindspore.objectdetection.bean.RecognitionObjectBe
* <p>
* Pass in pictures to JNI, test mindspore model, load reasoning, etc
*/
@Route(path = "/imageobject/ObjectCameraActivity")
public class ObjectCameraActivity extends AppCompatActivity implements CameraPreview.RecognitionDataCallBack {
private final String TAG = "ObjectCameraActivity";

View File

@ -1,4 +1,19 @@
package com.mindspore.himindspore.objectdetection.ui;
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.imageobject.objectdetection.ui;
import android.content.Intent;
import android.content.res.Configuration;
@ -17,18 +32,19 @@ import android.widget.ImageView;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.himindspore.R;
import com.mindspore.himindspore.objectdetection.bean.RecognitionObjectBean;
import com.mindspore.himindspore.objectdetection.help.BitmapUtils;
import com.mindspore.himindspore.objectdetection.help.ObjectTrackingMobile;
import com.mindspore.himindspore.utils.DisplayUtil;
import com.alibaba.android.arouter.facade.annotation.Route;
import com.mindspore.imageobject.R;
import com.mindspore.imageobject.objectdetection.bean.RecognitionObjectBean;
import com.mindspore.imageobject.objectdetection.help.BitmapUtils;
import com.mindspore.imageobject.objectdetection.help.DisplayUtil;
import com.mindspore.imageobject.objectdetection.help.ObjectTrackingMobile;
import java.io.FileNotFoundException;
import java.util.List;
import static com.mindspore.himindspore.objectdetection.bean.RecognitionObjectBean.getRecognitionList;
import static com.mindspore.imageobject.objectdetection.bean.RecognitionObjectBean.getRecognitionList;
@Route(path = "/imageobject/ObjectPhotoActivity")
public class ObjectPhotoActivity extends AppCompatActivity {
private static final String TAG = "ObjectPhotoActivity";

View File

@ -1,4 +1,19 @@
package com.mindspore.himindspore.objectdetection.ui;
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.imageobject.objectdetection.ui;
import android.content.Context;
import android.graphics.Canvas;
@ -9,9 +24,9 @@ import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import com.mindspore.himindspore.R;
import com.mindspore.himindspore.objectdetection.bean.RecognitionObjectBean;
import com.mindspore.himindspore.utils.DisplayUtil;
import com.mindspore.imageobject.R;
import com.mindspore.imageobject.objectdetection.bean.RecognitionObjectBean;
import com.mindspore.imageobject.objectdetection.help.DisplayUtil;
import java.util.ArrayList;
import java.util.List;

View File

@ -0,0 +1,19 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.imageobject.track;
public interface TrackListener {
}

View File

@ -0,0 +1,30 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

Binary file added (75 KiB)
Binary file added (9.2 KiB)

View File

@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>

View File

@ -5,10 +5,10 @@
android:id="@+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".imageclassification.ui.ImageCameraActivity">
tools:context="com.mindspore.imageobject.imageclassification.ui.ImageCameraActivity">
<com.mindspore.himindspore.camera.CameraPreview
<com.mindspore.imageobject.camera.CameraPreview
android:id="@+id/image_camera_preview"
android:layout_width="match_parent"
android:layout_height="match_parent"

View File

@ -1,16 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout
xmlns:android="http://schemas.android.com/apk/res/android"
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".objectdetection.ui.ObjectCameraActivity">
tools:context="com.mindspore.imageobject.objectdetection.ui.ObjectCameraActivity">
<com.mindspore.himindspore.camera.CameraPreview
<com.mindspore.imageobject.camera.CameraPreview
android:id="@+id/camera_preview"
android:layout_width="match_parent"
android:layout_height="match_parent"
@ -19,10 +17,9 @@
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<com.mindspore.himindspore.objectdetection.ui.ObjectRectView
<com.mindspore.imageobject.objectdetection.ui.ObjectRectView
android:id="@+id/objRectView"
android:layout_width="match_parent"
android:layout_height="match_parent"
/>
android:layout_height="match_parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -7,10 +7,10 @@
android:keepScreenOn="true"
android:orientation="vertical">
<ImageView
android:id="@+id/img_photo"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerInParent="true"
android:scaleType="fitXY"/>
<ImageView
android:id="@+id/img_photo"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerInParent="true"
android:scaleType="fitXY" />
</RelativeLayout>

View File

@ -39,5 +39,5 @@
android:background="@color/white"
android:layout_below="@+id/tv_left_title"
android:layout_height="0.5dp"
android:layout_width="match_parent"/>
android:layout_width="match_parent" />
</RelativeLayout>

Binary file added (6.0 KiB)
Binary file added (7.6 KiB)
Binary file added (3.7 KiB)
Binary file added (4.4 KiB)
Binary file added (8.7 KiB)
Binary file added (11 KiB)
Binary file added (15 KiB)
Binary file added (18 KiB)
Binary file added (22 KiB)
Binary file added (27 KiB)

View File

@ -0,0 +1,20 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#303030</color>
<color name="colorPrimaryDark">#3700B3</color>
<color name="colorAccent">#03DAC5</color>
<color name="mindspore_semi_transparent">#66000000</color>
<color name="white">#ffffff</color>
<color name="black">#000000</color>
<color name="gray">#A69D9D</color>
<color name="gray_btn">#424242</color>
<color name="text_blue">#6DA7FF</color>
<color name="text_yellow">#F8E71C</color>
<color name="text_orange">#FF844D</color>
<color name="text_green">#66B50A</color>
</resources>

View File

@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<dimen name="ms_bottom_sheet_corner_radius">15dp</dimen>
<dimen name="ms_bottom_sheet_top_padding">8dp</dimen>
<dimen name="hor_text_view_text_margin_normal">15dp</dimen>
<dimen name="hor_text_view_text_margin_small">6dp</dimen>
<dimen name="hor_text_view_text_size">14sp</dimen>
</resources>

View File

@ -0,0 +1,23 @@
<resources>
<string name="app_name">HiMindSpore</string>
<string name="app_name_title">MindSpore</string>
<string name="switch_custom">custom</string>
<string name="title_image">Image Classification</string>
<string name="title_image_garbage">Garbage Classification</string>
<string name="title_object">Photo Detection</string>
<string name="title_object_camera">Camera Detection</string>
<string name="title_pose_net">PoseNet</string>
<string name="title_style_transfer">Style Transfer</string>
<string name="title_source">Source Code</string>
<string name="title_help">Help And FeedBack</string>
<string name="title_photo">Photo</string>
<string name="title_camera">Camera</string>
<string name="title_demo">Demo</string>
<string name="title_custom">Custom</string>
<string name="title_time">Inference Time</string>
</resources>

View File

@ -0,0 +1,11 @@
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
</style>
</resources>

View File

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<paths>
<external-path
path="Android/data/com.mindspore.himindspore/"
name="files_root" />
<files-path
name="Android/data/com.mindspore.himindspore/"
path="files_root"></files-path>
</paths>

View File

@ -0,0 +1,17 @@
package com.mindspore.imageobject;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() {
assertEquals(4, 2 + 2);
}
}

View File

@ -0,0 +1 @@
/build

View File

@ -0,0 +1,71 @@
plugins {
id 'com.android.library'
}
android {
compileSdkVersion 30
buildToolsVersion "30.0.1"
defaultConfig {
minSdkVersion 21
targetSdkVersion 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
javaCompileOptions {
annotationProcessorOptions {
arguments = [moduleName: project.getName()]
}
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
aaptOptions {
noCompress "ms"
}
lintOptions {
checkReleaseBuilds false
// Or, if you prefer, you can continue to check for errors in release builds,
// but continue the build even when errors are found:
abortOnError false
}
repositories {
google()
jcenter()
flatDir {
dirs 'libs'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
// Download default models; if you wish to use your own models then
// place them in the "assets" directory and comment out this line.
apply from: 'download.gradle'
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar', '*.aar'])
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'com.google.android.material:material:1.2.1'
implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
testImplementation 'junit:junit:4.+'
androidTestImplementation 'androidx.test.ext:junit:1.1.2'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
implementation 'com.alibaba:arouter-api:1.2.1'
annotationProcessor 'com.alibaba:arouter-compiler:1.1.2'
}

View File

@ -0,0 +1,73 @@
/**
* To download necessary library from HuaWei server.
* Including mindspore-lite .so file, minddata-lite .so file and model file.
* The libraries can be downloaded manually.
*/
def mindsporeLite_Version = "mindspore-lite-maven-1.0.1"
def targetModelFile = "src/main/assets/posenet_model.ms"
def modelDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/posenet_lite/posenet_model.ms"
def mindsporeLiteDownloadUrl = "https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/lite/java/${mindsporeLite_Version}.zip"
def mindSporeLibrary = "libs/${mindsporeLite_Version}.zip"
def cleantargetMindSporeInclude = "libs"
def targetMindSporeInclude = "libs/"
task downloadModelFile(type: DownloadUrlTask) {
doFirst {
println "Downloading ${modelDownloadUrl}"
}
sourceUrl = "${modelDownloadUrl}"
target = file("${targetModelFile}")
}
task downloadMindSporeLibrary(type: DownloadUrlTask) {
doFirst {
println "Downloading ${mindsporeLiteDownloadUrl}"
}
sourceUrl = "${mindsporeLiteDownloadUrl}"
target = file("${mindSporeLibrary}")
}
task unzipMindSporeInclude(type: Copy, dependsOn: ['downloadMindSporeLibrary']) {
doFirst {
println "Unzipping ${mindSporeLibrary}"
}
from zipTree("${mindSporeLibrary}")
into "${targetMindSporeInclude}"
}
task cleanUnusedmindsporeFiles(type: Delete, dependsOn: ['unzipMindSporeInclude']) {
delete fileTree("${cleantargetMindSporeInclude}").matching {
include "*.zip"
}
}
if (file("libs/mindspore-lite-1.0.1.aar").exists()) {
downloadMindSporeLibrary.enabled = false
unzipMindSporeInclude.enabled = false
cleanUnusedmindsporeFiles.enabled = false
}
if (file("src/main/assets/posenet_model.ms").exists()) {
downloadModelFile.enabled = false
}
preBuild.dependsOn downloadModelFile
preBuild.dependsOn downloadMindSporeLibrary
preBuild.dependsOn unzipMindSporeInclude
preBuild.dependsOn cleanUnusedmindsporeFiles
class DownloadUrlTask extends DefaultTask {
@Input
String sourceUrl
@OutputFile
File target
@TaskAction
void download() {
ant.get(src: sourceUrl, dest: target)
}
}

View File

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@ -0,0 +1,26 @@
package com.mindspore.posenet;
import android.content.Context;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.platform.app.InstrumentationRegistry;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.assertEquals;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
@Test
public void useAppContext() {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
assertEquals("com.mindspore.posenet", appContext.getPackageName());
}
}

View File

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.mindspore.posenet">
<application
android:allowBackup="true"
android:label="@string/app_name"
android:supportsRtl="true">
<activity
android:name=".PosenetMainActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar">
</activity>
</application>
</manifest>

View File

@ -0,0 +1,23 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.posenet;
import android.media.Image;
import android.view.SurfaceView;
public interface CameraDataDealListener {
void dataDeal(Image image, SurfaceView surfaceView);
}

View File

@ -0,0 +1,74 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.posenet;
public class ImageUtils {
// This value is 2 ^ 18 - 1, and is used to hold the RGB values together before their ranges
// are normalized to eight bits.
private static final int MAX_CHANNEL_VALUE = 262143;
/**
* Helper function to convert y,u,v integer values to RGB format
*/
private static int convertYUVToRGB(int y, int u, int v) {
// Adjust and check YUV values
int yNew = y - 16 < 0 ? 0 : y - 16;
int uNew = u - 128;
int vNew = v - 128;
int expandY = 1192 * yNew;
int r = checkBoundaries(expandY + 1634 * vNew);
int g = checkBoundaries(expandY - 833 * vNew - 400 * uNew);
int b = checkBoundaries(expandY + 2066 * uNew);
return -0x1000000 | (r << 6 & 0xff0000) | (g >> 2 & 0xff00) | (b >> 10 & 0xff);
}
private static int checkBoundaries(int value) {
if (value > MAX_CHANNEL_VALUE) {
return MAX_CHANNEL_VALUE;
} else if (value < 0) {
return 0;
} else {
return value;
}
}
/**
* Converts YUV420 format image data (ByteArray) into ARGB8888 format with IntArray as output.
*/
public static void convertYUV420ToARGB8888(byte[] yData, byte[] uData, byte[] vData,
int width, int height,
int yRowStride, int uvRowStride, int uvPixelStride, int[] out) {
int outputIndex = 0;
for (int j = 0; j < height; j++) {
int positionY = yRowStride * j;
int positionUV = uvRowStride * (j >> 1);
for (int i = 0; i < width; i++) {
int uvOffset = positionUV + (i >> 1) * uvPixelStride;
// "0xff and" is used to cut off bits from following value that are higher than
// the low 8 bits
out[outputIndex++] = convertYUVToRGB(
0xff & yData[positionY + i], 0xff & uData[uvOffset],
0xff & vData[uvOffset]);
}
}
}
}
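A sketch of how a YUV_420_888 camera frame from the ImageReader used elsewhere in this module would be fed through this helper; plane order Y/U/V as returned by Image.getPlanes() is assumed:
    Image.Plane[] planes = image.getPlanes();
    byte[] y = new byte[planes[0].getBuffer().remaining()];
    byte[] u = new byte[planes[1].getBuffer().remaining()];
    byte[] v = new byte[planes[2].getBuffer().remaining()];
    planes[0].getBuffer().get(y);
    planes[1].getBuffer().get(u);
    planes[2].getBuffer().get(v);

    int[] argb = new int[image.getWidth() * image.getHeight()];
    ImageUtils.convertYUV420ToARGB8888(
            y, u, v,
            image.getWidth(), image.getHeight(),
            planes[0].getRowStride(),   // yRowStride
            planes[1].getRowStride(),   // uvRowStride
            planes[1].getPixelStride(), // uvPixelStride
            argb);
    Bitmap frame = Bitmap.createBitmap(argb, image.getWidth(), image.getHeight(),
            Bitmap.Config.ARGB_8888);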

View File

@ -0,0 +1,390 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.posenet;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.app.ActivityCompat;
import androidx.fragment.app.Fragment;
import java.util.Arrays;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
/**
* A simple {@link Fragment} subclass.
* create an instance of this fragment.
*/
public class PoseNetFragment extends Fragment {
private final static int REQUEST_CAMERA_PERMISSION = 1;
private String cameraId = "1";
private SurfaceView surfaceView;
private CameraCaptureSession captureSession;
private CameraDevice cameraDevice;
private Size previewSize;
private int previewWidth;
private int previewHeight;
private final int PREVIEW_WIDTH = 640;
private final int PREVIEW_HEIGHT = 480;
private HandlerThread backgroundThread;
private Handler backgroundHandler;
private ImageReader imageReader;
private CaptureRequest.Builder previewRequestBuilder;
private CaptureRequest previewRequest;
private Semaphore cameraOpenCloseLock = new Semaphore(1); // Use a Semaphore to coordinate camera open/close across threads
private boolean flashSupported;
private static final String TAG = "PoseNetFragment";
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice mCameraDevice) {
cameraOpenCloseLock.release();
Log.d(TAG, "camera has open");
PoseNetFragment.this.cameraDevice = mCameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
cameraOpenCloseLock.release();
cameraDevice.close();
PoseNetFragment.this.cameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
onDisconnected(cameraDevice);
Activity activity = getActivity();
if (activity != null) {
activity.finish();
}
}
};
private CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) {
super.onCaptureProgressed(session, request, partialResult);
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
}
};
private CameraDataDealListener cameraDataDealListener;
public void setCameraDataDealListener(CameraDataDealListener cameraDataDealListener) {
this.cameraDataDealListener = cameraDataDealListener;
}
public static PoseNetFragment newInstance() {
PoseNetFragment fragment = new PoseNetFragment();
return fragment;
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
return inflater.inflate(R.layout.fragment_pose_net, container, false);
}
@Override
public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
this.surfaceView = view.findViewById(R.id.surfaceView);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
}
@Override
public void onStart() {
super.onStart();
openCamera();
}
@Override
public void onPause() {
this.closeCamera();
this.stopBackgroundThread();
super.onPause();
}
@Override
public void onDestroy() {
super.onDestroy();
}
private void requestCameraPermission() {
// Show the rationale if needed, then request the permissions either way.
if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
Toast.makeText(getContext(), "This app needs camera permission.", Toast.LENGTH_LONG).show();
}
requestPermissions(new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.READ_PHONE_STATE, Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_CAMERA_PERMISSION) {
if (allPermissionsGranted(grantResults)) {
openCamera();
} else {
Toast.makeText(getContext(), "This app needs camera permission.", Toast.LENGTH_LONG).show();
}
}
}
private boolean allPermissionsGranted(int[] grantResults) {
for (int grantResult : grantResults) {
if (grantResult == PackageManager.PERMISSION_DENIED) {
return false;
}
}
return true;
}
/**
* Sets up member variables related to camera.
*/
private void setUpCameraOutputs() {
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// We don't use a front facing camera in this sample.
Integer cameraDirection = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cameraDirection != null && cameraDirection == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
previewSize = new Size(PREVIEW_WIDTH, PREVIEW_HEIGHT);
imageReader = ImageReader.newInstance(
PREVIEW_WIDTH, PREVIEW_HEIGHT,
ImageFormat.YUV_420_888, /*maxImages*/ 2
);
previewHeight = previewSize.getHeight();
previewWidth = previewSize.getWidth();
// Check if the flash is supported; the characteristic may be null on some devices.
flashSupported =
Boolean.TRUE.equals(characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE));
this.cameraId = cameraId;
// We've found a viable camera and finished setting up member variables,
// so we don't need to iterate through other available cameras.
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (NullPointerException e) {
e.printStackTrace();
}
}
/**
* Opens the camera specified by [PosenetActivity.cameraId].
*/
@SuppressLint("MissingPermission")
private void openCamera() {
if (ActivityCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
this.requestCameraPermission();
return;
}
setUpCameraOutputs();
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
// Wait for camera to open - 2.5 seconds is sufficient
if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
manager.openCamera(cameraId, mStateCallback, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void closeCamera() {
try {
cameraOpenCloseLock.acquire();
if (captureSession != null) {
captureSession.close();
captureSession = null;
}
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
cameraOpenCloseLock.release();
}
}
/**
* Starts a background thread and its [Handler].
*/
private void startBackgroundThread() {
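// The handler created here is passed to the camera and ImageReader callbacks so that
// frame processing stays off the UI thread.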
backgroundThread = new HandlerThread("imageAvailableListener");
backgroundThread.start();
backgroundHandler = new Handler(backgroundThread.getLooper());
}
/**
* Stops the background thread and its [Handler].
*/
private void stopBackgroundThread() {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
backgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private final ImageReader.OnImageAvailableListener imageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imageReader) {
if (previewWidth == 0 || previewHeight == 0) {
return;
}
Image image = imageReader.acquireLatestImage();
if (image == null) {
return;
}
if (cameraDataDealListener != null) {
cameraDataDealListener.dataDeal(image, surfaceView);
} else {
// No consumer registered; close the image so the ImageReader queue does not fill up.
image.close();
}
}
};
/**
* Creates a new [CameraCaptureSession] for camera preview.
*/
private void createCameraPreviewSession() {
try {
// We capture images from preview in YUV format.
imageReader = ImageReader.newInstance(
previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
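// maxImages = 2 lets acquireLatestImage() drop a stale frame while the previous one
// is still being processed.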
imageReader.setOnImageAvailableListener(imageAvailableListener, backgroundHandler);
// This is the surface we need to record images for processing.
Surface recordingSurface = imageReader.getSurface();
// We set up a CaptureRequest.Builder with the output Surface.
previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
previewRequestBuilder.addTarget(recordingSurface);
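// Only the ImageReader surface is targeted; each frame is handed to the CameraDataDealListener,
// which is expected to run inference and render results onto the SurfaceView.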
// Here, we create a CameraCaptureSession for camera preview.
cameraDevice.createCaptureSession(
Arrays.asList(recordingSurface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (cameraDevice == null) {
return;
}
// When the session is ready, we start displaying the preview.
captureSession = cameraCaptureSession;
try {
// Auto focus should be continuous for camera preview.
previewRequestBuilder.set(
CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE
);
// Flash is automatically enabled when necessary.
setAutoFlash(previewRequestBuilder);
// Finally, we start displaying the camera preview.
previewRequest = previewRequestBuilder.build();
captureSession.setRepeatingRequest(
previewRequest,
captureCallback, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
}
},
null);
} catch (CameraAccessException e) {
Log.e(TAG, e.toString());
}
}
private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
if (flashSupported) {
requestBuilder.set(
CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
}
}
}

View File

@ -0,0 +1,315 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.posenet;
import android.content.Context;
import android.graphics.Bitmap;
import android.os.SystemClock;
import android.util.Log;
import androidx.core.util.Pair;
import com.mindspore.lite.LiteSession;
import com.mindspore.lite.MSTensor;
import com.mindspore.lite.Model;
import com.mindspore.lite.config.CpuBindMode;
import com.mindspore.lite.config.DeviceType;
import com.mindspore.lite.config.MSConfig;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import static java.lang.Math.exp;
public class Posenet {
public enum BodyPart {
NOSE,
LEFT_EYE,
RIGHT_EYE,
LEFT_EAR,
RIGHT_EAR,
LEFT_SHOULDER,
RIGHT_SHOULDER,
LEFT_ELBOW,
RIGHT_ELBOW,
LEFT_WRIST,
RIGHT_WRIST,
LEFT_HIP,
RIGHT_HIP,
LEFT_KNEE,
RIGHT_KNEE,
LEFT_ANKLE,
RIGHT_ANKLE
}
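// The 17 body parts above are assumed to follow the keypoint channel order of the model's
// heatmap and offset outputs (index 0 = NOSE, ..., index 16 = RIGHT_ANKLE).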
public class Position {
int x;
int y;
}
public class KeyPoint {
BodyPart bodyPart = BodyPart.NOSE;
Position position = new Position();
float score = 0.0f;
}
public class Person {
List<KeyPoint> keyPoints;
float score = 0.0f;
}
private Context mContext;
private MSConfig msConfig;
private LiteSession session;
private Model model;
private LinkedHashMap<String, MSTensor> mOutputs;
public long lastInferenceTimeNanos;
private final int NUM_THREADS = 4;
public Posenet(Context context) {
mContext = context;
init();
}
public boolean init() {
// Load the .ms model.
model = new Model();
if (!model.loadModel(mContext, "posenet_model.ms")) {
Log.e("MS_LITE", "Load Model failed");
return false;
}
// Create and init config.
msConfig = new MSConfig();
if (!msConfig.init(DeviceType.DT_CPU, NUM_THREADS, CpuBindMode.MID_CPU)) {
Log.e("MS_LITE", "Init context failed");
return false;
}
// Create the MindSpore lite session.
session = new LiteSession();
if (!session.init(msConfig)) {
Log.e("MS_LITE", "Create session failed");
msConfig.free();
return false;
}
msConfig.free();
// Compile graph.
if (!session.compileGraph(model)) {
Log.e("MS_LITE", "Compile graph failed");
model.freeBuffer();
return false;
}
// Note: after model.freeBuffer() is called, the model cannot be compiled again.
model.freeBuffer();
return true;
}
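/**
 * Maps a raw heatmap value to a confidence score in (0, 1).
 */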
private float sigmoid(float x) {
return (float) (1.0f / (1.0f + exp(-x)));
}
/**
* Scale the image to a byteBuffer of [-1,1] values.
*/
private ByteBuffer initInputArray(Bitmap bitmap) {
final int bytesPerChannel = 4;
final int inputChannels = 3;
final int batchSize = 1;
ByteBuffer inputBuffer = ByteBuffer.allocateDirect(
batchSize * bytesPerChannel * bitmap.getHeight() * bitmap.getWidth() * inputChannels
);
inputBuffer.order(ByteOrder.nativeOrder());
inputBuffer.rewind();
final float mean = 128.0f;
final float std = 128.0f;
int[] intValues = new int[bitmap.getWidth() * bitmap.getHeight()];
bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
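// Each pixel is a packed ARGB int; the bit shifts below extract the R, G and B channels
// and map them from [0, 255] to roughly [-1, 1] via (channel - 128) / 128.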
int pixel = 0;
for (int y = 0; y < bitmap.getHeight(); y++) {
for (int x = 0; x < bitmap.getWidth(); x++) {
int value = intValues[pixel++];
inputBuffer.putFloat(((float) (value >> 16 & 0xFF) - mean) / std);
inputBuffer.putFloat(((float) (value >> 8 & 0xFF) - mean) / std);
inputBuffer.putFloat(((float) (value & 0xFF) - mean) / std);
}
}
return inputBuffer;
}
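// A minimal usage sketch; it assumes the caller has already scaled the camera frame to the
// input size expected by posenet_model.ms and runs inference off the UI thread:
//   Posenet posenet = new Posenet(context);
//   Person person = posenet.estimateSinglePose(scaledBitmap);
//   if (person != null) {
//       for (KeyPoint kp : person.keyPoints) { /* draw kp.position when kp.score is high enough */ }
//   }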
/**
 * Estimates the pose for a single person.
 *
 * @param bitmap image bitmap of the frame that should be processed
 * @return a Person object containing keypoint locations and confidence scores,
 * or null if the model inputs/outputs are unexpected or inference fails
 */
Person estimateSinglePose(Bitmap bitmap) {
long estimationStartTimeNanos = SystemClock.elapsedRealtimeNanos();
ByteBuffer inputArray = this.initInputArray(bitmap);
List<MSTensor> inputs = session.getInputs();
if (inputs.size() != 1) {
return null;
}
Log.i("posenet", String.format("Scaling to [-1,1] took %.2f ms",
1.0f * (SystemClock.elapsedRealtimeNanos() - estimationStartTimeNanos) / 1_000_000));
MSTensor inTensor = inputs.get(0);
inTensor.setData(inputArray);
long inferenceStartTimeNanos = SystemClock.elapsedRealtimeNanos();
// Run graph to infer results.
if (!session.runGraph()) {
Log.e("MS_LITE", "Run graph failed");
return null;
}
lastInferenceTimeNanos = SystemClock.elapsedRealtimeNanos() - inferenceStartTimeNanos;
Log.i(
"posenet",
String.format("Interpreter took %.2f ms", 1.0f * lastInferenceTimeNanos / 1_000_000)
);
// Get output tensor values.
List<MSTensor> heatmaps_list = session.getOutputsByNodeName("Conv2D-27");
if (heatmaps_list == null) {
return null;
}
MSTensor heatmaps_tensors = heatmaps_list.get(0);
float[] heatmaps_results = heatmaps_tensors.getFloatData();
int[] heatmapsShape = heatmaps_tensors.getShape(); // [1, 9, 9, 17]
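// getFloatData() returns the output as a flat array in row-major order over the
// [1, 9, 9, 17] shape; the loops below rebuild the 4-D array, where element (x, y, z, i)
// sits at flat index n = i + z*D3 + y*D2*D3 + x*D1*D2*D3 with D1 = 9, D2 = 9, D3 = 17.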
float[][][][] heatmaps = new float[heatmapsShape[0]][][][];
for (int x = 0; x < heatmapsShape[0]; x++) { // heatmapsShape[0] =1
float[][][] arrayThree = new float[heatmapsShape[1]][][];
for (int y = 0; y < heatmapsShape[1]; y++) { // heatmapsShape[1] = 9
float[][] arrayTwo = new float[heatmapsShape[2]][];
for (int z = 0; z < heatmapsShape[2]; z++) { //heatmapsShape[2] = 9
float[] arrayOne = new float[heatmapsShape[3]]; //heatmapsShape[3] = 17
for (int i = 0; i < heatmapsShape[3]; i++) {
int n = i + z * heatmapsShape[3] + y * heatmapsShape[2] * heatmapsShape[3] + x * heatmapsShape[1] * heatmapsShape[2] * heatmapsShape[3];
arrayOne[i] = heatmaps_results[n]; // n is the flat index into the 1*9*9*17 buffer
}
arrayTwo[z] = arrayOne;
}
arrayThree[y] = arrayTwo;
}
heatmaps[x] = arrayThree;
}
List<MSTensor> offsets_list = session.getOutputsByNodeName("Conv2D-28");
if (offsets_list == null) {
return null;
}
MSTensor offsets_tensors = offsets_list.get(0);
float[] offsets_results = offsets_tensors.getFloatData();
int[] offsetsShapes = offsets_tensors.getShape();
float[][][][] offsets = new float[offsetsShapes[0]][][][];
for (int x = 0; x < offsetsShapes[0]; x++) {
float[][][] offsets_arrayThree = new float[offsetsShapes[1]][][];
for (int y = 0; y < offsetsShapes[1]; y++) {
float[][] offsets_arrayTwo = new float[offsetsShapes[2]][];
for (int z = 0; z < offsetsShapes[2]; z++) {
float[] offsets_arrayOne = new float[offsetsShapes[3]];
for (int i = 0; i < offsetsShapes[3]; i++) {
int n = i + z * offsetsShapes[3] + y * offsetsShapes[2] * offsetsShapes[3] + x * offsetsShapes[1] * offsetsShapes[2] * offsetsShapes[3];
offsets_arrayOne[i] = offsets_results[n];
}
offsets_arrayTwo[z] = offsets_arrayOne;
}
offsets_arrayThree[y] = offsets_arrayTwo;
}
offsets[x] = offsets_arrayThree;
}
int height = heatmaps[0].length; // 9
int width = heatmaps[0][0].length; // 9
int numKeypoints = heatmaps[0][0][0].length; // 17
// Finds the (row, col) locations of where the keypoints are most likely to be.
Pair[] keypointPositions = new Pair[numKeypoints];
for (int i = 0; i < numKeypoints; i++) {
keypointPositions[i] = new Pair(0, 0);
}
for (int keypoint = 0; keypoint < numKeypoints; keypoint++) {
float maxVal = heatmaps[0][0][0][keypoint];
int maxRow = 0;
int maxCol = 0;
for (int row = 0; row < height; row++) {
for (int col = 0; col < width; col++) {
if (heatmaps[0][row][col][keypoint] > maxVal) {
maxVal = heatmaps[0][row][col][keypoint];
maxRow = row;
maxCol = col;
}
}
}
keypointPositions[keypoint] = new Pair(maxRow, maxCol);
}
// Calculating the x and y coordinates of the keypoints with offset adjustment.
int[] xCoords = new int[numKeypoints];
int[] yCoords = new int[numKeypoints];
float[] confidenceScores = new float[numKeypoints];
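// The offsets tensor has 2 * numKeypoints channels per cell: channels [0, numKeypoints) hold
// y offsets and [numKeypoints, 2 * numKeypoints) hold x offsets, which is why the x coordinate
// below reads offsets[...][i + numKeypoints].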
for (int i = 0; i < keypointPositions.length; i++) {
Pair position = keypointPositions[i];
int positionY = (int) position.first;
int positionX = (int) position.second;
yCoords[i] = (int) ((float) positionY / (float) (height - 1) * bitmap.getHeight() + offsets[0][positionY][positionX][i]);
xCoords[i] = (int) ((float) positionX / (float) (width - 1) * bitmap.getWidth() + offsets[0][positionY][positionX][i + numKeypoints]);
confidenceScores[i] = sigmoid(heatmaps[0][positionY][positionX][i]);
}
Person person = new Person();
KeyPoint[] keypointList = new KeyPoint[numKeypoints];
for (int i = 0; i < numKeypoints; i++) {
keypointList[i] = new KeyPoint();
}
float totalScore = 0.0f;
for (int i = 0; i < keypointList.length; i++) {
// Assumes BodyPart.values() follows the model's keypoint channel order.
keypointList[i].bodyPart = BodyPart.values()[i];
keypointList[i].position.x = xCoords[i];
keypointList[i].position.y = yCoords[i];
keypointList[i].score = confidenceScores[i];
totalScore += confidenceScores[i];
}
person.keyPoints = Arrays.asList(keypointList);
person.score = totalScore / numKeypoints;
return person;
}
}
