!8736 add Android PoseNet demo

From: @sishuikang
Reviewed-by: @zhanghaibo5
Signed-off-by: @zhanghaibo5
This commit is contained in:
mindspore-ci-bot 2020-11-19 15:20:32 +08:00 committed by Gitee
commit 7a6c858704
46 changed files with 1990 additions and 2 deletions

View File

@ -371,7 +371,7 @@ Java_com_mindspore_himindsporedemo_gallery_classify_TrackingMobile_loadModel(JNI
mindspore::lite::Context *context = new mindspore::lite::Context;
context->thread_num_ = num_thread;
context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND;
-context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = true;
+context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = false;
context->device_list_[0].device_type_ = mindspore::lite::DT_CPU;
labelNet->CreateSessionMS(modelBuffer, bufferLen, context);

View File

@ -166,7 +166,7 @@ Java_com_mindspore_hiobject_help_TrackingMobile_loadModel(JNIEnv *env, jobject t
mindspore::lite::Context *context = new mindspore::lite::Context;
context->thread_num_ = numThread;
context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND;
-context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = true;
+context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = false;
context->device_list_[0].device_type_ = mindspore::lite::DT_CPU;
labelNet->CreateSessionMS(modelBuffer, bufferLen, context);

View File

@ -0,0 +1,14 @@
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
.cxx

View File

@ -0,0 +1 @@
/build

View File

@ -0,0 +1,59 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion 30
buildToolsVersion "30.0.1"
defaultConfig {
applicationId "com.mindspore.posnetdemo"
minSdkVersion 21
targetSdkVersion 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
aaptOptions {
noCompress "ms"
}
lintOptions {
checkReleaseBuilds false
// Or, if you prefer, you can continue to check for errors in release builds,
// but continue the build even when errors are found:
abortOnError false
}
repositories {
google()
jcenter()
flatDir {
dirs 'libs'
}
}
}
// Download default models; if you wish to use your own models then
// place them in the "assets" directory and comment out this line.
apply from:'download.gradle'
dependencies {
implementation fileTree(dir: "libs", include: ["*.jar"])
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
implementation 'androidx.legacy:legacy-support-v4:1.0.0'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.2'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
implementation(name: 'mindspore-lite-1.0.1', ext: 'aar')
}

View File

@ -0,0 +1,73 @@
/**
* Downloads the required libraries from the Huawei server,
* including the mindspore-lite .so file, the minddata-lite .so file, and the model file.
* The libraries can also be downloaded manually.
*/
def mindsporeLite_Version = "mindspore-lite-maven-1.0.1"
def targetModelFile = "src/main/assets/posenet_model.ms"
def modelDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/posenet_lite/posenet_model.ms"
def mindsporeLiteDownloadUrl = "https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/lite/java/${mindsporeLite_Version}.zip"
def mindSporeLibrary = "libs/${mindsporeLite_Version}.zip"
def cleantargetMindSporeInclude = "libs"
def targetMindSporeInclude = "libs/"
task downloadModelFile(type: DownloadUrlTask) {
doFirst {
println "Downloading ${modelDownloadUrl}"
}
sourceUrl = "${modelDownloadUrl}"
target = file("${targetModelFile}")
}
task downloadMindSporeLibrary(type: DownloadUrlTask) {
doFirst {
println "Downloading ${mindsporeLiteDownloadUrl}"
}
sourceUrl = "${mindsporeLiteDownloadUrl}"
target = file("${mindSporeLibrary}")
}
task unzipMindSporeInclude(type: Copy, dependsOn: ['downloadMindSporeLibrary']) {
doFirst {
println "Unzipping ${mindSporeLibrary}"
}
from zipTree("${mindSporeLibrary}")
into "${targetMindSporeInclude}"
}
task cleanUnusedmindsporeFiles(type: Delete, dependsOn: ['unzipMindSporeInclude']) {
delete fileTree("${cleantargetMindSporeInclude}").matching {
include "*.zip"
}
}
if (file("libs/mindspore-lite-1.0.1.aar").exists()){
downloadMindSporeLibrary.enabled = false
unzipMindSporeInclude.enabled = false
cleanUnusedmindsporeFiles.enabled = false
}
if (file("src/main/assets/posenet_model.ms").exists()){
downloadModelFile.enabled = false
}
preBuild.dependsOn downloadModelFile
preBuild.dependsOn downloadMindSporeLibrary
preBuild.dependsOn unzipMindSporeInclude
preBuild.dependsOn cleanUnusedmindsporeFiles
class DownloadUrlTask extends DefaultTask {
@Input
String sourceUrl
@OutputFile
File target
@TaskAction
void download() {
ant.get(src: sourceUrl, dest: target)
}
}
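
For readers less familiar with Gradle's Ant bridge, the sketch below is a plain-Java illustration of what the DownloadUrlTask above does via ant.get: fetch a URL and copy it to the target file. It is not part of this commit, and the class name UrlDownloadSketch is made up for the example.

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

// Illustration only: roughly what ant.get(src: ..., dest: ...) does for this build script.
public class UrlDownloadSketch {
    static void download(String sourceUrl, Path target) throws IOException {
        Files.createDirectories(target.getParent());            // e.g. src/main/assets/
        try (InputStream in = new URL(sourceUrl).openStream()) {
            Files.copy(in, target, StandardCopyOption.REPLACE_EXISTING);
        }
    }

    public static void main(String[] args) throws IOException {
        download("https://download.mindspore.cn/model_zoo/official/lite/posenet_lite/posenet_model.ms",
                Paths.get("src/main/assets/posenet_model.ms"));
    }
}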

View File

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@ -0,0 +1,26 @@
package com.mindspore.posenetdemo;
import android.content.Context;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.platform.app.InstrumentationRegistry;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.assertEquals;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
@Test
public void useAppContext() {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
assertEquals("com.mindspore.posnetdemo", appContext.getPackageName());
}
}

View File

@ -0,0 +1,32 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.mindspore.posenetdemo">
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity android:name=".TestActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar">
</activity>
<activity android:name=".MainActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View File

@ -0,0 +1,23 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.posenetdemo;
import android.media.Image;
import android.view.SurfaceView;
public interface CameraDataDealListener {
void dataDeal(Image image, SurfaceView surfaceView);
}

View File

@ -0,0 +1,74 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.posenetdemo;
public class ImageUtils {
// This value is 2 ^ 18 - 1, and is used to hold the RGB values together before their ranges
// are normalized to eight bits.
private static final int MAX_CHANNEL_VALUE = 262143;
/**
* Helper function to convert y,u,v integer values to RGB format
*/
private static int convertYUVToRGB(int y, int u, int v) {
// Adjust and check YUV values
int yNew = y - 16 < 0 ? 0 : y - 16;
int uNew = u - 128;
int vNew = v - 128;
int expandY = 1192 * yNew;
int r = checkBoundaries(expandY + 1634 * vNew);
int g = checkBoundaries(expandY - 833 * vNew - 400 * uNew);
int b = checkBoundaries(expandY + 2066 * uNew);
return -0x1000000 | (r << 6 & 0xff0000) | (g >> 2 & 0xff00) | (b >> 10 & 0xff);
}
private static int checkBoundaries(int value) {
if (value > MAX_CHANNEL_VALUE) {
return MAX_CHANNEL_VALUE;
} else if (value < 0) {
return 0;
} else {
return value;
}
}
/**
* Converts YUV420 format image data (ByteArray) into ARGB8888 format with IntArray as output.
*/
public static void convertYUV420ToARGB8888(byte[] yData, byte[] uData, byte[] vData,
int width, int height,
int yRowStride, int uvRowStride, int uvPixelStride, int[] out) {
int outputIndex = 0;
for (int j = 0; j < height; j++) {
int positionY = yRowStride * j;
int positionUV = uvRowStride * (j >> 1);
for (int i = 0; i < width; i++) {
int uvOffset = positionUV + (i >> 1) * uvPixelStride;
// "0xff and" is used to cut off bits from following value that are higher than
// the low 8 bits
out[outputIndex++] = convertYUVToRGB(
0xff & yData[positionY + i], 0xff & uData[uvOffset],
0xff & vData[uvOffset]);
}
}
}
}
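
The constants in convertYUVToRGB above are BT.601-style coefficients scaled by 1024. The standalone sketch below (illustration only, not part of this commit; the class name is made up) reproduces the conversion for a single pixel so the shift and mask constants can be sanity-checked on a plain JVM; a mid-grey input (Y=128, U=V=128) should print the neutral grey value 0xFF828282.

// Illustration only: single-pixel check of the fixed-point YUV -> ARGB conversion above.
public class YuvPixelCheck {
    private static final int MAX_CHANNEL_VALUE = 262143; // 2^18 - 1

    private static int clamp(int value) {
        return Math.max(0, Math.min(value, MAX_CHANNEL_VALUE));
    }

    static int convertYUVToRGB(int y, int u, int v) {
        int yNew = Math.max(y - 16, 0);
        int uNew = u - 128;
        int vNew = v - 128;
        int expandY = 1192 * yNew;                          // ~1.164 * 1024
        int r = clamp(expandY + 1634 * vNew);               // ~1.596 * 1024
        int g = clamp(expandY - 833 * vNew - 400 * uNew);   // ~0.813 / ~0.391 * 1024
        int b = clamp(expandY + 2066 * uNew);               // ~2.018 * 1024
        return -0x1000000 | (r << 6 & 0xff0000) | (g >> 2 & 0xff00) | (b >> 10 & 0xff);
    }

    public static void main(String[] args) {
        // Mid-grey YUV input should map to a neutral grey ARGB pixel (0xFF828282).
        System.out.printf("0x%08X%n", convertYUVToRGB(128, 128, 128));
    }
}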

View File

@ -0,0 +1,258 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.posenetdemo;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.Rect;
import android.hardware.camera2.CameraCharacteristics;
import android.media.Image;
import android.os.Bundle;
import android.view.SurfaceView;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.util.Pair;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import static com.mindspore.posenetdemo.Posenet.BodyPart.LEFT_ANKLE;
import static com.mindspore.posenetdemo.Posenet.BodyPart.LEFT_ELBOW;
import static com.mindspore.posenetdemo.Posenet.BodyPart.LEFT_HIP;
import static com.mindspore.posenetdemo.Posenet.BodyPart.LEFT_KNEE;
import static com.mindspore.posenetdemo.Posenet.BodyPart.LEFT_SHOULDER;
import static com.mindspore.posenetdemo.Posenet.BodyPart.LEFT_WRIST;
import static com.mindspore.posenetdemo.Posenet.BodyPart.RIGHT_ANKLE;
import static com.mindspore.posenetdemo.Posenet.BodyPart.RIGHT_ELBOW;
import static com.mindspore.posenetdemo.Posenet.BodyPart.RIGHT_HIP;
import static com.mindspore.posenetdemo.Posenet.BodyPart.RIGHT_KNEE;
import static com.mindspore.posenetdemo.Posenet.BodyPart.RIGHT_SHOULDER;
import static com.mindspore.posenetdemo.Posenet.BodyPart.RIGHT_WRIST;
public class MainActivity extends AppCompatActivity implements CameraDataDealListener {
private final List bodyJoints = Arrays.asList(
new Pair(LEFT_WRIST, LEFT_ELBOW), new Pair(LEFT_ELBOW, LEFT_SHOULDER),
new Pair(LEFT_SHOULDER, RIGHT_SHOULDER), new Pair(RIGHT_SHOULDER, RIGHT_ELBOW),
new Pair(RIGHT_ELBOW, RIGHT_WRIST), new Pair(LEFT_SHOULDER, LEFT_HIP),
new Pair(LEFT_HIP, RIGHT_HIP), new Pair(RIGHT_HIP, RIGHT_SHOULDER),
new Pair(LEFT_HIP, LEFT_KNEE), new Pair(LEFT_KNEE, LEFT_ANKLE),
new Pair(RIGHT_HIP, RIGHT_KNEE), new Pair(RIGHT_KNEE, RIGHT_ANKLE));
/**
* Model input shape for images.
*/
private final static int MODEL_WIDTH = 257;
private final static int MODEL_HEIGHT = 257;
private final double minConfidence = 0.5;
private final float circleRadius = 8.0f;
private Paint paint = new Paint();
private final int PREVIEW_WIDTH = 640;
private final int PREVIEW_HEIGHT = 480;
private Posenet posenet;
private int[] rgbBytes = new int[PREVIEW_WIDTH * PREVIEW_HEIGHT];
private byte[][] yuvBytes = new byte[3][];
private SurfaceView surfaceView;
private int lensFacing = CameraCharacteristics.LENS_FACING_BACK;
private PoseNetFragment poseNetFragment;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
addCameraFragment();
}
private void addCameraFragment() {
posenet = new Posenet(this);
poseNetFragment = PoseNetFragment.newInstance();
poseNetFragment.setCameraDataDealListener(this);
// poseNetFragment.setFacingCamera(lensFacing);
getSupportFragmentManager().popBackStack();
getSupportFragmentManager().beginTransaction()
.replace(R.id.container, poseNetFragment)
.commitAllowingStateLoss();
}
@Override
public void dataDeal(Image image, SurfaceView surfaceView) {
if (image == null || image.getPlanes() == null) {
return;
}
this.surfaceView = surfaceView;
fillBytes(image.getPlanes(), yuvBytes);
ImageUtils.convertYUV420ToARGB8888(yuvBytes[0], yuvBytes[1], yuvBytes[2],
PREVIEW_WIDTH, PREVIEW_HEIGHT,
image.getPlanes()[0].getRowStride(),
image.getPlanes()[1].getRowStride(),
image.getPlanes()[1].getPixelStride(),
rgbBytes);
Bitmap imageBitmap = Bitmap.createBitmap(
rgbBytes, PREVIEW_WIDTH, PREVIEW_HEIGHT,
Bitmap.Config.ARGB_8888);
Matrix rotateMatrix = new Matrix();
rotateMatrix.postRotate(90.0f);
Bitmap rotatedBitmap = Bitmap.createBitmap(
imageBitmap, 0, 0, PREVIEW_WIDTH, PREVIEW_HEIGHT,
rotateMatrix, true
);
image.close();
processImage(rotatedBitmap);
}
/**
* Fill the yuvBytes with data from image planes.
*/
private void fillBytes(Image.Plane[] planes, byte[][] yuvBytes) {
// Row stride is the total number of bytes occupied in memory by a row of an image.
// Because of the variable row stride it's not possible to know in
// advance the actual necessary dimensions of the yuv planes
for (int i = 0; i < planes.length; ++i) {
ByteBuffer buffer = planes[i].getBuffer();
if (yuvBytes[i] == null) {
yuvBytes[i] = new byte[buffer.capacity()];
}
buffer.get(yuvBytes[i]);
}
}
/**
* Crop Bitmap to maintain aspect ratio of model input.
*/
private Bitmap cropBitmap(Bitmap bitmap) {
float bitmapRatio = (float) bitmap.getHeight() / bitmap.getWidth();
float modelInputRatio = (float) MODEL_HEIGHT / MODEL_WIDTH;
double maxDifference = 1.0E-5D;
float cropHeight = modelInputRatio - bitmapRatio;
if (Math.abs(cropHeight) < maxDifference) {
return bitmap;
} else {
Bitmap croppedBitmap;
if (modelInputRatio < bitmapRatio) {
cropHeight = (float) bitmap.getHeight() - (float) bitmap.getWidth() / modelInputRatio;
croppedBitmap = Bitmap.createBitmap(bitmap,
0, (int) (cropHeight / 2), bitmap.getWidth(), (int) (bitmap.getHeight() - cropHeight));
} else {
cropHeight = (float) bitmap.getWidth() - (float) bitmap.getHeight() * modelInputRatio;
croppedBitmap = Bitmap.createBitmap(bitmap,
(int) (cropHeight / 2), 0, (int) (bitmap.getWidth() - cropHeight), bitmap.getHeight());
}
return croppedBitmap;
}
}
/**
* Set the paint color and size.
*/
private void setPaint() {
paint.setColor(getResources().getColor(R.color.text_blue));
paint.setTextSize(80.0f);
paint.setStrokeWidth(8.0f);
}
/**
* Draw bitmap on Canvas.
*/
private void draw(Canvas canvas, Posenet.Person person, Bitmap bitmap) {
canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
// Draw `bitmap` and `person` in square canvas.
int screenWidth, screenHeight;
int left, right, top, bottom;
if (canvas.getHeight() > canvas.getWidth()) {
screenWidth = canvas.getWidth();
screenHeight = canvas.getWidth();
left = 0;
top = (canvas.getHeight() - canvas.getWidth()) / 2;
} else {
screenWidth = canvas.getHeight();
screenHeight = canvas.getHeight();
left = (canvas.getWidth() - canvas.getHeight()) / 2;
top = 0;
}
right = left + screenWidth;
bottom = top + screenHeight;
setPaint();
canvas.drawBitmap(
bitmap,
new Rect(0, 0, bitmap.getWidth(), bitmap.getHeight()),
new Rect(left, top, right, bottom), paint);
float widthRatio = (float) screenWidth / MODEL_WIDTH;
float heightRatio = (float) screenHeight / MODEL_HEIGHT;
for (Posenet.KeyPoint keyPoint : person.keyPoints) {
if (keyPoint.score > minConfidence) {
Posenet.Position position = keyPoint.position;
float adjustedX = position.x * widthRatio + left;
float adjustedY = position.y * heightRatio + top;
canvas.drawCircle(adjustedX, adjustedY, circleRadius, paint);
}
}
for (int i = 0; i < bodyJoints.size(); i++) {
Pair line = (Pair) bodyJoints.get(i);
Posenet.BodyPart first = (Posenet.BodyPart) line.first;
Posenet.BodyPart second = (Posenet.BodyPart) line.second;
if (person.keyPoints.get(first.ordinal()).score > minConfidence &&
person.keyPoints.get(second.ordinal()).score > minConfidence) {
canvas.drawLine(
person.keyPoints.get(first.ordinal()).position.x * widthRatio + left,
person.keyPoints.get(first.ordinal()).position.y * heightRatio + top,
person.keyPoints.get(second.ordinal()).position.x * widthRatio + left,
person.keyPoints.get(second.ordinal()).position.y * heightRatio + top, paint);
}
}
canvas.drawText(String.format("Score: %.2f", person.score),
(15.0f * widthRatio), (30.0f * heightRatio + bottom), paint);
canvas.drawText(String.format("Time: %.2f ms", posenet.lastInferenceTimeNanos * 1.0f / 1_000_000),
(15.0f * widthRatio), (50.0f * heightRatio + bottom), paint
);
// Draw!
surfaceView.getHolder().unlockCanvasAndPost(canvas);
}
/**
* Process image using Posenet library.
*/
private void processImage(Bitmap bitmap) {
// Crop bitmap.
Bitmap croppedBitmap = cropBitmap(bitmap);
// Create a scaled version of the bitmap for model input.
Bitmap scaledBitmap = Bitmap.createScaledBitmap(croppedBitmap, MODEL_WIDTH, MODEL_HEIGHT, true);
// Perform inference.
Posenet.Person person = posenet.estimateSinglePose(scaledBitmap);
Canvas canvas = surfaceView.getHolder().lockCanvas();
draw(canvas, person, scaledBitmap);
}
}
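
To make the cropBitmap arithmetic above concrete, the sketch below (illustration only, not part of this commit) walks through the numbers for the rotated 480 x 640 preview frame and the square 257 x 257 model input: the frame is taller than the model aspect ratio, so 160 px are cropped vertically, 80 px from the top and 80 px from the bottom, before the result is scaled down.

// Illustration only: the crop performed before scaling the frame to the model input.
public class CropMathCheck {
    public static void main(String[] args) {
        int width = 480, height = 640;                 // rotated preview frame (portrait)
        float bitmapRatio = (float) height / width;    // 1.333...
        float modelInputRatio = 257f / 257f;           // 1.0 (square model input)
        // modelInputRatio < bitmapRatio, so the bitmap is cropped vertically:
        float cropHeight = height - width / modelInputRatio;   // 640 - 480 = 160
        System.out.printf("crop %.0f px in total (%.0f px from top and bottom) -> %d x %.0f%n",
                cropHeight, cropHeight / 2, width, height - cropHeight);   // 480 x 480
    }
}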

View File

@ -0,0 +1,388 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.posenetdemo;
import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.app.ActivityCompat;
import androidx.fragment.app.Fragment;
import java.util.Arrays;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
/**
* A simple {@link Fragment} subclass that hosts the camera preview.
* Use {@link #newInstance()} to create an instance of this fragment.
*/
public class PoseNetFragment extends Fragment {
private final static int REQUEST_CAMERA_PERMISSION = 1;
private String cameraId = "1";
private SurfaceView surfaceView;
private CameraCaptureSession captureSession;
private CameraDevice cameraDevice;
private Size previewSize;
private int previewWidth;
private int previewHeight;
private final int PREVIEW_WIDTH = 640;
private final int PREVIEW_HEIGHT = 480;
private HandlerThread backgroundThread;
private Handler backgroundHandler;
private ImageReader imageReader;
private CaptureRequest.Builder previewRequestBuilder;
private CaptureRequest previewRequest;
private Semaphore cameraOpenCloseLock = new Semaphore(1); // Use a Semaphore to coordinate camera open/close across threads.
private boolean flashSupported;
private static final String TAG = "PoseNetFragment";
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice mCameraDevice) {
cameraOpenCloseLock.release();
Log.d(TAG, "camera has open");
PoseNetFragment.this.cameraDevice = mCameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
cameraOpenCloseLock.release();
cameraDevice.close();
PoseNetFragment.this.cameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
onDisconnected(cameraDevice);
Activity activity = getActivity();
if (activity != null) {
activity.finish();
}
}
};
private CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) {
super.onCaptureProgressed(session, request, partialResult);
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
}
};
private CameraDataDealListener cameraDataDealListener;
public void setCameraDataDealListener(CameraDataDealListener cameraDataDealListener) {
this.cameraDataDealListener = cameraDataDealListener;
}
public static PoseNetFragment newInstance() {
PoseNetFragment fragment = new PoseNetFragment();
return fragment;
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
return inflater.inflate(R.layout.fragment_pose_net, container, false);
}
@Override
public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
this.surfaceView = view.findViewById(R.id.surfaceView);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
}
public void onStart() {
super.onStart();
openCamera();
}
public void onPause() {
this.closeCamera();
this.stopBackgroundThread();
super.onPause();
}
public void onDestroy() {
super.onDestroy();
}
private void requestCameraPermission() {
if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
Toast.makeText(getContext(), "This app needs camera permission.", Toast.LENGTH_LONG).show();
} else {
requestPermissions(new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.READ_PHONE_STATE, Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_CAMERA_PERMISSION) {
if (!allPermissionsGranted(grantResults)) {
Toast.makeText(getContext(), "This app needs camera permission.", Toast.LENGTH_LONG).show();
}
} else {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
private boolean allPermissionsGranted(int[] grantResults) {
for (int grantResult : grantResults) {
if (grantResult == PackageManager.PERMISSION_DENIED) {
return false;
}
}
return true;
}
/**
* Sets up member variables related to camera.
*/
private void setUpCameraOutputs() {
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// We don't use a front facing camera in this sample.
Integer cameraDirection = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cameraDirection != null && cameraDirection == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
previewSize = new Size(PREVIEW_WIDTH, PREVIEW_HEIGHT);
imageReader = ImageReader.newInstance(
PREVIEW_WIDTH, PREVIEW_HEIGHT,
ImageFormat.YUV_420_888, /*maxImages*/ 2
);
previewHeight = previewSize.getHeight();
previewWidth = previewSize.getWidth();
// Initialize the storage bitmaps once when the resolution is known.
// Check if the flash is supported.
flashSupported =
        Boolean.TRUE.equals(characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE));
this.cameraId = cameraId;
// We've found a viable camera and finished setting up member variables,
// so we don't need to iterate through other available cameras.
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (NullPointerException e) {
e.printStackTrace();
}
}
/**
* Opens the camera specified by [PosenetActivity.cameraId].
*/
private void openCamera() {
if (ActivityCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
this.requestCameraPermission();
}
setUpCameraOutputs();
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
// Wait for camera to open - 2.5 seconds is sufficient
if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
manager.openCamera(cameraId, mStateCallback, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void closeCamera() {
try {
cameraOpenCloseLock.acquire();
if (captureSession != null) {
captureSession.close();
captureSession = null;
}
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
cameraOpenCloseLock.release();
}
}
/**
* Starts a background thread and its [Handler].
*/
private void startBackgroundThread() {
backgroundThread = new HandlerThread("imageAvailableListener");
backgroundThread.start();
backgroundHandler = new Handler(backgroundThread.getLooper());
}
/**
* Stops the background thread and its [Handler].
*/
private void stopBackgroundThread() {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
backgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private final ImageReader.OnImageAvailableListener imageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imageReader) {
if (previewWidth != 0 && previewHeight != 0 && imageReader != null) {
Image image = imageReader.acquireLatestImage();
if (cameraDataDealListener != null) {
cameraDataDealListener.dataDeal(image, surfaceView);
}
}
}
};
/**
* Creates a new [CameraCaptureSession] for camera preview.
*/
private void createCameraPreviewSession() {
try {
// We capture images from preview in YUV format.
imageReader = ImageReader.newInstance(
previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
imageReader.setOnImageAvailableListener(imageAvailableListener, backgroundHandler);
// This is the surface we need to record images for processing.
Surface recordingSurface = imageReader.getSurface();
// We set up a CaptureRequest.Builder with the output Surface.
previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
previewRequestBuilder.addTarget(recordingSurface);
// Here, we create a CameraCaptureSession for camera preview.
cameraDevice.createCaptureSession(
Arrays.asList(recordingSurface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (cameraDevice == null) {
return;
}
// When the session is ready, we start displaying the preview.
captureSession = cameraCaptureSession;
try {
// Auto focus should be continuous for camera preview.
previewRequestBuilder.set(
CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE
);
// Flash is automatically enabled when necessary.
setAutoFlash(previewRequestBuilder);
// Finally, we start displaying the camera preview.
previewRequest = previewRequestBuilder.build();
captureSession.setRepeatingRequest(
previewRequest,
captureCallback, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
}
},
null);
} catch (CameraAccessException e) {
Log.e(TAG, e.toString());
}
}
private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
if (flashSupported) {
requestBuilder.set(
CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
}
}
}
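
The fragment above serializes camera open/close with a Semaphore(1) (cameraOpenCloseLock): openCamera() must acquire the permit before asking the CameraManager to open the device, the StateCallback releases it once the device is open or lost, and closeCamera() re-acquires it so teardown never races an in-flight open. The sketch below isolates that handshake; it is only an illustration with made-up names and no Android dependencies.

import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

// Illustration only: the open/close handshake used by PoseNetFragment, without Android classes.
public class CameraLockSketch {
    private final Semaphore cameraOpenCloseLock = new Semaphore(1);

    void open() throws InterruptedException {
        if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock camera opening.");
        }
        // ...ask the system to open the camera here; its callback then calls onOpenedOrLost().
    }

    void onOpenedOrLost() {
        cameraOpenCloseLock.release();   // mirrors release() in onOpened()/onDisconnected()
    }

    void close() throws InterruptedException {
        cameraOpenCloseLock.acquire();   // blocks until any in-flight open has finished
        try {
            // ...close the capture session, camera device and image reader here...
        } finally {
            cameraOpenCloseLock.release();
        }
    }

    public static void main(String[] args) throws InterruptedException {
        CameraLockSketch sketch = new CameraLockSketch();
        sketch.open();
        sketch.onOpenedOrLost();
        sketch.close();
    }
}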

View File

@ -0,0 +1,315 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.posenetdemo;
import android.content.Context;
import android.graphics.Bitmap;
import android.os.SystemClock;
import android.util.Log;
import androidx.core.util.Pair;
import com.mindspore.lite.LiteSession;
import com.mindspore.lite.MSTensor;
import com.mindspore.lite.Model;
import com.mindspore.lite.config.CpuBindMode;
import com.mindspore.lite.config.DeviceType;
import com.mindspore.lite.config.MSConfig;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import static java.lang.Math.exp;
public class Posenet {
public enum BodyPart {
NOSE,
LEFT_EYE,
RIGHT_EYE,
LEFT_EAR,
RIGHT_EAR,
LEFT_SHOULDER,
RIGHT_SHOULDER,
LEFT_ELBOW,
RIGHT_ELBOW,
LEFT_WRIST,
RIGHT_WRIST,
LEFT_HIP,
RIGHT_HIP,
LEFT_KNEE,
RIGHT_KNEE,
LEFT_ANKLE,
RIGHT_ANKLE
}
public class Position {
int x;
int y;
}
public class KeyPoint {
BodyPart bodyPart = BodyPart.NOSE;
Position position = new Position();
float score = 0.0f;
}
public class Person {
List<KeyPoint> keyPoints;
float score = 0.0f;
}
private Context mContext;
private MSConfig msConfig;
private LiteSession session;
private Model model;
private LinkedHashMap<String, MSTensor> mOutputs;
public long lastInferenceTimeNanos;
private final int NUM_THREADS = 4;
public Posenet(Context context) {
mContext = context;
init();
}
public boolean init() {
// Load the .ms model.
model = new Model();
if (!model.loadModel(mContext, "posenet_model.ms")) {
Log.e("MS_LITE", "Load Model failed");
return false;
}
// Create and init config.
msConfig = new MSConfig();
if (!msConfig.init(DeviceType.DT_CPU, NUM_THREADS, CpuBindMode.MID_CPU)) {
Log.e("MS_LITE", "Init context failed");
return false;
}
// Create the MindSpore lite session.
session = new LiteSession();
if (!session.init(msConfig)) {
Log.e("MS_LITE", "Create session failed");
msConfig.free();
return false;
}
msConfig.free();
// Compile the graph.
if (!session.compileGraph(model)) {
Log.e("MS_LITE", "Compile graph failed");
model.freeBuffer();
return false;
}
// Note: once model.freeBuffer() has been called, the model cannot be compiled again.
model.freeBuffer();
return true;
}
private float sigmoid(float x) {
return (float) (1.0f / (1.0f + exp(-x)));
}
/**
* Scale the image to a byteBuffer of [-1,1] values.
*/
private ByteBuffer initInputArray(Bitmap bitmap) {
final int bytesPerChannel = 4;
final int inputChannels = 3;
final int batchSize = 1;
ByteBuffer inputBuffer = ByteBuffer.allocateDirect(
batchSize * bytesPerChannel * bitmap.getHeight() * bitmap.getWidth() * inputChannels
);
inputBuffer.order(ByteOrder.nativeOrder());
inputBuffer.rewind();
final float mean = 128.0f;
final float std = 128.0f;
int[] intValues = new int[bitmap.getWidth() * bitmap.getHeight()];
bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
int pixel = 0;
for (int y = 0; y < bitmap.getHeight(); y++) {
for (int x = 0; x < bitmap.getWidth(); x++) {
int value = intValues[pixel++];
inputBuffer.putFloat(((float) (value >> 16 & 0xFF) - mean) / std);
inputBuffer.putFloat(((float) (value >> 8 & 0xFF) - mean) / std);
inputBuffer.putFloat(((float) (value & 0xFF) - mean) / std);
}
}
return inputBuffer;
}
/**
* Estimates the pose for a single person.
* @param bitmap image bitmap of the frame that should be processed
* @return a Person object containing keypoint locations and confidence scores
*/
Person estimateSinglePose(Bitmap bitmap) {
long estimationStartTimeNanos = SystemClock.elapsedRealtimeNanos();
ByteBuffer inputArray = this.initInputArray(bitmap);
List<MSTensor> inputs = session.getInputs();
if (inputs.size() != 1) {
return null;
}
Log.i("posenet", String.format("Scaling to [-1,1] took %.2f ms",
1.0f * (SystemClock.elapsedRealtimeNanos() - estimationStartTimeNanos) / 1_000_000));
MSTensor inTensor = inputs.get(0);
inTensor.setData(inputArray);
long inferenceStartTimeNanos = SystemClock.elapsedRealtimeNanos();
// Run graph to infer results.
if (!session.runGraph()) {
Log.e("MS_LITE", "Run graph failed");
return null;
}
lastInferenceTimeNanos = SystemClock.elapsedRealtimeNanos() - inferenceStartTimeNanos;
Log.i(
"posenet",
String.format("Interpreter took %.2f ms", 1.0f * lastInferenceTimeNanos / 1_000_000)
);
// Get output tensor values.
List<MSTensor> heatmaps_list = session.getOutputsByNodeName("Conv2D-27");
if (heatmaps_list == null) {
return null;
}
MSTensor heatmaps_tensors = heatmaps_list.get(0);
float[] heatmaps_results = heatmaps_tensors.getFloatData();
int[] heatmapsShape = heatmaps_tensors.getShape(); // 1, 9, 9, 17
float[][][][] heatmaps = new float[heatmapsShape[0]][][][];
for (int x = 0; x < heatmapsShape[0]; x++) { // heatmapsShape[0] =1
float[][][] arrayThree = new float[heatmapsShape[1]][][];
for (int y = 0; y < heatmapsShape[1]; y++) { // heatmapsShape[1] = 9
float[][] arrayTwo = new float[heatmapsShape[2]][];
for (int z = 0; z < heatmapsShape[2]; z++) { //heatmapsShape[2] = 9
float[] arrayOne = new float[heatmapsShape[3]]; //heatmapsShape[3] = 17
for (int i = 0; i < heatmapsShape[3]; i++) {
int n = i + z * heatmapsShape[3] + y * heatmapsShape[2] * heatmapsShape[3] + x * heatmapsShape[1] * heatmapsShape[2] * heatmapsShape[3];
arrayOne[i] = heatmaps_results[n]; // flat index into the 1*9*9*17 heatmap output
}
arrayTwo[z] = arrayOne;
}
arrayThree[y] = arrayTwo;
}
heatmaps[x] = arrayThree;
}
List<MSTensor> offsets_list = session.getOutputsByNodeName("Conv2D-28");
if (offsets_list == null) {
return null;
}
MSTensor offsets_tensors = offsets_list.get(0);
float[] offsets_results = offsets_tensors.getFloatData();
int[] offsetsShapes = offsets_tensors.getShape();
float[][][][] offsets = new float[offsetsShapes[0]][][][];
for (int x = 0; x < offsetsShapes[0]; x++) {
float[][][] offsets_arrayThree = new float[offsetsShapes[1]][][];
for (int y = 0; y < offsetsShapes[1]; y++) {
float[][] offsets_arrayTwo = new float[offsetsShapes[2]][];
for (int z = 0; z < offsetsShapes[2]; z++) {
float[] offsets_arrayOne = new float[offsetsShapes[3]];
for (int i = 0; i < offsetsShapes[3]; i++) {
int n = i + z * offsetsShapes[3] + y * offsetsShapes[2] * offsetsShapes[3] + x * offsetsShapes[1] * offsetsShapes[2] * offsetsShapes[3];
offsets_arrayOne[i] = offsets_results[n];
}
offsets_arrayTwo[z] = offsets_arrayOne;
}
offsets_arrayThree[y] = offsets_arrayTwo;
}
offsets[x] = offsets_arrayThree;
}
int height = heatmaps[0].length; //9
int width = heatmaps[0][0].length; //9
int numKeypoints = heatmaps[0][0][0].length; //17
// Finds the (row, col) locations of where the keypoints are most likely to be.
Pair[] keypointPositions = new Pair[numKeypoints];
for (int i = 0; i < numKeypoints; i++) {
keypointPositions[i] = new Pair(0, 0);
}
for (int keypoint = 0; keypoint < numKeypoints; keypoint++) {
float maxVal = heatmaps[0][0][0][keypoint];
int maxRow = 0;
int maxCol = 0;
for (int row = 0; row < height; row++) {
for (int col = 0; col < width; col++) {
if (heatmaps[0][row][col][keypoint] > maxVal) {
maxVal = heatmaps[0][row][col][keypoint];
maxRow = row;
maxCol = col;
}
}
}
keypointPositions[keypoint] = new Pair(maxRow, maxCol);
}
// Calculating the x and y coordinates of the keypoints with offset adjustment.
int[] xCoords = new int[numKeypoints];
int[] yCoords = new int[numKeypoints];
float[] confidenceScores = new float[numKeypoints];
for (int i = 0; i < keypointPositions.length; i++) {
Pair position = keypointPositions[i];
int positionY = (int) position.first;
int positionX = (int) position.second;
yCoords[i] = (int) ((float) positionY / (float) (height - 1) * bitmap.getHeight() + offsets[0][positionY][positionX][i]);
xCoords[i] = (int) ((float) positionX / (float) (width - 1) * bitmap.getWidth() + offsets[0][positionY][positionX][i + numKeypoints]);
confidenceScores[i] = sigmoid(heatmaps[0][positionY][positionX][i]);
}
Person person = new Person();
KeyPoint[] keypointList = new KeyPoint[numKeypoints];
for (int i = 0; i < numKeypoints; i++) {
keypointList[i] = new KeyPoint();
}
float totalScore = 0.0f;
for (int i = 0; i < keypointList.length; i++) {
keypointList[i].position.x = xCoords[i];
keypointList[i].position.y = yCoords[i];
keypointList[i].score = confidenceScores[i];
totalScore += confidenceScores[i];
}
person.keyPoints = Arrays.asList(keypointList);
person.score = totalScore / numKeypoints;
return person;
}
}
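
The nested loops in estimateSinglePose above unflatten the NHWC model outputs (the heatmap tensor is 1 x 9 x 9 x 17; the offset tensor has twice as many channels) using the flat index n = i + z*C + y*W*C + x*H*W*C. The helper below is only an illustration of that arithmetic; the class and method names are made up and it is not part of this commit.

// Illustration only: the flat NHWC index used when unflattening the model outputs.
public class NhwcIndex {
    // n = c + w*C + h*W*C + b*H*W*C for a tensor of shape [N, H, W, C].
    static int flatIndex(int b, int h, int w, int c, int[] shape) {
        int H = shape[1], W = shape[2], C = shape[3];
        return c + w * C + h * W * C + b * H * W * C;
    }

    public static void main(String[] args) {
        int[] heatmapShape = {1, 9, 9, 17};
        // Heatmap value for keypoint 5 at grid row 4, column 2:
        // 5 + 2*17 + 4*9*17 = 651
        System.out.println(flatIndex(0, 4, 2, 5, heatmapShape));
    }
}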

View File

@ -0,0 +1,67 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.posenetdemo;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.widget.ImageView;
import androidx.appcompat.app.AppCompatActivity;
public class TestActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_test);
ImageView sampleImageView = findViewById(R.id.image);
Drawable drawedImage = getResources().getDrawable(R.drawable.image);
Bitmap imageBitmap = drawableToBitmap(drawedImage);
sampleImageView.setImageBitmap(imageBitmap);
Posenet posenet = new Posenet(this);
Posenet.Person person = posenet.estimateSinglePose(imageBitmap);
// Draw the keypoints over the image.
Paint paint = new Paint();
paint.setColor(Color.RED);
Bitmap mutableBitmap = imageBitmap.copy(Bitmap.Config.ARGB_8888, true);
Canvas canvas = new Canvas(mutableBitmap);
for (Posenet.KeyPoint keypoint : person.keyPoints) {
canvas.drawCircle(
keypoint.position.x,
keypoint.position.y, 2.0f, paint);
}
sampleImageView.setAdjustViewBounds(true);
sampleImageView.setImageBitmap(mutableBitmap);
}
private Bitmap drawableToBitmap(Drawable drawable) {
Bitmap bitmap = Bitmap.createBitmap(257, 257, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bitmap);
drawable.setBounds(0, 0, canvas.getWidth(), canvas.getHeight());
drawable.draw(canvas);
return bitmap;
}
}

View File

@ -0,0 +1,30 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

Binary image file added (9.2 KiB); contents not shown.

View File

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android"
android:shape="oval"
android:useLevel="false">
<solid android:color="#515099"></solid>
<size
android:width="15dp"
android:height="15dp"></size>
</shape>

View File

@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>

Binary image file added (8.1 KiB); contents not shown.

View File

@ -0,0 +1,31 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@android:color/black"
tools:context="com.mindspore.posenetdemo.MainActivity">
<FrameLayout
android:id="@+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<androidx.appcompat.widget.Toolbar
android:id="@+id/toolbar"
android:layout_width="match_parent"
android:layout_height="?attr/actionBarSize"
android:layout_alignParentTop="true"
android:background="#66000000">
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:drawableStart="@drawable/logo2"
android:drawablePadding="5dp"
android:gravity="center_vertical"
android:text="MindSpore PoseNet"
android:textColor="#ffffff"
android:textSize="20sp" />
</androidx.appcompat.widget.Toolbar>
</RelativeLayout>

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<ImageView
android:id="@+id/image"
android:scaleType="center"
android:layout_width="fill_parent"
android:layout_height="fill_parent" />
</RelativeLayout>

View File

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<SurfaceView
android:id="@+id/surfaceView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentStart="true"
android:layout_alignParentTop="true"
android:layout_alignParentBottom="true" />
</RelativeLayout>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

10 binary image files added (contents not shown): 3.5 KiB, 5.2 KiB, 2.6 KiB, 3.3 KiB, 4.8 KiB, 7.3 KiB, 7.7 KiB, 12 KiB, 10 KiB, and 16 KiB.

View File

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#6200EE</color>
<color name="colorPrimaryDark">#3700B3</color>
<color name="colorAccent">#03DAC5</color>
<color name="mindspore_semi_transparent">#66000000</color>
<color name="white">#ffffff</color>
<color name="black">#000000</color>
<color name="gray">#A69D9D</color>
<color name="gray_btn">#424242</color>
<color name="text_blue">#6DA7FF</color>
<color name="text_yellow">#F8E71C</color>
<color name="text_orange">#FF844D</color>
<color name="text_green">#66B50A</color>
</resources>

View File

@ -0,0 +1,5 @@
<resources>
<string name="app_name">PoseNetDemo</string>
<!-- TODO: Remove or change this placeholder text -->
<string name="hello_blank_fragment">Hello blank fragment</string>
</resources>

View File

@ -0,0 +1,10 @@
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.NoActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
</style>
</resources>

View File

@ -0,0 +1,17 @@
package com.mindspore.posenetdemo;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() {
assertEquals(4, 2 + 2);
}
}

View File

@ -0,0 +1,24 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath "com.android.tools.build:gradle:4.0.1"
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}

View File

@ -0,0 +1,19 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true

Binary file not shown.

View File

@ -0,0 +1,6 @@
#Wed Nov 11 09:42:36 CST 2020
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-all.zip

model_zoo/official/lite/posenet/gradlew (vendored, new file, 172 lines)
View File

@ -0,0 +1,172 @@
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"

View File

@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -0,0 +1,2 @@
include ':app'
rootProject.name = "PosNetDemo"