car classification init

This commit is contained in:
hukang hwx963878 2021-01-07 16:29:48 +08:00
parent 2f8010f805
commit cbb5e2f4ce
92 changed files with 4623 additions and 0 deletions

View File

@ -0,0 +1,83 @@
# MindSpore
build/
mindspore/lib
app/src/main/assets/model/
app/src/main/cpp/mindspore-lite-*
output
*.ir
mindspore/ccsrc/schema/inner/*
# Cmake files
CMakeFiles/
cmake_install.cmake
CMakeCache.txt
Makefile
cmake-build-debug
# Dynamic libraries
*.so
*.so.*
*.dylib
# Static libraries
*.la
*.lai
*.a
*.lib
# Protocol buffers
*_pb2.py
*.pb.h
*.pb.cc
# Object files
*.o
# Editor
.vscode
.idea/
# Cquery
.cquery_cached_index/
compile_commands.json
# Ctags and cscope
tags
TAGS
CTAGS
GTAGS
GRTAGS
GSYMS
GPATH
cscope.*
# Python files
*__pycache__*
.pytest_cache
# Mac files
*.DS_Store
# Test results
test_temp_summary_event_file/
*.dot
*.dat
*.svg
*.perf
*.info
*.ckpt
*.shp
*.pkl
.clangd
mindspore/version.py
mindspore/default_config.py
mindspore/.commit_id
onnx.proto
mindspore/ccsrc/onnx.proto
# Android
local.properties
.gradle
sdk/build
sdk/.cxx
app/.cxx

View File

@ -0,0 +1,6 @@
/build
!/libs/
!/src/main/assets/model/
!/src/main/cpp/mindspore-lite-1.0.1-runtime-arm64-cpu/
/src/main/cpp/mindspore-lite-1.0.1-runtime-arm64-cpu/

View File

@ -0,0 +1,86 @@
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html

# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)

set(CMAKE_VERBOSE_MAKEFILE on)

# Emit the built .so where the Android Gradle plugin's jniLibs lookup finds it.
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/libs/${ANDROID_ABI})

# Directory name of the unpacked MindSpore Lite runtime (fetched by download.gradle).
set(MINDSPORELITE_VERSION mindspore-lite-1.0.1-runtime-arm64-cpu)

# ============== Set MindSpore Dependencies. =============
# Import the prebuilt MindSpore Lite shared libraries so they can be linked
# against and packaged into the APK.
add_library(mindspore-lite SHARED IMPORTED)
add_library(minddata-lite SHARED IMPORTED)
add_library(libmindspore-lite-fp16 SHARED IMPORTED)
set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION
        ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite.so)
set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION
        ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/lib/libminddata-lite.so)
set_target_properties(libmindspore-lite-fp16 PROPERTIES IMPORTED_LOCATION
        ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite-fp16.so)
# --------------- MindSpore Lite set End. --------------------

# Native sources of the JNI bridge.
# NOTE(review): file(GLOB_RECURSE) does not notice newly added sources until
# the next re-configure; an explicit source list would be more robust.
file(GLOB_RECURSE cpp_src "src/main/cpp/*.cpp" "src/main/cpp/*.h")

# The JNI library loaded from Java via System.loadLibrary("mlkit-label-MS").
add_library(mlkit-label-MS SHARED ${cpp_src})

# Target-scoped include paths (replaces directory-wide include_directories):
# MindSpore Lite headers, flatbuffers, and the minddata image-processing API.
target_include_directories(mlkit-label-MS PRIVATE
        ${CMAKE_SOURCE_DIR}/src/main/cpp
        ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/third_party/flatbuffers/include
        ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}
        ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include
        ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/ir/dtype
        ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/schema
        ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/include)

# Locate prebuilt NDK system libraries. CMake verifies that each library
# exists before completing its build.
find_library(log-lib log)
# BUGFIX: the library name contained a garbled character ("jnig·raphics");
# the NDK bitmap-access library is "jnigraphics".
find_library(jnigraphics-lib jnigraphics)

# Target-scoped define (replaces global add_definitions).
target_compile_definitions(mlkit-label-MS PRIVATE MNN_USE_LOGCAT)

# Link the JNI library against MindSpore Lite and the NDK system libraries.
# BUGFIX: previously the result of find_library(jnigraphics-lib ...) was
# ignored and "-ljnigraphics" was hardcoded instead.
target_link_libraries(mlkit-label-MS PRIVATE
        # --- mindspore ---
        minddata-lite
        mindspore-lite
        libmindspore-lite-fp16
        # --- NDK system libraries ---
        ${jnigraphics-lib}
        android
        ${log-lib})

View File

@ -0,0 +1,78 @@
apply plugin: 'com.android.application'
// Android application module for the MindSpore car-classification demo.
android {
compileSdkVersion 30
buildToolsVersion "30.0.1"
defaultConfig {
applicationId "com.mindspore.classificationforcar"
minSdkVersion 21
targetSdkVersion 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
// Flags forwarded to the CMake build of the JNI library (CMakeLists.txt).
externalNativeBuild {
cmake {
arguments "-DANDROID_STL=c++_shared"
cppFlags "-std=c++17"
}
}
// Only arm64-v8a is built: the bundled MindSpore Lite runtime is
// mindspore-lite-1.0.1-runtime-arm64-cpu (see download.gradle).
ndk {
abiFilters 'arm64-v8a'
}
}
// Keep native libraries and the .ms model uncompressed inside the APK so
// they can be memory-mapped at runtime.
aaptOptions {
noCompress '.so', 'ms'
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
// Hook the native CMake project into the Gradle build.
externalNativeBuild {
cmake {
path file('CMakeLists.txt')
}
}
ndkVersion '21.3.6528147'
// The CMake build copies the produced .so into libs/ (see
// CMAKE_LIBRARY_OUTPUT_DIRECTORY); pick jniLibs up from there.
sourceSets {
main {
jniLibs.srcDirs = ['libs']
}
}
// The JNI .so can appear twice (libs/ and the CMake output); keep one copy.
packagingOptions {
pickFirst 'lib/arm64-v8a/libmlkit-label-MS.so'
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
// Before gradle build.
// To download some necessary libraries (MindSpore Lite runtime + model file).
apply from: 'download.gradle'
dependencies {
implementation fileTree(dir: "libs", include: ["*.jar"])
implementation 'androidx.appcompat:appcompat:1.3.0-alpha02'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
// implementation project(path: ':sdk')
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
implementation 'com.google.android.material:material:1.0.0'
androidTestImplementation 'com.android.support.test:rules:1.0.2'
androidTestImplementation 'com.google.truth:truth:1.0.1'
implementation 'com.github.bumptech.glide:glide:4.11.0'
annotationProcessor 'com.github.bumptech.glide:compiler:4.11.0'
}

View File

@ -0,0 +1,82 @@
/**
 * To download necessary library from HuaWei server.
 * Including mindspore-lite .so file, minddata-lite .so file and model file.
 * The libraries can be downloaded manually.
 */
// Destination paths (relative to this module) and download endpoints.
def targetMindSporeInclude = "src/main/cpp/"
def mindsporeLite_Version = "mindspore-lite-1.0.1-runtime-arm64-cpu"
def targetModelFile = "src/main/assets/model/mobilenetv2.ms"
def mindSporeLibrary_arm64 = "src/main/cpp/${mindsporeLite_Version}.tar.gz"
def modelDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms"
def mindsporeLiteDownloadUrl = "https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/lite/android_aarch64/${mindsporeLite_Version}.tar.gz"
def cleantargetMindSporeInclude = "src/main/cpp"
// Drop stale CMake caches so the native build reconfigures against a
// freshly downloaded runtime.
task cleanCmakeCache(type: Delete) {
delete '.cxx/cmake/debug'
delete '.cxx/cmake/release'
}
// Fetch the .ms model into src/main/assets/model/.
task downloadModelFile(type: DownloadUrlTask) {
doFirst {
println "Downloading ${modelDownloadUrl}"
}
sourceUrl = "${modelDownloadUrl}"
target = file("${targetModelFile}")
}
// Fetch the MindSpore Lite runtime tarball into src/main/cpp/.
task downloadMindSporeLibrary(type: DownloadUrlTask) {
doFirst {
println "Downloading ${mindsporeLiteDownloadUrl}"
}
sourceUrl = "${mindsporeLiteDownloadUrl}"
target = file("${mindSporeLibrary_arm64}")
}
// Unpack the runtime tarball next to the native sources.
task unzipMindSporeInclude(type: Copy, dependsOn: 'downloadMindSporeLibrary') {
doFirst {
println "Unzipping ${mindSporeLibrary_arm64}"
}
from tarTree(resources.gzip("${mindSporeLibrary_arm64}"))
into "${targetMindSporeInclude}"
}
// Remove the tarball once it has been extracted.
task cleanUnusedmindsporeFiles(type: Delete, dependsOn: ['unzipMindSporeInclude']) {
delete fileTree("${cleantargetMindSporeInclude}").matching {
include "*.tar.gz"
}
}
/*
 * Using preBuild to download mindspore library and model file.
 * Run before gradle build.
 */
// Skip the download/unpack tasks when the artifacts already exist locally.
if (file("src/main/cpp/${mindsporeLite_Version}/lib/libmindspore-lite.so").exists()){
downloadMindSporeLibrary.enabled = false
unzipMindSporeInclude.enabled = false
cleanUnusedmindsporeFiles.enabled = false
}
if (file("src/main/assets/model/mobilenetv2.ms").exists()){
downloadModelFile.enabled = false
}
preBuild.dependsOn cleanCmakeCache
preBuild.dependsOn downloadModelFile
preBuild.dependsOn downloadMindSporeLibrary
preBuild.dependsOn unzipMindSporeInclude
preBuild.dependsOn cleanUnusedmindsporeFiles
// Simple URL-download task backed by Ant's <get>; skipped (up-to-date) when
// the @OutputFile already exists and inputs are unchanged.
class DownloadUrlTask extends DefaultTask {
@Input
String sourceUrl
@OutputFile
File target
@TaskAction
void download() {
ant.get(src: sourceUrl, dest: target)
}
}

View File

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@ -0,0 +1,42 @@
/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.mindspore.classificationforcar;

import android.content.Context;

import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;

import org.junit.Test;
import org.junit.runner.RunWith;

import static org.junit.Assert.*;

/**
 * Instrumented test, which will execute on an Android device.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {

    /** Verifies the instrumentation targets this application's package. */
    @Test
    public void useAppContext() {
        // Context of the app under test.
        Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
        // BUGFIX: the expected value must match the applicationId declared in
        // build.gradle ("com.mindspore.classificationforcar"); the previous
        // expectation "com.mindspore.classification" made this test always fail.
        assertEquals("com.mindspore.classificationforcar", appContext.getPackageName());
    }
}

View File

@ -0,0 +1,50 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    package="com.mindspore.classificationforcar"
    android:versionCode="1"
    android:versionName="1.0">

    <uses-permission android:name="android.permission.CAMERA" />
    <!-- NOTE(review): WRITE_INTERNAL_STORAGE / READ_INTERNAL_STORAGE are not
         standard Android permissions and are ignored by the platform;
         confirm whether they can be removed. -->
    <uses-permission android:name="android.permission.WRITE_INTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.READ_INTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
    <!-- BUGFIX: the platform permission is MOUNT_UNMOUNT_FILESYSTEMS (plural);
         the previous singular spelling named a nonexistent permission. -->
    <uses-permission android:name="android.permission.MOUNT_UNMOUNT_FILESYSTEMS" />
    <uses-permission android:name="android.permission.READ_PHONE_STATE" />

    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:requestLegacyExternalStorage="true"
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:theme="@style/AppTheme">
        <!-- Launcher activity. -->
        <activity android:name="com.mindspore.classificationforcar.widget.MainActivity"
            android:screenOrientation="portrait"
            >
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
        <activity
            android:name="com.mindspore.classificationforcar.widget.CameraActivity"
            android:screenOrientation="portrait">
        </activity>
        <!-- FileProvider used to share captured images with other apps. -->
        <provider
            android:name="androidx.core.content.FileProvider"
            android:authorities="com.mindspore.classificationforcar.fileprovider"
            android:exported="false"
            tools:replace="android:authorities"
            android:grantUriPermissions="true">
            <meta-data
                tools:replace="android:resource"
                android:name="android.support.FILE_PROVIDER_PATHS"
                android:resource="@xml/file_paths" />
        </provider>
    </application>
</manifest>

View File

@ -0,0 +1,812 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <android/bitmap.h>
#include <android/asset_manager_jni.h>
#include <android/log.h>
#include <utility>
#include <cstring>
#include <vector>
#include <string>
#include <unordered_map>
#include <set>
#include "include/errorcode.h"
#include "include/ms_tensor.h"
#include "ImageMindSporeNetnative.h"
#include "MSNetWork.h"
#include "lite_cv/lite_mat.h"
#include "lite_cv/image_process.h"
#include "mindspore-lite-1.0.1-runtime-arm64-cpu/minddata/include/lite_cv/lite_mat.h"
using mindspore::dataset::LiteMat;
using mindspore::dataset::LPixelType;
using mindspore::dataset::LDataType;
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
static const int RET_CATEGORY_SUM = 410;
static const char *labels_name_map[RET_CATEGORY_SUM] = {
"Herd",
"Safari",
"Bangle",
"Cushion",
"Countertop",
"Prom",
"Branch",
"Sports",
"Sky",
"Community",
"Wheel",
"Cola",
"Tuxedo",
"Flowerpot",
"Team",
"Computer",
"Unicycle",
"Brig",
"Aerospace engineering",
"Scuba diving",
"Goggles",
"Fruit",
"Badminton",
"Horse",
"Sunglasses",
"Fun",
"Prairie",
"Poster",
"Flag",
"Speedboat",
"Eyelash",
"Veil",
"Mobile phone",
"Wheelbarrow",
"Saucer",
"Leather",
"Drawer",
"Paper",
"Pier",
"Waterfowl",
"Tights",
"Rickshaw",
"Vegetable",
"Handrail",
"Ice",
"Metal",
"Flower",
"Wing",
"Silverware",
"Event",
"Skyline",
"Money",
"Comics",
"Handbag",
"Porcelain",
"Rodeo",
"Curtain",
"Tile",
"Human mouth",
"Army",
"Menu",
"Boat",
"Snowboarding",
"Cairn terrier",
"Net",
"Pasteles",
"Cup",
"Rugby",
"Pho",
"Cap",
"Human hair",
"Surfing",
"Loveseat",
"Museum",
"Shipwreck",
"Trunk (Tree)",
"Plush",
"Monochrome",
"Volcano",
"Rock",
"Pillow",
"Presentation",
"Nebula",
"Subwoofer",
"Lake",
"Sledding",
"Bangs",
"Tablecloth",
"Necklace",
"Swimwear",
"Standing",
"Jeans",
"Carnival",
"Softball",
"Centrepiece",
"Skateboarder",
"Cake",
"Dragon",
"Aurora",
"Skiing",
"Bathroom",
"Dog",
"Needlework",
"Umbrella",
"Church",
"Fire",
"Piano",
"Denim",
"Bridle",
"Cabinetry",
"Lipstick",
"Ring",
"Television",
"Roller",
"Seal",
"Concert",
"Product",
"News",
"Fast food",
"Horn (Animal)",
"Tattoo",
"Bird",
"Bridegroom",
"Love",
"Helmet",
"Dinosaur",
"Icing",
"Miniature",
"Tire",
"Toy",
"Icicle",
"Jacket",
"Coffee",
"Mosque",
"Rowing",
"Wetsuit",
"Camping",
"Underwater",
"Christmas",
"Gelato",
"Whiteboard",
"Field",
"Ragdoll",
"Construction",
"Lampshade",
"Palace",
"Meal",
"Factory",
"Cage",
"Clipper (Boat)",
"Gymnastics",
"Turtle",
"Human foot",
"Marriage",
"Web page",
"Human beard",
"Fog",
"Wool",
"Cappuccino",
"Lighthouse",
"Lego",
"Sparkler",
"Sari",
"Model",
"Temple",
"Beanie",
"Building",
"Waterfall",
"Penguin",
"Cave",
"Stadium",
"Smile",
"Human hand",
"Park",
"Desk",
"Shetland sheepdog",
"Bar",
"Eating",
"Neon",
"Dalmatian",
"Crocodile",
"Wakeboarding",
"Longboard",
"Road",
"Race",
"Kitchen",
"Odometer",
"Cliff",
"Fiction",
"School",
"Interaction",
"Bullfighting",
"Boxer",
"Gown",
"Aquarium",
"Superhero",
"Pie",
"Asphalt",
"Surfboard",
"Cheeseburger",
"Screenshot",
"Supper",
"Laugh",
"Lunch",
"Party ",
"Glacier",
"Bench",
"Grandparent",
"Sink",
"Pomacentridae",
"Blazer",
"Brick",
"Space",
"Backpacking",
"Stuffed toy",
"Sushi",
"Glitter",
"Bonfire",
"Castle",
"Marathon",
"Pizza",
"Beach",
"Human ear",
"Racing",
"Sitting",
"Iceberg",
"Shelf",
"Vehicle",
"Pop music",
"Playground",
"Clown",
"Car",
"Rein",
"Fur",
"Musician",
"Casino",
"Baby",
"Alcohol",
"Strap",
"Reef",
"Balloon",
"Outerwear",
"Cathedral",
"Competition",
"Joker",
"Blackboard",
"Bunk bed",
"Bear",
"Moon",
"Archery",
"Polo",
"River",
"Fishing",
"Ferris wheel",
"Mortarboard",
"Bracelet",
"Flesh",
"Statue",
"Farm",
"Desert",
"Chain",
"Aircraft",
"Textile",
"Hot dog",
"Knitting",
"Singer",
"Juice",
"Circus",
"Chair",
"Musical instrument",
"Room",
"Crochet",
"Sailboat",
"Newspaper",
"Santa claus",
"Swamp",
"Skyscraper",
"Skin",
"Rocket",
"Aviation",
"Airliner",
"Garden",
"Ruins",
"Storm",
"Glasses",
"Balance",
"Nail (Body part)",
"Rainbow",
"Soil ",
"Vacation ",
"Moustache",
"Doily",
"Food",
"Bride ",
"Cattle",
"Pocket",
"Infrastructure",
"Train",
"Gerbil",
"Fireworks",
"Pet",
"Dam",
"Crew",
"Couch",
"Bathing",
"Quilting",
"Motorcycle",
"Butterfly",
"Sled",
"Watercolor paint",
"Rafting",
"Monument",
"Lightning",
"Sunset",
"Bumper",
"Shoe",
"Waterskiing",
"Sneakers",
"Tower",
"Insect",
"Pool",
"Placemat",
"Airplane",
"Plant",
"Jungle",
"Armrest",
"Duck",
"Dress",
"Tableware",
"Petal",
"Bus",
"Hanukkah",
"Forest",
"Hat",
"Barn",
"Tubing",
"Snorkeling",
"Cool",
"Cookware and bakeware",
"Cycling",
"Swing (Seat)",
"Muscle",
"Cat",
"Skateboard",
"Star",
"Toe",
"Junk",
"Bicycle",
"Bedroom",
"Person",
"Sand",
"Canyon",
"Tie",
"Twig",
"Sphynx",
"Supervillain",
"Nightclub",
"Ranch",
"Pattern",
"Shorts",
"Himalayan",
"Wall",
"Leggings",
"Windsurfing",
"Deejay",
"Dance",
"Van",
"Bento",
"Sleep",
"Wine",
"Picnic",
"Leisure",
"Dune",
"Crowd",
"Kayak",
"Ballroom",
"Selfie",
"Graduation",
"Frigate",
"Mountain",
"Dude",
"Windshield",
"Skiff",
"Class",
"Scarf",
"Bull",
"Soccer",
"Bag",
"Basset hound",
"Tractor",
"Swimming",
"Running",
"Track",
"Helicopter",
"Pitch",
"Clock",
"Song",
"Jersey",
"Stairs",
"Flap",
"Jewellery",
"Bridge",
"Cuisine",
"Bread",
"Caving",
"Shell",
"Wreath",
"Roof",
"Cookie",
"Canoe"};
static float g_thres_map[RET_CATEGORY_SUM] = {
0.23, 0.03, 0.10, 0.13, 0.03,
0.10, 0.06, 0.09, 0.09, 0.05,
0.01, 0.04, 0.01, 0.27, 0.05,
0.16, 0.01, 0.16, 0.04, 0.13,
0.09, 0.18, 0.10, 0.65, 0.08,
0.04, 0.08, 0.01, 0.05, 0.20,
0.01, 0.16, 0.10, 0.10, 0.10,
0.02, 0.24, 0.08, 0.10, 0.53,
0.07, 0.05, 0.07, 0.27, 0.02,
0.01, 0.71, 0.01, 0.06, 0.06,
0.03, 0.96, 0.03, 0.94, 0.05,
0.03, 0.14, 0.09, 0.03, 0.11,
0.50, 0.16, 0.07, 0.07, 0.06,
0.07, 0.08, 0.10, 0.29, 0.03,
0.05, 0.11, 0.03, 0.03, 0.03,
0.01, 0.11, 0.07, 0.03, 0.49,
0.12, 0.30, 0.10, 0.15, 0.02,
0.06, 0.17, 0.01, 0.04, 0.07,
0.06, 0.02, 0.19, 0.20, 0.14,
0.35, 0.15, 0.01, 0.10, 0.13,
0.43, 0.11, 0.12, 0.32, 0.01,
0.22, 0.51, 0.02, 0.04, 0.14,
0.04, 0.35, 0.35, 0.01, 0.54,
0.04, 0.02, 0.03, 0.02, 0.38,
0.13, 0.19, 0.06, 0.01, 0.02,
0.06, 0.03, 0.04, 0.01, 0.10,
0.01, 0.07, 0.07, 0.07, 0.33,
0.08, 0.04, 0.06, 0.07, 0.07,
0.11, 0.02, 0.32, 0.48, 0.14,
0.01, 0.01, 0.04, 0.05, 0.04,
0.16, 0.50, 0.11, 0.03, 0.04,
0.02, 0.55, 0.17, 0.13, 0.84,
0.18, 0.03, 0.16, 0.02, 0.06,
0.03, 0.11, 0.96, 0.36, 0.68,
0.02, 0.08, 0.02, 0.01, 0.03,
0.05, 0.14, 0.09, 0.06, 0.03,
0.20, 0.15, 0.62, 0.03, 0.10,
0.08, 0.02, 0.02, 0.06, 0.03,
0.04, 0.01, 0.10, 0.05, 0.04,
0.02, 0.07, 0.03, 0.32, 0.11,
0.03, 0.02, 0.03, 0.01, 0.03,
0.03, 0.25, 0.20, 0.19, 0.03,
0.11, 0.03, 0.02, 0.03, 0.15,
0.14, 0.06, 0.11, 0.03, 0.02,
0.02, 0.52, 0.03, 0.02, 0.02,
0.02, 0.09, 0.56, 0.01, 0.22,
0.01, 0.48, 0.14, 0.10, 0.08,
0.73, 0.39, 0.09, 0.10, 0.85,
0.31, 0.03, 0.05, 0.01, 0.01,
0.01, 0.10, 0.28, 0.02, 0.03,
0.04, 0.03, 0.07, 0.14, 0.20,
0.10, 0.01, 0.05, 0.37, 0.12,
0.04, 0.44, 0.04, 0.26, 0.08,
0.07, 0.27, 0.10, 0.03, 0.01,
0.03, 0.16, 0.41, 0.16, 0.34,
0.04, 0.30, 0.04, 0.05, 0.18,
0.33, 0.03, 0.21, 0.03, 0.04,
0.22, 0.01, 0.04, 0.02, 0.01,
0.06, 0.02, 0.08, 0.87, 0.11,
0.15, 0.05, 0.14, 0.09, 0.08,
0.22, 0.09, 0.07, 0.06, 0.06,
0.05, 0.43, 0.70, 0.03, 0.07,
0.06, 0.07, 0.14, 0.04, 0.01,
0.03, 0.05, 0.65, 0.06, 0.04,
0.23, 0.06, 0.75, 0.10, 0.01,
0.63, 0.41, 0.09, 0.01, 0.01,
0.18, 0.10, 0.03, 0.01, 0.05,
0.13, 0.18, 0.03, 0.23, 0.01,
0.04, 0.03, 0.38, 0.90, 0.21,
0.18, 0.10, 0.48, 0.08, 0.46,
0.03, 0.01, 0.02, 0.03, 0.10,
0.01, 0.09, 0.01, 0.01, 0.01,
0.10, 0.41, 0.01, 0.06, 0.75,
0.08, 0.01, 0.01, 0.08, 0.21,
0.06, 0.02, 0.05, 0.02, 0.05,
0.09, 0.12, 0.03, 0.06, 0.11,
0.03, 0.01, 0.01, 0.06, 0.84,
0.04, 0.81, 0.39, 0.02, 0.29,
0.77, 0.07, 0.06, 0.22, 0.23,
0.23, 0.01, 0.02, 0.13, 0.04,
0.19, 0.04, 0.08, 0.27, 0.09,
0.06, 0.01, 0.03, 0.21, 0.04,
};
/**
 * Copies the contents of a Java direct ByteBuffer into a freshly allocated
 * native buffer. The caller takes ownership and must release it with delete[].
 */
char *ImageCreateLocalModelBuffer(JNIEnv *env, jobject modelBuffer) {
  auto *srcAddr = static_cast<jbyte *>(env->GetDirectBufferAddress(modelBuffer));
  const int srcLen = static_cast<int>(env->GetDirectBufferCapacity(modelBuffer));
  auto *localCopy = new char[srcLen];
  memcpy(localCopy, srcAddr, srcLen);
  return localCopy;
}
/**
* To process the result of mindspore inference.
* @param msOutputs
* @return
*/
std::string ImageProcessRunnetResult(const int RET_CATEGORY_SUM, const char *const labels_name_map[],
std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs) {
// Get the branch of the model output.
// Use iterators to get map elements.
std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
iter = msOutputs.begin();
// The mobilenetv2.ms model output just one branch.
auto outputTensor = iter->second;
int tensorNum = outputTensor->ElementsNum();
MS_PRINT("Number of tensor elements:%d", tensorNum);
// Get a pointer to the first score.
float *temp_scores = static_cast<float *>(outputTensor->MutableData());
float scores[RET_CATEGORY_SUM];
for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
scores[i] = temp_scores[i];
}
const float unifiedThre = 0.5;
const float probMax = 1.0;
for (size_t i = 0; i < RET_CATEGORY_SUM; ++i) {
float threshold = g_thres_map[i];
float tmpProb = scores[i];
if (tmpProb < threshold) {
tmpProb = tmpProb / threshold * unifiedThre;
} else {
tmpProb = (tmpProb - threshold) / (probMax - threshold) * unifiedThre + unifiedThre;
}
scores[i] = tmpProb;
}
for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
if (scores[i] > 0.5) {
MS_PRINT("MindSpore scores[%d] : [%f]", i, scores[i]);
}
}
// Score for each category.
// Converted to text information that needs to be displayed in the APP.
std::string categoryScore = "";
for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
categoryScore += labels_name_map[i];
categoryScore += ":";
std::string score_str = std::to_string(scores[i]);
categoryScore += score_str;
categoryScore += ";";
}
return categoryScore;
}
/**
 * Converts an Android RGBA_8888 bitmap into an RGB LiteMat.
 *
 * If the bitmap rows are tightly packed (stride == width*4), the pixel memory
 * is passed to InitFromPixel directly; otherwise the rows are first compacted
 * into a contiguous buffer.
 *
 * @param env       JNI environment.
 * @param srcBitmap Android bitmap (must be RGBA_8888).
 * @param lite_mat  output RGB matrix.
 * @return true on success, false on unsupported format or conversion failure.
 */
bool ImageBitmapToLiteMat(JNIEnv *env, const jobject &srcBitmap, LiteMat *lite_mat) {
  bool ret = false;
  AndroidBitmapInfo info;
  void *pixels = nullptr;
  LiteMat &lite_mat_bgr = *lite_mat;
  AndroidBitmap_getInfo(env, srcBitmap, &info);
  if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
    MS_PRINT("Image Err, Request RGBA");
    return false;
  }
  AndroidBitmap_lockPixels(env, srcBitmap, &pixels);
  if (info.stride == info.width * 4) {
    // Rows are tightly packed; convert in place.
    ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels),
                        LPixelType::RGBA2RGB, LDataType::UINT8,
                        info.width, info.height, lite_mat_bgr);
    if (!ret) {
      MS_PRINT("Init From RGBA error");
    }
  } else {
    // Rows are padded: compact them into a contiguous buffer first.
    // IMPROVED: use a std::vector (RAII) instead of manual new[]/delete[],
    // and iterate with the bitmap's own unsigned row type.
    std::vector<unsigned char> packed(static_cast<size_t>(info.width) * info.height * 4);
    unsigned char *dst = packed.data();
    const unsigned char *src = reinterpret_cast<const unsigned char *>(pixels);
    for (uint32_t row = 0; row < info.height; ++row) {
      memcpy(dst, src, info.width * 4);
      dst += info.width * 4;
      src += info.stride;
    }
    ret = InitFromPixel(packed.data(),
                        LPixelType::RGBA2RGB, LDataType::UINT8,
                        info.width, info.height, lite_mat_bgr);
    if (!ret) {
      MS_PRINT("Init From RGBA error");
    }
  }
  AndroidBitmap_unlockPixels(env, srcBitmap);
  return ret;
}
bool ImagePreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr) {
bool ret = false;
LiteMat lite_mat_resize;
LiteMat &lite_norm_mat_cut = *lite_norm_mat_ptr;
ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 256, 256);
if (!ret) {
MS_PRINT("ResizeBilinear error");
return false;
}
LiteMat lite_mat_convert_float;
ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0);
if (!ret) {
MS_PRINT("ConvertTo error");
return false;
}
LiteMat lite_mat_cut;
ret = Crop(lite_mat_convert_float, lite_mat_cut, 16, 16, 224, 224);
if (!ret) {
MS_PRINT("Crop error");
return false;
}
std::vector<float> means = {0.485, 0.456, 0.406};
std::vector<float> stds = {0.229, 0.224, 0.225};
SubStractMeanNormalize(lite_mat_cut, lite_norm_mat_cut, means, stds);
return true;
}
/**
 * The Java layer reads the model into MappedByteBuffer or ByteBuffer to load the model.
 * Returns an opaque handle (a heap-allocated void** holding the MSNetWork*),
 * or 0 on failure. The handle is released by unloadModel().
 */
extern "C"
JNIEXPORT jlong JNICALL
Java_com_mindspore_classificationforcar_gallery_classify_ImageTrackingMobile_loadModel(JNIEnv *env,
                                                                                       jobject thiz,
                                                                                       jobject model_buffer,
                                                                                       jint num_thread) {
  if (nullptr == model_buffer) {
    MS_PRINT("error, buffer is nullptr!");
    return (jlong) nullptr;
  }
  jlong bufferLen = env->GetDirectBufferCapacity(model_buffer);
  if (0 == bufferLen) {
    MS_PRINT("error, bufferLen is 0!");
    return (jlong) nullptr;
  }
  // Copy the Java direct buffer into native memory; we own this copy.
  char *modelBuffer = ImageCreateLocalModelBuffer(env, model_buffer);
  if (modelBuffer == nullptr) {
    MS_PRINT("modelBuffer create failed!");
    return (jlong) nullptr;
  }
  // To create a mindspore network inference environment.
  void **labelEnv = new void *;
  MSNetWork *labelNet = new MSNetWork;
  *labelEnv = labelNet;
  mindspore::lite::Context *context = new mindspore::lite::Context;
  context->thread_num_ = num_thread;
  labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
  delete context;
  // BUGFIX: modelBuffer was leaked on every load. Model::Import() copies the
  // buffer during CreateSessionMS, so our local copy can be released here
  // (TODO confirm against the linked MindSpore Lite version).
  delete[] modelBuffer;
  if (labelNet->session() == nullptr) {
    MS_PRINT("MindSpore create session failed!.");
    delete labelNet;
    delete labelEnv;
    return (jlong) nullptr;
  }
  if (model_buffer != nullptr) {
    env->DeleteLocalRef(model_buffer);
  }
  return (jlong) labelEnv;
}
/**
 * After the inference environment is successfully created,
 * sending a picture to the model and run inference.
 * Returns a "label:score;" display string (see ImageProcessRunnetResult),
 * or NULL on any preprocessing / session / inference error.
 */
extern "C" JNIEXPORT jstring JNICALL
Java_com_mindspore_classificationforcar_gallery_classify_ImageTrackingMobile_runNet(JNIEnv *env,
jclass type,
jlong netEnv,
jobject srcBitmap) {
// Convert the Android bitmap to RGB and preprocess it into the normalized
// 224x224 float tensor the model expects.
LiteMat lite_mat_bgr, lite_norm_mat_cut;
if (!ImageBitmapToLiteMat(env, srcBitmap, &lite_mat_bgr)) {
MS_PRINT("ImageBitmapToLiteMat error");
return NULL;
}
if (!ImagePreProcessImageData(lite_mat_bgr, &lite_norm_mat_cut)) {
MS_PRINT("ImagePreProcessImageData error");
return NULL;
}
// Record the preprocessed tensor dimensions for the input copy below.
ImgDims inputDims;
inputDims.channel = lite_norm_mat_cut.channel_;
inputDims.width = lite_norm_mat_cut.width_;
inputDims.height = lite_norm_mat_cut.height_;
// Get the mindspore inference environment which created in loadModel().
void **labelEnv = reinterpret_cast<void **>(netEnv);
if (labelEnv == nullptr) {
MS_PRINT("MindSpore error, labelEnv is a nullptr.");
return NULL;
}
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
auto mSession = labelNet->session();
if (mSession == nullptr) {
MS_PRINT("MindSpore error, Session is a nullptr.");
return NULL;
}
MS_PRINT("MindSpore get session.");
auto msInputs = mSession->GetInputs();
if (msInputs.size() == 0) {
MS_PRINT("MindSpore error, msInputs.size() equals 0.");
return NULL;
}
// The model has a single input tensor; fill it with the HWC float data.
auto inTensor = msInputs.front();
float *dataHWC = reinterpret_cast<float *>(lite_norm_mat_cut.data_ptr_);
// Copy dataHWC to the model input tensor.
memcpy(inTensor->MutableData(), dataHWC,
inputDims.channel * inputDims.width * inputDims.height * sizeof(float));
// After the model and image tensor data is loaded, run inference.
auto status = mSession->RunGraph();
if (status != mindspore::lite::RET_OK) {
MS_PRINT("MindSpore run net error.");
return NULL;
}
/**
 * Get the mindspore inference results.
 * Return the map of output node name and MindSpore Lite MSTensor.
 */
auto names = mSession->GetOutputTensorNames();
std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs;
for (const auto &name : names) {
auto temp_dat = mSession->GetOutputByTensorName(name);
msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *>{name, temp_dat});
}
// Post-process the scores into the string displayed by the app.
std::string resultStr = ImageProcessRunnetResult(::RET_CATEGORY_SUM,
::labels_name_map, msOutputs);
const char *resultCharData = resultStr.c_str();
return (env)->NewStringUTF(resultCharData);
}
/**
 * Releases the inference environment created by loadModel().
 * @return true when the environment was released, false for a null handle.
 */
extern "C" JNIEXPORT jboolean JNICALL
Java_com_mindspore_classificationforcar_gallery_classify_ImageTrackingMobile_unloadModel(JNIEnv *env,
                                                                                         jclass type,
                                                                                         jlong netEnv) {
  MS_PRINT("MindSpore release net.");
  void **labelEnv = reinterpret_cast<void **>(netEnv);
  // BUGFIX: the old code logged the null handle but then dereferenced it
  // anyway, crashing the app; bail out instead.
  if (labelEnv == nullptr) {
    MS_PRINT("MindSpore error, labelEnv is a nullptr.");
    return (jboolean) false;
  }
  MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
  labelNet->ReleaseNets();
  // BUGFIX: also free the MSNetWork object and the handle allocated in
  // loadModel(); previously both leaked on every unload.
  delete labelNet;
  delete labelEnv;
  return (jboolean) true;
}

View File

@ -0,0 +1,21 @@
/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Header for the image-classification JNI bridge (ImageMindSporeNetnative.cpp).
// The exported functions are registered through their JNI names, so no
// declarations are needed here; the guard only prevents double inclusion.
#ifndef IMAGE_MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H
#define IMAGE_MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H
#endif  // IMAGE_MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H

View File

@ -0,0 +1,62 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "MSNetWork.h"
#include <android/log.h>
#include <iostream>
#include <string>
#include "include/errorcode.h"
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
// Constructs an empty wrapper; the session and model are created later in
// CreateSessionMS().
MSNetWork::MSNetWork(void) : session_(nullptr), model_(nullptr) {}
// NOTE(review): the destructor does not call ReleaseNets(); owners must call
// ReleaseNets() explicitly or the session/model leak — confirm intended.
MSNetWork::~MSNetWork(void) {}
// Creates a MindSpore Lite session from ctx and compiles the model held in
// modelBuffer. On any failure the partially-created state is released and
// session() remains nullptr, which is how callers detect the error.
void MSNetWork::CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx) {
session_ = mindspore::session::LiteSession::CreateSession(ctx);
if (session_ == nullptr) {
MS_PRINT("Create Session failed.");
return;
}
// Compile model.
model_ = mindspore::lite::Model::Import(modelBuffer, bufferLen);
if (model_ == nullptr) {
ReleaseNets();
MS_PRINT("Import model failed.");
return;
}
int ret = session_->CompileGraph(model_);
if (ret != mindspore::lite::RET_OK) {
ReleaseNets();
MS_PRINT("CompileGraph failed.");
return;
}
}
// Frees the model and inference session owned by this wrapper.
// Idempotent: released members are reset to nullptr, and deleting a null
// pointer is a no-op, so repeated calls are harmless.
void MSNetWork::ReleaseNets(void) {
  if (model_ != nullptr) {
    model_->Free();
  }
  delete model_;
  model_ = nullptr;
  delete session_;
  session_ = nullptr;
}

View File

@ -0,0 +1,60 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MSNETWORK_H
#define MSNETWORK_H
#include <context.h>
#include <lite_session.h>
#include <model.h>
#include <errorcode.h>
#include <cstdio>
#include <algorithm>
#include <fstream>
#include <functional>
#include <sstream>
#include <vector>
#include <map>
#include <string>
#include <memory>
#include <utility>
// Dimensions of the image tensor fed to the model.
struct ImgDims {
  int channel = 0;  // number of color channels
  int width = 0;    // image width in pixels
  int height = 0;   // image height in pixels
};
/*struct SessIterm {
std::shared_ptr<mindspore::session::LiteSession> sess = nullptr;
};*/
// Thin wrapper that owns a MindSpore Lite session/model pair.
// Lifecycle: CreateSessionMS() builds both objects; ReleaseNets() frees them.
class MSNetWork {
 public:
  MSNetWork();
  ~MSNetWork();
  // Create the LiteSession, import the model from modelBuffer and compile it.
  // On failure, session() returns nullptr afterwards.
  void CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx);
  // Free the model and session (safe to call even if creation failed).
  void ReleaseNets(void);
  // The compiled inference session, or nullptr when creation failed.
  mindspore::session::LiteSession *session() const { return session_; }
 private:
  // Raw owning pointers; managed exclusively via CreateSessionMS/ReleaseNets.
  mindspore::session::LiteSession *session_;
  mindspore::lite::Model *model_;
};
#endif

View File

@ -0,0 +1,304 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <android/bitmap.h>
#include <android/asset_manager_jni.h>
#include <android/log.h>
#include <utility>
#include <cstring>
#include <vector>
#include <string>
#include <unordered_map>
#include <set>
#include "include/errorcode.h"
#include "include/ms_tensor.h"
#include "MindSporeNetnative.h"
#include "MSNetWork.h"
#include "lite_cv/lite_mat.h"
#include "lite_cv/image_process.h"
using mindspore::dataset::LiteMat;
using mindspore::dataset::LPixelType;
using mindspore::dataset::LDataType;
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
// Number of car categories the model outputs.
static const int RET_CAR_DETAILED_SUM = 10;
// Human-readable (Chinese) display label for each category, indexed by the
// model's output class index. Order must match the trained model's classes.
static const char *labels_name_car_detailed_map[RET_CAR_DETAILED_SUM] = {
    {"大客车"},
    {"出租车"},
    {"中小型货车"},
    {"小轿车"},
    {"面包车"},
    {"越野车"},
    {"SUV"},
    {"卡车"},
    {"赛车"},
    {"消防车"},
};
// Copy the contents of a Java direct ByteBuffer into a freshly allocated
// native buffer. The caller owns the returned memory (allocated with new[]).
char *CreateLocalModelBuffer(JNIEnv *env, jobject modelBuffer) {
  void *srcAddr = env->GetDirectBufferAddress(modelBuffer);
  const int srcLen = static_cast<int>(env->GetDirectBufferCapacity(modelBuffer));
  char *localCopy = new char[srcLen];
  memcpy(localCopy, srcAddr, srcLen);
  return localCopy;
}
/**
 * Post-process the inference output: arg-max over the score vector and map
 * the winning index to its display label.
 *
 * @param RET_CATEGORY_SUM Number of entries in labels_name_map (must equal
 *                         the length of the model's score vector).
 * @param labels_name_map  Category-index -> display-name table.
 * @param msOutputs        Map of output tensor name -> tensor. The model has
 *                         a single output branch, so only the first entry is
 *                         read.
 * @return Display name of the highest-scoring category. Index 0 wins when
 *         all scores are <= 0 (max is initialized to 0.0).
 */
std::string ProcessRunnetResult(const int RET_CATEGORY_SUM, const char *const labels_name_map[],
                                std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs) {
  // The model output has just one branch; take the first map entry.
  std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
  iter = msOutputs.begin();
  auto outputTensor = iter->second;
  // Pointer to the first score of the flat score vector.
  float *temp_scores = static_cast<float *>(outputTensor->MutableData());
  float max = 0.0;
  int maxIndex = 0;
  // BUG FIX: iterate over the caller-supplied category count instead of the
  // file-local RET_CAR_DETAILED_SUM — the parameter was previously ignored,
  // which silently broke the helper for any other label set.
  for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
    if (temp_scores[i] > max) {
      max = temp_scores[i];
      maxIndex = i;
    }
  }
  // Text shown in the app for the winning category.
  std::string categoryScore = "";
  categoryScore += labels_name_map[maxIndex];
  return categoryScore;
}
// Convert an Android RGBA_8888 Bitmap into a LiteMat (RGB, uint8).
// Returns false when the bitmap format is not RGBA_8888 or pixel conversion
// fails. The bitmap's pixels are locked only for the duration of the call.
bool BitmapToLiteMat(JNIEnv *env, const jobject &srcBitmap, LiteMat *lite_mat) {
  bool ret = false;
  AndroidBitmapInfo info;
  void *pixels = nullptr;
  LiteMat &lite_mat_bgr = *lite_mat;
  AndroidBitmap_getInfo(env, srcBitmap, &info);
  // Only RGBA_8888 input is supported.
  if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
    MS_PRINT("Image Err, Request RGBA");
    return false;
  }
  AndroidBitmap_lockPixels(env, srcBitmap, &pixels);
  // Fast path: rows are tightly packed (stride == width * 4 bytes), so the
  // pixel buffer can be consumed directly.
  if (info.stride == info.width * 4) {
    ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels),
                        LPixelType::RGBA2RGB, LDataType::UINT8,
                        info.width, info.height, lite_mat_bgr);
    if (!ret) {
      MS_PRINT("Init From RGBA error");
    }
  } else {
    // Slow path: rows are padded; copy row by row into a packed temporary
    // buffer before conversion.
    unsigned char *pixels_ptr = new unsigned char[info.width * info.height * 4];
    unsigned char *ptr = pixels_ptr;
    unsigned char *data = reinterpret_cast<unsigned char *>(pixels);
    for (int i = 0; i < info.height; i++) {
      memcpy(ptr, data, info.width * 4);
      ptr += info.width * 4;
      data += info.stride;  // advance by the padded stride, not the row size
    }
    ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels_ptr),
                        LPixelType::RGBA2RGB, LDataType::UINT8,
                        info.width, info.height, lite_mat_bgr);
    if (!ret) {
      MS_PRINT("Init From RGBA error");
    }
    delete[] (pixels_ptr);
  }
  AndroidBitmap_unlockPixels(env, srcBitmap);
  return ret;
}
bool PreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr) {
bool ret = false;
LiteMat lite_mat_resize;
LiteMat &lite_norm_mat_cut = *lite_norm_mat_ptr;
ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 256, 256);
if (!ret) {
MS_PRINT("ResizeBilinear error");
return false;
}
LiteMat lite_mat_convert_float;
ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0);
if (!ret) {
MS_PRINT("ConvertTo error");
return false;
}
LiteMat lite_mat_cut;
ret = Crop(lite_mat_convert_float, lite_mat_cut, 16, 16, 224, 224);
if (!ret) {
MS_PRINT("Crop error");
return false;
}
std::vector<float> means = {0.485, 0.456, 0.406};
std::vector<float> stds = {0.229, 0.224, 0.225};
SubStractMeanNormalize(lite_mat_cut, lite_norm_mat_cut, means, stds);
return true;
}
/**
 * JNI entry point: build the MindSpore inference environment from a model
 * held in a Java direct ByteBuffer.
 *
 * Returns the address of a heap-allocated void** that points at the MSNetWork
 * wrapper (cast to jlong), or 0 on any failure. The Java side passes this
 * value back to runNet()/unloadModel().
 */
extern "C"
JNIEXPORT jlong JNICALL
Java_com_mindspore_classificationforcar_gallery_classify_TrackingMobile_loadModel(JNIEnv *env,
                                                                                  jobject thiz,
                                                                                  jobject model_buffer,
                                                                                  jint num_thread) {
  if (nullptr == model_buffer) {
    MS_PRINT("error, buffer is nullptr!");
    return (jlong) nullptr;
  }
  jlong bufferLen = env->GetDirectBufferCapacity(model_buffer);
  if (0 == bufferLen) {
    MS_PRINT("error, bufferLen is 0!");
    return (jlong) nullptr;
  }
  // Native copy of the Java buffer (allocated with new[]).
  // NOTE(review): modelBuffer is never freed on any path below. Whether
  // Model::Import retains the pointer is not visible from here — if it copies
  // the data, this is a per-load memory leak. TODO confirm ownership and
  // delete[] after CreateSessionMS if safe.
  char *modelBuffer = CreateLocalModelBuffer(env, model_buffer);
  if (modelBuffer == nullptr) {
    MS_PRINT("modelBuffer create failed!");
    return (jlong) nullptr;
  }
  // To create a mindspore network inference environment.
  // The extra void** indirection is what gets handed back to Java as jlong.
  void **labelEnv = new void *;
  MSNetWork *labelNet = new MSNetWork;
  *labelEnv = labelNet;
  mindspore::lite::Context *context = new mindspore::lite::Context;
  context->thread_num_ = num_thread;
  labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
  // The session copies what it needs from the context; safe to delete now.
  delete context;
  if (labelNet->session() == nullptr) {
    MS_PRINT("MindSpore create session failed!.");
    delete labelNet;
    delete labelEnv;
    return (jlong) nullptr;
  }
  // NOTE(review): DeleteLocalRef on a JNI argument is unnecessary — local
  // refs to parameters are released automatically when the native method
  // returns. Harmless, but can be removed.
  if (model_buffer != nullptr) {
    env->DeleteLocalRef(model_buffer);
  }
  return (jlong) labelEnv;
}
/**
 * JNI entry point: run inference on one Bitmap.
 *
 * Pipeline: Bitmap -> LiteMat (RGB uint8) -> resize/crop/normalize ->
 * copy into the model's input tensor -> RunGraph -> arg-max over the output
 * scores -> label string. Returns NULL (Java null) on any failure.
 */
extern "C" JNIEXPORT jstring JNICALL
Java_com_mindspore_classificationforcar_gallery_classify_TrackingMobile_runNet(JNIEnv *env, jclass type,
                                                                               jlong netEnv,
                                                                               jobject srcBitmap) {
  LiteMat lite_mat_bgr, lite_norm_mat_cut;
  if (!BitmapToLiteMat(env, srcBitmap, &lite_mat_bgr)) {
    MS_PRINT("BitmapToLiteMat error");
    return NULL;
  }
  if (!PreProcessImageData(lite_mat_bgr, &lite_norm_mat_cut)) {
    MS_PRINT("PreProcessImageData error");
    return NULL;
  }
  // Record the preprocessed dimensions; used below to size the memcpy into
  // the input tensor.
  ImgDims inputDims;
  inputDims.channel = lite_norm_mat_cut.channel_;
  inputDims.width = lite_norm_mat_cut.width_;
  inputDims.height = lite_norm_mat_cut.height_;
  // Get the mindsore inference environment which created in loadModel().
  void **labelEnv = reinterpret_cast<void **>(netEnv);
  if (labelEnv == nullptr) {
    MS_PRINT("MindSpore error, labelEnv is a nullptr.");
    return NULL;
  }
  MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
  auto mSession = labelNet->session();
  if (mSession == nullptr) {
    MS_PRINT("MindSpore error, Session is a nullptr.");
    return NULL;
  }
  MS_PRINT("MindSpore get session.");
  auto msInputs = mSession->GetInputs();
  if (msInputs.size() == 0) {
    MS_PRINT("MindSpore error, msInputs.size() equals 0.");
    return NULL;
  }
  // Single-input model: feed the first (only) input tensor.
  auto inTensor = msInputs.front();
  float *dataHWC = reinterpret_cast<float *>(lite_norm_mat_cut.data_ptr_);
  // Copy dataHWC to the model input tensor.
  // NOTE(review): assumes the tensor's element count matches C*W*H floats —
  // no size check is performed here.
  memcpy(inTensor->MutableData(), dataHWC,
         inputDims.channel * inputDims.width * inputDims.height * sizeof(float));
  // After the model and image tensor data is loaded, run inference.
  auto status = mSession->RunGraph();
  if (status != mindspore::lite::RET_OK) {
    MS_PRINT("MindSpore run net error.");
    return NULL;
  }
  /**
   * Get the mindspore inference results.
   * Return the map of output node name and MindSpore Lite MSTensor.
   */
  auto names = mSession->GetOutputTensorNames();
  std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs;
  for (const auto &name : names) {
    auto temp_dat = mSession->GetOutputByTensorName(name);
    msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *>{name, temp_dat});
  }
  // Map the best-scoring class index to its display label.
  std::string resultStr = ProcessRunnetResult(::RET_CAR_DETAILED_SUM,
                                              ::labels_name_car_detailed_map, msOutputs);
  const char *resultCharData = resultStr.c_str();
  return (env)->NewStringUTF(resultCharData);
}
/**
 * JNI entry point: release the inference environment created by loadModel().
 *
 * @param netEnv Address returned by loadModel(); 0/null is tolerated.
 * @return true on success, false when netEnv is null. netEnv must not be
 *         reused after this call.
 */
extern "C" JNIEXPORT jboolean JNICALL
Java_com_mindspore_classificationforcar_gallery_classify_TrackingMobile_unloadModel(JNIEnv *env,
                                                                                    jclass type,
                                                                                    jlong netEnv) {
  MS_PRINT("MindSpore release net.");
  void **labelEnv = reinterpret_cast<void **>(netEnv);
  if (labelEnv == nullptr) {
    MS_PRINT("MindSpore error, labelEnv is a nullptr.");
    // BUG FIX: the original logged the error but fell through and
    // dereferenced the null pointer on the next line.
    return (jboolean) false;
  }
  MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
  labelNet->ReleaseNets();
  // BUG FIX: also free the wrapper objects allocated in loadModel(); they
  // were previously leaked on every load/unload cycle.
  delete labelNet;
  delete labelEnv;
  return (jboolean) true;
}

View File

@ -0,0 +1,21 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H
#define MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H
#endif // MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H

View File

@ -0,0 +1,165 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.classificationforcar.gallery.classify;
import android.app.Activity;
import android.content.ContentResolver;
import android.content.Context;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
public class BitmapUtils {
private static final String TAG = "BitmapUtils";
public static void recycleBitmap(Bitmap... bitmaps) {
for (Bitmap bitmap : bitmaps) {
if (bitmap != null && !bitmap.isRecycled()) {
bitmap.recycle();
bitmap = null;
}
}
}
public static Bitmap getBitmapFormUri(Activity ac, Uri uri) {
Bitmap bitmap = null;
try {
InputStream input = ac.getContentResolver().openInputStream(uri);
BitmapFactory.Options onlyBoundsOptions = new BitmapFactory.Options();
onlyBoundsOptions.inJustDecodeBounds = true;
onlyBoundsOptions.inDither = true;//optional
onlyBoundsOptions.inPreferredConfig = Bitmap.Config.ARGB_8888;//optional
BitmapFactory.decodeStream(input, null, onlyBoundsOptions);
input.close();
int originalWidth = onlyBoundsOptions.outWidth;
int originalHeight = onlyBoundsOptions.outHeight;
if ((originalWidth == -1) || (originalHeight == -1))
return null;
float hh = 1920f;
float ww = 1080f;
int be = 1;
if (originalWidth > originalHeight && originalWidth > ww) {
be = (int) (originalWidth / ww);
} else if (originalWidth < originalHeight && originalHeight > hh) {
be = (int) (originalHeight / hh);
}
if (be <= 0) {
be = 1;
}
BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();
bitmapOptions.inSampleSize = be;
bitmapOptions.inDither = true;//optional
bitmapOptions.inPreferredConfig = Bitmap.Config.ARGB_8888;//optional
input = ac.getContentResolver().openInputStream(uri);
bitmap = BitmapFactory.decodeStream(input, null, bitmapOptions);
input.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return compressImage(bitmap);
}
public static Bitmap compressImage(Bitmap image) {
if (image != null) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
image.compress(Bitmap.CompressFormat.JPEG, 100, baos);
int options = 100;
while (baos.toByteArray().length / 1024 > 100) {
baos.reset();
image.compress(Bitmap.CompressFormat.JPEG, options, baos);
options -= 10;
}
ByteArrayInputStream isBm = new ByteArrayInputStream(baos.toByteArray());
Bitmap bitmap = BitmapFactory.decodeStream(isBm, null, null);
return bitmap;
}else {
return null;
}
}
public static File getFileFromMediaUri(Context ac, Uri uri) {
if (uri.getScheme().toString().compareTo("content") == 0) {
ContentResolver cr = ac.getContentResolver();
Cursor cursor = cr.query(uri, null, null, null, null);
if (cursor != null) {
cursor.moveToFirst();
String filePath = cursor.getString(cursor.getColumnIndex("_data"));
cursor.close();
if (filePath != null) {
return new File(filePath);
}
}
} else if (uri.getScheme().toString().compareTo("file") == 0) {
return new File(uri.toString().replace("file://", ""));
}
return null;
}
public static int getBitmapDegree(String path) {
int degree = 0;
try {
ExifInterface exifInterface = new ExifInterface(path);
int orientation = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
switch (orientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
degree = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
degree = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
degree = 270;
break;
}
} catch (IOException e) {
e.printStackTrace();
}
return degree;
}
public static Bitmap rotateBitmapByDegree(Bitmap bm, int degree) {
Bitmap returnBm = null;
Matrix matrix = new Matrix();
matrix.postRotate(degree);
try {
returnBm = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), matrix, true);
} catch (OutOfMemoryError e) {
}
if (returnBm == null) {
returnBm = bm;
}
if (bm != returnBm) {
bm.recycle();
}
return returnBm;
}
}

View File

@ -0,0 +1,34 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.classificationforcar.gallery.classify;
import android.util.Size;
import java.util.Comparator;
/**
 * Orders {@link Size} values by pixel area (width x height), ascending.
 */
public class CompareSizesByArea implements Comparator<Size> {

    @Override
    public int compare(Size lhs, Size rhs) {
        // Promote to long before multiplying so the products cannot
        // overflow the int range.
        long leftArea = (long) lhs.getWidth() * lhs.getHeight();
        long rightArea = (long) rhs.getWidth() * rhs.getHeight();
        return Long.signum(leftArea - rightArea);
    }
}

View File

@ -0,0 +1,129 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.classificationforcar.gallery.classify;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import java.io.InputStream;
import java.nio.ByteBuffer;
/**
* Call the MindSpore interface API in the Java layer.
*/
/**
 * Java-side wrapper for the MindSpore Lite JNI interface; loads the generic
 * image-scene model from the app's assets.
 */
public class ImageTrackingMobile {
    private final static String TAG = "ImageTrackingMobile";

    static {
        try {
            System.loadLibrary("mlkit-label-MS");
            Log.i(TAG, "load libiMindSpore.so successfully.");
        } catch (UnsatisfiedLinkError e) {
            Log.e(TAG, "UnsatisfiedLinkError >>>>>>" + e.getMessage());
        }
    }

    // The address of the running inference environment (0 = not loaded).
    private long netEnv = 0;

    private final Context mActivity;

    public ImageTrackingMobile(Context activity) {
        this.mActivity = activity;
    }

    /**
     * JNI load model and also create model inference environment.
     *
     * @param modelBuffer Model buffer.
     * @param numThread   The num of thread.
     * @return MindSpore Inference environment address.
     */
    public native long loadModel(ByteBuffer modelBuffer, int numThread);

    /**
     * Running model.
     *
     * @param netEnv Inference environment address.
     * @param img    A picture to be inferred.
     * @return Inference result
     */
    public native String runNet(long netEnv, Bitmap img);

    /**
     * Unbind model data.
     *
     * @param netEnv Inference environment address.
     * @return Unbound state.
     */
    public native boolean unloadModel(long netEnv);

    /**
     * Load the model from assets and create the native inference environment.
     *
     * @param modelPath asset-relative model file location
     * @return true when the native environment was created
     */
    public boolean loadModelFromBuf(String modelPath) {
        ByteBuffer buffer = loadModelFile(modelPath);
        if (buffer == null) { // asset missing or unreadable
            return false;
        }
        netEnv = loadModel(buffer, 2); //numThread's default setting is 2.
        return netEnv != 0; // 0 means the native layer failed to load.
    }

    /**
     * Run MindSpore inference on one bitmap.
     */
    public String MindSpore_runnet(Bitmap img) {
        String ret_str = runNet(netEnv, img);
        return ret_str;
    }

    /**
     * Unload model.
     *
     * @return true
     */
    public boolean unloadModel() {
        unloadModel(netEnv);
        return true;
    }

    /**
     * Read the model asset fully into a direct ByteBuffer.
     *
     * @param modelPath asset-relative model file path
     * @return direct buffer holding the model bytes, or null on failure
     */
    public ByteBuffer loadModelFile(String modelPath) {
        InputStream is = null;
        try {
            is = mActivity.getAssets().open(modelPath);
            byte[] bytes = new byte[is.available()];
            // BUG FIX: a single read() is not guaranteed to fill the buffer;
            // loop until the whole model has been read.
            int offset = 0;
            while (offset < bytes.length) {
                int read = is.read(bytes, offset, bytes.length - offset);
                if (read == -1) {
                    break;
                }
                offset += read;
            }
            return ByteBuffer.allocateDirect(bytes.length).put(bytes);
        } catch (Exception e) {
            Log.d("loadModelFile", " Exception occur. ");
            Log.e(TAG, Log.getStackTraceString(e));
        } finally {
            // BUG FIX: the stream was never closed in the original.
            if (is != null) {
                try {
                    is.close();
                } catch (Exception e) {
                    Log.e(TAG, Log.getStackTraceString(e));
                }
            }
        }
        return null;
    }
}

View File

@ -0,0 +1,45 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.classificationforcar.gallery.classify;
/**
 * Simple value object for one recognition result: a category name and its
 * confidence score.
 */
public class RecognitionImageBean {
    // Display name of the recognized category.
    private String name;
    // Confidence score reported by the model.
    private float score;

    public RecognitionImageBean(String name, float score) {
        this.name = name;
        this.score = score;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public float getScore() {
        return score;
    }

    public void setScore(float score) {
        this.score = score;
    }
}

View File

@ -0,0 +1,129 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.classificationforcar.gallery.classify;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
/**
* Call the MindSpore interface API in the Java layer.
*/
/**
 * Java-side wrapper for the MindSpore Lite JNI interface; loads the car
 * classification model from an absolute file path.
 */
public class TrackingMobile {
    private final static String TAG = "TrackingMobile";

    static {
        try {
            System.loadLibrary("mlkit-label-MS");
            Log.i(TAG, "load libiMindSpore.so successfully.");
        } catch (UnsatisfiedLinkError e) {
            Log.e(TAG, "UnsatisfiedLinkError " + e.getMessage());
        }
    }

    // The address of the running inference environment (0 = not loaded).
    private long netEnv = 0;

    private final Context mActivity;

    public TrackingMobile(Context activity) {
        this.mActivity = activity;
    }

    /**
     * JNI load model and also create model inference environment.
     *
     * @param modelBuffer Model buffer.
     * @param numThread   The num of thread.
     * @return MindSpore Inference environment address.
     */
    public native long loadModel(ByteBuffer modelBuffer, int numThread);

    /**
     * Running model.
     *
     * @param netEnv Inference environment address.
     * @param img    A picture to be inferred.
     * @return Inference result
     */
    public native String runNet(long netEnv, Bitmap img);

    /**
     * Unbind model data.
     *
     * @param netEnv Inference environment address.
     * @return Unbound state.
     */
    public native boolean unloadModel(long netEnv);

    /**
     * Load the model from disk and create the native inference environment.
     *
     * @param modelPath absolute model file location
     * @return true when the native environment was created
     */
    public boolean loadModelFromBuf(String modelPath) {
        ByteBuffer buffer = loadModelFile(modelPath);
        if (buffer == null) { // model file missing or unreadable
            return false;
        }
        netEnv = loadModel(buffer, 2); //numThread's default setting is 2.
        return netEnv != 0; // 0 means the native layer failed to load.
    }

    /**
     * Run MindSpore inference on one bitmap.
     */
    public String MindSpore_runnet(Bitmap img) {
        String ret_str = runNet(netEnv, img);
        return ret_str;
    }

    /**
     * Unload model.
     * @return true
     */
    public boolean unloadModel() {
        unloadModel(netEnv);
        return true;
    }

    /**
     * Read the model file fully into a direct ByteBuffer.
     * @param modelPath absolute model file path
     * @return direct buffer holding the model bytes, or null on failure
     */
    public ByteBuffer loadModelFile(String modelPath) {
        InputStream is = null;
        try {
            is = new FileInputStream(modelPath);
            byte[] bytes = new byte[is.available()];
            // BUG FIX: a single read() is not guaranteed to fill the buffer;
            // loop until the whole model has been read.
            int offset = 0;
            while (offset < bytes.length) {
                int read = is.read(bytes, offset, bytes.length - offset);
                if (read == -1) {
                    break;
                }
                offset += read;
            }
            return ByteBuffer.allocateDirect(bytes.length).put(bytes);
        } catch (Exception e) {
            Log.d("loadModelFile", " Exception occur. ");
            Log.e(TAG, Log.getStackTraceString(e));
        } finally {
            // BUG FIX: the stream was never closed in the original.
            if (is != null) {
                try {
                    is.close();
                } catch (Exception e) {
                    Log.e(TAG, Log.getStackTraceString(e));
                }
            }
        }
        return null;
    }
}

View File

@ -0,0 +1,74 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.classificationforcar.widget;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;
/**
 * A {@link TextureView} that can be constrained to a given aspect ratio:
 * once setAspectRatio() is called, onMeasure() shrinks one dimension so the
 * view matches the requested width:height ratio.
 */
public class AutoFitTextureView extends TextureView {
    // Requested aspect ratio; 0/0 means "no constraint" (fill measured size).
    private int mRatioWidth = 0;
    private int mRatioHeight = 0;

    public AutoFitTextureView(Context context) {
        this(context, null);
    }

    public AutoFitTextureView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    /**
     * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
     * calculated from the parameters. Note that the actual sizes of parameters don't matter, that
     * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
     *
     * @param width  Relative horizontal size
     * @param height Relative vertical size
     */
    public void setAspectRatio(int width, int height) {
        if (width < 0 || height < 0) {
            throw new IllegalArgumentException("Size cannot be negative.");
        }
        mRatioWidth = width;
        mRatioHeight = height;
        requestLayout();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        int width = MeasureSpec.getSize(widthMeasureSpec);
        int height = MeasureSpec.getSize(heightMeasureSpec);
        if (0 == mRatioWidth || 0 == mRatioHeight) {
            // No ratio set yet: accept the measured size as-is.
            setMeasuredDimension(width, height);
        } else {
            // Fit the ratio inside the measured bounds, preserving the larger
            // dimension and deriving the other from the ratio.
            if (width > height * mRatioWidth / mRatioHeight) {
                setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
            } else {
                setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
            }
        }
    }
}

View File

@ -0,0 +1,156 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.classificationforcar.widget;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.widget.TextView;
import androidx.annotation.Nullable;
import androidx.annotation.UiThread;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.classificationforcar.R;
import com.mindspore.classificationforcar.gallery.classify.ImageTrackingMobile;
import com.mindspore.classificationforcar.gallery.classify.RecognitionImageBean;
import com.mindspore.classificationforcar.gallery.classify.TrackingMobile;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* The main interface of camera preview.
* Using Camera 2 API.
*/
/**
 * The main interface of camera preview (Camera2 API). Depending on the
 * "ISHASCARMODELFILE" intent extra it loads either the car-specific model
 * (from the path in "FILEPATH") or the bundled generic scene model, then
 * feeds camera frames through the model and shows the result text.
 */
public class CameraActivity extends AppCompatActivity {
    private static final String TAG = "CameraActivity";
    // Asset path of the bundled generic image-scene model.
    private static final String IMAGE_SCENE_MS = "model/mobilenetv2.ms";
    // Absolute path of the car model file (from the launching intent).
    private String filePath;
    // True when the car-specific model should be used.
    private boolean isCarModel;
    private TrackingMobile trackingMobile;
    private ImageTrackingMobile imageTrackingMobile;
    private TextView resultText;
    private List<RecognitionImageBean> recognitionObjectBeanList;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Log.d(TAG, "onCreate");
        setContentView(R.layout.activity_camera);
        filePath = getIntent().getStringExtra("FILEPATH");
        isCarModel = getIntent().getBooleanExtra("ISHASCARMODELFILE", false);
        resultText = findViewById(R.id.textResult);
        // Load whichever model the intent selected; bail out (without adding
        // the camera fragment) when loading fails.
        if (isCarModel) {
            trackingMobile = new TrackingMobile(this);
            boolean ret = trackingMobile.loadModelFromBuf(filePath);
            if (!ret) {
                Log.e(TAG, "Load model error.");
                return;
            }
        } else {
            imageTrackingMobile = new ImageTrackingMobile(this);
            boolean ret = imageTrackingMobile.loadModelFromBuf(IMAGE_SCENE_MS);
            if (!ret) {
                Log.e(TAG, "Load model error.");
                return;
            }
        }
        addCameraFragment();
    }

    // Attach the camera preview fragment; each delivered frame is handed to
    // initMindspore() on the UI thread.
    protected void addCameraFragment() {
        CameraFragment cameraFragment = CameraFragment.newInstance(bitmap -> {
            runOnUiThread(() -> initMindspore(bitmap));
        });
        getSupportFragmentManager().beginTransaction()
                .replace(R.id.container, cameraFragment)
                .commitAllowingStateLoss();
    }

    // Run inference on one camera frame and update the result view.
    private void initMindspore(Bitmap bitmap) {
        // run net.
        if (isCarModel) {
            // Car model returns a single label string.
            long startTime = System.currentTimeMillis();
            String result = trackingMobile.MindSpore_runnet(bitmap);
            long endTime = System.currentTimeMillis();
            resultText.setText(TextUtils.isEmpty(result) ? "正在识别..." : result);
            Log.d(TAG, "RUNNET CONSUMING" + (endTime - startTime) + "ms");
            Log.d(TAG, "result" + result);
        } else {
            // Scene model returns "name:score;name:score;..."; parse, keep
            // entries scoring above 0.5 and sort descending by score.
            if (recognitionObjectBeanList != null) {
                recognitionObjectBeanList.clear();
            } else {
                recognitionObjectBeanList = new ArrayList<>();
            }
            long startTime = System.currentTimeMillis();
            String result = imageTrackingMobile.MindSpore_runnet(bitmap);
            long endTime = System.currentTimeMillis();
            Log.d(TAG, "RUNNET CONSUMING" + (endTime - startTime) + "ms");
            Log.d(TAG, "result" + result);
            if (!TextUtils.isEmpty(result)) {
                String[] resultArray = result.split(";");
                for (String singleRecognitionResult : resultArray) {
                    String[] singleResult = singleRecognitionResult.split(":");
                    float score = Float.parseFloat(singleResult[1]);
                    if (score > 0.5) {
                        recognitionObjectBeanList.add(new RecognitionImageBean(singleResult[0], score));
                    }
                }
                Collections.sort(recognitionObjectBeanList, (t1, t2) -> Float.compare(t2.getScore(), t1.getScore()));
                showResultsInBottomSheet(recognitionObjectBeanList, (endTime - startTime) + "ms");
            }
        }
    }

    // Render the top recognition results as "name : score%" lines.
    @UiThread
    protected void showResultsInBottomSheet(List<RecognitionImageBean> list, String time) {
        if (list == null || list.size() < 1) {
            return;
        }
        StringBuilder stringBuilder = new StringBuilder();
        for (int i = 0; i < list.size(); i++) {
            RecognitionImageBean bean = list.get(i);
            stringBuilder.append(bean.getName()).append("\r:\r").append(String.format("%.2f", (100 * bean.getScore())) + "%").append("\r\n");
            // NOTE(review): this breaks after appending index 4, i.e. up to
            // FIVE entries are shown, although the original comment claimed a
            // maximum of 3 — confirm the intended count (use `i >= 2` for 3).
            if (i > 3) {
                break;
            }
        }
        resultText.setText(stringBuilder);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Release whichever native model environment was created in onCreate.
        if (trackingMobile != null) {
            trackingMobile.unloadModel();
        }
        if (imageTrackingMobile != null) {
            imageTrackingMobile.unloadModel();
        }
    }
}

View File

@ -0,0 +1,782 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.classificationforcar.widget;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import com.mindspore.classificationforcar.R;
import com.mindspore.classificationforcar.gallery.classify.CompareSizesByArea;
import com.mindspore.classificationforcar.gallery.classify.TrackingMobile;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
/**
 * Fragment that drives a Camera2 live preview into an {@link AutoFitTextureView}
 * and, on a fixed 500 ms cadence, hands the current preview frame (as a Bitmap)
 * to a {@link RecognitionDataCallBack} for classification on a dedicated
 * background thread.
 */
public class CameraFragment extends Fragment {
    private static final String TAG = "CameraFragment";
    // Maps display rotation (Surface.ROTATION_*) to the JPEG orientation offset
    // applied in getOrientation() when capturing stills.
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 90);
        ORIENTATIONS.append(Surface.ROTATION_90, 0);
        ORIENTATIONS.append(Surface.ROTATION_180, 270);
        ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }
    // Upper bound on the preview size requested from the camera; larger previews
    // can exceed the camera bus bandwidth (see setPreviewSize()).
    private static final int MAX_PREVIEW_WIDTH = 1920;
    private static final int MAX_PREVIEW_HEIGHT = 1280;
    // Guards camera open/close so the device is not closed while opening.
    private Semaphore mCameraOpenCloseLock = new Semaphore(1);
    // Current state of the still-capture state machine (see mCaptureCallback).
    private int mState = STATE_PREVIEW;
    private static final int STATE_PREVIEW = 0;
    private static final int STATE_WAITING_LOCK = 1;
    private static final int STATE_WAITING_PRECAPTURE = 2;
    private static final int STATE_WAITING_NON_PRECAPTURE = 3;
    private static final int STATE_PICTURE_TAKEN = 4;
    /**
     * Data interface returned after identification.
     */
    private RecognitionDataCallBack recognitionDataCallBack;
    private AutoFitTextureView mTextureView;
    private boolean mFlashSupported;
    // Set in stopBackgroundThread() so classifyRunnable stops re-posting itself.
    private boolean isPreBackgroundThreadPause;
    /**
     * HandlerThread and Handler of camera and algorithm.
     */
    private HandlerThread mCameraHandlerThread, mMindsporeHandlerThread;
    private Handler mCameraHandler, mMindsporeHandler;
    private CameraManager mCameraManager;
    private CameraCaptureSession mCaptureSession;
    private CameraDevice mCameraDevice;
    private String mCameraId;
    private ImageReader mImageReader;
    private CaptureRequest.Builder mPreviewRequestBuilder;
    private CaptureRequest mPreviewRequest;
    // Destination file for a captured still (created in initImageReader()).
    private File mFile;
    private Size mPreviewSize;
    // NOTE(review): never assigned anywhere in this class, so it stays 0.
    // getOrientation() and setPreviewSize() therefore assume a 0-degree sensor —
    // confirm whether SENSOR_ORIENTATION was meant to be read in
    // initHandlerMatchingSize().
    private int mSensorOrientation;
    private CameraDevice.StateCallback mCameraDeviceStateCallback;
    // Private: instances are obtained via newInstance().
    private CameraFragment(RecognitionDataCallBack recognitionDataCallBack) {
        this.recognitionDataCallBack = recognitionDataCallBack;
    }
    /**
     * Singleton.
     *
     * @param recognitionDataCallBack Identify data return interface.
     * @return Construction method.
     */
    public static CameraFragment newInstance(RecognitionDataCallBack recognitionDataCallBack) {
        return new CameraFragment(recognitionDataCallBack);
    }
    /**
     * Data interface returned after identification.
     */
    public interface RecognitionDataCallBack {
        void onRecognitionDataCallBack(Bitmap bitmap);
    }
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_camera, container, false);
    }
    @Override
    public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
        mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
    }
    // Full camera setup happens on every resume; torn down again in onPause().
    @Override
    public void onResume() {
        super.onResume();
        initChildThread();
        initCameraManager();
        initSelectCamera();
        initHandlerMatchingSize();
        initImageReader();
        initTextureViewListener();
    }
    @Override
    public void onPause() {
        closeCamera();
        stopBackgroundThread();
        super.onPause();
    }
    // Starts the camera worker thread and the MindSpore inference thread, and
    // schedules the first classification pass 500 ms out.
    private void initChildThread() {
        mCameraHandlerThread = new HandlerThread("CAMERA2");
        mCameraHandlerThread.start();
        mCameraHandler = new Handler(mCameraHandlerThread.getLooper());
        mMindsporeHandlerThread = new HandlerThread("MINDSPORE");
        mMindsporeHandlerThread.start();
        mMindsporeHandler = new Handler(mMindsporeHandlerThread.getLooper());
        mMindsporeHandler.postDelayed(classifyRunnable, 500);
    }
    /**
     * Recurring task on the MindSpore thread: snapshots the current preview
     * frame and forwards it to the callback, then re-posts itself every 500 ms
     * until isPreBackgroundThreadPause is set.
     */
    private Runnable classifyRunnable = new Runnable() {
        public void run() {
            synchronized (CameraFragment.this) {
                Bitmap bitmap = mTextureView.getBitmap();
                if (bitmap != null) {
                    if (recognitionDataCallBack != null) {
                        // Interface returns data
                        recognitionDataCallBack.onRecognitionDataCallBack(bitmap);
                    }
                }
                if (mMindsporeHandler != null && !isPreBackgroundThreadPause) {
                    mMindsporeHandler.postDelayed(classifyRunnable, 500);
                }
            }
        }
    };
    private void initCameraManager() {
        mCameraManager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
    }
    // Picks the first back-facing camera; toasts an error if none is found.
    // NOTE(review): `facing == CameraCharacteristics.LENS_FACING_BACK` unboxes
    // the Integer and would NPE if LENS_FACING were ever null — consider a null
    // check or Integer.valueOf(...).equals(facing).
    private void initSelectCamera() {
        try {
            String[] cameraIdArray = mCameraManager.getCameraIdList();
            for (String itemId : cameraIdArray) {
                CameraCharacteristics itemCharacteristics = mCameraManager.getCameraCharacteristics(itemId);
                Integer facing = itemCharacteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing == CameraCharacteristics.LENS_FACING_BACK) {
                    mCameraId = itemId;
                    break;
                }
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
        if (mCameraId == null) {
            Toast.makeText(getActivity(), getString(R.string.camera_error), Toast.LENGTH_SHORT).show();
        }
    }
    private StreamConfigurationMap streamConfigurationMap;
    private Size largest;
    /**
     * Calculate the camera resolution suitable for the current screen resolution.
     * Caches the stream configuration map and the largest JPEG output size, and
     * records whether the flash unit is available.
     */
    private void initHandlerMatchingSize() {
        try {
            CameraCharacteristics cameraCharacteristics = mCameraManager.getCameraCharacteristics(mCameraId);
            streamConfigurationMap = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            Size[] sizes = streamConfigurationMap.getOutputSizes(ImageFormat.JPEG);
            largest = Collections.max(Arrays.asList(sizes), new CompareSizesByArea());
            Boolean available = cameraCharacteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
    /**
     * Initialize the picture.
     * Creates a 640x480 JPEG ImageReader; each available frame is immediately
     * acquired and closed (the still itself is not persisted here — only mFile's
     * path is generated).
     */
    private void initImageReader() {
        final int W = 640;
        final int H = 480;
        mImageReader = ImageReader.newInstance(W, H, ImageFormat.JPEG, 30);
        mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                mFile = new File(getActivity().getExternalFilesDir(null), System.currentTimeMillis() + ".jpg");
                // Get the data frame and start the algorithm processing.
                try {
                    // Get the next image from the ImageReader queue.
                    Image image = reader.acquireNextImage();
                    image.close();
                } catch (Exception e) {
                    Log.e(TAG, "onImageAvailable: " + e.toString());
                }
            }
        }, mCameraHandler);
    }
    /**
     * TextureView.SurfaceTextureListener
     * Opens the camera immediately if the surface is ready, otherwise defers
     * until onSurfaceTextureAvailable fires.
     */
    private void initTextureViewListener() {
        if (mTextureView.isAvailable()) {
            openCamera(mTextureView.getMeasuredWidth(), mTextureView.getMeasuredHeight());
            Log.d(TAG, "isAvailable: " + mTextureView.getWidth() + "--" + mTextureView.getHeight());
        } else {
            mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
                @Override
                public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
                    openCamera(width, height);
                }
                @Override
                public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
                    configureTransform(width, height);
                }
                @Override
                public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surfaceTexture) {
                    return true;
                }
                @Override
                public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surfaceTexture) {
                }
            });
        }
    }
    // Opens the selected camera under mCameraOpenCloseLock; the semaphore is
    // released in each StateCallback branch once the open attempt resolves.
    @SuppressLint("MissingPermission")
    private void openCamera(int width, int height) {
        setPreviewSize(width, height, streamConfigurationMap, largest);
        configureTransform(width, height);
        mCameraDeviceStateCallback = new CameraDevice.StateCallback() {
            @Override
            public void onOpened(@NonNull CameraDevice cameraDevice) {
                if (cameraDevice != null) {
                    mCameraOpenCloseLock.release();
                    mCameraDevice = cameraDevice;
                    createCameraPreviewSession();
                }
            }
            @Override
            public void onDisconnected(@NonNull CameraDevice cameraDevice) {
                if (cameraDevice != null) {
                    mCameraOpenCloseLock.release();
                    cameraDevice.close();
                    mCameraDevice = null;
                }
            }
            @Override
            public void onError(@NonNull CameraDevice cameraDevice, int i) {
                if (cameraDevice != null) {
                    mCameraOpenCloseLock.release();
                    cameraDevice.close();
                    mCameraDevice = null;
                    if (null != getActivity()) {
                        getActivity().finish();
                    }
                }
            }
        };
        try {
            if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
                throw new RuntimeException("Time out waiting to lock ic_launcher opening.");
            }
            mCameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mCameraHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock ic_launcher opening.", e);
        }
    }
    /**
     * Open camera preview.
     * Builds a repeating preview request targeting the TextureView surface,
     * with continuous autofocus and auto-flash when supported.
     */
    private void createCameraPreviewSession() {
        try {
            SurfaceTexture texture = mTextureView.getSurfaceTexture();
            // Set preview size.
            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            // This is the output Surface we need to start preview.
            Surface surface = new Surface(texture);
            mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewRequestBuilder.addTarget(surface);
            // Here, we create a CameraCaptureSession for ic_launcher preview.
            mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                            // The ic_launcher is already closed
                            if (null == mCameraDevice) {
                                return;
                            }
                            // When the session is ready, we start displaying the preview.
                            mCaptureSession = cameraCaptureSession;
                            try {
                                // Auto focus should be continuous for ic_launcher preview.
                                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                        CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                                // Flash is automatically enabled when necessary.
                                setAutoFlash(mPreviewRequestBuilder);
                                // Finally, we start displaying the ic_launcher preview.
                                mPreviewRequest = mPreviewRequestBuilder.build();
                                mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mCameraHandler);
                            } catch (CameraAccessException e) {
                                e.printStackTrace();
                            }
                        }
                        @Override
                        public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                            Toast.makeText(getActivity(), "Failed", Toast.LENGTH_LONG).show();
                        }
                    }, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
    // Still-capture state machine: advances mState through WAITING_LOCK ->
    // (optional precapture) -> PICTURE_TAKEN based on AF/AE results.
    private CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
        private void process(CaptureResult result) {
            switch (mState) {
                case STATE_PREVIEW: {
                    // We have nothing to do when the ic_launcher preview is working normally.
                    break;
                }
                case STATE_WAITING_LOCK: {
                    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                    if (afState == null) {
                        captureStillPicture();
                    } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState
                            || CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
                        // CONTROL_AE_STATE can be null on some devices
                        Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                        if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
                            mState = STATE_PICTURE_TAKEN;
                            captureStillPicture();
                        } else {
                            runPrecaptureSequence();
                        }
                    }
                    break;
                }
                case STATE_WAITING_PRECAPTURE: {
                    // CONTROL_AE_STATE can be null on some devices
                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                    if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE
                            || aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
                        mState = STATE_WAITING_NON_PRECAPTURE;
                    }
                    break;
                }
                case STATE_WAITING_NON_PRECAPTURE: {
                    // CONTROL_AE_STATE can be null on some devices
                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                    if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                        mState = STATE_PICTURE_TAKEN;
                        captureStillPicture();
                    }
                    break;
                }
            }
        }
        @Override
        public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request,
                                        @NonNull CaptureResult partialResult) {
            process(partialResult);
        }
        @Override
        public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request,
                                       @NonNull TotalCaptureResult result) {
            process(result);
        }
    };
    /**
     * Run the precapture sequence for capturing a still image. This method should be called when
     * we get a response in.
     */
    private void runPrecaptureSequence() {
        try {
            // This is how to tell the ic_launcher to trigger.
            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
            // Tell #mCaptureCallback to wait for the precapture sequence to be set.
            mState = STATE_WAITING_PRECAPTURE;
            mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mCameraHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
    /**
     * Capture a still picture. This method should be called when we get a response in
     * {@link #mCaptureCallback} from both
     */
    private void captureStillPicture() {
        try {
            final Activity activity = getActivity();
            if (null == activity || null == mCameraDevice) {
                return;
            }
            // This is the CaptureRequest.Builder that we use to take a picture.
            final CaptureRequest.Builder captureBuilder =
                    mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            captureBuilder.addTarget(mImageReader.getSurface());
            // Use the same AE and AF modes as the preview.
            captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            setAutoFlash(captureBuilder);
            // Orientation
            int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));
            CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request,
                                               @NonNull TotalCaptureResult result) {
                    showToast("Saved: " + mFile);
                    Log.d(TAG, mFile.toString());
                    unlockFocus();
                }
            };
            mCaptureSession.stopRepeating();
            mCaptureSession.abortCaptures();
            mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
    /**
     * Retrieves the JPEG orientation from the specified screen rotation.
     *
     * @param rotation The screen rotation.
     * @return The JPEG orientation (one of 0, 90, 270, and 360)
     */
    private int getOrientation(int rotation) {
        return (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360;
    }
    /**
     * Unlock the focus. This method should be called when still image capture sequence is
     * finished.
     */
    private void unlockFocus() {
        try {
            // Reset the auto-focus trigger
            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
            setAutoFlash(mPreviewRequestBuilder);
            mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mCameraHandler);
            // After this, the ic_launcher will go back to the normal state of preview.
            mState = STATE_PREVIEW;
            mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mCameraHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
    // Requests auto-flash only when the device reported a flash unit.
    private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
        if (mFlashSupported) {
            requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
        }
    }
    // Posts a short toast on the UI thread; safe to call from worker threads.
    protected void showToast(final String text) {
        final Activity activity = getActivity();
        if (activity != null) {
            activity.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
                }
            });
        }
    }
    /**
     * Configures the necessary {@link Matrix} transformation to `mTextureView`.
     * This method should be called after the ic_launcher preview size is determined in
     * setUpCameraOutputs and also the size of `mTextureView` is fixed.
     *
     * @param viewWidth  The width of `mTextureView`
     * @param viewHeight The height of `mTextureView`
     */
    protected void configureTransform(int viewWidth, int viewHeight) {
        Activity activity = getActivity();
        if (null == mTextureView || null == mPreviewSize || null == activity) {
            return;
        }
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
        if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
            float scale =
                    Math.max((float) viewHeight / mPreviewSize.getHeight(), (float) viewWidth / mPreviewSize.getWidth());
            matrix.postScale(scale, scale, centerX, centerY);
            matrix.postRotate(90 * (rotation - 2), centerX, centerY);
        } else if (Surface.ROTATION_180 == rotation) {
            matrix.postRotate(180, centerX, centerY);
        }
        mTextureView.setTransform(matrix);
    }
    /**
     * Set preview image size and positioning.
     *
     * @param width
     * @param height
     * @param map     StreamConfigurationMap, the manager of all output formats and sizes supported by the camera.
     * @param largest The max size
     */
    private void setPreviewSize(int width, int height, StreamConfigurationMap map, Size largest) {
        // Find out if we need to swap dimension to get the preview size relative to sensor coordinate.
        int displayRotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
        Log.d(TAG, "displayRotation: " + displayRotation);
        boolean swappedDimensions = false;
        if (Surface.ROTATION_0 == displayRotation || Surface.ROTATION_180 == displayRotation) {
            if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                swappedDimensions = true;
            }
        } else if (Surface.ROTATION_90 == displayRotation || Surface.ROTATION_270 == displayRotation) {
            if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                swappedDimensions = true;
            }
        }
        Point displaySize = new Point();
        getActivity().getWindowManager().getDefaultDisplay().getSize(displaySize);
        int rotatedPreviewWidth = width;
        int rotatedPreviewHeight = height;
        int maxPreviewWidth = displaySize.x;
        int maxPreviewHeight = displaySize.y;
        if (swappedDimensions) {
            rotatedPreviewWidth = height;
            rotatedPreviewHeight = width;
            maxPreviewWidth = displaySize.y;
            maxPreviewHeight = displaySize.x;
        }
        if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
            maxPreviewWidth = MAX_PREVIEW_WIDTH;
        }
        if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
            maxPreviewHeight = MAX_PREVIEW_HEIGHT;
        }
        // Danger, W.R.! Attempting to use too large a preview size could exceed the ic_launcher
        // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
        // garbage capture data.
        mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth,
                rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);
        // We fit the aspect ratio of TextureView to the size of preview we picked.
        int orientation = getResources().getConfiguration().orientation;// the orientation is fixed in the manifest
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
            mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        } else {
            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
        }
    }
    /**
     * Given {@code choices} of {@code Size}s supported by a ic_launcher, choose the smallest one that
     * is at least as large as the respective texture view size, and that is at most as large as the
     * respective max size, and whose aspect ratio matches with the specified value. If such size
     * doesn't exist, choose the largest one that is at most as large as the respective max size,
     * and whose aspect ratio matches with the specified value.
     *
     * @param choices           The list of sizes that the ic_launcher supports for the intended output
     *                          class
     * @param textureViewWidth  The width of the texture view relative to sensor coordinate
     * @param textureViewHeight The height of the texture view relative to sensor coordinate
     * @param maxWidth          The maximum width that can be chosen
     * @param maxHeight         The maximum height that can be chosen
     * @param aspectRatio       The aspect ratio
     * @return The optimal {@code Size}, or an arbitrary one if none were big enough
     */
    protected Size chooseOptimalSize(Size[] choices, int textureViewWidth, int textureViewHeight, int maxWidth,
                                     int maxHeight, Size aspectRatio) {
        // Collect the supported resolutions that are at least as big as the preview Surface
        List<Size> bigEnough = new ArrayList<>();
        // Collect the supported resolutions that are smaller than the preview Surface
        List<Size> notBigEnough = new ArrayList<>();
        int w = aspectRatio.getWidth();
        int h = aspectRatio.getHeight();
        for (Size option : choices) {
            if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight
                    && option.getHeight() == option.getWidth() * h / w) {
                // if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight) {
                if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) {
                    bigEnough.add(option);
                } else {
                    notBigEnough.add(option);
                }
            }
        }
        if (bigEnough.size() > 0) {
            return Collections.min(bigEnough, new CompareSizesByArea());
        } else if (notBigEnough.size() > 0) {
            return Collections.max(notBigEnough, new CompareSizesByArea());
        } else {
            Log.e(TAG, "Couldn't find any suitable preview size");
            return choices[0];
        }
    }
    /**
     * Closes the current {@link CameraDevice}.
     */
    private void closeCamera() {
        try {
            mCameraOpenCloseLock.acquire();
            if (null != mCaptureSession) {
                mCaptureSession.close();
                mCaptureSession = null;
            }
            if (null != mCameraDevice) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
            if (null != mImageReader) {
                mImageReader.close();
                mImageReader = null;
            }
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock ic_launcher closing.", e);
        } finally {
            mCameraOpenCloseLock.release();
        }
    }
    // Stops both worker threads: flags the classify loop to stop re-posting,
    // quits the loopers, then joins and clears handlers in order.
    private void stopBackgroundThread() {
        isPreBackgroundThreadPause = true;
        mCameraHandlerThread.quitSafely();
        mMindsporeHandlerThread.quitSafely();
        try {
            mCameraHandlerThread.join();
            mCameraHandlerThread = null;
            mCameraHandler.removeCallbacksAndMessages(null);
            mCameraHandler = null;
            mMindsporeHandlerThread.join();
            mMindsporeHandlerThread = null;
            mMindsporeHandler.removeCallbacksAndMessages(null);
            mMindsporeHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}

View File

@ -0,0 +1,341 @@
package com.mindspore.classificationforcar.widget;
import android.Manifest;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.provider.Settings;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.UiThread;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.core.content.FileProvider;
import androidx.recyclerview.widget.GridLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.mindspore.classificationforcar.R;
import com.mindspore.classificationforcar.gallery.classify.BitmapUtils;
import com.mindspore.classificationforcar.gallery.classify.ImageTrackingMobile;
import com.mindspore.classificationforcar.gallery.classify.RecognitionImageBean;
import com.mindspore.classificationforcar.gallery.classify.TrackingMobile;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class MainActivity extends AppCompatActivity implements OnBackgroundImageListener {
private static final String TAG = "MainActivity";
private static final String[] PERMISSIONS = {Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.READ_PHONE_STATE, Manifest.permission.CAMERA};
private static final int REQUEST_PERMISSION = 0;
private static final int[] IMAGES = {R.drawable.style0, R.drawable.style1, R.drawable.style2, R.drawable.style3, R.drawable.style4,
R.drawable.style5, R.drawable.style6, R.drawable.style7, R.drawable.style8, R.drawable.style9,
R.drawable.style10, R.drawable.style11, R.drawable.style12, R.drawable.style13, R.drawable.style14,
R.drawable.style15, R.drawable.style16, R.drawable.style17, R.drawable.style18, R.drawable.style19};
private static final int RC_CHOOSE_PHOTO = 1;
private static final int RC_CHOOSE_CAMERA = 2;
private static final String IMAGE_SCENE_MS = "model/mobilenetv2.ms";
private boolean isAllGranted;
private static final String CAR_MS = "car.ms";
private File ROOT_FILE = new File(Environment.getExternalStorageDirectory().getAbsoluteFile(), "CarClassification");
private File DIR_FILE = new File(ROOT_FILE, CAR_MS);
private ImageView imgPreview;
private Uri imageUri;
private TextView textResult;
private ProgressBar progressBar;
private RecyclerView recyclerView;
private TrackingMobile trackingMobile;
private ImageTrackingMobile imageTrackingMobile;
private List<RecognitionImageBean> recognitionObjectBeanList;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
init();
requestPermissions();
}
private void init() {
imgPreview = findViewById(R.id.img_origin);
textResult = findViewById(R.id.tv_image);
progressBar = findViewById(R.id.progress);
recyclerView = findViewById(R.id.recyclerview);
recyclerView.setLayoutManager(new GridLayoutManager(this, 3));
recyclerView.setAdapter(new RecyclerViewAdapter(this, IMAGES, this));
trackingMobile = new TrackingMobile(this);
imageTrackingMobile = new ImageTrackingMobile(this);
}
private void requestPermissions() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
isAllGranted = checkPermissionAllGranted(PERMISSIONS);
if (!isAllGranted) {
ActivityCompat.requestPermissions(this, PERMISSIONS, REQUEST_PERMISSION);
}
} else {
isAllGranted = true;
}
}
private boolean checkPermissionAllGranted(String[] permissions) {
for (String permission : permissions) {
if (ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED) {
return false;
}
}
return true;
}
/**
* Authority application result callback
*/
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (REQUEST_PERMISSION == requestCode) {
isAllGranted = true;
for (int grant : grantResults) {
if (grant != PackageManager.PERMISSION_GRANTED) {
isAllGranted = false;
break;
}
}
if (!isAllGranted) {
openAppDetails();
}
}
}
private void openAppDetails() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setMessage("HiMindSpore需要访问 “相机” 和 “外部存储器”,请到 “应用信息 -> 权限” 中授予!");
builder.setPositiveButton("去手动授权", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
Intent intent = new Intent();
intent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
intent.addCategory(Intent.CATEGORY_DEFAULT);
intent.setData(Uri.parse("package:" + getPackageName()));
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
intent.addFlags(Intent.FLAG_ACTIVITY_NO_HISTORY);
intent.addFlags(Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS);
startActivity(intent);
}
});
builder.setNegativeButton("取消", null);
builder.show();
}
public boolean isHasCarModelFile() {
if (DIR_FILE.exists()) {
return true;
} else {
if (!ROOT_FILE.exists()) {
ROOT_FILE.mkdirs();
}
return false;
}
}
public void onClickPhoto(View view) {
if (isAllGranted) {
openGallay();
} else {
requestPermissions();
}
}
public void onClickCamera(View view) {
if (isAllGranted) {
openCamera();
} else {
requestPermissions();
}
}
public void onClickScene(View view) {
Intent intent = new Intent(MainActivity.this, CameraActivity.class);
intent.putExtra("FILEPATH", DIR_FILE.getPath());
intent.putExtra("ISHASCARMODELFILE", isHasCarModelFile());
startActivity(intent);
}
@Override
public void onBackImageSelected(int position) {
imgPreview.setImageResource(IMAGES[position]);
initMindspore(BitmapFactory.decodeResource(getResources(), IMAGES[position]));
}
private void openGallay() {
Intent intentToPickPic = new Intent(Intent.ACTION_PICK, null);
intentToPickPic.setDataAndType(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, "image/*");
startActivityForResult(intentToPickPic, RC_CHOOSE_PHOTO);
}
private void openCamera() {
Intent intentToTakePhoto = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
String mTempPhotoPath = Environment.getExternalStorageDirectory() + File.separator + "photo22.jpeg";
imageUri = FileProvider.getUriForFile(this, getApplicationContext().getPackageName() + ".fileprovider", new File(mTempPhotoPath));
intentToTakePhoto.putExtra(MediaStore.EXTRA_OUTPUT, imageUri);
startActivityForResult(intentToTakePhoto, RC_CHOOSE_CAMERA);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK) {
if (RC_CHOOSE_PHOTO == requestCode) {
if (null != data && null != data.getData()) {
this.imageUri = data.getData();
showOriginImage();
} else {
finish();
}
} else if (RC_CHOOSE_CAMERA == requestCode) {
showOriginCamera();
}
}
}
private void showOriginImage() {
File file = BitmapUtils.getFileFromMediaUri(this, imageUri);
Bitmap photoBmp = BitmapUtils.getBitmapFormUri(this, Uri.fromFile(file));
int degree = BitmapUtils.getBitmapDegree(file.getAbsolutePath());
Bitmap originBitmap = BitmapUtils.rotateBitmapByDegree(photoBmp, degree);
if (originBitmap != null) {
imgPreview.setImageBitmap(originBitmap);
initMindspore(originBitmap.copy(Bitmap.Config.ARGB_8888, true));
} else {
Toast.makeText(this, R.string.image_invalid, Toast.LENGTH_LONG).show();
}
}
private void showOriginCamera() {
try {
Bitmap originBitmap = BitmapFactory.decodeStream(getContentResolver().openInputStream(imageUri));
if (originBitmap != null) {
imgPreview.setImageBitmap(originBitmap);
initMindspore(originBitmap.copy(Bitmap.Config.ARGB_8888, true));
} else {
Toast.makeText(this, R.string.image_invalid, Toast.LENGTH_LONG).show();
}
} catch (FileNotFoundException e) {
e.printStackTrace();
}
}
/**
 * Runs classification on the given bitmap with the appropriate model: the
 * dedicated car model when its file is present, otherwise the generic image
 * scene model. Publishes the result to the UI.
 *
 * @param bitmap mutable copy owned by this method; it is always recycled
 *               before returning, and the progress bar is always hidden.
 */
private void initMindspore(Bitmap bitmap) {
    progressBar.setVisibility(View.VISIBLE);
    try {
        if (isHasCarModelFile()) {
            runCarModel(bitmap);
        } else {
            runSceneModel(bitmap);
        }
    } finally {
        // Fix: previously an early return after a model-load failure left the
        // progress bar spinning forever and leaked the bitmap copy.
        progressBar.setVisibility(View.GONE);
        if (!bitmap.isRecycled()) {
            bitmap.recycle();
        }
    }
}

// Runs the dedicated car-classification model and shows its raw result text.
private void runCarModel(Bitmap bitmap) {
    if (!trackingMobile.loadModelFromBuf(DIR_FILE.getPath())) {
        textResult.setText("Load model error.");
        Log.e(TAG, "Load model error.");
        return;
    }
    long startTime = System.currentTimeMillis();
    String result = trackingMobile.MindSpore_runnet(bitmap);
    long endTime = System.currentTimeMillis();
    textResult.setText(result);
    Log.d(TAG, "RUNNET CONSUMING" + (endTime - startTime) + "ms");
    Log.d(TAG, "result" + result);
}

// Runs the generic image-scene model, keeps recognitions with score > 0.5
// and shows them sorted by descending score in the bottom sheet.
private void runSceneModel(Bitmap bitmap) {
    if (recognitionObjectBeanList != null) {
        recognitionObjectBeanList.clear();
    } else {
        recognitionObjectBeanList = new ArrayList<>();
    }
    if (!imageTrackingMobile.loadModelFromBuf(IMAGE_SCENE_MS)) {
        textResult.setText("Load model error.");
        Log.e(TAG, "Load model error.");
        return;
    }
    long startTime = System.currentTimeMillis();
    String result = imageTrackingMobile.MindSpore_runnet(bitmap);
    long endTime = System.currentTimeMillis();
    Log.d(TAG, "RUNNET CONSUMING" + (endTime - startTime) + "ms");
    Log.d(TAG, "result" + result);
    if (TextUtils.isEmpty(result)) {
        return;
    }
    // Result format is "name:score;name:score;...".
    for (String singleRecognitionResult : result.split(";")) {
        String[] singleResult = singleRecognitionResult.split(":");
        // Fix: guard against malformed entries instead of crashing with
        // ArrayIndexOutOfBoundsException / NumberFormatException.
        if (singleResult.length < 2) {
            continue;
        }
        try {
            float score = Float.parseFloat(singleResult[1]);
            if (score > 0.5) {
                recognitionObjectBeanList.add(new RecognitionImageBean(singleResult[0], score));
            }
        } catch (NumberFormatException e) {
            Log.e(TAG, "Unparsable score in result entry: " + singleRecognitionResult);
        }
    }
    Collections.sort(recognitionObjectBeanList, (t1, t2) -> Float.compare(t2.getScore(), t1.getScore()));
    showResultsInBottomSheet(recognitionObjectBeanList, (endTime - startTime) + "ms");
}
/**
 * Renders the top recognition results into the result TextView as
 * "name\r:\rNN.NN%" lines, one per recognition, at most three entries.
 *
 * @param list recognitions sorted by descending score; null/empty is a no-op
 * @param time formatted inference duration (currently unused here)
 */
@UiThread
protected void showResultsInBottomSheet(List<RecognitionImageBean> list, String time) {
    if (list == null || list.isEmpty()) {
        return;
    }
    // Fix: the original loop broke only after appending the 5th entry
    // (i > 3), contradicting its own "maximum display is 3" comment.
    int shown = Math.min(list.size(), 3);
    StringBuilder stringBuilder = new StringBuilder();
    for (int i = 0; i < shown; i++) {
        RecognitionImageBean bean = list.get(i);
        stringBuilder.append(bean.getName())
                .append("\r:\r")
                .append(String.format("%.2f", (100 * bean.getScore())))
                .append("%")
                .append("\r\n");
    }
    textResult.setText(stringBuilder);
}
@Override
protected void onDestroy() {
    super.onDestroy();
    // Release native MindSpore Lite resources for whichever models were
    // loaded; either tracker may be null if it was never initialized.
    if (trackingMobile != null) {
        trackingMobile.unloadModel();
    }
    if (imageTrackingMobile != null) {
        imageTrackingMobile.unloadModel();
    }
}
}

View File

@ -0,0 +1,9 @@
package com.mindspore.classificationforcar.widget;
import android.view.View;
/**
 * Callback for taps on the demo-image gallery rendered by
 * {@code RecyclerViewAdapter}.
 */
public interface OnBackgroundImageListener {

    /**
     * Invoked when the user selects the demo image at the given position.
     *
     * @param position adapter position of the tapped image
     */
    void onBackImageSelected(int position);
    // Removed commented-out onImageAdd(View) declaration (dead code).
}

View File

@ -0,0 +1,98 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.classificationforcar.widget;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import com.bumptech.glide.Glide;
import com.mindspore.classificationforcar.R;
/**
 * Adapter that renders a fixed array of drawable resource ids as a gallery of
 * demo images and reports taps through {@link OnBackgroundImageListener}.
 */
public class RecyclerViewAdapter extends RecyclerView.Adapter<RecyclerViewAdapter.StyleItemViewHolder> {

    private final int[] IMAGES;
    private final Context context;
    private final OnBackgroundImageListener mListener;

    /**
     * @param context   host context used for inflation and Glide
     * @param IMAGES    drawable resource ids to display
     * @param mListener tap callback; may be null
     */
    public RecyclerViewAdapter(Context context, int[] IMAGES, OnBackgroundImageListener mListener) {
        this.IMAGES = IMAGES;
        this.context = context;
        this.mListener = mListener;
    }

    @NonNull
    @Override
    public StyleItemViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
        View view = LayoutInflater.from(context)
                .inflate(R.layout.image_item, parent, false);
        return new StyleItemViewHolder(view);
    }

    @Override
    public void onBindViewHolder(@NonNull StyleItemViewHolder holder, int position) {
        Glide.with(context)
                .load(IMAGES[position])
                .into(holder.getImageView());
        View view = holder.getMView();
        view.setTag(IMAGES[position]);
        view.setOnClickListener(clicked -> {
            if (mListener != null) {
                // Fix: read the position at click time instead of capturing
                // the bind-time parameter, which goes stale after adapter
                // updates; also removed commented-out onImageAdd dead code.
                int adapterPosition = holder.getAdapterPosition();
                if (adapterPosition != RecyclerView.NO_POSITION) {
                    mListener.onBackImageSelected(adapterPosition);
                }
            }
        });
    }

    @Override
    public int getItemCount() {
        // A null array is treated as an empty gallery.
        return IMAGES == null ? 0 : IMAGES.length;
    }

    /** Holds the item root view and its single ImageView. */
    public class StyleItemViewHolder extends RecyclerView.ViewHolder {
        private ImageView imageView;
        private final View mView;

        public StyleItemViewHolder(View mView) {
            super(mView);
            this.mView = mView;
            this.imageView = mView.findViewById(R.id.image_view);
        }

        public final ImageView getImageView() {
            return this.imageView;
        }

        public final void setImageView(ImageView imageView) {
            this.imageView = imageView;
        }

        public final View getMView() {
            return this.mView;
        }
    }
}

View File

@ -0,0 +1,30 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 111 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 192 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.5 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 313 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 241 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 247 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 138 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 169 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 109 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 107 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 204 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 189 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 328 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 255 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 193 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 307 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 266 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 631 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 273 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 288 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 254 KiB

View File

@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillColor="#26A69A"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
</vector>

View File

@ -0,0 +1,20 @@
<?xml version="1.0" encoding="utf-8"?>
<animated-rotate xmlns:android="http://schemas.android.com/apk/res/android"
android:fromDegrees="0"
android:pivotX="50%"
android:pivotY="50%"
android:toDegrees="360">
<shape
android:innerRadiusRatio="3"
android:shape="ring"
android:thicknessRatio="8"
android:useLevel="false">
<gradient
android:centerColor="#62AEEC"
android:centerY="0.50"
android:endColor="#1063A5"
android:startColor="#61C2EC"
android:type="sweep"
android:useLevel="false" />
</shape>
</animated-rotate>

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/listview_background_shape">
<stroke
android:width="1dp"
android:color="@android:color/darker_gray" />
<padding
android:bottom="2dp"
android:left="2dp"
android:right="2dp"
android:top="2dp" />
<solid android:color="#ffffffff" />
</shape>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android">
<size
android:width="40dp"
android:height="40dp" />
<corners android:radius="20dp" />
<solid android:color="#82eae5e5" />
</shape>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android">
<size
android:width="100dp"
android:height="40dp"/>
<corners android:radius="20dp" />
<solid android:color="#784b4b4b" />
</shape>

View File

@ -0,0 +1,54 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.coordinatorlayout.widget.CoordinatorLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#00000000">
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#00000000"
android:orientation="vertical">
<FrameLayout
android:id="@+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@android:color/black"
tools:context="com.mindspore.classificationforcar.widget.CameraActivity" />
<androidx.appcompat.widget.Toolbar
android:id="@+id/toolbar"
android:layout_width="match_parent"
android:layout_height="?attr/actionBarSize"
android:layout_alignParentTop="true"
android:background="#66000000">
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:drawableStart="@drawable/logo"
android:drawablePadding="5dp"
android:gravity="center_vertical"
android:maxLines="1"
android:text="MS Car Classification"
android:textColor="#ffffff"
android:textSize="20sp" />
</androidx.appcompat.widget.Toolbar>
<TextView
android:maxLines="3"
android:id="@+id/textResult"
android:layout_width="match_parent"
android:layout_height="150dp"
android:layout_alignParentBottom="true"
android:background="@color/white"
android:gravity="center"
android:orientation="vertical"
android:text="正在识别..."
android:textColor="@color/black"
android:textSize="25sp" />
</RelativeLayout>
</androidx.coordinatorlayout.widget.CoordinatorLayout>

View File

@ -0,0 +1,131 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/colorPrimary"
android:orientation="vertical">
<androidx.appcompat.widget.Toolbar
android:id="@+id/toolbar"
android:layout_width="match_parent"
android:layout_height="?attr/actionBarSize"
android:layout_alignParentTop="true"
android:background="#66000000">
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:drawableStart="@drawable/logo"
android:drawablePadding="5dp"
android:gravity="center_vertical"
android:maxLines="1"
android:text="MS Car Classification"
android:textColor="#ffffff"
android:textSize="20sp" />
</androidx.appcompat.widget.Toolbar>
<FrameLayout
android:layout_width="match_parent"
android:layout_height="300dp">
<ImageView
android:id="@+id/img_origin"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_margin="10dp"
android:scaleType="fitXY"
android:src="@drawable/logo2" />
<ProgressBar
android:id="@+id/progress"
android:layout_width="80dp"
android:layout_height="80dp"
android:layout_gravity="center"
android:indeterminateDrawable="@drawable/progressbar"
android:visibility="invisible" />
</FrameLayout>
<TextView
android:maxLines="3"
android:id="@+id/tv_image"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginLeft="20dp"
android:layout_marginTop="20dp"
android:text="Choose an Image"
android:textAllCaps="false"
android:textColor="@color/white"
android:textSize="20sp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="15dp"
android:orientation="horizontal">
<Button
android:layout_width="0dp"
android:layout_height="48dp"
android:layout_marginLeft="20dp"
android:layout_marginRight="5dp"
android:layout_weight="1"
android:background="@color/gray_btn"
android:gravity="center"
android:onClick="onClickPhoto"
android:text="PHOTO"
android:textAllCaps="false"
android:textColor="@color/white"
android:textSize="12sp" />
<Button
android:layout_width="0dp"
android:layout_height="48dp"
android:layout_marginLeft="5dp"
android:layout_marginRight="5dp"
android:layout_weight="1"
android:background="@color/gray_btn"
android:gravity="center"
android:onClick="onClickCamera"
android:text="CAMERA"
android:textAllCaps="false"
android:textColor="@color/white"
android:textSize="12sp" />
<Button
android:layout_width="0dp"
android:layout_height="48dp"
android:layout_marginLeft="5dp"
android:layout_marginRight="5dp"
android:layout_weight="1"
android:background="@color/gray_btn"
android:gravity="center"
android:onClick="onClickScene"
android:text="SCAN"
android:textAllCaps="false"
android:textColor="@color/white"
android:textSize="12sp" />
</LinearLayout>
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginLeft="20dp"
android:layout_marginTop="20dp"
android:text="Choose a Demo"
android:textColor="@color/white"
android:textSize="20sp" />
<androidx.recyclerview.widget.RecyclerView
android:id="@+id/recyclerview"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_margin="20dp"
android:fadeScrollbars="false"
android:scrollbarSize="6dp"
android:scrollbarStyle="outsideInset"
android:scrollbarThumbVertical="@color/gray"
android:scrollbars="vertical" />
</LinearLayout>

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/white"
android:orientation="vertical">
<com.mindspore.classificationforcar.widget.AutoFitTextureView
android:id="@+id/texture"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</FrameLayout>

View File

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:orientation="vertical">
<ImageView
android:id="@+id/image_view"
android:layout_width="wrap_content"
android:layout_height="120dp"
android:scaleType="fitXY"
tools:srcCompat="@drawable/logo" />
</LinearLayout>

View File

@ -0,0 +1,40 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical">
<TextView
android:id="@+id/tv_left_title"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/hor_text_view_text_margin_normal"
android:layout_marginTop="@dimen/hor_text_view_text_margin_small"
android:textSize="@dimen/hor_text_view_text_size"
tools:text="person" />
<TextView
android:gravity="end"
android:layout_toRightOf="@+id/tv_left_title"
android:textColor="@color/black"
android:layout_alignParentEnd="true"
android:id="@+id/tv_right_content"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginEnd="@dimen/hor_text_view_text_margin_normal"
android:layout_marginStart="@dimen/hor_text_view_text_margin_normal"
android:layout_marginTop="@dimen/hor_text_view_text_margin_small"
android:textSize="@dimen/hor_text_view_text_size"
tools:text="12.5" />
<View
android:id="@+id/view_bottom_line"
android:layout_marginTop="@dimen/hor_text_view_text_margin_small"
android:layout_alignStart="@+id/tv_left_title"
android:layout_alignEnd="@+id/tv_right_content"
android:background="@color/gray"
android:layout_below="@+id/tv_left_title"
android:layout_height="0.5dp"
android:layout_width="match_parent"/>
</RelativeLayout>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@color/ic_launcher_background"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@color/ic_launcher_background"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 16 KiB

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#303030</color>
<color name="colorPrimaryDark">#3700B3</color>
<color name="colorAccent">#03DAC5</color>
<color name="white">#ffffff</color>
<color name="black">#000000</color>
<color name="gray">#A69D9D</color>
<color name="gray_btn">#424242</color>
<color name="text_blue">#6DA7FF</color>
<color name="text_yellow">#F8E71C</color>
<color name="text_orange">#FF844D</color>
<color name="text_green">#66B50A</color>
</resources>

View File

@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<dimen name="ms_bottom_sheet_corner_radius">15dp</dimen>
<dimen name="ms_bottom_sheet_top_padding">8dp</dimen>
<dimen name="hor_text_view_text_margin_normal">15dp</dimen>
<dimen name="hor_text_view_text_margin_small">6dp</dimen>
<dimen name="hor_text_view_text_size">15sp</dimen>
</resources>

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="ic_launcher_background">#FFFFFF</color>
</resources>

View File

@ -0,0 +1,32 @@
<resources>
<string name="app_name">CarClassification</string>
<string name="action_settings">设置</string>
<string name="request_permission">This sample needs camera permission.</string>
<string name="camera_error">This device doesn\'t support Camera2 API.</string>
<string name="intro_message">使用的google的camera2demo.</string>
<string name="appwidget_text">EXAMPLE</string>
<string name="add_widget">Add widget</string>
<string name="image_invalid">The image path you selected is not valid. Please choose again</string>
<string name="train_invalid">Sorry, there is no object identified in this picture. Try another picture</string>
<string name="ms_ic_app_name" translation_description="Image Classification demo app [CHAR_LIMIT=40]">TFL Classify</string>
<string name="ms_ic_camera_error" translation_description="Error regarding camera support[CHAR_LIMIT=40]">This device doesn\'t support Camera2 API.</string>
<string name="ms_ic_gpu_quant_error" translation_description="Error regarding GPU support for Quant models[CHAR_LIMIT=60]">GPU does not yet supported quantized models.</string>
<string name="ms_ic_model" translatable="false">Model:</string>
<string-array name="ms_ic_models" translatable="false">
<item>Quantized_EfficientNet</item>
<item>Float_EfficientNet</item>
<item>Quantized_MobileNet</item>
<item>Float_MobileNet</item>
</string-array>
<string name="ms_ic_device" translatable="false">Device:</string>
<string-array name="ms_ic_devices" translatable="false">
<item>CPU</item>
<item>GPU</item>
<item>NNAPI</item>
</string-array>
</resources>

View File

@ -0,0 +1,10 @@
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.NoActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
</style>
</resources>

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<paths>
<external-path
name="external_files"
path="." />
</paths>

View File

@ -0,0 +1,33 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.classificationforcar;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
/**
 * Example local unit test, which will execute on the development machine
 * (host) rather than on a device.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {

    /** Sanity check that basic arithmetic — and the test harness — work. */
    @Test
    public void addition_isCorrect() {
        final int expected = 4;
        assertEquals(expected, 2 + 2);
    }
}

View File

@ -0,0 +1,27 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
    repositories {
        google()
        jcenter()
    }
    dependencies {
        // Android Gradle Plugin used to build the app module.
        classpath "com.android.tools.build:gradle:4.0.1"
        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}
allprojects {
    repositories {
        // Huawei Cloud mirror is listed first to speed up dependency
        // resolution; google()/jcenter() remain as fallbacks.
        maven {
            url "https://mirrors.huaweicloud.com/repository/maven/"
        }
        google()
        jcenter()
    }
}
// `gradle clean` removes the root build directory.
task clean(type: Delete) {
    delete rootProject.buildDir
}

View File

@ -0,0 +1,20 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true
android.injected.testOnly=false

View File

@ -0,0 +1,6 @@
#Tue Jul 28 10:28:05 CST 2020
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-all.zip

View File

@ -0,0 +1,172 @@
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"

View File

@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem DIRNAME = directory containing this script (%~dp0); default to "." if empty.
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
@rem APP_BASE_NAME = this script's file name without extension (%~n0).
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
@rem JAVA_HOME is unset: try plain "java.exe" from the PATH and verify it runs.
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
@rem Strip any double quotes from JAVA_HOME before building the java.exe path.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
@rem If there are no arguments (%~1 empty) skip straight to execution;
@rem otherwise %* captures the full argument list in one assignment.
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -0,0 +1,2 @@
// Gradle settings: declare the modules that make up this build.
include ':app'
// Root project name (also used as the default project name in IDEs).
rootProject.name = "CarClassification"