!9626 [MS][LITE]add scene detection android demo

From: @sishuikang
Reviewed-by: @zhanghaibo5,@zhang_xue_tong
Signed-off-by: @zhanghaibo5,@zhang_xue_tong
This commit is contained in:
mindspore-ci-bot 2020-12-11 14:59:26 +08:00 committed by Gitee
commit 36b4d93ad7
57 changed files with 3063 additions and 0 deletions

View File

@ -0,0 +1,83 @@
# MindSpore
build/
mindspore/lib
app/src/main/assets/model/
app/src/main/cpp/mindspore-lite-*
output
*.ir
mindspore/ccsrc/schema/inner/*
# Cmake files
CMakeFiles/
cmake_install.cmake
CMakeCache.txt
Makefile
cmake-build-debug
# Dynamic libraries
*.so
*.so.*
*.dylib
# Static libraries
*.la
*.lai
*.a
*.lib
# Protocol buffers
*_pb2.py
*.pb.h
*.pb.cc
# Object files
*.o
# Editor
.vscode
.idea/
# Cquery
.cquery_cached_index/
compile_commands.json
# Ctags and cscope
tags
TAGS
CTAGS
GTAGS
GRTAGS
GSYMS
GPATH
cscope.*
# Python files
*__pycache__*
.pytest_cache
# Mac files
*.DS_Store
# Test results
test_temp_summary_event_file/
*.dot
*.dat
*.svg
*.perf
*.info
*.ckpt
*.shp
*.pkl
.clangd
mindspore/version.py
mindspore/default_config.py
mindspore/.commit_id
onnx.proto
mindspore/ccsrc/onnx.proto
# Android
local.properties
.gradle
sdk/build
sdk/.cxx
app/.cxx

View File

@ -0,0 +1 @@
/build

View File

@ -0,0 +1,82 @@
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html

# Sets the minimum version of CMake required to build the native library
# (matches the cmake;3.4.1 package shipped with the Android SDK).
cmake_minimum_required(VERSION 3.4.1)

set(CMAKE_VERBOSE_MAKEFILE on)

# Emit the built .so per ABI so Gradle's jniLibs packaging picks it up.
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/libs/${ANDROID_ABI})

# MindSpore Lite runtime package name (downloaded/unpacked by download.gradle).
set(MINDSPORELITE_VERSION mindspore-lite-1.0.1-runtime-arm64-cpu)

# ============== Set MindSpore Dependencies. =============
set(MINDSPORELITE_ROOT ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION})

# Prebuilt MindSpore Lite shared libraries, wrapped as imported targets.
add_library(mindspore-lite SHARED IMPORTED)
set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION
        ${MINDSPORELITE_ROOT}/lib/libmindspore-lite.so)

add_library(minddata-lite SHARED IMPORTED)
set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION
        ${MINDSPORELITE_ROOT}/minddata/lib/libminddata-lite.so)
# --------------- MindSpore Lite set End. --------------------

# NOTE: file(GLOB_RECURSE) does not notice newly added sources until the
# project is re-configured; re-sync Gradle after adding .cpp/.h files.
file(GLOB_RECURSE cpp_src "src/main/cpp/*.cpp" "src/main/cpp/*.h")

# The JNI library loaded from Java via System.loadLibrary("mlkit-label-MS").
add_library(mlkit-label-MS SHARED ${cpp_src})

# Target-scoped include paths for the MindSpore Lite headers
# (instead of directory-wide include_directories()).
target_include_directories(mlkit-label-MS PRIVATE
        ${CMAKE_SOURCE_DIR}/src/main/cpp
        ${MINDSPORELITE_ROOT}
        ${MINDSPORELITE_ROOT}/include
        ${MINDSPORELITE_ROOT}/include/ir/dtype
        ${MINDSPORELITE_ROOT}/include/schema
        ${MINDSPORELITE_ROOT}/minddata/include
        ${MINDSPORELITE_ROOT}/third_party/flatbuffers/include)

# Target-scoped define (instead of directory-wide add_definitions()).
target_compile_definitions(mlkit-label-MS PRIVATE MNN_USE_LOGCAT)

# Locate the public NDK libraries. BUG FIX: the jnigraphics name previously
# contained a stray non-ASCII character ("jnig·raphics"), so find_library
# could never locate libjnigraphics; the raw "-ljnigraphics" flag is now
# replaced by the found library path.
find_library(log-lib log)
find_library(jnigraphics-lib jnigraphics)

# Link the MindSpore runtime plus the required NDK system libraries.
target_link_libraries(mlkit-label-MS PRIVATE
        minddata-lite
        mindspore-lite
        ${jnigraphics-lib}
        android
        ${log-lib})

View File

@ -0,0 +1,75 @@
apply plugin: 'com.android.application'

android {
compileSdkVersion 30
buildToolsVersion "30.0.1"
defaultConfig {
applicationId "com.mindspore.scene"
minSdkVersion 21
targetSdkVersion 30
versionCode 1
versionName "1.0.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
// Shared C++ STL is required because the app links several .so files
// (mlkit-label-MS plus the MindSpore Lite runtime libraries).
arguments "-DANDROID_STL=c++_shared"
cppFlags ""
}
}
// Only arm64-v8a is built: the downloaded MindSpore Lite runtime package
// is the arm64-cpu variant.
ndk {
abiFilters 'arm64-v8a'
}
}
// Keep native libraries and the .ms model file uncompressed in the APK so
// they can be memory-mapped at runtime.
aaptOptions {
noCompress '.so', 'ms'
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
customDebugType {
debuggable true
}
}
externalNativeBuild {
cmake {
path file('CMakeLists.txt')
}
}
ndkVersion '21.3.6528147'
// Native libraries produced by CMake land in libs/ (see CMakeLists.txt's
// CMAKE_LIBRARY_OUTPUT_DIRECTORY) and are packaged from there.
sourceSets {
main {
jniLibs.srcDirs = ['libs']
}
}
// The same .so can arrive via both libs/ and the CMake build; keep the first.
packagingOptions {
pickFirst 'lib/arm64-v8a/libmlkit-label-MS.so'
}
}

// Before gradle build.
// To download some necessary libraries (MindSpore Lite runtime + model).
apply from: 'download.gradle'

dependencies {
implementation fileTree(dir: "libs", include: ["*.jar"])
implementation 'androidx.appcompat:appcompat:1.1.0'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
implementation 'com.google.android.material:material:1.0.0'
androidTestImplementation 'com.android.support.test:rules:1.0.2'
androidTestImplementation 'com.google.truth:truth:1.0.1'
}

View File

@ -0,0 +1,82 @@
/**
 * To download necessary library from HuaWei server.
 * Including mindspore-lite .so file, minddata-lite .so file and model file.
 * The libraries can be downloaded manually.
 */
def targetMindSporeInclude = "src/main/cpp/"
def mindsporeLite_Version = "mindspore-lite-1.0.1-runtime-arm64-cpu"

def targetModelFile = "src/main/assets/model/mobilenetv2.ms"
def mindSporeLibrary_arm64 = "src/main/cpp/${mindsporeLite_Version}.tar.gz"

def modelDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms"
def mindsporeLiteDownloadUrl = "https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/lite/android_aarch64/${mindsporeLite_Version}.tar.gz"

def cleantargetMindSporeInclude = "src/main/cpp"

// Drop the CMake caches so the next build re-configures against the
// freshly downloaded headers/libraries.
task cleanCmakeCache(type: Delete) {
delete '.cxx/cmake/debug'
delete '.cxx/cmake/release'
}

// Fetch the .ms model file into the app assets.
task downloadModelFile(type: DownloadUrlTask) {
doFirst {
println "Downloading ${modelDownloadUrl}"
}
sourceUrl = "${modelDownloadUrl}"
target = file("${targetModelFile}")
}

// Fetch the MindSpore Lite runtime archive (headers + .so files).
task downloadMindSporeLibrary(type: DownloadUrlTask) {
doFirst {
println "Downloading ${mindsporeLiteDownloadUrl}"
}
sourceUrl = "${mindsporeLiteDownloadUrl}"
target = file("${mindSporeLibrary_arm64}")
}

// Unpack the downloaded tar.gz next to the C++ sources.
task unzipMindSporeInclude(type: Copy, dependsOn: 'downloadMindSporeLibrary') {
doFirst {
println "Unzipping ${mindSporeLibrary_arm64}"
}
from tarTree(resources.gzip("${mindSporeLibrary_arm64}"))
into "${targetMindSporeInclude}"
}

// Remove the archive once it has been extracted.
task cleanUnusedmindsporeFiles(type: Delete, dependsOn: ['unzipMindSporeInclude']) {
delete fileTree("${cleantargetMindSporeInclude}").matching {
include "*.tar.gz"
}
}
/*
 * Using preBuild to download mindspore library and model file.
 * Run before gradle build.
 */
// Skip the download/unpack chain when the runtime is already present.
if (file("src/main/cpp/${mindsporeLite_Version}/lib/libmindspore-lite.so").exists()) {
downloadMindSporeLibrary.enabled = false
unzipMindSporeInclude.enabled = false
cleanUnusedmindsporeFiles.enabled = false
}

// Skip the model download when the asset already exists.
if (file("src/main/assets/model/mobilenetv2.ms").exists()) {
downloadModelFile.enabled = false
}

preBuild.dependsOn cleanCmakeCache
preBuild.dependsOn downloadModelFile
preBuild.dependsOn downloadMindSporeLibrary
preBuild.dependsOn unzipMindSporeInclude
preBuild.dependsOn cleanUnusedmindsporeFiles

// Simple Ant-based file downloader: fetches sourceUrl into target.
// @OutputFile makes the task up-to-date once target exists.
class DownloadUrlTask extends DefaultTask {
@Input
String sourceUrl

@OutputFile
File target

@TaskAction
void download() {
ant.get(src: sourceUrl, dest: target)
}
}

View File

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@ -0,0 +1,32 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.mindspore.scene"
android:versionCode="1"
android:versionName="1.0">

<!-- Camera access for the live scene-detection preview. -->
<uses-permission android:name="android.permission.CAMERA" />
<!-- NOTE(review): WRITE_INTERNAL_STORAGE / READ_INTERNAL_STORAGE do not
     appear in the Android permission reference; they are likely no-ops —
     confirm and consider removing. -->
<uses-permission android:name="android.permission.WRITE_INTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_INTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.MOUNT_UNMOUNT_FILESYSTEM" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />

<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<!-- Single launcher activity hosting the camera preview. -->
<activity
android:name="com.mindspore.scene.widget.CameraActivity"
android:screenOrientation="portrait">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View File

@ -0,0 +1,63 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "MSNetWork.h"
#include <android/log.h>
#include <iostream>
#include <string>
#include "include/errorcode.h"
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
// Starts with no session/model; CreateSessionMS() builds both.
MSNetWork::MSNetWork(void) : session_(nullptr), model_(nullptr) {}

// BUG FIX: the destructor previously did nothing, leaking session_/model_
// if the caller forgot ReleaseNets(). ReleaseNets() nulls the pointers
// after deletion, so an earlier explicit call makes this a no-op.
MSNetWork::~MSNetWork(void) { ReleaseNets(); }
// Builds the inference environment for this wrapper in three ordered steps:
// create a LiteSession from ctx, import the flatbuffer model from
// modelBuffer/bufferLen, then compile the graph into the session.
// On any failure the partially-built state is released (via ReleaseNets)
// and an error is logged; callers detect failure by session() == nullptr.
// NOTE(review): modelBuffer ownership stays with the caller — this method
// never frees it.
void
MSNetWork::CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx) {
session_ = mindspore::session::LiteSession::CreateSession(ctx);
if (session_ == nullptr) {
MS_PRINT("Create Session failed.");
return;
}

// Compile model.
model_ = mindspore::lite::Model::Import(modelBuffer, bufferLen);
if (model_ == nullptr) {
// Session was created above; tear it down so state stays consistent.
ReleaseNets();
MS_PRINT("Import model failed.");
return;
}

int ret = session_->CompileGraph(model_);
if (ret != mindspore::lite::RET_OK) {
// Drop both model and session on compile failure.
ReleaseNets();
MS_PRINT("CompileGraph failed.");
return;
}
}
// Frees the session and model owned by this wrapper. Each pointer is reset
// to nullptr after deletion, so the method is safe to call repeatedly.
void MSNetWork::ReleaseNets(void) {
  if (session_ != nullptr) {
    delete session_;
    session_ = nullptr;
  }
  if (model_ != nullptr) {
    // Release the model's internal buffers before deleting the object.
    model_->Free();
    delete model_;
    model_ = nullptr;
  }
}

View File

@ -0,0 +1,58 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MSNETWORK_H
#define MSNETWORK_H
#include <context.h>
#include <lite_session.h>
#include <model.h>
#include <errorcode.h>
#include <cstdio>
#include <algorithm>
#include <fstream>
#include <functional>
#include <sstream>
#include <vector>
#include <map>
#include <string>
#include <memory>
#include <utility>
// Dimensions of an image tensor (channels-last), used when copying pixel
// data into the model input. Zero until explicitly filled in.
struct ImgDims {
int channel = 0;  // number of color channels
int width = 0;    // pixels per row
int height = 0;   // number of rows
};
// Thin owner of a MindSpore Lite inference pair (LiteSession + Model).
// Typical lifecycle: CreateSessionMS() -> session()->RunGraph() ... ->
// ReleaseNets().
class MSNetWork {
 public:
MSNetWork();

~MSNetWork();

// Creates the session from ctx, imports and compiles the model from
// modelBuffer/bufferLen. On failure session() stays nullptr.
void CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx);

// Deletes the owned session and model (idempotent).
void ReleaseNets(void);

// The compiled inference session, or nullptr if creation failed.
mindspore::session::LiteSession *session() const { return session_; }

 private:
mindspore::session::LiteSession *session_;  // owned
mindspore::lite::Model *model_;             // owned
};
#endif

View File

@ -0,0 +1,472 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <android/bitmap.h>
#include <android/asset_manager_jni.h>
#include <android/log.h>
#include <utility>
#include <cstring>
#include <vector>
#include <string>
#include <unordered_map>
#include <set>
#include "include/errorcode.h"
#include "include/ms_tensor.h"
#include "MindSporeNetnative.h"
#include "MSNetWork.h"
#include "lite_cv/lite_mat.h"
#include "lite_cv/image_process.h"
using mindspore::dataset::LiteMat;
using mindspore::dataset::LPixelType;
using mindspore::dataset::LDataType;
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
// Number of scene categories the model predicts.
static const int RET_CATEGORY_SUM = 365;
// Index-aligned label table: labels_name_map[i] is the human-readable name
// for model output channel i. Must stay in sync with the .ms model outputs.
static const char *labels_name_map[RET_CATEGORY_SUM] = {"airfield", "airplane_cabin",
"airport_terminal", "alcove", "alley",
"amphitheater", "amusement_arcade",
"amusement_park",
"apartment_building/outdoor", "aquarium",
"aqueduct", "arcade", "arch",
"archaelogical_excavation", "archive",
"arena/hockey", "arena/performance",
"arena/rodeo", "army_base", "art_gallery",
"art_school", "art_studio", "artists_loft",
"assembly_line", "athletic_field/outdoor",
"atrium/public", "attic", "auditorium",
"auto_factory", "auto_showroom",
"badlands", "bakery/shop",
"balcony/exterior", "balcony/interior",
"ball_pit",
"ballroom", "bamboo_forest", "bank_vault",
"banquet_hall", "bar",
"barn", "barndoor", "baseball_field",
"basement", "basketball_court/indoor",
"bathroom", "bazaar/indoor",
"bazaar/outdoor", "beach", "beach_house",
"beauty_salon", "bedchamber", "bedroom",
"beer_garden", "beer_hall",
"berth", "biology_laboratory", "boardwalk",
"boat_deck", "boathouse",
"bookstore", "booth/indoor",
"botanical_garden", "bow_window/indoor",
"bowling_alley",
"boxing_ring", "bridge", "building_facade",
"bullring", "burial_chamber",
"bus_interior", "bus_station/indoor",
"butchers_shop", "butte", "cabin/outdoor",
"cafeteria", "campsite", "campus",
"canal/natural", "canal/urban",
"candy_store", "canyon", "car_interior",
"carrousel", "castle",
"catacomb", "cemetery", "chalet",
"chemistry_lab", "childs_room",
"church/indoor", "church/outdoor",
"classroom", "clean_room", "cliff",
"closet", "clothing_store", "coast",
"cockpit", "coffee_shop",
"computer_room", "conference_center",
"conference_room", "construction_site",
"corn_field",
"corral", "corridor", "cottage",
"courthouse", "courtyard",
"creek", "crevasse", "crosswalk", "dam",
"delicatessen",
"department_store", "desert/sand",
"desert/vegetation", "desert_road",
"diner/outdoor",
"dining_hall", "dining_room", "discotheque",
"doorway/outdoor", "dorm_room",
"downtown", "dressing_room", "driveway",
"drugstore", "elevator/door",
"elevator_lobby", "elevator_shaft",
"embassy", "engine_room", "entrance_hall",
"escalator/indoor", "excavation",
"fabric_store", "farm",
"fastfood_restaurant",
"field/cultivated", "field/wild",
"field_road", "fire_escape", "fire_station",
"fishpond", "flea_market/indoor",
"florist_shop/indoor", "food_court",
"football_field",
"forest/broadleaf", "forest_path",
"forest_road", "formal_garden", "fountain",
"galley", "garage/indoor", "garage/outdoor",
"gas_station", "gazebo/exterior",
"general_store/indoor",
"general_store/outdoor", "gift_shop",
"glacier", "golf_course",
"greenhouse/indoor", "greenhouse/outdoor",
"grotto", "gymnasium/indoor",
"hangar/indoor",
"hangar/outdoor", "harbor",
"hardware_store", "hayfield", "heliport",
"highway", "home_office", "home_theater",
"hospital", "hospital_room",
"hot_spring", "hotel/outdoor", "hotel_room",
"house", "hunting_lodge/outdoor",
"ice_cream_parlor", "ice_floe", "ice_shelf",
"ice_skating_rink/indoor",
"ice_skating_rink/outdoor",
"iceberg", "igloo", "industrial_area",
"inn/outdoor", "islet",
"jacuzzi/indoor", "jail_cell",
"japanese_garden", "jewelry_shop",
"junkyard",
"kasbah", "kennel/outdoor",
"kindergarden_classroom", "kitchen",
"lagoon",
"lake/natural", "landfill", "landing_deck",
"laundromat", "lawn",
"lecture_room", "legislative_chamber",
"library/indoor", "library/outdoor",
"lighthouse",
"living_room", "loading_dock", "lobby",
"lock_chamber", "locker_room",
"mansion", "manufactured_home",
"market/indoor", "market/outdoor", "marsh",
"martial_arts_gym", "mausoleum", "medina",
"mezzanine", "moat/water",
"mosque/outdoor", "motel", "mountain",
"mountain_path", "mountain_snowy",
"movie_theater/indoor", "museum/indoor",
"museum/outdoor", "music_studio",
"natural_history_museum",
"nursery", "nursing_home", "oast_house",
"ocean", "office",
"office_building", "office_cubicles",
"oilrig", "operating_room", "orchard",
"orchestra_pit", "pagoda", "palace",
"pantry", "park",
"parking_garage/indoor",
"parking_garage/outdoor", "parking_lot",
"pasture", "patio",
"pavilion", "pet_shop", "pharmacy",
"phone_booth", "physics_laboratory",
"picnic_area", "pier", "pizzeria",
"playground", "playroom",
"plaza", "pond", "porch", "promenade",
"pub/indoor",
"racecourse", "raceway", "raft",
"railroad_track", "rainforest",
"reception", "recreation_room",
"repair_shop", "residential_neighborhood",
"restaurant",
"restaurant_kitchen", "restaurant_patio",
"rice_paddy", "river", "rock_arch",
"roof_garden", "rope_bridge", "ruin",
"runway", "sandbox",
"sauna", "schoolhouse", "science_museum",
"server_room", "shed",
"shoe_shop", "shopfront",
"shopping_mall/indoor", "shower",
"ski_resort",
"ski_slope", "sky", "skyscraper", "slum",
"snowfield",
"soccer_field", "stable",
"stadium/baseball", "stadium/football",
"stadium/soccer",
"stage/indoor", "stage/outdoor",
"staircase", "storage_room", "street",
"subway_station/platform", "supermarket",
"sushi_bar", "swamp", "swimming_hole",
"swimming_pool/indoor",
"swimming_pool/outdoor",
"synagogue/outdoor", "television_room",
"television_studio",
"temple/asia", "throne_room",
"ticket_booth", "topiary_garden", "tower",
"toyshop", "train_interior",
"train_station/platform", "tree_farm",
"tree_house",
"trench", "tundra", "underwater/ocean_deep",
"utility_room", "valley",
"vegetable_garden", "veterinarians_office",
"viaduct", "village", "vineyard",
"volcano", "volleyball_court/outdoor",
"waiting_room", "water_park", "water_tower",
"waterfall", "watering_hole", "wave",
"wet_bar", "wheat_field",
"wind_farm", "windmill", "yard",
"youth_hostel", "zen_garden"};
// Copies the contents of a direct ByteBuffer into a newly allocated heap
// buffer. The caller owns the returned buffer (delete[]).
char *CreateLocalModelBuffer(JNIEnv *env, jobject modelBuffer) {
  void *bufferAddr = env->GetDirectBufferAddress(modelBuffer);
  int bufferSize = static_cast<int>(env->GetDirectBufferCapacity(modelBuffer));
  char *localCopy = new char[bufferSize];
  memcpy(localCopy, bufferAddr, bufferSize);
  return localCopy;
}
/**
* To process the result of mindspore inference.
* @param msOutputs
* @return
*/
std::string ProcessRunnetResult(const int RET_CATEGORY_SUM, const char *const labels_name_map[],
std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs) {
// Get the branch of the model output.
// Use iterators to get map elements.
std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
iter = msOutputs.begin();
// The mobilenetv2.ms model output just one branch.
auto outputTensor = iter->second;
int tensorNum = outputTensor->ElementsNum();
MS_PRINT("Number of tensor elements:%d", tensorNum);
// Get a pointer to the first score.
float *temp_scores = static_cast<float *>(outputTensor->MutableData());
// float scores[RET_CATEGORY_SUM];
float scores = temp_scores[0];
int cat_loc = 0;
for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
if (scores < temp_scores[i]) {
scores = temp_scores[i];
cat_loc = i;
}
if (temp_scores[i] > 0.5) {
MS_PRINT("MindSpore scores[%d] : [%f]", i, temp_scores[i]);
}
}
// Score for each category.
// Converted to text information that needs to be displayed in the APP.
std::string categoryScore = "";
categoryScore += labels_name_map[cat_loc];
categoryScore += ":";
std::string score_str = std::to_string(scores);
categoryScore += score_str;
return categoryScore;
}
// Converts an Android Bitmap (must be RGBA_8888) into an RGB uint8 LiteMat.
// When the bitmap rows are padded (stride != width*4) the pixels are first
// compacted into a temporary tightly-packed buffer. Pixels are locked for
// the duration of the conversion and always unlocked before returning.
// Returns false on unsupported format or conversion failure.
bool BitmapToLiteMat(JNIEnv *env, const jobject &srcBitmap, LiteMat *lite_mat) {
bool ret = false;
AndroidBitmapInfo info;
void *pixels = nullptr;
LiteMat &lite_mat_bgr = *lite_mat;
AndroidBitmap_getInfo(env, srcBitmap, &info);
// Only RGBA_8888 bitmaps are supported.
if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
MS_PRINT("Image Err, Request RGBA");
return false;
}
AndroidBitmap_lockPixels(env, srcBitmap, &pixels);
if (info.stride == info.width * 4) {
// Fast path: rows are tightly packed, convert in place.
ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels),
LPixelType::RGBA2RGB, LDataType::UINT8,
info.width, info.height, lite_mat_bgr);
if (!ret) {
MS_PRINT("Init From RGBA error");
}
} else {
// Slow path: copy row by row to drop the per-row stride padding.
unsigned char *pixels_ptr = new unsigned char[info.width * info.height * 4];
unsigned char *ptr = pixels_ptr;
unsigned char *data = reinterpret_cast<unsigned char *>(pixels);
for (int i = 0; i < info.height; i++) {
memcpy(ptr, data, info.width * 4);
ptr += info.width * 4;
data += info.stride;
}
ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels_ptr),
LPixelType::RGBA2RGB, LDataType::UINT8,
info.width, info.height, lite_mat_bgr);
if (!ret) {
MS_PRINT("Init From RGBA error");
}
delete[] (pixels_ptr);
}
AndroidBitmap_unlockPixels(env, srcBitmap);
return ret;
}
bool PreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr) {
bool ret = false;
LiteMat lite_mat_resize;
LiteMat &lite_norm_mat_cut = *lite_norm_mat_ptr;
ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 256, 256);
if (!ret) {
MS_PRINT("ResizeBilinear error");
return false;
}
LiteMat lite_mat_convert_float;
ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0);
if (!ret) {
MS_PRINT("ConvertTo error");
return false;
}
LiteMat lite_mat_cut;
ret = Crop(lite_mat_convert_float, lite_mat_cut, 16, 16, 224, 224);
if (!ret) {
MS_PRINT("Crop error");
return false;
}
std::vector<float> means = {0.485, 0.456, 0.406};
std::vector<float> stds = {0.229, 0.224, 0.225};
SubStractMeanNormalize(lite_mat_cut, lite_norm_mat_cut, means, stds);
return true;
}
/**
 * The Java layer reads the model into MappedByteBuffer or ByteBuffer to load the model.
 * Creates the whole inference environment (MSNetWork + LiteSession) and
 * returns it to Java as an opaque handle (pointer cast to jlong).
 * Returns 0 (null handle) on any failure.
 */
extern "C"
JNIEXPORT jlong JNICALL
Java_com_mindspore_scene_gallery_classify_TrackingMobile_loadModel(JNIEnv *env,
jobject thiz,
jobject model_buffer,
jint num_thread) {
if (nullptr == model_buffer) {
MS_PRINT("error, buffer is nullptr!");
return (jlong) nullptr;
}
jlong bufferLen = env->GetDirectBufferCapacity(model_buffer);
if (0 == bufferLen) {
MS_PRINT("error, bufferLen is 0!");
return (jlong) nullptr;
}

// Copy the Java-side buffer to a local heap buffer.
// NOTE(review): modelBuffer is never freed in this function; if
// Model::Import copies the data this is a memory leak — confirm against
// the MindSpore Lite 1.0.1 API and free it after CreateSessionMS.
char *modelBuffer = CreateLocalModelBuffer(env, model_buffer);
if (modelBuffer == nullptr) {
MS_PRINT("modelBuffer create failed!");
return (jlong) nullptr;
}

// To create a mindspore network inference environment.
// labelEnv is a heap-allocated void** wrapping the MSNetWork pointer; it
// is the opaque handle handed back to Java.
void **labelEnv = new void *;
MSNetWork *labelNet = new MSNetWork;
*labelEnv = labelNet;

// CPU context: caller-chosen thread count, no core binding, fp32 only.
mindspore::lite::Context *context = new mindspore::lite::Context;
context->thread_num_ = num_thread;
context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND;
context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = false;
context->device_list_[0].device_type_ = mindspore::lite::DT_CPU;

labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
// The session keeps what it needs; the context can go now.
delete context;

if (labelNet->session() == nullptr) {
// Creation failed: free the wrapper objects so nothing leaks.
MS_PRINT("MindSpore create session failed!.");
delete labelNet;
delete labelEnv;
return (jlong) nullptr;
}

// NOTE(review): model_buffer is a JNI parameter; deleting its local ref
// here is harmless but unnecessary — the ref is dropped on return anyway.
if (model_buffer != nullptr) {
env->DeleteLocalRef(model_buffer);
}

return (jlong) labelEnv;
}
/**
 * After the inference environment is successfully created,
 * sending a picture to the model and run inference.
 * Pipeline: Bitmap -> LiteMat -> preprocess -> copy into input tensor ->
 * RunGraph -> arg-max over the output -> "label:score" jstring.
 * Returns NULL on any failure.
 */
extern "C" JNIEXPORT jstring JNICALL
Java_com_mindspore_scene_gallery_classify_TrackingMobile_runNet(JNIEnv *env, jclass type,
jlong netEnv,
jobject srcBitmap) {
LiteMat lite_mat_bgr, lite_norm_mat_cut;

// Decode and preprocess the bitmap into the normalized model input.
if (!BitmapToLiteMat(env, srcBitmap, &lite_mat_bgr)) {
MS_PRINT("BitmapToLiteMat error");
return NULL;
}
if (!PreProcessImageData(lite_mat_bgr, &lite_norm_mat_cut)) {
MS_PRINT("PreProcessImageData error");
return NULL;
}

ImgDims inputDims;
inputDims.channel = lite_norm_mat_cut.channel_;
inputDims.width = lite_norm_mat_cut.width_;
inputDims.height = lite_norm_mat_cut.height_;

// Get the mindsore inference environment which created in loadModel().
void **labelEnv = reinterpret_cast<void **>(netEnv);
if (labelEnv == nullptr) {
MS_PRINT("MindSpore error, labelEnv is a nullptr.");
return NULL;
}
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
auto mSession = labelNet->session();
if (mSession == nullptr) {
MS_PRINT("MindSpore error, Session is a nullptr.");
return NULL;
}
MS_PRINT("MindSpore get session.");

auto msInputs = mSession->GetInputs();
if (msInputs.size() == 0) {
MS_PRINT("MindSpore error, msInputs.size() equals 0.");
return NULL;
}
auto inTensor = msInputs.front();

float *dataHWC = reinterpret_cast<float *>(lite_norm_mat_cut.data_ptr_);
// Copy dataHWC to the model input tensor.
// NOTE(review): assumes the input tensor holds exactly
// channel*width*height floats — confirm against the model's input shape.
memcpy(inTensor->MutableData(), dataHWC,
inputDims.channel * inputDims.width * inputDims.height * sizeof(float));

// After the model and image tensor data is loaded, run inference.
auto status = mSession->RunGraph();

if (status != mindspore::lite::RET_OK) {
MS_PRINT("MindSpore run net error.");
return NULL;
}

/**
 * Get the mindspore inference results.
 * Return the map of output node name and MindSpore Lite MSTensor.
 */
auto names = mSession->GetOutputTensorNames();
std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs;
for (const auto &name : names) {
auto temp_dat = mSession->GetOutputByTensorName(name);
msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *>{name, temp_dat});
}

// Pick the best category and format it for the Java layer.
std::string resultStr = ProcessRunnetResult(::RET_CATEGORY_SUM,
::labels_name_map, msOutputs);

const char *resultCharData = resultStr.c_str();
return (env)->NewStringUTF(resultCharData);
}
/**
 * Releases the inference environment created by loadModel().
 *
 * @param netEnv opaque handle returned by loadModel() (may be 0).
 * @return true when release completed, false for a null handle.
 */
extern "C" JNIEXPORT jboolean JNICALL
Java_com_mindspore_scene_gallery_classify_TrackingMobile_unloadModel(JNIEnv *env,
                                                                     jclass type,
                                                                     jlong netEnv) {
  MS_PRINT("MindSpore release net.");
  void **labelEnv = reinterpret_cast<void **>(netEnv);
  if (labelEnv == nullptr) {
    // BUG FIX: execution previously fell through after this log and
    // dereferenced the null handle; bail out instead.
    MS_PRINT("MindSpore error, labelEnv is a nullptr.");
    return (jboolean) false;
  }
  MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
  labelNet->ReleaseNets();
  // Free the wrapper objects allocated in loadModel() so a full unload
  // leaks nothing; the Java-side handle must not be reused afterwards.
  delete labelNet;
  delete labelEnv;
  return (jboolean) true;
}

View File

@ -0,0 +1,21 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H
#define MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H
#endif // MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H

View File

@ -0,0 +1,34 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.scene.gallery.classify;
import android.util.Size;
import java.util.Comparator;
/**
 * Data comparator.
 *
 * Orders sizes by pixel area (width * height) in ascending order.
 */
public class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // We cast here to ensure the multiplications won't overflow.
        long leftArea = (long) lhs.getWidth() * lhs.getHeight();
        long rightArea = (long) rhs.getWidth() * rhs.getHeight();
        return Long.signum(leftArea - rightArea);
    }
}

View File

@ -0,0 +1,45 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.scene.gallery.classify;
/**
 * Simple value holder for one recognition result: a category name plus the
 * confidence score the model assigned to it.
 */
public class RecognitionObjectBean {
    // Category label text.
    private String name;
    // Confidence score reported by the model.
    private float score;

    /**
     * @param name  category label.
     * @param score confidence for that label.
     */
    public RecognitionObjectBean(String name, float score) {
        this.name = name;
        this.score = score;
    }

    /** @return the category label. */
    public String getName() {
        return this.name;
    }

    /** @return the confidence score. */
    public float getScore() {
        return this.score;
    }

    /** @param name replacement category label. */
    public void setName(String name) {
        this.name = name;
    }

    /** @param score replacement confidence score. */
    public void setScore(float score) {
        this.score = score;
    }
}

View File

@ -0,0 +1,126 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.scene.gallery.classify;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import java.io.InputStream;
import java.nio.ByteBuffer;
/**
 * Call the MindSpore interface API in the Java layer.
 * Wraps the native JNI entry points (loadModel/runNet/unloadModel) and the
 * asset-backed model loading.
 */
public class TrackingMobile {
    private final static String TAG = "TrackingMobile";

    static {
        try {
            System.loadLibrary("mlkit-label-MS");
            // BUG FIX: the success log previously referred to a library
            // named "libiMindSpore.so" that is never loaded here.
            Log.i(TAG, "load libmlkit-label-MS.so successfully.");
        } catch (UnsatisfiedLinkError e) {
            Log.e(TAG, "UnsatisfiedLinkError " + e.getMessage());
        }
    }

    // The address of the running inference environment (0 = not loaded).
    private long netEnv = 0;

    private final Context mActivity;

    public TrackingMobile(Context activity) {
        this.mActivity = activity;
    }

    /**
     * JNI load model and also create model inference environment.
     *
     * @param modelBuffer Model buffer.
     * @param numThread   The num of thread.
     * @return MindSpore Inference environment address (0 on failure).
     */
    public native long loadModel(ByteBuffer modelBuffer, int numThread);

    /**
     * Running model.
     *
     * @param netEnv Inference environment address.
     * @param img    A picture to be inferred.
     * @return Inference result ("label:score"), or null on failure.
     */
    public native String runNet(long netEnv, Bitmap img);

    /**
     * Unbind model data.
     *
     * @param netEnv Inference environment address.
     * @return Unbound state.
     */
    public native boolean unloadModel(long netEnv);

    /**
     * The C++ side is encapsulated into a method of the MSNetWorks class.
     *
     * @param modelPath Model file location (asset path).
     * @return Load model file status.
     */
    public boolean loadModelFromBuf(String modelPath) {
        ByteBuffer buffer = loadModelFile(modelPath);
        if (buffer == null) { // Reading the asset failed.
            return false;
        }
        netEnv = loadModel(buffer, 2); // numThread's default setting is 2.
        if (netEnv == 0) { // Loading model failed.
            return false;
        }
        return true;
    }

    /**
     * Run MindSpore inference.
     */
    public String MindSpore_runnet(Bitmap img) {
        String ret_str = runNet(netEnv, img);
        return ret_str;
    }

    /**
     * Unload model.
     *
     * @return true
     */
    public boolean unloadModel() {
        unloadModel(netEnv);
        return true;
    }

    /**
     * Load model file stream from the app assets into a direct ByteBuffer.
     *
     * @param modelPath Model file path.
     * @return Model ByteBuffer, or null if the asset cannot be read.
     */
    public ByteBuffer loadModelFile(String modelPath) {
        // BUG FIX: the asset InputStream was never closed (leak), and a
        // single read() call may return fewer bytes than available();
        // read in a loop until the buffer is full.
        try (InputStream is = mActivity.getAssets().open(modelPath)) {
            byte[] bytes = new byte[is.available()];
            int offset = 0;
            while (offset < bytes.length) {
                int read = is.read(bytes, offset, bytes.length - offset);
                if (read < 0) { // Unexpected EOF; keep what we have.
                    break;
                }
                offset += read;
            }
            return ByteBuffer.allocateDirect(bytes.length).put(bytes);
        } catch (Exception e) {
            Log.d("loadModelFile", " Exception occur. ");
            Log.e(TAG, Log.getStackTraceString(e));
        }
        return null;
    }
}

View File

@ -0,0 +1,73 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.scene.widget;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;
/**
 * A {@link TextureView} that can be constrained to a given aspect ratio:
 * once a ratio is set, onMeasure picks the largest size matching it that
 * fits inside the measured bounds.
 */
public class AutoFitTextureView extends TextureView {
    // Relative aspect-ratio terms; 0/0 means "no ratio set, fill the bounds".
    private int mRatioWidth = 0;
    private int mRatioHeight = 0;

    public AutoFitTextureView(Context context) {
        this(context, null);
    }

    public AutoFitTextureView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    /**
     * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
     * calculated from the parameters. Note that the actual sizes of parameters don't matter, that
     * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
     *
     * @param width Relative horizontal size
     * @param height Relative vertical size
     */
    public void setAspectRatio(int width, int height) {
        if (width < 0 || height < 0) {
            throw new IllegalArgumentException("Size cannot be negative.");
        }
        mRatioWidth = width;
        mRatioHeight = height;
        requestLayout();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        final int measuredW = MeasureSpec.getSize(widthMeasureSpec);
        final int measuredH = MeasureSpec.getSize(heightMeasureSpec);
        // No ratio configured yet: just fill the measured bounds.
        if (mRatioWidth == 0 || mRatioHeight == 0) {
            setMeasuredDimension(measuredW, measuredH);
            return;
        }
        // Shrink whichever dimension overshoots the requested ratio.
        final boolean widthTooWide = measuredW > measuredH * mRatioWidth / mRatioHeight;
        if (widthTooWide) {
            setMeasuredDimension(measuredW, measuredW * mRatioHeight / mRatioWidth);
        } else {
            setMeasuredDimension(measuredH * mRatioWidth / mRatioHeight, measuredH);
        }
    }
}

View File

@ -0,0 +1,184 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.scene.widget;
import android.Manifest;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.UiThread;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.scene.R;
import com.mindspore.scene.gallery.classify.RecognitionObjectBean;
/**
* The main interface of camera preview.
* Using Camera 2 API.
*/
public class CameraActivity extends AppCompatActivity {
    private static final String TAG = "CameraActivity";
    // Key under which the support library saves fragment state; removed on rebuild.
    private static final String BUNDLE_FRAGMENTS_KEY = "android:support:fragments";
    private static final int PERMISSIONS_REQUEST = 1;
    private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA;
    // Container that shows either the recognition result rows or a hint text.
    private LinearLayout bottomLayout;
    // Last parsed recognition result; may hold a stale value if parsing fails later.
    private RecognitionObjectBean recognitionObjectBean;

    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        Log.d(TAG, "onCreate");
        if (savedInstanceState != null && this.clearFragmentsTag()) {
            // Clear the state of the fragment when rebuilding.
            savedInstanceState.remove(BUNDLE_FRAGMENTS_KEY);
        }
        // NOTE(review): deliberately passes null so the framework does not restore
        // saved fragments; the camera fragment is always recreated below.
        super.onCreate(null);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_camera);
        if (hasPermission()) {
            setFragment();
        } else {
            requestPermission();
        }
        bottomLayout = findViewById(R.id.layout_bottom_content);
    }

    @Override
    public void onRequestPermissionsResult(final int requestCode, final String[] permissions,
                                           final int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == PERMISSIONS_REQUEST) {
            if (allPermissionsGranted(grantResults)) {
                setFragment();
            } else {
                // Keep asking until the camera permission is granted.
                requestPermission();
            }
        }
    }

    // True only when every requested permission was granted.
    private static boolean allPermissionsGranted(final int[] grantResults) {
        for (int result : grantResults) {
            if (result != PackageManager.PERMISSION_GRANTED) {
                return false;
            }
        }
        return true;
    }

    // Runtime permissions exist only on M+; older versions grant at install time.
    private boolean hasPermission() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED;
        } else {
            return true;
        }
    }

    // Shows a rationale toast when required, then requests the camera permission.
    private void requestPermission() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) {
                Toast.makeText(CameraActivity.this, "Camera permission is required for this demo", Toast.LENGTH_LONG)
                        .show();
            }
            requestPermissions(new String[]{PERMISSION_CAMERA}, PERMISSIONS_REQUEST);
        }
    }

    // Installs the camera preview fragment and wires its recognition callback.
    protected void setFragment() {
        CameraFragment cameraFragment = CameraFragment.newInstance(new CameraFragment.RecognitionDataCallBack() {
            @Override
            public void onRecognitionDataCallBack(String result, final String time) {
                dealRecognitionData(result, time);
            }
        });
        getSupportFragmentManager().beginTransaction()
                .replace(R.id.container, cameraFragment)
                .commitAllowingStateLoss();
    }

    // Parses "label:score" into a bean and refreshes the bottom sheet on the UI thread.
    // NOTE(review): Float.valueOf will throw if the text after ':' is not numeric,
    // and a non-matching result leaves the previous bean on screen — confirm intended.
    private void dealRecognitionData(String result, final String time) {
        if (!result.equals("") && result.contains(":")) {
            String[] resultArray = result.split(":");
            recognitionObjectBean = new RecognitionObjectBean(resultArray[0], Float.valueOf(resultArray[1]));
        }
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                showResultsInBottomSheet(time);
            }
        });
    }

    // Renders the latest result (label + score + inference time) or a hint text.
    @UiThread
    protected void showResultsInBottomSheet(String time) {
        bottomLayout.removeAllViews();
        if (recognitionObjectBean != null) {
            HorTextView horTextView = new HorTextView(this);
            horTextView.setLeftTitle(recognitionObjectBean.getName() + ":");
            horTextView.setRightContent(String.format("%.2f", (100 * recognitionObjectBean.getScore())) + "%");
            horTextView.setBottomLineVisible(View.VISIBLE);
            bottomLayout.addView(horTextView);
            HorTextView horTimeView = new HorTextView(this);
            horTimeView.setLeftTitle("Inference Time");
            horTimeView.setRightContent(time);
            horTimeView.setBottomLineVisible(View.INVISIBLE);
            bottomLayout.addView(horTimeView);
        } else {
            TextView textView = new TextView(this);
            textView.setLayoutParams(new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
            textView.setText("Keep moving.");
            textView.setGravity(Gravity.CENTER);
            textView.setTextColor(Color.BLACK);
            textView.setTextSize(30);
            bottomLayout.addView(textView);
        }
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        // Mirror onCreate: do not persist fragment state across recreation.
        if (outState != null && this.clearFragmentsTag()) {
            outState.remove(BUNDLE_FRAGMENTS_KEY);
        }
    }

    // Hook allowing subclasses to keep fragment state; this demo always clears it.
    protected boolean clearFragmentsTag() {
        return true;
    }
}

View File

@ -0,0 +1,823 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.scene.widget;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import com.mindspore.scene.R;
import com.mindspore.scene.gallery.classify.CompareSizesByArea;
import com.mindspore.scene.gallery.classify.TrackingMobile;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
public class CameraFragment extends Fragment {
    private static final String TAG = "CameraFragment";
    // Maps display rotation constants to JPEG orientation degrees.
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();

    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 90);
        ORIENTATIONS.append(Surface.ROTATION_90, 0);
        ORIENTATIONS.append(Surface.ROTATION_180, 270);
        ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }

    // Upper bounds for the chosen preview resolution.
    private static final int MAX_PREVIEW_WIDTH = 1920;
    private static final int MAX_PREVIEW_HEIGHT = 1280;
    // Guards camera open/close so the device is never closed mid-open.
    private Semaphore mCameraOpenCloseLock = new Semaphore(1);
    // Still-capture state machine state, driven by mCaptureCallback.
    private int mState = STATE_PREVIEW;
    private static final int STATE_PREVIEW = 0;
    private static final int STATE_WAITING_LOCK = 1;
    private static final int STATE_WAITING_PRECAPTURE = 2;
    private static final int STATE_WAITING_NON_PRECAPTURE = 3;
    private static final int STATE_PICTURE_TAKEN = 4;
    /**
     * Data interface returned after identification.
     */
    private RecognitionDataCallBack recognitionDataCallBack;
    private AutoFitTextureView mTextureView;
    private TrackingMobile mTrackingMobile;
    private boolean mFlashSupported;
    // Set when the fragment pauses, to stop rescheduling classifyRunnable.
    private boolean isPreBackgroundThreadPause;
    /**
     * HandlerThread and Handler of camera and algorithm.
     */
    private HandlerThread mCameraHandlerThread, mMindsporeHandlerThread;
    private Handler mCameraHandler, mMindsporeHandler;
    private CameraManager mCameraManager;
    private CameraCaptureSession mCaptureSession;
    private CameraDevice mCameraDevice;
    private String mCameraId;
    private ImageReader mImageReader;
    private CaptureRequest.Builder mPreviewRequestBuilder;
    private CaptureRequest mPreviewRequest;
    private File mFile;
    private Size mPreviewSize;
    private int mSensorOrientation;
    private CameraDevice.StateCallback mCameraDeviceStateCallback;

    private CameraFragment(RecognitionDataCallBack recognitionDataCallBack) {
        this.recognitionDataCallBack = recognitionDataCallBack;
    }

    /**
     * Factory method (note: not a singleton — each call creates a new fragment).
     *
     * @param recognitionDataCallBack Identify data return interface.
     * @return A new CameraFragment wired to the callback.
     */
    public static CameraFragment newInstance(RecognitionDataCallBack recognitionDataCallBack) {
        return new CameraFragment(recognitionDataCallBack);
    }

    /**
     * Data interface returned after identification.
     */
    public interface RecognitionDataCallBack {
        /**
         * Data interface returned after identification.
         *
         * @param result Recognition result
         * @param time Response time
         */
        void onRecognitionDataCallBack(String result, String time);
    }
    // Inflates the camera preview layout.
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_camera, container, false);
    }
    // Grabs the TextureView used both for preview display and frame capture.
    @Override
    public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
        mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
    }
    // Loads the MindSpore Lite model from assets once the activity exists.
    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        mTrackingMobile = new TrackingMobile(getActivity());
        String modelPath = "model/mobilenetv2.ms";
        boolean ret = mTrackingMobile.loadModelFromBuf(modelPath);
        Log.d(TAG, "Loading model return value: " + ret);
    }
    // Rebuilds worker threads and camera configuration every time we resume;
    // the matching teardown happens in onPause.
    @Override
    public void onResume() {
        super.onResume();
        initChildThread();
        initCameraManager();
        initSelectCamera();
        initHandlerMatchingSize();
        initImageReader();
        initTextureViewListener();
    }
    // Releases the native inference environment when the fragment detaches.
    @Override
    public void onDetach() {
        super.onDetach();
        if (mTrackingMobile != null) {
            boolean ret = mTrackingMobile.unloadModel();
            Log.d(TAG, "Unload model return value: " + ret);
        }
    }
    // Close the camera before stopping the handler threads that serve it.
    @Override
    public void onPause() {
        closeCamera();
        stopBackgroundThread();
        super.onPause();
    }
    // Starts two background threads: one for camera callbacks, one that
    // periodically runs inference (classifyRunnable, every 500 ms).
    private void initChildThread() {
        mCameraHandlerThread = new HandlerThread("CAMERA2");
        mCameraHandlerThread.start();
        mCameraHandler = new Handler(mCameraHandlerThread.getLooper());
        mMindsporeHandlerThread = new HandlerThread("MINDSPORE");
        mMindsporeHandlerThread.start();
        mMindsporeHandler = new Handler(mMindsporeHandlerThread.getLooper());
        mMindsporeHandler.postDelayed(classifyRunnable, 500);
    }
    /**
     * Periodic inference task: snapshots the preview TextureView as a Bitmap,
     * runs the model on it, reports the result and elapsed time through the
     * callback, then reschedules itself (every 500 ms) until paused.
     */
    private Runnable classifyRunnable = new Runnable() {
        public void run() {
            synchronized (CameraFragment.this) {
                Bitmap bitmap = mTextureView.getBitmap();
                if (bitmap != null) {
                    long startTime = System.currentTimeMillis();
                    // The current bitmap performs the sending request identification operation
                    String ret = mTrackingMobile.MindSpore_runnet(bitmap);
                    long endTime = System.currentTimeMillis();
                    if (recognitionDataCallBack != null) {
                        // Interface returns data
                        recognitionDataCallBack.onRecognitionDataCallBack(ret, (endTime - startTime) + "ms ");
                    }
                    // Free the snapshot immediately; a new one is taken next cycle.
                    if (!bitmap.isRecycled()) {
                        bitmap.recycle();
                    }
                }
                // Reschedule unless onPause flagged the thread for shutdown.
                if (mMindsporeHandler != null && !isPreBackgroundThreadPause) {
                    mMindsporeHandler.postDelayed(classifyRunnable, 500);
                }
            }
        }
    };
    // Obtains the system Camera2 service manager.
    private void initCameraManager() {
        mCameraManager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
    }
    // Picks the first back-facing camera and records its sensor orientation.
    // NOTE(review): mSensorOrientation is overwritten for every camera iterated,
    // so it ends up belonging to the last camera inspected, not necessarily the
    // selected one — confirm intended.
    private void initSelectCamera() {
        try {
            String[] cameraIdArray = mCameraManager.getCameraIdList();
            for (String itemId : cameraIdArray) {
                CameraCharacteristics itemCharacteristics = mCameraManager.getCameraCharacteristics(itemId);
                mSensorOrientation = itemCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
                Integer facing = itemCharacteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing == CameraCharacteristics.LENS_FACING_BACK) {
                    mCameraId = itemId;
                    break;
                }
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
        if (mCameraId == null) {
            Toast.makeText(getActivity(), getString(R.string.camera_error), Toast.LENGTH_SHORT).show();
        }
    }
    // Output-size capabilities of the selected camera, cached for openCamera().
    private StreamConfigurationMap streamConfigurationMap;
    // Largest JPEG output size; used as the target aspect ratio.
    private Size largest;

    /**
     * Calculate the camera resolution suitable for the current screen resolution.
     */
    private void initHandlerMatchingSize() {
        try {
            CameraCharacteristics cameraCharacteristics = mCameraManager.getCameraCharacteristics(mCameraId);
            streamConfigurationMap = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            Size[] sizes = streamConfigurationMap.getOutputSizes(ImageFormat.JPEG);
            largest = Collections.max(Arrays.asList(sizes), new CompareSizesByArea());
            // FLASH_INFO_AVAILABLE may be null on devices without a flash unit.
            Boolean available = cameraCharacteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
    /**
     * Initialize the still-capture ImageReader (640x480 JPEG).
     * Frames are acquired and immediately closed to keep the queue drained;
     * recognition itself works on TextureView bitmaps, not on these images.
     */
    private void initImageReader() {
        final int W = 640;
        final int H = 480;
        mImageReader = ImageReader.newInstance(W, H, ImageFormat.JPEG, 30);
        mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                mFile = new File(getActivity().getExternalFilesDir(null), System.currentTimeMillis() + ".jpg");
                // Get the data frame and start the algorithm processing.
                try {
                    // Get the next image from the ImageReader queue.
                    Image image = reader.acquireNextImage();
                    image.close();
                } catch (Exception e) {
                    Log.e(TAG, "onImageAvailable: " + e.toString());
                }
            }
        }, mCameraHandler);
    }
    /**
     * Opens the camera once the TextureView surface is ready: immediately if it
     * is already available, otherwise via a SurfaceTextureListener.
     */
    private void initTextureViewListener() {
        if (mTextureView.isAvailable()) {
            openCamera(mTextureView.getWidth(), mTextureView.getHeight());
            Log.d(TAG, "isAvailable: " + mTextureView.getWidth() + "--" + mTextureView.getHeight());
        } else {
            mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
                @Override
                public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
                    openCamera(width, height);
                }

                @Override
                public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
                    // Re-map the preview transform to the new view size.
                    configureTransform(width, height);
                }

                @Override
                public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surfaceTexture) {
                    return true;
                }

                @Override
                public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surfaceTexture) {
                }
            });
        }
    }
    // Computes the preview size, installs the device state callback, then opens
    // the camera under mCameraOpenCloseLock (released in each callback branch).
    @SuppressLint("MissingPermission")
    private void openCamera(int width, int height) {
        setPreviewSize(width, height, streamConfigurationMap, largest);
        configureTransform(width, height);
        mCameraDeviceStateCallback = new CameraDevice.StateCallback() {
            @Override
            public void onOpened(@NonNull CameraDevice cameraDevice) {
                if (cameraDevice != null) {
                    mCameraOpenCloseLock.release();
                    mCameraDevice = cameraDevice;
                    createCameraPreviewSession();
                }
            }

            @Override
            public void onDisconnected(@NonNull CameraDevice cameraDevice) {
                if (cameraDevice != null) {
                    mCameraOpenCloseLock.release();
                    cameraDevice.close();
                    mCameraDevice = null;
                }
            }

            @Override
            public void onError(@NonNull CameraDevice cameraDevice, int i) {
                // Unrecoverable camera error: release everything and finish.
                if (cameraDevice != null) {
                    mCameraOpenCloseLock.release();
                    cameraDevice.close();
                    mCameraDevice = null;
                    if (null != getActivity()) {
                        getActivity().finish();
                    }
                }
            }
        };
        try {
            // Bounded wait avoids deadlocking the UI if a previous close hangs.
            if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
                throw new RuntimeException("Time out waiting to lock ic_launcher opening.");
            }
            mCameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mCameraHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock ic_launcher opening.", e);
        }
    }
    /**
     * Open camera preview: builds a repeating preview request targeting the
     * TextureView surface, with the ImageReader surface attached for stills.
     */
    private void createCameraPreviewSession() {
        try {
            SurfaceTexture texture = mTextureView.getSurfaceTexture();
            // Set preview size.
            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            // This is the output Surface we need to start preview.
            Surface surface = new Surface(texture);
            mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewRequestBuilder.addTarget(surface);
            // Here, we create a CameraCaptureSession for camera preview.
            mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                            // The camera is already closed
                            if (null == mCameraDevice) {
                                return;
                            }
                            // When the session is ready, we start displaying the preview.
                            mCaptureSession = cameraCaptureSession;
                            try {
                                // Auto focus should be continuous for camera preview.
                                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                        CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                                // Flash is automatically enabled when necessary.
                                setAutoFlash(mPreviewRequestBuilder);
                                // Finally, we start displaying the camera preview.
                                mPreviewRequest = mPreviewRequestBuilder.build();
                                mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mCameraHandler);
                            } catch (CameraAccessException e) {
                                e.printStackTrace();
                            }
                        }

                        @Override
                        public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                            Toast.makeText(getActivity(), "Failed", Toast.LENGTH_LONG).show();
                        }
                    }, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
    // Drives the still-capture state machine: PREVIEW -> WAITING_LOCK ->
    // (optional WAITING_PRECAPTURE -> WAITING_NON_PRECAPTURE) -> PICTURE_TAKEN.
    private CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
        private void process(CaptureResult result) {
            switch (mState) {
                case STATE_PREVIEW: {
                    // We have nothing to do when the camera preview is working normally.
                    break;
                }
                case STATE_WAITING_LOCK: {
                    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                    if (afState == null) {
                        // Device does not report AF state; capture immediately.
                        captureStillPicture();
                    } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState
                            || CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
                        // CONTROL_AE_STATE can be null on some devices
                        Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                        if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
                            mState = STATE_PICTURE_TAKEN;
                            captureStillPicture();
                        } else {
                            // Exposure not converged yet: trigger a precapture sequence.
                            runPrecaptureSequence();
                        }
                    }
                    break;
                }
                case STATE_WAITING_PRECAPTURE: {
                    // CONTROL_AE_STATE can be null on some devices
                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                    if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE
                            || aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
                        mState = STATE_WAITING_NON_PRECAPTURE;
                    }
                    break;
                }
                case STATE_WAITING_NON_PRECAPTURE: {
                    // CONTROL_AE_STATE can be null on some devices
                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                    if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                        mState = STATE_PICTURE_TAKEN;
                        captureStillPicture();
                    }
                    break;
                }
            }
        }

        @Override
        public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request,
                                        @NonNull CaptureResult partialResult) {
            process(partialResult);
        }

        @Override
        public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request,
                                       @NonNull TotalCaptureResult result) {
            process(result);
        }
    };
    /**
     * Run the precapture sequence for capturing a still image. This method should be called when
     * we get a response in {@link #mCaptureCallback} from STATE_WAITING_LOCK.
     */
    private void runPrecaptureSequence() {
        try {
            // This is how to tell the camera to trigger AE precapture.
            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
            // Tell #mCaptureCallback to wait for the precapture sequence to be set.
            mState = STATE_WAITING_PRECAPTURE;
            mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mCameraHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
    /**
     * Capture a still picture. This method should be called when we get a response in
     * {@link #mCaptureCallback} from both lock states.
     */
    private void captureStillPicture() {
        try {
            final Activity activity = getActivity();
            if (null == activity || null == mCameraDevice) {
                return;
            }
            // This is the CaptureRequest.Builder that we use to take a picture.
            final CaptureRequest.Builder captureBuilder =
                    mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            captureBuilder.addTarget(mImageReader.getSurface());
            // Use the same AE and AF modes as the preview.
            captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            setAutoFlash(captureBuilder);
            // Rotate the JPEG to match the current display orientation.
            int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));
            CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request,
                                               @NonNull TotalCaptureResult result) {
                    showToast("Saved: " + mFile);
                    Log.d(TAG, mFile.toString());
                    unlockFocus();
                }
            };
            // Stop the repeating preview before issuing the one-shot still capture.
            mCaptureSession.stopRepeating();
            mCaptureSession.abortCaptures();
            mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
    /**
     * Retrieves the JPEG orientation from the specified screen rotation.
     *
     * @param rotation The screen rotation.
     * @return The JPEG orientation (one of 0, 90, 180, and 270)
     */
    private int getOrientation(int rotation) {
        // Combine display rotation with the sensor mounting orientation (mod 360).
        return (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360;
    }
    /**
     * Unlock the focus. This method should be called when still image capture sequence is
     * finished.
     */
    private void unlockFocus() {
        try {
            // Reset the auto-focus trigger
            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
            setAutoFlash(mPreviewRequestBuilder);
            mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mCameraHandler);
            // After this, the camera will go back to the normal state of preview.
            mState = STATE_PREVIEW;
            mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mCameraHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
    // Enables auto-flash on the request, but only when the device has a flash unit.
    private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
        if (mFlashSupported) {
            requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
        }
    }
    // Shows a toast from any thread; no-op when the fragment is detached.
    protected void showToast(final String text) {
        final Activity activity = getActivity();
        if (activity != null) {
            activity.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
                }
            });
        }
    }
    /**
     * Configures the necessary {@link Matrix} transformation to `mTextureView`.
     * This method should be called after the camera preview size is determined in
     * setPreviewSize and also the size of `mTextureView` is fixed.
     *
     * @param viewWidth The width of `mTextureView`
     * @param viewHeight The height of `mTextureView`
     */
    protected void configureTransform(int viewWidth, int viewHeight) {
        Activity activity = getActivity();
        if (null == mTextureView || null == mPreviewSize || null == activity) {
            return;
        }
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
        // Buffer rect is swapped (h, w): the sensor is rotated relative to the display.
        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
        if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
            // Landscape: center the buffer on the view, scale to cover, then rotate.
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
            float scale =
                    Math.max((float) viewHeight / mPreviewSize.getHeight(), (float) viewWidth / mPreviewSize.getWidth());
            matrix.postScale(scale, scale, centerX, centerY);
            matrix.postRotate(90 * (rotation - 2), centerX, centerY);
        } else if (Surface.ROTATION_180 == rotation) {
            matrix.postRotate(180, centerX, centerY);
        }
        mTextureView.setTransform(matrix);
    }
    /**
     * Set preview image size and positioning.
     *
     * @param width requested preview width (view coordinates)
     * @param height requested preview height (view coordinates)
     * @param map StreamConfigurationMap, the manager of all output formats and sizes supported by the camera.
     * @param largest The max size
     */
    private void setPreviewSize(int width, int height, StreamConfigurationMap map, Size largest) {
        // Find out if we need to swap dimension to get the preview size relative to sensor coordinate.
        int displayRotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
        Log.d(TAG, "displayRotation: " + displayRotation);
        boolean swappedDimensions = false;
        switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
        }
        Point displaySize = new Point();
        getActivity().getWindowManager().getDefaultDisplay().getSize(displaySize);
        int rotatedPreviewWidth = width;
        int rotatedPreviewHeight = height;
        int maxPreviewWidth = displaySize.x;
        int maxPreviewHeight = displaySize.y;
        if (swappedDimensions) {
            // Sensor is rotated 90/270 relative to the display: swap all dimensions.
            rotatedPreviewWidth = height;
            rotatedPreviewHeight = width;
            maxPreviewWidth = displaySize.y;
            maxPreviewHeight = displaySize.x;
        }
        // Clamp to hardware-safe preview bounds.
        if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
            maxPreviewWidth = MAX_PREVIEW_WIDTH;
        }
        if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
            maxPreviewHeight = MAX_PREVIEW_HEIGHT;
        }
        // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
        // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
        // garbage capture data.
        mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth,
                rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);
        // We fit the aspect ratio of TextureView to the size of preview we picked.
        int orientation = getResources().getConfiguration().orientation; // The orientation is fixed in the manifest.
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
            mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        } else {
            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
        }
    }
/**
 * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that
 * is at least as large as the respective texture view size, and that is at most as large as the
 * respective max size, and whose aspect ratio matches with the specified value. If such size
 * doesn't exist, choose the largest one that is at most as large as the respective max size,
 * and whose aspect ratio matches with the specified value.
 *
 * @param choices           The list of sizes that the camera supports for the intended output class
 * @param textureViewWidth  The width of the texture view relative to sensor coordinate
 * @param textureViewHeight The height of the texture view relative to sensor coordinate
 * @param maxWidth          The maximum width that can be chosen
 * @param maxHeight         The maximum height that can be chosen
 * @param aspectRatio       The aspect ratio
 * @return The optimal {@code Size}, or an arbitrary one if none were big enough
 */
protected Size chooseOptimalSize(Size[] choices, int textureViewWidth, int textureViewHeight, int maxWidth,
                                 int maxHeight, Size aspectRatio) {
    // Collect the supported resolutions that are at least as big as the preview Surface.
    List<Size> bigEnough = new ArrayList<>();
    // Collect the supported resolutions that are smaller than the preview Surface.
    List<Size> notBigEnough = new ArrayList<>();
    int w = aspectRatio.getWidth();
    int h = aspectRatio.getHeight();
    for (Size option : choices) {
        // Integer arithmetic: accepts only sizes whose w:h exactly matches the
        // reference ratio (within integer truncation of width * h / w).
        if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight
                && option.getHeight() == option.getWidth() * h / w) {
            if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) {
                bigEnough.add(option);
            } else {
                notBigEnough.add(option);
            }
        }
    }
    // Prefer the smallest size that is big enough; otherwise fall back to the
    // largest size that is not big enough; otherwise give up and take the first.
    if (bigEnough.size() > 0) {
        return Collections.min(bigEnough, new CompareSizesByArea());
    } else if (notBigEnough.size() > 0) {
        return Collections.max(notBigEnough, new CompareSizesByArea());
    } else {
        Log.e(TAG, "Couldn't find any suitable preview size");
        return choices[0];
    }
}
/**
 * Closes the current {@link CameraDevice}, capture session and image reader.
 * Blocks on {@code mCameraOpenCloseLock} so close never races an in-flight open.
 */
private void closeCamera() {
    try {
        mCameraOpenCloseLock.acquire();
        // Close in dependency order: session first, then device, then reader,
        // so no capture callbacks arrive on an already-closed device.
        if (null != mCaptureSession) {
            mCaptureSession.close();
            mCaptureSession = null;
        }
        if (null != mCameraDevice) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        if (null != mImageReader) {
            mImageReader.close();
            mImageReader = null;
        }
    } catch (InterruptedException e) {
        // Fixed message: a bad find-replace had turned "camera" into "ic_launcher".
        throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
    } finally {
        mCameraOpenCloseLock.release();
    }
}
/**
 * Stops the camera and MindSpore inference background threads, joining each and
 * clearing any pending messages from their handlers.
 */
private void stopBackgroundThread() {
    // Stop the preview/inference loops from posting new work first.
    isPreBackgroundThreadPause = true;
    mCameraHandlerThread.quitSafely();
    mMindsporeHandlerThread.quitSafely();
    try {
        mCameraHandlerThread.join();
        mCameraHandlerThread = null;
        mCameraHandler.removeCallbacksAndMessages(null);
        mCameraHandler = null;
        mMindsporeHandlerThread.join();
        mMindsporeHandlerThread = null;
        mMindsporeHandler.removeCallbacksAndMessages(null);
        mMindsporeHandler = null;
    } catch (InterruptedException e) {
        // Restore the interrupt status instead of swallowing it, so callers
        // (e.g. lifecycle teardown) can still observe the interruption.
        Thread.currentThread().interrupt();
        e.printStackTrace();
    }
}
}

View File

@ -0,0 +1,62 @@
/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.mindspore.scene.widget;

import android.content.Context;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.TextView;

import androidx.annotation.Nullable;

import com.mindspore.scene.R;

/**
 * A horizontal key/value row widget: a title on the left, a content text on the
 * right, and an optional divider line underneath, inflated from
 * {@code R.layout.layout_hor_text_view}.
 */
public class HorTextView extends LinearLayout {

    private TextView tvLeftTitle;
    private TextView tvRightContent;
    private View viewBottomLine;

    public HorTextView(Context context) {
        this(context, null);
    }

    public HorTextView(Context context, @Nullable AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public HorTextView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        // Inflate the row layout into this view and cache the child references.
        LayoutInflater.from(context).inflate(R.layout.layout_hor_text_view, this);
        viewBottomLine = findViewById(R.id.view_bottom_line);
        tvRightContent = findViewById(R.id.tv_right_content);
        tvLeftTitle = findViewById(R.id.tv_left_title);
    }

    /** Sets the text shown on the left side of the row. */
    public void setLeftTitle(String title) {
        tvLeftTitle.setText(title);
    }

    /** Sets the text shown on the right side of the row. */
    public void setRightContent(String content) {
        tvRightContent.setText(content);
    }

    /** Shows or hides the bottom divider; expects a {@link View} visibility constant. */
    public void setBottomLineVisible(int isVisible) {
        viewBottomLine.setVisibility(isVisible);
    }
}

View File

@ -0,0 +1,30 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

View File

@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillColor="#26A69A"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
</vector>

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Rectangle background: white fill, 1dp dark-gray border, 2dp padding on all
     sides. NOTE(review): android:id on a <shape> root is not used by the
     drawable inflater — confirm it can be dropped. -->
<shape xmlns:android="http://schemas.android.com/apk/res/android"
    android:id="@+id/listview_background_shape">
    <stroke
        android:width="1dp"
        android:color="@android:color/darker_gray" />
    <padding
        android:bottom="2dp"
        android:left="2dp"
        android:right="2dp"
        android:top="2dp" />
    <solid android:color="#ffffffff" />
</shape>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- 40dp x 40dp circle (corner radius is half the size) filled with a
     semi-transparent light gray (#82eae5e5). -->
<shape xmlns:android="http://schemas.android.com/apk/res/android">
    <size
        android:width="40dp"
        android:height="40dp" />
    <corners android:radius="20dp" />
    <solid android:color="#82eae5e5" />
</shape>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- 100dp x 40dp pill (20dp rounded corners) filled with a semi-transparent
     dark gray (#784b4b4b). -->
<shape xmlns:android="http://schemas.android.com/apk/res/android">
    <size
        android:width="100dp"
        android:height="40dp" />
    <corners android:radius="20dp" />
    <solid android:color="#784b4b4b" />
</shape>

View File

@ -0,0 +1,43 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Scene-detection screen: a full-screen camera fragment container, a white
     bottom panel for results, and a semi-transparent toolbar overlaid on top. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:background="#00000000">

    <!-- The camera preview fragment is placed into this container at runtime. -->
    <FrameLayout
        android:id="@+id/container"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:background="@android:color/black"
        tools:context="com.mindspore.scene.widget.CameraActivity" />

    <!-- Bottom results panel. It has no static children, so result rows are
         presumably added programmatically — TODO confirm against the activity. -->
    <LinearLayout
        android:id="@+id/layout_bottom_content"
        android:layout_width="match_parent"
        android:layout_height="100dp"
        android:layout_alignParentBottom="true"
        android:background="#ffffff"
        android:gravity="center"
        android:orientation="vertical" />

    <!-- Translucent toolbar with the app logo and title, drawn over the preview. -->
    <androidx.appcompat.widget.Toolbar
        android:id="@+id/toolbar"
        android:layout_width="match_parent"
        android:layout_height="?attr/actionBarSize"
        android:layout_alignParentTop="true"
        android:background="@color/mindspore_semi_transparent">

        <TextView
            android:layout_width="match_parent"
            android:layout_height="wrap_content"
            android:drawableStart="@mipmap/logo"
            android:drawablePadding="5dp"
            android:gravity="center_vertical"
            android:text="MS Scene Detection"
            android:textColor="#ffffff"
            android:textSize="25sp" />
    </androidx.appcompat.widget.Toolbar>
</RelativeLayout>

View File

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Simple test screen showing a single image.
     NOTE(review): the ImageView has no constraint attributes, so ConstraintLayout
     will pin it to the top-left corner — confirm this is intended. -->
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".widget.TestActivity">

    <ImageView
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:id="@+id/img" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Camera preview layout: an aspect-ratio-aware TextureView filling the screen.
     NOTE(review): android:orientation is a no-op on RelativeLayout. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:background="@color/white"
    android:orientation="vertical">

    <com.mindspore.scene.widget.AutoFitTextureView
        android:id="@+id/texture"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />
</RelativeLayout>

View File

@ -0,0 +1,40 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Row layout backing HorTextView: a left title, a right-aligned content text,
     and a thin divider below them.
     NOTE(review): android:orientation is a no-op on RelativeLayout. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical">

    <TextView
        android:id="@+id/tv_left_title"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_marginStart="@dimen/hor_text_view_text_margin_normal"
        android:layout_marginTop="@dimen/hor_text_view_text_margin_small"
        android:textSize="@dimen/hor_text_view_text_size"
        tools:text="person" />

    <!-- layout_toEndOf replaces the legacy layout_toRightOf so this rule is
         RTL-aware, consistent with the Start/End attributes used in this file. -->
    <TextView
        android:gravity="end"
        android:layout_toEndOf="@+id/tv_left_title"
        android:textColor="@color/black"
        android:layout_alignParentEnd="true"
        android:id="@+id/tv_right_content"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_marginEnd="@dimen/hor_text_view_text_margin_normal"
        android:layout_marginStart="@dimen/hor_text_view_text_margin_normal"
        android:layout_marginTop="@dimen/hor_text_view_text_margin_small"
        android:textSize="@dimen/hor_text_view_text_size"
        tools:text="12.5" />

    <!-- Hairline divider spanning from the title's start edge to the content's
         end edge; toggled via HorTextView.setBottomLineVisible(). -->
    <View
        android:id="@+id/view_bottom_line"
        android:layout_marginTop="@dimen/hor_text_view_text_margin_small"
        android:layout_alignStart="@+id/tv_left_title"
        android:layout_alignEnd="@+id/tv_right_content"
        android:background="@color/gray"
        android:layout_below="@+id/tv_left_title"
        android:layout_height="0.5dp"
        android:layout_width="match_parent" />
</RelativeLayout>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Adaptive launcher icon (API 26+): solid color background layer plus the
     foreground artwork mipmap. -->
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
    <background android:drawable="@color/ic_launcher_background" />
    <foreground android:drawable="@mipmap/ic_launcher_foreground" />
</adaptive-icon>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Round variant of the adaptive launcher icon; same layers as ic_launcher,
     masked by the launcher's round shape. -->
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
    <background android:drawable="@color/ic_launcher_background" />
    <foreground android:drawable="@mipmap/ic_launcher_foreground" />
</adaptive-icon>

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 16 KiB

View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- App color palette. colorPrimary/colorPrimaryDark/colorAccent feed AppTheme;
     mindspore_semi_transparent is the toolbar scrim in the camera layout. -->
<resources>
    <!-- Material theme colors referenced by styles.xml. -->
    <color name="colorPrimary">#6200EE</color>
    <color name="colorPrimaryDark">#3700B3</color>
    <color name="colorAccent">#03DAC5</color>
    <!-- MindSpore brand colors. -->
    <color name="mindspore_color_primary">#00D4EF</color>
    <color name="mindspore_color_primary_dark">#03A9F4</color>
    <color name="mindspore_color_accent">#769DDA</color>
    <color name="mindspore_semi_transparent">#66000000</color>
    <!-- Basic named colors used by layouts. -->
    <color name="white">#ffffff</color>
    <color name="black">#000000</color>
    <color name="gray">#A69D9D</color>
</resources>

View File

@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Shared dimensions: bottom-sheet shape metrics and the margins/text size
     used by layout_hor_text_view. -->
<resources>
    <dimen name="ms_bottom_sheet_corner_radius">15dp</dimen>
    <dimen name="ms_bottom_sheet_top_padding">8dp</dimen>
    <dimen name="hor_text_view_text_margin_normal">15dp</dimen>
    <dimen name="hor_text_view_text_margin_small">6dp</dimen>
    <dimen name="hor_text_view_text_size">20sp</dimen>
</resources>

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Background color layer of the adaptive launcher icon. -->
<resources>
    <color name="ic_launcher_background">#FFFFFF</color>
</resources>

View File

@ -0,0 +1,30 @@
<resources>
    <string name="app_name">SceneDetection</string>
    <!-- "Settings" label (Chinese UI string, kept as authored). -->
    <string name="action_settings">设置</string>
    <string name="request_permission">This sample needs camera permission.</string>
    <string name="camera_error">This device doesn\'t support Camera2 API.</string>
    <!-- Note that the camera pipeline is based on Google's Camera2 demo. -->
    <string name="intro_message">使用的google的camera2demo.</string>
    <string name="appwidget_text">EXAMPLE</string>
    <string name="add_widget">Add widget</string>
    <!-- The ms_ic_* entries were carried over from the image-classification
         template. Fixes: the app label still read "TFL Classify" (a TensorFlow
         Lite leftover) — renamed to "MS Classify"; corrected the grammar of the
         GPU quantization error message. -->
    <string name="ms_ic_app_name" translation_description="Image Classification demo app [CHAR_LIMIT=40]">MS Classify</string>
    <string name="ms_ic_camera_error" translation_description="Error regarding camera support[CHAR_LIMIT=40]">This device doesn\'t support Camera2 API.</string>
    <string name="ms_ic_gpu_quant_error" translation_description="Error regarding GPU support for Quant models[CHAR_LIMIT=60]">GPU does not yet support quantized models.</string>
    <string name="ms_ic_model" translatable="false">Model:</string>
    <string-array name="ms_ic_models" translatable="false">
        <item>Quantized_EfficientNet</item>
        <item>Float_EfficientNet</item>
        <item>Quantized_MobileNet</item>
        <item>Float_MobileNet</item>
    </string-array>
    <string name="ms_ic_device" translatable="false">Device:</string>
    <string-array name="ms_ic_devices" translatable="false">
        <item>CPU</item>
        <item>GPU</item>
        <item>NNAPI</item>
    </string-array>
</resources>

View File

@ -0,0 +1,10 @@
<resources>
    <!-- Base application theme: light AppCompat theme without an action bar, so
         screens can overlay their own Toolbar (see the camera activity layout). -->
    <style name="AppTheme" parent="Theme.AppCompat.Light.NoActionBar">
        <!-- Customize your theme here. -->
        <item name="colorPrimary">@color/colorPrimary</item>
        <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
        <item name="colorAccent">@color/colorAccent</item>
    </style>
</resources>

View File

@ -0,0 +1,24 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
    repositories {
        google()
        // JCenter has been sunset (read-only since 2021); keep it for artifacts
        // already cached there, but also resolve from Maven Central so the
        // build keeps working as JCenter mirrors drain.
        jcenter()
        mavenCentral()
    }
    dependencies {
        classpath "com.android.tools.build:gradle:4.0.1"

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        google()
        jcenter()
        mavenCentral()
    }
}

// Deletes the root build directory on `gradle clean`.
task clean(type: Delete) {
    delete rootProject.buildDir
}

View File

@ -0,0 +1,20 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true
# Clears the "test only" flag the IDE sets on deployed APKs so they can also be
# installed via `adb install` — NOTE(review): confirm this is still required.
android.injected.testOnly=false

View File

@ -0,0 +1,6 @@
#Tue Jul 28 10:28:05 CST 2020
# Gradle wrapper configuration: downloads the Gradle 6.1.1 "all" distribution
# (binaries plus sources/docs) into the per-user Gradle home.
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-all.zip

View File

@ -0,0 +1,172 @@
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"

View File

@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -0,0 +1,2 @@
// Gradle settings: root project name and the modules included in the build.
rootProject.name = "Scene"
include ':app'