add object detection

This commit is contained in:
gongdaguo 2020-09-15 16:33:11 +08:00
parent 73d4ceeaf7
commit 31b92128cf
73 changed files with 5191 additions and 121 deletions

View File

@ -49,8 +49,6 @@ android {
}
}
packagingOptions{
pickFirst 'lib/arm64-v8a/libminddata-lite.so'
pickFirst 'lib/arm64-v8a/libmindspore-lite.so'
pickFirst 'lib/arm64-v8a/libmlkit-label-MS.so'
}
@ -61,10 +59,6 @@ android {
// Before gradle build.
// To download some necessary libraries.
apply from:'download.gradle'
/*if (!file("libs/arm64-v8a/libmindspore-lite.so").exists() ||
!file("libs/arm64-v8a/libopencv_java4.so").exists()){
apply from:'download.gradle'
}*/

View File

@ -14,6 +14,10 @@ def mindsporeLiteDownloadUrl = "https://download.mindspore.cn/model_zoo/official
def cleantargetMindSporeInclude = "src/main/cpp"
task cleanCmakeCache(type: Delete) {
delete '.cxx/cmake/debug'
delete '.cxx/cmake/release'
}
task downloadModelFile(type: DownloadUrlTask) {
doFirst {
@ -45,7 +49,7 @@ task cleanUnusedmindsporeFiles(type: Delete, dependsOn: ['unzipMindSporeInclude'
}
}
/*
* Using preBuild to download mindspore library, opencv library and model file.
* Using preBuild to download mindspore library and model file.
* Run before gradle build.
*/
if (file("src/main/cpp/${mindsporeLite_Version}/lib/libmindspore-lite.so").exists()){
@ -58,6 +62,7 @@ if (file("src/main/assets/model/mobilenetv2.ms").exists()){
downloadModelFile.enabled = false
}
preBuild.dependsOn cleanCmakeCache
preBuild.dependsOn downloadModelFile
preBuild.dependsOn downloadMindSporeLibrary
preBuild.dependsOn unzipMindSporeInclude

View File

@ -22,14 +22,14 @@
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
MSNetWork::MSNetWork(void) : session(nullptr), model(nullptr) {}
MSNetWork::MSNetWork(void) : session_(nullptr) {}
MSNetWork::~MSNetWork(void) {}
void
MSNetWork::CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx) {
session = mindspore::session::LiteSession::CreateSession(ctx);
if (session == nullptr) {
session_ = mindspore::session::LiteSession::CreateSession(ctx);
if (session_ == nullptr) {
MS_PRINT("Create Session failed.");
return;
}
@ -41,7 +41,7 @@ MSNetWork::CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite:
return;
}
int ret = session->CompileGraph(model);
int ret = session_->CompileGraph(model);
if (ret != mindspore::lite::RET_OK) {
MS_PRINT("CompileGraph failed.");
return;
@ -49,104 +49,7 @@ MSNetWork::CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite:
}
int MSNetWork::ReleaseNets(void) {
delete session;
// delete model;
delete session_;
return 0;
}
const char *MSNetWork::labels_name_map[MSNetWork::RET_CATEGORY_SUM] = {
{"Tortoise"}, {"Container"}, {"Magpie"}, {"Seaturtle"}, {"Football"}, {"Ambulance"}, {"Ladder"},
{"Toothbrush"}, {"Syringe"}, {"Sink"}, {"Toy"}, {"Organ(MusicalInstrument) "}, {"Cassettedeck"},
{"Apple"}, {"Humaneye"}, {"Cosmetics"}, {"Paddle"}, {"Snowman"}, {"Beer"}, {"Chopsticks"},
{"Humanbeard"}, {"Bird"}, {"Parkingmeter"}, {"Trafficlight"}, {"Croissant"}, {"Cucumber"},
{"Radish"}, {"Towel"}, {"Doll"}, {"Skull"}, {"Washingmachine"}, {"Glove"}, {"Tick"}, {"Belt"},
{"Sunglasses"}, {"Banjo"}, {"Cart"}, {"Ball"}, {"Backpack"}, {"Bicycle"}, {"Homeappliance"},
{"Centipede"}, {"Boat"}, {"Surfboard"}, {"Boot"}, {"Headphones"}, {"Hotdog"}, {"Shorts"},
{"Fastfood"}, {"Bus"}, {"Boy "}, {"Screwdriver"}, {"Bicyclewheel"}, {"Barge"}, {"Laptop"},
{"Miniskirt"}, {"Drill(Tool)"}, {"Dress"}, {"Bear"}, {"Waffle"}, {"Pancake"}, {"Brownbear"},
{"Woodpecker"}, {"Bluejay"}, {"Pretzel"}, {"Bagel"}, {"Tower"}, {"Teapot"}, {"Person"},
{"Bowandarrow"}, {"Swimwear"}, {"Beehive"}, {"Brassiere"}, {"Bee"}, {"Bat(Animal)"},
{"Starfish"}, {"Popcorn"}, {"Burrito"}, {"Chainsaw"}, {"Balloon"}, {"Wrench"}, {"Tent"},
{"Vehicleregistrationplate"}, {"Lantern"}, {"Toaster"}, {"Flashlight"}, {"Billboard"},
{"Tiara"}, {"Limousine"}, {"Necklace"}, {"Carnivore"}, {"Scissors"}, {"Stairs"},
{"Computerkeyboard"}, {"Printer"}, {"Trafficsign"}, {"Chair"}, {"Shirt"}, {"Poster"},
{"Cheese"}, {"Sock"}, {"Firehydrant"}, {"Landvehicle"}, {"Earrings"}, {"Tie"}, {"Watercraft"},
{"Cabinetry"}, {"Suitcase"}, {"Muffin"}, {"Bidet"}, {"Snack"}, {"Snowmobile"}, {"Clock"},
{"Medicalequipment"}, {"Cattle"}, {"Cello"}, {"Jetski"}, {"Camel"}, {"Coat"}, {"Suit"},
{"Desk"}, {"Cat"}, {"Bronzesculpture"}, {"Juice"}, {"Gondola"}, {"Beetle"}, {"Cannon"},
{"Computermouse"}, {"Cookie"}, {"Officebuilding"}, {"Fountain"}, {"Coin"}, {"Calculator"},
{"Cocktail"}, {"Computermonitor"}, {"Box"}, {"Stapler"}, {"Christmastree"}, {"Cowboyhat"},
{"Hikingequipment"}, {"Studiocouch"}, {"Drum"}, {"Dessert"}, {"Winerack"}, {"Drink"},
{"Zucchini"}, {"Ladle"}, {"Humanmouth"}, {"DairyProduct"}, {"Dice"}, {"Oven"}, {"Dinosaur"},
{"Ratchet(Device)"}, {"Couch"}, {"Cricketball"}, {"Wintermelon"}, {"Spatula"}, {"Whiteboard"},
{"Pencilsharpener"}, {"Door"}, {"Hat"}, {"Shower"}, {"Eraser"}, {"Fedora"}, {"Guacamole"},
{"Dagger"}, {"Scarf"}, {"Dolphin"}, {"Sombrero"}, {"Tincan"}, {"Mug"}, {"Tap"}, {"Harborseal"},
{"Stretcher"}, {"Canopener"}, {"Goggles"}, {"Humanbody"}, {"Rollerskates"}, {"Coffeecup"},
{"Cuttingboard"}, {"Blender"}, {"Plumbingfixture"}, {"Stopsign"}, {"Officesupplies"},
{"Volleyball(Ball)"}, {"Vase"}, {"Slowcooker"}, {"Wardrobe"}, {"Coffee"}, {"Whisk"},
{"Papertowel"}, {"Personalcare"}, {"Food"}, {"Sunhat"}, {"Treehouse"}, {"Flyingdisc"},
{"Skirt"}, {"Gasstove"}, {"Saltandpeppershakers"}, {"Mechanicalfan"}, {"Facepowder"}, {"Fax"},
{"Fruit"}, {"Frenchfries"}, {"Nightstand"}, {"Barrel"}, {"Kite"}, {"Tart"}, {"Treadmill"},
{"Fox"}, {"Flag"}, {"Frenchhorn"}, {"Windowblind"}, {"Humanfoot"}, {"Golfcart"}, {"Jacket"},
{"Egg(Food)"}, {"Streetlight"}, {"Guitar"}, {"Pillow"}, {"Humanleg"}, {"Isopod"}, {"Grape"},
{"Humanear"}, {"Powerplugsandsockets"}, {"Panda"}, {"Giraffe"}, {"Woman"}, {"Doorhandle"},
{"Rhinoceros"}, {"Bathtub"}, {"Goldfish"}, {"Houseplant"}, {"Goat"}, {"Baseballbat"},
{"Baseballglove"}, {"Mixingbowl"}, {"Marineinvertebrates"}, {"Kitchenutensil"}, {"Lightswitch"},
{"House"}, {"Horse"}, {"Stationarybicycle"}, {"Hammer"}, {"Ceilingfan"}, {"Sofabed"},
{"Adhesivetape "}, {"Harp"}, {"Sandal"}, {"Bicyclehelmet"}, {"Saucer"}, {"Harpsichord"},
{"Humanhair"}, {"Heater"}, {"Harmonica"}, {"Hamster"}, {"Curtain"}, {"Bed"}, {"Kettle"},
{"Fireplace"}, {"Scale"}, {"Drinkingstraw"}, {"Insect"}, {"Hairdryer"}, {"Kitchenware"},
{"Indoorrower"}, {"Invertebrate"}, {"Foodprocessor"}, {"Bookcase"}, {"Refrigerator"},
{"Wood-burningstove"}, {"Punchingbag"}, {"Commonfig"}, {"Cocktailshaker"}, {"Jaguar(Animal)"},
{"Golfball"}, {"Fashionaccessory"}, {"Alarmclock"}, {"Filingcabinet"}, {"Artichoke"}, {"Table"},
{"Tableware"}, {"Kangaroo"}, {"Koala"}, {"Knife"}, {"Bottle"}, {"Bottleopener"}, {"Lynx"},
{"Lavender(Plant)"}, {"Lighthouse"}, {"Dumbbell"}, {"Humanhead"}, {"Bowl"}, {"Humidifier"},
{"Porch"}, {"Lizard"}, {"Billiardtable"}, {"Mammal"}, {"Mouse"}, {"Motorcycle"},
{"Musicalinstrument"}, {"Swimcap"}, {"Fryingpan"}, {"Snowplow"}, {"Bathroomcabinet"},
{"Missile"}, {"Bust"}, {"Man"}, {"Waffleiron"}, {"Milk"}, {"Ringbinder"}, {"Plate"},
{"Mobilephone"}, {"Bakedgoods"}, {"Mushroom"}, {"Crutch"}, {"Pitcher(Container)"}, {"Mirror"},
{"Personalflotationdevice"}, {"Tabletennisracket"}, {"Pencilcase"}, {"Musicalkeyboard"},
{"Scoreboard"}, {"Briefcase"}, {"Kitchenknife"}, {"Nail(Construction)"}, {"Tennisball"},
{"Plasticbag"}, {"Oboe"}, {"Chestofdrawers"}, {"Ostrich"}, {"Piano"}, {"Girl"}, {"Plant"},
{"Potato"}, {"Hairspray"}, {"Sportsequipment"}, {"Pasta"}, {"Penguin"}, {"Pumpkin"}, {"Pear"},
{"Infantbed"}, {"Polarbear"}, {"Mixer"}, {"Cupboard"}, {"Jacuzzi"}, {"Pizza"}, {"Digitalclock"},
{"Pig"}, {"Reptile"}, {"Rifle"}, {"Lipstick"}, {"Skateboard"}, {"Raven"}, {"Highheels"},
{"Redpanda"}, {"Rose"}, {"Rabbit"}, {"Sculpture"}, {"Saxophone"}, {"Shotgun"}, {"Seafood"},
{"Submarinesandwich"}, {"Snowboard"}, {"Sword"}, {"Pictureframe"}, {"Sushi"}, {"Loveseat"},
{"Ski"}, {"Squirrel"}, {"Tripod"}, {"Stethoscope"}, {"Submarine"}, {"Scorpion"}, {"Segway"},
{"Trainingbench"}, {"Snake"}, {"Coffeetable"}, {"Skyscraper"}, {"Sheep"}, {"Television"},
{"Trombone"}, {"Tea"}, {"Tank"}, {"Taco"}, {"Telephone"}, {"Torch"}, {"Tiger"}, {"Strawberry"},
{"Trumpet"}, {"Tree"}, {"Tomato"}, {"Train"}, {"Tool"}, {"Picnicbasket"}, {"Cookingspray"},
{"Trousers"}, {"Bowlingequipment"}, {"Footballhelmet"}, {"Truck"}, {"Measuringcup"},
{"Coffeemaker"}, {"Violin"}, {"Vehicle"}, {"Handbag"}, {"Papercutter"}, {"Wine"}, {"Weapon"},
{"Wheel"}, {"Worm"}, {"Wok"}, {"Whale"}, {"Zebra"}, {"Autopart"}, {"Jug"}, {"Pizzacutter"},
{"Cream"}, {"Monkey"}, {"Lion"}, {"Bread"}, {"Platter"}, {"Chicken"}, {"Eagle"}, {"Helicopter"},
{"Owl"}, {"Duck"}, {"Turtle"}, {"Hippopotamus"}, {"Crocodile"}, {"Toilet"}, {"Toiletpaper"},
{"Squid"}, {"Clothing"}, {"Footwear"}, {"Lemon"}, {"Spider"}, {"Deer"}, {"Frog"}, {"Banana"},
{"Rocket"}, {"Wineglass"}, {"Countertop"}, {"Tabletcomputer"}, {"Wastecontainer"},
{"Swimmingpool"}, {"Dog"}, {"Book"}, {"Elephant"}, {"Shark"}, {"Candle"}, {"Leopard"}, {"Axe"},
{"Handdryer"}, {"Soapdispenser"}, {"Porcupine"}, {"Flower"}, {"Canary"}, {"Cheetah"},
{"Palmtree"}, {"Hamburger"}, {"Maple"}, {"Building"}, {"Fish"}, {"Lobster"},
{"GardenAsparagus"}, {"Furniture"}, {"Hedgehog"}, {"Airplane"}, {"Spoon"}, {"Otter"}, {"Bull"},
{"Oyster"}, {"Horizontalbar"}, {"Conveniencestore"}, {"Bomb"}, {"Bench"}, {"Icecream"},
{"Caterpillar"}, {"Butterfly"}, {"Parachute"}, {"Orange"}, {"Antelope"}, {"Beaker"},
{"Mothsandbutterflies"}, {"Window"}, {"Closet"}, {"Castle"}, {"Jellyfish"}, {"Goose"}, {"Mule"},
{"Swan"}, {"Peach"}, {"Coconut"}, {"Seatbelt"}, {"Raccoon"}, {"Chisel"}, {"Fork"}, {"Lamp"},
{"Camera"}, {"Squash(Plant)"}, {"Racket"}, {"Humanface"}, {"Humanarm"}, {"Vegetable"},
{"Diaper"}, {"Unicycle"}, {"Falcon"}, {"Chime"}, {"Snail"}, {"Shellfish"}, {"Cabbage"},
{"Carrot"}, {"Mango"}, {"Jeans"}, {"Flowerpot"}, {"Pineapple"}, {"Drawer"}, {"Stool"},
{"Envelope"}, {"Cake"}, {"Dragonfly"}, {"Commonsunflower"}, {"Microwaveoven"}, {"Honeycomb"},
{"Marinemammal"}, {"Sealion"}, {"Ladybug"}, {"Shelf"}, {"Watch"}, {"Candy"}, {"Salad"},
{"Parrot"}, {"Handgun"}, {"Sparrow"}, {"Van"}, {"Grinder"}, {"Spicerack"}, {"Lightbulb"},
{"Cordedphone"}, {"Sportsuniform"}, {"Tennisracket"}, {"Wallclock"}, {"Servingtray"},
{"Kitchen&diningroomtable"}, {"Dogbed"}, {"Cakestand"}, {"Catfurniture"}, {"Bathroomaccessory"},
{"Facialtissueholder"}, {"Pressurecooker"}, {"Kitchenappliance"}, {"Tire"}, {"Ruler"},
{"Luggageandbags"}, {"Microphone"}, {"Broccoli"}, {"Umbrella"}, {"Pastry"}, {"Grapefruit"},
{"Band-aid"}, {"Animal"}, {"Bellpepper"}, {"Turkey"}, {"Lily"}, {"Pomegranate"}, {"Doughnut"},
{"Glasses"}, {"Humannose"}, {"Pen"}, {"Ant"}, {"Car"}, {"Aircraft"}, {"Humanhand"}, {"Skunk"},
{"Teddybear"}, {"Watermelon"}, {"Cantaloupe"}, {"Dishwasher"}, {"Flute"}, {"Balancebeam"},
{"Sandwich"}, {"Shrimp"}, {"Sewingmachine"}, {"Binoculars"}, {"Raysandskates"}, {"Ipod"},
{"Accordion"}, {"Willow"}, {"Crab"}, {"Crown"}, {"Seahorse"}, {"Perfume"}, {"Alpaca"}, {"Taxi"},
{"Canoe"}, {"Remotecontrol"}, {"Wheelchair"}, {"Rugbyball"}, {"Armadillo"}, {"Maracas"},
{"Helmet"}};

View File

@ -52,9 +52,8 @@ class MSNetWork {
int ReleaseNets(void);
mindspore::session::LiteSession *session;
mindspore::lite::Model *model;
static const int RET_CATEGORY_SUM = 601;
static const char *labels_name_map[RET_CATEGORY_SUM];
mindspore::session::LiteSession * session() const { return session_; }
private:
mindspore::session::LiteSession *session_;
};
#endif

View File

@ -17,6 +17,7 @@
#include <android/bitmap.h>
#include <android/asset_manager_jni.h>
#include <android/log.h>
#include <utility>
#include <cstring>
#include <vector>
#include <string>
@ -34,6 +35,103 @@ using mindspore::dataset::LPixelType;
using mindspore::dataset::LDataType;
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
static const int RET_CATEGORY_SUM = 601;
static const char *labels_name_map[RET_CATEGORY_SUM] = {
{"Tortoise"}, {"Container"}, {"Magpie"}, {"Seaturtle"}, {"Football"}, {"Ambulance"}, {"Ladder"},
{"Toothbrush"}, {"Syringe"}, {"Sink"}, {"Toy"}, {"Organ(MusicalInstrument) "}, {"Cassettedeck"},
{"Apple"}, {"Humaneye"}, {"Cosmetics"}, {"Paddle"}, {"Snowman"}, {"Beer"}, {"Chopsticks"},
{"Humanbeard"}, {"Bird"}, {"Parkingmeter"}, {"Trafficlight"}, {"Croissant"}, {"Cucumber"},
{"Radish"}, {"Towel"}, {"Doll"}, {"Skull"}, {"Washingmachine"}, {"Glove"}, {"Tick"}, {"Belt"},
{"Sunglasses"}, {"Banjo"}, {"Cart"}, {"Ball"}, {"Backpack"}, {"Bicycle"}, {"Homeappliance"},
{"Centipede"}, {"Boat"}, {"Surfboard"}, {"Boot"}, {"Headphones"}, {"Hotdog"}, {"Shorts"},
{"Fastfood"}, {"Bus"}, {"Boy "}, {"Screwdriver"}, {"Bicyclewheel"}, {"Barge"}, {"Laptop"},
{"Miniskirt"}, {"Drill(Tool)"}, {"Dress"}, {"Bear"}, {"Waffle"}, {"Pancake"}, {"Brownbear"},
{"Woodpecker"}, {"Bluejay"}, {"Pretzel"}, {"Bagel"}, {"Tower"}, {"Teapot"}, {"Person"},
{"Bowandarrow"}, {"Swimwear"}, {"Beehive"}, {"Brassiere"}, {"Bee"}, {"Bat(Animal)"},
{"Starfish"}, {"Popcorn"}, {"Burrito"}, {"Chainsaw"}, {"Balloon"}, {"Wrench"}, {"Tent"},
{"Vehicleregistrationplate"}, {"Lantern"}, {"Toaster"}, {"Flashlight"}, {"Billboard"},
{"Tiara"}, {"Limousine"}, {"Necklace"}, {"Carnivore"}, {"Scissors"}, {"Stairs"},
{"Computerkeyboard"}, {"Printer"}, {"Trafficsign"}, {"Chair"}, {"Shirt"}, {"Poster"},
{"Cheese"}, {"Sock"}, {"Firehydrant"}, {"Landvehicle"}, {"Earrings"}, {"Tie"}, {"Watercraft"},
{"Cabinetry"}, {"Suitcase"}, {"Muffin"}, {"Bidet"}, {"Snack"}, {"Snowmobile"}, {"Clock"},
{"Medicalequipment"}, {"Cattle"}, {"Cello"}, {"Jetski"}, {"Camel"}, {"Coat"}, {"Suit"},
{"Desk"}, {"Cat"}, {"Bronzesculpture"}, {"Juice"}, {"Gondola"}, {"Beetle"}, {"Cannon"},
{"Computermouse"}, {"Cookie"}, {"Officebuilding"}, {"Fountain"}, {"Coin"}, {"Calculator"},
{"Cocktail"}, {"Computermonitor"}, {"Box"}, {"Stapler"}, {"Christmastree"}, {"Cowboyhat"},
{"Hikingequipment"}, {"Studiocouch"}, {"Drum"}, {"Dessert"}, {"Winerack"}, {"Drink"},
{"Zucchini"}, {"Ladle"}, {"Humanmouth"}, {"DairyProduct"}, {"Dice"}, {"Oven"}, {"Dinosaur"},
{"Ratchet(Device)"}, {"Couch"}, {"Cricketball"}, {"Wintermelon"}, {"Spatula"}, {"Whiteboard"},
{"Pencilsharpener"}, {"Door"}, {"Hat"}, {"Shower"}, {"Eraser"}, {"Fedora"}, {"Guacamole"},
{"Dagger"}, {"Scarf"}, {"Dolphin"}, {"Sombrero"}, {"Tincan"}, {"Mug"}, {"Tap"}, {"Harborseal"},
{"Stretcher"}, {"Canopener"}, {"Goggles"}, {"Humanbody"}, {"Rollerskates"}, {"Coffeecup"},
{"Cuttingboard"}, {"Blender"}, {"Plumbingfixture"}, {"Stopsign"}, {"Officesupplies"},
{"Volleyball(Ball)"}, {"Vase"}, {"Slowcooker"}, {"Wardrobe"}, {"Coffee"}, {"Whisk"},
{"Papertowel"}, {"Personalcare"}, {"Food"}, {"Sunhat"}, {"Treehouse"}, {"Flyingdisc"},
{"Skirt"}, {"Gasstove"}, {"Saltandpeppershakers"}, {"Mechanicalfan"}, {"Facepowder"}, {"Fax"},
{"Fruit"}, {"Frenchfries"}, {"Nightstand"}, {"Barrel"}, {"Kite"}, {"Tart"}, {"Treadmill"},
{"Fox"}, {"Flag"}, {"Frenchhorn"}, {"Windowblind"}, {"Humanfoot"}, {"Golfcart"}, {"Jacket"},
{"Egg(Food)"}, {"Streetlight"}, {"Guitar"}, {"Pillow"}, {"Humanleg"}, {"Isopod"}, {"Grape"},
{"Humanear"}, {"Powerplugsandsockets"}, {"Panda"}, {"Giraffe"}, {"Woman"}, {"Doorhandle"},
{"Rhinoceros"}, {"Bathtub"}, {"Goldfish"}, {"Houseplant"}, {"Goat"}, {"Baseballbat"},
{"Baseballglove"}, {"Mixingbowl"}, {"Marineinvertebrates"}, {"Kitchenutensil"}, {"Lightswitch"},
{"House"}, {"Horse"}, {"Stationarybicycle"}, {"Hammer"}, {"Ceilingfan"}, {"Sofabed"},
{"Adhesivetape "}, {"Harp"}, {"Sandal"}, {"Bicyclehelmet"}, {"Saucer"}, {"Harpsichord"},
{"Humanhair"}, {"Heater"}, {"Harmonica"}, {"Hamster"}, {"Curtain"}, {"Bed"}, {"Kettle"},
{"Fireplace"}, {"Scale"}, {"Drinkingstraw"}, {"Insect"}, {"Hairdryer"}, {"Kitchenware"},
{"Indoorrower"}, {"Invertebrate"}, {"Foodprocessor"}, {"Bookcase"}, {"Refrigerator"},
{"Wood-burningstove"}, {"Punchingbag"}, {"Commonfig"}, {"Cocktailshaker"}, {"Jaguar(Animal)"},
{"Golfball"}, {"Fashionaccessory"}, {"Alarmclock"}, {"Filingcabinet"}, {"Artichoke"}, {"Table"},
{"Tableware"}, {"Kangaroo"}, {"Koala"}, {"Knife"}, {"Bottle"}, {"Bottleopener"}, {"Lynx"},
{"Lavender(Plant)"}, {"Lighthouse"}, {"Dumbbell"}, {"Humanhead"}, {"Bowl"}, {"Humidifier"},
{"Porch"}, {"Lizard"}, {"Billiardtable"}, {"Mammal"}, {"Mouse"}, {"Motorcycle"},
{"Musicalinstrument"}, {"Swimcap"}, {"Fryingpan"}, {"Snowplow"}, {"Bathroomcabinet"},
{"Missile"}, {"Bust"}, {"Man"}, {"Waffleiron"}, {"Milk"}, {"Ringbinder"}, {"Plate"},
{"Mobilephone"}, {"Bakedgoods"}, {"Mushroom"}, {"Crutch"}, {"Pitcher(Container)"}, {"Mirror"},
{"Personalflotationdevice"}, {"Tabletennisracket"}, {"Pencilcase"}, {"Musicalkeyboard"},
{"Scoreboard"}, {"Briefcase"}, {"Kitchenknife"}, {"Nail(Construction)"}, {"Tennisball"},
{"Plasticbag"}, {"Oboe"}, {"Chestofdrawers"}, {"Ostrich"}, {"Piano"}, {"Girl"}, {"Plant"},
{"Potato"}, {"Hairspray"}, {"Sportsequipment"}, {"Pasta"}, {"Penguin"}, {"Pumpkin"}, {"Pear"},
{"Infantbed"}, {"Polarbear"}, {"Mixer"}, {"Cupboard"}, {"Jacuzzi"}, {"Pizza"}, {"Digitalclock"},
{"Pig"}, {"Reptile"}, {"Rifle"}, {"Lipstick"}, {"Skateboard"}, {"Raven"}, {"Highheels"},
{"Redpanda"}, {"Rose"}, {"Rabbit"}, {"Sculpture"}, {"Saxophone"}, {"Shotgun"}, {"Seafood"},
{"Submarinesandwich"}, {"Snowboard"}, {"Sword"}, {"Pictureframe"}, {"Sushi"}, {"Loveseat"},
{"Ski"}, {"Squirrel"}, {"Tripod"}, {"Stethoscope"}, {"Submarine"}, {"Scorpion"}, {"Segway"},
{"Trainingbench"}, {"Snake"}, {"Coffeetable"}, {"Skyscraper"}, {"Sheep"}, {"Television"},
{"Trombone"}, {"Tea"}, {"Tank"}, {"Taco"}, {"Telephone"}, {"Torch"}, {"Tiger"}, {"Strawberry"},
{"Trumpet"}, {"Tree"}, {"Tomato"}, {"Train"}, {"Tool"}, {"Picnicbasket"}, {"Cookingspray"},
{"Trousers"}, {"Bowlingequipment"}, {"Footballhelmet"}, {"Truck"}, {"Measuringcup"},
{"Coffeemaker"}, {"Violin"}, {"Vehicle"}, {"Handbag"}, {"Papercutter"}, {"Wine"}, {"Weapon"},
{"Wheel"}, {"Worm"}, {"Wok"}, {"Whale"}, {"Zebra"}, {"Autopart"}, {"Jug"}, {"Pizzacutter"},
{"Cream"}, {"Monkey"}, {"Lion"}, {"Bread"}, {"Platter"}, {"Chicken"}, {"Eagle"}, {"Helicopter"},
{"Owl"}, {"Duck"}, {"Turtle"}, {"Hippopotamus"}, {"Crocodile"}, {"Toilet"}, {"Toiletpaper"},
{"Squid"}, {"Clothing"}, {"Footwear"}, {"Lemon"}, {"Spider"}, {"Deer"}, {"Frog"}, {"Banana"},
{"Rocket"}, {"Wineglass"}, {"Countertop"}, {"Tabletcomputer"}, {"Wastecontainer"},
{"Swimmingpool"}, {"Dog"}, {"Book"}, {"Elephant"}, {"Shark"}, {"Candle"}, {"Leopard"}, {"Axe"},
{"Handdryer"}, {"Soapdispenser"}, {"Porcupine"}, {"Flower"}, {"Canary"}, {"Cheetah"},
{"Palmtree"}, {"Hamburger"}, {"Maple"}, {"Building"}, {"Fish"}, {"Lobster"},
{"GardenAsparagus"}, {"Furniture"}, {"Hedgehog"}, {"Airplane"}, {"Spoon"}, {"Otter"}, {"Bull"},
{"Oyster"}, {"Horizontalbar"}, {"Conveniencestore"}, {"Bomb"}, {"Bench"}, {"Icecream"},
{"Caterpillar"}, {"Butterfly"}, {"Parachute"}, {"Orange"}, {"Antelope"}, {"Beaker"},
{"Mothsandbutterflies"}, {"Window"}, {"Closet"}, {"Castle"}, {"Jellyfish"}, {"Goose"}, {"Mule"},
{"Swan"}, {"Peach"}, {"Coconut"}, {"Seatbelt"}, {"Raccoon"}, {"Chisel"}, {"Fork"}, {"Lamp"},
{"Camera"}, {"Squash(Plant)"}, {"Racket"}, {"Humanface"}, {"Humanarm"}, {"Vegetable"},
{"Diaper"}, {"Unicycle"}, {"Falcon"}, {"Chime"}, {"Snail"}, {"Shellfish"}, {"Cabbage"},
{"Carrot"}, {"Mango"}, {"Jeans"}, {"Flowerpot"}, {"Pineapple"}, {"Drawer"}, {"Stool"},
{"Envelope"}, {"Cake"}, {"Dragonfly"}, {"Commonsunflower"}, {"Microwaveoven"}, {"Honeycomb"},
{"Marinemammal"}, {"Sealion"}, {"Ladybug"}, {"Shelf"}, {"Watch"}, {"Candy"}, {"Salad"},
{"Parrot"}, {"Handgun"}, {"Sparrow"}, {"Van"}, {"Grinder"}, {"Spicerack"}, {"Lightbulb"},
{"Cordedphone"}, {"Sportsuniform"}, {"Tennisracket"}, {"Wallclock"}, {"Servingtray"},
{"Kitchen&diningroomtable"}, {"Dogbed"}, {"Cakestand"}, {"Catfurniture"}, {"Bathroomaccessory"},
{"Facialtissueholder"}, {"Pressurecooker"}, {"Kitchenappliance"}, {"Tire"}, {"Ruler"},
{"Luggageandbags"}, {"Microphone"}, {"Broccoli"}, {"Umbrella"}, {"Pastry"}, {"Grapefruit"},
{"Band-aid"}, {"Animal"}, {"Bellpepper"}, {"Turkey"}, {"Lily"}, {"Pomegranate"}, {"Doughnut"},
{"Glasses"}, {"Humannose"}, {"Pen"}, {"Ant"}, {"Car"}, {"Aircraft"}, {"Humanhand"}, {"Skunk"},
{"Teddybear"}, {"Watermelon"}, {"Cantaloupe"}, {"Dishwasher"}, {"Flute"}, {"Balancebeam"},
{"Sandwich"}, {"Shrimp"}, {"Sewingmachine"}, {"Binoculars"}, {"Raysandskates"}, {"Ipod"},
{"Accordion"}, {"Willow"}, {"Crab"}, {"Crown"}, {"Seahorse"}, {"Perfume"}, {"Alpaca"}, {"Taxi"},
{"Canoe"}, {"Remotecontrol"}, {"Wheelchair"}, {"Rugbyball"}, {"Armadillo"}, {"Maracas"},
{"Helmet"}};
char *CreateLocalModelBuffer(JNIEnv *env, jobject modelBuffer) {
jbyte *modelAddr = static_cast<jbyte *>(env->GetDirectBufferAddress(modelBuffer));
int modelLen = static_cast<int>(env->GetDirectBufferCapacity(modelBuffer));
@ -186,12 +284,12 @@ Java_com_mindspore_himindsporedemo_gallery_classify_TrackingMobile_loadModel(JNI
context->thread_num_ = num_thread;
labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
delete (context);
delete context;
if (labelNet->session == nullptr) {
if (labelNet->session() == nullptr) {
MS_PRINT("MindSpore create session failed!.");
delete (labelNet);
delete (labelEnv);
delete labelNet;
delete labelEnv;
return (jlong) nullptr;
}
@ -234,7 +332,7 @@ Java_com_mindspore_himindsporedemo_gallery_classify_TrackingMobile_runNet(JNIEnv
}
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
auto mSession = labelNet->session;
auto mSession = labelNet->session();
if (mSession == nullptr) {
MS_PRINT("MindSpore error, Session is a nullptr.");
return NULL;
@ -272,8 +370,8 @@ Java_com_mindspore_himindsporedemo_gallery_classify_TrackingMobile_runNet(JNIEnv
msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *> {name, temp_dat});
}
std::string resultStr = ProcessRunnetResult(MSNetWork::RET_CATEGORY_SUM,
MSNetWork::labels_name_map, msOutputs);
std::string resultStr = ProcessRunnetResult(::RET_CATEGORY_SUM,
::labels_name_map, msOutputs);
const char *resultCharData = resultStr.c_str();
return (env)->NewStringUTF(resultCharData);

View File

@ -17,4 +17,7 @@
#ifndef MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H
#define MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H
#endif // MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H

View File

@ -427,7 +427,6 @@ public class CameraFragment extends Fragment {
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
// mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
// Here, we create a CameraCaptureSession for ic_launcher preview.
mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {

View File

@ -0,0 +1,83 @@
# MindSpore
build/
app/src/main/cpp/mindspore-lite*
app/src/main/assets/model/
mindspore/lib
output
*.ir
mindspore/ccsrc/schema/inner/*
# Cmake files
CMakeFiles/
cmake_install.cmake
CMakeCache.txt
Makefile
cmake-build-debug
# Dynamic libraries
*.so
*.so.*
*.dylib
# Static libraries
*.la
*.lai
*.a
*.lib
# Protocol buffers
*_pb2.py
*.pb.h
*.pb.cc
# Object files
*.o
# Editor
.vscode
.idea/
# Cquery
.cquery_cached_index/
compile_commands.json
# Ctags and cscope
tags
TAGS
CTAGS
GTAGS
GRTAGS
GSYMS
GPATH
cscope.*
# Python files
*__pycache__*
.pytest_cache
# Mac files
*.DS_Store
# Test results
test_temp_summary_event_file/
*.dot
*.dat
*.svg
*.perf
*.info
*.ckpt
*.shp
*.pkl
.clangd
mindspore/version.py
mindspore/default_config.py
mindspore/.commit_id
onnx.proto
mindspore/ccsrc/onnx.proto
# Android
local.properties
.gradle
sdk/build
sdk/.cxx
app/.cxx

View File

@ -0,0 +1,549 @@
# demo_object_detection
The following describes how to use the MindSpore Lite C++ APIs (Android JNIs) and MindSpore Lite object detection models to perform on-device inference, detect the content captured by a device camera, and display the most probable detection result on the application's image preview screen.
## Deploying an Application
The following section describes how to build and execute an on-device object detection task on MindSpore Lite.
### Running Dependencies
- Android Studio 3.2 or later (Android 4.0 or later is recommended.)
- Native development kit (NDK) 21.3
- CMake 3.10.2
- Android software development kit (SDK) 26 or later
- OpenCV 4.0.0 or later (included in the sample code)
### Building and Running
1. Load the sample source code to Android Studio and install the corresponding SDK. (After the SDK version is specified, Android Studio automatically installs the SDK.)
![start_home](images/home.png)
Start Android Studio, click `File > Settings > System Settings > Android SDK`, and select the corresponding SDK. As shown in the following figure, select an SDK and click `OK`. Android Studio automatically installs the SDK.
![start_sdk](images/sdk_management.png)
2. Connect to an Android device and run the object detection application.
Connect to the Android device through a USB cable for debugging. Click `Run 'app'` to run the sample project on your device.
![run_app](images/project_structure.png)
For details about how to connect the Android Studio to a device for debugging, see <https://developer.android.com/studio/run/device>.
3. Continue the installation on the Android device. After the installation is complete, you can view the content captured by a camera and the inference result.
![result](images/object_detection.png)
## Detailed Description of the Sample Program
This object detection sample program on the Android device includes a Java layer and a JNI layer. At the Java layer, the Android Camera 2 API is used to enable a camera to obtain image frames and process images. At the JNI layer, the model inference process is completed.
### Configuring MindSpore Lite Dependencies
In Android Studio, place the compiled `libmindspore-lite.so` library file (which can contain multiple compatible architectures) in the `app/libs/arm64-v8a` (Arm64) or `app/libs/armeabi-v7a` (Arm32) directory of the application project. In the `build.gradle` file of the application, configure the compilation support of CMake, `arm64-v8a`, and `armeabi-v7a`.  
```
android{
defaultConfig{
externalNativeBuild{
cmake{
arguments "-DANDROID_STL=c++_shared"
}
}
ndk{
abiFilters'armeabi-v7a', 'arm64-v8a'
}
}
}
```
Create a link to the `.so` library file in the `app/CMakeLists.txt` file:
```
# Set MindSpore Lite Dependencies.
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/include/MindSpore)
add_library(mindspore-lite SHARED IMPORTED )
set_target_properties(mindspore-lite PROPERTIES
IMPORTED_LOCATION "${CMAKE_SOURCE_DIR}/libs/libmindspore-lite.so")
# Link target library.
target_link_libraries(
...
mindspore-lite
minddata-lite
...
)
```
In this example, the `download.gradle` file is configured to automatically download the library file and place it in the `app/libs/arm64-v8a` directory.
Note: if the automatic download fails, please manually download the relevant library files and put them in the corresponding location.
libmindspore-lite.so [libmindspore-lite.so]( https://download.mindspore.cn/model_zoo/official/lite/lib/mindspore%20version%200.7/libmindspore-lite.so)
### Downloading and Deploying a Model File
In this example, the `download.gradle` file is configured to automatically download `ssd.ms` and place it in the `app/libs/arm64-v8a` directory.
Note: if the automatic download fails, please manually download the model file and put it in the corresponding location.
ssd.ms [ssd.ms]( https://download.mindspore.cn/model_zoo/official/lite/ssd_mobilenetv2_lite/ssd.ms)
### Compiling On-Device Inference Code
Call MindSpore Lite C++ APIs at the JNI layer to implement on-device inference.
The inference code process is as follows. For details about the complete code, see `src/cpp/MindSporeNetnative.cpp`.
1. Load the MindSpore Lite model file and build the context, session, and computational graph for inference.
- Load a model file. Create and configure the context for model inference.
```cpp
// Buffer is the model data passed in by the Java layer
jlong bufferLen = env->GetDirectBufferCapacity(buffer);
char *modelBuffer = CreateLocalModelBuffer(env, buffer);
```
- Create a session.
```cpp
void **labelEnv = new void *;
MSNetWork *labelNet = new MSNetWork;
*labelEnv = labelNet;
// Create context.
lite::Context *context = new lite::Context;
context->device_ctx_.type = lite::DT_CPU;
context->thread_num_ = numThread; //Specify the number of threads to run inference
// Create the mindspore session.
labelNet->CreateSessionMS(modelBuffer, bufferLen, "device label", context);
delete(context);
```
- Load the model file and build a computational graph for inference.
```cpp
void MSNetWork::CreateSessionMS(char* modelBuffer, size_t bufferLen, std::string name, mindspore::lite::Context* ctx)
{
CreateSession(modelBuffer, bufferLen, ctx);
session = mindspore::session::LiteSession::CreateSession(ctx);
auto model = mindspore::lite::Model::Import(modelBuffer, bufferLen);
int ret = session->CompileGraph(model);
}
```
2. Pre-process the image data and convert the input image into the Tensor format of the MindSpore model.
```cpp
// Convert the Bitmap image passed in from the JAVA layer to Mat for OpenCV processing
LiteMat lite_mat_bgr,lite_norm_mat_cut;
if (!BitmapToLiteMat(env, srcBitmap, lite_mat_bgr)){
MS_PRINT("BitmapToLiteMat error");
return NULL;
}
int srcImageWidth = lite_mat_bgr.width_;
int srcImageHeight = lite_mat_bgr.height_;
if(!PreProcessImageData(lite_mat_bgr, lite_norm_mat_cut)){
MS_PRINT("PreProcessImageData error");
return NULL;
}
ImgDims inputDims;
inputDims.channel =lite_norm_mat_cut.channel_;
inputDims.width = lite_norm_mat_cut.width_;
inputDims.height = lite_norm_mat_cut.height_;
// Get the mindsore inference environment which created in loadModel().
void **labelEnv = reinterpret_cast<void **>(netEnv);
if (labelEnv == nullptr) {
MS_PRINT("MindSpore error, labelEnv is a nullptr.");
return NULL;
}
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
auto mSession = labelNet->session;
if (mSession == nullptr) {
MS_PRINT("MindSpore error, Session is a nullptr.");
return NULL;
}
MS_PRINT("MindSpore get session.");
auto msInputs = mSession->GetInputs();
auto inTensor = msInputs.front();
float *dataHWC = reinterpret_cast<float *>(lite_norm_mat_cut.data_ptr_);
// copy input Tensor
memcpy(inTensor->MutableData(), dataHWC,
inputDims.channel * inputDims.width * inputDims.height * sizeof(float));
delete[] (dataHWC);
```
3. The input image must be in NHWC format (1:300:300:3).
```cpp
// Pre-process a BGR image for SSD inference: resize to the 300x300 model
// input, scale pixel values to [0, 1], then apply per-channel mean/std-dev
// normalization. Returns false if any intermediate step fails.
// NOTE(review): the parameter here is a pointer (LiteMat *), but the caller
// shown above passes lite_norm_mat_cut directly — confirm which signature
// the actual source file uses.
bool PreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr) {
bool ret = false;
LiteMat lite_mat_resize;
LiteMat &lite_norm_mat_cut = *lite_norm_mat_ptr;
// Model expects a fixed 300x300 spatial input.
ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 300, 300);
if (!ret) {
MS_PRINT("ResizeBilinear error");
return false;
}
LiteMat lite_mat_convert_float;
// Convert uint8 pixels to float and scale into [0, 1].
ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0);
if (!ret) {
MS_PRINT("ConvertTo error");
return false;
}
// Per-channel normalization constants (ImageNet-style mean/std — TODO confirm
// they match the values the model was trained with).
float means[3] = {0.485, 0.456, 0.406};
float vars[3] = {1.0 / 0.229, 1.0 / 0.224, 1.0 / 0.225};
SubStractMeanNormalize(lite_mat_convert_float, lite_norm_mat_cut, means, vars);
return true;
}
```
4. Perform inference on the input tensor based on the model, obtain the output tensor, and perform post-processing.
Perform graph execution and on-device inference.
```cpp
// After the model and image tensor data is loaded, run inference.
auto status = mSession->RunGraph();
```
Obtain the output data.
```cpp
auto names = mSession->GetOutputTensorNames();
typedef std::unordered_map<std::string,
std::vector<mindspore::tensor::MSTensor *>> Msout;
std::unordered_map<std::string,
mindspore::tensor::MSTensor *> msOutputs;
for (const auto &name : names) {
auto temp_dat =mSession->GetOutputByTensorName(name);
msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *> {name, temp_dat});
}
std::string retStr = ProcessRunnetResult(msOutputs, ret);
```
The model outputs the object category scores (1:1917:81) and the object detection location offsets (1:1917:4). The object locations can be calculated from the location offsets in the getDefaultBoxes function.
```cpp
void SSDModelUtil::getDefaultBoxes() {
float fk[6] = {0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
std::vector<struct WHBox> all_sizes;
struct Product mProductData[19 * 19] = {0};
for (int i = 0; i < 6; i++) {
fk[i] = config.model_input_height / config.steps[i];
}
float scale_rate =
(config.max_scale - config.min_scale) / (sizeof(config.num_default) / sizeof(int) - 1);
float scales[7] = {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0};
for (int i = 0; i < sizeof(config.num_default) / sizeof(int); i++) {
scales[i] = config.min_scale + scale_rate * i;
}
for (int idex = 0; idex < sizeof(config.feature_size) / sizeof(int); idex++) {
float sk1 = scales[idex];
float sk2 = scales[idex + 1];
float sk3 = sqrt(sk1 * sk2);
struct WHBox tempWHBox;
all_sizes.clear();
if (idex == 0) {
float w = sk1 * sqrt(2);
float h = sk1 / sqrt(2);
tempWHBox.boxw = 0.1;
tempWHBox.boxh = 0.1;
all_sizes.push_back(tempWHBox);
tempWHBox.boxw = w;
tempWHBox.boxh = h;
all_sizes.push_back(tempWHBox);
tempWHBox.boxw = h;
tempWHBox.boxh = w;
all_sizes.push_back(tempWHBox);
} else {
tempWHBox.boxw = sk1;
tempWHBox.boxh = sk1;
all_sizes.push_back(tempWHBox);
for (int j = 0; j < sizeof(config.aspect_ratios[idex]) / sizeof(int); j++) {
float w = sk1 * sqrt(config.aspect_ratios[idex][j]);
float h = sk1 / sqrt(config.aspect_ratios[idex][j]);
tempWHBox.boxw = w;
tempWHBox.boxh = h;
all_sizes.push_back(tempWHBox);
tempWHBox.boxw = h;
tempWHBox.boxh = w;
all_sizes.push_back(tempWHBox);
}
tempWHBox.boxw = sk3;
tempWHBox.boxh = sk3;
all_sizes.push_back(tempWHBox);
}
for (int i = 0; i < config.feature_size[idex]; i++) {
for (int j = 0; j < config.feature_size[idex]; j++) {
mProductData[i * config.feature_size[idex] + j].x = i;
mProductData[i * config.feature_size[idex] + j].y = j;
}
}
int productLen = config.feature_size[idex] * config.feature_size[idex];
for (int i = 0; i < productLen; i++) {
for (int j = 0; j < all_sizes.size(); j++) {
struct NormalBox tempBox;
float cx = (mProductData[i].y + 0.5) / fk[idex];
float cy = (mProductData[i].x + 0.5) / fk[idex];
tempBox.y = cy;
tempBox.x = cx;
tempBox.h = all_sizes[j].boxh;
tempBox.w = all_sizes[j].boxw;
mDefaultBoxes.push_back(tempBox);
}
}
}
}
```
- The boxes with the highest category scores, together with their locations, are selected by the nonMaximumSuppression function.
```cpp
void SSDModelUtil::nonMaximumSuppression(const YXBoxes *const decoded_boxes,
const float *const scores,
const std::vector<int> &in_indexes,
std::vector<int> &out_indexes, const float nmsThreshold,
const int count, const int max_results) {
int nR = 0; //number of results
std::vector<bool> del(count, false);
for (size_t i = 0; i < in_indexes.size(); i++) {
if (!del[in_indexes[i]]) {
out_indexes.push_back(in_indexes[i]);
if (++nR == max_results) {
break;
}
for (size_t j = i + 1; j < in_indexes.size(); j++) {
const auto boxi = decoded_boxes[in_indexes[i]], boxj = decoded_boxes[in_indexes[j]];
float a[4] = {boxi.xmin, boxi.ymin, boxi.xmax, boxi.ymax};
float b[4] = {boxj.xmin, boxj.ymin, boxj.xmax, boxj.ymax};
if (IOU(a, b) > nmsThreshold) {
del[in_indexes[j]] = true;
}
}
}
}
}
```
- For the targets whose probability is greater than the threshold value, the output rectangle box needs to be restored to the original size after the rectangular box is filtered by NMS algorithm.
```cpp
std::string SSDModelUtil::getDecodeResult(float *branchScores, float *branchBoxData) {
std::string result = "";
NormalBox tmpBox[1917] = {0};
float mScores[1917][81] = {0};
float outBuff[1917][7] = {0};
float scoreWithOneClass[1917] = {0};
int outBoxNum = 0;
YXBoxes decodedBoxes[1917] = {0};
// Copy branch outputs box data to tmpBox.
for (int i = 0; i < 1917; ++i) {
tmpBox[i].y = branchBoxData[i * 4 + 0];
tmpBox[i].x = branchBoxData[i * 4 + 1];
tmpBox[i].h = branchBoxData[i * 4 + 2];
tmpBox[i].w = branchBoxData[i * 4 + 3];
}
// Copy branch outputs score to mScores.
for (int i = 0; i < 1917; ++i) {
for (int j = 0; j < 81; ++j) {
mScores[i][j] = branchScores[i * 81 + j];
}
}
// NMS processing.
ssd_boxes_decode(tmpBox, decodedBoxes);
// const float nms_threshold = 0.6;
const float nms_threshold = 0.3;
for (int i = 1; i < 81; i++) {
std::vector<int> in_indexes;
for (int j = 0; j < 1917; j++) {
scoreWithOneClass[j] = mScores[j][i];
// if (mScores[j][i] > 0.1) {
if (mScores[j][i] > g_thres_map[i]) {
in_indexes.push_back(j);
}
}
if (in_indexes.size() == 0) {
continue;
}
sort(in_indexes.begin(), in_indexes.end(),
[&](int a, int b) { return scoreWithOneClass[a] > scoreWithOneClass[b]; });
std::vector<int> out_indexes;
nonMaximumSuppression(decodedBoxes, scoreWithOneClass, in_indexes, out_indexes,
nms_threshold);
for (int k = 0; k < out_indexes.size(); k++) {
outBuff[outBoxNum][0] = out_indexes[k]; //image id
outBuff[outBoxNum][1] = i; //labelid
outBuff[outBoxNum][2] = scoreWithOneClass[out_indexes[k]]; //scores
outBuff[outBoxNum][3] =
decodedBoxes[out_indexes[k]].xmin * inputImageWidth / 300;
outBuff[outBoxNum][4] =
decodedBoxes[out_indexes[k]].ymin * inputImageHeight / 300;
outBuff[outBoxNum][5] =
decodedBoxes[out_indexes[k]].xmax * inputImageWidth / 300;
outBuff[outBoxNum][6] =
decodedBoxes[out_indexes[k]].ymax * inputImageHeight / 300;
outBoxNum++;
}
}
MS_PRINT("outBoxNum %d", outBoxNum);
for (int i = 0; i < outBoxNum; ++i) {
std::string tmpid_str = std::to_string(outBuff[i][0]);
result += tmpid_str; // image ID
result += "_";
// tmpid_str = std::to_string(outBuff[i][1]);
MS_PRINT("label_classes i %d, outBuff %d",i, (int) outBuff[i][1]);
tmpid_str = label_classes[(int) outBuff[i][1]];
result += tmpid_str; // label id
result += "_";
tmpid_str = std::to_string(outBuff[i][2]);
result += tmpid_str; // scores
result += "_";
tmpid_str = std::to_string(outBuff[i][3]);
result += tmpid_str; // xmin
result += "_";
tmpid_str = std::to_string(outBuff[i][4]);
result += tmpid_str; // ymin
result += "_";
tmpid_str = std::to_string(outBuff[i][5]);
result += tmpid_str; // xmax
result += "_";
tmpid_str = std::to_string(outBuff[i][6]);
result += tmpid_str; // ymax
result += ";";
}
return result;
}
std::string SSDModelUtil::getDecodeResult(float *branchScores, float *branchBoxData) {
std::string result = "";
NormalBox tmpBox[1917] = {0};
float mScores[1917][81] = {0};
float outBuff[1917][7] = {0};
float scoreWithOneClass[1917] = {0};
int outBoxNum = 0;
YXBoxes decodedBoxes[1917] = {0};
// Copy branch outputs box data to tmpBox.
for (int i = 0; i < 1917; ++i) {
tmpBox[i].y = branchBoxData[i * 4 + 0];
tmpBox[i].x = branchBoxData[i * 4 + 1];
tmpBox[i].h = branchBoxData[i * 4 + 2];
tmpBox[i].w = branchBoxData[i * 4 + 3];
}
// Copy branch outputs score to mScores.
for (int i = 0; i < 1917; ++i) {
for (int j = 0; j < 81; ++j) {
mScores[i][j] = branchScores[i * 81 + j];
}
}
ssd_boxes_decode(tmpBox, decodedBoxes);
const float nms_threshold = 0.3;
for (int i = 1; i < 81; i++) {
std::vector<int> in_indexes;
for (int j = 0; j < 1917; j++) {
scoreWithOneClass[j] = mScores[j][i];
if (mScores[j][i] > g_thres_map[i]) {
in_indexes.push_back(j);
}
}
if (in_indexes.size() == 0) {
continue;
}
sort(in_indexes.begin(), in_indexes.end(),
[&](int a, int b) { return scoreWithOneClass[a] > scoreWithOneClass[b]; });
std::vector<int> out_indexes;
nonMaximumSuppression(decodedBoxes, scoreWithOneClass, in_indexes, out_indexes,
nms_threshold);
for (int k = 0; k < out_indexes.size(); k++) {
outBuff[outBoxNum][0] = out_indexes[k]; //image id
outBuff[outBoxNum][1] = i; //labelid
outBuff[outBoxNum][2] = scoreWithOneClass[out_indexes[k]]; //scores
outBuff[outBoxNum][3] =
decodedBoxes[out_indexes[k]].xmin * inputImageWidth / 300;
outBuff[outBoxNum][4] =
decodedBoxes[out_indexes[k]].ymin * inputImageHeight / 300;
outBuff[outBoxNum][5] =
decodedBoxes[out_indexes[k]].xmax * inputImageWidth / 300;
outBuff[outBoxNum][6] =
decodedBoxes[out_indexes[k]].ymax * inputImageHeight / 300;
outBoxNum++;
}
}
MS_PRINT("outBoxNum %d", outBoxNum);
for (int i = 0; i < outBoxNum; ++i) {
std::string tmpid_str = std::to_string(outBuff[i][0]);
result += tmpid_str; // image ID
result += "_";
// tmpid_str = std::to_string(outBuff[i][1]);
MS_PRINT("label_classes i %d, outBuff %d",i, (int) outBuff[i][1]);
tmpid_str = label_classes[(int) outBuff[i][1]];
result += tmpid_str; // label id
result += "_";
tmpid_str = std::to_string(outBuff[i][2]);
result += tmpid_str; // scores
result += "_";
tmpid_str = std::to_string(outBuff[i][3]);
result += tmpid_str; // xmin
result += "_";
tmpid_str = std::to_string(outBuff[i][4]);
result += tmpid_str; // ymin
result += "_";
tmpid_str = std::to_string(outBuff[i][5]);
result += tmpid_str; // xmax
result += "_";
tmpid_str = std::to_string(outBuff[i][6]);
result += tmpid_str; // ymax
result += ";";
}
return result;
}
```

View File

@ -0,0 +1,490 @@
## MindSpore Lite 端侧目标检测demoAndroid
本示例程序演示了如何在端侧利用MindSpore Lite C++ APIAndroid JNI以及MindSpore Lite 目标检测模型完成端侧推理实现对图库或者设备摄像头捕获的内容进行检测并在App图像预览界面中显示连续目标检测结果。
### 运行依赖
- Android Studio >= 3.2 (推荐4.0以上版本)
- NDK 21.3
- CMake 3.10
- Android SDK >= 26
### 构建与运行
1. 在Android Studio中加载本示例源码并安装相应的SDK指定SDK版本后由Android Studio自动安装
![start_home](images/home.png)
启动Android Studio后点击`File->Settings->System Settings->Android SDK`勾选相应的SDK。如下图所示勾选后点击`OK`Android Studio即可自动安装SDK。
![start_sdk](images/sdk_management.png)
可选若安装时出现NDK版本问题可手动下载相应的[NDK版本](https://developer.android.com/ndk/downloads?hl=zh-cn)本示例代码使用的NDK版本为21.3),并在`Project Structure`的`Android NDK location`设置中指定SDK的位置。
![project_structure](images/project_structure.png)
2. 连接Android设备运行目标检测示例应用程序。
通过USB连接Android设备调试点击`Run 'app'`即可在你的设备上运行本示例项目。
* 注编译过程中Android Studio会自动下载MindSpore Lite、模型文件等相关依赖项编译过程需耐心等待。
![run_app](images/run_app.PNG)
Android Studio连接设备调试操作可参考<https://developer.android.com/studio/run/device?hl=zh-cn>
3. 在Android设备上点击“继续安装”安装完即可查看到设备摄像头捕获的内容和推理结果。
![install](images/install.jpg)
如下图所示,检测出图中内容是鼠标。
![result](images/object_detection.png)
## 示例程序详细说明
本端侧目标检测Android示例程序分为JAVA层和JNI层其中JAVA层主要通过Android Camera 2 API实现摄像头获取图像帧以及相应的图像处理针对推理结果画框等功能JNI层在[Runtime](https://www.mindspore.cn/tutorial/zh-CN/master/use/lite_runtime.html)中完成模型推理的过程。
> 此处详细说明示例程序的JNI层实现JAVA层运用Android Camera 2 API实现开启设备摄像头以及图像帧处理等功能需读者具备一定的Android开发基础知识。
### 示例程序结构
```
app
|
├── libs # 存放demo jni层编译出的库文件
│ └── arm64-v8a
│ │── libmlkit-label-MS.so #
|
├── src/main
│ ├── assets # 资源文件
| | └── ssd.ms # 存放模型文件
│ |
│ ├── cpp # 模型加载和预测主要逻辑封装类
| | ├── mindspore-lite-...-cpu # minspore源码编译出的调用包,包含demo jni层依赖的库文件及相关的头文件
| | | └── ...
│ | |
| | ├── MindSporeNetnative.cpp # MindSpore调用相关的JNI方法
│ ├── java # java层应用代码
│ │ └── com.huawei.himindsporedemo
│ │ ├── help # 图像处理及MindSpore JNI调用相关实现
│ │ │ └── ...
│ │ └── obejctdetect # 开启摄像头及绘制相关实现
│ │ └── ...
│ │
│ ├── res # 存放Android相关的资源文件
│ └── AndroidManifest.xml # Android配置文件
├── CMakeList.txt # cmake编译入口文件
├── build.gradle # 其他Android配置文件
├── download.gradle # APP构建时由gradle自动从HuaWei Server下载依赖的库文件及模型文件
└── ...
```
### 配置MindSpore Lite依赖项
Android JNI层调用MindSpore C++ API时需要相关库文件支持。可通过MindSpore Lite[源码编译](https://www.mindspore.cn/lite/docs/zh-CN/master/deploy.html)生成`libmindspore-lite.so`库文件。
在Android Studio中将编译完成的`libmindspore-lite.so`库文件可包含多个兼容架构分别放置在APP工程的`app/libs/arm64-v8a`ARM64或`app/libs/armeabi-v7a`ARM32目录下并在app的`build.gradle`文件中配置CMake编译支持以及`arm64-v8a`和`armeabi-v7a`的编译支持,如下所示:
```
android{
defaultConfig{
externalNativeBuild{
cmake{
arguments "-DANDROID_STL=c++_shared"
}
}
ndk{
abiFilters 'arm64-v8a'
}
}
}
```
在`app/CMakeLists.txt`文件中建立`.so`库文件链接,如下所示。
```
# Set MindSpore Lite Dependencies.
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/include/MindSpore)
add_library(mindspore-lite SHARED IMPORTED )
set_target_properties(mindspore-lite PROPERTIES
IMPORTED_LOCATION "${CMAKE_SOURCE_DIR}/libs/libmindspore-lite.so")
# Link target library.
target_link_libraries(
...
mindspore-lite
minddata-lite
...
)
```
本示例中app build过程由download.gradle文件自动从华为服务器下载mindspore所编译的库及相关头文件并放置在`src/main/cpp`工程目录下。
* 注:若自动下载失败,请手动下载相关库文件并将其放在对应位置:
* libmindspore-lite.so [下载链接](https://download.mindspore.cn/model_zoo/official/lite/lib/mindspore%20version%200.7/libmindspore-lite.so)
### 下载及部署模型文件
从MindSpore Model Hub中下载模型文件本示例程序中使用的目标检测模型文件为`ssd.ms`同样通过download.gradle脚本在APP构建时自动下载并放置在`app/src/main/assets`工程目录下。
* 注若下载失败请手动下载模型文件ssd.ms [下载链接](https://download.mindspore.cn/model_zoo/official/lite/ssd_mobilenetv2_lite/ssd.ms)。
### 编写端侧推理代码
在JNI层调用MindSpore Lite C++ API实现端测推理。
推理代码流程如下,完整代码请参见`src/cpp/MindSporeNetnative.cpp`。
1. 加载MindSpore Lite模型文件构建上下文、会话以及用于推理的计算图。
- 加载模型文件:创建并配置用于模型推理的上下文
```cpp
// Buffer is the model data passed in by the Java layer
jlong bufferLen = env->GetDirectBufferCapacity(buffer);
char *modelBuffer = CreateLocalModelBuffer(env, buffer);
```
- 创建会话
```cpp
void **labelEnv = new void *;
MSNetWork *labelNet = new MSNetWork;
*labelEnv = labelNet;
// Create context.
lite::Context *context = new lite::Context;
context->cpu_bind_mode_ = lite::NO_BIND;
context->device_ctx_.type = lite::DT_CPU;
context->thread_num_ = numThread; //Specify the number of threads to run inference
// Create the mindspore session.
labelNet->CreateSessionMS(modelBuffer, bufferLen, "device label", context);
delete context;
```
- 加载模型文件并构建用于推理的计算图
```cpp
void MSNetWork::CreateSessionMS(char* modelBuffer, size_t bufferLen, std::string name, mindspore::lite::Context* ctx)
{
CreateSession(modelBuffer, bufferLen, ctx);
session = mindspore::session::LiteSession::CreateSession(ctx);
auto model = mindspore::lite::Model::Import(modelBuffer, bufferLen);
int ret = session->CompileGraph(model); // Compile Graph
}
```
2. 将输入图片转换为传入MindSpore模型的Tensor格式。
将待检测图片数据转换为输入MindSpore模型的Tensor。
```cpp
// Convert the Bitmap image passed in from the JAVA layer to Mat for OpenCV processing
LiteMat lite_mat_bgr,lite_norm_mat_cut;
if (!BitmapToLiteMat(env, srcBitmap, lite_mat_bgr)){
MS_PRINT("BitmapToLiteMat error");
return NULL;
}
int srcImageWidth = lite_mat_bgr.width_;
int srcImageHeight = lite_mat_bgr.height_;
if(!PreProcessImageData(lite_mat_bgr, lite_norm_mat_cut)){
MS_PRINT("PreProcessImageData error");
return NULL;
}
ImgDims inputDims;
inputDims.channel =lite_norm_mat_cut.channel_;
inputDims.width = lite_norm_mat_cut.width_;
inputDims.height = lite_norm_mat_cut.height_;
// Get the mindsore inference environment which created in loadModel().
void **labelEnv = reinterpret_cast<void **>(netEnv);
if (labelEnv == nullptr) {
MS_PRINT("MindSpore error, labelEnv is a nullptr.");
return NULL;
}
MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
auto mSession = labelNet->session;
if (mSession == nullptr) {
MS_PRINT("MindSpore error, Session is a nullptr.");
return NULL;
}
MS_PRINT("MindSpore get session.");
auto msInputs = mSession->GetInputs();
auto inTensor = msInputs.front();
float *dataHWC = reinterpret_cast<float *>(lite_norm_mat_cut.data_ptr_);
// copy input Tensor
memcpy(inTensor->MutableData(), dataHWC,
inputDims.channel * inputDims.width * inputDims.height * sizeof(float));
delete[] (dataHWC);
```
3. 进行模型推理前输入tensor格式为 NHWCshape为1:300:300:3格式为RGB, 并对输入tensor做标准化处理.
```cpp
bool PreProcessImageData(LiteMat &lite_mat_bgr,LiteMat &lite_norm_mat_cut) {
bool ret=false;
LiteMat lite_mat_resize;
ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 300, 300);
if (!ret) {
MS_PRINT("ResizeBilinear error");
return false;
}
LiteMat lite_mat_convert_float;
ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0);
if (!ret) {
MS_PRINT("ConvertTo error");
return false;
}
float means[3] = {0.485, 0.456, 0.406};
float vars[3] = {1.0 / 0.229, 1.0 / 0.224, 1.0 / 0.225};
SubStractMeanNormalize(lite_mat_convert_float, lite_norm_mat_cut, means, vars);
return true;
}
```
4. 对输入Tensor按照模型进行推理获取输出Tensor并进行后处理。
- 图执行,端测推理。
```cpp
// After the model and image tensor data is loaded, run inference.
auto status = mSession->RunGraph();
```
- 获取输出数据。
```cpp
auto names = mSession->GetOutputTensorNames();
typedef std::unordered_map<std::string,
std::vector<mindspore::tensor::MSTensor *>> Msout;
std::unordered_map<std::string,
mindspore::tensor::MSTensor *> msOutputs;
for (const auto &name : names) {
auto temp_dat =mSession->GetOutputByTensorName(name);
msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *> {name, temp_dat});
}
std::string retStr = ProcessRunnetResult(msOutputs, ret);
```
- 模型有2个输出输出1是目标的类别置信度维度为11917: 81 输出2是目标的矩形框坐标偏移量维度为1:1917:4。 为了得出目标的实际矩形框,需要根据偏移量计算出矩形框的位置。这部分在 getDefaultBoxes中实现。
```cpp
void SSDModelUtil::getDefaultBoxes() {
float fk[6] = {0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
std::vector<struct WHBox> all_sizes;
struct Product mProductData[19 * 19] = {0};
for (int i = 0; i < 6; i++) {
fk[i] = config.model_input_height / config.steps[i];
}
float scale_rate =
(config.max_scale - config.min_scale) / (sizeof(config.num_default) / sizeof(int) - 1);
float scales[7] = {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0};
for (int i = 0; i < sizeof(config.num_default) / sizeof(int); i++) {
scales[i] = config.min_scale + scale_rate * i;
}
for (int idex = 0; idex < sizeof(config.feature_size) / sizeof(int); idex++) {
float sk1 = scales[idex];
float sk2 = scales[idex + 1];
float sk3 = sqrt(sk1 * sk2);
struct WHBox tempWHBox;
all_sizes.clear();
if (idex == 0) {
float w = sk1 * sqrt(2);
float h = sk1 / sqrt(2);
tempWHBox.boxw = 0.1;
tempWHBox.boxh = 0.1;
all_sizes.push_back(tempWHBox);
tempWHBox.boxw = w;
tempWHBox.boxh = h;
all_sizes.push_back(tempWHBox);
tempWHBox.boxw = h;
tempWHBox.boxh = w;
all_sizes.push_back(tempWHBox);
} else {
tempWHBox.boxw = sk1;
tempWHBox.boxh = sk1;
all_sizes.push_back(tempWHBox);
for (int j = 0; j < sizeof(config.aspect_ratios[idex]) / sizeof(int); j++) {
float w = sk1 * sqrt(config.aspect_ratios[idex][j]);
float h = sk1 / sqrt(config.aspect_ratios[idex][j]);
tempWHBox.boxw = w;
tempWHBox.boxh = h;
all_sizes.push_back(tempWHBox);
tempWHBox.boxw = h;
tempWHBox.boxh = w;
all_sizes.push_back(tempWHBox);
}
tempWHBox.boxw = sk3;
tempWHBox.boxh = sk3;
all_sizes.push_back(tempWHBox);
}
for (int i = 0; i < config.feature_size[idex]; i++) {
for (int j = 0; j < config.feature_size[idex]; j++) {
mProductData[i * config.feature_size[idex] + j].x = i;
mProductData[i * config.feature_size[idex] + j].y = j;
}
}
int productLen = config.feature_size[idex] * config.feature_size[idex];
for (int i = 0; i < productLen; i++) {
for (int j = 0; j < all_sizes.size(); j++) {
struct NormalBox tempBox;
float cx = (mProductData[i].y + 0.5) / fk[idex];
float cy = (mProductData[i].x + 0.5) / fk[idex];
tempBox.y = cy;
tempBox.x = cx;
tempBox.h = all_sizes[j].boxh;
tempBox.w = all_sizes[j].boxw;
mDefaultBoxes.push_back(tempBox);
}
}
}
}
```
- 通过最大值抑制将目标类型置信度较高的输出筛选出来。
```cpp
void SSDModelUtil::nonMaximumSuppression(const YXBoxes *const decoded_boxes,
const float *const scores,
const std::vector<int> &in_indexes,
std::vector<int> &out_indexes, const float nmsThreshold,
const int count, const int max_results) {
int nR = 0; //number of results
std::vector<bool> del(count, false);
for (size_t i = 0; i < in_indexes.size(); i++) {
if (!del[in_indexes[i]]) {
out_indexes.push_back(in_indexes[i]);
if (++nR == max_results) {
break;
}
for (size_t j = i + 1; j < in_indexes.size(); j++) {
const auto boxi = decoded_boxes[in_indexes[i]], boxj = decoded_boxes[in_indexes[j]];
float a[4] = {boxi.xmin, boxi.ymin, boxi.xmax, boxi.ymax};
float b[4] = {boxj.xmin, boxj.ymin, boxj.xmax, boxj.ymax};
if (IOU(a, b) > nmsThreshold) {
del[in_indexes[j]] = true;
}
}
}
}
}
```
- 对每类的概率大于阈值通过NMS算法筛选出矩形框后 还需要将输出的矩形框恢复到原图尺寸。
```cpp
std::string SSDModelUtil::getDecodeResult(float *branchScores, float *branchBoxData) {
std::string result = "";
NormalBox tmpBox[1917] = {0};
float mScores[1917][81] = {0};
float outBuff[1917][7] = {0};
float scoreWithOneClass[1917] = {0};
int outBoxNum = 0;
YXBoxes decodedBoxes[1917] = {0};
// Copy branch outputs box data to tmpBox.
for (int i = 0; i < 1917; ++i) {
tmpBox[i].y = branchBoxData[i * 4 + 0];
tmpBox[i].x = branchBoxData[i * 4 + 1];
tmpBox[i].h = branchBoxData[i * 4 + 2];
tmpBox[i].w = branchBoxData[i * 4 + 3];
}
// Copy branch outputs score to mScores.
for (int i = 0; i < 1917; ++i) {
for (int j = 0; j < 81; ++j) {
mScores[i][j] = branchScores[i * 81 + j];
}
}
ssd_boxes_decode(tmpBox, decodedBoxes);
const float nms_threshold = 0.3;
for (int i = 1; i < 81; i++) {
std::vector<int> in_indexes;
for (int j = 0; j < 1917; j++) {
scoreWithOneClass[j] = mScores[j][i];
// if (mScores[j][i] > 0.1) {
if (mScores[j][i] > g_thres_map[i]) {
in_indexes.push_back(j);
}
}
if (in_indexes.size() == 0) {
continue;
}
sort(in_indexes.begin(), in_indexes.end(),
[&](int a, int b) { return scoreWithOneClass[a] > scoreWithOneClass[b]; });
std::vector<int> out_indexes;
nonMaximumSuppression(decodedBoxes, scoreWithOneClass, in_indexes, out_indexes,
nms_threshold);
for (int k = 0; k < out_indexes.size(); k++) {
outBuff[outBoxNum][0] = out_indexes[k]; //image id
outBuff[outBoxNum][1] = i; //labelid
outBuff[outBoxNum][2] = scoreWithOneClass[out_indexes[k]]; //scores
outBuff[outBoxNum][3] =
decodedBoxes[out_indexes[k]].xmin * inputImageWidth / 300;
outBuff[outBoxNum][4] =
decodedBoxes[out_indexes[k]].ymin * inputImageHeight / 300;
outBuff[outBoxNum][5] =
decodedBoxes[out_indexes[k]].xmax * inputImageWidth / 300;
outBuff[outBoxNum][6] =
decodedBoxes[out_indexes[k]].ymax * inputImageHeight / 300;
outBoxNum++;
}
}
MS_PRINT("outBoxNum %d", outBoxNum);
for (int i = 0; i < outBoxNum; ++i) {
std::string tmpid_str = std::to_string(outBuff[i][0]);
result += tmpid_str; // image ID
result += "_";
// tmpid_str = std::to_string(outBuff[i][1]);
MS_PRINT("label_classes i %d, outBuff %d",i, (int) outBuff[i][1]);
tmpid_str = label_classes[(int) outBuff[i][1]];
result += tmpid_str; // label id
result += "_";
tmpid_str = std::to_string(outBuff[i][2]);
result += tmpid_str; // scores
result += "_";
tmpid_str = std::to_string(outBuff[i][3]);
result += tmpid_str; // xmin
result += "_";
tmpid_str = std::to_string(outBuff[i][4]);
result += tmpid_str; // ymin
result += "_";
tmpid_str = std::to_string(outBuff[i][5]);
result += tmpid_str; // xmax
result += "_";
tmpid_str = std::to_string(outBuff[i][6]);
result += tmpid_str; // ymax
result += ";";
}
return result;
}
```

View File

@ -0,0 +1,3 @@
/build
/libs
/src/main/cpp/include/

View File

@ -0,0 +1,82 @@
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html
# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)
set(CMAKE_VERBOSE_MAKEFILE on)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/libs/${ANDROID_ABI})
set(MINDSPORELITE_VERSION mindspore-lite-0.7.0-minddata-arm64-cpu)
# ============== Set MindSpore Dependencies. =============
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/third_party/flatbuffers/include)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION})
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/ir/dtype)
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/schema)
#include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/include/ModelUtil)
add_library(mindspore-lite SHARED IMPORTED )
add_library(minddata-lite SHARED IMPORTED )
set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite.so)
set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libminddata-lite.so)
# --------------- MindSpore Lite set End. --------------------
# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
file(GLOB_RECURSE cpp_src "src/main/cpp/*.cpp" "src/main/cpp/*.h")
add_library( # Sets the name of the library.
mlkit-label-MS
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
${cpp_src})
# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.
find_library( # Sets the name of the path variable.
        log-lib
        # Specifies the name of the NDK library that
        # you want CMake to locate.
        log )
# NDK bitmap-access library; the name must be exactly "jnigraphics"
# (the stray '·' character in the original made the lookup fail).
find_library( jnigraphics-lib jnigraphics )
# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.
add_definitions(-DMNN_USE_LOGCAT)
target_link_libraries( # Specifies the target library.
mlkit-label-MS
# --- mindspore ---
minddata-lite
mindspore-lite
# --- other dependencies.---
-ljnigraphics
android
# Links the target library to the log library
${log-lib}
)

View File

@ -0,0 +1,61 @@
apply plugin: 'com.android.application'
// Before gradle build.
// To download some necessary libraries.
apply from:'download.gradle'
android {
compileSdkVersion 30
buildToolsVersion "30.0.1"
defaultConfig {
applicationId "com.mindspore.hiobject"
minSdkVersion 21
targetSdkVersion 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
arguments "-DANDROID_STL=c++_shared"
cppFlags ""
}
}
ndk {
//abiFilters 'arm64-v8a', 'armeabi-v7a'
abiFilters 'arm64-v8a'
}
}
aaptOptions {
// noCompress '.so', ".tflite", ".mnn", ".cambricon", '.mslite', 'om', 'ms'
noCompress '.so', 'ms'
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
customDebugType {
debuggable true
}
}
externalNativeBuild {
cmake {
path file('CMakeLists.txt')
}
}
ndkVersion '21.3.6528147'
}
dependencies {
implementation fileTree(dir: "libs", include: ["*.jar"])
implementation 'androidx.appcompat:appcompat:1.1.0'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
}

View File

@ -0,0 +1,82 @@
/**
* To download necessary library from HuaWei server.
* Including mindspore-lite .so file, minddata-lite .so file and model file.
* The libraries can be downloaded manually.
*/
def targetMindSporeInclude = "src/main/cpp/"
def mindsporeLite_Version = "mindspore-lite-0.7.0-minddata-arm64-cpu"
def targetModelFile = "src/main/assets/model/ssd.ms"
def mindSporeLibrary_arm64 = "src/main/cpp/${mindsporeLite_Version}.tar.gz"
def modelDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/ssd_mobilenetv2_lite/ssd.ms"
def mindsporeLiteDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/lib/mindspore%20version%201.0/${mindsporeLite_Version}.tar.gz"
def cleantargetMindSporeInclude = "src/main/cpp"
task cleanCmakeCache(type: Delete) {
delete '.cxx/cmake/debug'
delete '.cxx/cmake/release'
}
task downloadModelFile(type: DownloadUrlTask) {
doFirst {
println "Downloading ${modelDownloadUrl}"
}
sourceUrl = "${modelDownloadUrl}"
target = file("${targetModelFile}")
}
task downloadMindSporeLibrary(type: DownloadUrlTask) {
doFirst {
println "Downloading ${mindsporeLiteDownloadUrl}"
}
sourceUrl = "${mindsporeLiteDownloadUrl}"
target = file("${mindSporeLibrary_arm64}")
}
task unzipMindSporeInclude(type: Copy, dependsOn: 'downloadMindSporeLibrary') {
doFirst {
println "Unzipping ${mindSporeLibrary_arm64}"
}
from tarTree(resources.gzip("${mindSporeLibrary_arm64}"))
into "${targetMindSporeInclude}"
}
task cleanUnusedmindsporeFiles(type: Delete, dependsOn: ['unzipMindSporeInclude']) {
delete fileTree("${cleantargetMindSporeInclude}").matching {
include "*.tar.gz"
}
}
/*
* Using preBuild to download mindspore library and model file.
* Run before gradle build.
*/
if (file("src/main/cpp/${mindsporeLite_Version}/lib/libmindspore-lite.so").exists()){
downloadMindSporeLibrary.enabled = false
unzipMindSporeInclude.enabled = false
cleanUnusedmindsporeFiles.enabled = false
}
if (file("src/main/assets/model/ssd.ms").exists()){
downloadModelFile.enabled = false
}
preBuild.dependsOn cleanCmakeCache
preBuild.dependsOn downloadModelFile
preBuild.dependsOn downloadMindSporeLibrary
preBuild.dependsOn unzipMindSporeInclude
preBuild.dependsOn cleanUnusedmindsporeFiles
/**
 * Gradle task that downloads a single file from {@code sourceUrl} to
 * {@code target} using Ant's &lt;get&gt; task. The @Input/@OutputFile
 * annotations let Gradle skip the task when the output is up to date.
 */
class DownloadUrlTask extends DefaultTask {
    @Input
    String sourceUrl
    @OutputFile
    File target
    @TaskAction
    void download() {
        // Ant's get handles the HTTP transfer and writes to the target path.
        ant.get(src: sourceUrl, dest: target)
    }
}

View File

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@ -0,0 +1,26 @@
package com.mindspore.hiobject;
import android.content.Context;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.platform.app.InstrumentationRegistry;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.assertEquals;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
    @Test
    public void useAppContext() {
        // Context of the app under test.
        Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
        // Must match the applicationId in app/build.gradle and the manifest
        // package, which are both "com.mindspore.hiobject". The previous
        // expected value ("com.huawei.himindsporedemo") was copied from the
        // image-classification demo and made this test fail unconditionally.
        assertEquals("com.mindspore.hiobject", appContext.getPackageName());
    }
}

View File

@ -0,0 +1,46 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.mindspore.hiobject"
android:versionCode="1"
android:versionName="1.0">
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_INTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_INTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.MOUNT_UNMOUNT_FILESYSTEM" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<application
android:requestLegacyExternalStorage = "true"
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity
android:name="com.mindspore.hiobject.objectdetect.DealDataActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar"/>
<activity
android:name="com.mindspore.hiobject.objectdetect.PhotoActivity"
android:theme="@style/Theme.AppCompat.NoActionBar"/>
<activity
android:name="com.mindspore.hiobject.objectdetect.CameraActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar"/>
<activity
android:name="com.mindspore.hiobject.SplashActivity"
android:screenOrientation="portrait"
android:theme="@style/Theme.AppCompat.NoActionBar">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View File

@ -0,0 +1,55 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "MSNetWork.h"
#include <android/log.h>
#include <iostream>
#include <string>
#include "include/errorcode.h"
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
// Starts with no session; CreateSessionMS() must be called before use.
MSNetWork::MSNetWork(void) : session_(nullptr) {}

// Does not free session_ — callers are expected to call ReleaseNets() first.
MSNetWork::~MSNetWork(void) {}
/**
 * Creates the MindSpore Lite session and compiles the model graph into it.
 *
 * On any failure this logs and returns early; callers must check session()
 * for nullptr afterwards (see loadModel in the JNI layer).
 *
 * @param modelBuffer Serialized .ms model data.
 * @param bufferLen   Size of modelBuffer in bytes.
 * @param ctx         Lite context (device type, thread count); only consumed
 *                    during session creation.
 */
void
MSNetWork::CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx) {
    session_ = mindspore::session::LiteSession::CreateSession(ctx);
    if (session_ == nullptr) {
        MS_PRINT("Create Session failed.");
        return;
    }

    // Compile model.
    auto model = mindspore::lite::Model::Import(modelBuffer, bufferLen);
    if (model == nullptr) {
        MS_PRINT("Import model failed.");
        return;
    }

    int ret = session_->CompileGraph(model);
    if (ret != mindspore::lite::RET_OK) {
        MS_PRINT("CompileGraph failed.");
        return;
    }
    // NOTE(review): `model` is never released on any path here — this looks
    // like a leak. Confirm MindSpore Lite's ownership rules for
    // Model::Import/CompileGraph before freeing it.
}
/**
 * Releases the owned LiteSession.
 *
 * Also resets session_ to nullptr so that a stale pointer is never observable
 * through session() and a repeated call cannot double-delete.
 *
 * @return 0 always (kept for interface compatibility).
 */
int MSNetWork::ReleaseNets(void) {
    delete session_;
    session_ = nullptr;  // avoid dangling pointer / double delete on repeated calls
    return 0;
}

View File

@ -0,0 +1,59 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MSNETWORK_H
#define MSNETWORK_H
#include <context.h>
#include <lite_session.h>
#include <model.h>
#include <errorcode.h>
#include <cstdio>
#include <algorithm>
#include <fstream>
#include <functional>
#include <sstream>
#include <vector>
#include <map>
#include <string>
#include <memory>
#include <utility>
// Dimensions of an image tensor (channel / width / height).
struct ImgDims {
    int channel = 0;  // number of channels, e.g. 3 for RGB
    int width = 0;
    int height = 0;
};
/*struct SessIterm {
std::shared_ptr<mindspore::session::LiteSession> sess = nullptr;
};*/
// Thin wrapper that owns a single MindSpore Lite inference session.
// Lifecycle: CreateSessionMS() -> session()->RunGraph() ... -> ReleaseNets().
class MSNetWork {
 public:
    MSNetWork();

    ~MSNetWork();

    // Creates the session and compiles the given .ms model into it.
    // On failure session() stays nullptr; callers must check.
    void CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx);

    // Deletes the owned session. Returns 0.
    int ReleaseNets(void);

    // Non-owning accessor; nullptr until CreateSessionMS() succeeds.
    mindspore::session::LiteSession * session() const { return session_; }

 private:
    mindspore::session::LiteSession *session_;  // owned; released by ReleaseNets()
};
#endif

View File

@ -0,0 +1,269 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <android/bitmap.h>
#include <android/asset_manager_jni.h>
#include <android/log.h>
#include <sstream>
#include <cstring>
#include <set>
#include <utility>
#include "include/errorcode.h"
#include "include/ms_tensor.h"
#include "MSNetWork.h"
#include "ssd_util/ssd_util.h"
#include "lite_cv/lite_mat.h"
#include "lite_cv/image_process.h"
using mindspore::dataset::LiteMat;
using mindspore::dataset::LPixelType;
using mindspore::dataset::LDataType;
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
/**
 * Converts an Android bitmap into a LiteMat (RGBA -> RGB, uint8).
 *
 * Only ANDROID_BITMAP_FORMAT_RGBA_8888 bitmaps are accepted. When the bitmap
 * has row padding (stride != width*4) the pixels are first compacted into a
 * temporary tightly-packed buffer, because InitFromPixel reads the data as
 * contiguous rows.
 *
 * @param env       JNI environment.
 * @param srcBitmap Source android.graphics.Bitmap.
 * @param lite_mat  Output mat; filled on success.
 * @return true on success, false on unsupported format or conversion failure.
 */
bool BitmapToLiteMat(JNIEnv *env, const jobject &srcBitmap, LiteMat *lite_mat) {
    bool ret = false;
    AndroidBitmapInfo info;
    void *pixels = nullptr;
    LiteMat &lite_mat_bgr = *lite_mat;
    AndroidBitmap_getInfo(env, srcBitmap, &info);
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        MS_PRINT("Image Err, Request RGBA");
        return false;
    }
    AndroidBitmap_lockPixels(env, srcBitmap, &pixels);
    if (info.stride == info.width*4) {
        // Rows are already tightly packed: convert in place.
        ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels),
                            LPixelType::RGBA2RGB, LDataType::UINT8,
                            info.width, info.height, lite_mat_bgr);
        if (!ret) {
            MS_PRINT("Init From RGBA error");
        }
    } else {
        // Strip per-row padding into a temporary packed buffer first.
        unsigned char *pixels_ptr = new unsigned char[info.width*info.height*4];
        unsigned char *ptr = pixels_ptr;
        unsigned char *data = reinterpret_cast<unsigned char *>(pixels);
        for (int i = 0; i < info.height; i++) {
            memcpy(ptr, data, info.width*4);
            ptr += info.width*4;
            data += info.stride;
        }
        ret = InitFromPixel(reinterpret_cast<const unsigned char *>(pixels_ptr),
                            LPixelType::RGBA2RGB, LDataType::UINT8,
                            info.width, info.height, lite_mat_bgr);
        if (!ret) {
            MS_PRINT("Init From RGBA error");
        }
        delete[] (pixels_ptr);
    }
    // Always unlock, whether conversion succeeded or not.
    AndroidBitmap_unlockPixels(env, srcBitmap);
    return ret;
}
/**
 * Pre-processes an RGB LiteMat for SSD inference:
 * resize to 300x300, scale to [0,1], then normalize with per-channel
 * ImageNet mean/std (means subtracted, then multiplied by 1/std).
 *
 * @param lite_mat_bgr      Source image (uint8, 3 channels).
 * @param lite_norm_mat_ptr Output normalized float mat (300x300x3).
 * @return true on success, false if any processing step fails.
 */
bool PreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr) {
    bool ret = false;
    LiteMat lite_mat_resize;
    LiteMat &lite_norm_mat_cut = *lite_norm_mat_ptr;
    ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 300, 300);
    if (!ret) {
        MS_PRINT("ResizeBilinear error");
        return false;
    }
    LiteMat lite_mat_convert_float;
    ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0);
    if (!ret) {
        MS_PRINT("ConvertTo error");
        return false;
    }
    // ImageNet channel means and reciprocal standard deviations.
    float means[3] = {0.485, 0.456, 0.406};
    float vars[3] = {1.0 / 0.229, 1.0 / 0.224, 1.0 / 0.225};
    SubStractMeanNormalize(lite_mat_convert_float, lite_norm_mat_cut, means, vars);
    return true;
}
/**
 * Copies the contents of a Java direct ByteBuffer into a freshly allocated
 * native buffer (owned by the caller, released with delete[]).
 *
 * @param env         JNI environment.
 * @param modelBuffer java.nio.ByteBuffer holding the serialized model; must
 *                    be a direct buffer.
 * @return new[]-allocated copy of the buffer, or nullptr when the object is
 *         not a direct buffer (GetDirectBufferAddress returns nullptr and
 *         GetDirectBufferCapacity returns -1 in that case).
 */
char *CreateLocalModelBuffer(JNIEnv *env, jobject modelBuffer) {
    jbyte *modelAddr = static_cast<jbyte *>(env->GetDirectBufferAddress(modelBuffer));
    jlong modelLen = env->GetDirectBufferCapacity(modelBuffer);
    // Guard against non-direct buffers: copying from nullptr / a negative
    // length would crash or corrupt memory.
    if (modelAddr == nullptr || modelLen <= 0) {
        return nullptr;
    }
    char *buffer = new char[modelLen];
    memcpy(buffer, modelAddr, static_cast<size_t>(modelLen));
    return buffer;
}
/**
*
* @param msOutputs Model output, the mindspore inferencing result.
* @param srcImageWidth The width of the original input image.
* @param srcImageHeight The height of the original input image.
* @return
*/
/**
 * Post-processes the two SSD output tensors into the demo's string format
 * ("imgid_label_score_xmin_ymin_xmax_ymax;...") via SSDModelUtil.
 *
 * NOTE(review): the score/box tensors are taken from the first and second
 * entries of an unordered_map — this relies on a specific iteration order,
 * which is not guaranteed. Confirm whether looking the tensors up by name
 * would be safer.
 *
 * @param msOutputs      Model outputs keyed by tensor name (expects exactly
 *                       the two SSD heads: class scores and box deltas).
 * @param srcImageWidth  Width of the original input image (for rescaling boxes).
 * @param srcImageHeight Height of the original input image.
 * @return Encoded detection string; empty when nothing is detected.
 */
std::string ProcessRunnetResult(std::unordered_map<std::string,
        mindspore::tensor::MSTensor *> msOutputs,
                                int srcImageWidth, int srcImageHeight) {
    std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
    iter = msOutputs.begin();
    auto branch2_string = iter->first;
    auto branch2_tensor = iter->second;
    ++iter;
    auto branch1_string = iter->first;
    auto branch1_tensor = iter->second;
    MS_PRINT("%s %s", branch1_string.c_str(), branch2_string.c_str());

    // Raw pointers into the two output tensors (scores / box data).
    float *tmpscores2 = reinterpret_cast<float * >(branch1_tensor->MutableData());
    float *tmpdata = reinterpret_cast<float * >(branch2_tensor->MutableData());

    // Using ssd model util to process model branch outputs.
    SSDModelUtil ssdUtil(srcImageWidth, srcImageHeight);
    std::string retStr = ssdUtil.getDecodeResult(tmpscores2, tmpdata);
    MS_PRINT("retStr %s", retStr.c_str());
    return retStr;
}
/**
 * JNI entry: loads the .ms model from a direct ByteBuffer and builds the
 * inference session.
 *
 * The returned handle is a heap-allocated void** whose single slot points at
 * a heap-allocated MSNetWork; unloadModel() must be called to release it.
 * Returns 0 (null jlong) on any failure.
 *
 * @param buffer    Direct ByteBuffer with the serialized model.
 * @param numThread Number of CPU threads for inference.
 */
extern "C" JNIEXPORT jlong JNICALL
Java_com_mindspore_hiobject_help_TrackingMobile_loadModel(JNIEnv *env, jobject thiz,
                                                          jobject assetManager,
                                                          jobject buffer,
                                                          jint numThread) {
    MS_PRINT("MindSpore so version 20200730");
    if (nullptr == buffer) {
        MS_PRINT("error, buffer is nullptr!");
        return (jlong) nullptr;
    }
    jlong bufferLen = env->GetDirectBufferCapacity(buffer);
    MS_PRINT("MindSpore get bufferLen:%d", static_cast<int>(bufferLen));
    if (0 == bufferLen) {
        MS_PRINT("error, bufferLen is 0!");
        return (jlong) nullptr;
    }

    char *modelBuffer = CreateLocalModelBuffer(env, buffer);
    if (modelBuffer == nullptr) {
        MS_PRINT("modelBuffer create failed!");
        return (jlong) nullptr;
    }
    // NOTE(review): modelBuffer (new[] in CreateLocalModelBuffer) is never
    // freed on any path below — confirm whether the session copies the data
    // during CreateSessionMS and free it here if so.

    MS_PRINT("MindSpore loading Model.");
    // Handle layout: labelEnv -> labelNet; both heap-allocated, released in
    // unloadModel().
    void **labelEnv = new void *;
    MSNetWork *labelNet = new MSNetWork;
    *labelEnv = labelNet;

    mindspore::lite::Context *context = new mindspore::lite::Context;
    context->device_type_ = mindspore::lite::DT_CPU;
    context->thread_num_ = numThread;

    labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
    // The context is only consumed during session creation.
    delete context;

    if (labelNet->session() == nullptr) {
        delete labelNet;
        delete labelEnv;
        MS_PRINT("MindSpore create session failed!.");
        return (jlong) nullptr;
    }
    MS_PRINT("MindSpore create session successfully.");

    // Drop local JNI references early; they would be released automatically
    // when this native call returns.
    if (buffer != nullptr) {
        env->DeleteLocalRef(buffer);
    }

    if (assetManager != nullptr) {
        env->DeleteLocalRef(assetManager);
    }
    MS_PRINT("ptr released successfully.");

    return (jlong) labelEnv;
}
/**
 * JNI entry: runs SSD inference on a bitmap and returns the encoded
 * detection string ("imgid_label_score_xmin_ymin_xmax_ymax;...").
 *
 * @param netEnv    Handle returned by loadModel() (void** -> MSNetWork*).
 * @param srcBitmap RGBA_8888 android.graphics.Bitmap to detect on.
 * @return jstring with the detections, or NULL on any failure.
 */
extern "C" JNIEXPORT jstring JNICALL
Java_com_mindspore_hiobject_help_TrackingMobile_runNet(JNIEnv *env, jobject thiz,
                                                       jlong netEnv,
                                                       jobject srcBitmap) {
    // Convert and normalize the input image (300x300x3 float).
    LiteMat lite_mat_bgr, lite_norm_mat_cut;
    if (!BitmapToLiteMat(env, srcBitmap, &lite_mat_bgr)) {
        MS_PRINT("BitmapToLiteMat error");
        return NULL;
    }
    // Original dimensions are kept so boxes can be rescaled back.
    int srcImageWidth = lite_mat_bgr.width_;
    int srcImageHeight = lite_mat_bgr.height_;
    if (!PreProcessImageData(lite_mat_bgr, &lite_norm_mat_cut)) {
        MS_PRINT("PreProcessImageData error");
        return NULL;
    }
    ImgDims inputDims;
    inputDims.channel = lite_norm_mat_cut.channel_;
    inputDims.width = lite_norm_mat_cut.width_;
    inputDims.height = lite_norm_mat_cut.height_;

    // Get the mindspore inference environment which created in loadModel().
    void **labelEnv = reinterpret_cast<void **>(netEnv);
    if (labelEnv == nullptr) {
        MS_PRINT("MindSpore error, labelEnv is a nullptr.");
        return NULL;
    }
    MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);

    auto mSession = labelNet->session();
    if (mSession == nullptr) {
        MS_PRINT("MindSpore error, Session is a nullptr.");
        return NULL;
    }
    MS_PRINT("MindSpore get session.");

    auto msInputs = mSession->GetInputs();
    auto inTensor = msInputs.front();

    float *dataHWC = reinterpret_cast<float *>(lite_norm_mat_cut.data_ptr_);
    // copy input Tensor
    memcpy(inTensor->MutableData(), dataHWC,
           inputDims.channel * inputDims.width * inputDims.height * sizeof(float));
    MS_PRINT("MindSpore get msInputs.");

    auto status = mSession->RunGraph();
    if (status != mindspore::lite::RET_OK) {
        MS_PRINT("MindSpore runnet error.");
        return NULL;
    }

    // Collect all output tensors by name for post-processing.
    auto names = mSession->GetOutputTensorNames();
    std::unordered_map<std::string,
            mindspore::tensor::MSTensor *> msOutputs;
    for (const auto &name : names) {
        auto temp_dat = mSession->GetOutputByTensorName(name);
        msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *> {name, temp_dat});
    }
    std::string retStr = ProcessRunnetResult(msOutputs, srcImageWidth, srcImageHeight);

    const char *resultChardata = retStr.c_str();

    return (env)->NewStringUTF(resultChardata);
}
/**
 * JNI entry: tears down the inference environment created by loadModel().
 *
 * Releases the session (via ReleaseNets) and — unlike the original — also
 * frees the MSNetWork object and the void** handle slot themselves, both of
 * which loadModel() allocated with `new` and were previously leaked.
 *
 * @param netEnv Handle returned by loadModel(); 0 is tolerated.
 * @return true when a valid handle was released, false for a null handle.
 */
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_mindspore_hiobject_help_TrackingMobile_unloadModel(JNIEnv *env,
                                                            jobject thiz,
                                                            jlong netEnv) {
    void **labelEnv = reinterpret_cast<void **>(netEnv);
    if (labelEnv == nullptr) {
        MS_PRINT("MindSpore error, netEnv is a nullptr.");
        return (jboolean) false;
    }
    MSNetWork *labelNet = static_cast<MSNetWork *>(*labelEnv);
    if (labelNet != nullptr) {
        labelNet->ReleaseNets();
        delete labelNet;  // was leaked: allocated with new in loadModel()
    }
    delete labelEnv;      // was leaked: handle slot allocated with new in loadModel()
    return (jboolean) true;
}

View File

@ -0,0 +1,293 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <android/log.h>
#include <algorithm>
#include "ssd_util/ssd_util.h"
#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__)
/**
* SSD model util constructor.
* @param srcImgWidth The width of the original input image.
* @param srcImgHeight The height of the original input image.
*/
/**
 * SSD model util constructor.
 * @param srcImgWidth The width of the original input image.
 * @param srcImgHeight The height of the original input image.
 */
SSDModelUtil::SSDModelUtil(int srcImgWidth, int srcImgHeight) {
    inputImageWidth = srcImgWidth;
    inputImageHeight = srcImgHeight;
    getDefaultBoxes();  // Pre-compute the SSD default (anchor) boxes.
}
std::string SSDModelUtil::getDecodeResult(float *branchScores, float *branchBoxData) {
std::string result = "";
NormalBox tmpBox[1917] = {0};
float mScores[1917][81] = {0};
float outBuff[1917][7] = {0};
float scoreWithOneClass[1917] = {0};
int outBoxNum = 0;
YXBoxes decodedBoxes[1917] = {0};
// Copy branch outputs box data to tmpBox.
for (int i = 0; i < 1917; ++i) {
tmpBox[i].y = branchBoxData[i * 4 + 0];
tmpBox[i].x = branchBoxData[i * 4 + 1];
tmpBox[i].h = branchBoxData[i * 4 + 2];
tmpBox[i].w = branchBoxData[i * 4 + 3];
}
// Copy branch outputs score to mScores.
for (int i = 0; i < 1917; ++i) {
for (int j = 0; j < 81; ++j) {
mScores[i][j] = branchScores[i * 81 + j];
}
}
// NMS processing.
ssd_boxes_decode(tmpBox, decodedBoxes);
// const float nms_threshold = 0.6;
const float nms_threshold = 0.3;
for (int i = 1; i < 81; i++) {
std::vector<int> in_indexes;
for (int j = 0; j < 1917; j++) {
scoreWithOneClass[j] = mScores[j][i];
if (mScores[j][i] > g_thres_map[i]) {
in_indexes.push_back(j);
}
}
if (in_indexes.size() == 0) {
continue;
}
sort(in_indexes.begin(), in_indexes.end(),
[&](int a, int b) { return scoreWithOneClass[a] > scoreWithOneClass[b]; });
std::vector<int> out_indexes;
nonMaximumSuppression(decodedBoxes, scoreWithOneClass, in_indexes, &out_indexes,
nms_threshold);
for (int k = 0; k < out_indexes.size(); k++) {
// image id
outBuff[outBoxNum][0] = out_indexes[k];
// labelid
outBuff[outBoxNum][1] = i;
// scores
outBuff[outBoxNum][2] = scoreWithOneClass[out_indexes[k]];
outBuff[outBoxNum][3] =
decodedBoxes[out_indexes[k]].xmin * inputImageWidth / 300;
outBuff[outBoxNum][4] =
decodedBoxes[out_indexes[k]].ymin * inputImageHeight / 300;
outBuff[outBoxNum][5] =
decodedBoxes[out_indexes[k]].xmax * inputImageWidth / 300;
outBuff[outBoxNum][6] =
decodedBoxes[out_indexes[k]].ymax * inputImageHeight / 300;
outBoxNum++;
}
}
MS_PRINT("outBoxNum %d", outBoxNum);
for (int i = 0; i < outBoxNum; ++i) {
std::string tmpid_str = std::to_string(outBuff[i][0]);
result += tmpid_str;
result += "_";
// tmpid_str = std::to_string(outBuff[i][1]);
MS_PRINT("label_classes i %d, outBuff %d", i, (int) outBuff[i][1]);
tmpid_str = label_classes[static_cast<int>(outBuff[i][1])];
// label id
result += tmpid_str;
result += "_";
tmpid_str = std::to_string(outBuff[i][2]);
// scores
result += tmpid_str;
result += "_";
tmpid_str = std::to_string(outBuff[i][3]);
// xmin
result += tmpid_str;
result += "_";
tmpid_str = std::to_string(outBuff[i][4]);
// ymin
result += tmpid_str;
result += "_";
tmpid_str = std::to_string(outBuff[i][5]);
// xmax
result += tmpid_str;
result += "_";
tmpid_str = std::to_string(outBuff[i][6]);
// ymax
result += tmpid_str;
result += ";";
}
return result;
}
/**
 * Pre-computes the SSD default (anchor) boxes for all six feature maps and
 * stores them in mDefaultBoxes, ordered feature-map by feature-map, cell by
 * cell, aspect-ratio by aspect-ratio — the order ssd_boxes_decode() expects.
 */
void SSDModelUtil::getDefaultBoxes() {
    float fk[6] = {0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
    std::vector<struct WHBox> all_sizes;
    struct Product mProductData[19 * 19] = {0};

    // Effective grid scale per feature map.
    for (int i = 0; i < 6; i++) {
        fk[i] = config.model_input_height / config.steps[i];
    }
    // Linearly interpolated box scales between min_scale and max_scale.
    float scale_rate =
            (config.max_scale - config.min_scale) / (sizeof(config.num_default) / sizeof(int) - 1);
    float scales[7] = {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0};
    for (int i = 0; i < sizeof(config.num_default) / sizeof(int); i++) {
        scales[i] = config.min_scale + scale_rate * i;
    }

    for (int idex = 0; idex < sizeof(config.feature_size) / sizeof(int); idex++) {
        float sk1 = scales[idex];
        float sk2 = scales[idex + 1];
        float sk3 = sqrt(sk1 * sk2);  // intermediate scale between adjacent maps
        struct WHBox tempWHBox;

        all_sizes.clear();

        // When idex == 0, len(all_sizes) = 3.
        if (idex == 0) {
            float w = sk1 * sqrt(2);
            float h = sk1 / sqrt(2);

            // all_sizes = [(0.1, 0.1), (w, h), (h, w)]
            tempWHBox.boxw = 0.1;
            tempWHBox.boxh = 0.1;
            all_sizes.push_back(tempWHBox);

            tempWHBox.boxw = w;
            tempWHBox.boxh = h;
            all_sizes.push_back(tempWHBox);

            tempWHBox.boxw = h;
            tempWHBox.boxh = w;
            all_sizes.push_back(tempWHBox);
        } else {
            // len(all_sizes) = 6.
            tempWHBox.boxw = sk1;
            tempWHBox.boxh = sk1;
            all_sizes.push_back(tempWHBox);

            // One (w,h) and one (h,w) box per aspect ratio.
            for (int j = 0; j < sizeof(config.aspect_ratios[idex]) / sizeof(int); j++) {
                float w = sk1 * sqrt(config.aspect_ratios[idex][j]);
                float h = sk1 / sqrt(config.aspect_ratios[idex][j]);

                tempWHBox.boxw = w;
                tempWHBox.boxh = h;
                all_sizes.push_back(tempWHBox);

                tempWHBox.boxw = h;
                tempWHBox.boxh = w;
                all_sizes.push_back(tempWHBox);
            }

            tempWHBox.boxw = sk3;
            tempWHBox.boxh = sk3;
            all_sizes.push_back(tempWHBox);
        }

        // Cartesian product of grid cell coordinates for this feature map.
        for (int i = 0; i < config.feature_size[idex]; i++) {
            for (int j = 0; j < config.feature_size[idex]; j++) {
                mProductData[i * config.feature_size[idex] + j].x = i;
                mProductData[i * config.feature_size[idex] + j].y = j;
            }
        }

        int productLen = config.feature_size[idex] * config.feature_size[idex];

        // Center each box shape on each grid cell (normalized coordinates).
        for (int i = 0; i < productLen; i++) {
            for (int j = 0; j < all_sizes.size(); j++) {
                struct NormalBox tempBox;

                float cx = (mProductData[i].y + 0.5) / fk[idex];
                float cy = (mProductData[i].x + 0.5) / fk[idex];

                tempBox.y = cy;
                tempBox.x = cx;
                tempBox.h = all_sizes[j].boxh;
                tempBox.w = all_sizes[j].boxw;

                mDefaultBoxes.push_back(tempBox);
            }
        }
    }
}
/**
 * Decodes raw SSD regression outputs into absolute, clipped corner boxes.
 *
 * Each predicted (y, x, h, w) delta is applied to the matching default box
 * (center offset scaled by scale0, size via exp() scaled by scale1), then
 * clipped to [0, 1] and scaled up to the 300x300 model input space.
 *
 * @param boxes         Raw network deltas, one per default box.
 * @param decoded_boxes Output corner boxes (ymin/xmin/ymax/xmax), size count.
 * @param scale0        Center-offset variance (default 0.1).
 * @param scale1        Size variance (default 0.2).
 * @param count         Number of boxes to decode (default 1917).
 */
void SSDModelUtil::ssd_boxes_decode(const NormalBox *boxes,
                                    YXBoxes *const decoded_boxes, const float scale0,
                                    const float scale1, const int count) {
    if (mDefaultBoxes.size() == 0) {
        MS_PRINT("get default boxes error.");
        return;
    }
    for (int i = 0; i < count; ++i) {
        float cy = boxes[i].y * scale0 * mDefaultBoxes[i].h + mDefaultBoxes[i].y;
        float cx = boxes[i].x * scale0 * mDefaultBoxes[i].w + mDefaultBoxes[i].x;
        float h = exp(boxes[i].h * scale1) * mDefaultBoxes[i].h;
        float w = exp(boxes[i].w * scale1) * mDefaultBoxes[i].w;
        // Center/size -> corners, clipped to the unit square, then scaled to
        // model-input pixels.
        decoded_boxes[i].ymin = std::min(1.0f, std::max(0.0f, cy - h / 2)) * config.model_input_height;
        decoded_boxes[i].xmin = std::min(1.0f, std::max(0.0f, cx - w / 2)) * config.model_input_width;
        decoded_boxes[i].ymax = std::min(1.0f, std::max(0.0f, cy + h / 2)) * config.model_input_height;
        decoded_boxes[i].xmax = std::min(1.0f, std::max(0.0f, cx + w / 2)) * config.model_input_width;
    }
}
/**
 * Greedy non-maximum suppression over one class's candidate boxes.
 *
 * in_indexes must already be sorted by descending score. Each kept box
 * suppresses every later candidate whose IOU with it exceeds nmsThreshold.
 *
 * @param decoded_boxes All decoded boxes (indexed by the candidate indices).
 * @param scores        Per-box scores for the current class (unused here but
 *                      kept for interface symmetry with callers).
 * @param in_indexes    Candidate box indices, best score first.
 * @param out_indexes_p Output: indices of the kept boxes, in input order.
 * @param nmsThreshold  IOU above which a box is suppressed (default 0.6).
 * @param count         Total number of boxes, sizes the suppression mask.
 * @param max_results   Hard cap on kept boxes per class (default 100).
 */
void SSDModelUtil::nonMaximumSuppression(const YXBoxes *const decoded_boxes,
                                         const float *const scores,
                                         const std::vector<int> &in_indexes,
                                         std::vector<int> *out_indexes_p, const float nmsThreshold,
                                         const int count, const int max_results) {
    int nR = 0;  // number of results kept so far
    std::vector<int> &out_indexes = *out_indexes_p;
    std::vector<bool> del(count, false);  // suppression mask over all boxes
    for (size_t i = 0; i < in_indexes.size(); i++) {
        if (!del[in_indexes[i]]) {
            out_indexes.push_back(in_indexes[i]);
            if (++nR == max_results) {
                break;
            }
            // Suppress all remaining candidates that overlap this kept box.
            for (size_t j = i + 1; j < in_indexes.size(); j++) {
                const auto boxi = decoded_boxes[in_indexes[i]], boxj = decoded_boxes[in_indexes[j]];
                float a[4] = {boxi.xmin, boxi.ymin, boxi.xmax, boxi.ymax};
                float b[4] = {boxj.xmin, boxj.ymin, boxj.xmax, boxj.ymax};
                if (IOU(a, b) > nmsThreshold) {
                    del[in_indexes[j]] = true;
                }
            }
        }
    }
}
/**
 * Intersection-over-union of two axis-aligned boxes given as
 * [xmin, ymin, xmax, ymax], using the inclusive (+1) pixel convention for
 * widths and heights. Returns 0 when the boxes do not overlap.
 */
double SSDModelUtil::IOU(float r1[4], float r2[4]) {
    // Width/height of the intersection rectangle (inclusive pixel counts).
    const float inter_w = std::min(r1[2], r2[2]) - std::max(r1[0], r2[0]) + 1;
    const float inter_h = std::min(r1[3], r2[3]) - std::max(r1[1], r2[1]) + 1;

    // No overlap at all.
    if (inter_w <= 0 || inter_h <= 0) {
        return 0;
    }

    // Products are computed in float (then widened) to match the original
    // rounding behavior exactly.
    const double inter_area = inter_w * inter_h;
    const double union_area =
            (r1[2] - r1[0] + 1) * (r1[3] - r1[1] + 1) +
            (r2[2] - r2[0] + 1) * (r2[3] - r2[1] + 1) - inter_area;

    const double ratio = inter_area / union_area;
    return (ratio > 0) ? ratio : 0;
}

View File

@ -0,0 +1,202 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HIMINDSPOREDEMO_SSD_UTIL_H
#define HIMINDSPOREDEMO_SSD_UTIL_H
#include <string>
#include <vector>
// Post-processing helper for the SSD object-detection model: decodes the two
// raw output tensors (class scores, box deltas) into human-readable,
// image-space detections.
class SSDModelUtil {
 public:
    // Constructor.
    SSDModelUtil(int srcImageWidth, int srcImgHeight);

    /**
     * Return the SSD model post-processing result.
     * @param branchScores  Raw class scores, [1917][81] row-major.
     * @param branchBoxData Raw box deltas, [1917][4] as (y, x, h, w).
     * @return Detections encoded as "imgid_label_score_xmin_ymin_xmax_ymax;..."
     */
    std::string getDecodeResult(float *branchScores, float *branchBoxData);

    // ============= variables =============.
    // Original input image size, used to rescale boxes out of the 300x300
    // model space.
    int inputImageHeight;
    int inputImageWidth;

    // Raw box prediction: center (y, x) plus size (h, w).
    struct NormalBox {
        float y;
        float x;
        float h;
        float w;
    };
    // Decoded corner box.
    struct YXBoxes {
        float ymin;
        float xmin;
        float ymax;
        float xmax;
    };
    // Grid-cell coordinate pair used when generating default boxes.
    struct Product {
        int x;
        int y;
    };
    // Width/height pair of a candidate default-box shape.
    struct WHBox {
        float boxw;
        float boxh;
    };

 private:
    std::vector<struct NormalBox> mDefaultBoxes;  // anchors, filled by getDefaultBoxes()

    // Pre-computes the SSD default (anchor) boxes into mDefaultBoxes.
    void getDefaultBoxes();

    // Applies predicted deltas to the default boxes; see ssd_util.cpp.
    void ssd_boxes_decode(const NormalBox *boxes,
                          YXBoxes *const decoded_boxes,
                          const float scale0 = 0.1, const float scale1 = 0.2,
                          const int count = 1917);

    // Greedy per-class NMS; in_indexes must be sorted by descending score.
    void nonMaximumSuppression(const YXBoxes *const decoded_boxes, const float *const scores,
                               const std::vector<int> &in_indexes, std::vector<int> *out_indexes_p,
                               const float nmsThreshold = 0.6,
                               const int count = 1917, const int max_results = 100);

    // Intersection-over-union of two [xmin, ymin, xmax, ymax] boxes.
    double IOU(float r1[4], float r2[4]);

    // ============= variables =============.
    // Static SSD network/anchor configuration (300x300 input, 6 feature maps).
    struct network {
        int model_input_height = 300;
        int model_input_width = 300;

        int num_default[6] = {3, 6, 6, 6, 6, 6};     // default boxes per cell, per map
        int feature_size[6] = {19, 10, 5, 3, 2, 1};  // feature-map grid sizes
        // NOTE(review): min_scale is double while max_scale is float —
        // presumably unintentional; confirm before normalizing the types.
        double min_scale = 0.2;
        float max_scale = 0.95;
        float steps[6] = {16, 32, 64, 100, 150, 300};
        float prior_scaling[2] = {0.1, 0.2};
        float gamma = 2.0;
        float alpha = 0.75;
        int aspect_ratios[6][2] = {{2, 0},
                                   {2, 3},
                                   {2, 3},
                                   {2, 3},
                                   {2, 3},
                                   {2, 3}};
    } config;

    // Per-class score thresholds (index 0 = background, never used).
    float g_thres_map[81] = {0, 0.635, 0.627, 0.589, 0.585, 0.648, 0.664, 0.655,
                             0.481, 0.529, 0.611, 0.641, 0.774, 0.549, 0.513, 0.652,
                             0.552, 0.590, 0.650, 0.575, 0.583, 0.650, 0.656, 0.696,
                             0.653, 0.438, 0.515, 0.459, 0.561, 0.545, 0.635, 0.540,
                             0.560, 0.721, 0.544, 0.548, 0.511, 0.611, 0.592, 0.542,
                             0.512, 0.635, 0.531, 0.437, 0.525, 0.445, 0.484, 0.546,
                             0.490, 0.581, 0.566, 0.516, 0.445, 0.541, 0.613, 0.560,
                             0.483, 0.509, 0.464, 0.543, 0.538, 0.490, 0.576, 0.617,
                             0.577, 0.595, 0.640, 0.585, 0.598, 0.592, 0.514, 0.397,
                             0.592, 0.504, 0.548, 0.642, 0.581, 0.497, 0.545, 0.154,
                             0.580,
    };

    // Human-readable class names, aligned with the score tensor's 81 classes
    // (index 0 = background).
    std::string label_classes[81] = {
            {"background"},
            {"human"},
            {"bike"},
            {"automobile"},
            {"motorbike"},
            {"aircraft"},
            {"motorbus"},
            {"train"},
            {"motortruck"},
            {"boat"},
            {"traffic signal"},
            {"fireplug"},
            {"stop sign"},
            {"parking meter"},
            {"seat"},
            {"bird"},
            {"cat"},
            {"dog"},
            {"horse"},
            {"sheep"},
            {"cow"},
            {"elephant"},
            {"bear"},
            {"zebra"},
            {"giraffe"},
            {"knapsack"},
            {"bumbershoot"},
            {"purse"},
            {"neckwear"},
            {"traveling bag"},
            {"frisbee"},
            {"skis"},
            {"snowboard"},
            {"sports ball"},
            {"kite"},
            {"baseball bat"},
            {"baseball glove"},
            {"skateboard"},
            {"surfboard"},
            {"tennis racket"},
            {"bottle"},
            {"wine glass"},
            {"cup"},
            {"fork"},
            {"knife"},
            {"spoon"},
            {"bowl"},
            {"banana"},
            {"apple"},
            {"sandwich"},
            {"orange"},
            {"broccoli"},
            {"carrot"},
            {"hot dog"},
            {"pizza"},
            {"donut"},
            {"cake"},
            {"chair"},
            {"couch"},
            {"houseplant"},
            {"bed"},
            {"dinner table"},
            {"toilet"},
            {"television"},
            {"notebook computer"},
            {"mouse"},
            {"remote"},
            {"keyboard"},
            {"smartphone"},
            {"microwave"},
            {"oven"},
            {"toaster"},
            {"water sink"},
            {"fridge"},
            {"book"},
            {"bell"},
            {"vase"},
            {"shears"},
            {"toy bear"},
            {"hair drier"},
            {"toothbrush"}
    };
};
#endif

View File

@ -0,0 +1,95 @@
package com.mindspore.hiobject;
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.provider.MediaStore;
import android.view.View;
import android.widget.Button;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;

import com.mindspore.hiobject.objectdetect.CameraActivity;
import com.mindspore.hiobject.objectdetect.DealDataActivity;
import com.mindspore.hiobject.objectdetect.PhotoActivity;
/**
 * Launcher screen: lets the user pick a gallery photo or open the live
 * camera, requesting the required runtime permissions first.
 */
public class SplashActivity extends AppCompatActivity implements View.OnClickListener {

    private static final int RC_CHOOSE_PHOTO = 1;
    private static final int REQUEST_CAMERA_PERMISSION = 2;

    private Button btnPhoto, btnCamera, btnDealData;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_splash);

        btnPhoto = findViewById(R.id.btn_photo);
        btnCamera = findViewById(R.id.btn_camera);
        btnDealData = findViewById(R.id.btn_deal);
        btnDealData.setVisibility(View.GONE);

        btnPhoto.setOnClickListener(this);
        btnCamera.setOnClickListener(this);
        btnDealData.setOnClickListener(this);
    }

    @Override
    public void onClick(View view) {
        if (R.id.btn_photo == view.getId()) {
            ActivityCompat.requestPermissions(this,
                    new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,
                            Manifest.permission.READ_PHONE_STATE}, RC_CHOOSE_PHOTO);
        } else if (R.id.btn_camera == view.getId()) {
            ActivityCompat.requestPermissions(this,
                    new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,
                            Manifest.permission.READ_PHONE_STATE, Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
        } else if (R.id.btn_deal == view.getId()) {
            Intent intent = new Intent(SplashActivity.this, DealDataActivity.class);
            startActivity(intent);
        }
    }

    /**
     * Permission request callback.
     * <p>
     * Unlike the original implementation, this checks {@code grantResults}
     * before continuing — previously the flow proceeded even when the user
     * denied the permissions.
     */
    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (!allPermissionsGranted(grantResults)) {
            return;  // at least one permission denied (or request cancelled)
        }
        if (RC_CHOOSE_PHOTO == requestCode) {
            choosePhoto();
        } else if (REQUEST_CAMERA_PERMISSION == requestCode) {
            chooseCamera();
        }
    }

    /** Returns true only when every requested permission was granted. */
    private static boolean allPermissionsGranted(int[] grantResults) {
        // An empty array means the request was interrupted/cancelled.
        if (grantResults == null || grantResults.length == 0) {
            return false;
        }
        for (int result : grantResults) {
            if (result != PackageManager.PERMISSION_GRANTED) {
                return false;
            }
        }
        return true;
    }

    /** Opens the system image picker; result handled in onActivityResult. */
    private void choosePhoto() {
        Intent intentToPickPic = new Intent(Intent.ACTION_PICK, null);
        intentToPickPic.setDataAndType(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, "image/*");
        startActivityForResult(intentToPickPic, RC_CHOOSE_PHOTO);
    }

    /** Launches the live-camera detection screen. */
    private void chooseCamera() {
        Intent intent = new Intent(SplashActivity.this, CameraActivity.class);
        startActivity(intent);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (RC_CHOOSE_PHOTO == requestCode && null != data && null != data.getData()) {
            // Forward the picked image Uri to the photo-detection screen.
            Intent intent = new Intent(SplashActivity.this, PhotoActivity.class);
            intent.setData(data.getData());
            startActivity(intent);
        }
    }
}

View File

@ -0,0 +1,185 @@
package com.mindspore.hiobject.help;
import android.annotation.SuppressLint;
import android.content.ContentUris;
import android.content.Context;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.provider.DocumentsContract;
import android.provider.MediaStore;
import java.io.IOException;
/**
 * Utility helpers for picked images: resolving a content {@link Uri} to a
 * file-system path, reading EXIF rotation, and rotating a bitmap.
 */
public class ImageDegreeHelper {
    /**
     * Resolves a content Uri to an absolute file path. Designed for
     * Android 4.4 (KitKat) and above, where older resolution approaches no
     * longer work.
     */
    @SuppressLint("NewApi")
    public static String getPath(final Context context, final Uri uri) {
        final boolean isKitKat = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT;
        // DocumentProvider
        if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) {
            // ExternalStorageProvider
            if (isExternalStorageDocument(uri)) {
                final String docId = DocumentsContract.getDocumentId(uri);
                final String[] split = docId.split(":");
                final String type = split[0];
                if ("primary".equalsIgnoreCase(type)) {
                    return Environment.getExternalStorageDirectory() + "/" + split[1];
                }
                // TODO handle non-primary volumes
            }
            // DownloadsProvider
            else if (isDownloadsDocument(uri)) {
                final String id = DocumentsContract.getDocumentId(uri);
                final Uri contentUri = ContentUris.withAppendedId(
                        Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
                return getDataColumn(context, contentUri, null, null);
            }
            // MediaProvider
            else if (isMediaDocument(uri)) {
                final String docId = DocumentsContract.getDocumentId(uri);
                final String[] split = docId.split(":");
                final String type = split[0];
                Uri contentUri = null;
                if ("image".equals(type)) {
                    contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
                } else if ("video".equals(type)) {
                    contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
                } else if ("audio".equals(type)) {
                    contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
                }
                final String selection = "_id=?";
                final String[] selectionArgs = new String[]{split[1]};
                return getDataColumn(context, contentUri, selection, selectionArgs);
            }
        }
        // MediaStore (and general)
        else if ("content".equalsIgnoreCase(uri.getScheme())) {
            return getDataColumn(context, uri, null, null);
        }
        // File
        else if ("file".equalsIgnoreCase(uri.getScheme())) {
            return uri.getPath();
        }
        return null;
    }

    /**
     * Get the value of the data column for this Uri. This is useful for
     * MediaStore Uris, and other file-based ContentProviders.
     *
     * @param context The context.
     * @param uri The Uri to query.
     * @param selection (Optional) Filter used in the query.
     * @param selectionArgs (Optional) Selection arguments used in the query.
     * @return The value of the _data column, which is typically a file path.
     */
    public static String getDataColumn(Context context, Uri uri, String selection,
                                       String[] selectionArgs) {
        Cursor cursor = null;
        final String column = "_data";
        final String[] projection = {column};
        try {
            cursor = context.getContentResolver().query(uri, projection, selection, selectionArgs,
                    null);
            if (cursor != null && cursor.moveToFirst()) {
                final int column_index = cursor.getColumnIndexOrThrow(column);
                return cursor.getString(column_index);
            }
        } finally {
            // Always release the cursor, even when the query throws.
            if (cursor != null)
                cursor.close();
        }
        return null;
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is ExternalStorageProvider.
     */
    public static boolean isExternalStorageDocument(Uri uri) {
        return "com.android.externalstorage.documents".equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is DownloadsProvider.
     */
    public static boolean isDownloadsDocument(Uri uri) {
        return "com.android.providers.downloads.documents".equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is MediaProvider.
     */
    public static boolean isMediaDocument(Uri uri) {
        return "com.android.providers.media.documents".equals(uri.getAuthority());
    }

    /**
     * Reads the rotation angle recorded in a photo's EXIF metadata.
     *
     * @param path photo file path
     * @return rotation in degrees: 0, 90, 180 or 270 (0 on read failure)
     */
    public static int readPictureDegree(String path) {
        int degree = 0;
        try {
            ExifInterface exifInterface = new ExifInterface(path);
            int orientation = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
            switch (orientation) {
                case ExifInterface.ORIENTATION_ROTATE_90:
                    degree = 90;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_180:
                    degree = 180;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_270:
                    degree = 270;
                    break;
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return degree;
    }

    /**
     * Rotates a bitmap by the given angle. On success the source bitmap is
     * recycled and a new rotated bitmap is returned; on OutOfMemoryError the
     * original bitmap is returned unchanged.
     *
     * @param angle  clockwise rotation in degrees
     * @param bitmap source bitmap
     * @return rotated bitmap (or the original when rotation failed)
     */
    public static Bitmap rotaingImageView(int angle, Bitmap bitmap) {
        Bitmap returnBm = null;
        // Build a rotation matrix for the given angle.
        Matrix matrix = new Matrix();
        matrix.postRotate(angle);
        try {
            // Rotate the original bitmap with the matrix to obtain a new bitmap.
            returnBm = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
        } catch (OutOfMemoryError e) {
            // Deliberate best-effort: fall through and return the original below.
        }
        if (returnBm == null) {
            returnBm = bitmap;
        }
        if (bitmap != returnBm) {
            // Free the source bitmap once the rotated copy exists.
            bitmap.recycle();
        }
        return returnBm;
    }
}

View File

@ -0,0 +1,159 @@
package com.mindspore.hiobject.help;
import android.text.TextUtils;
import java.util.ArrayList;
import java.util.List;
/**
 * One object detected by the native detector, parsed from the result string
 * returned over JNI. A full result is a ';'-separated list of detections,
 * each detection a '_'-separated record:
 * imgID _ objectName _ score _ left _ top _ right _ bottom.
 */
public class RecognitionObjectBean {
    private String rectID;      // 1-based sequence number of this box
    private String imgID;
    private String objectName;
    private float score;
    private float left;
    private float top;
    private float right;
    private float bottom;

    private RecognitionObjectBean(Builder builder) {
        this.rectID = builder.rectID;
        this.imgID = builder.imgID;
        this.objectName = builder.objectName;
        this.score = builder.score;
        this.left = builder.left;
        this.top = builder.top;
        this.right = builder.right;
        this.bottom = builder.bottom;
    }

    /** Fluent builder for {@link RecognitionObjectBean}. */
    public static class Builder {
        private String rectID;
        private String imgID;
        private String objectName;
        private float score;
        private float left;
        private float top;
        private float right;
        private float bottom;

        public RecognitionObjectBean build() {
            return new RecognitionObjectBean(this);
        }

        public Builder setRectID(String rectID) {
            this.rectID = rectID;
            return this;
        }

        public Builder setImgID(String imgID) {
            this.imgID = imgID;
            return this;
        }

        public Builder setObjectName(String objectName) {
            this.objectName = objectName;
            return this;
        }

        public Builder setScore(float score) {
            this.score = score;
            return this;
        }

        public Builder setLeft(float left) {
            this.left = left;
            return this;
        }

        public Builder setTop(float top) {
            this.top = top;
            return this;
        }

        public Builder setRight(float right) {
            this.right = right;
            return this;
        }

        public Builder setBottom(float bottom) {
            this.bottom = bottom;
            return this;
        }
    }

    public String getImgID() {
        return imgID;
    }

    public String getRectID() {
        return rectID;
    }

    public String getObjectName() {
        return objectName;
    }

    public float getScore() {
        return score;
    }

    public float getLeft() {
        return left;
    }

    public float getTop() {
        return top;
    }

    public float getRight() {
        return right;
    }

    public float getBottom() {
        return bottom;
    }

    /**
     * Parses the raw detection string into a list of beans.
     *
     * @param result ';'-separated detections, each '_'-separated
     * @return the parsed list, or null when the input is empty (original contract)
     */
    public static List<RecognitionObjectBean> getRecognitionList(String result) {
        if (TextUtils.isEmpty(result)) {
            // Preserve the original contract: empty input yields null, not [].
            return null;
        }
        String[] resultArray = result.split(";");
        List<RecognitionObjectBean> list = new ArrayList<>(resultArray.length);
        for (int i = 0; i < resultArray.length; i++) {
            String[] singleResult = resultArray[i].split("_");
            // Read each field once; getData() returns null for missing/empty fields.
            String imgID = getData(0, singleResult);
            String objectName = getData(1, singleResult);
            String score = getData(2, singleResult);
            String left = getData(3, singleResult);
            String top = getData(4, singleResult);
            String right = getData(5, singleResult);
            String bottom = getData(6, singleResult);
            // NOTE(review): Float.parseFloat still throws on malformed numeric
            // fields, as in the original — confirm the native side only emits
            // valid floats.
            RecognitionObjectBean bean = new RecognitionObjectBean.Builder()
                    .setRectID(String.valueOf(i + 1))
                    .setImgID(null != imgID ? imgID : "")
                    .setObjectName(null != objectName ? objectName : "")
                    .setScore(null != score ? Float.parseFloat(score) : 0)
                    .setLeft(null != left ? Float.parseFloat(left) : 0)
                    .setTop(null != top ? Float.parseFloat(top) : 0)
                    .setRight(null != right ? Float.parseFloat(right) : 0)
                    .setBottom(null != bottom ? Float.parseFloat(bottom) : 0)
                    .build();
            list.add(bean);
        }
        return list;
    }

    /**
     * Safely reads one field of a split record.
     *
     * @param index        field position within the record
     * @param singleResult the '_'-split record
     * @return the field value, or null when the index is out of range or the field is empty
     */
    private static String getData(int index, String[] singleResult) {
        // Bug fix: the original bound check used "index > singleResult.length",
        // letting index == length through and crashing with
        // ArrayIndexOutOfBoundsException on records with missing fields.
        if (index < 0 || index >= singleResult.length) {
            return null;
        }
        if (!TextUtils.isEmpty(singleResult[index])) {
            return singleResult[index];
        }
        return null;
    }
}

View File

@ -0,0 +1,118 @@
package com.mindspore.hiobject.help;
import android.content.Context;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.util.Log;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.HashMap;
/**
 * Thin JNI bridge to the MindSpore Lite object-detection model
 * (native library "mlkit-label-MS").
 */
public class TrackingMobile {
    private final static String TAG = "TrackingMobile";

    static {
        // Load the JNI library once per process.
        try {
            System.loadLibrary("mlkit-label-MS");
            // Fix: the original log named a different library ("libiMindSpore.so")
            // than the one actually loaded above.
            Log.i(TAG, "load libmlkit-label-MS.so successfully.");
        } catch (UnsatisfiedLinkError e) {
            Log.e(TAG, "UnsatisfiedLinkError " + e.getMessage());
        }
    }

    public static HashMap<Integer, String> synset_words_map = new HashMap<>();

    public static float[] threshold = new float[494];

    // Opaque handle to the native session; 0 until loadModelFromBuf() succeeds.
    private long netEnv = 0;

    private final Context mActivity;

    public TrackingMobile(Context activity) throws FileNotFoundException {
        this.mActivity = activity;
    }

    /**
     * Loads the model on the native side.
     *
     * @param assetManager asset manager used by the native code
     * @param buffer       direct ByteBuffer holding the .ms model bytes
     * @param numThread    number of inference threads
     * @return native handle to the loaded model
     */
    public native long loadModel(AssetManager assetManager, ByteBuffer buffer, int numThread);

    /**
     * Runs inference on the native side.
     *
     * @param netEnv native handle returned by {@link #loadModel}
     * @param img    current frame
     * @return serialized detection result string
     */
    public native String runNet(long netEnv, Bitmap img);

    /**
     * Releases the native model.
     *
     * @param netEnv native handle returned by {@link #loadModel}
     * @return unload status
     */
    public native boolean unloadModel(long netEnv);

    /**
     * Reads the bundled SSD model from assets and loads it on the native side.
     *
     * @param assetManager asset manager of the host context
     * @return true when the model buffer was read and handed to JNI, false otherwise
     */
    public boolean loadModelFromBuf(AssetManager assetManager) {
        String modelPath = "model/ssd.ms";
        ByteBuffer buffer = loadModelFile(modelPath);
        if (buffer == null) {
            // Fix: the original passed a null buffer straight into JNI when the
            // asset was missing/unreadable, crashing in native code.
            Log.e(TAG, "loadModelFromBuf: model buffer is null");
            return false;
        }
        netEnv = loadModel(assetManager, buffer, 2);
        return true;
    }

    /**
     * Runs the detector on one frame.
     *
     * @param img frame to run inference on
     * @return serialized detection result string from the native side
     */
    public String MindSpore_runnet(Bitmap img) {
        String ret_str = runNet(netEnv, img);
        return ret_str;
    }

    /**
     * Releases the native model.
     *
     * @return true (kept for compatibility with the original signature)
     */
    public boolean unloadModel() {
        unloadModel(netEnv);
        return true;
    }

    /**
     * Reads a model asset fully into a direct ByteBuffer.
     *
     * @param modelPath asset-relative path of the .ms model file
     * @return direct buffer positioned at 0, or null on failure
     */
    public ByteBuffer loadModelFile(String modelPath) {
        InputStream is = null;
        try {
            is = mActivity.getAssets().open(modelPath);
            // NOTE(review): available() reports the full length for asset
            // streams; the read loop below guards against short reads anyway.
            byte[] bytes = new byte[is.available()];
            int offset = 0;
            while (offset < bytes.length) {
                int n = is.read(bytes, offset, bytes.length - offset);
                if (n < 0) {
                    break;
                }
                offset += n;
            }
            ByteBuffer buffer = ByteBuffer.allocateDirect(bytes.length);
            buffer.put(bytes);
            // Rewind so the buffer is handed over positioned at the start.
            buffer.rewind();
            return buffer;
        } catch (Exception e) {
            Log.d("loadModelFile", " Exception occur ");
            e.printStackTrace();
        } finally {
            // Fix: the original never closed the asset stream (resource leak).
            if (is != null) {
                try {
                    is.close();
                } catch (Exception ignored) {
                }
            }
        }
        return null;
    }
}

View File

@ -0,0 +1,78 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mindspore.hiobject.objectdetect;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;
/**
 * A {@link TextureView} that can be adjusted to a specified aspect ratio.
 */
public class AutoFitTextureView extends TextureView {

    private int mRatioWidth = 0;
    private int mRatioHeight = 0;

    public AutoFitTextureView(Context context) {
        this(context, null);
    }

    public AutoFitTextureView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    /**
     * Sets the aspect ratio for this view. Only the ratio of the two values
     * matters: setAspectRatio(2, 3) and setAspectRatio(4, 6) are equivalent.
     *
     * @param width  relative horizontal size
     * @param height relative vertical size
     */
    public void setAspectRatio(int width, int height) {
        if (width < 0 || height < 0) {
            throw new IllegalArgumentException("Size cannot be negative.");
        }
        mRatioWidth = width;
        mRatioHeight = height;
        requestLayout();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        final int w = MeasureSpec.getSize(widthMeasureSpec);
        final int h = MeasureSpec.getSize(heightMeasureSpec);
        if (mRatioWidth == 0 || mRatioHeight == 0) {
            // No ratio requested yet: keep the measured size unchanged.
            setMeasuredDimension(w, h);
            return;
        }
        if (w > h * mRatioWidth / mRatioHeight) {
            // Width is the binding dimension; derive height from the ratio.
            setMeasuredDimension(w, w * mRatioHeight / mRatioWidth);
        } else {
            // Height is the binding dimension; derive width from the ratio.
            setMeasuredDimension(h * mRatioWidth / mRatioHeight, h);
        }
    }
}

View File

@ -0,0 +1,48 @@
package com.mindspore.hiobject.objectdetect;
import android.os.Bundle;
import android.view.WindowManager;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.hiobject.R;
/**
 * Entry activity hosting the object-detection camera preview.
 *
 * Feeds camera frames through JNI to exercise MindSpore model loading and
 * inference.
 */
public class CameraActivity extends AppCompatActivity {

    private final String TAG = "CameraActivity";

    private static final String BUNDLE_FRAGMENTS_KEY = "android:support:fragments";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera);
        final boolean dropFragmentState = savedInstanceState != null && clearFragmentsTag();
        if (dropFragmentState) {
            // Drop any saved fragment state so the fragment is rebuilt fresh.
            savedInstanceState.remove(BUNDLE_FRAGMENTS_KEY);
        }
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        getSupportFragmentManager().beginTransaction().replace(R.id.container, CameraFragment.newInstance()).commit();
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        final boolean dropFragmentState = outState != null && clearFragmentsTag();
        if (dropFragmentState) {
            // Do not persist fragment state across destruction.
            outState.remove(BUNDLE_FRAGMENTS_KEY);
        }
    }

    protected boolean clearFragmentsTag() {
        return true;
    }
}

View File

@ -0,0 +1,899 @@
package com.mindspore.hiobject.objectdetect;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.text.TextUtils;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.fragment.app.DialogFragment;
import androidx.fragment.app.Fragment;
import com.mindspore.hiobject.R;
import com.mindspore.hiobject.help.RecognitionObjectBean;
import com.mindspore.hiobject.help.TrackingMobile;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import static com.mindspore.hiobject.help.RecognitionObjectBean.getRecognitionList;
public class CameraFragment extends Fragment {
/**
* Conversion from screen rotation to JPEG orientation.
*/
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
private static final String FRAGMENT_DIALOG = "dialog";
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
/**
* Tag for the {@link Log}.
*/
private static final String TAG = "CameraFragment";
/**
* Camera state: Showing ic_launcher preview.
*/
private static final int STATE_PREVIEW = 0;
/**
* Camera state: Waiting for the focus to be locked.
*/
private static final int STATE_WAITING_LOCK = 1;
/**
* Camera state: Waiting for the exposure to be precapture state.
*/
private static final int STATE_WAITING_PRECAPTURE = 2;
/**
* Camera state: Waiting for the exposure state to be something other than precapture.
*/
private static final int STATE_WAITING_NON_PRECAPTURE = 3;
/**
* Camera state: Picture was taken.
*/
private static final int STATE_PICTURE_TAKEN = 4;
/**
* Max preview width that is guaranteed by Camera2 API
*/
private static final int MAX_PREVIEW_WIDTH = 1920;
/**
* Max preview height that is guaranteed by Camera2 API
*/
private static final int MAX_PREVIEW_HEIGHT = 1080;
/**
 * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a
 * {@link TextureView}: the camera is opened as soon as the surface exists, and
 * the preview transform is recomputed whenever the surface is resized.
 */
private final TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
        openCamera(width, height);
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
        configureTransform(width, height);
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
        // Returning true lets the TextureView release the SurfaceTexture itself.
        return true;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture texture) {
        // A new preview frame arrived; nothing to do per-frame here.
    }
};
/**
* ID of the current {@link CameraDevice}.
*/
private String mCameraId;
/**
* An {@link AutoFitTextureView} for ic_launcher preview.
*/
private AutoFitTextureView mTextureView;
/**
* An {@link ObjectRectView} for Object_draw_Rect_view.
*/
private ObjectRectView mObjectRectView;
/**
* A {@link CameraCaptureSession } for ic_launcher preview.
*/
private CameraCaptureSession mCaptureSession;
/**
* A reference to the opened {@link CameraDevice}.
*/
private CameraDevice mCameraDevice;
/**
* The {@link Size} of ic_launcher preview.
*/
private Size mPreviewSize;
/**
 * {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state.
 * The open/close semaphore is released in every branch so openCamera() can
 * never stay blocked.
 */
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {

    @Override
    public void onOpened(@NonNull CameraDevice cameraDevice) {
        // The camera is open: remember the device and start the preview session.
        mCameraOpenCloseLock.release();
        mCameraDevice = cameraDevice;
        createCameraPreviewSession();
    }

    @Override
    public void onDisconnected(@NonNull CameraDevice cameraDevice) {
        mCameraOpenCloseLock.release();
        cameraDevice.close();
        mCameraDevice = null;
    }

    @Override
    public void onError(@NonNull CameraDevice cameraDevice, int error) {
        // Unrecoverable camera error: close the device and finish the activity.
        mCameraOpenCloseLock.release();
        cameraDevice.close();
        mCameraDevice = null;
        if (null != getActivity()) {
            getActivity().finish();
        }
    }
};
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread mBackgroundThread, mPreBackgroundThread;
/**
* A {@link Handler} for running tasks in the background.
*/
private Handler mBackgroundHandler, mPreBackgroundHandler;
/**
* An {@link ImageReader} that handles still image capture.
*/
private ImageReader mImageReader;
/***
* A {@link TrackingMobile} for data tracking
*/
private TrackingMobile mTrackingMobile;
/**
 * This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
 * still image is ready to be saved.
 * NOTE(review): frames are acquired and immediately closed here — detection
 * runs on TextureView bitmaps instead (see periodicClassify) — so this
 * callback appears to only drain the reader's queue; confirm intent.
 */
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {

    @Override
    public void onImageAvailable(ImageReader reader) {
        try {
            // Acquire and close right away so the ImageReader queue never fills.
            Image image = reader.acquireNextImage();
            image.close();
        } catch (Exception e) {
            Log.e(TAG, "onImageAvailable: " + e.toString());
        }
    }
};
/**
* {@link CaptureRequest.Builder} for the ic_launcher preview
*/
private CaptureRequest.Builder mPreviewRequestBuilder;
/**
* {@link CaptureRequest} generated by {@link #mPreviewRequestBuilder}
*/
private CaptureRequest mPreviewRequest;
/**
* The current state of ic_launcher state for taking pictures.
*
* @see #mCaptureCallback
*/
private int mState = STATE_PREVIEW;
/**
* A {@link Semaphore} to prevent the app from exiting before closing the ic_launcher.
*/
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
* Whether the current ic_launcher device supports Flash or not.
*/
private boolean mFlashSupported;
/**
* Orientation of the ic_launcher sensor
*/
private int mSensorOrientation;
/**
 * A {@link CameraCaptureSession.CaptureCallback} that handles events related to JPEG capture.
 * Drives the still-capture state machine held in {@link #mState}:
 * preview -> waiting for focus lock -> precapture -> picture taken.
 */
private CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {

    // Advances the state machine from a (partial or total) capture result.
    private void process(CaptureResult result) {
        switch (mState) {
            case STATE_PREVIEW: {
                // We have nothing to do when the camera preview is working normally.
                break;
            }
            case STATE_WAITING_LOCK: {
                Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                if (afState == null) {
                    // No AF state reported: capture immediately.
                    captureStillPicture();
                } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
                        CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
                    // CONTROL_AE_STATE can be null on some devices
                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                    if (aeState == null ||
                            aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
                        mState = STATE_PICTURE_TAKEN;
                        captureStillPicture();
                    } else {
                        // Exposure not converged yet: trigger precapture first.
                        runPrecaptureSequence();
                    }
                }
                break;
            }
            case STATE_WAITING_PRECAPTURE: {
                // CONTROL_AE_STATE can be null on some devices
                Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                if (aeState == null ||
                        aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ||
                        aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
                    mState = STATE_WAITING_NON_PRECAPTURE;
                }
                break;
            }
            case STATE_WAITING_NON_PRECAPTURE: {
                // CONTROL_AE_STATE can be null on some devices
                Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                    // Precapture finished: take the picture.
                    mState = STATE_PICTURE_TAKEN;
                    captureStillPicture();
                }
                break;
            }
        }
    }

    @Override
    public void onCaptureProgressed(@NonNull CameraCaptureSession session,
                                    @NonNull CaptureRequest request,
                                    @NonNull CaptureResult partialResult) {
        process(partialResult);
    }

    @Override
    public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                                   @NonNull CaptureRequest request,
                                   @NonNull TotalCaptureResult result) {
        process(result);
    }
};
/**
 * Picks the best preview size from {@code choices}: the smallest size that is
 * at least as large as the texture view, no larger than the max bounds, and
 * whose aspect ratio matches {@code aspectRatio}; failing that, the largest
 * matching size below the view size; failing that, the first choice.
 *
 * @param choices           sizes supported by the camera for the intended output class
 * @param textureViewWidth  width of the texture view relative to sensor coordinates
 * @param textureViewHeight height of the texture view relative to sensor coordinates
 * @param maxWidth          maximum width that can be chosen
 * @param maxHeight         maximum height that can be chosen
 * @param aspectRatio       required aspect ratio
 * @return the optimal {@code Size}, or an arbitrary one if none were big enough
 */
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth, int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
    final int ratioW = aspectRatio.getWidth();
    final int ratioH = aspectRatio.getHeight();
    // Candidates at least as big as the preview surface.
    List<Size> bigEnough = new ArrayList<>();
    // Candidates smaller than the preview surface.
    List<Size> notBigEnough = new ArrayList<>();
    for (Size candidate : choices) {
        final boolean withinBounds = candidate.getWidth() <= maxWidth && candidate.getHeight() <= maxHeight;
        final boolean ratioMatches = candidate.getHeight() == candidate.getWidth() * ratioH / ratioW;
        if (!withinBounds || !ratioMatches) {
            continue;
        }
        if (candidate.getWidth() >= textureViewWidth && candidate.getHeight() >= textureViewHeight) {
            bigEnough.add(candidate);
        } else {
            notBigEnough.add(candidate);
        }
    }
    // Prefer the smallest size that covers the surface; otherwise the largest
    // that does not.
    if (!bigEnough.isEmpty()) {
        return Collections.min(bigEnough, new CompareSizesByArea());
    }
    if (!notBigEnough.isEmpty()) {
        return Collections.max(notBigEnough, new CompareSizesByArea());
    }
    Log.e(TAG, "Couldn't find any suitable preview size");
    return choices[0];
}
/** Factory method used by the host activity to create this fragment. */
public static CameraFragment newInstance() {
    return new CameraFragment();
}
/** Inflates the camera preview layout for this fragment. */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    return inflater.inflate(R.layout.fragment_camera, container, false);
}
/** Grabs the preview TextureView and the detection-box overlay from the layout. */
@Override
public void onViewCreated(final View view, Bundle savedInstanceState) {
    mTextureView = view.findViewById(R.id.texture);
    mObjectRectView = view.findViewById(R.id.objRectView);
}
/** Releases the native detection model when the fragment leaves its activity. */
@Override
public void onDetach() {
    super.onDetach();
    if (mTrackingMobile != null) {
        mTrackingMobile.unloadModel();
    }
}
/**
 * Creates the JNI bridge and loads the detection model from assets.
 * Bug fix: if the TrackingMobile constructor throws, the original fell
 * through and dereferenced a null mTrackingMobile on the next line; now it
 * logs and bails out instead.
 */
@Override
public void onActivityCreated(Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
    try {
        mTrackingMobile = new TrackingMobile(getActivity());
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }
    if (mTrackingMobile == null) {
        Log.e(TAG, "TrackingMobile initialization failed; skipping model load");
        return;
    }
    boolean ret = mTrackingMobile.loadModelFromBuf(getActivity().getAssets());
    Log.d(TAG, "TrackingMobile loadModelFromBuf: " + ret);
}
/**
 * Restarts the worker threads and (re)opens the camera. If the
 * SurfaceTexture survived (e.g. screen off/on), the camera is opened
 * directly; otherwise we wait for the surface via mSurfaceTextureListener.
 */
@Override
public void onResume() {
    super.onResume();
    initChildThread();
    // When the screen is turned off and turned back on, the SurfaceTexture is already
    // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
    // a camera and start the preview from here (otherwise, we wait until the surface is ready
    // in the SurfaceTextureListener).
    if (mTextureView.isAvailable()) {
        openCamera(mTextureView.getWidth(), mTextureView.getHeight());
        Log.d(TAG, "isAvailable: " + mTextureView.getWidth() + "--" + mTextureView.getHeight());
    } else {
        Log.d(TAG, "notAvailable: " + mTextureView.getWidth() + "--" + mTextureView.getHeight());
        mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
    }
}
/** Closes the camera and stops the worker threads before the UI goes away. */
@Override
public void onPause() {
    closeCamera();
    stopBackgroundThread();
    super.onPause();
}
/**
 * Sets up member variables related to the camera: selects the back-facing
 * camera, computes the preview size and sensor orientation, creates the
 * ImageReader and detects flash support.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
@SuppressWarnings("SuspiciousNameCombination")
private void setUpCameraOutputs(int width, int height) {
    CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }
            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
            // TODO: revisit the still-capture output configuration.
            mImageReader = ImageReader.newInstance(width, height, ImageFormat.JPEG, /*maxImages*/30);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
            // Find out if we need to swap dimension to get the preview size relative to sensor coordinate.
            int displayRotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
            Log.d("displayRotation", "displayRotation: " + displayRotation);
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }
            Point displaySize = new Point();
            getActivity().getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;
            if (swappedDimensions) {
                // Sensor and display are rotated relative to each other: swap axes.
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }
            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }
            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);
            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation; // orientation is fixed in the manifest
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }
            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;
            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
}
/**
 * Opens the camera specified by {@link CameraFragment#mCameraId}, guarded by
 * mCameraOpenCloseLock so open and close cannot overlap.
 *
 * @param width  available preview width
 * @param height available preview height
 */
@SuppressLint("MissingPermission")
private void openCamera(int width, int height) {
    setUpCameraOutputs(width, height);
    configureTransform(width, height);
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        // Bound the wait so a stuck close cannot hang the caller forever.
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock ic_launcher opening.");
        }
        manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock ic_launcher opening.", e);
    }
}
/**
 * Closes the current {@link CameraDevice}, its capture session and the
 * ImageReader, serialized against openCamera via mCameraOpenCloseLock.
 */
private void closeCamera() {
    try {
        mCameraOpenCloseLock.acquire();
        if (null != mCaptureSession) {
            mCaptureSession.close();
            mCaptureSession = null;
        }
        if (null != mCameraDevice) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        if (null != mImageReader) {
            mImageReader.close();
            mImageReader = null;
        }
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock ic_launcher closing.", e);
    } finally {
        // Always release so a later openCamera() cannot deadlock.
        mCameraOpenCloseLock.release();
    }
}
/**
 * Starts the camera background thread and the detection ("algo") thread, then
 * kicks off the periodic classification loop.
 * Bug fix: isPreBackgroundThreadPause is reset to false here. The original
 * left it true after onPause(), so on resume periodicClassify ran once and
 * never re-posted itself — detection was dead after the first pause/resume.
 */
private void initChildThread() {
    mBackgroundThread = new HandlerThread("CameraBackground");
    mBackgroundThread.start();
    mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    mPreBackgroundThread = new HandlerThread("AlgoBackground");
    mPreBackgroundThread.start();
    mPreBackgroundHandler = new Handler(mPreBackgroundThread.getLooper());
    // Allow the detection loop to keep re-posting itself (see periodicClassify).
    isPreBackgroundThreadPause = false;
    mPreBackgroundHandler.post(periodicClassify);
}
// Set true while the fragment is paused so the runnable stops re-posting itself.
private boolean isPreBackgroundThreadPause;

// Self-re-posting detection loop: grabs the current preview frame as a Bitmap,
// runs inference on it, then schedules itself again on the algo thread.
private Runnable periodicClassify = new Runnable() {
    public void run() {
        synchronized (CameraFragment.this) {
            Bitmap bitmap = mTextureView.getBitmap();
            if (bitmap != null) {
                drawBitmapToRect(bitmap);
            }
            // Re-post for the next frame unless we have been paused.
            if (mPreBackgroundHandler != null && !isPreBackgroundThreadPause) {
                mPreBackgroundHandler.post(periodicClassify);
            }
        }
    }
};
/**
 * Stops both background threads and clears their handlers.
 * Bug fix: the original nulled mPreBackgroundThread twice and never cleared
 * mPreBackgroundHandler, leaving a handler bound to a dead looper thread.
 */
private void stopBackgroundThread() {
    // Stop periodicClassify from re-posting itself before quitting the looper.
    isPreBackgroundThreadPause = true;
    mBackgroundThread.quitSafely();
    mPreBackgroundThread.quitSafely();
    try {
        mBackgroundThread.join();
        mBackgroundThread = null;
        mBackgroundHandler = null;
        mPreBackgroundThread.join();
        mPreBackgroundThread = null;
        mPreBackgroundHandler = null;
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}
/**
 * Creates a {@link CameraCaptureSession} for the camera preview and starts the
 * repeating preview request with continuous auto-focus and auto-flash.
 */
private void createCameraPreviewSession() {
    try {
        SurfaceTexture texture = mTextureView.getSurfaceTexture();
        // Set preview size.
        texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        // This is the output Surface we need to start preview.
        Surface surface = new Surface(texture);
        mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mPreviewRequestBuilder.addTarget(surface);
        // mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
        // Here, we create a CameraCaptureSession for the camera preview.
        mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
                new CameraCaptureSession.StateCallback() {

                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                        // The camera is already closed.
                        if (null == mCameraDevice) {
                            return;
                        }
                        // When the session is ready, we start displaying the preview.
                        mCaptureSession = cameraCaptureSession;
                        try {
                            // Auto focus should be continuous for camera preview.
                            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                            // Flash is automatically enabled when necessary.
                            setAutoFlash(mPreviewRequestBuilder);
                            // Finally, we start displaying the camera preview.
                            mPreviewRequest = mPreviewRequestBuilder.build();
                            mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                        Toast.makeText(getActivity(), "Failed", Toast.LENGTH_LONG).show();
                    }
                }, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
// Detections from the most recent frame; replaced on every inference pass.
private List<RecognitionObjectBean> recognitionObjectBeanList;

/**
 * Runs the detector on one preview frame and pushes the parsed boxes to the
 * overlay view. An empty result clears the overlay.
 *
 * @param bitmap the current preview frame
 */
public void drawBitmapToRect(Bitmap bitmap) {
    if (null != recognitionObjectBeanList) {
        recognitionObjectBeanList.clear();
    }
    final long begin = System.currentTimeMillis();
    final String detection = mTrackingMobile.MindSpore_runnet(bitmap);
    Log.d(TAG, "MindSpore_runnet:time>>>" + (System.currentTimeMillis() - begin));
    Log.d(TAG, "MindSpore_runnet:result>>>" + detection);
    if (TextUtils.isEmpty(detection)) {
        // Nothing detected: wipe any boxes left from the previous frame.
        mObjectRectView.clearCanvas();
        return;
    }
    recognitionObjectBeanList = getRecognitionList(detection);
    mObjectRectView.setInfo(recognitionObjectBeanList);
}
/**
 * Configures the necessary {@link Matrix} transformation to `mTextureView`.
 * This method should be called after the camera preview size is determined in
 * setUpCameraOutputs and also the size of `mTextureView` is fixed.
 *
 * @param viewWidth  The width of `mTextureView`
 * @param viewHeight The height of `mTextureView`
 */
private void configureTransform(int viewWidth, int viewHeight) {
    if (null == mTextureView || null == mPreviewSize || null == getActivity()) {
        return;
    }
    int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
    Matrix matrix = new Matrix();
    RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
    // The preview buffer is in sensor orientation, hence width/height swapped.
    RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
    float centerX = viewRect.centerX();
    float centerY = viewRect.centerY();
    if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
        // Landscape: map the buffer onto the view, scale to fill, then rotate
        // by +/-90 degrees (90 * (rotation - 2)).
        bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
        matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
        float scale = Math.max((float) viewHeight / mPreviewSize.getHeight(), (float) viewWidth / mPreviewSize.getWidth());
        matrix.postScale(scale, scale, centerX, centerY);
        matrix.postRotate(90 * (rotation - 2), centerX, centerY);
    } else if (Surface.ROTATION_180 == rotation) {
        matrix.postRotate(180, centerX, centerY);
    }
    mTextureView.setTransform(matrix);
}
/**
 * Run the precapture sequence for capturing a still image. This method should
 * be called when a focus-lock response arrives in {@link #mCaptureCallback}.
 */
private void runPrecaptureSequence() {
    try {
        // This is how to tell the camera to trigger.
        mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
        // Tell #mCaptureCallback to wait for the precapture sequence to be set.
        mState = STATE_WAITING_PRECAPTURE;
        mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
/**
* Capture a still picture. This method should be called when we get a response in
*/
private void captureStillPicture() {
    try {
        final Activity activity = getActivity();
        // The fragment may be detaching or the camera already released.
        if (null == activity || null == mCameraDevice) {
            return;
        }
        // This is the CaptureRequest.Builder that we use to take a picture.
        final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        // Route the still image into the ImageReader's surface.
        captureBuilder.addTarget(mImageReader.getSurface());
        // Use the same AE and AF modes as the preview.
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        setAutoFlash(captureBuilder);
        // Orientation: rotate the JPEG to match the current display rotation.
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));
        CameraCaptureSession.CaptureCallback CaptureCallback
                = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                                           @NonNull CaptureRequest request,
                                           @NonNull TotalCaptureResult result) {
                // Return the camera to its normal preview state once the shot is taken.
                unlockFocus();
            }
        };
        // Stop the repeating preview and discard pending requests so the
        // one-shot still capture is processed as soon as possible.
        mCaptureSession.stopRepeating();
        mCaptureSession.abortCaptures();
        mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
/**
* Retrieves the JPEG orientation from the specified screen rotation.
*
* @param rotation The screen rotation.
* @return The JPEG orientation (one of 0, 90, 180, and 270)
*/
private int getOrientation(int rotation) {
    // Sensor orientation is 90 for most devices, or 270 for some devices (eg. Nexus 5X)
    // We have to take that into account and rotate JPEG properly.
    // For devices with orientation of 90, we simply return our mapping from ORIENTATIONS.
    // For devices with orientation of 270, we need to rotate the JPEG 180 degrees.
    // The +270 together with the ORIENTATIONS table folds both cases into one formula;
    // the modulo keeps the result in [0, 360).
    return (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360;
}
/**
* Unlock the focus. This method should be called when still image capture sequence is
* finished.
*/
private void unlockFocus() {
    try {
        // Reset the auto-focus trigger so the lens is free to refocus.
        mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
        setAutoFlash(mPreviewRequestBuilder);
        // One-shot request to apply the AF cancel.
        mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
                mBackgroundHandler);
        // After this, the camera will go back to the normal state of preview.
        mState = STATE_PREVIEW;
        // Restart the continuous preview stream.
        mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback,
                mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
/**
 * Enables auto-flash auto-exposure on the given request builder,
 * but only when the camera hardware reports flash support.
 */
private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
    if (!mFlashSupported) {
        return;
    }
    requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
}
/**
* Compares two {@code Size}s based on their areas.
*/
/**
 * Orders two {@code Size}s by their pixel area, smallest first.
 */
static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Widen to long before multiplying so width * height cannot overflow an int.
        long lhsArea = (long) lhs.getWidth() * lhs.getHeight();
        long rhsArea = (long) rhs.getWidth() * rhs.getHeight();
        return Long.signum(lhsArea - rhsArea);
    }
}
/**
* Shows an error message dialog.
*/
public static class ErrorDialog extends DialogFragment {
    // Key under which the message text is stored in the fragment's arguments.
    private static final String ARG_MESSAGE = "message";
    // Factory method: packs the message into arguments so it survives recreation.
    public static ErrorDialog newInstance(String message) {
        ErrorDialog dialog = new ErrorDialog();
        Bundle args = new Bundle();
        args.putString(ARG_MESSAGE, message);
        dialog.setArguments(args);
        return dialog;
    }
    @NonNull
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        final Activity activity = getActivity();
        // Show the stored message; pressing OK terminates the hosting activity.
        return new AlertDialog.Builder(activity)
                .setMessage(getArguments().getString(ARG_MESSAGE))
                .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialogInterface, int i) {
                        activity.finish();
                    }
                })
                .create();
    }
}
}

View File

@ -0,0 +1,263 @@
package com.mindspore.hiobject.objectdetect;
import android.Manifest;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.hiobject.R;
import com.mindspore.hiobject.help.TrackingMobile;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.List;
/**
* [入口主页面]
* <p>
* 向JNI传入图片测试MindSpore模型加载推理等.
*/
public class DealDataActivity extends AppCompatActivity {
private final String TAG = "DealDataActivity";
//自行将v2017的图片放入手机sdcard的位置
private final static String IMGPATH = "/sdcard/val2017";
private final static String IMG_RESULT_PATH = "/sdcard/val2017result/result.txt";
private final static String IMG_RESULT_SINGLE_PATH = "/sdcard/val2017result/result2.txt";
private Bitmap mBitmap;
private TrackingMobile mTrackingMobile;
private static final String PERMISSION_READ_EXTERNAL_STORAGEA = Manifest.permission.READ_EXTERNAL_STORAGE;
private static final String PERMISSION_WRITE_EXTERNAL_STORAGEA = Manifest.permission.WRITE_EXTERNAL_STORAGE;
private static final int PERMISSIONS_REQUEST = 1;
private Handler handler = new Handler() {
@Override
public void handleMessage(@NonNull Message msg) {
super.handleMessage(msg);
if (1 == msg.what) {
dealData();
// dealSingleData();
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_dealdata);
try {
mTrackingMobile = new TrackingMobile(this);
} catch (FileNotFoundException e) {
Log.e(TAG, Log.getStackTraceString(e));
}
mTrackingMobile.loadModelFromBuf(getAssets());
if (hasPermission()) {
getImgFileList();
} else {
requestPermission();
}
}
private List<String> imgFileList;
private void getImgFileList() {
new Thread(new Runnable() {
@Override
public void run() {
imgFileList = getFilesAllName(IMGPATH);
Message message = new Message();
message.what = 1;
handler.sendMessage(message);
}
}).start();
}
List<String> dealList = new ArrayList<>();
private void dealData() {
if (imgFileList != null && imgFileList.size() > 0) {
Log.d(TAG, "imgFileList size()>>" + imgFileList.size());
for (int i = 0; i < imgFileList.size(); i++) {
Bitmap bitmap = BitmapFactory.decodeFile(imgFileList.get(i)).copy(Bitmap.Config.ARGB_8888, true);
String result = mTrackingMobile.MindSpore_runnet(bitmap);
String fileName = imgFileList.get(i).substring(imgFileList.get(i).lastIndexOf("/") + 1);
Log.d(TAG, "index>>>" + i + ">>" + fileName + ">>result" + result);
StringBuilder sb = new StringBuilder();
sb.append(fileName).append("_").append(result);
dealList.add(sb.toString());
}
Log.d(TAG, "dealList >>>" + dealList.size());
writeListIntoSDcard(IMG_RESULT_PATH, dealList);
}
}
private void dealSingleData() {
String fileFullName = IMGPATH + "/error.jpg";
Bitmap bitmap = BitmapFactory.decodeResource(getResources(),R.drawable.error).copy(Bitmap.Config.ARGB_8888, true);
// Bitmap bitmap = BitmapFactory.decodeFile(fileFullName).copy(Bitmap.Config.ARGB_8888, true);
if (bitmap != null) {
String result = mTrackingMobile.MindSpore_runnet(bitmap);
Log.d(TAG, ">>result" + result);
StringBuilder sb = new StringBuilder();
sb.append("error.jpg").append("_").append(result);
// writeStringIntoSDcard(IMG_RESULT_SINGLE_PATH, sb.toString());
}
}
public boolean writeListIntoSDcard(String fileName, List<String> list) {
if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
File sdFile = new File(fileName);
try {
FileOutputStream fos = new FileOutputStream(sdFile);
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(list);//写入
fos.close();
oos.close();
return true;
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return false;
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return false;
}
} else {
return false;
}
}
public boolean writeStringIntoSDcard(String fileName, String content) {
if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
File sdFile = new File(fileName);
try {
FileOutputStream fos = new FileOutputStream(sdFile);
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(content);//写入
fos.close();
oos.close();
return true;
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return false;
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return false;
}
} else {
return false;
}
}
@Override
public void onRequestPermissionsResult(final int requestCode, final String[] permissions,
final int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == PERMISSIONS_REQUEST) {
if (allPermissionsGranted(grantResults)) {
getImgFileList();
} else {
requestPermission();
}
}
}
private static boolean allPermissionsGranted(final int[] grantResults) {
for (int result : grantResults) {
if (result != PackageManager.PERMISSION_GRANTED) {
return false;
}
}
return true;
}
private boolean hasPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
return checkSelfPermission(PERMISSION_READ_EXTERNAL_STORAGEA) == PackageManager.PERMISSION_GRANTED &&
checkSelfPermission(PERMISSION_WRITE_EXTERNAL_STORAGEA) == PackageManager.PERMISSION_GRANTED;
} else {
return true;
}
}
private void requestPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (shouldShowRequestPermissionRationale(PERMISSION_READ_EXTERNAL_STORAGEA)) {
Toast.makeText(this, "Read permission is required for this demo", Toast.LENGTH_LONG)
.show();
}
if (shouldShowRequestPermissionRationale(PERMISSION_WRITE_EXTERNAL_STORAGEA)) {
Toast.makeText(this, "WRITE permission is required for this demo", Toast.LENGTH_LONG)
.show();
}
requestPermissions(new String[]{PERMISSION_READ_EXTERNAL_STORAGEA, PERMISSION_WRITE_EXTERNAL_STORAGEA}, PERMISSIONS_REQUEST);
}
}
public List<String> getFilesAllName(String path) {
//传入指定文件夹的路径
File file = new File(path);
if (null == file || !file.isDirectory()) {
return null;
}
File[] files = file.listFiles();
List<String> imagePaths = new ArrayList<>();
for (int i = 0; i < files.length; i++) {
if (checkIsImageFile(files[i].getPath())) {
imagePaths.add(files[i].getPath());
}
}
return imagePaths;
}
/**
* 判断是否是照片
*/
public boolean checkIsImageFile(String fName) {
boolean isImageFile = false;
//获取拓展名
String fileEnd = fName.substring(fName.lastIndexOf(".") + 1,
fName.length()).toLowerCase();
if (fileEnd.equals("jpg") || fileEnd.equals("png") || fileEnd.equals("gif")
|| fileEnd.equals("jpeg") || fileEnd.equals("bmp")) {
isImageFile = true;
} else {
isImageFile = false;
}
return isImageFile;
}
}

View File

@ -0,0 +1,114 @@
package com.mindspore.hiobject.objectdetect;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import com.mindspore.hiobject.help.RecognitionObjectBean;
import java.util.ArrayList;
import java.util.List;
/**
* 针对物体检测的矩形框绘制类
* <p>
* 使用的API
* 1. Canvas代表依附于指定View的画布用它的方法来绘制各种图形.
* 2. Paint代表Canvas上的画笔用于设置画笔颜色画笔粗细填充风格等.
*/
/**
 * Overlay view that draws one labelled bounding box per detected object.
 * <p>
 * Uses a single shared {@link Paint} (outline-only stroke) and cycles
 * through a small colour palette so adjacent boxes are distinguishable.
 */
public class ObjectRectView extends View {

    private final String TAG = "ObjectRectView";

    // Detections currently rendered; replaced wholesale by setInfo().
    private List<RecognitionObjectBean> mRecognitions = new ArrayList<>();
    private Paint mPaint = null;
    // Scratch rectangle, re-created per box in drawRect().
    private RectF mObjRectF;

    public ObjectRectView(Context context) {
        super(context);
        initialize();
    }

    public ObjectRectView(Context context, AttributeSet attrs) {
        super(context, attrs);
        initialize();
    }

    public ObjectRectView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        initialize();
    }

    public int[] MyColor = {Color.RED, Color.WHITE, Color.YELLOW, Color.GREEN, Color.LTGRAY, Color.MAGENTA, Color.BLACK, Color.BLUE, Color.CYAN};

    // Shared paint: anti-aliased, 50px label text, 5px outline-only stroke.
    private void initialize() {
        mObjRectF = new RectF();
        mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mPaint.setTextSize(50);
        mPaint.setStyle(Style.STROKE);
        mPaint.setStrokeWidth(5);
    }

    /**
     * Replaces the detections to draw and schedules a redraw.
     *
     * @param recognitions the new detection results
     */
    public void setInfo(List<RecognitionObjectBean> recognitions) {
        Log.i(TAG, "setInfo: "+recognitions.size());
        mRecognitions.clear();
        mRecognitions.addAll(recognitions);
        invalidate();
    }

    /**
     * Removes every box from the overlay and schedules a redraw.
     */
    public void clearCanvas(){
        mRecognitions.clear();
        invalidate();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (mRecognitions == null || mRecognitions.isEmpty()) {
            return;
        }
        int index = 0;
        for (RecognitionObjectBean bean : mRecognitions) {
            // Cycle through the palette so neighbouring boxes differ in colour.
            mPaint.setColor(MyColor[index % MyColor.length]);
            drawRect(bean, canvas);
            index++;
        }
    }

    /**
     * Draws one bounding box plus its "id_label_score%" caption just above it.
     */
    public void drawRect(RecognitionObjectBean bean, Canvas canvas) {
        String caption = bean.getRectID() + "_" + bean.getObjectName() + "_"
                + String.format("%.2f", (100 * bean.getScore())) + "%";
        mObjRectF = new RectF(bean.getLeft(), bean.getTop(), bean.getRight(), bean.getBottom());
        canvas.drawRoundRect(mObjRectF, 5, 5, mPaint);
        canvas.drawText(caption, mObjRectF.left, mObjRectF.top - 20, mPaint);
    }
}

View File

@ -0,0 +1,116 @@
package com.mindspore.hiobject.objectdetect;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.widget.ImageView;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.hiobject.R;
import com.mindspore.hiobject.help.ImageDegreeHelper;
import com.mindspore.hiobject.help.RecognitionObjectBean;
import com.mindspore.hiobject.help.TrackingMobile;
import java.io.FileNotFoundException;
import java.util.List;
import static com.mindspore.hiobject.help.RecognitionObjectBean.getRecognitionList;
/**
 * Shows a user-picked photo and overlays MindSpore Lite detection boxes
 * by drawing directly onto the bitmap displayed in the ImageView.
 */
public class PhotoActivity extends AppCompatActivity {

    private static final String TAG = "PhotoActivity";
    private static final int[] COLORS = {Color.RED, Color.WHITE, Color.YELLOW, Color.GREEN, Color.LTGRAY, Color.MAGENTA, Color.BLACK, Color.BLUE, Color.CYAN};

    private ImageView imgPhoto;
    private TrackingMobile trackingMobile;
    private List<RecognitionObjectBean> recognitionObjectBeanList;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_photo);
        imgPhoto = findViewById(R.id.img_photo);
        // The picked image arrives as a content Uri; resolve it to a file path
        // and undo any EXIF rotation before display/inference.
        Uri uri = getIntent().getData();
        String imgPath = ImageDegreeHelper.getPath(this, uri);
        int degree = ImageDegreeHelper.readPictureDegree(imgPath);
        Bitmap originBitmap = BitmapFactory.decodeFile(imgPath);
        if (originBitmap != null) {
            Bitmap bitmap = ImageDegreeHelper.rotaingImageView(degree, originBitmap.copy(Bitmap.Config.ARGB_8888, true));
            if (bitmap != null) {
                imgPhoto.setImageBitmap(bitmap);
                initMindspore(bitmap);
            }
        }
    }

    // Loads the model, runs inference on the bitmap, and draws results onto it.
    private void initMindspore(Bitmap bitmap) {
        try {
            trackingMobile = new TrackingMobile(this);
        } catch (FileNotFoundException e) {
            Log.e(TAG, Log.getStackTraceString(e));
            e.printStackTrace();
        }
        // FIX: the old code fell through after a failed construction and
        // crashed with an NPE on the next line.
        if (trackingMobile == null) {
            Log.e(TAG, "TrackingMobile init failed.");
            return;
        }
        // Load model.
        boolean ret = trackingMobile.loadModelFromBuf(getAssets());
        if (!ret) {
            Log.e(TAG, "Load model error.");
            return;
        }
        // Run net and measure wall-clock latency.
        long startTime = System.currentTimeMillis();
        String result = trackingMobile.MindSpore_runnet(bitmap);
        long endTime = System.currentTimeMillis();
        Log.d(TAG, "RUNNET 耗时:" + (endTime - startTime) + "ms");
        Log.d(TAG, "result"+ result);
        recognitionObjectBeanList = getRecognitionList(result);
        if (recognitionObjectBeanList != null && recognitionObjectBeanList.size() > 0) {
            drawRect(bitmap);
        }
    }

    // Draws one labelled, colour-cycled box per detection directly onto the bitmap.
    private void drawRect(Bitmap bitmap) {
        Canvas canvas = new Canvas(bitmap);
        Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
        paint.setTextSize(dip2px(15));
        paint.setStyle(Paint.Style.STROKE);
        paint.setStrokeWidth(3);
        for (int i = 0; i < recognitionObjectBeanList.size(); i++) {
            RecognitionObjectBean objectBean = recognitionObjectBeanList.get(i);
            StringBuilder sb = new StringBuilder();
            sb.append(objectBean.getRectID()).append("_").append(objectBean.getObjectName()).append("_").append(String.format("%.2f", (100 * objectBean.getScore())) + "%");
            int paintColor = COLORS[i % COLORS.length];
            paint.setColor(paintColor);
            RectF rectF = new RectF(objectBean.getLeft(), objectBean.getTop(), objectBean.getRight(), objectBean.getBottom());
            canvas.drawRect(rectF, paint);
            canvas.drawText(sb.toString(), objectBean.getLeft(), objectBean.getTop() - 10, paint);
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // FIX: trackingMobile is null when init failed in onCreate; guard
        // before unloading to avoid an NPE on teardown.
        if (trackingMobile != null) {
            trackingMobile.unloadModel();
        }
    }

    // Converts device-independent pixels to physical pixels for text sizing.
    public int dip2px(float dipValue) {
        float scale = getResources().getDisplayMetrics().density;
        return (int) (dipValue * scale + 0.5f);
    }
}

View File

@ -0,0 +1,30 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

Binary file not shown.

After

Width:  |  Height:  |  Size: 85 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 219 KiB

View File

@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>

Binary file not shown.

After

Width:  |  Height:  |  Size: 35 KiB

View File

@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".objectdetect.CameraActivity">
<ImageView
android:id="@+id/igv_test"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
</LinearLayout>

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".objectdetect.PhotoActivity">
<ImageView
android:scaleType="fitXY"
android:id="@+id/img_photo"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,52 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/colorPrimary"
tools:context=".SplashActivity">
<ImageView
android:id="@+id/img_logo"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:src="@mipmap/logo"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<Button
android:id="@+id/btn_photo"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="52dp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
android:text="PHOTO"
app:layout_constraintTop_toBottomOf="@+id/img_logo"
/>
<Button
android:id="@+id/btn_camera"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="20dp"
android:text="CAMERA"
app:layout_constraintTop_toBottomOf="@+id/btn_photo"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent" />
<Button
android:visibility="gone"
android:id="@+id/btn_deal"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="20dp"
android:text="DEALDATA"
app:layout_constraintTop_toBottomOf="@+id/btn_camera"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<com.mindspore.hiobject.objectdetect.AutoFitTextureView
android:id="@+id/texture"
android:layout_width="match_parent"
android:layout_height="match_parent"
/>
<com.mindspore.hiobject.objectdetect.ObjectRectView
android:id="@+id/objRectView"
android:layout_width="match_parent"
android:layout_height="match_parent"
/>
</RelativeLayout>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 16 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 16 KiB

View File

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#1a1b33</color>
<color name="colorPrimaryDark">#3700B3</color>
<color name="colorAccent">#2B2B2B</color>
<color name="white">#ffffff</color>
</resources>

View File

@ -0,0 +1,11 @@
<resources>
<string name="app_name">HiObject</string>
<string name="action_settings">设置</string>
<string name="request_permission">This sample needs camera permission.</string>
<string name="camera_error">This device doesn\'t support Camera2 API.</string>
<string name="intro_message">使用的google的camera2demo.</string>
<string name="appwidget_text">EXAMPLE</string>
<string name="add_widget">Add widget</string>
</resources>

View File

@ -0,0 +1,10 @@
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
</style>
</resources>

View File

@ -0,0 +1,17 @@
package com.mindspore.hiobject;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
/**
 * Example local unit test that runs on the development machine (host JVM).
 */
public class ExampleUnitTest {
    @Test
    public void addition_isCorrect() {
        final int expected = 4;
        final int actual = 2 + 2;
        assertEquals(expected, actual);
    }
}

View File

@ -0,0 +1,24 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath "com.android.tools.build:gradle:4.0.1"
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}

View File

@ -0,0 +1,19 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true

View File

@ -0,0 +1,6 @@
#Tue Jul 28 10:28:05 CST 2020
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-all.zip

View File

@ -0,0 +1,172 @@
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
## Locates a JVM, normalizes the environment per-OS, then launches the
## Gradle wrapper main class with the project's pinned Gradle version.
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
# Remember the caller's cwd, resolve the script's physical directory, then restore.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# Print a message to stdout without exiting.
warn () {
echo "$*"
}
# Print a message and abort the script with a non-zero exit code.
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
# The wrapper jar bootstraps the real Gradle distribution download/launch.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
# No JAVA_HOME: fall back to whatever `java` is on PATH, or abort.
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
# Each positional arg that looks like a Unix path (and is not an option flag)
# is rewritten via cygpath into Windows form, stored in args0..args9.
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
# Rebuild the positional parameter list from the converted args (max 9 supported).
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
# Single-quotes each argument (escaping embedded quotes) so the later `eval set --`
# re-splits them exactly as passed by the caller.
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
# Replace this shell process with the JVM running the Gradle wrapper.
exec "$JAVACMD" "$@"

View File

@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem Locates java.exe (via JAVA_HOME or PATH), then launches the Gradle
@rem wrapper main class with the project's pinned Gradle version.
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem APP_HOME is the directory containing this script.
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
@rem No JAVA_HOME: probe for java.exe on PATH.
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
@rem Strip any quotes from JAVA_HOME, then verify the JVM binary exists.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

Binary file not shown.

After

Width:  |  Height:  |  Size: 109 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 39 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 160 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 36 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 86 KiB

View File

@ -0,0 +1,2 @@
// Declares the modules that make up this Gradle build and the root project name.
include ':app'
rootProject.name = "HiMSObject"