diff --git a/model_zoo/official/lite/scene_detection/.gitignore b/model_zoo/official/lite/scene_detection/.gitignore new file mode 100644 index 0000000000..836b944df6 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/.gitignore @@ -0,0 +1,83 @@ +# MindSpore +build/ +mindspore/lib +app/src/main/assets/model/ +app/src/main/cpp/mindspore-lite-* +output +*.ir +mindspore/ccsrc/schema/inner/* + +# Cmake files +CMakeFiles/ +cmake_install.cmake +CMakeCache.txt +Makefile +cmake-build-debug + +# Dynamic libraries +*.so +*.so.* +*.dylib + +# Static libraries +*.la +*.lai +*.a +*.lib + +# Protocol buffers +*_pb2.py +*.pb.h +*.pb.cc + +# Object files +*.o + +# Editor +.vscode +.idea/ + +# Cquery +.cquery_cached_index/ +compile_commands.json + +# Ctags and cscope +tags +TAGS +CTAGS +GTAGS +GRTAGS +GSYMS +GPATH +cscope.* + +# Python files +*__pycache__* +.pytest_cache + +# Mac files +*.DS_Store + +# Test results +test_temp_summary_event_file/ +*.dot +*.dat +*.svg +*.perf +*.info +*.ckpt +*.shp +*.pkl +.clangd +mindspore/version.py +mindspore/default_config.py +mindspore/.commit_id +onnx.proto +mindspore/ccsrc/onnx.proto + +# Android +local.properties +.gradle +sdk/build +sdk/.cxx +app/.cxx diff --git a/model_zoo/official/lite/scene_detection/app/.gitignore b/model_zoo/official/lite/scene_detection/app/.gitignore new file mode 100644 index 0000000000..796b96d1c4 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/.gitignore @@ -0,0 +1 @@ +/build diff --git a/model_zoo/official/lite/scene_detection/app/CMakeLists.txt b/model_zoo/official/lite/scene_detection/app/CMakeLists.txt new file mode 100644 index 0000000000..c15b2a65ff --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/CMakeLists.txt @@ -0,0 +1,82 @@ +# For more information about using CMake with Android Studio, read the +# documentation: https://d.android.com/studio/projects/add-native-code.html + +# Sets the minimum version of CMake required to build the native library. + +cmake_minimum_required(VERSION 3.4.1) + +set(CMAKE_VERBOSE_MAKEFILE on) +set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/libs/${ANDROID_ABI}) + +set(MINDSPORELITE_VERSION mindspore-lite-1.0.1-runtime-arm64-cpu) + +# ============== Set MindSpore Dependencies. ============= +include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp) +include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/third_party/flatbuffers/include) +include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}) +include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include) +include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/ir/dtype) +include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/schema) +include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/include) + +add_library(mindspore-lite SHARED IMPORTED) +add_library(minddata-lite SHARED IMPORTED) + +set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION + ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite.so) +set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION + ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/lib/libminddata-lite.so) +# --------------- MindSpore Lite set End. -------------------- + + +# Creates and names a library, sets it as either STATIC +# or SHARED, and provides the relative paths to its source code. +# You can define multiple libraries, and CMake builds them for you. 
+# Gradle automatically packages shared libraries with your APK.
+
+file(GLOB_RECURSE cpp_src "src/main/cpp/*.cpp" "src/main/cpp/*.h")
+
+add_library( # Sets the name of the library.
+        mlkit-label-MS
+
+        # Sets the library as a shared library.
+        SHARED
+
+        # Provides a relative path to your source file(s).
+        ${cpp_src})
+
+
+# Searches for a specified prebuilt library and stores the path as a
+# variable. Because CMake includes system libraries in the search path by
+# default, you only need to specify the name of the public NDK library
+# you want to add. CMake verifies that the library exists before
+# completing its build.
+
+find_library( # Sets the name of the path variable.
+        log-lib
+
+        # Specifies the name of the NDK library that
+        # you want CMake to locate.
+        log)
+
+
+find_library(jnigraphics-lib jnigraphics)
+
+# Specifies libraries CMake should link to your target library. You
+# can link multiple libraries, such as libraries you define in this
+# build script, prebuilt third-party libraries, or system libraries.
+add_definitions(-DMNN_USE_LOGCAT)
+target_link_libraries( # Specifies the target library.
+        mlkit-label-MS
+
+        # --- mindspore ---
+        minddata-lite
+        mindspore-lite
+
+        # --- other dependencies.---
+        -ljnigraphics
+        android
+
+        # Links the target library to the log library
+        ${log-lib}
+        )
\ No newline at end of file
diff --git a/model_zoo/official/lite/scene_detection/app/build.gradle b/model_zoo/official/lite/scene_detection/app/build.gradle
new file mode 100644
index 0000000000..8d23aa9740
--- /dev/null
+++ b/model_zoo/official/lite/scene_detection/app/build.gradle
@@ -0,0 +1,75 @@
+apply plugin: 'com.android.application'
+
+android {
+    compileSdkVersion 30
+    buildToolsVersion "30.0.1"
+
+    defaultConfig {
+        applicationId "com.mindspore.scene"
+        minSdkVersion 21
+        targetSdkVersion 30
+        versionCode 1
+        versionName "1.0.0"
+
+        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+        externalNativeBuild {
+            cmake {
+                arguments "-DANDROID_STL=c++_shared"
+                cppFlags ""
+            }
+        }
+        ndk {
+            abiFilters 'arm64-v8a'
+        }
+    }
+    aaptOptions {
+        noCompress '.so', 'ms'
+    }
+
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
+        }
+
+        customDebugType {
+            debuggable true
+        }
+    }
+    externalNativeBuild {
+        cmake {
+            path file('CMakeLists.txt')
+        }
+    }
+    ndkVersion '21.3.6528147'
+
+    sourceSets {
+        main {
+            jniLibs.srcDirs = ['libs']
+        }
+    }
+    packagingOptions {
+        pickFirst 'lib/arm64-v8a/libmlkit-label-MS.so'
+    }
+
+}
+
+
+// Before gradle build.
+// To download some necessary libraries.
+apply from: 'download.gradle'
+
+
+dependencies {
+    implementation fileTree(dir: "libs", include: ["*.jar"])
+    implementation 'androidx.appcompat:appcompat:1.1.0'
+    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
+
+    testImplementation 'junit:junit:4.12'
+    androidTestImplementation 'androidx.test.ext:junit:1.1.1'
+    androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
+
+    implementation 'com.google.android.material:material:1.0.0'
+    androidTestImplementation 'com.android.support.test:rules:1.0.2'
+    androidTestImplementation 'com.google.truth:truth:1.0.1'
+}
diff --git a/model_zoo/official/lite/scene_detection/app/download.gradle b/model_zoo/official/lite/scene_detection/app/download.gradle
new file mode 100644
index 0000000000..423ab8d6fc
--- /dev/null
+++ b/model_zoo/official/lite/scene_detection/app/download.gradle
@@ -0,0 +1,82 @@
+/**
+ * Downloads the necessary libraries from the Huawei server,
+ * including the mindspore-lite .so file, the minddata-lite .so file and the model file.
+ * The libraries can also be downloaded manually.
+ */
+def targetMindSporeInclude = "src/main/cpp/"
+def mindsporeLite_Version = "mindspore-lite-1.0.1-runtime-arm64-cpu"
+
+def targetModelFile = "src/main/assets/model/mobilenetv2.ms"
+def mindSporeLibrary_arm64 = "src/main/cpp/${mindsporeLite_Version}.tar.gz"
+
+def modelDownloadUrl = "https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms"
+def mindsporeLiteDownloadUrl = "https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/lite/android_aarch64/${mindsporeLite_Version}.tar.gz"
+
+def cleantargetMindSporeInclude = "src/main/cpp"
+
+task cleanCmakeCache(type: Delete) {
+    delete '.cxx/cmake/debug'
+    delete '.cxx/cmake/release'
+}
+
+task downloadModelFile(type: DownloadUrlTask) {
+    doFirst {
+        println "Downloading ${modelDownloadUrl}"
+    }
+    sourceUrl = "${modelDownloadUrl}"
+    target = file("${targetModelFile}")
+}
+
+task downloadMindSporeLibrary(type: DownloadUrlTask) {
+    doFirst {
+        println "Downloading ${mindsporeLiteDownloadUrl}"
+    }
+    sourceUrl = "${mindsporeLiteDownloadUrl}"
+    target = file("${mindSporeLibrary_arm64}")
+}
+
+task unzipMindSporeInclude(type: Copy, dependsOn: 'downloadMindSporeLibrary') {
+    doFirst {
+        println "Unzipping ${mindSporeLibrary_arm64}"
+    }
+    from tarTree(resources.gzip("${mindSporeLibrary_arm64}"))
+    into "${targetMindSporeInclude}"
+}
+
+task cleanUnusedmindsporeFiles(type: Delete, dependsOn: ['unzipMindSporeInclude']) {
+    delete fileTree("${cleantargetMindSporeInclude}").matching {
+        include "*.tar.gz"
+    }
+}
+/*
+ * Use preBuild to download the MindSpore library and the model file
+ * before the Gradle build runs.
+ */ +if (file("src/main/cpp/${mindsporeLite_Version}/lib/libmindspore-lite.so").exists()) { + downloadMindSporeLibrary.enabled = false + unzipMindSporeInclude.enabled = false + cleanUnusedmindsporeFiles.enabled = false +} + +if (file("src/main/assets/model/mobilenetv2.ms").exists()) { + downloadModelFile.enabled = false +} + +preBuild.dependsOn cleanCmakeCache +preBuild.dependsOn downloadModelFile +preBuild.dependsOn downloadMindSporeLibrary +preBuild.dependsOn unzipMindSporeInclude +preBuild.dependsOn cleanUnusedmindsporeFiles + +class DownloadUrlTask extends DefaultTask { + @Input + String sourceUrl + + @OutputFile + File target + + @TaskAction + void download() { + ant.get(src: sourceUrl, dest: target) + } +} diff --git a/model_zoo/official/lite/scene_detection/app/proguard-rules.pro b/model_zoo/official/lite/scene_detection/app/proguard-rules.pro new file mode 100644 index 0000000000..481bb43481 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/proguard-rules.pro @@ -0,0 +1,21 @@ +# Add project specific ProGuard rules here. +# You can control the set of applied configuration files using the +# proguardFiles setting in build.gradle. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. +#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name. +#-renamesourcefileattribute SourceFile \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/AndroidManifest.xml b/model_zoo/official/lite/scene_detection/app/src/main/AndroidManifest.xml new file mode 100644 index 0000000000..9b2c97b26a --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/AndroidManifest.xml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/cpp/MSNetWork.cpp b/model_zoo/official/lite/scene_detection/app/src/main/cpp/MSNetWork.cpp new file mode 100644 index 0000000000..ad02a1c59a --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/cpp/MSNetWork.cpp @@ -0,0 +1,63 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "MSNetWork.h" +#include +#include +#include +#include "include/errorcode.h" + +#define MS_PRINT(format, ...) 
__android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__) + +MSNetWork::MSNetWork(void) : session_(nullptr), model_(nullptr) {} + +MSNetWork::~MSNetWork(void) {} + +void +MSNetWork::CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx) { + session_ = mindspore::session::LiteSession::CreateSession(ctx); + if (session_ == nullptr) { + MS_PRINT("Create Session failed."); + return; + } + + // Compile model. + model_ = mindspore::lite::Model::Import(modelBuffer, bufferLen); + if (model_ == nullptr) { + ReleaseNets(); + MS_PRINT("Import model failed."); + return; + } + + int ret = session_->CompileGraph(model_); + if (ret != mindspore::lite::RET_OK) { + ReleaseNets(); + MS_PRINT("CompileGraph failed."); + return; + } +} + +void MSNetWork::ReleaseNets(void) { + if (model_ != nullptr) { + model_->Free(); + delete model_; + model_ = nullptr; + } + if (session_ != nullptr) { + delete session_; + session_ = nullptr; + } +} diff --git a/model_zoo/official/lite/scene_detection/app/src/main/cpp/MSNetWork.h b/model_zoo/official/lite/scene_detection/app/src/main/cpp/MSNetWork.h new file mode 100644 index 0000000000..ee164459be --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/cpp/MSNetWork.h @@ -0,0 +1,58 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MSNETWORK_H +#define MSNETWORK_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +struct ImgDims { + int channel = 0; + int width = 0; + int height = 0; +}; + +class MSNetWork { + public: + MSNetWork(); + + ~MSNetWork(); + + void CreateSessionMS(char *modelBuffer, size_t bufferLen, mindspore::lite::Context *ctx); + + void ReleaseNets(void); + + mindspore::session::LiteSession *session() const { return session_; } + + private: + mindspore::session::LiteSession *session_; + mindspore::lite::Model *model_; +}; + +#endif diff --git a/model_zoo/official/lite/scene_detection/app/src/main/cpp/MindSporeNetnative.cpp b/model_zoo/official/lite/scene_detection/app/src/main/cpp/MindSporeNetnative.cpp new file mode 100644 index 0000000000..463d171a83 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/cpp/MindSporeNetnative.cpp @@ -0,0 +1,472 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "include/errorcode.h" +#include "include/ms_tensor.h" +#include "MindSporeNetnative.h" +#include "MSNetWork.h" +#include "lite_cv/lite_mat.h" +#include "lite_cv/image_process.h" + +using mindspore::dataset::LiteMat; +using mindspore::dataset::LPixelType; +using mindspore::dataset::LDataType; +#define MS_PRINT(format, ...) __android_log_print(ANDROID_LOG_INFO, "MSJNI", format, ##__VA_ARGS__) + + +static const int RET_CATEGORY_SUM = 365; +static const char *labels_name_map[RET_CATEGORY_SUM] = {"airfield", "airplane_cabin", + "airport_terminal", "alcove", "alley", + "amphitheater", "amusement_arcade", + "amusement_park", + "apartment_building/outdoor", "aquarium", + "aqueduct", "arcade", "arch", + "archaelogical_excavation", "archive", + "arena/hockey", "arena/performance", + "arena/rodeo", "army_base", "art_gallery", + "art_school", "art_studio", "artists_loft", + "assembly_line", "athletic_field/outdoor", + "atrium/public", "attic", "auditorium", + "auto_factory", "auto_showroom", + "badlands", "bakery/shop", + "balcony/exterior", "balcony/interior", + "ball_pit", + "ballroom", "bamboo_forest", "bank_vault", + "banquet_hall", "bar", + "barn", "barndoor", "baseball_field", + "basement", "basketball_court/indoor", + "bathroom", "bazaar/indoor", + "bazaar/outdoor", "beach", "beach_house", + "beauty_salon", "bedchamber", "bedroom", + "beer_garden", "beer_hall", + "berth", "biology_laboratory", "boardwalk", + "boat_deck", "boathouse", + "bookstore", "booth/indoor", + "botanical_garden", "bow_window/indoor", + "bowling_alley", + "boxing_ring", "bridge", "building_facade", + "bullring", "burial_chamber", + "bus_interior", "bus_station/indoor", + "butchers_shop", "butte", "cabin/outdoor", + "cafeteria", "campsite", "campus", + "canal/natural", "canal/urban", + "candy_store", "canyon", "car_interior", + "carrousel", "castle", + "catacomb", "cemetery", "chalet", + "chemistry_lab", "childs_room", + "church/indoor", "church/outdoor", + "classroom", "clean_room", "cliff", + "closet", "clothing_store", "coast", + "cockpit", "coffee_shop", + "computer_room", "conference_center", + "conference_room", "construction_site", + "corn_field", + "corral", "corridor", "cottage", + "courthouse", "courtyard", + "creek", "crevasse", "crosswalk", "dam", + "delicatessen", + "department_store", "desert/sand", + "desert/vegetation", "desert_road", + "diner/outdoor", + "dining_hall", "dining_room", "discotheque", + "doorway/outdoor", "dorm_room", + "downtown", "dressing_room", "driveway", + "drugstore", "elevator/door", + "elevator_lobby", "elevator_shaft", + "embassy", "engine_room", "entrance_hall", + "escalator/indoor", "excavation", + "fabric_store", "farm", + "fastfood_restaurant", + "field/cultivated", "field/wild", + "field_road", "fire_escape", "fire_station", + "fishpond", "flea_market/indoor", + "florist_shop/indoor", "food_court", + "football_field", + "forest/broadleaf", "forest_path", + "forest_road", "formal_garden", "fountain", + "galley", "garage/indoor", "garage/outdoor", + "gas_station", "gazebo/exterior", + "general_store/indoor", + "general_store/outdoor", "gift_shop", + "glacier", "golf_course", + "greenhouse/indoor", "greenhouse/outdoor", + "grotto", "gymnasium/indoor", + "hangar/indoor", + "hangar/outdoor", "harbor", + "hardware_store", "hayfield", "heliport", + "highway", "home_office", "home_theater", + "hospital", "hospital_room", + "hot_spring", "hotel/outdoor", "hotel_room", + 
"house", "hunting_lodge/outdoor", + "ice_cream_parlor", "ice_floe", "ice_shelf", + "ice_skating_rink/indoor", + "ice_skating_rink/outdoor", + "iceberg", "igloo", "industrial_area", + "inn/outdoor", "islet", + "jacuzzi/indoor", "jail_cell", + "japanese_garden", "jewelry_shop", + "junkyard", + "kasbah", "kennel/outdoor", + "kindergarden_classroom", "kitchen", + "lagoon", + "lake/natural", "landfill", "landing_deck", + "laundromat", "lawn", + "lecture_room", "legislative_chamber", + "library/indoor", "library/outdoor", + "lighthouse", + "living_room", "loading_dock", "lobby", + "lock_chamber", "locker_room", + "mansion", "manufactured_home", + "market/indoor", "market/outdoor", "marsh", + "martial_arts_gym", "mausoleum", "medina", + "mezzanine", "moat/water", + "mosque/outdoor", "motel", "mountain", + "mountain_path", "mountain_snowy", + "movie_theater/indoor", "museum/indoor", + "museum/outdoor", "music_studio", + "natural_history_museum", + "nursery", "nursing_home", "oast_house", + "ocean", "office", + "office_building", "office_cubicles", + "oilrig", "operating_room", "orchard", + "orchestra_pit", "pagoda", "palace", + "pantry", "park", + "parking_garage/indoor", + "parking_garage/outdoor", "parking_lot", + "pasture", "patio", + "pavilion", "pet_shop", "pharmacy", + "phone_booth", "physics_laboratory", + "picnic_area", "pier", "pizzeria", + "playground", "playroom", + "plaza", "pond", "porch", "promenade", + "pub/indoor", + "racecourse", "raceway", "raft", + "railroad_track", "rainforest", + "reception", "recreation_room", + "repair_shop", "residential_neighborhood", + "restaurant", + "restaurant_kitchen", "restaurant_patio", + "rice_paddy", "river", "rock_arch", + "roof_garden", "rope_bridge", "ruin", + "runway", "sandbox", + "sauna", "schoolhouse", "science_museum", + "server_room", "shed", + "shoe_shop", "shopfront", + "shopping_mall/indoor", "shower", + "ski_resort", + "ski_slope", "sky", "skyscraper", "slum", + "snowfield", + "soccer_field", "stable", + "stadium/baseball", "stadium/football", + "stadium/soccer", + "stage/indoor", "stage/outdoor", + "staircase", "storage_room", "street", + "subway_station/platform", "supermarket", + "sushi_bar", "swamp", "swimming_hole", + "swimming_pool/indoor", + "swimming_pool/outdoor", + "synagogue/outdoor", "television_room", + "television_studio", + "temple/asia", "throne_room", + "ticket_booth", "topiary_garden", "tower", + "toyshop", "train_interior", + "train_station/platform", "tree_farm", + "tree_house", + "trench", "tundra", "underwater/ocean_deep", + "utility_room", "valley", + "vegetable_garden", "veterinarians_office", + "viaduct", "village", "vineyard", + "volcano", "volleyball_court/outdoor", + "waiting_room", "water_park", "water_tower", + "waterfall", "watering_hole", "wave", + "wet_bar", "wheat_field", + "wind_farm", "windmill", "yard", + "youth_hostel", "zen_garden"}; + + +char *CreateLocalModelBuffer(JNIEnv *env, jobject modelBuffer) { + jbyte *modelAddr = static_cast(env->GetDirectBufferAddress(modelBuffer)); + int modelLen = static_cast(env->GetDirectBufferCapacity(modelBuffer)); + char *buffer(new char[modelLen]); + memcpy(buffer, modelAddr, modelLen); + return buffer; +} + +/** + * To process the result of mindspore inference. + * @param msOutputs + * @return + */ +std::string ProcessRunnetResult(const int RET_CATEGORY_SUM, const char *const labels_name_map[], + std::unordered_map msOutputs) { + // Get the branch of the model output. + // Use iterators to get map elements. 
+ std::unordered_map::iterator iter; + iter = msOutputs.begin(); + + // The mobilenetv2.ms model output just one branch. + auto outputTensor = iter->second; + + int tensorNum = outputTensor->ElementsNum(); + MS_PRINT("Number of tensor elements:%d", tensorNum); + + // Get a pointer to the first score. + float *temp_scores = static_cast(outputTensor->MutableData()); + + // float scores[RET_CATEGORY_SUM]; + float scores = temp_scores[0]; + int cat_loc = 0; + for (int i = 0; i < RET_CATEGORY_SUM; ++i) { + if (scores < temp_scores[i]) { + scores = temp_scores[i]; + cat_loc = i; + } + if (temp_scores[i] > 0.5) { + MS_PRINT("MindSpore scores[%d] : [%f]", i, temp_scores[i]); + } + } + + // Score for each category. + // Converted to text information that needs to be displayed in the APP. + std::string categoryScore = ""; + categoryScore += labels_name_map[cat_loc]; + categoryScore += ":"; + std::string score_str = std::to_string(scores); + categoryScore += score_str; + return categoryScore; +} + +bool BitmapToLiteMat(JNIEnv *env, const jobject &srcBitmap, LiteMat *lite_mat) { + bool ret = false; + AndroidBitmapInfo info; + void *pixels = nullptr; + LiteMat &lite_mat_bgr = *lite_mat; + AndroidBitmap_getInfo(env, srcBitmap, &info); + if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) { + MS_PRINT("Image Err, Request RGBA"); + return false; + } + AndroidBitmap_lockPixels(env, srcBitmap, &pixels); + if (info.stride == info.width * 4) { + ret = InitFromPixel(reinterpret_cast(pixels), + LPixelType::RGBA2RGB, LDataType::UINT8, + info.width, info.height, lite_mat_bgr); + if (!ret) { + MS_PRINT("Init From RGBA error"); + } + } else { + unsigned char *pixels_ptr = new unsigned char[info.width * info.height * 4]; + unsigned char *ptr = pixels_ptr; + unsigned char *data = reinterpret_cast(pixels); + for (int i = 0; i < info.height; i++) { + memcpy(ptr, data, info.width * 4); + ptr += info.width * 4; + data += info.stride; + } + ret = InitFromPixel(reinterpret_cast(pixels_ptr), + LPixelType::RGBA2RGB, LDataType::UINT8, + info.width, info.height, lite_mat_bgr); + if (!ret) { + MS_PRINT("Init From RGBA error"); + } + delete[] (pixels_ptr); + } + AndroidBitmap_unlockPixels(env, srcBitmap); + return ret; +} + +bool PreProcessImageData(const LiteMat &lite_mat_bgr, LiteMat *lite_norm_mat_ptr) { + bool ret = false; + LiteMat lite_mat_resize; + LiteMat &lite_norm_mat_cut = *lite_norm_mat_ptr; + ret = ResizeBilinear(lite_mat_bgr, lite_mat_resize, 256, 256); + if (!ret) { + MS_PRINT("ResizeBilinear error"); + return false; + } + LiteMat lite_mat_convert_float; + ret = ConvertTo(lite_mat_resize, lite_mat_convert_float, 1.0 / 255.0); + if (!ret) { + MS_PRINT("ConvertTo error"); + return false; + } + LiteMat lite_mat_cut; + ret = Crop(lite_mat_convert_float, lite_mat_cut, 16, 16, 224, 224); + if (!ret) { + MS_PRINT("Crop error"); + return false; + } + std::vector means = {0.485, 0.456, 0.406}; + std::vector stds = {0.229, 0.224, 0.225}; + SubStractMeanNormalize(lite_mat_cut, lite_norm_mat_cut, means, stds); + return true; +} + + +/** + * The Java layer reads the model into MappedByteBuffer or ByteBuffer to load the model. 
+ */ +extern "C" +JNIEXPORT jlong JNICALL +Java_com_mindspore_scene_gallery_classify_TrackingMobile_loadModel(JNIEnv *env, + jobject thiz, + jobject model_buffer, + jint num_thread) { + if (nullptr == model_buffer) { + MS_PRINT("error, buffer is nullptr!"); + return (jlong) nullptr; + } + jlong bufferLen = env->GetDirectBufferCapacity(model_buffer); + if (0 == bufferLen) { + MS_PRINT("error, bufferLen is 0!"); + return (jlong) nullptr; + } + + char *modelBuffer = CreateLocalModelBuffer(env, model_buffer); + if (modelBuffer == nullptr) { + MS_PRINT("modelBuffer create failed!"); + return (jlong) nullptr; + } + + // To create a mindspore network inference environment. + void **labelEnv = new void *; + MSNetWork *labelNet = new MSNetWork; + *labelEnv = labelNet; + + mindspore::lite::Context *context = new mindspore::lite::Context; + context->thread_num_ = num_thread; + context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND; + context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = false; + context->device_list_[0].device_type_ = mindspore::lite::DT_CPU; + + labelNet->CreateSessionMS(modelBuffer, bufferLen, context); + delete context; + + if (labelNet->session() == nullptr) { + MS_PRINT("MindSpore create session failed!."); + delete labelNet; + delete labelEnv; + return (jlong) nullptr; + } + + if (model_buffer != nullptr) { + env->DeleteLocalRef(model_buffer); + } + + return (jlong) labelEnv; +} + +/** + * After the inference environment is successfully created, + * sending a picture to the model and run inference. + */ +extern "C" JNIEXPORT jstring JNICALL +Java_com_mindspore_scene_gallery_classify_TrackingMobile_runNet(JNIEnv *env, jclass type, + jlong netEnv, + jobject srcBitmap) { + LiteMat lite_mat_bgr, lite_norm_mat_cut; + + if (!BitmapToLiteMat(env, srcBitmap, &lite_mat_bgr)) { + MS_PRINT("BitmapToLiteMat error"); + return NULL; + } + if (!PreProcessImageData(lite_mat_bgr, &lite_norm_mat_cut)) { + MS_PRINT("PreProcessImageData error"); + return NULL; + } + + ImgDims inputDims; + inputDims.channel = lite_norm_mat_cut.channel_; + inputDims.width = lite_norm_mat_cut.width_; + inputDims.height = lite_norm_mat_cut.height_; + + // Get the mindsore inference environment which created in loadModel(). + void **labelEnv = reinterpret_cast(netEnv); + if (labelEnv == nullptr) { + MS_PRINT("MindSpore error, labelEnv is a nullptr."); + return NULL; + } + MSNetWork *labelNet = static_cast(*labelEnv); + + auto mSession = labelNet->session(); + if (mSession == nullptr) { + MS_PRINT("MindSpore error, Session is a nullptr."); + return NULL; + } + MS_PRINT("MindSpore get session."); + + auto msInputs = mSession->GetInputs(); + if (msInputs.size() == 0) { + MS_PRINT("MindSpore error, msInputs.size() equals 0."); + return NULL; + } + auto inTensor = msInputs.front(); + + float *dataHWC = reinterpret_cast(lite_norm_mat_cut.data_ptr_); + // Copy dataHWC to the model input tensor. + memcpy(inTensor->MutableData(), dataHWC, + inputDims.channel * inputDims.width * inputDims.height * sizeof(float)); + + // After the model and image tensor data is loaded, run inference. + auto status = mSession->RunGraph(); + + if (status != mindspore::lite::RET_OK) { + MS_PRINT("MindSpore run net error."); + return NULL; + } + + /** + * Get the mindspore inference results. + * Return the map of output node name and MindSpore Lite MSTensor. 
+ */ + auto names = mSession->GetOutputTensorNames(); + std::unordered_map msOutputs; + for (const auto &name : names) { + auto temp_dat = mSession->GetOutputByTensorName(name); + msOutputs.insert(std::pair{name, temp_dat}); + } + + std::string resultStr = ProcessRunnetResult(::RET_CATEGORY_SUM, + ::labels_name_map, msOutputs); + + const char *resultCharData = resultStr.c_str(); + return (env)->NewStringUTF(resultCharData); +} + +extern "C" JNIEXPORT jboolean JNICALL +Java_com_mindspore_scene_gallery_classify_TrackingMobile_unloadModel(JNIEnv *env, + jclass type, + jlong netEnv) { + MS_PRINT("MindSpore release net."); + void **labelEnv = reinterpret_cast(netEnv); + if (labelEnv == nullptr) { + MS_PRINT("MindSpore error, labelEnv is a nullptr."); + } + MSNetWork *labelNet = static_cast(*labelEnv); + + labelNet->ReleaseNets(); + + return (jboolean) true; +} diff --git a/model_zoo/official/lite/scene_detection/app/src/main/cpp/MindSporeNetnative.h b/model_zoo/official/lite/scene_detection/app/src/main/cpp/MindSporeNetnative.h new file mode 100644 index 0000000000..b28dc37234 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/cpp/MindSporeNetnative.h @@ -0,0 +1,21 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H +#define MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H + + +#endif // MINDSPORE_JNI_HMS_DEBUG_MINDSPORENETNATIVE_H diff --git a/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/gallery/classify/CompareSizesByArea.java b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/gallery/classify/CompareSizesByArea.java new file mode 100644 index 0000000000..f73845f8f0 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/gallery/classify/CompareSizesByArea.java @@ -0,0 +1,34 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.mindspore.scene.gallery.classify; + +import android.util.Size; + +import java.util.Comparator; + +/** + * Data comparator. + */ + +public class CompareSizesByArea implements Comparator { + + @Override + public int compare(Size lhs, Size rhs) { + // We cast here to ensure the multiplications won't overflow + return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight()); + } + +} \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/gallery/classify/RecognitionObjectBean.java b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/gallery/classify/RecognitionObjectBean.java new file mode 100644 index 0000000000..3eec2773cd --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/gallery/classify/RecognitionObjectBean.java @@ -0,0 +1,45 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.mindspore.scene.gallery.classify; + +public class RecognitionObjectBean { + + private String name; + private float score; + + public RecognitionObjectBean(String name, float score) { + this.name = name; + this.score = score; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public float getScore() { + return score; + } + + public void setScore(float score) { + this.score = score; + } + + +} diff --git a/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/gallery/classify/TrackingMobile.java b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/gallery/classify/TrackingMobile.java new file mode 100644 index 0000000000..2c4f92305d --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/gallery/classify/TrackingMobile.java @@ -0,0 +1,126 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.mindspore.scene.gallery.classify; + +import android.content.Context; +import android.graphics.Bitmap; +import android.util.Log; + +import java.io.InputStream; +import java.nio.ByteBuffer; + +/** + * Call the MindSpore interface API in the Java layer. + */ +public class TrackingMobile { + private final static String TAG = "TrackingMobile"; + + static { + try { + System.loadLibrary("mlkit-label-MS"); + Log.i(TAG, "load libiMindSpore.so successfully."); + } catch (UnsatisfiedLinkError e) { + Log.e(TAG, "UnsatisfiedLinkError " + e.getMessage()); + } + } + + // The address of the running inference environment. + private long netEnv = 0; + + private final Context mActivity; + + public TrackingMobile(Context activity) { + this.mActivity = activity; + } + + /** + * JNI load model and also create model inference environment. + * + * @param modelBuffer Model buffer. + * @param numThread The num of thread. + * @return MindSpore Inference environment address. + */ + public native long loadModel(ByteBuffer modelBuffer, int numThread); + + /** + * Running model. + * + * @param netEnv Inference environment address. + * @param img A picture to be inferred. + * @return Inference result + */ + public native String runNet(long netEnv, Bitmap img); + + /** + * Unbind model data. + * + * @param netEnv Inference environment address. + * @return Unbound state. + */ + public native boolean unloadModel(long netEnv); + + /** + * The C++ side is encapsulated into a method of the MSNetWorks class + * + * @param modelPath Model file location + * @return Load model file status + */ + public boolean loadModelFromBuf(String modelPath) { + ByteBuffer buffer = loadModelFile(modelPath); + netEnv = loadModel(buffer, 2); //numThread's default setting is 2. + if (netEnv == 0) { // Loading model failed. + return false; + } + + return true; + } + + /** + * Run MindSpore inference. + */ + public String MindSpore_runnet(Bitmap img) { + String ret_str = runNet(netEnv, img); + return ret_str; + } + + /** + * Unload model. + * @return true + */ + public boolean unloadModel() { + unloadModel(netEnv); + return true; + } + + /** + * Load model file stream. + * @param modelPath Model file path. + * @return Model ByteBuffer. + */ + public ByteBuffer loadModelFile(String modelPath) { + InputStream is = null; + try { + is = mActivity.getAssets().open(modelPath); + byte[] bytes = new byte[is.available()]; + is.read(bytes); + return ByteBuffer.allocateDirect(bytes.length).put(bytes); + } catch (Exception e) { + Log.d("loadModelFile", " Exception occur. "); + Log.e(TAG, Log.getStackTraceString(e)); + } + return null; + } +} diff --git a/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/widget/AutoFitTextureView.java b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/widget/AutoFitTextureView.java new file mode 100644 index 0000000000..44006488f6 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/widget/AutoFitTextureView.java @@ -0,0 +1,73 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.mindspore.scene.widget; + +import android.content.Context; +import android.util.AttributeSet; +import android.view.TextureView; + +public class AutoFitTextureView extends TextureView { + + private int mRatioWidth = 0; + private int mRatioHeight = 0; + + public AutoFitTextureView(Context context) { + this(context, null); + } + + public AutoFitTextureView(Context context, AttributeSet attrs) { + this(context, attrs, 0); + } + + public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) { + super(context, attrs, defStyle); + } + + /** + * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio + * calculated from the parameters. Note that the actual sizes of parameters don't matter, that + * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result. + * + * @param width Relative horizontal size + * @param height Relative vertical size + */ + public void setAspectRatio(int width, int height) { + if (width < 0 || height < 0) { + throw new IllegalArgumentException("Size cannot be negative."); + } + mRatioWidth = width; + mRatioHeight = height; + requestLayout(); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + int width = MeasureSpec.getSize(widthMeasureSpec); + int height = MeasureSpec.getSize(heightMeasureSpec); + + if (0 == mRatioWidth || 0 == mRatioHeight) { + setMeasuredDimension(width, height); + } else { + if (width > height * mRatioWidth / mRatioHeight) { + setMeasuredDimension(width, width * mRatioHeight / mRatioWidth); + } else { + setMeasuredDimension(height * mRatioWidth / mRatioHeight, height); + } + } + } + +} diff --git a/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/widget/CameraActivity.java b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/widget/CameraActivity.java new file mode 100644 index 0000000000..f82dd419c8 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/widget/CameraActivity.java @@ -0,0 +1,184 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.mindspore.scene.widget; + +import android.Manifest; +import android.content.pm.PackageManager; +import android.graphics.Color; +import android.os.Build; +import android.os.Bundle; +import android.util.Log; +import android.view.Gravity; +import android.view.View; +import android.view.ViewGroup; +import android.view.WindowManager; +import android.widget.LinearLayout; +import android.widget.TextView; +import android.widget.Toast; + +import androidx.annotation.UiThread; +import androidx.appcompat.app.AppCompatActivity; + +import com.mindspore.scene.R; +import com.mindspore.scene.gallery.classify.RecognitionObjectBean; + +/** + * The main interface of camera preview. + * Using Camera 2 API. + */ +public class CameraActivity extends AppCompatActivity { + private static final String TAG = "CameraActivity"; + + private static final String BUNDLE_FRAGMENTS_KEY = "android:support:fragments"; + + private static final int PERMISSIONS_REQUEST = 1; + + private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA; + + private LinearLayout bottomLayout; + + private RecognitionObjectBean recognitionObjectBean; + + @Override + protected void onCreate(final Bundle savedInstanceState) { + Log.d(TAG, "onCreate"); + + if (savedInstanceState != null && this.clearFragmentsTag()) { + // Clear the state of the fragment when rebuilding. + savedInstanceState.remove(BUNDLE_FRAGMENTS_KEY); + } + + super.onCreate(null); + getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); + + setContentView(R.layout.activity_camera); + + if (hasPermission()) { + setFragment(); + } else { + requestPermission(); + } + + bottomLayout = findViewById(R.id.layout_bottom_content); + } + + @Override + public void onRequestPermissionsResult(final int requestCode, final String[] permissions, + final int[] grantResults) { + super.onRequestPermissionsResult(requestCode, permissions, grantResults); + if (requestCode == PERMISSIONS_REQUEST) { + if (allPermissionsGranted(grantResults)) { + setFragment(); + } else { + requestPermission(); + } + } + } + + private static boolean allPermissionsGranted(final int[] grantResults) { + for (int result : grantResults) { + if (result != PackageManager.PERMISSION_GRANTED) { + return false; + } + } + return true; + } + + private boolean hasPermission() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED; + } else { + return true; + } + } + + private void requestPermission() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) { + Toast.makeText(CameraActivity.this, "Camera permission is required for this demo", Toast.LENGTH_LONG) + .show(); + } + requestPermissions(new String[]{PERMISSION_CAMERA}, PERMISSIONS_REQUEST); + } + } + + + protected void setFragment() { + CameraFragment cameraFragment = CameraFragment.newInstance(new CameraFragment.RecognitionDataCallBack() { + @Override + public void onRecognitionDataCallBack(String result, final String time) { + dealRecognitionData(result, time); + } + }); + + getSupportFragmentManager().beginTransaction() + .replace(R.id.container, 
cameraFragment) + .commitAllowingStateLoss(); + } + + + private void dealRecognitionData(String result, final String time) { + if (!result.equals("") && result.contains(":")) { + String[] resultArray = result.split(":"); + recognitionObjectBean = new RecognitionObjectBean(resultArray[0], Float.valueOf(resultArray[1])); + } + + runOnUiThread(new Runnable() { + @Override + public void run() { + showResultsInBottomSheet(time); + } + }); + } + + @UiThread + protected void showResultsInBottomSheet(String time) { + bottomLayout.removeAllViews(); + if (recognitionObjectBean != null) { + HorTextView horTextView = new HorTextView(this); + horTextView.setLeftTitle(recognitionObjectBean.getName() + ":"); + horTextView.setRightContent(String.format("%.2f", (100 * recognitionObjectBean.getScore())) + "%"); + horTextView.setBottomLineVisible(View.VISIBLE); + bottomLayout.addView(horTextView); + + HorTextView horTimeView = new HorTextView(this); + horTimeView.setLeftTitle("Inference Time:"); + horTimeView.setRightContent(time); + horTimeView.setBottomLineVisible(View.INVISIBLE); + bottomLayout.addView(horTimeView); + } else { + TextView textView = new TextView(this); + textView.setLayoutParams(new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT)); + textView.setText("Keep moving."); + textView.setGravity(Gravity.CENTER); + textView.setTextColor(Color.BLACK); + textView.setTextSize(30); + bottomLayout.addView(textView); + } + } + + @Override + protected void onSaveInstanceState(Bundle outState) { + super.onSaveInstanceState(outState); + if (outState != null && this.clearFragmentsTag()) { + outState.remove(BUNDLE_FRAGMENTS_KEY); + } + } + + protected boolean clearFragmentsTag() { + return true; + } +} diff --git a/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/widget/CameraFragment.java b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/widget/CameraFragment.java new file mode 100644 index 0000000000..f2263a6dc0 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/widget/CameraFragment.java @@ -0,0 +1,823 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.mindspore.scene.widget; + +import android.annotation.SuppressLint; +import android.app.Activity; +import android.content.Context; +import android.content.res.Configuration; +import android.graphics.Bitmap; +import android.graphics.ImageFormat; +import android.graphics.Matrix; +import android.graphics.Point; +import android.graphics.RectF; +import android.graphics.SurfaceTexture; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.CameraMetadata; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.hardware.camera2.params.StreamConfigurationMap; +import android.media.Image; +import android.media.ImageReader; +import android.os.Bundle; +import android.os.Handler; +import android.os.HandlerThread; +import android.util.Log; +import android.util.Size; +import android.util.SparseIntArray; +import android.view.LayoutInflater; +import android.view.Surface; +import android.view.TextureView; +import android.view.View; +import android.view.ViewGroup; +import android.widget.Toast; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.fragment.app.Fragment; + +import com.mindspore.scene.R; +import com.mindspore.scene.gallery.classify.CompareSizesByArea; +import com.mindspore.scene.gallery.classify.TrackingMobile; + +import java.io.File; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; + +public class CameraFragment extends Fragment { + + private static final String TAG = "CameraFragment"; + + private static final SparseIntArray ORIENTATIONS = new SparseIntArray(); + + static { + ORIENTATIONS.append(Surface.ROTATION_0, 90); + ORIENTATIONS.append(Surface.ROTATION_90, 0); + ORIENTATIONS.append(Surface.ROTATION_180, 270); + ORIENTATIONS.append(Surface.ROTATION_270, 180); + } + + private static final int MAX_PREVIEW_WIDTH = 1920; + + private static final int MAX_PREVIEW_HEIGHT = 1280; + + private Semaphore mCameraOpenCloseLock = new Semaphore(1); + + private int mState = STATE_PREVIEW; + + private static final int STATE_PREVIEW = 0; + + private static final int STATE_WAITING_LOCK = 1; + + private static final int STATE_WAITING_PRECAPTURE = 2; + + private static final int STATE_WAITING_NON_PRECAPTURE = 3; + + private static final int STATE_PICTURE_TAKEN = 4; + + /** + * Data interface returned after identification. + */ + private RecognitionDataCallBack recognitionDataCallBack; + + + private AutoFitTextureView mTextureView; + + private TrackingMobile mTrackingMobile; + + private boolean mFlashSupported; + + private boolean isPreBackgroundThreadPause; + + + /** + * HandlerThread and Handler of camera and algorithm. 
+ */ + private HandlerThread mCameraHandlerThread, mMindsporeHandlerThread; + + private Handler mCameraHandler, mMindsporeHandler; + + private CameraManager mCameraManager; + + private CameraCaptureSession mCaptureSession; + + private CameraDevice mCameraDevice; + + private String mCameraId; + + private ImageReader mImageReader; + + private CaptureRequest.Builder mPreviewRequestBuilder; + + private CaptureRequest mPreviewRequest; + + private File mFile; + + private Size mPreviewSize; + + private int mSensorOrientation; + + private CameraDevice.StateCallback mCameraDeviceStateCallback; + + + private CameraFragment(RecognitionDataCallBack recognitionDataCallBack) { + this.recognitionDataCallBack = recognitionDataCallBack; + } + + /** + * Singleton. + * + * @param recognitionDataCallBack Identify data return interface. + * @return Construction method. + */ + public static CameraFragment newInstance(RecognitionDataCallBack recognitionDataCallBack) { + return new CameraFragment(recognitionDataCallBack); + } + + /** + * Data interface returned after identification. + */ + public interface RecognitionDataCallBack { + /** + * Data interface returned after identification. + * + * @param result Recognition result + * @param time Response time + */ + void onRecognitionDataCallBack(String result, String time); + } + + @Override + public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { + return inflater.inflate(R.layout.fragment_camera, container, false); + } + + @Override + public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { + mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture); + } + + @Override + public void onActivityCreated(@Nullable Bundle savedInstanceState) { + super.onActivityCreated(savedInstanceState); + mTrackingMobile = new TrackingMobile(getActivity()); + + String modelPath = "model/mobilenetv2.ms"; + boolean ret = mTrackingMobile.loadModelFromBuf(modelPath); + Log.d(TAG, "Loading model return value: " + ret); + } + + + @Override + public void onResume() { + super.onResume(); + initChildThread(); + initCameraManager(); + initSelectCamera(); + initHandlerMatchingSize(); + initImageReader(); + initTextureViewListener(); + } + + @Override + public void onDetach() { + super.onDetach(); + if (mTrackingMobile != null) { + boolean ret = mTrackingMobile.unloadModel(); + Log.d(TAG, "Unload model return value: " + ret); + } + } + + @Override + public void onPause() { + closeCamera(); + stopBackgroundThread(); + super.onPause(); + } + + private void initChildThread() { + mCameraHandlerThread = new HandlerThread("CAMERA2"); + mCameraHandlerThread.start(); + mCameraHandler = new Handler(mCameraHandlerThread.getLooper()); + + mMindsporeHandlerThread = new HandlerThread("MINDSPORE"); + mMindsporeHandlerThread.start(); + mMindsporeHandler = new Handler(mMindsporeHandlerThread.getLooper()); + mMindsporeHandler.postDelayed(classifyRunnable, 500); + } + + + /** + * Detect time-consuming threads + */ + private Runnable classifyRunnable = new Runnable() { + public void run() { + synchronized (CameraFragment.this) { + Bitmap bitmap = mTextureView.getBitmap(); + if (bitmap != null) { + long startTime = System.currentTimeMillis(); + // The current bitmap performs the sending request identification operation + String ret = mTrackingMobile.MindSpore_runnet(bitmap); + long endTime = System.currentTimeMillis(); + if (recognitionDataCallBack != null) { + // Interface returns data。 + 
recognitionDataCallBack.onRecognitionDataCallBack(ret, (endTime - startTime) + "ms "); + } + if (!bitmap.isRecycled()) { + bitmap.recycle(); + } + } + if (mMindsporeHandler != null && !isPreBackgroundThreadPause) { + mMindsporeHandler.postDelayed(classifyRunnable, 500); + } + } + } + }; + + private void initCameraManager() { + mCameraManager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE); + } + + private void initSelectCamera() { + try { + String[] cameraIdArray = mCameraManager.getCameraIdList(); + for (String itemId : cameraIdArray) { + CameraCharacteristics itemCharacteristics = mCameraManager.getCameraCharacteristics(itemId); + mSensorOrientation = itemCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); + Integer facing = itemCharacteristics.get(CameraCharacteristics.LENS_FACING); + if (facing == CameraCharacteristics.LENS_FACING_BACK) { + mCameraId = itemId; + break; + } + + } + } catch (CameraAccessException e) { + e.printStackTrace(); + } + if (mCameraId == null) { + Toast.makeText(getActivity(), getString(R.string.camera_error), Toast.LENGTH_SHORT).show(); + } + } + + private StreamConfigurationMap streamConfigurationMap; + private Size largest; + + /** + * Calculate the camera resolution suitable for the current screen resolution. + */ + private void initHandlerMatchingSize() { + try { + CameraCharacteristics cameraCharacteristics = mCameraManager.getCameraCharacteristics(mCameraId); + streamConfigurationMap = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); + + Size[] sizes = streamConfigurationMap.getOutputSizes(ImageFormat.JPEG); + largest = Collections.max(Arrays.asList(sizes), new CompareSizesByArea()); + + Boolean available = cameraCharacteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); + mFlashSupported = available == null ? false : available; + } catch (CameraAccessException e) { + e.printStackTrace(); + } + } + + /** + * Initialize the picture. + */ + private void initImageReader() { + final int W = 640; + final int H = 480; + + mImageReader = ImageReader.newInstance(W, H, ImageFormat.JPEG, 30); + mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() { + @Override + public void onImageAvailable(ImageReader reader) { + mFile = new File(getActivity().getExternalFilesDir(null), System.currentTimeMillis() + ".jpg"); + // Get the data frame and start the algorithm processing. + try { + // Get the next image from the ImageReader queue. 
+ Image image = reader.acquireNextImage(); + image.close(); + } catch (Exception e) { + Log.e(TAG, "onImageAvailable: " + e.toString()); + } + } + }, mCameraHandler); + } + + /** + * TextureView.SurfaceTextureListener + */ + private void initTextureViewListener() { + if (mTextureView.isAvailable()) { + openCamera(mTextureView.getWidth(), mTextureView.getHeight()); + Log.d(TAG, "isAvailable: " + mTextureView.getWidth() + "--" + mTextureView.getHeight()); + } else { + mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() { + @Override + public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) { + openCamera(width, height); + } + + @Override + public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) { + configureTransform(width, height); + } + + @Override + public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surfaceTexture) { + return true; + } + + @Override + public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surfaceTexture) { + + } + }); + } + } + + + @SuppressLint("MissingPermission") + private void openCamera(int width, int height) { + setPreviewSize(width, height, streamConfigurationMap, largest); + configureTransform(width, height); + + mCameraDeviceStateCallback = new CameraDevice.StateCallback() { + @Override + public void onOpened(@NonNull CameraDevice cameraDevice) { + if (cameraDevice != null) { + mCameraOpenCloseLock.release(); + mCameraDevice = cameraDevice; + createCameraPreviewSession(); + } + } + + @Override + public void onDisconnected(@NonNull CameraDevice cameraDevice) { + if (cameraDevice != null) { + mCameraOpenCloseLock.release(); + cameraDevice.close(); + mCameraDevice = null; + } + } + + @Override + public void onError(@NonNull CameraDevice cameraDevice, int i) { + if (cameraDevice != null) { + mCameraOpenCloseLock.release(); + cameraDevice.close(); + mCameraDevice = null; + if (null != getActivity()) { + getActivity().finish(); + } + } + } + }; + + + try { + if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) { + throw new RuntimeException("Time out waiting to lock ic_launcher opening."); + } + mCameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mCameraHandler); + } catch (CameraAccessException e) { + e.printStackTrace(); + } catch (InterruptedException e) { + throw new RuntimeException("Interrupted while trying to lock ic_launcher opening.", e); + } + + + } + + + /** + * Open camera preview. + */ + private void createCameraPreviewSession() { + try { + SurfaceTexture texture = mTextureView.getSurfaceTexture(); + // Set preview size. + texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight()); + // This is the output Surface we need to start preview. + Surface surface = new Surface(texture); + + mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); + mPreviewRequestBuilder.addTarget(surface); + // Here, we create a CameraCaptureSession for ic_launcher preview. + mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()), + new CameraCaptureSession.StateCallback() { + + @Override + public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) { + // The ic_launcher is already closed + if (null == mCameraDevice) { + return; + } + + // When the session is ready, we start displaying the preview. + mCaptureSession = cameraCaptureSession; + try { + // Auto focus should be continuous for ic_launcher preview. 
+ mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, + CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); + // Flash is automatically enabled when necessary. + setAutoFlash(mPreviewRequestBuilder); + // Finally, we start displaying the ic_launcher preview. + mPreviewRequest = mPreviewRequestBuilder.build(); + mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mCameraHandler); + } catch (CameraAccessException e) { + e.printStackTrace(); + } + } + + @Override + public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) { + Toast.makeText(getActivity(), "Failed", Toast.LENGTH_LONG).show(); + } + }, null); + } catch (CameraAccessException e) { + e.printStackTrace(); + } + } + + + private CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() { + + private void process(CaptureResult result) { + switch (mState) { + case STATE_PREVIEW: { + // We have nothing to do when the ic_launcher preview is working normally. + break; + } + case STATE_WAITING_LOCK: { + Integer afState = result.get(CaptureResult.CONTROL_AF_STATE); + if (afState == null) { + captureStillPicture(); + } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState + || CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) { + // CONTROL_AE_STATE can be null on some devices + Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); + if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) { + mState = STATE_PICTURE_TAKEN; + captureStillPicture(); + } else { + runPrecaptureSequence(); + } + } + break; + } + case STATE_WAITING_PRECAPTURE: { + // CONTROL_AE_STATE can be null on some devices + Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); + if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE + || aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) { + mState = STATE_WAITING_NON_PRECAPTURE; + } + break; + } + case STATE_WAITING_NON_PRECAPTURE: { + // CONTROL_AE_STATE can be null on some devices + Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); + if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) { + mState = STATE_PICTURE_TAKEN; + captureStillPicture(); + } + break; + } + } + } + + @Override + public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, + @NonNull CaptureResult partialResult) { + process(partialResult); + } + + @Override + public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + process(result); + } + }; + + + /** + * Run the precapture sequence for capturing a still image. This method should be called when + * we get a response in. + */ + private void runPrecaptureSequence() { + try { + // This is how to tell the ic_launcher to trigger. + mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, + CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START); + // Tell #mCaptureCallback to wait for the precapture sequence to be set. + mState = STATE_WAITING_PRECAPTURE; + mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mCameraHandler); + } catch (CameraAccessException e) { + e.printStackTrace(); + } + } + + /** + * Capture a still picture. 
This method should be called when we get a response in + * {@link #mCaptureCallback} from both the focus-lock and precapture sequences. + */ + private void captureStillPicture() { + try { + final Activity activity = getActivity(); + if (null == activity || null == mCameraDevice) { + return; + } + // This is the CaptureRequest.Builder that we use to take a picture. + final CaptureRequest.Builder captureBuilder = + mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); + captureBuilder.addTarget(mImageReader.getSurface()); + + // Use the same AE and AF modes as the preview. + captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); + setAutoFlash(captureBuilder); + + // Orientation + int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); + captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation)); + + CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() { + + @Override + public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + showToast("Saved: " + mFile); + Log.d(TAG, mFile.toString()); + unlockFocus(); + } + }; + + mCaptureSession.stopRepeating(); + mCaptureSession.abortCaptures(); + mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null); + } catch (CameraAccessException e) { + e.printStackTrace(); + } + } + + /** + * Retrieves the JPEG orientation from the specified screen rotation. + * + * @param rotation The screen rotation. + * @return The JPEG orientation (one of 0, 90, 180, or 270) + */ + private int getOrientation(int rotation) { + return (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360; + } + + /** + * Unlock the focus. This method should be called when the still image capture sequence is + * finished. + */ + private void unlockFocus() { + try { + // Reset the auto-focus trigger + mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL); + setAutoFlash(mPreviewRequestBuilder); + mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mCameraHandler); + // After this, the camera will go back to the normal state of preview. + mState = STATE_PREVIEW; + mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mCameraHandler); + } catch (CameraAccessException e) { + e.printStackTrace(); + } + } + + private void setAutoFlash(CaptureRequest.Builder requestBuilder) { + if (mFlashSupported) { + requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); + } + } + + protected void showToast(final String text) { + final Activity activity = getActivity(); + if (activity != null) { + activity.runOnUiThread(new Runnable() { + @Override + public void run() { + Toast.makeText(activity, text, Toast.LENGTH_SHORT).show(); + } + }); + } + } + + /** + * Configures the necessary {@link Matrix} transformation to `mTextureView`. + * This method should be called after the camera preview size is determined in + setUpCameraOutputs and also the size of `mTextureView` is fixed. 
+ * + * @param viewWidth The width of `mTextureView` + * @param viewHeight The height of `mTextureView` + */ + protected void configureTransform(int viewWidth, int viewHeight) { + Activity activity = getActivity(); + if (null == mTextureView || null == mPreviewSize || null == activity) { + return; + } + int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); + Matrix matrix = new Matrix(); + RectF viewRect = new RectF(0, 0, viewWidth, viewHeight); + RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth()); + float centerX = viewRect.centerX(); + float centerY = viewRect.centerY(); + if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) { + bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY()); + matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL); + float scale = + Math.max((float) viewHeight / mPreviewSize.getHeight(), (float) viewWidth / mPreviewSize.getWidth()); + matrix.postScale(scale, scale, centerX, centerY); + matrix.postRotate(90 * (rotation - 2), centerX, centerY); + } else if (Surface.ROTATION_180 == rotation) { + matrix.postRotate(180, centerX, centerY); + } + mTextureView.setTransform(matrix); + } + + + /** + * Set preview image size and positioning. + * + * @param width + * @param height + * @param map StreamConfigurationMap, the manager of all output formats and sizes supported by the camera. + * @param largest The max size + */ + private void setPreviewSize(int width, int height, StreamConfigurationMap map, Size largest) { + // Find out if we need to swap dimension to get the preview size relative to sensor coordinate. + int displayRotation = getActivity().getWindowManager().getDefaultDisplay().getRotation(); + Log.d(TAG, "displayRotation: " + displayRotation); + + boolean swappedDimensions = false; + switch (displayRotation) { + case Surface.ROTATION_0: + case Surface.ROTATION_180: + if (mSensorOrientation == 90 || mSensorOrientation == 270) { + swappedDimensions = true; + } + break; + case Surface.ROTATION_90: + case Surface.ROTATION_270: + if (mSensorOrientation == 0 || mSensorOrientation == 180) { + swappedDimensions = true; + } + break; + default: + Log.e(TAG, "Display rotation is invalid: " + displayRotation); + } + + Point displaySize = new Point(); + getActivity().getWindowManager().getDefaultDisplay().getSize(displaySize); + int rotatedPreviewWidth = width; + int rotatedPreviewHeight = height; + int maxPreviewWidth = displaySize.x; + int maxPreviewHeight = displaySize.y; + + if (swappedDimensions) { + rotatedPreviewWidth = height; + rotatedPreviewHeight = width; + maxPreviewWidth = displaySize.y; + maxPreviewHeight = displaySize.x; + } + + if (maxPreviewWidth > MAX_PREVIEW_WIDTH) { + maxPreviewWidth = MAX_PREVIEW_WIDTH; + } + + if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) { + maxPreviewHeight = MAX_PREVIEW_HEIGHT; + } + + // Danger, W.R.! Attempting to use too large a preview size could exceed the ic_launcher + // bus' bandwidth limitation, resulting in gorgeous previews but the storage of + // garbage capture data. + mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth, + rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest); + + // We fit the aspect ratio of TextureView to the size of preview we picked. 
+ int orientation = getResources().getConfiguration().orientation; // The screen orientation is fixed in the manifest. + if (orientation == Configuration.ORIENTATION_LANDSCAPE) { + mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight()); + } else { + mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth()); + } + } + + /** + * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that + * is at least as large as the respective texture view size, and that is at most as large as the + * respective max size, and whose aspect ratio matches with the specified value. If such size + * doesn't exist, choose the largest one that is at most as large as the respective max size, + * and whose aspect ratio matches with the specified value. + * + * @param choices The list of sizes that the camera supports for the intended output + * class + * @param textureViewWidth The width of the texture view relative to sensor coordinate + * @param textureViewHeight The height of the texture view relative to sensor coordinate + * @param maxWidth The maximum width that can be chosen + * @param maxHeight The maximum height that can be chosen + * @param aspectRatio The aspect ratio + * @return The optimal {@code Size}, or an arbitrary one if none were big enough + */ + protected Size chooseOptimalSize(Size[] choices, int textureViewWidth, int textureViewHeight, int maxWidth, + int maxHeight, Size aspectRatio) { + + // Collect the supported resolutions that are at least as big as the preview Surface + List<Size> bigEnough = new ArrayList<>(); + // Collect the supported resolutions that are smaller than the preview Surface + List<Size> notBigEnough = new ArrayList<>(); + int w = aspectRatio.getWidth(); + int h = aspectRatio.getHeight(); + for (Size option : choices) { + if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight + && option.getHeight() == option.getWidth() * h / w) { + // if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight) { + if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) { + bigEnough.add(option); + } else { + notBigEnough.add(option); + } + } + } + + if (bigEnough.size() > 0) { + return Collections.min(bigEnough, new CompareSizesByArea()); + } else if (notBigEnough.size() > 0) { + return Collections.max(notBigEnough, new CompareSizesByArea()); + } else { + Log.e(TAG, "Couldn't find any suitable preview size"); + return choices[0]; + } + } + + + /** + * Closes the current {@link CameraDevice}. 
+ */ + private void closeCamera() { + try { + mCameraOpenCloseLock.acquire(); + if (null != mCaptureSession) { + mCaptureSession.close(); + mCaptureSession = null; + } + if (null != mCameraDevice) { + mCameraDevice.close(); + mCameraDevice = null; + } + if (null != mImageReader) { + mImageReader.close(); + mImageReader = null; + } + } catch (InterruptedException e) { + throw new RuntimeException("Interrupted while trying to lock ic_launcher closing.", e); + } finally { + mCameraOpenCloseLock.release(); + } + } + + private void stopBackgroundThread() { + isPreBackgroundThreadPause = true; + mCameraHandlerThread.quitSafely(); + mMindsporeHandlerThread.quitSafely(); + try { + mCameraHandlerThread.join(); + mCameraHandlerThread = null; + mCameraHandler.removeCallbacksAndMessages(null); + mCameraHandler = null; + + mMindsporeHandlerThread.join(); + mMindsporeHandlerThread = null; + mMindsporeHandler.removeCallbacksAndMessages(null); + mMindsporeHandler = null; + } catch (InterruptedException e) { + e.printStackTrace(); + } + } +} diff --git a/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/widget/HorTextView.java b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/widget/HorTextView.java new file mode 100644 index 0000000000..fe504dbf25 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/java/com/mindspore/scene/widget/HorTextView.java @@ -0,0 +1,62 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + *

+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.mindspore.scene.widget; + +import android.content.Context; +import android.util.AttributeSet; +import android.view.LayoutInflater; +import android.view.View; +import android.widget.LinearLayout; +import android.widget.TextView; + +import androidx.annotation.Nullable; + +import com.mindspore.scene.R; + +public class HorTextView extends LinearLayout { + private TextView tvLeftTitle, tvRightContent; + private View viewBottomLine; + + public HorTextView(Context context) { + this(context, null); + } + + public HorTextView(Context context, @Nullable AttributeSet attrs) { + this(context, attrs, 0); + } + + public HorTextView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { + super(context, attrs, defStyleAttr); + LayoutInflater.from(context).inflate(R.layout.layout_hor_text_view, this); + tvLeftTitle = findViewById(R.id.tv_left_title); + tvRightContent = findViewById(R.id.tv_right_content); + viewBottomLine = findViewById(R.id.view_bottom_line); + } + + + public void setLeftTitle(String title) { + tvLeftTitle.setText(title); + } + + public void setRightContent(String content) { + tvRightContent.setText(content); + } + + public void setBottomLineVisible(int isVisible) { + viewBottomLine.setVisibility(isVisible); + } + +} diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/drawable-v24/ic_launcher_foreground.xml new file mode 100644 index 0000000000..2b068d1146 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/drawable-v24/ic_launcher_foreground.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/drawable/ic_launcher_background.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/drawable/ic_launcher_background.xml new file mode 100644 index 0000000000..d5fccc538c --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/drawable/ic_launcher_background.xml @@ -0,0 +1,170 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/drawable/rectangle.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/drawable/rectangle.xml new file mode 100644 index 0000000000..b8f5d3559c --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/drawable/rectangle.xml @@ -0,0 +1,13 @@ + + + + + + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/drawable/switch_thumb.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/drawable/switch_thumb.xml new file mode 100644 index 0000000000..ef6cfd2339 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/drawable/switch_thumb.xml @@ -0,0 +1,8 @@ + + + + + + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/drawable/switch_track.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/drawable/switch_track.xml new file mode 100644 index 0000000000..304fbfe8b9 --- /dev/null +++ 
b/model_zoo/official/lite/scene_detection/app/src/main/res/drawable/switch_track.xml @@ -0,0 +1,8 @@ + + + + + + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/layout/activity_camera.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/layout/activity_camera.xml new file mode 100644 index 0000000000..c167db7938 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/layout/activity_camera.xml @@ -0,0 +1,43 @@ + + + + + + + + + + + + + + diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/layout/activity_test.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/layout/activity_test.xml new file mode 100644 index 0000000000..8507a6c7ae --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/layout/activity_test.xml @@ -0,0 +1,12 @@ + + + + + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/layout/fragment_camera.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/layout/fragment_camera.xml new file mode 100644 index 0000000000..8dcd998284 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/layout/fragment_camera.xml @@ -0,0 +1,13 @@ + + + + + + diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/layout/layout_hor_text_view.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/layout/layout_hor_text_view.xml new file mode 100644 index 0000000000..c0aafc9d13 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/layout/layout_hor_text_view.xml @@ -0,0 +1,40 @@ + + + + + + + + + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml new file mode 100644 index 0000000000..c9ad5f98f1 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml new file mode 100644 index 0000000000..c9ad5f98f1 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-hdpi/ic_launcher.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-hdpi/ic_launcher.png new file mode 100644 index 0000000000..ad551dcf1c Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-hdpi/ic_launcher.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png new file mode 100644 index 0000000000..cea70b6899 Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-hdpi/ic_launcher_round.png new file mode 100644 index 0000000000..12602e9e7b Binary files /dev/null and 
b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-hdpi/ic_launcher_round.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-mdpi/ic_launcher.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-mdpi/ic_launcher.png new file mode 100644 index 0000000000..534bc3b190 Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-mdpi/ic_launcher.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png new file mode 100644 index 0000000000..0cc30d70ba Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-mdpi/ic_launcher_round.png new file mode 100644 index 0000000000..13d25d7dcb Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-mdpi/ic_launcher_round.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xhdpi/ic_launcher.png new file mode 100644 index 0000000000..ce4d40a739 Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png new file mode 100644 index 0000000000..4f8f320bf7 Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png new file mode 100644 index 0000000000..674ae88e4d Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxhdpi/ic_launcher.png new file mode 100644 index 0000000000..7c87876ea0 Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png new file mode 100644 index 0000000000..d4b8095ea9 Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png new file mode 100644 index 0000000000..2528822d4d Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png differ diff --git 
a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxhdpi/logo.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxhdpi/logo.png new file mode 100644 index 0000000000..c90f1dda43 Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxhdpi/logo.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png new file mode 100644 index 0000000000..cae0e3a326 Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png new file mode 100644 index 0000000000..c1229997eb Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png new file mode 100644 index 0000000000..24849e26b4 Binary files /dev/null and b/model_zoo/official/lite/scene_detection/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png differ diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/values/colors.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/values/colors.xml new file mode 100644 index 0000000000..73bd50bbb2 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/values/colors.xml @@ -0,0 +1,18 @@ + + + #6200EE + #3700B3 + #03DAC5 + + + #00D4EF + #03A9F4 + #769DDA + + #66000000 + + #ffffff + #000000 + #A69D9D + + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/values/dimens.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/values/dimens.xml new file mode 100644 index 0000000000..d039c7f2c7 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/values/dimens.xml @@ -0,0 +1,9 @@ + + + 15dp + 8dp + + 15dp + 6dp + 20sp + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/values/ic_launcher_background.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/values/ic_launcher_background.xml new file mode 100644 index 0000000000..c5d5899fdf --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/values/ic_launcher_background.xml @@ -0,0 +1,4 @@ + + + #FFFFFF + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/values/strings.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/values/strings.xml new file mode 100644 index 0000000000..b829c5b310 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/values/strings.xml @@ -0,0 +1,30 @@ + + SceneDetection + 设置 + + This sample needs camera permission. + This device doesn\'t support Camera2 API. + 使用的google的camera2demo. + EXAMPLE + Add widget + + + TFL Classify + This device doesn\'t support Camera2 API. + GPU does not yet supported quantized models. 
+ Model: + + Quantized_EfficientNet + Float_EfficientNet + Quantized_MobileNet + Float_MobileNet + + + Device: + + CPU + GPU + NNAPI + + + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/app/src/main/res/values/styles.xml b/model_zoo/official/lite/scene_detection/app/src/main/res/values/styles.xml new file mode 100644 index 0000000000..13f74a4612 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/app/src/main/res/values/styles.xml @@ -0,0 +1,10 @@ + + + + + \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/build.gradle b/model_zoo/official/lite/scene_detection/build.gradle new file mode 100644 index 0000000000..6754c23d52 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/build.gradle @@ -0,0 +1,24 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules. +buildscript { + repositories { + google() + jcenter() + } + dependencies { + classpath "com.android.tools.build:gradle:4.0.1" + + // NOTE: Do not place your application dependencies here; they belong + // in the individual module build.gradle files + } +} + +allprojects { + repositories { + google() + jcenter() + } +} + +task clean(type: Delete) { + delete rootProject.buildDir +} \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/gradle.properties b/model_zoo/official/lite/scene_detection/gradle.properties new file mode 100644 index 0000000000..f5020a3b37 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/gradle.properties @@ -0,0 +1,20 @@ +# Project-wide Gradle settings. +# IDE (e.g. Android Studio) users: +# Gradle settings configured through the IDE *will override* +# any settings specified in this file. +# For more details on how to configure your build environment visit +# http://www.gradle.org/docs/current/userguide/build_environment.html +# Specifies the JVM arguments used for the daemon process. +# The setting is particularly useful for tweaking memory settings. +org.gradle.jvmargs=-Xmx2048m +# When configured, Gradle will run in incubating parallel mode. +# This option should only be used with decoupled projects. 
More details, visit +# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects +# org.gradle.parallel=true +# AndroidX package structure to make it clearer which packages are bundled with the +# Android operating system, and which are packaged with your app"s APK +# https://developer.android.com/topic/libraries/support-library/androidx-rn +android.useAndroidX=true +# Automatically convert third-party libraries to use AndroidX +android.enableJetifier=true +android.injected.testOnly=false \ No newline at end of file diff --git a/model_zoo/official/lite/scene_detection/gradle/wrapper/gradle-wrapper.jar b/model_zoo/official/lite/scene_detection/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000000..f6b961fd5a Binary files /dev/null and b/model_zoo/official/lite/scene_detection/gradle/wrapper/gradle-wrapper.jar differ diff --git a/model_zoo/official/lite/scene_detection/gradle/wrapper/gradle-wrapper.properties b/model_zoo/official/lite/scene_detection/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000000..1d8635ad73 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Tue Jul 28 10:28:05 CST 2020 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-all.zip diff --git a/model_zoo/official/lite/scene_detection/gradlew b/model_zoo/official/lite/scene_detection/gradlew new file mode 100644 index 0000000000..cccdd3d517 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/gradlew @@ -0,0 +1,172 @@ +#!/usr/bin/env sh + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." 
+ fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=$(save "$@") + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then + cd "$(dirname "$0")" +fi + +exec "$JAVACMD" "$@" diff --git a/model_zoo/official/lite/scene_detection/gradlew.bat b/model_zoo/official/lite/scene_detection/gradlew.bat new file mode 100644 index 0000000000..f9553162f1 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/gradlew.bat @@ -0,0 +1,84 @@ +@if "%DEBUG%" == "" @echo off +@rem 
########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/model_zoo/official/lite/scene_detection/settings.gradle b/model_zoo/official/lite/scene_detection/settings.gradle new file mode 100644 index 0000000000..1e9becd467 --- /dev/null +++ b/model_zoo/official/lite/scene_detection/settings.gradle @@ -0,0 +1,2 @@ +include ':app' +rootProject.name = "Scene" \ No newline at end of file
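For reference, below is a minimal usage sketch of the CameraFragment added by this patch: a host activity attaches the fragment and consumes results delivered through RecognitionDataCallBack. Only CameraFragment.newInstance and onRecognitionDataCallBack come from the code above; the activity name, the fragment's import path, and the container view id are illustrative placeholders, and the sketch assumes CameraFragment extends androidx.fragment.app.Fragment.

import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import com.mindspore.scene.CameraFragment; // placeholder: adjust to CameraFragment's actual package in this patch

public class HostActivity extends AppCompatActivity { // hypothetical host activity, not part of this patch
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera);
        // Build the fragment with a callback that receives the "result, time" pair produced by classifyRunnable.
        CameraFragment fragment = CameraFragment.newInstance(new CameraFragment.RecognitionDataCallBack() {
            @Override
            public void onRecognitionDataCallBack(final String result, final String time) {
                // Invoked on the MINDSPORE handler thread; hop to the UI thread before touching views.
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        setTitle(result + " (" + time + ")");
                    }
                });
            }
        });
        getSupportFragmentManager()
                .beginTransaction()
                .replace(R.id.container, fragment) // R.id.container is a placeholder container id
                .commitAllowingStateLoss();
    }
}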