master c87964b9ddec cached
93 files
209.5 KB
63.4k tokens
138 symbols
1 requests
Download .txt
Showing preview only (239K chars total). Download the full file or copy to clipboard to get everything.
Repository: chehongshu/ncnnforandroid_objectiondetection_Mobilenetssd
Branch: master
Commit: c87964b9ddec
Files: 93
Total size: 209.5 KB

Directory structure:
gitextract_1slkszqd/

├── MobileNetSSD_demo/
│   ├── .gitignore
│   ├── .idea/
│   │   ├── codeStyles/
│   │   │   └── Project.xml
│   │   ├── gradle.xml
│   │   └── misc.xml
│   ├── app/
│   │   ├── .gitignore
│   │   ├── CMakeLists.txt
│   │   ├── build.gradle
│   │   ├── proguard-rules.pro
│   │   └── src/
│   │       ├── androidTest/
│   │       │   └── java/
│   │       │       └── com/
│   │       │           └── example/
│   │       │               └── che/
│   │       │                   └── mobilenetssd_demo/
│   │       │                       └── ExampleInstrumentedTest.java
│   │       ├── main/
│   │       │   ├── AndroidManifest.xml
│   │       │   ├── assets/
│   │       │   │   └── words.txt
│   │       │   ├── cpp/
│   │       │   │   ├── MobileNetSSD_deploy.id.h
│   │       │   │   ├── MobileNetssd.cpp
│   │       │   │   └── include/
│   │       │   │       ├── allocator.h
│   │       │   │       ├── benchmark.h
│   │       │   │       ├── blob.h
│   │       │   │       ├── cpu.h
│   │       │   │       ├── layer.h
│   │       │   │       ├── layer_type.h
│   │       │   │       ├── layer_type_enum.h
│   │       │   │       ├── mat.h
│   │       │   │       ├── modelbin.h
│   │       │   │       ├── net.h
│   │       │   │       ├── opencv.h
│   │       │   │       ├── paramdict.h
│   │       │   │       └── platform.h
│   │       │   ├── java/
│   │       │   │   └── com/
│   │       │   │       └── example/
│   │       │   │           └── che/
│   │       │   │               └── mobilenetssd_demo/
│   │       │   │                   ├── MainActivity.java
│   │       │   │                   ├── MobileNetssd.java
│   │       │   │                   └── PhotoUtil.java
│   │       │   ├── jniLibs/
│   │       │   │   └── armeabi-v7a/
│   │       │   │       └── libncnn.a
│   │       │   └── res/
│   │       │       ├── drawable/
│   │       │       │   └── ic_launcher_background.xml
│   │       │       ├── drawable-v24/
│   │       │       │   └── ic_launcher_foreground.xml
│   │       │       ├── layout/
│   │       │       │   └── activity_main.xml
│   │       │       ├── mipmap-anydpi-v26/
│   │       │       │   ├── ic_launcher.xml
│   │       │       │   └── ic_launcher_round.xml
│   │       │       └── values/
│   │       │           ├── colors.xml
│   │       │           ├── strings.xml
│   │       │           └── styles.xml
│   │       └── test/
│   │           └── java/
│   │               └── com/
│   │                   └── example/
│   │                       └── che/
│   │                           └── mobilenetssd_demo/
│   │                               └── ExampleUnitTest.java
│   ├── build.gradle
│   ├── gradle/
│   │   └── wrapper/
│   │       ├── gradle-wrapper.jar
│   │       └── gradle-wrapper.properties
│   ├── gradle.properties
│   ├── gradlew
│   ├── gradlew.bat
│   └── settings.gradle
├── MobileNetSSD_demo_single/
│   ├── .gitignore
│   ├── .idea/
│   │   ├── codeStyles/
│   │   │   └── Project.xml
│   │   ├── gradle.xml
│   │   └── misc.xml
│   ├── app/
│   │   ├── .gitignore
│   │   ├── CMakeLists.txt
│   │   ├── build.gradle
│   │   ├── proguard-rules.pro
│   │   └── src/
│   │       ├── androidTest/
│   │       │   └── java/
│   │       │       └── com/
│   │       │           └── example/
│   │       │               └── che/
│   │       │                   └── mobilenetssd_demo/
│   │       │                       └── ExampleInstrumentedTest.java
│   │       ├── main/
│   │       │   ├── AndroidManifest.xml
│   │       │   ├── assets/
│   │       │   │   └── words.txt
│   │       │   ├── cpp/
│   │       │   │   ├── MobileNetSSD_deploy.id.h
│   │       │   │   ├── MobileNetssd.cpp
│   │       │   │   └── include/
│   │       │   │       ├── allocator.h
│   │       │   │       ├── benchmark.h
│   │       │   │       ├── blob.h
│   │       │   │       ├── cpu.h
│   │       │   │       ├── layer.h
│   │       │   │       ├── layer_type.h
│   │       │   │       ├── layer_type_enum.h
│   │       │   │       ├── mat.h
│   │       │   │       ├── modelbin.h
│   │       │   │       ├── net.h
│   │       │   │       ├── opencv.h
│   │       │   │       ├── paramdict.h
│   │       │   │       └── platform.h
│   │       │   ├── java/
│   │       │   │   └── com/
│   │       │   │       └── example/
│   │       │   │           └── che/
│   │       │   │               └── mobilenetssd_demo/
│   │       │   │                   ├── MainActivity.java
│   │       │   │                   ├── MobileNetssd.java
│   │       │   │                   └── PhotoUtil.java
│   │       │   ├── jniLibs/
│   │       │   │   └── armeabi-v7a/
│   │       │   │       └── libncnn.a
│   │       │   └── res/
│   │       │       ├── drawable/
│   │       │       │   └── ic_launcher_background.xml
│   │       │       ├── drawable-v24/
│   │       │       │   └── ic_launcher_foreground.xml
│   │       │       ├── layout/
│   │       │       │   └── activity_main.xml
│   │       │       ├── mipmap-anydpi-v26/
│   │       │       │   ├── ic_launcher.xml
│   │       │       │   └── ic_launcher_round.xml
│   │       │       └── values/
│   │       │           ├── colors.xml
│   │       │           ├── strings.xml
│   │       │           └── styles.xml
│   │       └── test/
│   │           └── java/
│   │               └── com/
│   │                   └── example/
│   │                       └── che/
│   │                           └── mobilenetssd_demo/
│   │                               └── ExampleUnitTest.java
│   ├── build.gradle
│   ├── gradle/
│   │   └── wrapper/
│   │       ├── gradle-wrapper.jar
│   │       └── gradle-wrapper.properties
│   ├── gradle.properties
│   ├── gradlew
│   ├── gradlew.bat
│   └── settings.gradle
└── README.md

================================================
FILE CONTENTS
================================================

================================================
FILE: MobileNetSSD_demo/.gitignore
================================================
*.iml
.gradle
/local.properties
/.idea/caches/build_file_checksums.ser
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
.DS_Store
/build
/captures
.externalNativeBuild


================================================
FILE: MobileNetSSD_demo/.idea/codeStyles/Project.xml
================================================
<component name="ProjectCodeStyleConfiguration">
  <code_scheme name="Project" version="173">
    <Objective-C-extensions>
      <file>
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Import" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Macro" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Typedef" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Enum" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Constant" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Global" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Struct" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="FunctionPredecl" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Function" />
      </file>
      <class>
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Property" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Synthesize" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="InitMethod" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="StaticMethod" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="InstanceMethod" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="DeallocMethod" />
      </class>
      <extensions>
        <pair source="cpp" header="h" fileNamingConvention="NONE" />
        <pair source="c" header="h" fileNamingConvention="NONE" />
      </extensions>
    </Objective-C-extensions>
  </code_scheme>
</component>

================================================
FILE: MobileNetSSD_demo/.idea/gradle.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="GradleSettings">
    <option name="linkedExternalProjectsSettings">
      <GradleProjectSettings>
        <option name="distributionType" value="DEFAULT_WRAPPED" />
        <option name="externalProjectPath" value="$PROJECT_DIR$" />
        <option name="modules">
          <set>
            <option value="$PROJECT_DIR$" />
            <option value="$PROJECT_DIR$/app" />
          </set>
        </option>
        <option name="resolveModulePerSourceSet" value="false" />
      </GradleProjectSettings>
    </option>
  </component>
</project>

================================================
FILE: MobileNetSSD_demo/.idea/misc.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="NullableNotNullManager">
    <option name="myDefaultNullable" value="android.support.annotation.Nullable" />
    <option name="myDefaultNotNull" value="android.support.annotation.NonNull" />
    <option name="myNullables">
      <value>
        <list size="7">
          <item index="0" class="java.lang.String" itemvalue="org.jetbrains.annotations.Nullable" />
          <item index="1" class="java.lang.String" itemvalue="javax.annotation.Nullable" />
          <item index="2" class="java.lang.String" itemvalue="javax.annotation.CheckForNull" />
          <item index="3" class="java.lang.String" itemvalue="edu.umd.cs.findbugs.annotations.Nullable" />
          <item index="4" class="java.lang.String" itemvalue="android.support.annotation.Nullable" />
          <item index="5" class="java.lang.String" itemvalue="androidx.annotation.Nullable" />
          <item index="6" class="java.lang.String" itemvalue="androidx.annotation.RecentlyNullable" />
        </list>
      </value>
    </option>
    <option name="myNotNulls">
      <value>
        <list size="6">
          <item index="0" class="java.lang.String" itemvalue="org.jetbrains.annotations.NotNull" />
          <item index="1" class="java.lang.String" itemvalue="javax.annotation.Nonnull" />
          <item index="2" class="java.lang.String" itemvalue="edu.umd.cs.findbugs.annotations.NonNull" />
          <item index="3" class="java.lang.String" itemvalue="android.support.annotation.NonNull" />
          <item index="4" class="java.lang.String" itemvalue="androidx.annotation.NonNull" />
          <item index="5" class="java.lang.String" itemvalue="androidx.annotation.RecentlyNonNull" />
        </list>
      </value>
    </option>
  </component>
  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_7" project-jdk-name="1.8" project-jdk-type="JavaSDK">
    <output url="file://$PROJECT_DIR$/build/classes" />
  </component>
  <component name="ProjectType">
    <option name="id" value="Android" />
  </component>
</project>

================================================
FILE: MobileNetSSD_demo/app/.gitignore
================================================
/build


================================================
FILE: MobileNetSSD_demo/app/CMakeLists.txt
================================================
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html

# Sets the minimum version of CMake required to build the native library.

cmake_minimum_required(VERSION 3.4.1)

# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.

## Added for ncnn: import the prebuilt static library produced by ncnn's
## "make install" (see src/main/jniLibs/armeabi-v7a/libncnn.a).
## Note: "ncnn_lib" is used both as a CMake variable (the path) and as the
## IMPORTED target name; this works because ${ncnn_lib} expands the variable.
set(ncnn_lib ${CMAKE_SOURCE_DIR}/src/main/jniLibs/armeabi-v7a/libncnn.a)
add_library (ncnn_lib STATIC IMPORTED)
set_target_properties(ncnn_lib PROPERTIES IMPORTED_LOCATION ${ncnn_lib})

add_library( # Sets the name of the library.
        MobileNetssd ## name of the generated .so; best kept identical to the .cpp file name

        # Sets the library as a shared library.
        SHARED

        # Provides a relative path to your source file(s).
        src/main/cpp/MobileNetssd.cpp)## the JNI source file implementing Init/Detect

# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.

find_library( # Sets the name of the path variable.
        log-lib

        # Specifies the name of the NDK library that
        # you want CMake to locate.
        log)

# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.

target_link_libraries( # Specifies the target library.
        ## the following three entries are all required
        MobileNetssd   # the JNI shared library defined above
        ncnn_lib       # the imported prebuilt ncnn static library
        jnigraphics    # NDK bitmap library (AndroidBitmap_* used in MobileNetssd.cpp)

        # Links the target library to the log library
        # included in the NDK.
        ${log-lib})

================================================
FILE: MobileNetSSD_demo/app/build.gradle
================================================
apply plugin: 'com.android.application'

android {
    compileSdkVersion 28
    defaultConfig {
        applicationId "com.example.che.mobilenetssd_demo"
        minSdkVersion 15
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
        externalNativeBuild {
            cmake {
                cppFlags "-std=c++11 -fopenmp"// C++11 + OpenMP (ncnn uses OpenMP for multithreading) -- required
                abiFilters "armeabi-v7a" // target ABI; armeabi-v7a runs on virtually all ARM phones
            }
        }
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
    externalNativeBuild {
        cmake {
            path "CMakeLists.txt"
        }
    }

    // Required so Gradle packages the prebuilt libncnn.a (from ncnn "make install")
    // found under src/main/jniLibs.
    sourceSets {
        main {
            jniLibs.srcDirs = ["src/main/jniLibs"]
            // NOTE(review): 'src/cpp' does not exist (native sources live in
            // src/main/cpp and are built via CMake above); this line effectively
            // leaves the legacy ndk-build JNI source set empty -- confirm intent.
            jni.srcDirs = ['src/cpp']
        }
    }
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'com.android.support:appcompat-v7:28.0.0'
    implementation 'com.android.support.constraint:constraint-layout:1.1.3'
    testImplementation 'junit:junit:4.12'
    implementation 'com.github.bumptech.glide:glide:4.3.1'   // image loading library used by the demo UI
    androidTestImplementation 'com.android.support.test:runner:1.0.2'
    androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
}


================================================
FILE: MobileNetSSD_demo/app/proguard-rules.pro
================================================
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
#   http://developer.android.com/guide/developing/tools/proguard.html

# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
#   public *;
#}

# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable

# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile


================================================
FILE: MobileNetSSD_demo/app/src/androidTest/java/com/example/che/mobilenetssd_demo/ExampleInstrumentedTest.java
================================================
package com.example.che.mobilenetssd_demo;

import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;

import org.junit.Test;
import org.junit.runner.RunWith;

import static org.junit.Assert.*;

/**
 * Instrumented test, which will execute on an Android device.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {

    /**
     * Verifies that the package name of the app under test matches the
     * applicationId declared in build.gradle.
     */
    @Test
    public void useAppContext() {
        // Fetch the target application's context from the instrumentation registry.
        final Context targetContext = InstrumentationRegistry.getTargetContext();
        final String packageName = targetContext.getPackageName();

        assertEquals("com.example.che.mobilenetssd_demo", packageName);
    }
}


================================================
FILE: MobileNetSSD_demo/app/src/main/AndroidManifest.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.che.mobilenetssd_demo">

    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
    <uses-permission android:name="android.permission.CAMERA"/>

    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:theme="@style/AppTheme">
        <activity android:name=".MainActivity">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />

                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

</manifest>

================================================
FILE: MobileNetSSD_demo/app/src/main/assets/words.txt
================================================
background
aeroplane
bicycle
bird
boat
bottle
bus
car
cat
chair
cow
diningtable
dog
horse
motorbike
person
pottedplant
sheep
sofa
train
tvmonitor

================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/MobileNetSSD_deploy.id.h
================================================
// Machine-generated id table for the MobileNet-SSD ncnn model.
// Each LAYER_* constant is a layer index and each BLOB_* constant is a blob
// (tensor) index inside the converted .param file; MobileNetssd.cpp feeds the
// network via BLOB_data and reads results via BLOB_detection_out.
// Presumably produced by ncnn's model-to-memory conversion tooling (ncnn2mem)
// -- do not edit by hand; regenerate if the model changes.
#ifndef NCNN_INCLUDE_GUARD_MobileNetSSD_deploy_id_h
#define NCNN_INCLUDE_GUARD_MobileNetSSD_deploy_id_h
namespace MobileNetSSD_deploy_param_id {
const int LAYER_input = 0;
const int BLOB_data = 0;
const int LAYER_splitncnn_0 = 1;
const int BLOB_data_splitncnn_0 = 1;
const int BLOB_data_splitncnn_1 = 2;
const int BLOB_data_splitncnn_2 = 3;
const int BLOB_data_splitncnn_3 = 4;
const int BLOB_data_splitncnn_4 = 5;
const int BLOB_data_splitncnn_5 = 6;
const int BLOB_data_splitncnn_6 = 7;
const int LAYER_conv0 = 2;
const int BLOB_conv0 = 8;
const int LAYER_conv0_relu = 3;
const int BLOB_conv0_conv0_relu = 9;
const int LAYER_conv1_dw = 4;
const int BLOB_conv1_dw = 10;
const int LAYER_conv1_dw_relu = 5;
const int BLOB_conv1_dw_conv1_dw_relu = 11;
const int LAYER_conv1 = 6;
const int BLOB_conv1 = 12;
const int LAYER_conv1_relu = 7;
const int BLOB_conv1_conv1_relu = 13;
const int LAYER_conv2_dw = 8;
const int BLOB_conv2_dw = 14;
const int LAYER_conv2_dw_relu = 9;
const int BLOB_conv2_dw_conv2_dw_relu = 15;
const int LAYER_conv2 = 10;
const int BLOB_conv2 = 16;
const int LAYER_conv2_relu = 11;
const int BLOB_conv2_conv2_relu = 17;
const int LAYER_conv3_dw = 12;
const int BLOB_conv3_dw = 18;
const int LAYER_conv3_dw_relu = 13;
const int BLOB_conv3_dw_conv3_dw_relu = 19;
const int LAYER_conv3 = 14;
const int BLOB_conv3 = 20;
const int LAYER_conv3_relu = 15;
const int BLOB_conv3_conv3_relu = 21;
const int LAYER_conv4_dw = 16;
const int BLOB_conv4_dw = 22;
const int LAYER_conv4_dw_relu = 17;
const int BLOB_conv4_dw_conv4_dw_relu = 23;
const int LAYER_conv4 = 18;
const int BLOB_conv4 = 24;
const int LAYER_conv4_relu = 19;
const int BLOB_conv4_conv4_relu = 25;
const int LAYER_conv5_dw = 20;
const int BLOB_conv5_dw = 26;
const int LAYER_conv5_dw_relu = 21;
const int BLOB_conv5_dw_conv5_dw_relu = 27;
const int LAYER_conv5 = 22;
const int BLOB_conv5 = 28;
const int LAYER_conv5_relu = 23;
const int BLOB_conv5_conv5_relu = 29;
const int LAYER_conv6_dw = 24;
const int BLOB_conv6_dw = 30;
const int LAYER_conv6_dw_relu = 25;
const int BLOB_conv6_dw_conv6_dw_relu = 31;
const int LAYER_conv6 = 26;
const int BLOB_conv6 = 32;
const int LAYER_conv6_relu = 27;
const int BLOB_conv6_conv6_relu = 33;
const int LAYER_conv7_dw = 28;
const int BLOB_conv7_dw = 34;
const int LAYER_conv7_dw_relu = 29;
const int BLOB_conv7_dw_conv7_dw_relu = 35;
const int LAYER_conv7 = 30;
const int BLOB_conv7 = 36;
const int LAYER_conv7_relu = 31;
const int BLOB_conv7_conv7_relu = 37;
const int LAYER_conv8_dw = 32;
const int BLOB_conv8_dw = 38;
const int LAYER_conv8_dw_relu = 33;
const int BLOB_conv8_dw_conv8_dw_relu = 39;
const int LAYER_conv8 = 34;
const int BLOB_conv8 = 40;
const int LAYER_conv8_relu = 35;
const int BLOB_conv8_conv8_relu = 41;
const int LAYER_conv9_dw = 36;
const int BLOB_conv9_dw = 42;
const int LAYER_conv9_dw_relu = 37;
const int BLOB_conv9_dw_conv9_dw_relu = 43;
const int LAYER_conv9 = 38;
const int BLOB_conv9 = 44;
const int LAYER_conv9_relu = 39;
const int BLOB_conv9_conv9_relu = 45;
const int LAYER_conv10_dw = 40;
const int BLOB_conv10_dw = 46;
const int LAYER_conv10_dw_relu = 41;
const int BLOB_conv10_dw_conv10_dw_relu = 47;
const int LAYER_conv10 = 42;
const int BLOB_conv10 = 48;
const int LAYER_conv10_relu = 43;
const int BLOB_conv10_conv10_relu = 49;
const int LAYER_conv11_dw = 44;
const int BLOB_conv11_dw = 50;
const int LAYER_conv11_dw_relu = 45;
const int BLOB_conv11_dw_conv11_dw_relu = 51;
const int LAYER_conv11 = 46;
const int BLOB_conv11 = 52;
const int LAYER_conv11_relu = 47;
const int BLOB_conv11_conv11_relu = 53;
const int LAYER_splitncnn_1 = 48;
const int BLOB_conv11_conv11_relu_splitncnn_0 = 54;
const int BLOB_conv11_conv11_relu_splitncnn_1 = 55;
const int BLOB_conv11_conv11_relu_splitncnn_2 = 56;
const int BLOB_conv11_conv11_relu_splitncnn_3 = 57;
const int LAYER_conv12_dw = 49;
const int BLOB_conv12_dw = 58;
const int LAYER_conv12_dw_relu = 50;
const int BLOB_conv12_dw_conv12_dw_relu = 59;
const int LAYER_conv12 = 51;
const int BLOB_conv12 = 60;
const int LAYER_conv12_relu = 52;
const int BLOB_conv12_conv12_relu = 61;
const int LAYER_conv13_dw = 53;
const int BLOB_conv13_dw = 62;
const int LAYER_conv13_dw_relu = 54;
const int BLOB_conv13_dw_conv13_dw_relu = 63;
const int LAYER_conv13 = 55;
const int BLOB_conv13 = 64;
const int LAYER_conv13_relu = 56;
const int BLOB_conv13_conv13_relu = 65;
const int LAYER_splitncnn_2 = 57;
const int BLOB_conv13_conv13_relu_splitncnn_0 = 66;
const int BLOB_conv13_conv13_relu_splitncnn_1 = 67;
const int BLOB_conv13_conv13_relu_splitncnn_2 = 68;
const int BLOB_conv13_conv13_relu_splitncnn_3 = 69;
const int LAYER_conv14_1 = 58;
const int BLOB_conv14_1 = 70;
const int LAYER_conv14_1_relu = 59;
const int BLOB_conv14_1_conv14_1_relu = 71;
const int LAYER_conv14_2 = 60;
const int BLOB_conv14_2 = 72;
const int LAYER_conv14_2_relu = 61;
const int BLOB_conv14_2_conv14_2_relu = 73;
const int LAYER_splitncnn_3 = 62;
const int BLOB_conv14_2_conv14_2_relu_splitncnn_0 = 74;
const int BLOB_conv14_2_conv14_2_relu_splitncnn_1 = 75;
const int BLOB_conv14_2_conv14_2_relu_splitncnn_2 = 76;
const int BLOB_conv14_2_conv14_2_relu_splitncnn_3 = 77;
const int LAYER_conv15_1 = 63;
const int BLOB_conv15_1 = 78;
const int LAYER_conv15_1_relu = 64;
const int BLOB_conv15_1_conv15_1_relu = 79;
const int LAYER_conv15_2 = 65;
const int BLOB_conv15_2 = 80;
const int LAYER_conv15_2_relu = 66;
const int BLOB_conv15_2_conv15_2_relu = 81;
const int LAYER_splitncnn_4 = 67;
const int BLOB_conv15_2_conv15_2_relu_splitncnn_0 = 82;
const int BLOB_conv15_2_conv15_2_relu_splitncnn_1 = 83;
const int BLOB_conv15_2_conv15_2_relu_splitncnn_2 = 84;
const int BLOB_conv15_2_conv15_2_relu_splitncnn_3 = 85;
const int LAYER_conv16_1 = 68;
const int BLOB_conv16_1 = 86;
const int LAYER_conv16_1_relu = 69;
const int BLOB_conv16_1_conv16_1_relu = 87;
const int LAYER_conv16_2 = 70;
const int BLOB_conv16_2 = 88;
const int LAYER_conv16_2_relu = 71;
const int BLOB_conv16_2_conv16_2_relu = 89;
const int LAYER_splitncnn_5 = 72;
const int BLOB_conv16_2_conv16_2_relu_splitncnn_0 = 90;
const int BLOB_conv16_2_conv16_2_relu_splitncnn_1 = 91;
const int BLOB_conv16_2_conv16_2_relu_splitncnn_2 = 92;
const int BLOB_conv16_2_conv16_2_relu_splitncnn_3 = 93;
const int LAYER_conv17_1 = 73;
const int BLOB_conv17_1 = 94;
const int LAYER_conv17_1_relu = 74;
const int BLOB_conv17_1_conv17_1_relu = 95;
const int LAYER_conv17_2 = 75;
const int BLOB_conv17_2 = 96;
const int LAYER_conv17_2_relu = 76;
const int BLOB_conv17_2_conv17_2_relu = 97;
const int LAYER_splitncnn_6 = 77;
const int BLOB_conv17_2_conv17_2_relu_splitncnn_0 = 98;
const int BLOB_conv17_2_conv17_2_relu_splitncnn_1 = 99;
const int BLOB_conv17_2_conv17_2_relu_splitncnn_2 = 100;
const int LAYER_conv11_mbox_loc = 78;
const int BLOB_conv11_mbox_loc = 101;
const int LAYER_conv11_mbox_loc_perm = 79;
const int BLOB_conv11_mbox_loc_perm = 102;
const int LAYER_conv11_mbox_loc_flat = 80;
const int BLOB_conv11_mbox_loc_flat = 103;
const int LAYER_conv11_mbox_conf = 81;
const int BLOB_conv11_mbox_conf = 104;
const int LAYER_conv11_mbox_conf_perm = 82;
const int BLOB_conv11_mbox_conf_perm = 105;
const int LAYER_conv11_mbox_conf_flat = 83;
const int BLOB_conv11_mbox_conf_flat = 106;
const int LAYER_conv11_mbox_priorbox = 84;
const int BLOB_conv11_mbox_priorbox = 107;
const int LAYER_conv13_mbox_loc = 85;
const int BLOB_conv13_mbox_loc = 108;
const int LAYER_conv13_mbox_loc_perm = 86;
const int BLOB_conv13_mbox_loc_perm = 109;
const int LAYER_conv13_mbox_loc_flat = 87;
const int BLOB_conv13_mbox_loc_flat = 110;
const int LAYER_conv13_mbox_conf = 88;
const int BLOB_conv13_mbox_conf = 111;
const int LAYER_conv13_mbox_conf_perm = 89;
const int BLOB_conv13_mbox_conf_perm = 112;
const int LAYER_conv13_mbox_conf_flat = 90;
const int BLOB_conv13_mbox_conf_flat = 113;
const int LAYER_conv13_mbox_priorbox = 91;
const int BLOB_conv13_mbox_priorbox = 114;
const int LAYER_conv14_2_mbox_loc = 92;
const int BLOB_conv14_2_mbox_loc = 115;
const int LAYER_conv14_2_mbox_loc_perm = 93;
const int BLOB_conv14_2_mbox_loc_perm = 116;
const int LAYER_conv14_2_mbox_loc_flat = 94;
const int BLOB_conv14_2_mbox_loc_flat = 117;
const int LAYER_conv14_2_mbox_conf = 95;
const int BLOB_conv14_2_mbox_conf = 118;
const int LAYER_conv14_2_mbox_conf_perm = 96;
const int BLOB_conv14_2_mbox_conf_perm = 119;
const int LAYER_conv14_2_mbox_conf_flat = 97;
const int BLOB_conv14_2_mbox_conf_flat = 120;
const int LAYER_conv14_2_mbox_priorbox = 98;
const int BLOB_conv14_2_mbox_priorbox = 121;
const int LAYER_conv15_2_mbox_loc = 99;
const int BLOB_conv15_2_mbox_loc = 122;
const int LAYER_conv15_2_mbox_loc_perm = 100;
const int BLOB_conv15_2_mbox_loc_perm = 123;
const int LAYER_conv15_2_mbox_loc_flat = 101;
const int BLOB_conv15_2_mbox_loc_flat = 124;
const int LAYER_conv15_2_mbox_conf = 102;
const int BLOB_conv15_2_mbox_conf = 125;
const int LAYER_conv15_2_mbox_conf_perm = 103;
const int BLOB_conv15_2_mbox_conf_perm = 126;
const int LAYER_conv15_2_mbox_conf_flat = 104;
const int BLOB_conv15_2_mbox_conf_flat = 127;
const int LAYER_conv15_2_mbox_priorbox = 105;
const int BLOB_conv15_2_mbox_priorbox = 128;
const int LAYER_conv16_2_mbox_loc = 106;
const int BLOB_conv16_2_mbox_loc = 129;
const int LAYER_conv16_2_mbox_loc_perm = 107;
const int BLOB_conv16_2_mbox_loc_perm = 130;
const int LAYER_conv16_2_mbox_loc_flat = 108;
const int BLOB_conv16_2_mbox_loc_flat = 131;
const int LAYER_conv16_2_mbox_conf = 109;
const int BLOB_conv16_2_mbox_conf = 132;
const int LAYER_conv16_2_mbox_conf_perm = 110;
const int BLOB_conv16_2_mbox_conf_perm = 133;
const int LAYER_conv16_2_mbox_conf_flat = 111;
const int BLOB_conv16_2_mbox_conf_flat = 134;
const int LAYER_conv16_2_mbox_priorbox = 112;
const int BLOB_conv16_2_mbox_priorbox = 135;
const int LAYER_conv17_2_mbox_loc = 113;
const int BLOB_conv17_2_mbox_loc = 136;
const int LAYER_conv17_2_mbox_loc_perm = 114;
const int BLOB_conv17_2_mbox_loc_perm = 137;
const int LAYER_conv17_2_mbox_loc_flat = 115;
const int BLOB_conv17_2_mbox_loc_flat = 138;
const int LAYER_conv17_2_mbox_conf = 116;
const int BLOB_conv17_2_mbox_conf = 139;
const int LAYER_conv17_2_mbox_conf_perm = 117;
const int BLOB_conv17_2_mbox_conf_perm = 140;
const int LAYER_conv17_2_mbox_conf_flat = 118;
const int BLOB_conv17_2_mbox_conf_flat = 141;
const int LAYER_conv17_2_mbox_priorbox = 119;
const int BLOB_conv17_2_mbox_priorbox = 142;
const int LAYER_mbox_loc = 120;
const int BLOB_mbox_loc = 143;
const int LAYER_mbox_conf = 121;
const int BLOB_mbox_conf = 144;
const int LAYER_mbox_priorbox = 122;
const int BLOB_mbox_priorbox = 145;
const int LAYER_mbox_conf_reshape = 123;
const int BLOB_mbox_conf_reshape = 146;
const int LAYER_mbox_conf_softmax = 124;
const int BLOB_mbox_conf_softmax = 147;
const int LAYER_mbox_conf_flatten = 125;
const int BLOB_mbox_conf_flatten = 148;
const int LAYER_detection_out = 126;
const int BLOB_detection_out = 149;
} // namespace MobileNetSSD_deploy_param_id
#endif // NCNN_INCLUDE_GUARD_MobileNetSSD_deploy_id_h


================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/MobileNetssd.cpp
================================================
#include <android/bitmap.h>
#include <android/log.h>
#include <jni.h>
#include <string>
#include <vector>

// ncnn
#include "include/opencv.h"
#include "MobileNetSSD_deploy.id.h"   //这里看成自己的id.h
#include <sys/time.h>
#include <unistd.h>
#include "include/net.h"

// Process-wide ncnn state shared between Init() and Detect().
static ncnn::UnlockedPoolAllocator g_blob_pool_allocator;   // allocator for blob memory (name suggests no internal locking -- confirm against ncnn docs)
static ncnn::PoolAllocator g_workspace_pool_allocator;      // allocator for scratch workspace memory

static ncnn::Mat ncnn_param;  // raw bytes of the .param network definition, filled by Init()
static ncnn::Mat ncnn_bin;    // raw bytes of the .bin weights, filled by Init()
static ncnn::Net ncnn_net;    // the loaded MobileNet-SSD network

extern "C" {


// Java declaration: public native boolean Init(byte[] param, byte[] bin);
// Loads the ncnn network definition (param) and weights (bin) from byte arrays
// passed in from Java, then installs the default ncnn options (thread count,
// pooled allocators).
// Fix: the original logged the load_param/load_model return values but always
// returned JNI_TRUE, so a corrupt or truncated model could never be detected
// from Java. The in-memory loaders return the number of bytes consumed, with
// 0 indicating failure -- now checked, returning JNI_FALSE on error.
JNIEXPORT jboolean JNICALL
Java_com_example_che_mobilenetssd_1demo_MobileNetssd_Init(JNIEnv *env, jobject obj, jbyteArray param, jbyteArray bin) {
    __android_log_print(ANDROID_LOG_DEBUG, "MobileNetssd", "enter the jni func");

    // Load the network structure from the param byte array.
    {
        int len = env->GetArrayLength(param);
        ncnn_param.create(len, (size_t) 1u);
        env->GetByteArrayRegion(param, 0, len, (jbyte *) ncnn_param);
        int ret = ncnn_net.load_param((const unsigned char *) ncnn_param);
        __android_log_print(ANDROID_LOG_DEBUG, "MobileNetssd", "load_param %d %d", ret, len);
        if (ret == 0)   // 0 bytes consumed: the param buffer could not be parsed
            return JNI_FALSE;
    }

    // Load the network weights from the bin byte array.
    {
        int len = env->GetArrayLength(bin);
        ncnn_bin.create(len, (size_t) 1u);
        env->GetByteArrayRegion(bin, 0, len, (jbyte *) ncnn_bin);
        int ret = ncnn_net.load_model((const unsigned char *) ncnn_bin);
        __android_log_print(ANDROID_LOG_DEBUG, "MobileNetssd", "load_model %d %d", ret, len);
        if (ret == 0)   // 0 bytes consumed: the weight buffer could not be loaded
            return JNI_FALSE;
    }

    // Configure ncnn's global defaults used by later create_extractor() calls.
    ncnn::Option opt;
    opt.lightmode = true;
    opt.num_threads = 4;   // inference thread count; tune for the target device
    opt.blob_allocator = &g_blob_pool_allocator;
    opt.workspace_allocator = &g_workspace_pool_allocator;

    ncnn::set_default_option(opt);

    return JNI_TRUE;
}

// public native float[] Detect(Bitmap bitmap);
// Runs MobileNet-SSD on an RGBA_8888 Android bitmap and returns the raw
// detection_out blob flattened row-major into a float[]. Each row presumably
// holds [label, score, xmin, ymin, xmax, ymax] -- confirm against the Java
// parser that consumes this array. Returns NULL for unsupported bitmap
// formats or when the output array cannot be allocated.
JNIEXPORT jfloatArray JNICALL Java_com_example_che_mobilenetssd_1demo_MobileNetssd_Detect(JNIEnv* env, jobject thiz, jobject bitmap)
{
    // convert the Android bitmap into an ncnn::Mat (RGBA -> RGB)
    ncnn::Mat in;
    {
        AndroidBitmapInfo info;
        AndroidBitmap_getInfo(env, bitmap, &info);
        int width = info.width;
        int height = info.height;
        // only RGBA_8888 bitmaps are supported; reject anything else
        if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
            return NULL;

        void* indata;
        AndroidBitmap_lockPixels(env, bitmap, &indata);
        // The input is assumed to already have the network's expected size
        // (the Java side resizes before calling); switch to
        // ncnn::Mat::from_pixels_resize(...) here if that ever changes.
        in = ncnn::Mat::from_pixels((const unsigned char*)indata, ncnn::Mat::PIXEL_RGBA2RGB, width, height);
        AndroidBitmap_unlockPixels(env, bitmap);
    }

    // normalization matching the training-time preprocessing:
    // (pixel - 127.5) * 0.007843 maps [0,255] roughly onto [-1,1]
    const float mean_vals[3] = {127.5f, 127.5f, 127.5f};
    const float scale[3] = {0.007843f, 0.007843f, 0.007843f};
    in.substract_mean_normalize(mean_vals, scale);

    ncnn::Extractor ex = ncnn_net.create_extractor();

    // Encrypted model: blobs are addressed by the numeric ids generated into
    // MobileNetSSD_deploy.id.h. The plain-text equivalents would be
    // ex.input("data", in) and ex.extract("detection_out", out).
    ex.input(MobileNetSSD_deploy_param_id::BLOB_data, in);

    ncnn::Mat out;
    ex.extract(MobileNetSSD_deploy_param_id::BLOB_detection_out, out);

    const int output_wsize = out.w;
    const int output_hsize = out.h;

    // Copy the detections row by row into one contiguous buffer.
    // This replaces the previous non-standard variable-length array of
    // per-element jfloat* pointers (large stack use, and it only produced a
    // valid contiguous copy by accident for 2-dim mats).
    std::vector<float> output(output_wsize * output_hsize);
    for (int i = 0; i < output_hsize; i++)
    {
        const float* row = out.row(i);
        for (int j = 0; j < output_wsize; j++)
            output[i * output_wsize + j] = row[j];
    }

    jfloatArray jOutputData = env->NewFloatArray(output_wsize * output_hsize);
    if (jOutputData == nullptr)
        return nullptr;
    env->SetFloatArrayRegion(jOutputData, 0, output_wsize * output_hsize,
                             reinterpret_cast<const jfloat*>(output.data()));
    return jOutputData;
}
}


================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/include/allocator.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2018 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_ALLOCATOR_H
#define NCNN_ALLOCATOR_H

#ifdef _WIN32
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#else
#include <pthread.h>
#endif

#include <stdlib.h>
#include <list>

namespace ncnn {

// the alignment of all the allocated buffers
#define MALLOC_ALIGN    16

// Round a pointer up to the next n-byte boundary.
// ptr  pointer to align (returned unchanged when already aligned)
// n    alignment in bytes; must be a power of two (defaults to the element size)
template<typename _Tp> static inline _Tp* alignPtr(_Tp* ptr, int n=(int)sizeof(_Tp))
{
    // for power-of-two n, masking with ~(n-1) after adding n-1 rounds up
    size_t addr = (size_t)ptr + (size_t)(n - 1);
    return (_Tp*)(addr & ~(size_t)(n - 1));
}

// Round a buffer size up to the next multiple of n.
// sz  size to align
// n   alignment; must be a power of two
// Returns the smallest value >= sz that is divisible by n.
static inline size_t alignSize(size_t sz, int n)
{
    const size_t mask = (size_t)n - 1;  // valid because n is a power of two
    return (sz + mask) & ~mask;
}

// malloc() wrapper that returns a MALLOC_ALIGN-aligned block.
// Layout: [raw malloc area ... padding][stored raw ptr][aligned user block]
// The raw pointer is stashed in the pointer-sized slot immediately before the
// returned address so fastFree() can recover and free it.
// Returns 0 when the underlying malloc fails.
static inline void* fastMalloc(size_t size)
{
    // over-allocate: one pointer-sized bookkeeping slot plus up to
    // MALLOC_ALIGN bytes of alignment slack
    unsigned char* udata = (unsigned char*)malloc(size + sizeof(void*) + MALLOC_ALIGN);
    if (!udata)
        return 0;
    // first aligned address past the bookkeeping slot
    unsigned char** adata = alignPtr((unsigned char**)udata + 1, MALLOC_ALIGN);
    adata[-1] = udata;  // remember the raw pointer just below the user block
    return adata;
}

// Releases a block obtained from fastMalloc(); safe to call with NULL.
static inline void fastFree(void* ptr)
{
    if (ptr)
    {
        // the raw malloc() pointer was stored in the slot immediately
        // before the aligned block by fastMalloc()
        unsigned char* udata = ((unsigned char**)ptr)[-1];
        free(udata);
    }
}

// exchange-add operation for atomic operations on reference counters
// NCNN_XADD(addr, delta) performs { old = *addr; *addr += delta; return old; }
// atomically on every branch except the final fallback below.
#if defined __INTEL_COMPILER && !(defined WIN32 || defined _WIN32)
// atomic increment on the linux version of the Intel(tm) compiler
#  define NCNN_XADD(addr, delta) (int)_InterlockedExchangeAdd(const_cast<void*>(reinterpret_cast<volatile void*>(addr)), delta)
#elif defined __GNUC__
#  if defined __clang__ && __clang_major__ >= 3 && !defined __ANDROID__ && !defined __EMSCRIPTEN__ && !defined(__CUDACC__)
#    ifdef __ATOMIC_ACQ_REL
#      define NCNN_XADD(addr, delta) __c11_atomic_fetch_add((_Atomic(int)*)(addr), delta, __ATOMIC_ACQ_REL)
#    else
// literal 4 is presumably the numeric value of __ATOMIC_ACQ_REL on
// toolchains that lack the macro -- mirrors the branch above
#      define NCNN_XADD(addr, delta) __atomic_fetch_add((_Atomic(int)*)(addr), delta, 4)
#    endif
#  else
#    if defined __ATOMIC_ACQ_REL && !defined __clang__
// version for gcc >= 4.7
#      define NCNN_XADD(addr, delta) (int)__atomic_fetch_add((unsigned*)(addr), (unsigned)(delta), __ATOMIC_ACQ_REL)
#    else
#      define NCNN_XADD(addr, delta) (int)__sync_fetch_and_add((unsigned*)(addr), (unsigned)(delta))
#    endif
#  endif
#elif defined _MSC_VER && !defined RC_INVOKED
#  include <intrin.h>
#  define NCNN_XADD(addr, delta) (int)_InterlockedExchangeAdd((long volatile*)addr, delta)
#else
// thread-unsafe branch
// plain read-modify-write fallback when no compiler intrinsic is available;
// NOT safe under concurrent access
static inline int NCNN_XADD(int* addr, int delta) { int tmp = *addr; *addr += delta; return tmp; }
#endif

#ifdef _WIN32
// Minimal non-recursive mutex: wraps a Windows slim reader/writer lock,
// used in exclusive mode only.
class Mutex
{
public:
    Mutex() { InitializeSRWLock(&srwlock); }
    ~Mutex() {}  // SRW locks need no explicit destruction
    void lock() { AcquireSRWLockExclusive(&srwlock); }
    void unlock() { ReleaseSRWLockExclusive(&srwlock); }
private:
    // NOTE SRWLock is available from windows vista
    SRWLOCK srwlock;
};
#else // _WIN32
// Minimal non-recursive mutex: wraps a pthread_mutex_t with default
// attributes.
class Mutex
{
public:
    Mutex() { pthread_mutex_init(&mutex, 0); }
    ~Mutex() { pthread_mutex_destroy(&mutex); }
    void lock() { pthread_mutex_lock(&mutex); }
    void unlock() { pthread_mutex_unlock(&mutex); }
private:
    pthread_mutex_t mutex;
};
#endif // _WIN32

// Abstract interface for pluggable memory allocators; implemented by
// PoolAllocator and UnlockedPoolAllocator below.
class Allocator
{
public:
    // pure virtual destructor -- still needs an out-of-line definition
    // (presumably in allocator.cpp) since derived destructors call it
    virtual ~Allocator() = 0;
    // allocate a block of at least `size` bytes
    virtual void* fastMalloc(size_t size) = 0;
    // release a block previously returned by this allocator's fastMalloc
    virtual void fastFree(void* ptr) = 0;
};

// Thread-safe pooling allocator (declaration only; the implementation lives
// in allocator.cpp). Freed blocks are kept for reuse instead of being
// returned to the system immediately.
class PoolAllocator : public Allocator
{
public:
    PoolAllocator();
    ~PoolAllocator();

    // ratio range 0 ~ 1
    // default cr = 0.75
    // controls how closely a cached block's size must match a request for
    // it to be reused -- presumably scaled into size_compare_ratio below
    void set_size_compare_ratio(float scr);

    // release all budgets immediately
    void clear();

    virtual void* fastMalloc(size_t size);
    virtual void fastFree(void* ptr);

private:
    Mutex budgets_lock;   // guards budgets
    Mutex payouts_lock;   // guards payouts
    unsigned int size_compare_ratio;// 0~256
    // cached (size, ptr) blocks available for reuse -- verify in allocator.cpp
    std::list< std::pair<size_t, void*> > budgets;
    // (size, ptr) blocks currently handed out -- verify in allocator.cpp
    std::list< std::pair<size_t, void*> > payouts;
};

// Same pooling scheme as PoolAllocator but with no internal locking (note
// the absence of Mutex members) -- the caller must serialize access.
class UnlockedPoolAllocator : public Allocator
{
public:
    UnlockedPoolAllocator();
    ~UnlockedPoolAllocator();

    // ratio range 0 ~ 1
    // default cr = 0.75
    void set_size_compare_ratio(float scr);

    // release all budgets immediately
    void clear();

    virtual void* fastMalloc(size_t size);
    virtual void fastFree(void* ptr);

private:
    unsigned int size_compare_ratio;// 0~256
    // cached blocks available for reuse / blocks currently handed out --
    // mirrors PoolAllocator; verify in allocator.cpp
    std::list< std::pair<size_t, void*> > budgets;
    std::list< std::pair<size_t, void*> > payouts;
};

} // namespace ncnn

#endif // NCNN_ALLOCATOR_H


================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/include/benchmark.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_BENCHMARK_H
#define NCNN_BENCHMARK_H

#include "platform.h"
#include "mat.h"
#include "layer.h"

namespace ncnn {

// get now timestamp in ms
double get_current_time();

#if NCNN_BENCHMARK

void benchmark(const Layer* layer, double start, double end);
void benchmark(const Layer* layer, const Mat& bottom_blob, Mat& top_blob, double start, double end);

#endif // NCNN_BENCHMARK

} // namespace ncnn

#endif // NCNN_BENCHMARK_H


================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/include/blob.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_BLOB_H
#define NCNN_BLOB_H

#include <string>
#include <vector>
#include "platform.h"

namespace ncnn {

// A Blob is an edge of the network graph: it records which layer writes it
// (producer) and which layers read it (consumers).
class Blob
{
public:
    // empty
    Blob();

public:
#if NCNN_STRING
    // blob name
    std::string name;
#endif // NCNN_STRING
    // layer index which produce this blob as output
    int producer;
    // layer index which need this blob as input
    std::vector<int> consumers;
};

} // namespace ncnn

#endif // NCNN_BLOB_H


================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/include/cpu.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_CPU_H
#define NCNN_CPU_H

namespace ncnn {

// test optional cpu features
// neon = armv7 neon or aarch64 asimd
int cpu_support_arm_neon();
// vfpv4 = armv7 fp16 + fma
int cpu_support_arm_vfpv4();
// asimdhp = aarch64 asimd half precision
int cpu_support_arm_asimdhp();

// cpu info
int get_cpu_count();

// bind all threads on little clusters if powersave enabled
// affacts HMP arch cpu like ARM big.LITTLE
// only implemented on android at the moment
// switching powersave is expensive and not thread-safe
// 0 = all cores enabled(default)
// 1 = only little clusters enabled
// 2 = only big clusters enabled
// return 0 if success for setter function
int get_cpu_powersave();
int set_cpu_powersave(int powersave);

// misc function wrapper for openmp routines
int get_omp_num_threads();
void set_omp_num_threads(int num_threads);

int get_omp_dynamic();
void set_omp_dynamic(int dynamic);

} // namespace ncnn

#endif // NCNN_CPU_H


================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/include/layer.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_LAYER_H
#define NCNN_LAYER_H

#include <stdio.h>
#include <string>
#include <vector>
#include "mat.h"
#include "modelbin.h"
#include "paramdict.h"
#include "platform.h"

namespace ncnn {

class Allocator;
// Aggregates the tunable settings for a forward pass; a process-wide default
// instance is managed via get_default_option()/set_default_option() below.
class Option
{
public:
    // default option
    Option();

public:
    // light mode
    // intermediate blob will be recycled when enabled
    // enabled by default
    bool lightmode;

    // thread count
    // default value is the one returned by get_cpu_count()
    int num_threads;

    // blob memory allocator
    Allocator* blob_allocator;

    // workspace memory allocator
    Allocator* workspace_allocator;
};

// the global default option
const Option& get_default_option();
int set_default_option(const Option& opt);

// Base class of every network operation. Concrete layers override
// load_param/load_model and whichever forward variant matches their
// one_blob_only / support_inplace flags.
class Layer
{
public:
    // empty
    Layer();
    // virtual destructor
    virtual ~Layer();

    // load layer specific parameter from parsed dict
    // return 0 if success
    virtual int load_param(const ParamDict& pd);

    // load layer specific weight data from model binary
    // return 0 if success
    virtual int load_model(const ModelBin& mb);

public:
    // one input and one output blob
    bool one_blob_only;

    // support inplace inference
    bool support_inplace;

public:
    // implement inference
    // return 0 if success
    // multi-input/multi-output variant
    virtual int forward(const std::vector<Mat>& bottom_blobs, std::vector<Mat>& top_blobs, const Option& opt = get_default_option()) const;
    // single-blob variant -- presumably dispatched when one_blob_only is set;
    // confirm in net.cpp
    virtual int forward(const Mat& bottom_blob, Mat& top_blob, const Option& opt = get_default_option()) const;

    // implement inplace inference
    // return 0 if success
    virtual int forward_inplace(std::vector<Mat>& bottom_top_blobs, const Option& opt = get_default_option()) const;
    virtual int forward_inplace(Mat& bottom_top_blob, const Option& opt = get_default_option()) const;

public:
#if NCNN_STRING
    // layer type name
    std::string type;
    // layer name
    std::string name;
#endif // NCNN_STRING
    // blob index which this layer needs as input
    std::vector<int> bottoms;
    // blob index which this layer produces as output
    std::vector<int> tops;
};

// layer factory function
typedef Layer* (*layer_creator_func)();

// One row of the layer registry: pairs a layer's factory function with its
// type name (when string support is compiled in).
struct layer_registry_entry
{
#if NCNN_STRING
    // layer type name
    const char* name;
#endif // NCNN_STRING
    // layer factory entry
    layer_creator_func creator;
};

#if NCNN_STRING
// get layer type from type name
int layer_to_index(const char* type);
// create layer from type name
Layer* create_layer(const char* type);
#endif // NCNN_STRING
// create layer from layer type
Layer* create_layer(int index);

// Expands to a factory function `<name>_layer_creator` returning a new
// instance of layer class `name`, suitable for a layer_registry_entry.
#define DEFINE_LAYER_CREATOR(name) \
    ::ncnn::Layer* name##_layer_creator() { return new name; }

} // namespace ncnn

#endif // NCNN_LAYER_H


================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/include/layer_type.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_LAYER_TYPE_H
#define NCNN_LAYER_TYPE_H

namespace ncnn {

// Numeric ids for the built-in layer types; the values themselves are
// generated into layer_type_enum.h by cmake.
namespace LayerType {
enum
{
#include "layer_type_enum.h"
    // presumably a flag bit distinguishing user-registered custom layer ids
    // from the built-ins above -- confirm in net.cpp
    CustomBit = (1<<8),
};
} // namespace LayerType

} // namespace ncnn

#endif // NCNN_LAYER_TYPE_H


================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/include/layer_type_enum.h
================================================
// Layer Type Enum header
//
// This file is auto-generated by cmake, don't edit it.

AbsVal = 0,
ArgMax = 1,
BatchNorm = 2,
Bias = 3,
BNLL = 4,
Concat = 5,
Convolution = 6,
Crop = 7,
Deconvolution = 8,
Dropout = 9,
Eltwise = 10,
ELU = 11,
Embed = 12,
Exp = 13,
Flatten = 14,
InnerProduct = 15,
Input = 16,
Log = 17,
LRN = 18,
MemoryData = 19,
MVN = 20,
Pooling = 21,
Power = 22,
PReLU = 23,
Proposal = 24,
Reduction = 25,
ReLU = 26,
Reshape = 27,
ROIPooling = 28,
Scale = 29,
Sigmoid = 30,
Slice = 31,
Softmax = 32,
Split = 33,
SPP = 34,
TanH = 35,
Threshold = 36,
Tile = 37,
RNN = 38,
LSTM = 39,
BinaryOp = 40,
UnaryOp = 41,
ConvolutionDepthWise = 42,
Padding = 43,
Squeeze = 44,
ExpandDims = 45,
Normalize = 46,
Permute = 47,
PriorBox = 48,
DetectionOutput = 49,
Interp = 50,
DeconvolutionDepthWise = 51,
ShuffleChannel = 52,
InstanceNorm = 53,
Clip = 54,
Reorg = 55,
YoloDetectionOutput = 56,
Quantize = 57,
Dequantize = 58,
Yolov3DetectionOutput = 59,
PSROIPooling = 60,
ROIAlign = 61,



================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/include/mat.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_MAT_H
#define NCNN_MAT_H

#include <stdlib.h>
#include <string.h>
#if __ARM_NEON
#include <arm_neon.h>
#endif
#include "allocator.h"
#include "platform.h"

namespace ncnn {

// the three dimension matrix
// Reference-counted 1/2/3-dim array. Copies are shallow: the copy
// constructor and operator= share the underlying buffer and bump refcount.
class Mat
{
public:
    // empty
    Mat();
    // vec
    Mat(int w, size_t elemsize = 4u, Allocator* allocator = 0);
    // image
    Mat(int w, int h, size_t elemsize = 4u, Allocator* allocator = 0);
    // dim
    Mat(int w, int h, int c, size_t elemsize = 4u, Allocator* allocator = 0);
    // copy
    Mat(const Mat& m);
    // external vec
    Mat(int w, void* data, size_t elemsize = 4u, Allocator* allocator = 0);
    // external image
    Mat(int w, int h, void* data, size_t elemsize = 4u, Allocator* allocator = 0);
    // external dim
    Mat(int w, int h, int c, void* data, size_t elemsize = 4u, Allocator* allocator = 0);
    // release
    ~Mat();
    // assign
    Mat& operator=(const Mat& m);
    // set all
    void fill(float v);
    void fill(int v);
    template <typename T> void fill(T v);
    // deep copy
    Mat clone(Allocator* allocator = 0) const;
    // reshape vec
    Mat reshape(int w, Allocator* allocator = 0) const;
    // reshape image
    Mat reshape(int w, int h, Allocator* allocator = 0) const;
    // reshape dim
    Mat reshape(int w, int h, int c, Allocator* allocator = 0) const;
    // allocate vec
    void create(int w, size_t elemsize = 4u, Allocator* allocator = 0);
    // allocate image
    void create(int w, int h, size_t elemsize = 4u, Allocator* allocator = 0);
    // allocate dim
    void create(int w, int h, int c, size_t elemsize = 4u, Allocator* allocator = 0);
    // refcount++
    void addref();
    // refcount--
    void release();

    bool empty() const;
    size_t total() const;

    // data reference
    Mat channel(int c);
    const Mat channel(int c) const;
    float* row(int y);
    const float* row(int y) const;
    template<typename T> T* row(int y);
    template<typename T> const T* row(int y) const;

    // range reference
    Mat channel_range(int c, int channels);
    const Mat channel_range(int c, int channels) const;
    Mat row_range(int y, int rows);
    const Mat row_range(int y, int rows) const;
    Mat range(int x, int n);
    const Mat range(int x, int n) const;

    // access raw data
    template<typename T> operator T*();
    template<typename T> operator const T*() const;

    // convenient access float vec element
    float& operator[](int i);
    const float& operator[](int i) const;

#if NCNN_PIXEL
    // pixel formats: the low 16 bits name the source layout, the high 16
    // bits (when set) name the layout to convert to while loading
    enum
    {
        PIXEL_CONVERT_SHIFT = 16,
        PIXEL_FORMAT_MASK = 0x0000ffff,
        PIXEL_CONVERT_MASK = 0xffff0000,

        PIXEL_RGB       = 1,
        PIXEL_BGR       = (1 << 1),
        PIXEL_GRAY      = (1 << 2),
        PIXEL_RGBA      = (1 << 3),

        PIXEL_RGB2BGR   = PIXEL_RGB | (PIXEL_BGR << PIXEL_CONVERT_SHIFT),
        PIXEL_RGB2GRAY  = PIXEL_RGB | (PIXEL_GRAY << PIXEL_CONVERT_SHIFT),

        PIXEL_BGR2RGB   = PIXEL_BGR | (PIXEL_RGB << PIXEL_CONVERT_SHIFT),
        PIXEL_BGR2GRAY  = PIXEL_BGR | (PIXEL_GRAY << PIXEL_CONVERT_SHIFT),

        PIXEL_GRAY2RGB  = PIXEL_GRAY | (PIXEL_RGB << PIXEL_CONVERT_SHIFT),
        PIXEL_GRAY2BGR  = PIXEL_GRAY | (PIXEL_BGR << PIXEL_CONVERT_SHIFT),

        PIXEL_RGBA2RGB  = PIXEL_RGBA | (PIXEL_RGB << PIXEL_CONVERT_SHIFT),
        PIXEL_RGBA2BGR  = PIXEL_RGBA | (PIXEL_BGR << PIXEL_CONVERT_SHIFT),
        PIXEL_RGBA2GRAY = PIXEL_RGBA | (PIXEL_GRAY << PIXEL_CONVERT_SHIFT),
    };
    // convenient construct from pixel data
    static Mat from_pixels(const unsigned char* pixels, int type, int w, int h, Allocator* allocator = 0);
    // convenient construct from pixel data and resize to specific size
    static Mat from_pixels_resize(const unsigned char* pixels, int type, int w, int h, int target_width, int target_height, Allocator* allocator = 0);

    // convenient export to pixel data
    void to_pixels(unsigned char* pixels, int type) const;
    // convenient export to pixel data and resize to specific size
    void to_pixels_resize(unsigned char* pixels, int type, int target_width, int target_height) const;
#endif // NCNN_PIXEL

    // substract channel-wise mean values, then multiply by normalize values, pass 0 to skip
    void substract_mean_normalize(const float* mean_vals, const float* norm_vals);

    // convenient construct from half precisoin floating point data
    static Mat from_float16(const unsigned short* data, int size);

    // pointer to the data
    void* data;

    // pointer to the reference counter
    // when points to user-allocated data, the pointer is NULL
    int* refcount;

    // element size in bytes
    // 4 = float32/int32
    // 2 = float16
    // 1 = int8/uint8
    // 0 = empty
    size_t elemsize;

    // the allocator
    Allocator* allocator;

    // the dimensionality
    // 1 = vec, 2 = image, 3 = dim (see the constructors above)
    int dims;

    int w;  // width (elements per row)
    int h;  // height (rows)
    int c;  // channels

    // elements (not bytes) between the starts of consecutive channels; for
    // 3-dim mats each channel is padded to a 16-byte boundary (see the
    // external-dim constructor below)
    size_t cstep;
};

// misc function
#if NCNN_PIXEL
// convert yuv420sp(nv21) to rgb, the fast approximate version
void yuv420sp2rgb(const unsigned char* yuv420sp, int w, int h, unsigned char* rgb);
// image pixel bilinear resize
void resize_bilinear_c1(const unsigned char* src, int srcw, int srch, unsigned char* dst, int w, int h);
void resize_bilinear_c2(const unsigned char* src, int srcw, int srch, unsigned char* dst, int w, int h);
void resize_bilinear_c3(const unsigned char* src, int srcw, int srch, unsigned char* dst, int w, int h);
void resize_bilinear_c4(const unsigned char* src, int srcw, int srch, unsigned char* dst, int w, int h);
// image pixel bilinear resize, convenient wrapper for yuv420sp(nv21)
void resize_bilinear_yuv420sp(const unsigned char* src, int srcw, int srch, unsigned char* dst, int w, int h);
#endif // NCNN_PIXEL

// mat process
// border handling modes for copy_make_border
enum
{
    BORDER_CONSTANT = 0,   // fill the border with the constant value v
    BORDER_REPLICATE = 1,  // presumably repeats the edge elements -- verify in mat.cpp
};
void copy_make_border(const Mat& src, Mat& dst, int top, int bottom, int left, int right, int type, float v, Allocator* allocator = 0, int num_threads = 1);
void copy_cut_border(const Mat& src, Mat& dst, int top, int bottom, int left, int right, Allocator* allocator = 0, int num_threads = 1);
void resize_bilinear(const Mat& src, Mat& dst, int w, int h, Allocator* allocator = 0, int num_threads = 1);

// empty mat: every field zeroed, no allocation
inline Mat::Mat()
    : data(0), refcount(0), elemsize(0), allocator(0), dims(0), w(0), h(0), c(0), cstep(0)
{
}

// 1-dim mat; create() sets the remaining fields and allocates storage
inline Mat::Mat(int _w, size_t _elemsize, Allocator* allocator)
    : data(0), refcount(0), dims(0)
{
    create(_w, _elemsize, allocator);
}

// 2-dim mat
inline Mat::Mat(int _w, int _h, size_t _elemsize, Allocator* allocator)
    : data(0), refcount(0), dims(0)
{
    create(_w, _h, _elemsize, allocator);
}

// 3-dim mat
inline Mat::Mat(int _w, int _h, int _c, size_t _elemsize, Allocator* allocator)
    : data(0), refcount(0), dims(0)
{
    create(_w, _h, _c, _elemsize, allocator);
}

// shallow copy: shares m's buffer and, when m owns its data, atomically
// bumps the shared reference count
inline Mat::Mat(const Mat& m)
    : data(m.data), refcount(m.refcount), elemsize(m.elemsize), allocator(m.allocator), dims(m.dims)
{
    if (refcount)
        NCNN_XADD(refcount, 1);  // atomic increment (see allocator.h)

    w = m.w;
    h = m.h;
    c = m.c;

    cstep = m.cstep;
}

// wrap user-allocated memory as a 1-dim mat; refcount stays NULL so the
// Mat never frees the buffer
inline Mat::Mat(int _w, void* _data, size_t _elemsize, Allocator* _allocator)
    : data(_data), refcount(0), elemsize(_elemsize), allocator(_allocator), dims(1)
{
    w = _w;
    h = 1;
    c = 1;

    cstep = w;
}

// wrap user-allocated memory as a 2-dim mat (not owned)
inline Mat::Mat(int _w, int _h, void* _data, size_t _elemsize, Allocator* _allocator)
    : data(_data), refcount(0), elemsize(_elemsize), allocator(_allocator), dims(2)
{
    w = _w;
    h = _h;
    c = 1;

    cstep = w * h;
}

// wrap user-allocated memory as a 3-dim mat (not owned); cstep pads each
// channel to a 16-byte boundary, so the caller's layout must match
inline Mat::Mat(int _w, int _h, int _c, void* _data, size_t _elemsize, Allocator* _allocator)
    : data(_data), refcount(0), elemsize(_elemsize), allocator(_allocator), dims(3)
{
    w = _w;
    h = _h;
    c = _c;

    cstep = alignSize(w * h * elemsize, 16) / elemsize;
}

// drop this reference via release()
inline Mat::~Mat()
{
    release();
}

// shallow assignment: share m's buffer and drop our previous one
inline Mat& Mat::operator=(const Mat& m)
{
    if (this == &m)
        return *this;

    // addref m BEFORE releasing ourselves -- keeps the buffer alive when
    // this and m already share the same underlying data
    if (m.refcount)
        NCNN_XADD(m.refcount, 1);

    release();

    data = m.data;
    refcount = m.refcount;
    elemsize = m.elemsize;
    allocator = m.allocator;

    dims = m.dims;
    w = m.w;
    h = m.h;
    c = m.c;

    cstep = m.cstep;

    return *this;
}

// set every float element to _v
// On ARM the bulk of the buffer is written 4 floats per iteration with NEON
// inline asm; leftover elements fall through to the scalar loop at the end.
inline void Mat::fill(float _v)
{
    int size = total();
    float* ptr = (float*)data;

#if __ARM_NEON
    int nn = size >> 2;              // number of 4-float vector stores
    int remain = size - (nn << 2);   // scalar tail count
#else
    int remain = size;
#endif // __ARM_NEON

#if __ARM_NEON
    float32x4_t _c = vdupq_n_f32(_v);  // _v broadcast into all 4 lanes
#if __aarch64__
    if (nn > 0)
    {
    asm volatile (
        "0:                             \n"
        "subs       %w0, %w0, #1        \n"
        "st1        {%4.4s}, [%1], #16  \n"
        "bne        0b                  \n"
        : "=r"(nn),     // %0
          "=r"(ptr)     // %1
        : "0"(nn),
          "1"(ptr),
          "w"(_c)       // %4
        : "cc", "memory"
    );
    }
#else
    if (nn > 0)
    {
    asm volatile(
        "0:                             \n"
        "subs       %0, #1              \n"
        "vst1.f32   {%e4-%f4}, [%1 :128]!\n"
        "bne        0b                  \n"
        : "=r"(nn),     // %0
          "=r"(ptr)     // %1
        : "0"(nn),
          "1"(ptr),
          "w"(_c)       // %4
        : "cc", "memory"
    );
    }
#endif // __aarch64__
#endif // __ARM_NEON
    // scalar tail (whole buffer when NEON is unavailable)
    for (; remain>0; remain--)
    {
        *ptr++ = _v;
    }
}

// set every int element to _v
// Same structure as fill(float) above: NEON 4-lane stores plus scalar tail.
inline void Mat::fill(int _v)
{
    int size = total();
    int* ptr = (int*)data;

#if __ARM_NEON
    int nn = size >> 2;              // number of 4-int vector stores
    int remain = size - (nn << 2);   // scalar tail count
#else
    int remain = size;
#endif // __ARM_NEON

#if __ARM_NEON
    int32x4_t _c = vdupq_n_s32(_v);  // _v broadcast into all 4 lanes
#if __aarch64__
    if (nn > 0)
    {
    asm volatile (
        "0:                             \n"
        "subs       %w0, %w0, #1        \n"
        "st1        {%4.4s}, [%1], #16  \n"
        "bne        0b                  \n"
        : "=r"(nn),     // %0
          "=r"(ptr)     // %1
        : "0"(nn),
          "1"(ptr),
          "w"(_c)       // %4
        : "cc", "memory"
    );
    }
#else
    if (nn > 0)
    {
    asm volatile(
        "0:                             \n"
        "subs       %0, #1              \n"
        "vst1.s32   {%e4-%f4}, [%1 :128]!\n"
        "bne        0b                  \n"
        : "=r"(nn),     // %0
          "=r"(ptr)     // %1
        : "0"(nn),
          "1"(ptr),
          "w"(_c)       // %4
        : "cc", "memory"
    );
    }
#endif // __aarch64__
#endif // __ARM_NEON
    // scalar tail (whole buffer when NEON is unavailable)
    for (; remain>0; remain--)
    {
        *ptr++ = _v;
    }
}

// Generic fallback: assign _v to every element, one at a time.
template <typename T>
inline void Mat::fill(T _v)
{
    T* cur = (T*)data;
    T* end = cur + total();
    while (cur != end)
    {
        *cur++ = _v;
    }
}

// Deep copy: allocate a fresh buffer with the same geometry and copy the payload.
inline Mat Mat::clone(Allocator* allocator) const
{
    if (empty())
        return Mat();

    Mat copy;
    switch (dims)
    {
    case 1: copy.create(w, elemsize, allocator); break;
    case 2: copy.create(w, h, elemsize, allocator); break;
    case 3: copy.create(w, h, c, elemsize, allocator); break;
    default: break;
    }

    size_t bytes = total() * elemsize;
    if (bytes > 0)
    {
        memcpy(copy.data, data, bytes);
    }

    return copy;
}

// Reinterpret the data as a 1-D blob of _w elements.
// The total element count must be preserved, otherwise an empty Mat is returned.
inline Mat Mat::reshape(int _w, Allocator* allocator) const
{
    if (w * h * c != _w)
        return Mat();

    if (dims == 3 && cstep != (size_t)w * h)
    {
        // 3-D blobs pad each channel to a 16-byte boundary (see create/alignSize),
        // so channels are not contiguous: allocate and physically flatten.
        Mat m;
        m.create(_w, elemsize, allocator);

        // flatten
        for (int i=0; i<c; i++)
        {
            const void* ptr = (unsigned char*)data + i * cstep * elemsize;
            void* mptr = (unsigned char*)m.data + i * w * h * elemsize;
            memcpy(mptr, ptr, w * h * elemsize);
        }

        return m;
    }

    // Contiguous case: share the data and only rewrite the dimension fields.
    Mat m = *this;

    m.dims = 1;
    m.w = _w;
    m.h = 1;
    m.c = 1;

    m.cstep = _w;

    return m;
}

// Reinterpret the data as a 2-D blob of _w x _h elements.
// The total element count must be preserved, otherwise an empty Mat is returned.
inline Mat Mat::reshape(int _w, int _h, Allocator* allocator) const
{
    if (w * h * c != _w * _h)
        return Mat();

    if (dims == 3 && cstep != (size_t)w * h)
    {
        // Channels are padded (non-contiguous): allocate and physically flatten.
        Mat m;
        m.create(_w, _h, elemsize, allocator);

        // flatten
        for (int i=0; i<c; i++)
        {
            const void* ptr = (unsigned char*)data + i * cstep * elemsize;
            void* mptr = (unsigned char*)m.data + i * w * h * elemsize;
            memcpy(mptr, ptr, w * h * elemsize);
        }

        return m;
    }

    // Contiguous case: share the data and only rewrite the dimension fields.
    Mat m = *this;

    m.dims = 2;
    m.w = _w;
    m.h = _h;
    m.c = 1;

    m.cstep = _w * _h;

    return m;
}

// Reinterpret the data as a 3-D blob of _w x _h x _c elements.
// The total element count must be preserved, otherwise an empty Mat is returned.
inline Mat Mat::reshape(int _w, int _h, int _c, Allocator* allocator) const
{
    if (w * h * c != _w * _h * _c)
        return Mat();

    if (dims < 3)
    {
        // Source is contiguous.  If the target channel size needs 16-byte
        // padding (w*h elements do not fill an aligned cstep), copy each
        // channel into its padded slot.
        if ((size_t)_w * _h != alignSize(_w * _h * elemsize, 16) / elemsize)
        {
            Mat m;
            m.create(_w, _h, _c, elemsize, allocator);

            // align channel
            for (int i=0; i<_c; i++)
            {
                const void* ptr = (unsigned char*)data + i * _w * _h * elemsize;
                void* mptr = (unsigned char*)m.data + i * m.cstep * m.elemsize;
                memcpy(mptr, ptr, _w * _h * elemsize);
            }

            return m;
        }
    }
    else if (c != _c)
    {
        // Channel count changes: flatten to 1-D first (drops per-channel
        // padding), then reshape the contiguous copy to the target geometry.
        Mat tmp = reshape(_w * _h * _c, allocator);
        return tmp.reshape(_w, _h, _c, allocator);
    }

    // Data can be shared: only rewrite the dimension fields.
    Mat m = *this;

    m.dims = 3;
    m.w = _w;
    m.h = _h;
    m.c = _c;

    m.cstep = alignSize(_w * _h * elemsize, 16) / elemsize;

    return m;
}

// Allocate (or reuse) storage for a 1-D blob of _w elements.
inline void Mat::create(int _w, size_t _elemsize, Allocator* _allocator)
{
    // Reuse the current buffer when the requested geometry already matches.
    if (dims == 1 && w == _w && elemsize == _elemsize && allocator == _allocator)
        return;

    release();

    elemsize = _elemsize;
    allocator = _allocator;

    dims = 1;
    w = _w;
    h = 1;
    c = 1;

    cstep = w;

    if (total() > 0)
    {
        // Pad the payload to a 4-byte boundary so the trailing refcount is aligned.
        size_t payload = alignSize(total() * elemsize, 4);
        size_t request = payload + (int)sizeof(*refcount);
        data = allocator ? allocator->fastMalloc(request) : fastMalloc(request);
        refcount = (int*)((unsigned char*)data + payload);
        *refcount = 1;
    }
}

// Allocate (or reuse) storage for a 2-D blob of _w x _h elements.
inline void Mat::create(int _w, int _h, size_t _elemsize, Allocator* _allocator)
{
    // Reuse the current buffer when the requested geometry already matches.
    if (dims == 2 && w == _w && h == _h && elemsize == _elemsize && allocator == _allocator)
        return;

    release();

    elemsize = _elemsize;
    allocator = _allocator;

    dims = 2;
    w = _w;
    h = _h;
    c = 1;

    cstep = w * h;

    if (total() > 0)
    {
        // Pad the payload to a 4-byte boundary so the trailing refcount is aligned.
        size_t payload = alignSize(total() * elemsize, 4);
        size_t request = payload + (int)sizeof(*refcount);
        data = allocator ? allocator->fastMalloc(request) : fastMalloc(request);
        refcount = (int*)((unsigned char*)data + payload);
        *refcount = 1;
    }
}

// Allocate (or reuse) storage for a 3-D blob of _w x _h x _c elements.
// Each channel is padded to a 16-byte boundary (cstep).
inline void Mat::create(int _w, int _h, int _c, size_t _elemsize, Allocator* _allocator)
{
    // Reuse the current buffer when the requested geometry already matches.
    if (dims == 3 && w == _w && h == _h && c == _c && elemsize == _elemsize && allocator == _allocator)
        return;

    release();

    elemsize = _elemsize;
    allocator = _allocator;

    dims = 3;
    w = _w;
    h = _h;
    c = _c;

    cstep = alignSize(w * h * elemsize, 16) / elemsize;

    if (total() > 0)
    {
        // Pad the payload to a 4-byte boundary so the trailing refcount is aligned.
        size_t payload = alignSize(total() * elemsize, 4);
        size_t request = payload + (int)sizeof(*refcount);
        data = allocator ? allocator->fastMalloc(request) : fastMalloc(request);
        refcount = (int*)((unsigned char*)data + payload);
        *refcount = 1;
    }
}

// Atomically take one more reference; no-op when refcount is null.
inline void Mat::addref()
{
    if (refcount)
        NCNN_XADD(refcount, 1);
}

// Atomically drop one reference, freeing the buffer when this was the last
// owner.  The header fields are always reset, so the Mat becomes empty even
// when it did not own the data (refcount == null).
inline void Mat::release()
{
    if (refcount && NCNN_XADD(refcount, -1) == 1)
    {
        if (allocator)
            allocator->fastFree(data);
        else
            fastFree(data);
    }

    data = 0;

    elemsize = 0;

    dims = 0;
    w = 0;
    h = 0;
    c = 0;

    cstep = 0;

    refcount = 0;
}

// True when no buffer is attached or the geometry is zero-sized.
inline bool Mat::empty() const
{
    return data == 0 || total() == 0;
}

// Total element capacity including per-channel padding (cstep * c),
// which may exceed w * h * c for 3-D blobs.
inline size_t Mat::total() const
{
    return cstep * c;
}

// 2-D view of the c-th channel (shares the underlying data, no copy;
// no bounds check on c).
inline Mat Mat::channel(int c)
{
    return Mat(w, h, (unsigned char*)data + cstep * c * elemsize, elemsize, allocator);
}

inline const Mat Mat::channel(int c) const
{
    return Mat(w, h, (unsigned char*)data + cstep * c * elemsize, elemsize, allocator);
}

// Pointer to the start of row y, interpreting elements as float.
// NOTE(review): the untyped overloads assume elemsize == sizeof(float).
inline float* Mat::row(int y)
{
    return (float*)data + w * y;
}

inline const float* Mat::row(int y) const
{
    return (const float*)data + w * y;
}

// Typed pointer to the start of row y.
template <typename T>
inline T* Mat::row(int y)
{
    return (T*)data + w * y;
}

template <typename T>
inline const T* Mat::row(int y) const
{
    return (const T*)data + w * y;
}

// View of 'channels' consecutive channels starting at channel _c (shares data).
inline Mat Mat::channel_range(int _c, int channels)
{
    return Mat(w, h, channels, (unsigned char*)data + cstep * _c * elemsize, elemsize, allocator);
}

inline const Mat Mat::channel_range(int _c, int channels) const
{
    return Mat(w, h, channels, (unsigned char*)data + cstep * _c * elemsize, elemsize, allocator);
}

// View of 'rows' consecutive rows starting at row y (shares data).
inline Mat Mat::row_range(int y, int rows)
{
    return Mat(w, rows, (unsigned char*)data + w * y * elemsize, elemsize, allocator);
}

inline const Mat Mat::row_range(int y, int rows) const
{
    return Mat(w, rows, (unsigned char*)data + w * y * elemsize, elemsize, allocator);
}

// View of n consecutive elements starting at element x (shares data).
inline Mat Mat::range(int x, int n)
{
    return Mat(n, (unsigned char*)data + x * elemsize, elemsize, allocator);
}

inline const Mat Mat::range(int x, int n) const
{
    return Mat(n, (unsigned char*)data + x * elemsize, elemsize, allocator);
}

// Implicit conversion to a typed pointer to the raw data.
template <typename T>
inline Mat::operator T*()
{
    return (T*)data;
}

template <typename T>
inline Mat::operator const T*() const
{
    return (const T*)data;
}

// Unchecked linear element access, interpreting elements as float.
inline float& Mat::operator[](int i)
{
    return ((float*)data)[i];
}

inline const float& Mat::operator[](int i) const
{
    return ((const float*)data)[i];
}

} // namespace ncnn

#endif // NCNN_MAT_H


================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/include/modelbin.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_MODELBIN_H
#define NCNN_MODELBIN_H

#include <stdio.h>
#include "mat.h"
#include "platform.h"

namespace ncnn {

class Net;
// Abstract source of layer weight blobs.  Concrete subclasses pull the raw
// bytes from a FILE*, a memory buffer, or a pre-built Mat array.
// The 'type' argument selects the element encoding:
//   0 = auto (detected from the data), 1 = float32, 2 = float16, 3 = int8
class ModelBin
{
public:
    // Load a 1-D weight blob of w elements.
    virtual Mat load(int w, int type) const = 0;
    // Load a 2-D weight blob (w x h).
    virtual Mat load(int w, int h, int type) const;
    // Load a 3-D weight blob (w x h x c).
    virtual Mat load(int w, int h, int c, int type) const;
};

#if NCNN_STDIO
// ModelBin that reads weight data sequentially from an open FILE*.
// No destructor is declared, so the stream is not closed here; the caller
// retains ownership of binfp.
class ModelBinFromStdio : public ModelBin
{
public:
    // construct from file
    ModelBinFromStdio(FILE* binfp);

    virtual Mat load(int w, int type) const;

protected:
    FILE* binfp;
};
#endif // NCNN_STDIO

// ModelBin that reads weight data from a caller-provided memory buffer.
// The pointer is held by reference — presumably advanced as data is
// consumed, so the caller can observe how far loading progressed.
class ModelBinFromMemory : public ModelBin
{
public:
    // construct from external memory
    ModelBinFromMemory(const unsigned char*& mem);

    virtual Mat load(int w, int type) const;

protected:
    const unsigned char*& mem;
};

// ModelBin backed by an array of pre-built Mat weight blobs.
// 'weights' is mutable so that the const load() can presumably step through
// the array as blobs are consumed.
class ModelBinFromMatArray : public ModelBin
{
public:
    // construct from weight blob array
    ModelBinFromMatArray(const Mat* weights);

    virtual Mat load(int w, int type) const;

protected:
    mutable const Mat* weights;
};

} // namespace ncnn

#endif // NCNN_MODELBIN_H


================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/include/net.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_NET_H
#define NCNN_NET_H

#include <stdio.h>
#include <vector>
#include "blob.h"
#include "layer.h"
#include "mat.h"
#include "platform.h"

namespace ncnn {

class Extractor;
// A neural network: the layer graph (structure) plus its weight data.
// Typical use: load_param*() then load_model(), then create_extractor()
// to run inference.
class Net
{
public:
    // empty init
    Net();
    // clear and destroy
    ~Net();

#if NCNN_STRING
    // register custom layer by layer type name
    // return 0 if success
    int register_custom_layer(const char* type, layer_creator_func creator);
#endif // NCNN_STRING
    // register custom layer by layer type
    // return 0 if success
    int register_custom_layer(int index, layer_creator_func creator);

#if NCNN_STDIO
#if NCNN_STRING
    // load network structure from plain param file
    // return 0 if success
    int load_param(FILE* fp);
    int load_param(const char* protopath);
    int load_param_mem(const char* mem);
#endif // NCNN_STRING
    // load network structure from binary param file
    // return 0 if success
    int load_param_bin(FILE* fp);
    int load_param_bin(const char* protopath);

    // load network weight data from model file
    // return 0 if success
    int load_model(FILE* fp);
    int load_model(const char* modelpath);
#endif // NCNN_STDIO

    // load network structure from external memory
    // memory pointer must be 32-bit aligned
    // return bytes consumed
    int load_param(const unsigned char* mem);

    // reference network weight data from external memory
    // weight data is not copied but referenced
    // so external memory should be retained when used
    // memory pointer must be 32-bit aligned
    // return bytes consumed
    int load_model(const unsigned char* mem);

    // unload network structure and weight data
    void clear();

    // construct an Extractor from network
    Extractor create_extractor() const;

public:
    // enable winograd convolution optimization
    // improve convolution 3x3 stride1 performace, may consume more memory
    // changes should be applied before loading network structure and weight
    // enabled by default
    int use_winograd_convolution;

    // enable sgemm convolution optimization
    // improve convolution 1x1 stride1 performace, may consume more memory
    // changes should be applied before loading network structure and weight
    // enabled by default
    int use_sgemm_convolution;

    // enable quantized int8 inference
    // use low-precision int8 path for quantized model
    // changes should be applied before loading network structure and weight
    // enabled by default
    int use_int8_inference;

protected:
    friend class Extractor;
#if NCNN_STRING
    // name -> index lookups over blobs / layers
    int find_blob_index_by_name(const char* name) const;
    int find_layer_index_by_name(const char* name) const;
    // resolve / instantiate a registered custom layer by its type name
    int custom_layer_to_index(const char* type);
    Layer* create_custom_layer(const char* type);
#endif // NCNN_STRING
    Layer* create_custom_layer(int index);
    // execute the given layer, reading and writing blob results in blob_mats
    int forward_layer(int layer_index, std::vector<Mat>& blob_mats, Option& opt) const;

protected:
    // the loaded graph: blobs connect layers
    std::vector<Blob> blobs;
    std::vector<Layer*> layers;

    // creators registered via register_custom_layer()
    std::vector<layer_registry_entry> custom_layer_registry;
};

// One inference session over a Net.  Each Extractor holds its own per-blob
// storage (blob_mats) and options, created via Net::create_extractor().
class Extractor
{
public:
    // enable light mode
    // intermediate blob will be recycled when enabled
    // enabled by default
    void set_light_mode(bool enable);

    // set thread count for this extractor
    // this will overwrite the global setting
    // default count is system depended
    void set_num_threads(int num_threads);

    // set blob memory allocator
    void set_blob_allocator(Allocator* allocator);

    // set workspace memory allocator
    void set_workspace_allocator(Allocator* allocator);

#if NCNN_STRING
    // set input by blob name
    // return 0 if success
    int input(const char* blob_name, const Mat& in);

    // get result by blob name
    // return 0 if success
    int extract(const char* blob_name, Mat& feat);
#endif // NCNN_STRING

    // set input by blob index
    // return 0 if success
    int input(int blob_index, const Mat& in);

    // get result by blob index
    // return 0 if success
    int extract(int blob_index, Mat& feat);

protected:
    // only Net may construct an Extractor
    friend Extractor Net::create_extractor() const;
    Extractor(const Net* net, int blob_count);

private:
    const Net* net;
    // per-session blob results, indexed like Net::blobs
    std::vector<Mat> blob_mats;
    Option opt;
};

} // namespace ncnn

#endif // NCNN_NET_H


================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/include/opencv.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_OPENCV_H
#define NCNN_OPENCV_H

#include "platform.h"

#if NCNN_OPENCV

#include <algorithm>
#include <string>
#include "mat.h"

// minimal opencv style data structure implementation
namespace cv
{

// Minimal cv::Size replacement: a plain width/height pair.
struct Size
{
    Size() : width(0), height(0) {}
    Size(int _w, int _h) : width(_w), height(_h) {}

    int width;
    int height;
};

// Axis-aligned rectangle with top-left corner (x, y) and size width x height.
template<typename _Tp>
struct Rect_
{
    Rect_() : x(0), y(0), width(0), height(0) {}
    Rect_(_Tp _x, _Tp _y, _Tp _w, _Tp _h) : x(_x), y(_y), width(_w), height(_h) {}

    _Tp x;
    _Tp y;
    _Tp width;
    _Tp height;

    // area
    _Tp area() const
    {
        return width * height;
    }
};

// Intersection-assign: shrink a to the overlap of a and b.
// Becomes an empty (all-zero) rect when there is no overlap.
template<typename _Tp> static inline Rect_<_Tp>& operator &= ( Rect_<_Tp>& a, const Rect_<_Tp>& b )
{
    _Tp x1 = std::max(a.x, b.x), y1 = std::max(a.y, b.y);
    a.width = std::min(a.x + a.width, b.x + b.width) - x1;
    a.height = std::min(a.y + a.height, b.y + b.height) - y1;
    a.x = x1; a.y = y1;
    if( a.width <= 0 || a.height <= 0 )
        a = Rect_<_Tp>();
    return a;
}

// Union-assign: grow a to the bounding box enclosing both a and b.
template<typename _Tp> static inline Rect_<_Tp>& operator |= ( Rect_<_Tp>& a, const Rect_<_Tp>& b )
{
    _Tp x1 = std::min(a.x, b.x), y1 = std::min(a.y, b.y);
    a.width = std::max(a.x + a.width, b.x + b.width) - x1;
    a.height = std::max(a.y + a.height, b.y + b.height) - y1;
    a.x = x1; a.y = y1;
    return a;
}

// Intersection of two rects (non-mutating).
template<typename _Tp> static inline Rect_<_Tp> operator & (const Rect_<_Tp>& a, const Rect_<_Tp>& b)
{
    Rect_<_Tp> c = a;
    return c &= b;
}

// Bounding box of two rects (non-mutating).
template<typename _Tp> static inline Rect_<_Tp> operator | (const Rect_<_Tp>& a, const Rect_<_Tp>& b)
{
    Rect_<_Tp> c = a;
    return c |= b;
}

typedef Rect_<int> Rect;
typedef Rect_<float> Rect2f;

// Minimal cv::Point replacement: a 2-D coordinate pair.
template<typename _Tp>
struct Point_
{
    Point_() : x(0), y(0) {}
    Point_(_Tp _x, _Tp _y) : x(_x), y(_y) {}

    _Tp x;
    _Tp y;
};

typedef Point_<int> Point;
typedef Point_<float> Point2f;

#define CV_8UC1 1
#define CV_8UC3 3
#define CV_8UC4 4
#define CV_32FC1 4

// Minimal cv::Mat replacement: a reference-counted rows x cols image whose
// 'c' field holds the per-pixel byte count taken from the CV_* flag value.
struct Mat
{
    Mat() : data(0), refcount(0), rows(0), cols(0), c(0) {}

    Mat(int _rows, int _cols, int flags) : data(0), refcount(0)
    {
        create(_rows, _cols, flags);
    }

    // copy: shares the buffer and bumps the reference count
    Mat(const Mat& m) : data(m.data), refcount(m.refcount)
    {
        if (refcount)
            NCNN_XADD(refcount, 1);

        rows = m.rows;
        cols = m.cols;
        c = m.c;
    }

    // wrap external data: refcount stays null, so release() will not free it
    Mat(int _rows, int _cols, int flags, void* _data) : data((unsigned char*)_data), refcount(0)
    {
        rows = _rows;
        cols = _cols;
        c = flags;
    }

    ~Mat()
    {
        release();
    }

    // assign: take a reference to m's buffer after dropping our own
    Mat& operator=(const Mat& m)
    {
        if (this == &m)
            return *this;

        if (m.refcount)
            NCNN_XADD(m.refcount, 1);

        release();

        data = m.data;
        refcount = m.refcount;

        rows = m.rows;
        cols = m.cols;
        c = m.c;

        return *this;
    }

    // allocate a fresh buffer; the refcount integer lives right after the
    // pixel data in the same allocation
    void create(int _rows, int _cols, int flags)
    {
        release();

        rows = _rows;
        cols = _cols;
        c = flags;

        if (total() > 0)
        {
            // refcount address must be aligned, so we expand totalsize here
            size_t totalsize = (total() + 3) >> 2 << 2;
            data = (unsigned char*)ncnn::fastMalloc(totalsize + (int)sizeof(*refcount));
            refcount = (int*)(((unsigned char*)data) + totalsize);
            *refcount = 1;
        }
    }

    // drop one reference; free the buffer when this was the last owner
    void release()
    {
        if (refcount && NCNN_XADD(refcount, -1) == 1)
            ncnn::fastFree(data);

        data = 0;

        rows = 0;
        cols = 0;
        c = 0;

        refcount = 0;
    }

    // deep copy of the pixel data
    Mat clone() const
    {
        if (empty())
            return Mat();

        Mat m(rows, cols, c);

        if (total() > 0)
        {
            memcpy(m.data, data, total());
        }

        return m;
    }

    bool empty() const { return data == 0 || total() == 0; }

    int channels() const { return c; }

    // total buffer size in bytes (c bytes per pixel)
    size_t total() const { return cols * rows * c; }

    // pointer to the start of row y
    const unsigned char* ptr(int y) const { return data + y * cols * c; }

    unsigned char* ptr(int y) { return data + y * cols * c; }

    // roi: copy the given rectangle into a new Mat (a copy, not a view)
    Mat operator()( const Rect& roi ) const
    {
        if (empty())
            return Mat();

        Mat m(roi.height, roi.width, c);

        int sy = roi.y;
        for (int y = 0; y < roi.height; y++)
        {
            const unsigned char* sptr = ptr(sy) + roi.x * c;
            unsigned char* dptr = m.ptr(y);
            memcpy(dptr, sptr, roi.width * c);
            sy++;
        }

        return m;
    }

    unsigned char* data;

    // pointer to the reference counter;
    // when points to user-allocated data, the pointer is NULL
    int* refcount;

    int rows;
    int cols;

    int c;

};

#define CV_LOAD_IMAGE_GRAYSCALE 1
#define CV_LOAD_IMAGE_COLOR 3
Mat imread(const std::string& path, int flags);
void imwrite(const std::string& path, const Mat& m);

#if NCNN_PIXEL
void resize(const Mat& src, Mat& dst, const Size& size, float sw = 0.f, float sh = 0.f, int flags = 0);
#endif // NCNN_PIXEL

} // namespace cv

#endif // NCNN_OPENCV

#endif // NCNN_OPENCV_H


================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/include/paramdict.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_PARAMDICT_H
#define NCNN_PARAMDICT_H

#include <stdio.h>
#include "mat.h"
#include "platform.h"

// at most 20 parameters
#define NCNN_MAX_PARAM_COUNT 20

namespace ncnn {

class Net;
// Per-layer parameter dictionary: up to NCNN_MAX_PARAM_COUNT slots keyed by
// integer id, each holding an int, a float, or a Mat array.
class ParamDict
{
public:
    // construct with all slots unloaded
    ParamDict();

    // get int value for id, or def when the slot was never set
    int get(int id, int def) const;
    // get float value for id, or def when the slot was never set
    float get(int id, float def) const;
    // get array value for id, or def when the slot was never set
    Mat get(int id, const Mat& def) const;

    // set int
    void set(int id, int i);
    // set float
    void set(int id, float f);
    // set array
    void set(int id, const Mat& v);

public:
    // inference option flags (same names as the fields on Net)
    int use_winograd_convolution;
    int use_sgemm_convolution;
    int use_int8_inference;

protected:
    friend class Net;

    // reset all slots to unloaded
    void clear();

#if NCNN_STDIO
#if NCNN_STRING
    // parse parameters from a plain-text param stream
    int load_param(FILE* fp);
    int load_param_mem(const char*& mem);
#endif // NCNN_STRING
    // parse parameters from a binary param stream
    int load_param_bin(FILE* fp);
#endif // NCNN_STDIO
    int load_param(const unsigned char*& mem);

protected:
    // one slot: 'loaded' marks whether the id was present; the union holds
    // scalar values, v holds array values
    struct
    {
        int loaded;
        union { int i; float f; };
        Mat v;
    } params[NCNN_MAX_PARAM_COUNT];
};

} // namespace ncnn

#endif // NCNN_PARAMDICT_H


================================================
FILE: MobileNetSSD_demo/app/src/main/cpp/include/platform.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_PLATFORM_H
#define NCNN_PLATFORM_H

// Compile-time feature toggles for this ncnn build.
#define NCNN_STDIO 1         // FILE*-based param/model loading (net.h, modelbin.h)
#define NCNN_STRING 1        // string layer/blob names and plain-text param files
#define NCNN_OPENCV 0        // minimal cv:: compatibility layer (opencv.h)
#define NCNN_BENCHMARK 0     // benchmark instrumentation (benchmark.h — not visible here)
#define NCNN_PIXEL 1         // pixel helpers, e.g. cv::resize in opencv.h
#define NCNN_PIXEL_ROTATE 0  // pixel rotation helpers (presumably; impl not visible here)

#endif // NCNN_PLATFORM_H


================================================
FILE: MobileNetSSD_demo/app/src/main/java/com/example/che/mobilenetssd_demo/MainActivity.java
================================================
package com.example.che.mobilenetssd_demo;

import android.Manifest;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.net.Uri;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;


import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.request.RequestOptions;


public class MainActivity extends AppCompatActivity {

    private static final String TAG = MainActivity.class.getName();
    private static final int USE_PHOTO = 1001;
    private String camera_image_path;
    private ImageView show_image;
    private TextView result_text;
    private boolean load_result = false;
    private int[] ddims = {1, 3, 300, 300}; //这里的维度的值要和train model的input 一一对应
    private int model_index = 1;
    private List<String> resultLabel = new ArrayList<>();
    private MobileNetssd mobileNetssd = new MobileNetssd(); //java接口实例化 下面直接利用java函数调用NDK c++函数

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        try
        {
            initMobileNetSSD();
        } catch (IOException e) {
            Log.e("MainActivity", "initMobileNetSSD error");
        }
        init_view();
        readCacheLabelFromLocalFile();
}

    /**
     *
     * MobileNetssd初始化,也就是把model文件进行加载
     */
    private void initMobileNetSSD() throws IOException {
        byte[] param = null;
        byte[] bin = null;
        {
            //用io流读取二进制文件,最后存入到byte[]数组中
            InputStream assetsInputStream = getAssets().open("MobileNetSSD_deploy.param.bin");// param:  网络结构文件
            int available = assetsInputStream.available();
            param = new byte[available];
            int byteCode = assetsInputStream.read(param);
            assetsInputStream.close();
        }
        {
            //用io流读取二进制文件,最后存入到byte上,转换为int型
            InputStream assetsInputStream = getAssets().open("MobileNetSSD_deploy.bin");//bin:   model文件
            int available = assetsInputStream.available();
            bin = new byte[available];
            int byteCode = assetsInputStream.read(bin);
            assetsInputStream.close();
        }

        load_result = mobileNetssd.Init(param, bin);// 再将文件传入java的NDK接口(c++ 代码中的init接口 )
        Log.d("load model", "MobileNetSSD_load_model_result:" + load_result);
    }


    // initialize view
    private void init_view() {
        request_permissions();
        show_image = (ImageView) findViewById(R.id.show_image);
        result_text = (TextView) findViewById(R.id.result_text);
        result_text.setMovementMethod(ScrollingMovementMethod.getInstance());
        Button use_photo = (Button) findViewById(R.id.use_photo);
        // use photo click
        use_photo.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (!load_result) {
                    Toast.makeText(MainActivity.this, "never load model", Toast.LENGTH_SHORT).show();
                    return;
                }
                PhotoUtil.use_photo(MainActivity.this, USE_PHOTO);
            }
        });
    }

    // load label's name
    private void readCacheLabelFromLocalFile() {
        try {
            AssetManager assetManager = getApplicationContext().getAssets();
            BufferedReader reader = new BufferedReader(new InputStreamReader(assetManager.open("words.txt")));//这里是label的文件
            String readLine = null;
            while ((readLine = reader.readLine()) != null) {
                resultLabel.add(readLine);
            }
            reader.close();
        } catch (Exception e) {
            Log.e("labelCache", "error " + e);
        }
    }


    protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
        String image_path;
        RequestOptions options = new RequestOptions().skipMemoryCache(true).diskCacheStrategy(DiskCacheStrategy.NONE);
        if (resultCode == Activity.RESULT_OK) {
            switch (requestCode) {
                case USE_PHOTO:
                    if (data == null) {
                        Log.w(TAG, "user photo data is null");
                        return;
                    }
                    Uri image_uri = data.getData();

                    //Glide.with(MainActivity.this).load(image_uri).apply(options).into(show_image);

                    // get image path from uri
                    image_path = PhotoUtil.get_path_from_URI(MainActivity.this, image_uri);
                    // predict image
                    predict_image(image_path);
                    break;
            }
        }
    }

    //  predict image
    private void predict_image(String image_path) {
        // picture to float array
        Bitmap bmp = PhotoUtil.getScaleBitmap(image_path);
        Bitmap rgba = bmp.copy(Bitmap.Config.ARGB_8888, true);
        // resize
        Bitmap input_bmp = Bitmap.createScaledBitmap(rgba, ddims[2], ddims[3], false);
        try {
            // Data format conversion takes too long
            // Log.d("inputData", Arrays.toString(inputData));
            long start = System.currentTimeMillis();
            // get predict result
            float[] result = mobileNetssd.Detect(input_bmp);
            // time end
            long end = System.currentTimeMillis();
            Log.d(TAG, "origin predict result:" + Arrays.toString(result));
            long time = end - start;
            Log.d("result length", "length of result: " + String.valueOf(result.length));
            // show predict result and time
            // float[] r = get_max_result(result);

            String show_text = "result:" + Arrays.toString(result) + "\nname:" + resultLabel.get((int) result[0]) + "\nprobability:" + result[1] + "\ntime:" + time + "ms" ;
            result_text.setText(show_text);

            // 画布配置
            Canvas canvas = new Canvas(rgba);
            //图像上画矩形
            Paint paint = new Paint();
            paint.setColor(Color.RED);
            paint.setStyle(Paint.Style.STROKE);//不填充
            paint.setStrokeWidth(5); //线的宽度


            float get_finalresult[][] = TwoArry(result);
            Log.d("zhuanhuan",get_finalresult+"");
            int object_num = 0;
            int num = result.length/6;// number of object
            //continue to draw rect
            for(object_num = 0; object_num < num; object_num++){
                Log.d(TAG, "haha :" + Arrays.toString(get_finalresult));
                // 画框
                paint.setColor(Color.RED);
                paint.setStyle(Paint.Style.STROKE);//不填充
                paint.setStrokeWidth(5); //线的宽度
                canvas.drawRect(get_finalresult[object_num][2] * rgba.getWidth(), get_finalresult[object_num][3] * rgba.getHeight(),
                        get_finalresult[object_num][4] * rgba.getWidth(), get_finalresult[object_num][5] * rgba.getHeight(), paint);

                paint.setColor(Color.YELLOW);
                paint.setStyle(Paint.Style.FILL);//不填充
                paint.setStrokeWidth(1); //线的宽度
                canvas.drawText(resultLabel.get((int) get_finalresult[object_num][0]) + "\n" + get_finalresult[object_num][1],
                        get_finalresult[object_num][2]*rgba.getWidth(),get_finalresult[object_num][3]*rgba.getHeight(),paint);
            }

            show_image.setImageBitmap(rgba);


        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    //一维数组转化为二维数组
    public static float[][] TwoArry(float[] inputfloat){
        int n = inputfloat.length;
        int num = inputfloat.length/6;
        float[][] outputfloat = new float[num][6];
        int k = 0;
        for(int i = 0; i < num ; i++)
        {
            int j = 0;

            while(j<6)
            {
                outputfloat[i][j] =  inputfloat[k];
                k++;
                j++;
            }

        }

        return outputfloat;
    }

    /*
    // get max probability label
    private float[] get_max_result(float[] result) {
        int num_rs = result.length / 6;
        float maxProp = result[1];
        int maxI = 0;
        for(int i = 1; i<num_rs;i++){
            if(maxProp<result[i*6+1]){
                maxProp = result[i*6+1];
                maxI = i;
            }
        }
        float[] ret = {0,0,0,0,0,0};
        for(int j=0;j<6;j++){
            ret[j] = result[maxI*6 + j];
        }
        return ret;
    }
    */
    // request permissions(add)
    private void request_permissions() {
        List<String> permissionList = new ArrayList<>();
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
            permissionList.add(Manifest.permission.CAMERA);
        }
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
            permissionList.add(Manifest.permission.WRITE_EXTERNAL_STORAGE);
        }
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
            permissionList.add(Manifest.permission.READ_EXTERNAL_STORAGE);
        }
        // if list is not empty will request permissions
        if (!permissionList.isEmpty()) {
            ActivityCompat.requestPermissions(this, permissionList.toArray(new String[permissionList.size()]), 1);
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        switch (requestCode) {
            case 1:
                if (grantResults.length > 0) {
                    for (int i = 0; i < grantResults.length; i++) {
                        int grantResult = grantResults[i];
                        if (grantResult == PackageManager.PERMISSION_DENIED) {
                            String s = permissions[i];
                            Toast.makeText(this, s + "permission was denied", Toast.LENGTH_SHORT).show();
                        }
                    }
                }
                break;
        }
    }



}



================================================
FILE: MobileNetSSD_demo/app/src/main/java/com/example/che/mobilenetssd_demo/MobileNetssd.java
================================================
package com.example.che.mobilenetssd_demo;

import android.graphics.Bitmap;

/**
 * Java-side JNI interface for the MobileNet-SSD detector. Each native method
 * declared here is implemented in the C++ file MobileNetssd.cpp.
 */
public class MobileNetssd {

    // Initialize the native detector from the raw bytes of the model's
    // .param and .bin files; returns true on success.
    public native boolean Init(byte[] param, byte[] bin); // init function
    // Run detection on the bitmap; returns a flat float array of detection
    // results (layout defined by the native side — see MobileNetssd.cpp).
    public native float[] Detect(Bitmap bitmap); // detect function
    // Used to load the 'MobileNetssd' native library on application startup.
    static {
        System.loadLibrary("MobileNetssd");
    }
}



================================================
FILE: MobileNetSSD_demo/app/src/main/java/com/example/che/mobilenetssd_demo/PhotoUtil.java
================================================
package com.example.che.mobilenetssd_demo;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.provider.MediaStore;

public class PhotoUtil {
    /** Launches the system gallery picker; the chosen image returns via onActivityResult. */
    public static void use_photo(Activity activity, int requestCode) {
        Intent intent = new Intent(Intent.ACTION_PICK);
        intent.setType("image/*");
        activity.startActivityForResult(intent, requestCode);
    }

    /**
     * Resolves a gallery content Uri to a filesystem path.
     * Falls back to uri.getPath() whenever the content resolver cannot supply
     * a DATA value (null cursor, empty cursor, missing column, or null path).
     */
    public static String get_path_from_URI(Context context, Uri uri) {
        Cursor cursor = context.getContentResolver().query(uri, null, null, null, null);
        if (cursor == null) {
            return uri.getPath();
        }
        try {
            int idx = cursor.getColumnIndex(MediaStore.Images.ImageColumns.DATA);
            // Fix: the original ignored moveToFirst()'s result (crash on an empty
            // cursor), called getString(-1) when the DATA column was absent, and
            // leaked the cursor if getString threw.
            if (idx < 0 || !cursor.moveToFirst()) {
                return uri.getPath();
            }
            String path = cursor.getString(idx);
            return path != null ? path : uri.getPath();
        } finally {
            cursor.close();
        }
    }

    /**
     * Decodes the image at filePath downsampled via inSampleSize (powers of two)
     * until either dimension falls below 500 px, bounding decode memory.
     */
    public static Bitmap getScaleBitmap(String filePath) {
        BitmapFactory.Options opt = new BitmapFactory.Options();
        // First pass: read only the image bounds, allocating no pixels.
        opt.inJustDecodeBounds = true;
        BitmapFactory.decodeFile(filePath, opt);

        int bmpWidth = opt.outWidth;
        int bmpHeight = opt.outHeight;

        int maxSize = 500;

        // Double the sampling factor while both dimensions still exceed maxSize.
        opt.inSampleSize = 1;
        while (bmpWidth / opt.inSampleSize >= maxSize && bmpHeight / opt.inSampleSize >= maxSize) {
            opt.inSampleSize *= 2;
        }

        // Second pass: decode actual pixels at the chosen scale.
        opt.inJustDecodeBounds = false;
        return BitmapFactory.decodeFile(filePath, opt);
    }
}



================================================
FILE: MobileNetSSD_demo/app/src/main/res/drawable/ic_launcher_background.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
    android:width="108dp"
    android:height="108dp"
    android:viewportWidth="108"
    android:viewportHeight="108">
    <path
        android:fillColor="#008577"
        android:pathData="M0,0h108v108h-108z" />
    <path
        android:fillColor="#00000000"
        android:pathData="M9,0L9,108"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M19,0L19,108"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M29,0L29,108"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M39,0L39,108"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M49,0L49,108"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M59,0L59,108"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M69,0L69,108"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M79,0L79,108"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M89,0L89,108"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M99,0L99,108"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M0,9L108,9"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M0,19L108,19"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M0,29L108,29"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M0,39L108,39"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M0,49L108,49"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M0,59L108,59"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M0,69L108,69"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M0,79L108,79"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M0,89L108,89"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M0,99L108,99"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M19,29L89,29"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M19,39L89,39"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M19,49L89,49"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M19,59L89,59"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M19,69L89,69"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M19,79L89,79"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M29,19L29,89"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M39,19L39,89"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M49,19L49,89"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M59,19L59,89"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M69,19L69,89"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
    <path
        android:fillColor="#00000000"
        android:pathData="M79,19L79,89"
        android:strokeWidth="0.8"
        android:strokeColor="#33FFFFFF" />
</vector>


================================================
FILE: MobileNetSSD_demo/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
================================================
<vector xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:aapt="http://schemas.android.com/aapt"
    android:width="108dp"
    android:height="108dp"
    android:viewportWidth="108"
    android:viewportHeight="108">
    <path
        android:fillType="evenOdd"
        android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
        android:strokeWidth="1"
        android:strokeColor="#00000000">
        <aapt:attr name="android:fillColor">
            <gradient
                android:endX="78.5885"
                android:endY="90.9159"
                android:startX="48.7653"
                android:startY="61.0927"
                android:type="linear">
                <item
                    android:color="#44000000"
                    android:offset="0.0" />
                <item
                    android:color="#00000000"
                    android:offset="1.0" />
            </gradient>
        </aapt:attr>
    </path>
    <path
        android:fillColor="#FFFFFF"
        android:fillType="nonZero"
        android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
        android:strokeWidth="1"
        android:strokeColor="#00000000" />
</vector>


================================================
FILE: MobileNetSSD_demo/app/src/main/res/layout/activity_main.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<!-- Main screen layout: full-screen image preview on top, a fixed-height
     result text area beneath it, and a button row pinned to the bottom. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".MainActivity">
    <!-- Bottom button bar; currently holds only the gallery ("相册") button. -->
    <LinearLayout
        android:id="@+id/btn_ll"
        android:layout_alignParentBottom="true"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:orientation="horizontal">
        <Button
            android:id="@+id/use_photo"
            android:layout_weight="1"
            android:layout_width="0dp"
            android:layout_height="wrap_content"
            android:text="相册"/>
    </LinearLayout>
    <!-- Detection results are written here by MainActivity. -->
    <TextView
        android:layout_above="@id/btn_ll"
        android:id="@+id/result_text"
        android:textSize="16sp"
        android:layout_width="match_parent"
        android:hint="预测结果会在这里显示"
        android:layout_height="100dp"/>
    <!-- Shows the selected photo with detection boxes drawn on it. -->
    <ImageView
        android:layout_alignParentTop="true"
        android:layout_above="@id/result_text"
        android:id="@+id/show_image"
        android:layout_width="match_parent"
        android:layout_height="match_parent"/>
</RelativeLayout>



================================================
FILE: MobileNetSSD_demo/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
    <background android:drawable="@drawable/ic_launcher_background" />
    <foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

================================================
FILE: MobileNetSSD_demo/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
    <background android:drawable="@drawable/ic_launcher_background" />
    <foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

================================================
FILE: MobileNetSSD_demo/app/src/main/res/values/colors.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <color name="colorPrimary">#008577</color>
    <color name="colorPrimaryDark">#00574B</color>
    <color name="colorAccent">#D81B60</color>
</resources>


================================================
FILE: MobileNetSSD_demo/app/src/main/res/values/strings.xml
================================================
<resources>
    <string name="app_name">MobileNetSSD_demo</string>
</resources>


================================================
FILE: MobileNetSSD_demo/app/src/main/res/values/styles.xml
================================================
<resources>

    <!-- Base application theme. -->
    <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
        <!-- Customize your theme here. -->
        <item name="colorPrimary">@color/colorPrimary</item>
        <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
        <item name="colorAccent">@color/colorAccent</item>
    </style>

</resources>


================================================
FILE: MobileNetSSD_demo/app/src/test/java/com/example/che/mobilenetssd_demo/ExampleUnitTest.java
================================================
package com.example.che.mobilenetssd_demo;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {
    // Sanity check that the local JVM test harness runs at all.
    @Test
    public void addition_isCorrect() {
        int sum = 2 + 2;
        assertEquals(4, sum);
    }
}

================================================
FILE: MobileNetSSD_demo/build.gradle
================================================
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    
    repositories {
        google()
        jcenter()
    }
    dependencies {
        classpath 'com.android.tools.build:gradle:3.2.1'
        

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        google()
        jcenter()
    }
}

task clean(type: Delete) {
    delete rootProject.buildDir
}


================================================
FILE: MobileNetSSD_demo/gradle/wrapper/gradle-wrapper.properties
================================================
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.6-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists


================================================
FILE: MobileNetSSD_demo/gradle.properties
================================================
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx1536m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true




================================================
FILE: MobileNetSSD_demo/gradlew
================================================
#!/usr/bin/env sh

##############################################################################
##
##  Gradle start up script for UN*X
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

warn () {
    echo "$*"
}

die () {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Escape application args
save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
APP_ARGS=$(save "$@")

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
  cd "$(dirname "$0")"
fi

exec "$JAVACMD" "$@"


================================================
FILE: MobileNetSSD_demo/gradlew.bat
================================================
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if  not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega


================================================
FILE: MobileNetSSD_demo/settings.gradle
================================================
include ':app'


================================================
FILE: MobileNetSSD_demo_single/.gitignore
================================================
*.iml
.gradle
/local.properties
/.idea/caches/build_file_checksums.ser
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
.DS_Store
/build
/captures
.externalNativeBuild


================================================
FILE: MobileNetSSD_demo_single/.idea/codeStyles/Project.xml
================================================
<component name="ProjectCodeStyleConfiguration">
  <code_scheme name="Project" version="173">
    <Objective-C-extensions>
      <file>
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Import" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Macro" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Typedef" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Enum" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Constant" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Global" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Struct" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="FunctionPredecl" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Function" />
      </file>
      <class>
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Property" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Synthesize" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="InitMethod" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="StaticMethod" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="InstanceMethod" />
        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="DeallocMethod" />
      </class>
      <extensions>
        <pair source="cpp" header="h" fileNamingConvention="NONE" />
        <pair source="c" header="h" fileNamingConvention="NONE" />
      </extensions>
    </Objective-C-extensions>
  </code_scheme>
</component>

================================================
FILE: MobileNetSSD_demo_single/.idea/gradle.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="GradleSettings">
    <option name="linkedExternalProjectsSettings">
      <GradleProjectSettings>
        <option name="distributionType" value="DEFAULT_WRAPPED" />
        <option name="externalProjectPath" value="$PROJECT_DIR$" />
        <option name="modules">
          <set>
            <option value="$PROJECT_DIR$" />
            <option value="$PROJECT_DIR$/app" />
          </set>
        </option>
        <option name="resolveModulePerSourceSet" value="false" />
      </GradleProjectSettings>
    </option>
  </component>
</project>

================================================
FILE: MobileNetSSD_demo_single/.idea/misc.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="NullableNotNullManager">
    <option name="myDefaultNullable" value="android.support.annotation.Nullable" />
    <option name="myDefaultNotNull" value="android.support.annotation.NonNull" />
    <option name="myNullables">
      <value>
        <list size="7">
          <item index="0" class="java.lang.String" itemvalue="org.jetbrains.annotations.Nullable" />
          <item index="1" class="java.lang.String" itemvalue="javax.annotation.Nullable" />
          <item index="2" class="java.lang.String" itemvalue="javax.annotation.CheckForNull" />
          <item index="3" class="java.lang.String" itemvalue="edu.umd.cs.findbugs.annotations.Nullable" />
          <item index="4" class="java.lang.String" itemvalue="android.support.annotation.Nullable" />
          <item index="5" class="java.lang.String" itemvalue="androidx.annotation.Nullable" />
          <item index="6" class="java.lang.String" itemvalue="androidx.annotation.RecentlyNullable" />
        </list>
      </value>
    </option>
    <option name="myNotNulls">
      <value>
        <list size="6">
          <item index="0" class="java.lang.String" itemvalue="org.jetbrains.annotations.NotNull" />
          <item index="1" class="java.lang.String" itemvalue="javax.annotation.Nonnull" />
          <item index="2" class="java.lang.String" itemvalue="edu.umd.cs.findbugs.annotations.NonNull" />
          <item index="3" class="java.lang.String" itemvalue="android.support.annotation.NonNull" />
          <item index="4" class="java.lang.String" itemvalue="androidx.annotation.NonNull" />
          <item index="5" class="java.lang.String" itemvalue="androidx.annotation.RecentlyNonNull" />
        </list>
      </value>
    </option>
  </component>
  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_7" project-jdk-name="1.8" project-jdk-type="JavaSDK">
    <output url="file://$PROJECT_DIR$/build/classes" />
  </component>
  <component name="ProjectType">
    <option name="id" value="Android" />
  </component>
</project>

================================================
FILE: MobileNetSSD_demo_single/app/.gitignore
================================================
/build


================================================
FILE: MobileNetSSD_demo_single/app/CMakeLists.txt
================================================
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html

# Sets the minimum version of CMake required to build the native library.

cmake_minimum_required(VERSION 3.4.1)

# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.

## Required addition: import the prebuilt static ncnn library (libncnn.a).
set(ncnn_lib ${CMAKE_SOURCE_DIR}/src/main/jniLibs/armeabi-v7a/libncnn.a)
add_library (ncnn_lib STATIC IMPORTED)
set_target_properties(ncnn_lib PROPERTIES IMPORTED_LOCATION ${ncnn_lib})

add_library( # Sets the name of the library.
        MobileNetssd ## name of the generated .so; best kept identical to the source file name

        # Sets the library as a shared library.
        SHARED

        # Provides a relative path to your source file(s).
        src/main/cpp/MobileNetssd.cpp)## path of the cpp source file

# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.

find_library( # Sets the name of the path variable.
        log-lib

        # Specifies the name of the NDK library that
        # you want CMake to locate.
        log)

# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.

target_link_libraries( # Specifies the target library.
        ## all three of the following must be listed
        MobileNetssd   # same target name as defined above
        ncnn_lib       # the imported ncnn static library
        jnigraphics    # NDK bitmap-access library, required by the JNI code

        # Links the target library to the log library
        # included in the NDK.
        ${log-lib})
================================================
FILE: MobileNetSSD_demo_single/app/build.gradle
================================================
apply plugin: 'com.android.application'

android {
    compileSdkVersion 28
    defaultConfig {
        applicationId "com.example.che.mobilenetssd_demo"
        minSdkVersion 15
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
        externalNativeBuild {
            cmake {
                cppFlags "-std=c++11 -fopenmp"// C++11 plus OpenMP multithreading -- required for ncnn
                abiFilters "armeabi-v7a" // target ABI; armeabi-v7a runs on essentially all phone hardware
            }
        }
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
    externalNativeBuild {
        cmake {
            path "CMakeLists.txt"
        }
    }

    // Required: pick up the prebuilt .a file (libncnn.a produced by ncnn's "make install")
    sourceSets {
        main {
            jniLibs.srcDirs = ["src/main/jniLibs"]
            jni.srcDirs = ['src/cpp']
        }
    }
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'com.android.support:appcompat-v7:28.0.0'
    implementation 'com.android.support.constraint:constraint-layout:1.1.3'
    testImplementation 'junit:junit:4.12'
    implementation 'com.github.bumptech.glide:glide:4.3.1'   // Glide image-loading library -- needed for image display
    androidTestImplementation 'com.android.support.test:runner:1.0.2'
    androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
}


================================================
FILE: MobileNetSSD_demo_single/app/proguard-rules.pro
================================================
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
#   http://developer.android.com/guide/developing/tools/proguard.html

# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
#   public *;
#}

# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable

# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile


================================================
FILE: MobileNetSSD_demo_single/app/src/androidTest/java/com/example/che/mobilenetssd_demo/ExampleInstrumentedTest.java
================================================
package com.example.che.mobilenetssd_demo;

import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;

import org.junit.Test;
import org.junit.runner.RunWith;

import static org.junit.Assert.*;

/**
 * Instrumented test, which will execute on an Android device.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
    @Test
    public void useAppContext() {
        // Context of the app under test.
        Context appContext = InstrumentationRegistry.getTargetContext();

        // The instrumented APK must report the package name declared in the manifest.
        assertEquals("com.example.che.mobilenetssd_demo", appContext.getPackageName());
    }
}


================================================
FILE: MobileNetSSD_demo_single/app/src/main/AndroidManifest.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.che.mobilenetssd_demo">

    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
    <uses-permission android:name="android.permission.CAMERA"/>

    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:theme="@style/AppTheme">
        <activity android:name=".MainActivity">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />

                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

</manifest>

================================================
FILE: MobileNetSSD_demo_single/app/src/main/assets/words.txt
================================================
background
aeroplane
bicycle
bird
boat
bottle
bus
car
cat
chair
cow
diningtable
dog
horse
motorbike
person
pottedplant
sheep
sofa
train
tvmonitor

================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/MobileNetSSD_deploy.id.h
================================================
#ifndef NCNN_INCLUDE_GUARD_MobileNetSSD_deploy_id_h
#define NCNN_INCLUDE_GUARD_MobileNetSSD_deploy_id_h
// Auto-generated index table for the binary (memory-loaded) MobileNet-SSD model.
// LAYER_* constants index layers and BLOB_* constants index blobs in
// MobileNetSSD_deploy.param.bin; the JNI code feeds BLOB_data as the network
// input and reads BLOB_detection_out as the result. Do not edit by hand -
// regenerate with ncnn's ncnn2mem tool whenever the model changes.
namespace MobileNetSSD_deploy_param_id {
// Network input blob and the split that fans it out to the SSD branches.
const int LAYER_input = 0;
const int BLOB_data = 0;
const int LAYER_splitncnn_0 = 1;
const int BLOB_data_splitncnn_0 = 1;
const int BLOB_data_splitncnn_1 = 2;
const int BLOB_data_splitncnn_2 = 3;
const int BLOB_data_splitncnn_3 = 4;
const int BLOB_data_splitncnn_4 = 5;
const int BLOB_data_splitncnn_5 = 6;
const int BLOB_data_splitncnn_6 = 7;
// MobileNet backbone: conv0 then depthwise/pointwise pairs conv1..conv13,
// each followed by its ReLU activation blob.
const int LAYER_conv0 = 2;
const int BLOB_conv0 = 8;
const int LAYER_conv0_relu = 3;
const int BLOB_conv0_conv0_relu = 9;
const int LAYER_conv1_dw = 4;
const int BLOB_conv1_dw = 10;
const int LAYER_conv1_dw_relu = 5;
const int BLOB_conv1_dw_conv1_dw_relu = 11;
const int LAYER_conv1 = 6;
const int BLOB_conv1 = 12;
const int LAYER_conv1_relu = 7;
const int BLOB_conv1_conv1_relu = 13;
const int LAYER_conv2_dw = 8;
const int BLOB_conv2_dw = 14;
const int LAYER_conv2_dw_relu = 9;
const int BLOB_conv2_dw_conv2_dw_relu = 15;
const int LAYER_conv2 = 10;
const int BLOB_conv2 = 16;
const int LAYER_conv2_relu = 11;
const int BLOB_conv2_conv2_relu = 17;
const int LAYER_conv3_dw = 12;
const int BLOB_conv3_dw = 18;
const int LAYER_conv3_dw_relu = 13;
const int BLOB_conv3_dw_conv3_dw_relu = 19;
const int LAYER_conv3 = 14;
const int BLOB_conv3 = 20;
const int LAYER_conv3_relu = 15;
const int BLOB_conv3_conv3_relu = 21;
const int LAYER_conv4_dw = 16;
const int BLOB_conv4_dw = 22;
const int LAYER_conv4_dw_relu = 17;
const int BLOB_conv4_dw_conv4_dw_relu = 23;
const int LAYER_conv4 = 18;
const int BLOB_conv4 = 24;
const int LAYER_conv4_relu = 19;
const int BLOB_conv4_conv4_relu = 25;
const int LAYER_conv5_dw = 20;
const int BLOB_conv5_dw = 26;
const int LAYER_conv5_dw_relu = 21;
const int BLOB_conv5_dw_conv5_dw_relu = 27;
const int LAYER_conv5 = 22;
const int BLOB_conv5 = 28;
const int LAYER_conv5_relu = 23;
const int BLOB_conv5_conv5_relu = 29;
const int LAYER_conv6_dw = 24;
const int BLOB_conv6_dw = 30;
const int LAYER_conv6_dw_relu = 25;
const int BLOB_conv6_dw_conv6_dw_relu = 31;
const int LAYER_conv6 = 26;
const int BLOB_conv6 = 32;
const int LAYER_conv6_relu = 27;
const int BLOB_conv6_conv6_relu = 33;
const int LAYER_conv7_dw = 28;
const int BLOB_conv7_dw = 34;
const int LAYER_conv7_dw_relu = 29;
const int BLOB_conv7_dw_conv7_dw_relu = 35;
const int LAYER_conv7 = 30;
const int BLOB_conv7 = 36;
const int LAYER_conv7_relu = 31;
const int BLOB_conv7_conv7_relu = 37;
const int LAYER_conv8_dw = 32;
const int BLOB_conv8_dw = 38;
const int LAYER_conv8_dw_relu = 33;
const int BLOB_conv8_dw_conv8_dw_relu = 39;
const int LAYER_conv8 = 34;
const int BLOB_conv8 = 40;
const int LAYER_conv8_relu = 35;
const int BLOB_conv8_conv8_relu = 41;
const int LAYER_conv9_dw = 36;
const int BLOB_conv9_dw = 42;
const int LAYER_conv9_dw_relu = 37;
const int BLOB_conv9_dw_conv9_dw_relu = 43;
const int LAYER_conv9 = 38;
const int BLOB_conv9 = 44;
const int LAYER_conv9_relu = 39;
const int BLOB_conv9_conv9_relu = 45;
const int LAYER_conv10_dw = 40;
const int BLOB_conv10_dw = 46;
const int LAYER_conv10_dw_relu = 41;
const int BLOB_conv10_dw_conv10_dw_relu = 47;
const int LAYER_conv10 = 42;
const int BLOB_conv10 = 48;
const int LAYER_conv10_relu = 43;
const int BLOB_conv10_conv10_relu = 49;
const int LAYER_conv11_dw = 44;
const int BLOB_conv11_dw = 50;
const int LAYER_conv11_dw_relu = 45;
const int BLOB_conv11_dw_conv11_dw_relu = 51;
const int LAYER_conv11 = 46;
const int BLOB_conv11 = 52;
const int LAYER_conv11_relu = 47;
const int BLOB_conv11_conv11_relu = 53;
const int LAYER_splitncnn_1 = 48;
const int BLOB_conv11_conv11_relu_splitncnn_0 = 54;
const int BLOB_conv11_conv11_relu_splitncnn_1 = 55;
const int BLOB_conv11_conv11_relu_splitncnn_2 = 56;
const int BLOB_conv11_conv11_relu_splitncnn_3 = 57;
const int LAYER_conv12_dw = 49;
const int BLOB_conv12_dw = 58;
const int LAYER_conv12_dw_relu = 50;
const int BLOB_conv12_dw_conv12_dw_relu = 59;
const int LAYER_conv12 = 51;
const int BLOB_conv12 = 60;
const int LAYER_conv12_relu = 52;
const int BLOB_conv12_conv12_relu = 61;
const int LAYER_conv13_dw = 53;
const int BLOB_conv13_dw = 62;
const int LAYER_conv13_dw_relu = 54;
const int BLOB_conv13_dw_conv13_dw_relu = 63;
const int LAYER_conv13 = 55;
const int BLOB_conv13 = 64;
const int LAYER_conv13_relu = 56;
const int BLOB_conv13_conv13_relu = 65;
const int LAYER_splitncnn_2 = 57;
const int BLOB_conv13_conv13_relu_splitncnn_0 = 66;
const int BLOB_conv13_conv13_relu_splitncnn_1 = 67;
const int BLOB_conv13_conv13_relu_splitncnn_2 = 68;
const int BLOB_conv13_conv13_relu_splitncnn_3 = 69;
// Extra SSD feature pyramid layers conv14_1 .. conv17_2 with their splits.
const int LAYER_conv14_1 = 58;
const int BLOB_conv14_1 = 70;
const int LAYER_conv14_1_relu = 59;
const int BLOB_conv14_1_conv14_1_relu = 71;
const int LAYER_conv14_2 = 60;
const int BLOB_conv14_2 = 72;
const int LAYER_conv14_2_relu = 61;
const int BLOB_conv14_2_conv14_2_relu = 73;
const int LAYER_splitncnn_3 = 62;
const int BLOB_conv14_2_conv14_2_relu_splitncnn_0 = 74;
const int BLOB_conv14_2_conv14_2_relu_splitncnn_1 = 75;
const int BLOB_conv14_2_conv14_2_relu_splitncnn_2 = 76;
const int BLOB_conv14_2_conv14_2_relu_splitncnn_3 = 77;
const int LAYER_conv15_1 = 63;
const int BLOB_conv15_1 = 78;
const int LAYER_conv15_1_relu = 64;
const int BLOB_conv15_1_conv15_1_relu = 79;
const int LAYER_conv15_2 = 65;
const int BLOB_conv15_2 = 80;
const int LAYER_conv15_2_relu = 66;
const int BLOB_conv15_2_conv15_2_relu = 81;
const int LAYER_splitncnn_4 = 67;
const int BLOB_conv15_2_conv15_2_relu_splitncnn_0 = 82;
const int BLOB_conv15_2_conv15_2_relu_splitncnn_1 = 83;
const int BLOB_conv15_2_conv15_2_relu_splitncnn_2 = 84;
const int BLOB_conv15_2_conv15_2_relu_splitncnn_3 = 85;
const int LAYER_conv16_1 = 68;
const int BLOB_conv16_1 = 86;
const int LAYER_conv16_1_relu = 69;
const int BLOB_conv16_1_conv16_1_relu = 87;
const int LAYER_conv16_2 = 70;
const int BLOB_conv16_2 = 88;
const int LAYER_conv16_2_relu = 71;
const int BLOB_conv16_2_conv16_2_relu = 89;
const int LAYER_splitncnn_5 = 72;
const int BLOB_conv16_2_conv16_2_relu_splitncnn_0 = 90;
const int BLOB_conv16_2_conv16_2_relu_splitncnn_1 = 91;
const int BLOB_conv16_2_conv16_2_relu_splitncnn_2 = 92;
const int BLOB_conv16_2_conv16_2_relu_splitncnn_3 = 93;
const int LAYER_conv17_1 = 73;
const int BLOB_conv17_1 = 94;
const int LAYER_conv17_1_relu = 74;
const int BLOB_conv17_1_conv17_1_relu = 95;
const int LAYER_conv17_2 = 75;
const int BLOB_conv17_2 = 96;
const int LAYER_conv17_2_relu = 76;
const int BLOB_conv17_2_conv17_2_relu = 97;
const int LAYER_splitncnn_6 = 77;
const int BLOB_conv17_2_conv17_2_relu_splitncnn_0 = 98;
const int BLOB_conv17_2_conv17_2_relu_splitncnn_1 = 99;
const int BLOB_conv17_2_conv17_2_relu_splitncnn_2 = 100;
// SSD detection heads: per-scale location / confidence / priorbox branches
// (each conv + permute + flatten triple, plus a priorbox layer).
const int LAYER_conv11_mbox_loc = 78;
const int BLOB_conv11_mbox_loc = 101;
const int LAYER_conv11_mbox_loc_perm = 79;
const int BLOB_conv11_mbox_loc_perm = 102;
const int LAYER_conv11_mbox_loc_flat = 80;
const int BLOB_conv11_mbox_loc_flat = 103;
const int LAYER_conv11_mbox_conf = 81;
const int BLOB_conv11_mbox_conf = 104;
const int LAYER_conv11_mbox_conf_perm = 82;
const int BLOB_conv11_mbox_conf_perm = 105;
const int LAYER_conv11_mbox_conf_flat = 83;
const int BLOB_conv11_mbox_conf_flat = 106;
const int LAYER_conv11_mbox_priorbox = 84;
const int BLOB_conv11_mbox_priorbox = 107;
const int LAYER_conv13_mbox_loc = 85;
const int BLOB_conv13_mbox_loc = 108;
const int LAYER_conv13_mbox_loc_perm = 86;
const int BLOB_conv13_mbox_loc_perm = 109;
const int LAYER_conv13_mbox_loc_flat = 87;
const int BLOB_conv13_mbox_loc_flat = 110;
const int LAYER_conv13_mbox_conf = 88;
const int BLOB_conv13_mbox_conf = 111;
const int LAYER_conv13_mbox_conf_perm = 89;
const int BLOB_conv13_mbox_conf_perm = 112;
const int LAYER_conv13_mbox_conf_flat = 90;
const int BLOB_conv13_mbox_conf_flat = 113;
const int LAYER_conv13_mbox_priorbox = 91;
const int BLOB_conv13_mbox_priorbox = 114;
const int LAYER_conv14_2_mbox_loc = 92;
const int BLOB_conv14_2_mbox_loc = 115;
const int LAYER_conv14_2_mbox_loc_perm = 93;
const int BLOB_conv14_2_mbox_loc_perm = 116;
const int LAYER_conv14_2_mbox_loc_flat = 94;
const int BLOB_conv14_2_mbox_loc_flat = 117;
const int LAYER_conv14_2_mbox_conf = 95;
const int BLOB_conv14_2_mbox_conf = 118;
const int LAYER_conv14_2_mbox_conf_perm = 96;
const int BLOB_conv14_2_mbox_conf_perm = 119;
const int LAYER_conv14_2_mbox_conf_flat = 97;
const int BLOB_conv14_2_mbox_conf_flat = 120;
const int LAYER_conv14_2_mbox_priorbox = 98;
const int BLOB_conv14_2_mbox_priorbox = 121;
const int LAYER_conv15_2_mbox_loc = 99;
const int BLOB_conv15_2_mbox_loc = 122;
const int LAYER_conv15_2_mbox_loc_perm = 100;
const int BLOB_conv15_2_mbox_loc_perm = 123;
const int LAYER_conv15_2_mbox_loc_flat = 101;
const int BLOB_conv15_2_mbox_loc_flat = 124;
const int LAYER_conv15_2_mbox_conf = 102;
const int BLOB_conv15_2_mbox_conf = 125;
const int LAYER_conv15_2_mbox_conf_perm = 103;
const int BLOB_conv15_2_mbox_conf_perm = 126;
const int LAYER_conv15_2_mbox_conf_flat = 104;
const int BLOB_conv15_2_mbox_conf_flat = 127;
const int LAYER_conv15_2_mbox_priorbox = 105;
const int BLOB_conv15_2_mbox_priorbox = 128;
const int LAYER_conv16_2_mbox_loc = 106;
const int BLOB_conv16_2_mbox_loc = 129;
const int LAYER_conv16_2_mbox_loc_perm = 107;
const int BLOB_conv16_2_mbox_loc_perm = 130;
const int LAYER_conv16_2_mbox_loc_flat = 108;
const int BLOB_conv16_2_mbox_loc_flat = 131;
const int LAYER_conv16_2_mbox_conf = 109;
const int BLOB_conv16_2_mbox_conf = 132;
const int LAYER_conv16_2_mbox_conf_perm = 110;
const int BLOB_conv16_2_mbox_conf_perm = 133;
const int LAYER_conv16_2_mbox_conf_flat = 111;
const int BLOB_conv16_2_mbox_conf_flat = 134;
const int LAYER_conv16_2_mbox_priorbox = 112;
const int BLOB_conv16_2_mbox_priorbox = 135;
const int LAYER_conv17_2_mbox_loc = 113;
const int BLOB_conv17_2_mbox_loc = 136;
const int LAYER_conv17_2_mbox_loc_perm = 114;
const int BLOB_conv17_2_mbox_loc_perm = 137;
const int LAYER_conv17_2_mbox_loc_flat = 115;
const int BLOB_conv17_2_mbox_loc_flat = 138;
const int LAYER_conv17_2_mbox_conf = 116;
const int BLOB_conv17_2_mbox_conf = 139;
const int LAYER_conv17_2_mbox_conf_perm = 117;
const int BLOB_conv17_2_mbox_conf_perm = 140;
const int LAYER_conv17_2_mbox_conf_flat = 118;
const int BLOB_conv17_2_mbox_conf_flat = 141;
const int LAYER_conv17_2_mbox_priorbox = 119;
const int BLOB_conv17_2_mbox_priorbox = 142;
// Concatenated head outputs, softmax over class scores, and the final
// DetectionOutput layer whose blob the JNI Detect() extracts.
const int LAYER_mbox_loc = 120;
const int BLOB_mbox_loc = 143;
const int LAYER_mbox_conf = 121;
const int BLOB_mbox_conf = 144;
const int LAYER_mbox_priorbox = 122;
const int BLOB_mbox_priorbox = 145;
const int LAYER_mbox_conf_reshape = 123;
const int BLOB_mbox_conf_reshape = 146;
const int LAYER_mbox_conf_softmax = 124;
const int BLOB_mbox_conf_softmax = 147;
const int LAYER_mbox_conf_flatten = 125;
const int BLOB_mbox_conf_flatten = 148;
const int LAYER_detection_out = 126;
const int BLOB_detection_out = 149;
} // namespace MobileNetSSD_deploy_param_id
#endif // NCNN_INCLUDE_GUARD_MobileNetSSD_deploy_id_h


================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/MobileNetssd.cpp
================================================
#include <android/bitmap.h>
#include <android/log.h>
#include <jni.h>
#include <string>
#include <vector>

// ncnn
#include "include/opencv.h"
#include "MobileNetSSD_deploy.id.h"   //这里看成自己的id.h
#include <sys/time.h>
#include <unistd.h>
#include "include/net.h"

// Process-wide ncnn state shared by Init() and Detect().
// The allocators are installed into the default Option by Init().
static ncnn::UnlockedPoolAllocator g_blob_pool_allocator;
static ncnn::PoolAllocator g_workspace_pool_allocator;

// ncnn_param/ncnn_bin are used as flat byte buffers that keep the param table
// and model weights alive for the lifetime of the process (the mem-based
// loaders reference this memory).
static ncnn::Mat ncnn_param;
static ncnn::Mat ncnn_bin;
static ncnn::Net ncnn_net;

extern "C" {


// public native boolean Init(byte[] param, byte[] bin);  (Java-side signature; below is the NDK/JNI C++ form)
// Copies the binary param table and the model weights from the Java byte
// arrays into the process-global buffers, loads them into ncnn_net, and
// installs the default inference options.
// NOTE(review): the return values of load_param/load_model are only logged,
// never validated - this function reports JNI_TRUE unconditionally. Before
// adding a failure path, confirm the return convention of the memory-based
// loaders (bytes consumed vs. 0-on-success) for this ncnn version.
JNIEXPORT jboolean JNICALL
Java_com_example_che_mobilenetssd_1demo_MobileNetssd_Init(JNIEnv *env, jobject obj, jbyteArray param, jbyteArray bin) {
    __android_log_print(ANDROID_LOG_DEBUG, "MobileNetssd", "enter the jni func");
    // init param: copy the Java byte[] into ncnn_param (a 1-byte-element Mat
    // used purely as a flat buffer) and load it as the network structure.
    {
        int len = env->GetArrayLength(param);
        ncnn_param.create(len, (size_t) 1u);
        env->GetByteArrayRegion(param, 0, len, (jbyte *) ncnn_param);
        int ret = ncnn_net.load_param((const unsigned char *) ncnn_param);
        __android_log_print(ANDROID_LOG_DEBUG, "MobileNetssd", "load_param %d %d", ret, len);
    }

    // init bin: same pattern for the model weights.
    {
        int len = env->GetArrayLength(bin);
        ncnn_bin.create(len, (size_t) 1u);
        env->GetByteArrayRegion(bin, 0, len, (jbyte *) ncnn_bin);
        int ret = ncnn_net.load_model((const unsigned char *) ncnn_bin);
        __android_log_print(ANDROID_LOG_DEBUG, "MobileNetssd", "load_model %d %d", ret, len);
    }

    // Install process-wide inference options used by later extractions.
    ncnn::Option opt;
    opt.lightmode = true;
    opt.num_threads = 4;   // thread count - tune here if needed
    opt.blob_allocator = &g_blob_pool_allocator;
    opt.workspace_allocator = &g_workspace_pool_allocator;

    ncnn::set_default_option(opt);

    return JNI_TRUE;
}

// public native float[] Detect(Bitmap bitmap);  (Java-side signature)
// Runs the MobileNet-SSD network on an RGBA_8888 bitmap and returns the
// flattened DetectionOutput blob (out.h detections of out.w floats each) as a
// Java float[]. Returns NULL if the bitmap is not RGBA_8888 or if a JNI
// allocation fails.
JNIEXPORT jfloatArray JNICALL Java_com_example_che_mobilenetssd_1demo_MobileNetssd_Detect(JNIEnv* env, jobject thiz, jobject bitmap)
{
    // Convert the Android bitmap into an ncnn::Mat in RGB channel order.
    ncnn::Mat in;
    {
        AndroidBitmapInfo info;
        AndroidBitmap_getInfo(env, bitmap, &info);
        int width = info.width;
        int height = info.height;
        // Only RGBA_8888 bitmaps are supported.
        if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
            return NULL;

        void* indata;
        AndroidBitmap_lockPixels(env, bitmap, &indata);
        // The bitmap is expected to already be resized on the Java side to the
        // network input size (e.g. 300x300); from_pixels_resize could do the
        // resize here instead if that ever changes.
        in = ncnn::Mat::from_pixels((const unsigned char*)indata, ncnn::Mat::PIXEL_RGBA2RGB, width, height);
        AndroidBitmap_unlockPixels(env, bitmap);
    }

    // Run the network and marshal the detection output back to Java.
    {
        // Mean subtraction and scaling must match the preprocessing the model
        // was trained with (x' = (x - 127.5) * 0.007843 maps [0,255] -> [-1,1]).
        const float mean_vals[3] = {127.5f, 127.5f, 127.5f};
        const float scale[3] = {0.007843f, 0.007843f, 0.007843f};

        in.substract_mean_normalize(mean_vals, scale);

        ncnn::Extractor ex = ncnn_net.create_extractor();

        // The model is header-encrypted, so blobs are addressed by the numeric
        // ids from MobileNetSSD_deploy.id.h instead of ex.input("data", in).
        ex.input(MobileNetSSD_deploy_param_id::BLOB_data, in);

        ncnn::Mat out;
        // Plain-text equivalent would be ex.extract("detection_out", out).
        ex.extract(MobileNetSSD_deploy_param_id::BLOB_detection_out, out);

        int output_wsize = out.w;
        int output_hsize = out.h;
        int total = output_wsize * output_hsize;

        // BUG FIX: the original code allocated only out.w floats with
        // NewFloatArray(output_wsize) but then copied out.w * out.h floats,
        // overflowing the Java array. It also copied through a pointer to
        // row 0, which is only correct when Mat rows are contiguous. Copy
        // each row explicitly into a contiguous buffer instead.
        std::vector<jfloat> output(total);
        for (int i = 0; i < output_hsize; i++) {
            const float* row = out.row(i);
            for (int j = 0; j < output_wsize; j++) {
                output[i * output_wsize + j] = row[j];
            }
        }

        jfloatArray jOutputData = env->NewFloatArray(total);
        if (jOutputData == nullptr) return nullptr;
        env->SetFloatArrayRegion(jOutputData, 0, total, output.data());

        return jOutputData;
    }
}
}


================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/allocator.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2018 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_ALLOCATOR_H
#define NCNN_ALLOCATOR_H

#ifdef _WIN32
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#else
#include <pthread.h>
#endif

#include <stdlib.h>
#include <list>

namespace ncnn {

// the alignment of all the allocated buffers
#define MALLOC_ALIGN    16

// Aligns a pointer to the specified number of bytes
// ptr Aligned pointer
// n Alignment size that must be a power of two
// Rounds ptr UP to the next multiple of n by adding n-1 and masking off the
// low bits (for a power-of-two n, the size_t value of -n is the mask ~(n-1)).
// NOTE(review): the pointer round-trips through size_t; uintptr_t is the
// strictly correct type, though they match on the platforms ncnn targets.
template<typename _Tp> static inline _Tp* alignPtr(_Tp* ptr, int n=(int)sizeof(_Tp))
{
    return (_Tp*)(((size_t)ptr + n-1) & -n);
}

// Aligns a buffer size to the specified number of bytes
// The function returns the minimum number that is greater or equal to sz and is divisible by n
// sz Buffer size to align
// n Alignment size that must be a power of two
static inline size_t alignSize(size_t sz, int n)
{
    // Round sz up to the next multiple of n (n must be a power of two):
    // add n-1, then clear the low bits with the complement of that mask.
    const size_t mask = (size_t)n - 1;
    return (sz + mask) & ~mask;
}

// Allocates size bytes whose start address is aligned to MALLOC_ALIGN.
// Over-allocates by sizeof(void*) + MALLOC_ALIGN, then stores the raw
// malloc pointer in the void* slot directly before the aligned address so
// fastFree can recover and free it. Returns 0 when malloc fails.
static inline void* fastMalloc(size_t size)
{
    unsigned char* udata = (unsigned char*)malloc(size + sizeof(void*) + MALLOC_ALIGN);
    if (!udata)
        return 0;
    unsigned char** adata = alignPtr((unsigned char**)udata + 1, MALLOC_ALIGN);
    adata[-1] = udata;
    return adata;
}

// Frees a buffer obtained from fastMalloc by recovering the raw malloc
// pointer stashed just before the aligned address. Safe to call with NULL.
static inline void fastFree(void* ptr)
{
    if (ptr)
    {
        unsigned char* udata = ((unsigned char**)ptr)[-1];
        free(udata);
    }
}

// exchange-add operation for atomic operations on reference counters
// NCNN_XADD(addr, delta) atomically adds delta to *addr and returns the
// PREVIOUS value; each branch below picks the intrinsic available on the
// detected compiler/platform.
#if defined __INTEL_COMPILER && !(defined WIN32 || defined _WIN32)
// atomic increment on the linux version of the Intel(tm) compiler
#  define NCNN_XADD(addr, delta) (int)_InterlockedExchangeAdd(const_cast<void*>(reinterpret_cast<volatile void*>(addr)), delta)
#elif defined __GNUC__
#  if defined __clang__ && __clang_major__ >= 3 && !defined __ANDROID__ && !defined __EMSCRIPTEN__ && !defined(__CUDACC__)
#    ifdef __ATOMIC_ACQ_REL
#      define NCNN_XADD(addr, delta) __c11_atomic_fetch_add((_Atomic(int)*)(addr), delta, __ATOMIC_ACQ_REL)
#    else
#      define NCNN_XADD(addr, delta) __atomic_fetch_add((_Atomic(int)*)(addr), delta, 4)
#    endif
#  else
#    if defined __ATOMIC_ACQ_REL && !defined __clang__
// version for gcc >= 4.7
#      define NCNN_XADD(addr, delta) (int)__atomic_fetch_add((unsigned*)(addr), (unsigned)(delta), __ATOMIC_ACQ_REL)
#    else
#      define NCNN_XADD(addr, delta) (int)__sync_fetch_and_add((unsigned*)(addr), (unsigned)(delta))
#    endif
#  endif
#elif defined _MSC_VER && !defined RC_INVOKED
#  include <intrin.h>
#  define NCNN_XADD(addr, delta) (int)_InterlockedExchangeAdd((long volatile*)addr, delta)
#else
// thread-unsafe branch
static inline int NCNN_XADD(int* addr, int delta) { int tmp = *addr; *addr += delta; return tmp; }
#endif

#ifdef _WIN32
// Minimal mutex wrapper over a Windows slim reader/writer lock, used here
// only in exclusive mode (plain mutex semantics).
class Mutex
{
public:
    Mutex() { InitializeSRWLock(&srwlock); }
    ~Mutex() {}
    void lock() { AcquireSRWLockExclusive(&srwlock); }
    void unlock() { ReleaseSRWLockExclusive(&srwlock); }
private:
    // NOTE SRWLock is available from windows vista
    SRWLOCK srwlock;
};
#else // _WIN32
// pthread-based mutex wrapper for all non-Windows platforms.
class Mutex
{
public:
    Mutex() { pthread_mutex_init(&mutex, 0); }
    ~Mutex() { pthread_mutex_destroy(&mutex); }
    void lock() { pthread_mutex_lock(&mutex); }
    void unlock() { pthread_mutex_unlock(&mutex); }
private:
    pthread_mutex_t mutex;
};
#endif // _WIN32

// Abstract allocator interface used for blob and workspace memory.
// NOTE(review): the destructor is declared pure virtual; its out-of-line
// definition must exist in the ncnn library for derived classes to link.
class Allocator
{
public:
    virtual ~Allocator() = 0;
    virtual void* fastMalloc(size_t size) = 0;
    virtual void fastFree(void* ptr) = 0;
};

// Thread-safe pooling allocator: buffers are kept in "budgets" (presumably the
// free list available for reuse) and "payouts" (presumably buffers currently
// handed out) - confirm against allocator.cpp. Both lists are mutex-protected.
class PoolAllocator : public Allocator
{
public:
    PoolAllocator();
    ~PoolAllocator();

    // ratio range 0 ~ 1
    // default cr = 0.75
    // A cached buffer is reused when its size is "close enough" per this ratio.
    void set_size_compare_ratio(float scr);

    // release all budgets immediately
    void clear();

    virtual void* fastMalloc(size_t size);
    virtual void fastFree(void* ptr);

private:
    Mutex budgets_lock;
    Mutex payouts_lock;
    unsigned int size_compare_ratio;// 0~256 (fixed-point form of the 0~1 ratio)
    std::list< std::pair<size_t, void*> > budgets;
    std::list< std::pair<size_t, void*> > payouts;
};

// Same pooling scheme as PoolAllocator but without any locking - for use
// when allocations happen from a single thread only.
class UnlockedPoolAllocator : public Allocator
{
public:
    UnlockedPoolAllocator();
    ~UnlockedPoolAllocator();

    // ratio range 0 ~ 1
    // default cr = 0.75
    void set_size_compare_ratio(float scr);

    // release all budgets immediately
    void clear();

    virtual void* fastMalloc(size_t size);
    virtual void fastFree(void* ptr);

private:
    unsigned int size_compare_ratio;// 0~256 (fixed-point form of the 0~1 ratio)
    std::list< std::pair<size_t, void*> > budgets;
    std::list< std::pair<size_t, void*> > payouts;
};

} // namespace ncnn

#endif // NCNN_ALLOCATOR_H


================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/benchmark.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_BENCHMARK_H
#define NCNN_BENCHMARK_H

#include "platform.h"
#include "mat.h"
#include "layer.h"

namespace ncnn {

// get now timestamp in ms
double get_current_time();

#if NCNN_BENCHMARK

void benchmark(const Layer* layer, double start, double end);
void benchmark(const Layer* layer, const Mat& bottom_blob, Mat& top_blob, double start, double end);

#endif // NCNN_BENCHMARK

} // namespace ncnn

#endif // NCNN_BENCHMARK_H


================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/blob.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_BLOB_H
#define NCNN_BLOB_H

#include <string>
#include <vector>
#include "platform.h"

namespace ncnn {

// A blob is an edge in the network graph: the output of exactly one layer
// (its producer) consumed as input by zero or more other layers.
class Blob
{
public:
    // empty
    Blob();

public:
#if NCNN_STRING
    // blob name
    std::string name;
#endif // NCNN_STRING
    // layer index which produce this blob as output
    int producer;
    // layer index which need this blob as input
    std::vector<int> consumers;
};

} // namespace ncnn

#endif // NCNN_BLOB_H


================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/cpu.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_CPU_H
#define NCNN_CPU_H

namespace ncnn {

// test optional cpu features
// neon = armv7 neon or aarch64 asimd
int cpu_support_arm_neon();
// vfpv4 = armv7 fp16 + fma
int cpu_support_arm_vfpv4();
// asimdhp = aarch64 asimd half precision
int cpu_support_arm_asimdhp();

// cpu info
int get_cpu_count();

// bind all threads on little clusters if powersave enabled
// affacts HMP arch cpu like ARM big.LITTLE
// only implemented on android at the moment
// switching powersave is expensive and not thread-safe
// 0 = all cores enabled(default)
// 1 = only little clusters enabled
// 2 = only big clusters enabled
// return 0 if success for setter function
int get_cpu_powersave();
int set_cpu_powersave(int powersave);

// misc function wrapper for openmp routines
int get_omp_num_threads();
void set_omp_num_threads(int num_threads);

int get_omp_dynamic();
void set_omp_dynamic(int dynamic);

} // namespace ncnn

#endif // NCNN_CPU_H


================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/layer.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_LAYER_H
#define NCNN_LAYER_H

#include <stdio.h>
#include <string>
#include <vector>
#include "mat.h"
#include "modelbin.h"
#include "paramdict.h"
#include "platform.h"

namespace ncnn {

class Allocator;
// Inference-time options consulted during forward passes; an instance can be
// installed process-wide via set_default_option().
class Option
{
public:
    // default option
    Option();

public:
    // light mode
    // intermediate blob will be recycled when enabled
    // enabled by default
    bool lightmode;

    // thread count
    // default value is the one returned by get_cpu_count()
    int num_threads;

    // blob memory allocator
    Allocator* blob_allocator;

    // workspace memory allocator
    Allocator* workspace_allocator;
};

// the global default option
// get_default_option() returns the process-wide Option used as the default
// argument of the Layer::forward* methods; set_default_option() replaces it.
const Option& get_default_option();
int set_default_option(const Option& opt);

// Base class for all network layers. Concrete layers override load_param /
// load_model to read their configuration, and one of the forward variants
// depending on whether they are one_blob_only and/or support_inplace.
class Layer
{
public:
    // empty
    Layer();
    // virtual destructor
    virtual ~Layer();

    // load layer specific parameter from parsed dict
    // return 0 if success
    virtual int load_param(const ParamDict& pd);

    // load layer specific weight data from model binary
    // return 0 if success
    virtual int load_model(const ModelBin& mb);

public:
    // one input and one output blob
    bool one_blob_only;

    // support inplace inference
    bool support_inplace;

public:
    // implement inference
    // return 0 if success
    // multi-blob and single-blob variants; the single-blob forms are used
    // when one_blob_only is set.
    virtual int forward(const std::vector<Mat>& bottom_blobs, std::vector<Mat>& top_blobs, const Option& opt = get_default_option()) const;
    virtual int forward(const Mat& bottom_blob, Mat& top_blob, const Option& opt = get_default_option()) const;

    // implement inplace inference
    // return 0 if success
    virtual int forward_inplace(std::vector<Mat>& bottom_top_blobs, const Option& opt = get_default_option()) const;
    virtual int forward_inplace(Mat& bottom_top_blob, const Option& opt = get_default_option()) const;

public:
#if NCNN_STRING
    // layer type name
    std::string type;
    // layer name
    std::string name;
#endif // NCNN_STRING
    // blob index which this layer needs as input
    std::vector<int> bottoms;
    // blob index which this layer produces as output
    std::vector<int> tops;
};

// layer factory function
typedef Layer* (*layer_creator_func)();

// One row of the static layer registry: an optional human-readable type name
// plus the factory that creates an instance of that layer type.
struct layer_registry_entry
{
#if NCNN_STRING
    // layer type name
    const char* name;
#endif // NCNN_STRING
    // layer factory entry
    layer_creator_func creator;
};

#if NCNN_STRING
// get layer type from type name
int layer_to_index(const char* type);
// create layer from type name
Layer* create_layer(const char* type);
#endif // NCNN_STRING
// create layer from layer type
Layer* create_layer(int index);

// Expands to a factory function "<name>_layer_creator" that returns a new
// instance of the given layer class; used to populate the registry.
#define DEFINE_LAYER_CREATOR(name) \
    ::ncnn::Layer* name##_layer_creator() { return new name; }

} // namespace ncnn

#endif // NCNN_LAYER_H


================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/layer_type.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_LAYER_TYPE_H
#define NCNN_LAYER_TYPE_H

namespace ncnn {

// enumeration of all built-in layer type indices;
// the concrete values are generated at build time into layer_type_enum.h
namespace LayerType {
enum
{
#include "layer_type_enum.h"
    // bit flag reserved for custom (user-registered) layer indices,
    // keeping them disjoint from the generated built-in values above
    CustomBit = (1<<8),
};
} // namespace LayerType

} // namespace ncnn

#endif // NCNN_LAYER_TYPE_H


================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/layer_type_enum.h
================================================
// Layer Type Enum header
//
// This file is auto-generated by cmake, don't edit it.
//
// These enumerators form the body of the anonymous enum in
// ncnn::LayerType (layer_type.h), which appends CustomBit after them.

AbsVal = 0,
ArgMax = 1,
BatchNorm = 2,
Bias = 3,
BNLL = 4,
Concat = 5,
Convolution = 6,
Crop = 7,
Deconvolution = 8,
Dropout = 9,
Eltwise = 10,
ELU = 11,
Embed = 12,
Exp = 13,
Flatten = 14,
InnerProduct = 15,
Input = 16,
Log = 17,
LRN = 18,
MemoryData = 19,
MVN = 20,
Pooling = 21,
Power = 22,
PReLU = 23,
Proposal = 24,
Reduction = 25,
ReLU = 26,
Reshape = 27,
ROIPooling = 28,
Scale = 29,
Sigmoid = 30,
Slice = 31,
Softmax = 32,
Split = 33,
SPP = 34,
TanH = 35,
Threshold = 36,
Tile = 37,
RNN = 38,
LSTM = 39,
BinaryOp = 40,
UnaryOp = 41,
ConvolutionDepthWise = 42,
Padding = 43,
Squeeze = 44,
ExpandDims = 45,
Normalize = 46,
Permute = 47,
PriorBox = 48,
DetectionOutput = 49,
Interp = 50,
DeconvolutionDepthWise = 51,
ShuffleChannel = 52,
InstanceNorm = 53,
Clip = 54,
Reorg = 55,
YoloDetectionOutput = 56,
Quantize = 57,
Dequantize = 58,
Yolov3DetectionOutput = 59,
PSROIPooling = 60,
ROIAlign = 61,



================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/mat.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_MAT_H
#define NCNN_MAT_H

#include <stdlib.h>
#include <string.h>
#if __ARM_NEON
#include <arm_neon.h>
#endif
#include "allocator.h"
#include "platform.h"

namespace ncnn {

// the three dimension matrix
// reference-counted tensor container with up to 3 dims (w, h, c);
// channel data is padded to 16-byte alignment (see cstep below)
class Mat
{
public:
    // empty
    Mat();
    // vec
    Mat(int w, size_t elemsize = 4u, Allocator* allocator = 0);
    // image
    Mat(int w, int h, size_t elemsize = 4u, Allocator* allocator = 0);
    // dim
    Mat(int w, int h, int c, size_t elemsize = 4u, Allocator* allocator = 0);
    // copy (shallow; shares data and bumps the reference count)
    Mat(const Mat& m);
    // external vec (data is not owned and never freed by this Mat)
    Mat(int w, void* data, size_t elemsize = 4u, Allocator* allocator = 0);
    // external image
    Mat(int w, int h, void* data, size_t elemsize = 4u, Allocator* allocator = 0);
    // external dim
    Mat(int w, int h, int c, void* data, size_t elemsize = 4u, Allocator* allocator = 0);
    // release
    ~Mat();
    // assign
    Mat& operator=(const Mat& m);
    // set all
    void fill(float v);
    void fill(int v);
    template <typename T> void fill(T v);
    // deep copy
    Mat clone(Allocator* allocator = 0) const;
    // reshape vec
    Mat reshape(int w, Allocator* allocator = 0) const;
    // reshape image
    Mat reshape(int w, int h, Allocator* allocator = 0) const;
    // reshape dim
    Mat reshape(int w, int h, int c, Allocator* allocator = 0) const;
    // allocate vec
    void create(int w, size_t elemsize = 4u, Allocator* allocator = 0);
    // allocate image
    void create(int w, int h, size_t elemsize = 4u, Allocator* allocator = 0);
    // allocate dim
    void create(int w, int h, int c, size_t elemsize = 4u, Allocator* allocator = 0);
    // refcount++
    void addref();
    // refcount--
    void release();

    bool empty() const;
    size_t total() const;

    // data reference (views share the buffer; refcount is not bumped)
    Mat channel(int c);
    const Mat channel(int c) const;
    float* row(int y);
    const float* row(int y) const;
    template<typename T> T* row(int y);
    template<typename T> const T* row(int y) const;

    // range reference
    Mat channel_range(int c, int channels);
    const Mat channel_range(int c, int channels) const;
    Mat row_range(int y, int rows);
    const Mat row_range(int y, int rows) const;
    Mat range(int x, int n);
    const Mat range(int x, int n) const;

    // access raw data
    template<typename T> operator T*();
    template<typename T> operator const T*() const;

    // convenient access float vec element
    float& operator[](int i);
    const float& operator[](int i) const;

#if NCNN_PIXEL
    enum
    {
        PIXEL_CONVERT_SHIFT = 16,
        PIXEL_FORMAT_MASK = 0x0000ffff,
        PIXEL_CONVERT_MASK = 0xffff0000,

        PIXEL_RGB       = 1,
        PIXEL_BGR       = (1 << 1),
        PIXEL_GRAY      = (1 << 2),
        PIXEL_RGBA      = (1 << 3),

        // conversions encode source format in the low half,
        // destination format in the high half
        PIXEL_RGB2BGR   = PIXEL_RGB | (PIXEL_BGR << PIXEL_CONVERT_SHIFT),
        PIXEL_RGB2GRAY  = PIXEL_RGB | (PIXEL_GRAY << PIXEL_CONVERT_SHIFT),

        PIXEL_BGR2RGB   = PIXEL_BGR | (PIXEL_RGB << PIXEL_CONVERT_SHIFT),
        PIXEL_BGR2GRAY  = PIXEL_BGR | (PIXEL_GRAY << PIXEL_CONVERT_SHIFT),

        PIXEL_GRAY2RGB  = PIXEL_GRAY | (PIXEL_RGB << PIXEL_CONVERT_SHIFT),
        PIXEL_GRAY2BGR  = PIXEL_GRAY | (PIXEL_BGR << PIXEL_CONVERT_SHIFT),

        PIXEL_RGBA2RGB  = PIXEL_RGBA | (PIXEL_RGB << PIXEL_CONVERT_SHIFT),
        PIXEL_RGBA2BGR  = PIXEL_RGBA | (PIXEL_BGR << PIXEL_CONVERT_SHIFT),
        PIXEL_RGBA2GRAY = PIXEL_RGBA | (PIXEL_GRAY << PIXEL_CONVERT_SHIFT),
    };
    // convenient construct from pixel data
    static Mat from_pixels(const unsigned char* pixels, int type, int w, int h, Allocator* allocator = 0);
    // convenient construct from pixel data and resize to specific size
    static Mat from_pixels_resize(const unsigned char* pixels, int type, int w, int h, int target_width, int target_height, Allocator* allocator = 0);

    // convenient export to pixel data
    void to_pixels(unsigned char* pixels, int type) const;
    // convenient export to pixel data and resize to specific size
    void to_pixels_resize(unsigned char* pixels, int type, int target_width, int target_height) const;
#endif // NCNN_PIXEL

    // subtract channel-wise mean values, then multiply by normalize values, pass 0 to skip
    void substract_mean_normalize(const float* mean_vals, const float* norm_vals);

    // convenient construct from half precision floating point data
    static Mat from_float16(const unsigned short* data, int size);

    // pointer to the data
    void* data;

    // pointer to the reference counter
    // when points to user-allocated data, the pointer is NULL
    int* refcount;

    // element size in bytes
    // 4 = float32/int32
    // 2 = float16
    // 1 = int8/uint8
    // 0 = empty
    size_t elemsize;

    // the allocator
    Allocator* allocator;

    // the dimensionality
    int dims;

    // extents: width, height, channel count
    int w;
    int h;
    int c;

    // elements (not bytes) from the start of one channel to the next;
    // may exceed w * h because each channel is padded to 16-byte alignment
    size_t cstep;
};

// misc function
#if NCNN_PIXEL
// convert yuv420sp(nv21) to rgb, the fast approximate version
void yuv420sp2rgb(const unsigned char* yuv420sp, int w, int h, unsigned char* rgb);
// image pixel bilinear resize, one variant per interleaved channel count
void resize_bilinear_c1(const unsigned char* src, int srcw, int srch, unsigned char* dst, int w, int h);
void resize_bilinear_c2(const unsigned char* src, int srcw, int srch, unsigned char* dst, int w, int h);
void resize_bilinear_c3(const unsigned char* src, int srcw, int srch, unsigned char* dst, int w, int h);
void resize_bilinear_c4(const unsigned char* src, int srcw, int srch, unsigned char* dst, int w, int h);
// image pixel bilinear resize, convenient wrapper for yuv420sp(nv21)
void resize_bilinear_yuv420sp(const unsigned char* src, int srcw, int srch, unsigned char* dst, int w, int h);
#endif // NCNN_PIXEL

// mat process
// border handling types for copy_make_border
enum
{
    BORDER_CONSTANT = 0,
    BORDER_REPLICATE = 1,
};
void copy_make_border(const Mat& src, Mat& dst, int top, int bottom, int left, int right, int type, float v, Allocator* allocator = 0, int num_threads = 1);
void copy_cut_border(const Mat& src, Mat& dst, int top, int bottom, int left, int right, Allocator* allocator = 0, int num_threads = 1);
void resize_bilinear(const Mat& src, Mat& dst, int w, int h, Allocator* allocator = 0, int num_threads = 1);

// default: a null Mat with no buffer attached
inline Mat::Mat()
    : data(0), refcount(0), elemsize(0), allocator(0), dims(0), w(0), h(0), c(0), cstep(0)
{
}

// 1-dim owning constructor; allocation and field setup happen in create()
inline Mat::Mat(int _w, size_t _elemsize, Allocator* allocator)
    : data(0), refcount(0), dims(0)
{
    create(_w, _elemsize, allocator);
}

// 2-dim owning constructor
inline Mat::Mat(int _w, int _h, size_t _elemsize, Allocator* allocator)
    : data(0), refcount(0), dims(0)
{
    create(_w, _h, _elemsize, allocator);
}

// 3-dim owning constructor
inline Mat::Mat(int _w, int _h, int _c, size_t _elemsize, Allocator* allocator)
    : data(0), refcount(0), dims(0)
{
    create(_w, _h, _c, _elemsize, allocator);
}

// shallow copy: shares the buffer and bumps the shared reference count
inline Mat::Mat(const Mat& m)
    : data(m.data), refcount(m.refcount), elemsize(m.elemsize), allocator(m.allocator), dims(m.dims)
{
    if (refcount)
        NCNN_XADD(refcount, 1);

    w = m.w;
    h = m.h;
    c = m.c;

    cstep = m.cstep;
}

// external 1-dim data: refcount stays null, so release() never frees it
inline Mat::Mat(int _w, void* _data, size_t _elemsize, Allocator* _allocator)
    : data(_data), refcount(0), elemsize(_elemsize), allocator(_allocator), dims(1)
{
    w = _w;
    h = 1;
    c = 1;

    cstep = w;
}

// external 2-dim data (not owned)
inline Mat::Mat(int _w, int _h, void* _data, size_t _elemsize, Allocator* _allocator)
    : data(_data), refcount(0), elemsize(_elemsize), allocator(_allocator), dims(2)
{
    w = _w;
    h = _h;
    c = 1;

    cstep = w * h;
}

// external 3-dim data (not owned); channels are assumed laid out with the
// same 16-byte aligned stride that create() would produce
inline Mat::Mat(int _w, int _h, int _c, void* _data, size_t _elemsize, Allocator* _allocator)
    : data(_data), refcount(0), elemsize(_elemsize), allocator(_allocator), dims(3)
{
    w = _w;
    h = _h;
    c = _c;

    cstep = alignSize(w * h * elemsize, 16) / elemsize;
}

inline Mat::~Mat()
{
    release();
}

// assign: adopt m's buffer, releasing whatever *this held before
inline Mat& Mat::operator=(const Mat& m)
{
    if (this == &m)
        return *this;

    // addref m first so that release() below cannot free a buffer
    // that *this and m happen to share
    if (m.refcount)
        NCNN_XADD(m.refcount, 1);

    release();

    data = m.data;
    refcount = m.refcount;
    elemsize = m.elemsize;
    allocator = m.allocator;

    dims = m.dims;
    w = m.w;
    h = m.h;
    c = m.c;

    cstep = m.cstep;

    return *this;
}

// set every element to _v, interpreting the buffer as float32
inline void Mat::fill(float _v)
{
    int size = total();
    float* ptr = (float*)data;

#if __ARM_NEON
    // nn: 4-float vector stores, remain: scalar tail
    int nn = size >> 2;
    int remain = size - (nn << 2);
#else
    int remain = size;
#endif // __ARM_NEON

#if __ARM_NEON
    float32x4_t _c = vdupq_n_f32(_v);
#if __aarch64__
    if (nn > 0)
    {
    // store one 16-byte vector per iteration with post-increment
    asm volatile (
        "0:                             \n"
        "subs       %w0, %w0, #1        \n"
        "st1        {%4.4s}, [%1], #16  \n"
        "bne        0b                  \n"
        : "=r"(nn),     // %0
          "=r"(ptr)     // %1
        : "0"(nn),
          "1"(ptr),
          "w"(_c)       // %4
        : "cc", "memory"
    );
    }
#else
    if (nn > 0)
    {
    asm volatile(
        "0:                             \n"
        "subs       %0, #1              \n"
        "vst1.f32   {%e4-%f4}, [%1 :128]!\n"
        "bne        0b                  \n"
        : "=r"(nn),     // %0
          "=r"(ptr)     // %1
        : "0"(nn),
          "1"(ptr),
          "w"(_c)       // %4
        : "cc", "memory"
    );
    }
#endif // __aarch64__
#endif // __ARM_NEON
    // scalar tail (or the whole buffer when NEON is unavailable)
    for (; remain>0; remain--)
    {
        *ptr++ = _v;
    }
}

// set every element to _v, interpreting the buffer as int32
inline void Mat::fill(int _v)
{
    int size = total();
    int* ptr = (int*)data;

#if __ARM_NEON
    // nn: 4-int vector stores, remain: scalar tail
    int nn = size >> 2;
    int remain = size - (nn << 2);
#else
    int remain = size;
#endif // __ARM_NEON

#if __ARM_NEON
    int32x4_t _c = vdupq_n_s32(_v);
#if __aarch64__
    if (nn > 0)
    {
    // store one 16-byte vector per iteration with post-increment
    asm volatile (
        "0:                             \n"
        "subs       %w0, %w0, #1        \n"
        "st1        {%4.4s}, [%1], #16  \n"
        "bne        0b                  \n"
        : "=r"(nn),     // %0
          "=r"(ptr)     // %1
        : "0"(nn),
          "1"(ptr),
          "w"(_c)       // %4
        : "cc", "memory"
    );
    }
#else
    if (nn > 0)
    {
    asm volatile(
        "0:                             \n"
        "subs       %0, #1              \n"
        "vst1.s32   {%e4-%f4}, [%1 :128]!\n"
        "bne        0b                  \n"
        : "=r"(nn),     // %0
          "=r"(ptr)     // %1
        : "0"(nn),
          "1"(ptr),
          "w"(_c)       // %4
        : "cc", "memory"
    );
    }
#endif // __aarch64__
#endif // __ARM_NEON
    // scalar tail (or the whole buffer when NEON is unavailable)
    for (; remain>0; remain--)
    {
        *ptr++ = _v;
    }
}

// generic fill: assign _v to every element, treating the buffer
// as packed values of type T
template <typename T>
inline void Mat::fill(T _v)
{
    T* cur = (T*)data;
    T* const stop = cur + total();
    while (cur != stop)
    {
        *cur++ = _v;
    }
}

// deep copy: allocate a fresh buffer of the same shape and copy the
// whole padded extent (total() already includes channel padding)
inline Mat Mat::clone(Allocator* allocator) const
{
    if (empty())
        return Mat();

    Mat m;
    if (dims == 1)
        m.create(w, elemsize, allocator);
    else if (dims == 2)
        m.create(w, h, elemsize, allocator);
    else if (dims == 3)
        m.create(w, h, c, elemsize, allocator);

    if (total() > 0)
    {
        memcpy(m.data, data, total() * elemsize);
    }

    return m;
}

// reshape to a 1-dim vec of _w elements; element count must be preserved
inline Mat Mat::reshape(int _w, Allocator* allocator) const
{
    if (w * h * c != _w)
        return Mat();

    if (dims == 3 && cstep != (size_t)w * h)
    {
        // channels carry alignment padding: copy them into a packed buffer
        Mat m;
        m.create(_w, elemsize, allocator);

        // flatten
        for (int i=0; i<c; i++)
        {
            const void* ptr = (unsigned char*)data + i * cstep * elemsize;
            void* mptr = (unsigned char*)m.data + i * w * h * elemsize;
            memcpy(mptr, ptr, w * h * elemsize);
        }

        return m;
    }

    // no padding involved: share the buffer and just reinterpret the shape
    Mat m = *this;

    m.dims = 1;
    m.w = _w;
    m.h = 1;
    m.c = 1;

    m.cstep = _w;

    return m;
}

// reshape to a 2-dim image of _w x _h; element count must be preserved
inline Mat Mat::reshape(int _w, int _h, Allocator* allocator) const
{
    if (w * h * c != _w * _h)
        return Mat();

    if (dims == 3 && cstep != (size_t)w * h)
    {
        // channels carry alignment padding: copy them into a packed buffer
        Mat m;
        m.create(_w, _h, elemsize, allocator);

        // flatten
        for (int i=0; i<c; i++)
        {
            const void* ptr = (unsigned char*)data + i * cstep * elemsize;
            void* mptr = (unsigned char*)m.data + i * w * h * elemsize;
            memcpy(mptr, ptr, w * h * elemsize);
        }

        return m;
    }

    // no padding involved: share the buffer and just reinterpret the shape
    Mat m = *this;

    m.dims = 2;
    m.w = _w;
    m.h = _h;
    m.c = 1;

    m.cstep = _w * _h;

    return m;
}

// reshape to a 3-dim blob of _w x _h x _c; element count must be preserved
inline Mat Mat::reshape(int _w, int _h, int _c, Allocator* allocator) const
{
    if (w * h * c != _w * _h * _c)
        return Mat();

    if (dims < 3)
    {
        // going from packed 1/2-dim data to channels: if the target channel
        // stride needs alignment padding, copy each channel into place
        if ((size_t)_w * _h != alignSize(_w * _h * elemsize, 16) / elemsize)
        {
            Mat m;
            m.create(_w, _h, _c, elemsize, allocator);

            // align channel
            for (int i=0; i<_c; i++)
            {
                const void* ptr = (unsigned char*)data + i * _w * _h * elemsize;
                void* mptr = (unsigned char*)m.data + i * m.cstep * m.elemsize;
                memcpy(mptr, ptr, _w * _h * elemsize);
            }

            return m;
        }
    }
    else if (c != _c)
    {
        // flatten and then align
        Mat tmp = reshape(_w * _h * _c, allocator);
        return tmp.reshape(_w, _h, _c, allocator);
    }

    // shapes are stride-compatible: share the buffer
    Mat m = *this;

    m.dims = 3;
    m.w = _w;
    m.h = _h;
    m.c = _c;

    m.cstep = alignSize(_w * _h * elemsize, 16) / elemsize;

    return m;
}

// allocate a 1-dim vec; reuses the current buffer when the shape,
// element size and allocator all match exactly
inline void Mat::create(int _w, size_t _elemsize, Allocator* _allocator)
{
    if (dims == 1 && w == _w && elemsize == _elemsize && allocator == _allocator)
        return;

    release();

    elemsize = _elemsize;
    allocator = _allocator;

    dims = 1;
    w = _w;
    h = 1;
    c = 1;

    cstep = w;

    if (total() > 0)
    {
        size_t totalsize = alignSize(total() * elemsize, 4);
        // the reference counter is stored right after the payload,
        // in the same allocation
        if (allocator)
            data = allocator->fastMalloc(totalsize + (int)sizeof(*refcount));
        else
            data = fastMalloc(totalsize + (int)sizeof(*refcount));
        refcount = (int*)(((unsigned char*)data) + totalsize);
        *refcount = 1;
    }
}

// allocate a 2-dim image; reuses the current buffer on an exact match
inline void Mat::create(int _w, int _h, size_t _elemsize, Allocator* _allocator)
{
    if (dims == 2 && w == _w && h == _h && elemsize == _elemsize && allocator == _allocator)
        return;

    release();

    elemsize = _elemsize;
    allocator = _allocator;

    dims = 2;
    w = _w;
    h = _h;
    c = 1;

    cstep = w * h;

    if (total() > 0)
    {
        size_t totalsize = alignSize(total() * elemsize, 4);
        // the reference counter is stored right after the payload,
        // in the same allocation
        if (allocator)
            data = allocator->fastMalloc(totalsize + (int)sizeof(*refcount));
        else
            data = fastMalloc(totalsize + (int)sizeof(*refcount));
        refcount = (int*)(((unsigned char*)data) + totalsize);
        *refcount = 1;
    }
}

// allocate a 3-dim blob; each channel stride is padded to 16 bytes
inline void Mat::create(int _w, int _h, int _c, size_t _elemsize, Allocator* _allocator)
{
    if (dims == 3 && w == _w && h == _h && c == _c && elemsize == _elemsize && allocator == _allocator)
        return;

    release();

    elemsize = _elemsize;
    allocator = _allocator;

    dims = 3;
    w = _w;
    h = _h;
    c = _c;

    cstep = alignSize(w * h * elemsize, 16) / elemsize;

    if (total() > 0)
    {
        size_t totalsize = alignSize(total() * elemsize, 4);
        // the reference counter is stored right after the payload,
        // in the same allocation
        if (allocator)
            data = allocator->fastMalloc(totalsize + (int)sizeof(*refcount));
        else
            data = fastMalloc(totalsize + (int)sizeof(*refcount));
        refcount = (int*)(((unsigned char*)data) + totalsize);
        *refcount = 1;
    }
}

// refcount++ (no-op for external, non-owning data where refcount is null)
inline void Mat::addref()
{
    if (refcount)
        NCNN_XADD(refcount, 1);
}

// refcount--; frees the buffer when this Mat was the last owner,
// then resets *this to the null state either way
inline void Mat::release()
{
    if (refcount && NCNN_XADD(refcount, -1) == 1)
    {
        if (allocator)
            allocator->fastFree(data);
        else
            fastFree(data);
    }

    data = 0;

    elemsize = 0;

    dims = 0;
    w = 0;
    h = 0;
    c = 0;

    cstep = 0;

    refcount = 0;
}

// true when no buffer is attached or the shape holds zero elements
inline bool Mat::empty() const
{
    return !data || 0 == total();
}

// padded element count: per-channel stride (cstep) times channel count
inline size_t Mat::total() const
{
    return (size_t)c * cstep;
}

// single-channel 2-dim view; shares data, refcount is not bumped
inline Mat Mat::channel(int c)
{
    return Mat(w, h, (unsigned char*)data + cstep * c * elemsize, elemsize, allocator);
}

inline const Mat Mat::channel(int c) const
{
    return Mat(w, h, (unsigned char*)data + cstep * c * elemsize, elemsize, allocator);
}

// pointer to the start of row y (float32 view)
inline float* Mat::row(int y)
{
    return (float*)data + w * y;
}

inline const float* Mat::row(int y) const
{
    return (const float*)data + w * y;
}

// pointer to the start of row y, as elements of type T
template <typename T>
inline T* Mat::row(int y)
{
    return (T*)data + w * y;
}

template <typename T>
inline const T* Mat::row(int y) const
{
    return (const T*)data + w * y;
}

// view over [_c, _c + channels) channels; shares data
inline Mat Mat::channel_range(int _c, int channels)
{
    return Mat(w, h, channels, (unsigned char*)data + cstep * _c * elemsize, elemsize, allocator);
}

inline const Mat Mat::channel_range(int _c, int channels) const
{
    return Mat(w, h, channels, (unsigned char*)data + cstep * _c * elemsize, elemsize, allocator);
}

// view over [y, y + rows) rows; shares data
inline Mat Mat::row_range(int y, int rows)
{
    return Mat(w, rows, (unsigned char*)data + w * y * elemsize, elemsize, allocator);
}

inline const Mat Mat::row_range(int y, int rows) const
{
    return Mat(w, rows, (unsigned char*)data + w * y * elemsize, elemsize, allocator);
}

// 1-dim view over [x, x + n) elements; shares data
inline Mat Mat::range(int x, int n)
{
    return Mat(n, (unsigned char*)data + x * elemsize, elemsize, allocator);
}

inline const Mat Mat::range(int x, int n) const
{
    return Mat(n, (unsigned char*)data + x * elemsize, elemsize, allocator);
}

// raw typed access to the underlying buffer
template <typename T>
inline Mat::operator T*()
{
    return (T*)data;
}

template <typename T>
inline Mat::operator const T*() const
{
    return (const T*)data;
}

// convenient float element access (buffer is viewed as float32)
inline float& Mat::operator[](int i)
{
    return ((float*)data)[i];
}

inline const float& Mat::operator[](int i) const
{
    return ((const float*)data)[i];
}

} // namespace ncnn

#endif // NCNN_MAT_H


================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/modelbin.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_MODELBIN_H
#define NCNN_MODELBIN_H

#include <stdio.h>
#include "mat.h"
#include "platform.h"

namespace ncnn {

class Net;
// abstract weight-data reader used while loading a network;
// concrete sources: stdio file, raw memory blob, Mat array (below)
class ModelBin
{
public:
    // virtual destructor: ModelBin is a polymorphic base (pure virtual
    // load), so deleting a derived reader through a ModelBin* must run
    // the derived destructor instead of invoking undefined behavior
    virtual ~ModelBin() {}

    // element type
    // 0 = auto
    // 1 = float32
    // 2 = float16
    // 3 = int8
    // load vec
    virtual Mat load(int w, int type) const = 0;
    // load image
    virtual Mat load(int w, int h, int type) const;
    // load dim
    virtual Mat load(int w, int h, int c, int type) const;
};

#if NCNN_STDIO
// weight reader backed by an open stdio FILE*
class ModelBinFromStdio : public ModelBin
{
public:
    // construct from file
    ModelBinFromStdio(FILE* binfp);

    virtual Mat load(int w, int type) const;

protected:
    // file handle supplied by the caller; not closed here
    FILE* binfp;
};
#endif // NCNN_STDIO

// weight reader backed by an external memory blob
class ModelBinFromMemory : public ModelBin
{
public:
    // construct from external memory
    ModelBinFromMemory(const unsigned char*& mem);

    virtual Mat load(int w, int type) const;

protected:
    // reference to the caller's cursor; presumably advanced as data is
    // consumed so the caller can see bytes read — implementation not shown here
    const unsigned char*& mem;
};

// weight reader backed by an array of pre-built Mat blobs
class ModelBinFromMatArray : public ModelBin
{
public:
    // construct from weight blob array
    ModelBinFromMatArray(const Mat* weights);

    virtual Mat load(int w, int type) const;

protected:
    // mutable: load() is const yet apparently advances this cursor
    mutable const Mat* weights;
};

} // namespace ncnn

#endif // NCNN_MODELBIN_H


================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/net.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_NET_H
#define NCNN_NET_H

#include <stdio.h>
#include <vector>
#include "blob.h"
#include "layer.h"
#include "mat.h"
#include "platform.h"

namespace ncnn {

class Extractor;
// a loaded network: layer graph plus weights, from which Extractors
// are created to run inference
class Net
{
public:
    // empty init
    Net();
    // clear and destroy
    ~Net();

#if NCNN_STRING
    // register custom layer by layer type name
    // return 0 if success
    int register_custom_layer(const char* type, layer_creator_func creator);
#endif // NCNN_STRING
    // register custom layer by layer type
    // return 0 if success
    int register_custom_layer(int index, layer_creator_func creator);

#if NCNN_STDIO
#if NCNN_STRING
    // load network structure from plain param file
    // return 0 if success
    int load_param(FILE* fp);
    int load_param(const char* protopath);
    int load_param_mem(const char* mem);
#endif // NCNN_STRING
    // load network structure from binary param file
    // return 0 if success
    int load_param_bin(FILE* fp);
    int load_param_bin(const char* protopath);

    // load network weight data from model file
    // return 0 if success
    int load_model(FILE* fp);
    int load_model(const char* modelpath);
#endif // NCNN_STDIO

    // load network structure from external memory
    // memory pointer must be 32-bit aligned
    // return bytes consumed
    int load_param(const unsigned char* mem);

    // reference network weight data from external memory
    // weight data is not copied but referenced
    // so external memory should be retained when used
    // memory pointer must be 32-bit aligned
    // return bytes consumed
    int load_model(const unsigned char* mem);

    // unload network structure and weight data
    void clear();

    // construct an Extractor from network
    Extractor create_extractor() const;

public:
    // enable winograd convolution optimization
    // improve convolution 3x3 stride1 performance, may consume more memory
    // changes should be applied before loading network structure and weight
    // enabled by default
    int use_winograd_convolution;

    // enable sgemm convolution optimization
    // improve convolution 1x1 stride1 performance, may consume more memory
    // changes should be applied before loading network structure and weight
    // enabled by default
    int use_sgemm_convolution;

    // enable quantized int8 inference
    // use low-precision int8 path for quantized model
    // changes should be applied before loading network structure and weight
    // enabled by default
    int use_int8_inference;

protected:
    friend class Extractor;
#if NCNN_STRING
    int find_blob_index_by_name(const char* name) const;
    int find_layer_index_by_name(const char* name) const;
    int custom_layer_to_index(const char* type);
    Layer* create_custom_layer(const char* type);
#endif // NCNN_STRING
    Layer* create_custom_layer(int index);
    int forward_layer(int layer_index, std::vector<Mat>& blob_mats, Option& opt) const;

protected:
    // graph storage: blobs connect layers by index
    std::vector<Blob> blobs;
    std::vector<Layer*> layers;

    // user-registered layer factories (see register_custom_layer)
    std::vector<layer_registry_entry> custom_layer_registry;
};

// per-inference session over a Net: feed inputs, pull outputs;
// created via Net::create_extractor()
class Extractor
{
public:
    // enable light mode
    // intermediate blob will be recycled when enabled
    // enabled by default
    void set_light_mode(bool enable);

    // set thread count for this extractor
    // this will overwrite the global setting
    // default count is system dependent
    void set_num_threads(int num_threads);

    // set blob memory allocator
    void set_blob_allocator(Allocator* allocator);

    // set workspace memory allocator
    void set_workspace_allocator(Allocator* allocator);

#if NCNN_STRING
    // set input by blob name
    // return 0 if success
    int input(const char* blob_name, const Mat& in);

    // get result by blob name
    // return 0 if success
    int extract(const char* blob_name, Mat& feat);
#endif // NCNN_STRING

    // set input by blob index
    // return 0 if success
    int input(int blob_index, const Mat& in);

    // get result by blob index
    // return 0 if success
    int extract(int blob_index, Mat& feat);

protected:
    friend Extractor Net::create_extractor() const;
    Extractor(const Net* net, int blob_count);

private:
    const Net* net;
    // one slot per blob in the network; filled lazily as layers run
    std::vector<Mat> blob_mats;
    Option opt;
};

} // namespace ncnn

#endif // NCNN_NET_H


================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/opencv.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_OPENCV_H
#define NCNN_OPENCV_H

#include "platform.h"

#if NCNN_OPENCV

#include <algorithm>
#include <string>
#include "mat.h"

// minimal opencv style data structure implementation
namespace cv
{

// simple 2-d size (width, height)
struct Size
{
    Size() : width(0), height(0) {}
    Size(int _w, int _h) : width(_w), height(_h) {}

    int width;
    int height;
};

// axis-aligned rectangle: top-left corner (x, y) plus width/height
template<typename _Tp>
struct Rect_
{
    Rect_() : x(0), y(0), width(0), height(0) {}
    Rect_(_Tp _x, _Tp _y, _Tp _w, _Tp _h) : x(_x), y(_y), width(_w), height(_h) {}

    _Tp x;
    _Tp y;
    _Tp width;
    _Tp height;

    // area
    _Tp area() const
    {
        return width * height;
    }
};

// intersection-assign: shrink a to the overlap of a and b,
// collapsing to a default (empty) rect when they do not overlap
template<typename _Tp> static inline Rect_<_Tp>& operator &= ( Rect_<_Tp>& a, const Rect_<_Tp>& b )
{
    const _Tp x1 = std::max(a.x, b.x);
    const _Tp y1 = std::max(a.y, b.y);
    const _Tp x2 = std::min(a.x + a.width, b.x + b.width);
    const _Tp y2 = std::min(a.y + a.height, b.y + b.height);
    a = Rect_<_Tp>(x1, y1, x2 - x1, y2 - y1);
    if( a.width <= 0 || a.height <= 0 )
        a = Rect_<_Tp>();
    return a;
}

// union-assign: grow a to the bounding box covering both a and b
template<typename _Tp> static inline Rect_<_Tp>& operator |= ( Rect_<_Tp>& a, const Rect_<_Tp>& b )
{
    const _Tp x1 = std::min(a.x, b.x);
    const _Tp y1 = std::min(a.y, b.y);
    const _Tp x2 = std::max(a.x + a.width, b.x + b.width);
    const _Tp y2 = std::max(a.y + a.height, b.y + b.height);
    a = Rect_<_Tp>(x1, y1, x2 - x1, y2 - y1);
    return a;
}

// intersection of two rects; neither operand is modified
template<typename _Tp> static inline Rect_<_Tp> operator & (const Rect_<_Tp>& a, const Rect_<_Tp>& b)
{
    Rect_<_Tp> result = a;
    result &= b;
    return result;
}

// bounding box of two rects; neither operand is modified
template<typename _Tp> static inline Rect_<_Tp> operator | (const Rect_<_Tp>& a, const Rect_<_Tp>& b)
{
    Rect_<_Tp> result = a;
    result |= b;
    return result;
}

typedef Rect_<int> Rect;
typedef Rect_<float> Rect2f;

// simple 2-d point
template<typename _Tp>
struct Point_
{
    Point_() : x(0), y(0) {}
    Point_(_Tp _x, _Tp _y) : x(_x), y(_y) {}

    _Tp x;
    _Tp y;
};

typedef Point_<int> Point;
typedef Point_<float> Point2f;

#define CV_8UC1 1
#define CV_8UC3 3
#define CV_8UC4 4
#define CV_32FC1 4

// minimal cv::Mat work-alike: reference-counted interleaved 8-bit image
struct Mat
{
    Mat() : data(0), refcount(0), rows(0), cols(0), c(0) {}

    // owning constructor; flags is the channel count (CV_8UCn)
    Mat(int _rows, int _cols, int flags) : data(0), refcount(0)
    {
        create(_rows, _cols, flags);
    }

    // copy (shallow; shares pixels and bumps the reference count)
    Mat(const Mat& m) : data(m.data), refcount(m.refcount)
    {
        if (refcount)
            NCNN_XADD(refcount, 1);

        rows = m.rows;
        cols = m.cols;
        c = m.c;
    }

    // external data: refcount stays null so release() never frees it
    Mat(int _rows, int _cols, int flags, void* _data) : data((unsigned char*)_data), refcount(0)
    {
        rows = _rows;
        cols = _cols;
        c = flags;
    }

    ~Mat()
    {
        release();
    }

    // assign
    Mat& operator=(const Mat& m)
    {
        if (this == &m)
            return *this;

        // addref m before release() so a shared buffer cannot be freed
        if (m.refcount)
            NCNN_XADD(m.refcount, 1);

        release();

        data = m.data;
        refcount = m.refcount;

        rows = m.rows;
        cols = m.cols;
        c = m.c;

        return *this;
    }

    void create(int _rows, int _cols, int flags)
    {
        release();

        rows = _rows;
        cols = _cols;
        c = flags;

        if (total() > 0)
        {
            // refcount address must be aligned, so we expand totalsize here
            size_t totalsize = (total() + 3) >> 2 << 2;
            data = (unsigned char*)ncnn::fastMalloc(totalsize + (int)sizeof(*refcount));
            refcount = (int*)(((unsigned char*)data) + totalsize);
            *refcount = 1;
        }
    }

    // refcount--; frees pixels when this was the last owner, then resets
    void release()
    {
        if (refcount && NCNN_XADD(refcount, -1) == 1)
            ncnn::fastFree(data);

        data = 0;

        rows = 0;
        cols = 0;
        c = 0;

        refcount = 0;
    }

    // deep copy of the whole image
    Mat clone() const
    {
        if (empty())
            return Mat();

        Mat m(rows, cols, c);

        if (total() > 0)
        {
            memcpy(m.data, data, total());
        }

        return m;
    }

    bool empty() const { return data == 0 || total() == 0; }

    int channels() const { return c; }

    // byte count of the image (one byte per element, channels interleaved)
    size_t total() const { return cols * rows * c; }

    const unsigned char* ptr(int y) const { return data + y * cols * c; }

    unsigned char* ptr(int y) { return data + y * cols * c; }

    // roi
    // note: returns a deep copy of the region's pixels, not a shared view
    Mat operator()( const Rect& roi ) const
    {
        if (empty())
            return Mat();

        Mat m(roi.height, roi.width, c);

        int sy = roi.y;
        for (int y = 0; y < roi.height; y++)
        {
            const unsigned char* sptr = ptr(sy) + roi.x * c;
            unsigned char* dptr = m.ptr(y);
            memcpy(dptr, sptr, roi.width * c);
            sy++;
        }

        return m;
    }

    unsigned char* data;

    // pointer to the reference counter;
    // when points to user-allocated data, the pointer is NULL
    int* refcount;

    int rows;
    int cols;

    int c;

};

#define CV_LOAD_IMAGE_GRAYSCALE 1
#define CV_LOAD_IMAGE_COLOR 3
Mat imread(const std::string& path, int flags);
void imwrite(const std::string& path, const Mat& m);

#if NCNN_PIXEL
void resize(const Mat& src, Mat& dst, const Size& size, float sw = 0.f, float sh = 0.f, int flags = 0);
#endif // NCNN_PIXEL

} // namespace cv

#endif // NCNN_OPENCV

#endif // NCNN_OPENCV_H


================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/paramdict.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_PARAMDICT_H
#define NCNN_PARAMDICT_H

#include <stdio.h>
#include "mat.h"
#include "platform.h"

// at most 20 parameters
#define NCNN_MAX_PARAM_COUNT 20

namespace ncnn {

class Net;
// Fixed-capacity id -> value dictionary holding a layer's parameters.
// Each slot may carry an int, a float, or a Mat array payload.
class ParamDict
{
public:
    // empty dictionary: every parameter slot starts out unloaded
    ParamDict();

    // get int parameter `id`, or `def` when that slot was never loaded
    int get(int id, int def) const;
    // get float parameter `id`, or `def` when that slot was never loaded
    float get(int id, float def) const;
    // get array parameter `id`, or `def` when that slot was never loaded
    Mat get(int id, const Mat& def) const;

    // set int
    void set(int id, int i);
    // set float
    void set(int id, float f);
    // set array
    void set(int id, const Mat& v);

public:
    // inference option flags carried alongside the parameters
    // NOTE(review): presumably toggles consumed by the convolution layers —
    // confirm against the layer implementations (not visible here).
    int use_winograd_convolution;
    int use_sgemm_convolution;
    int use_int8_inference;

protected:
    friend class Net;

    // reset every slot to the unloaded state
    void clear();

#if NCNN_STDIO
#if NCNN_STRING
    // load parameters from a text .param file
    int load_param(FILE* fp);
    // load parameters from an in-memory text buffer; `mem` is advanced past
    // the consumed bytes (passed by reference)
    int load_param_mem(const char*& mem);
#endif // NCNN_STRING
    // load parameters from a binary .param.bin file
    int load_param_bin(FILE* fp);
#endif // NCNN_STDIO
    // load parameters from an in-memory binary buffer; `mem` is advanced
    int load_param(const unsigned char*& mem);

protected:
    // fixed-size parameter table indexed by parameter id (0..19)
    struct
    {
        int loaded;                 // nonzero once this slot has been set/loaded
        union { int i; float f; };  // scalar payload, int or float view
        Mat v;                      // array payload
    } params[NCNN_MAX_PARAM_COUNT];
};

} // namespace ncnn

#endif // NCNN_PARAMDICT_H


================================================
FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/platform.h
================================================
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef NCNN_PLATFORM_H
#define NCNN_PLATFORM_H

// Build-time feature switches; each gates conditionally compiled code in the
// other headers (e.g. NCNN_STDIO/NCNN_STRING in paramdict.h, NCNN_PIXEL and
// NCNN_OPENCV in opencv.h).
#define NCNN_STDIO 1            // enable FILE*-based model/param loading
#define NCNN_STRING 1           // enable text .param parsing
#define NCNN_OPENCV 0           // built-in cv-compatible types (disabled)
#define NCNN_BENCHMARK 0        // NOTE(review): presumably per-layer timing — disabled
#define NCNN_PIXEL 1            // pixel conversion/resize helpers
#define NCNN_PIXEL_ROTATE 0     // NOTE(review): presumably pixel rotation helpers — disabled

#endif // NCNN_PLATFORM_H


================================================
FILE: MobileNetSSD_demo_single/app/src/main/java/com/example/che/mobilenetssd_demo/MainActivity.java
================================================
package com.example.che.mobilenetssd_demo;

import android.Manifest;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.net.Uri;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;


import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.request.RequestOptions;


public class MainActivity extends AppCompatActivity {

    private static final String TAG = MainActivity.class.getName();
    // request code used when launching the system photo picker
    private static final int USE_PHOTO = 1001;
    private String camera_image_path;
    private ImageView show_image;
    private TextView result_text;
    // true once the native model has been loaded successfully
    private boolean load_result = false;
    // NCHW input dimensions; these must match the trained model's input layer
    private int[] ddims = {1, 3, 300, 300};
    private int model_index = 1;
    private List<String> resultLabel = new ArrayList<>();
    // Java wrapper around the NDK C++ implementation; native functions are
    // invoked directly through this instance
    private MobileNetssd mobileNetssd = new MobileNetssd();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        try {
            // Load the network definition and weights before the UI is used.
            initMobileNetSSD();
        } catch (IOException e) {
            // Fix: use the shared TAG and keep the stack trace instead of
            // logging a bare hard-coded message.
            Log.e(TAG, "initMobileNetSSD error", e);
        }
        init_view();
        readCacheLabelFromLocalFile();
    }

    /**
     * Loads the MobileNetSSD network definition (.param.bin) and weights
     * (.bin) from the APK assets and hands both buffers to the native
     * Init() entry point, recording the result in {@link #load_result}.
     *
     * @throws IOException if either asset cannot be opened or fully read
     */
    private void initMobileNetSSD() throws IOException {
        byte[] param = readAssetBytes("MobileNetSSD_deploy.param.bin"); // network structure
        byte[] bin = readAssetBytes("MobileNetSSD_deploy.bin");         // model weights

        load_result = mobileNetssd.Init(param, bin);
        Log.d("load model", "MobileNetSSD_load_model_result:" + load_result);
    }

    /**
     * Reads an asset file completely into a byte array.
     *
     * Fix: InputStream.read(byte[]) is not guaranteed to fill the buffer in
     * a single call, so the original one-shot read could return a partially
     * filled array; this loops until the buffer is full. The stream is also
     * closed even when a read throws.
     * NOTE(review): sizing the buffer from available() follows the original
     * code; for asset streams this is the remaining byte count — confirm.
     */
    private byte[] readAssetBytes(String name) throws IOException {
        InputStream in = getAssets().open(name);
        try {
            byte[] buf = new byte[in.available()];
            int off = 0;
            while (off < buf.length) {
                int n = in.read(buf, off, buf.length - off);
                if (n < 0) {
                    throw new IOException("unexpected end of asset: " + name);
                }
                off += n;
            }
            return buf;
        } finally {
            in.close();
        }
    }


    // Wire up the UI: request runtime permissions, bind the views, and
    // install the click handler that launches the photo picker.
    private void init_view() {
        request_permissions();

        show_image = (ImageView) findViewById(R.id.show_image);
        result_text = (TextView) findViewById(R.id.result_text);
        result_text.setMovementMethod(ScrollingMovementMethod.getInstance());

        View.OnClickListener pickPhotoListener = new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (!load_result) {
                    // The native model never loaded; detection would fail.
                    Toast.makeText(MainActivity.this, "never load model", Toast.LENGTH_SHORT).show();
                    return;
                }
                PhotoUtil.use_photo(MainActivity.this, USE_PHOTO);
            }
        };

        Button usePhotoButton = (Button) findViewById(R.id.use_photo);
        usePhotoButton.setOnClickListener(pickPhotoListener);
    }

    /**
     * Loads the class label names from assets/words.txt into resultLabel,
     * one label per line. Failures are logged and otherwise ignored
     * (deliberate best-effort, as in the original).
     */
    private void readCacheLabelFromLocalFile() {
        BufferedReader reader = null;
        try {
            AssetManager assetManager = getApplicationContext().getAssets();
            reader = new BufferedReader(new InputStreamReader(assetManager.open("words.txt")));
            String readLine;
            while ((readLine = reader.readLine()) != null) {
                resultLabel.add(readLine);
            }
        } catch (Exception e) {
            Log.e("labelCache", "error " + e);
        } finally {
            // Fix: the original leaked the reader when readLine() threw.
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException ignored) {
                    // best-effort close
                }
            }
        }
    }


    /**
     * Receives the photo the user picked (USE_PHOTO request), resolves its
     * Uri to a file path and runs detection on it.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
        // Fix: forward the result to the framework/support library first —
        // the original never called super.onActivityResult(), which breaks
        // result delivery to support fragments.
        super.onActivityResult(requestCode, resultCode, data);
        String image_path;
        // Glide options kept for the commented-out preview call below.
        RequestOptions options = new RequestOptions().skipMemoryCache(true).diskCacheStrategy(DiskCacheStrategy.NONE);
        if (resultCode == Activity.RESULT_OK) {
            switch (requestCode) {
                case USE_PHOTO:
                    if (data == null) {
                        Log.w(TAG, "user photo data is null");
                        return;
                    }
                    Uri image_uri = data.getData();

                    //Glide.with(MainActivity.this).load(image_uri).apply(options).into(show_image);

                    // get image path from uri
                    image_path = PhotoUtil.get_path_from_URI(MainActivity.this, image_uri);
                    // predict image
                    predict_image(image_path);
                    break;
            }
        }
    }

    //  predict image
    private void predict_image(String image_path) {
        // picture to float array
        Bitmap bmp = PhotoUtil.getScaleBitmap(image_path);
        Bitmap rgba = bm
Download .txt
gitextract_1slkszqd/

├── MobileNetSSD_demo/
│   ├── .gitignore
│   ├── .idea/
│   │   ├── codeStyles/
│   │   │   └── Project.xml
│   │   ├── gradle.xml
│   │   └── misc.xml
│   ├── app/
│   │   ├── .gitignore
│   │   ├── CMakeLists.txt
│   │   ├── build.gradle
│   │   ├── proguard-rules.pro
│   │   └── src/
│   │       ├── androidTest/
│   │       │   └── java/
│   │       │       └── com/
│   │       │           └── example/
│   │       │               └── che/
│   │       │                   └── mobilenetssd_demo/
│   │       │                       └── ExampleInstrumentedTest.java
│   │       ├── main/
│   │       │   ├── AndroidManifest.xml
│   │       │   ├── assets/
│   │       │   │   └── words.txt
│   │       │   ├── cpp/
│   │       │   │   ├── MobileNetSSD_deploy.id.h
│   │       │   │   ├── MobileNetssd.cpp
│   │       │   │   └── include/
│   │       │   │       ├── allocator.h
│   │       │   │       ├── benchmark.h
│   │       │   │       ├── blob.h
│   │       │   │       ├── cpu.h
│   │       │   │       ├── layer.h
│   │       │   │       ├── layer_type.h
│   │       │   │       ├── layer_type_enum.h
│   │       │   │       ├── mat.h
│   │       │   │       ├── modelbin.h
│   │       │   │       ├── net.h
│   │       │   │       ├── opencv.h
│   │       │   │       ├── paramdict.h
│   │       │   │       └── platform.h
│   │       │   ├── java/
│   │       │   │   └── com/
│   │       │   │       └── example/
│   │       │   │           └── che/
│   │       │   │               └── mobilenetssd_demo/
│   │       │   │                   ├── MainActivity.java
│   │       │   │                   ├── MobileNetssd.java
│   │       │   │                   └── PhotoUtil.java
│   │       │   ├── jniLibs/
│   │       │   │   └── armeabi-v7a/
│   │       │   │       └── libncnn.a
│   │       │   └── res/
│   │       │       ├── drawable/
│   │       │       │   └── ic_launcher_background.xml
│   │       │       ├── drawable-v24/
│   │       │       │   └── ic_launcher_foreground.xml
│   │       │       ├── layout/
│   │       │       │   └── activity_main.xml
│   │       │       ├── mipmap-anydpi-v26/
│   │       │       │   ├── ic_launcher.xml
│   │       │       │   └── ic_launcher_round.xml
│   │       │       └── values/
│   │       │           ├── colors.xml
│   │       │           ├── strings.xml
│   │       │           └── styles.xml
│   │       └── test/
│   │           └── java/
│   │               └── com/
│   │                   └── example/
│   │                       └── che/
│   │                           └── mobilenetssd_demo/
│   │                               └── ExampleUnitTest.java
│   ├── build.gradle
│   ├── gradle/
│   │   └── wrapper/
│   │       ├── gradle-wrapper.jar
│   │       └── gradle-wrapper.properties
│   ├── gradle.properties
│   ├── gradlew
│   ├── gradlew.bat
│   └── settings.gradle
├── MobileNetSSD_demo_single/
│   ├── .gitignore
│   ├── .idea/
│   │   ├── codeStyles/
│   │   │   └── Project.xml
│   │   ├── gradle.xml
│   │   └── misc.xml
│   ├── app/
│   │   ├── .gitignore
│   │   ├── CMakeLists.txt
│   │   ├── build.gradle
│   │   ├── proguard-rules.pro
│   │   └── src/
│   │       ├── androidTest/
│   │       │   └── java/
│   │       │       └── com/
│   │       │           └── example/
│   │       │               └── che/
│   │       │                   └── mobilenetssd_demo/
│   │       │                       └── ExampleInstrumentedTest.java
│   │       ├── main/
│   │       │   ├── AndroidManifest.xml
│   │       │   ├── assets/
│   │       │   │   └── words.txt
│   │       │   ├── cpp/
│   │       │   │   ├── MobileNetSSD_deploy.id.h
│   │       │   │   ├── MobileNetssd.cpp
│   │       │   │   └── include/
│   │       │   │       ├── allocator.h
│   │       │   │       ├── benchmark.h
│   │       │   │       ├── blob.h
│   │       │   │       ├── cpu.h
│   │       │   │       ├── layer.h
│   │       │   │       ├── layer_type.h
│   │       │   │       ├── layer_type_enum.h
│   │       │   │       ├── mat.h
│   │       │   │       ├── modelbin.h
│   │       │   │       ├── net.h
│   │       │   │       ├── opencv.h
│   │       │   │       ├── paramdict.h
│   │       │   │       └── platform.h
│   │       │   ├── java/
│   │       │   │   └── com/
│   │       │   │       └── example/
│   │       │   │           └── che/
│   │       │   │               └── mobilenetssd_demo/
│   │       │   │                   ├── MainActivity.java
│   │       │   │                   ├── MobileNetssd.java
│   │       │   │                   └── PhotoUtil.java
│   │       │   ├── jniLibs/
│   │       │   │   └── armeabi-v7a/
│   │       │   │       └── libncnn.a
│   │       │   └── res/
│   │       │       ├── drawable/
│   │       │       │   └── ic_launcher_background.xml
│   │       │       ├── drawable-v24/
│   │       │       │   └── ic_launcher_foreground.xml
│   │       │       ├── layout/
│   │       │       │   └── activity_main.xml
│   │       │       ├── mipmap-anydpi-v26/
│   │       │       │   ├── ic_launcher.xml
│   │       │       │   └── ic_launcher_round.xml
│   │       │       └── values/
│   │       │           ├── colors.xml
│   │       │           ├── strings.xml
│   │       │           └── styles.xml
│   │       └── test/
│   │           └── java/
│   │               └── com/
│   │                   └── example/
│   │                       └── che/
│   │                           └── mobilenetssd_demo/
│   │                               └── ExampleUnitTest.java
│   ├── build.gradle
│   ├── gradle/
│   │   └── wrapper/
│   │       ├── gradle-wrapper.jar
│   │       └── gradle-wrapper.properties
│   ├── gradle.properties
│   ├── gradlew
│   ├── gradlew.bat
│   └── settings.gradle
└── README.md
Download .txt
SYMBOL INDEX (138 symbols across 36 files)

FILE: MobileNetSSD_demo/app/src/androidTest/java/com/example/che/mobilenetssd_demo/ExampleInstrumentedTest.java
  class ExampleInstrumentedTest (line 17) | @RunWith(AndroidJUnit4.class)
    method useAppContext (line 19) | @Test

FILE: MobileNetSSD_demo/app/src/main/cpp/MobileNetSSD_deploy.id.h
  function namespace (line 3) | namespace MobileNetSSD_deploy_param_id {

FILE: MobileNetSSD_demo/app/src/main/cpp/MobileNetssd.cpp
  function JNIEXPORT (line 25) | JNIEXPORT jboolean JNICALL
  function JNIEXPORT (line 58) | JNIEXPORT jfloatArray JNICALL Java_com_example_che_mobilenetssd_1demo_Mo...

FILE: MobileNetSSD_demo/app/src/main/cpp/include/allocator.h
  function namespace (line 28) | namespace ncnn {
  function alignSize (line 45) | static inline size_t alignSize(size_t sz, int n)
  function fastFree (line 60) | static inline void fastFree(void* ptr)
  function NCNN_XADD (line 93) | static inline int NCNN_XADD(int* addr, int delta) { int tmp = *addr; *ad...
  function class (line 97) | class Mutex
  function class (line 109) | class Mutex
  function class (line 121) | class Allocator
  function class (line 129) | class PoolAllocator : public Allocator
  function class (line 153) | class UnlockedPoolAllocator : public Allocator

FILE: MobileNetSSD_demo/app/src/main/cpp/include/benchmark.h
  function namespace (line 22) | namespace ncnn {

FILE: MobileNetSSD_demo/app/src/main/cpp/include/blob.h
  function namespace (line 22) | namespace ncnn {

FILE: MobileNetSSD_demo/app/src/main/cpp/include/cpu.h
  function namespace (line 18) | namespace ncnn {

FILE: MobileNetSSD_demo/app/src/main/cpp/include/layer.h
  function namespace (line 26) | namespace ncnn {

FILE: MobileNetSSD_demo/app/src/main/cpp/include/layer_type.h
  function namespace (line 18) | namespace ncnn {

FILE: MobileNetSSD_demo/app/src/main/cpp/include/mat.h
  function namespace (line 26) | namespace ncnn {
  function fill (line 290) | inline void Mat::fill(float _v)
  function fill (line 344) | inline void Mat::fill(int _v)
  function fill (line 399) | void Mat::fill(T _v)
  function Mat (line 409) | inline Mat Mat::clone(Allocator* allocator) const
  function Mat (line 430) | inline Mat Mat::reshape(int _w, Allocator* allocator) const
  function Mat (line 463) | inline Mat Mat::reshape(int _w, int _h, Allocator* allocator) const
  function Mat (line 496) | inline Mat Mat::reshape(int _w, int _h, int _c, Allocator* allocator) const
  function create (line 538) | inline void Mat::create(int _w, size_t _elemsize, Allocator* _allocator)
  function create (line 567) | inline void Mat::create(int _w, int _h, size_t _elemsize, Allocator* _al...
  function create (line 596) | inline void Mat::create(int _w, int _h, int _c, size_t _elemsize, Alloca...
  function addref (line 625) | inline void Mat::addref()
  function release (line 631) | inline void Mat::release()
  function Mat (line 665) | inline Mat Mat::channel(int c)
  function Mat (line 670) | inline const Mat Mat::channel(int c) const
  function T (line 692) | const T* Mat::row(int y) const
  function Mat (line 697) | inline Mat Mat::channel_range(int _c, int channels)
  function Mat (line 702) | inline const Mat Mat::channel_range(int _c, int channels) const
  function Mat (line 707) | inline Mat Mat::row_range(int y, int rows)
  function Mat (line 712) | inline const Mat Mat::row_range(int y, int rows) const
  function Mat (line 717) | inline Mat Mat::range(int x, int n)
  function Mat (line 722) | inline const Mat Mat::range(int x, int n) const
  function const (line 744) | inline const float& Mat::operator[](int i) const

FILE: MobileNetSSD_demo/app/src/main/cpp/include/modelbin.h
  function namespace (line 22) | namespace ncnn {

FILE: MobileNetSSD_demo/app/src/main/cpp/include/net.h
  function namespace (line 25) | namespace ncnn {

FILE: MobileNetSSD_demo/app/src/main/cpp/include/opencv.h
  function operator (line 30) | struct Size
  type Rect_ (line 89) | typedef Rect_<int> Rect;
  type Rect_ (line 90) | typedef Rect_<float> Rect2f;
  type Point_ (line 102) | typedef Point_<int> Point;
  type Point_ (line 103) | typedef Point_<float> Point2f;

FILE: MobileNetSSD_demo/app/src/main/cpp/include/paramdict.h
  function namespace (line 25) | namespace ncnn {

FILE: MobileNetSSD_demo/app/src/main/java/com/example/che/mobilenetssd_demo/MainActivity.java
  class MainActivity (line 41) | public class MainActivity extends AppCompatActivity {
    method onCreate (line 54) | @Override
    method initMobileNetSSD (line 72) | private void initMobileNetSSD() throws IOException {
    method init_view (line 98) | private void init_view() {
    method readCacheLabelFromLocalFile (line 118) | private void readCacheLabelFromLocalFile() {
    method onActivityResult (line 133) | protected void onActivityResult(int requestCode, int resultCode, @Null...
    method predict_image (line 157) | private void predict_image(String image_path) {
    method TwoArry (line 219) | public static float[][] TwoArry(float[] inputfloat){
    method request_permissions (line 260) | private void request_permissions() {
    method onRequestPermissionsResult (line 277) | @Override

FILE: MobileNetSSD_demo/app/src/main/java/com/example/che/mobilenetssd_demo/MobileNetssd.java
  class MobileNetssd (line 8) | public class MobileNetssd {
    method Init (line 10) | public native boolean Init(byte[] param, byte[] bin);
    method Detect (line 11) | public native float[] Detect(Bitmap bitmap);

FILE: MobileNetSSD_demo/app/src/main/java/com/example/che/mobilenetssd_demo/PhotoUtil.java
  class PhotoUtil (line 12) | public class PhotoUtil {
    method use_photo (line 14) | public static void use_photo(Activity activity, int requestCode) {
    method get_path_from_URI (line 21) | public static String get_path_from_URI(Context context, Uri uri) {
    method getScaleBitmap (line 36) | public static Bitmap getScaleBitmap(String filePath) {

FILE: MobileNetSSD_demo/app/src/test/java/com/example/che/mobilenetssd_demo/ExampleUnitTest.java
  class ExampleUnitTest (line 12) | public class ExampleUnitTest {
    method addition_isCorrect (line 13) | @Test

FILE: MobileNetSSD_demo_single/app/src/androidTest/java/com/example/che/mobilenetssd_demo/ExampleInstrumentedTest.java
  class ExampleInstrumentedTest (line 17) | @RunWith(AndroidJUnit4.class)
    method useAppContext (line 19) | @Test

FILE: MobileNetSSD_demo_single/app/src/main/cpp/MobileNetSSD_deploy.id.h
  function namespace (line 3) | namespace MobileNetSSD_deploy_param_id {

FILE: MobileNetSSD_demo_single/app/src/main/cpp/MobileNetssd.cpp
  function JNIEXPORT (line 25) | JNIEXPORT jboolean JNICALL
  function JNIEXPORT (line 58) | JNIEXPORT jfloatArray JNICALL Java_com_example_che_mobilenetssd_1demo_Mo...

FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/allocator.h
  function namespace (line 28) | namespace ncnn {
  function alignSize (line 45) | static inline size_t alignSize(size_t sz, int n)
  function fastFree (line 60) | static inline void fastFree(void* ptr)
  function NCNN_XADD (line 93) | static inline int NCNN_XADD(int* addr, int delta) { int tmp = *addr; *ad...
  function class (line 97) | class Mutex
  function class (line 109) | class Mutex
  function class (line 121) | class Allocator
  function class (line 129) | class PoolAllocator : public Allocator
  function class (line 153) | class UnlockedPoolAllocator : public Allocator

FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/benchmark.h
  function namespace (line 22) | namespace ncnn {

FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/blob.h
  function namespace (line 22) | namespace ncnn {

FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/cpu.h
  function namespace (line 18) | namespace ncnn {

FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/layer.h
  function namespace (line 26) | namespace ncnn {

FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/layer_type.h
  function namespace (line 18) | namespace ncnn {

FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/mat.h
  function namespace (line 26) | namespace ncnn {
  function fill (line 290) | inline void Mat::fill(float _v)
  function fill (line 344) | inline void Mat::fill(int _v)
  function fill (line 399) | void Mat::fill(T _v)
  function Mat (line 409) | inline Mat Mat::clone(Allocator* allocator) const
  function Mat (line 430) | inline Mat Mat::reshape(int _w, Allocator* allocator) const
  function Mat (line 463) | inline Mat Mat::reshape(int _w, int _h, Allocator* allocator) const
  function Mat (line 496) | inline Mat Mat::reshape(int _w, int _h, int _c, Allocator* allocator) const
  function create (line 538) | inline void Mat::create(int _w, size_t _elemsize, Allocator* _allocator)
  function create (line 567) | inline void Mat::create(int _w, int _h, size_t _elemsize, Allocator* _al...
  function create (line 596) | inline void Mat::create(int _w, int _h, int _c, size_t _elemsize, Alloca...
  function addref (line 625) | inline void Mat::addref()
  function release (line 631) | inline void Mat::release()
  function Mat (line 665) | inline Mat Mat::channel(int c)
  function Mat (line 670) | inline const Mat Mat::channel(int c) const
  function T (line 692) | const T* Mat::row(int y) const
  function Mat (line 697) | inline Mat Mat::channel_range(int _c, int channels)
  function Mat (line 702) | inline const Mat Mat::channel_range(int _c, int channels) const
  function Mat (line 707) | inline Mat Mat::row_range(int y, int rows)
  function Mat (line 712) | inline const Mat Mat::row_range(int y, int rows) const
  function Mat (line 717) | inline Mat Mat::range(int x, int n)
  function Mat (line 722) | inline const Mat Mat::range(int x, int n) const
  function const (line 744) | inline const float& Mat::operator[](int i) const

FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/modelbin.h
  function namespace (line 22) | namespace ncnn {

FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/net.h
  function namespace (line 25) | namespace ncnn {

FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/opencv.h
  function operator (line 30) | struct Size
  type Rect_ (line 89) | typedef Rect_<int> Rect;
  type Rect_ (line 90) | typedef Rect_<float> Rect2f;
  type Point_ (line 102) | typedef Point_<int> Point;
  type Point_ (line 103) | typedef Point_<float> Point2f;

FILE: MobileNetSSD_demo_single/app/src/main/cpp/include/paramdict.h
  function namespace (line 25) | namespace ncnn {

FILE: MobileNetSSD_demo_single/app/src/main/java/com/example/che/mobilenetssd_demo/MainActivity.java
  class MainActivity (line 41) | public class MainActivity extends AppCompatActivity {
    method onCreate (line 54) | @Override
    method initMobileNetSSD (line 72) | private void initMobileNetSSD() throws IOException {
    method init_view (line 98) | private void init_view() {
    method readCacheLabelFromLocalFile (line 118) | private void readCacheLabelFromLocalFile() {
    method onActivityResult (line 133) | protected void onActivityResult(int requestCode, int resultCode, @Null...
    method predict_image (line 157) | private void predict_image(String image_path) {
    method get_max_result (line 195) | private float[] get_max_result(float[] result) {
    method request_permissions (line 213) | private void request_permissions() {
    method onRequestPermissionsResult (line 230) | @Override

FILE: MobileNetSSD_demo_single/app/src/main/java/com/example/che/mobilenetssd_demo/MobileNetssd.java
  class MobileNetssd (line 8) | public class MobileNetssd {
    method Init (line 10) | public native boolean Init(byte[] param, byte[] bin);
    method Detect (line 11) | public native float[] Detect(Bitmap bitmap);

FILE: MobileNetSSD_demo_single/app/src/main/java/com/example/che/mobilenetssd_demo/PhotoUtil.java
  class PhotoUtil (line 12) | public class PhotoUtil {
    method use_photo (line 14) | public static void use_photo(Activity activity, int requestCode) {
    method get_path_from_URI (line 21) | public static String get_path_from_URI(Context context, Uri uri) {
    method getScaleBitmap (line 36) | public static Bitmap getScaleBitmap(String filePath) {

FILE: MobileNetSSD_demo_single/app/src/test/java/com/example/che/mobilenetssd_demo/ExampleUnitTest.java
  class ExampleUnitTest (line 12) | public class ExampleUnitTest {
    method addition_isCorrect (line 13) | @Test
Condensed preview — 93 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (234K chars).
[
  {
    "path": "MobileNetSSD_demo/.gitignore",
    "chars": 176,
    "preview": "*.iml\n.gradle\n/local.properties\n/.idea/caches/build_file_checksums.ser\n/.idea/libraries\n/.idea/modules.xml\n/.idea/worksp"
  },
  {
    "path": "MobileNetSSD_demo/.idea/codeStyles/Project.xml",
    "chars": 1775,
    "preview": "<component name=\"ProjectCodeStyleConfiguration\">\n  <code_scheme name=\"Project\" version=\"173\">\n    <Objective-C-extension"
  },
  {
    "path": "MobileNetSSD_demo/.idea/gradle.xml",
    "chars": 626,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"GradleSettings\">\n    <option name=\"linke"
  },
  {
    "path": "MobileNetSSD_demo/.idea/misc.xml",
    "chars": 2101,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"NullableNotNullManager\">\n    <option nam"
  },
  {
    "path": "MobileNetSSD_demo/app/.gitignore",
    "chars": 7,
    "preview": "/build\n"
  },
  {
    "path": "MobileNetSSD_demo/app/CMakeLists.txt",
    "chars": 1946,
    "preview": "# For more information about using CMake with Android Studio, read the\n# documentation: https://d.android.com/studio/pro"
  },
  {
    "path": "MobileNetSSD_demo/app/build.gradle",
    "chars": 1532,
    "preview": "apply plugin: 'com.android.application'\n\nandroid {\n    compileSdkVersion 28\n    defaultConfig {\n        applicationId \"c"
  },
  {
    "path": "MobileNetSSD_demo/app/proguard-rules.pro",
    "chars": 751,
    "preview": "# Add project specific ProGuard rules here.\n# You can control the set of applied configuration files using the\n# proguar"
  },
  {
    "path": "MobileNetSSD_demo/app/src/androidTest/java/com/example/che/mobilenetssd_demo/ExampleInstrumentedTest.java",
    "chars": 750,
    "preview": "package com.example.che.mobilenetssd_demo;\n\nimport android.content.Context;\nimport android.support.test.InstrumentationR"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/AndroidManifest.xml",
    "chars": 951,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package="
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/assets/words.txt",
    "chars": 145,
    "preview": "background\naeroplane\nbicycle\nbird\nboat\nbottle\nbus\ncar\ncat\nchair\ncow\ndiningtable\ndog\nhorse\nmotorbike\nperson\npottedplant\ns"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/MobileNetSSD_deploy.id.h",
    "chars": 10980,
    "preview": "#ifndef NCNN_INCLUDE_GUARD_MobileNetSSD_deploy_id_h\n#define NCNN_INCLUDE_GUARD_MobileNetSSD_deploy_id_h\nnamespace Mobile"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/MobileNetssd.cpp",
    "chars": 4547,
    "preview": "#include <android/bitmap.h>\n#include <android/log.h>\n#include <jni.h>\n#include <string>\n#include <vector>\n\n// ncnn\n#incl"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/include/allocator.h",
    "chars": 5185,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2018 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/include/benchmark.h",
    "chars": 1152,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/include/blob.h",
    "chars": 1160,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/include/cpu.h",
    "chars": 1661,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/include/layer.h",
    "chars": 3480,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/include/layer_type.h",
    "chars": 942,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/include/layer_type_enum.h",
    "chars": 992,
    "preview": "// Layer Type Enum header\n//\n// This file is auto-generated by cmake, don't edit it.\n\nAbsVal = 0,\nArgMax = 1,\nBatchNorm "
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/include/mat.h",
    "chars": 18533,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/include/modelbin.h",
    "chars": 1924,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/include/net.h",
    "chars": 5037,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/include/opencv.h",
    "chars": 5885,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/include/paramdict.h",
    "chars": 1849,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/cpp/include/platform.h",
    "chars": 926,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/java/com/example/che/mobilenetssd_demo/MainActivity.java",
    "chars": 11161,
    "preview": "package com.example.che.mobilenetssd_demo;\n\nimport android.Manifest;\nimport android.app.Activity;\nimport android.content"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/java/com/example/che/mobilenetssd_demo/MobileNetssd.java",
    "chars": 444,
    "preview": "package com.example.che.mobilenetssd_demo;\n\nimport android.graphics.Bitmap;\n\n/**\n *  MobileNetssd的java接口,与本地c++代码相呼应 nat"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/java/com/example/che/mobilenetssd_demo/PhotoUtil.java",
    "chars": 1831,
    "preview": "package com.example.che.mobilenetssd_demo;\n\nimport android.app.Activity;\nimport android.content.Context;\nimport android."
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/res/drawable/ic_launcher_background.xml",
    "chars": 5606,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<vector xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    android:wi"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/res/drawable-v24/ic_launcher_foreground.xml",
    "chars": 1880,
    "preview": "<vector xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    xmlns:aapt=\"http://schemas.android.com/aapt\"\n    "
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/res/layout/activity_main.xml",
    "chars": 1299,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<RelativeLayout xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    xm"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml",
    "chars": 272,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<adaptive-icon xmlns:android=\"http://schemas.android.com/apk/res/android\">\n    <b"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml",
    "chars": 272,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<adaptive-icon xmlns:android=\"http://schemas.android.com/apk/res/android\">\n    <b"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/res/values/colors.xml",
    "chars": 208,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<resources>\n    <color name=\"colorPrimary\">#008577</color>\n    <color name=\"color"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/res/values/strings.xml",
    "chars": 80,
    "preview": "<resources>\n    <string name=\"app_name\">MobileNetSSD_demo</string>\n</resources>\n"
  },
  {
    "path": "MobileNetSSD_demo/app/src/main/res/values/styles.xml",
    "chars": 383,
    "preview": "<resources>\n\n    <!-- Base application theme. -->\n    <style name=\"AppTheme\" parent=\"Theme.AppCompat.Light.DarkActionBar"
  },
  {
    "path": "MobileNetSSD_demo/app/src/test/java/com/example/che/mobilenetssd_demo/ExampleUnitTest.java",
    "chars": 394,
    "preview": "package com.example.che.mobilenetssd_demo;\n\nimport org.junit.Test;\n\nimport static org.junit.Assert.*;\n\n/**\n * Example lo"
  },
  {
    "path": "MobileNetSSD_demo/build.gradle",
    "chars": 546,
    "preview": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    \n"
  },
  {
    "path": "MobileNetSSD_demo/gradle/wrapper/gradle-wrapper.properties",
    "chars": 200,
    "preview": "distributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributi"
  },
  {
    "path": "MobileNetSSD_demo/gradle.properties",
    "chars": 728,
    "preview": "# Project-wide Gradle settings.\n# IDE (e.g. Android Studio) users:\n# Gradle settings configured through the IDE *will ov"
  },
  {
    "path": "MobileNetSSD_demo/gradlew",
    "chars": 5296,
    "preview": "#!/usr/bin/env sh\n\n##############################################################################\n##\n##  Gradle start up"
  },
  {
    "path": "MobileNetSSD_demo/gradlew.bat",
    "chars": 2260,
    "preview": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@r"
  },
  {
    "path": "MobileNetSSD_demo/settings.gradle",
    "chars": 15,
    "preview": "include ':app'\n"
  },
  {
    "path": "MobileNetSSD_demo_single/.gitignore",
    "chars": 176,
    "preview": "*.iml\n.gradle\n/local.properties\n/.idea/caches/build_file_checksums.ser\n/.idea/libraries\n/.idea/modules.xml\n/.idea/worksp"
  },
  {
    "path": "MobileNetSSD_demo_single/.idea/codeStyles/Project.xml",
    "chars": 1775,
    "preview": "<component name=\"ProjectCodeStyleConfiguration\">\n  <code_scheme name=\"Project\" version=\"173\">\n    <Objective-C-extension"
  },
  {
    "path": "MobileNetSSD_demo_single/.idea/gradle.xml",
    "chars": 626,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"GradleSettings\">\n    <option name=\"linke"
  },
  {
    "path": "MobileNetSSD_demo_single/.idea/misc.xml",
    "chars": 2101,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"NullableNotNullManager\">\n    <option nam"
  },
  {
    "path": "MobileNetSSD_demo_single/app/.gitignore",
    "chars": 7,
    "preview": "/build\n"
  },
  {
    "path": "MobileNetSSD_demo_single/app/CMakeLists.txt",
    "chars": 1946,
    "preview": "# For more information about using CMake with Android Studio, read the\n# documentation: https://d.android.com/studio/pro"
  },
  {
    "path": "MobileNetSSD_demo_single/app/build.gradle",
    "chars": 1532,
    "preview": "apply plugin: 'com.android.application'\n\nandroid {\n    compileSdkVersion 28\n    defaultConfig {\n        applicationId \"c"
  },
  {
    "path": "MobileNetSSD_demo_single/app/proguard-rules.pro",
    "chars": 751,
    "preview": "# Add project specific ProGuard rules here.\n# You can control the set of applied configuration files using the\n# proguar"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/androidTest/java/com/example/che/mobilenetssd_demo/ExampleInstrumentedTest.java",
    "chars": 750,
    "preview": "package com.example.che.mobilenetssd_demo;\n\nimport android.content.Context;\nimport android.support.test.InstrumentationR"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/AndroidManifest.xml",
    "chars": 951,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package="
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/assets/words.txt",
    "chars": 145,
    "preview": "background\naeroplane\nbicycle\nbird\nboat\nbottle\nbus\ncar\ncat\nchair\ncow\ndiningtable\ndog\nhorse\nmotorbike\nperson\npottedplant\ns"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/MobileNetSSD_deploy.id.h",
    "chars": 10980,
    "preview": "#ifndef NCNN_INCLUDE_GUARD_MobileNetSSD_deploy_id_h\n#define NCNN_INCLUDE_GUARD_MobileNetSSD_deploy_id_h\nnamespace Mobile"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/MobileNetssd.cpp",
    "chars": 4424,
    "preview": "#include <android/bitmap.h>\n#include <android/log.h>\n#include <jni.h>\n#include <string>\n#include <vector>\n\n// ncnn\n#incl"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/include/allocator.h",
    "chars": 5185,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2018 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/include/benchmark.h",
    "chars": 1152,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/include/blob.h",
    "chars": 1160,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/include/cpu.h",
    "chars": 1661,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/include/layer.h",
    "chars": 3480,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/include/layer_type.h",
    "chars": 942,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/include/layer_type_enum.h",
    "chars": 992,
    "preview": "// Layer Type Enum header\n//\n// This file is auto-generated by cmake, don't edit it.\n\nAbsVal = 0,\nArgMax = 1,\nBatchNorm "
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/include/mat.h",
    "chars": 18533,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/include/modelbin.h",
    "chars": 1924,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/include/net.h",
    "chars": 5037,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/include/opencv.h",
    "chars": 5885,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/include/paramdict.h",
    "chars": 1849,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/cpp/include/platform.h",
    "chars": 926,
    "preview": "// Tencent is pleased to support the open source community by making ncnn available.\n//\n// Copyright (C) 2017 THL A29 Li"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/java/com/example/che/mobilenetssd_demo/MainActivity.java",
    "chars": 9535,
    "preview": "package com.example.che.mobilenetssd_demo;\n\nimport android.Manifest;\nimport android.app.Activity;\nimport android.content"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/java/com/example/che/mobilenetssd_demo/MobileNetssd.java",
    "chars": 444,
    "preview": "package com.example.che.mobilenetssd_demo;\n\nimport android.graphics.Bitmap;\n\n/**\n *  MobileNetssd的java接口,与本地c++代码相呼应 nat"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/java/com/example/che/mobilenetssd_demo/PhotoUtil.java",
    "chars": 1831,
    "preview": "package com.example.che.mobilenetssd_demo;\n\nimport android.app.Activity;\nimport android.content.Context;\nimport android."
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/res/drawable/ic_launcher_background.xml",
    "chars": 5606,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<vector xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    android:wi"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/res/drawable-v24/ic_launcher_foreground.xml",
    "chars": 1880,
    "preview": "<vector xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    xmlns:aapt=\"http://schemas.android.com/aapt\"\n    "
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/res/layout/activity_main.xml",
    "chars": 1299,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<RelativeLayout xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    xm"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml",
    "chars": 272,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<adaptive-icon xmlns:android=\"http://schemas.android.com/apk/res/android\">\n    <b"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml",
    "chars": 272,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<adaptive-icon xmlns:android=\"http://schemas.android.com/apk/res/android\">\n    <b"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/res/values/colors.xml",
    "chars": 208,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<resources>\n    <color name=\"colorPrimary\">#008577</color>\n    <color name=\"color"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/res/values/strings.xml",
    "chars": 80,
    "preview": "<resources>\n    <string name=\"app_name\">MobileNetSSD_demo</string>\n</resources>\n"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/main/res/values/styles.xml",
    "chars": 383,
    "preview": "<resources>\n\n    <!-- Base application theme. -->\n    <style name=\"AppTheme\" parent=\"Theme.AppCompat.Light.DarkActionBar"
  },
  {
    "path": "MobileNetSSD_demo_single/app/src/test/java/com/example/che/mobilenetssd_demo/ExampleUnitTest.java",
    "chars": 394,
    "preview": "package com.example.che.mobilenetssd_demo;\n\nimport org.junit.Test;\n\nimport static org.junit.Assert.*;\n\n/**\n * Example lo"
  },
  {
    "path": "MobileNetSSD_demo_single/build.gradle",
    "chars": 546,
    "preview": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    \n"
  },
  {
    "path": "MobileNetSSD_demo_single/gradle/wrapper/gradle-wrapper.properties",
    "chars": 200,
    "preview": "distributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributi"
  },
  {
    "path": "MobileNetSSD_demo_single/gradle.properties",
    "chars": 728,
    "preview": "# Project-wide Gradle settings.\n# IDE (e.g. Android Studio) users:\n# Gradle settings configured through the IDE *will ov"
  },
  {
    "path": "MobileNetSSD_demo_single/gradlew",
    "chars": 5296,
    "preview": "#!/usr/bin/env sh\n\n##############################################################################\n##\n##  Gradle start up"
  },
  {
    "path": "MobileNetSSD_demo_single/gradlew.bat",
    "chars": 2260,
    "preview": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@r"
  },
  {
    "path": "MobileNetSSD_demo_single/settings.gradle",
    "chars": 15,
    "preview": "include ':app'\n"
  },
  {
    "path": "README.md",
    "chars": 515,
    "preview": "# ncnnforandroid_objectiondetection_Mobilenetssd\n利用Mobilenetssd目标检测框架,ncnn前向推理,android项目\n请看我的个人blog配套教程\n - [MobileNetSSD"
  }
]

// ... and 4 more files (download for full content)

About this extraction

This page contains the full source code of the chehongshu/ncnnforandroid_objectiondetection_Mobilenetssd GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction covers 93 files (209.5 KB, approximately 63.4k tokens) and includes a symbol index of 138 extracted functions, classes, methods, constants, and types. You can use this output with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input, either by copying it to your clipboard or by downloading it as a .txt file.

Extracted by GitExtract — a free GitHub-repo-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!