37. Using Android Studio and Tencent's official ncnn package to run Yolo-Fastest

Basic idea: I wanted to measure how fast Yolo-Fastest runs on Android. Since I did not want to port the source code of https://github.com/dog-qiuqiu/Yolo-Fastest directly, I used Tencent's prebuilt ncnn static package to port the yolo-fastest demo from that repo's ncnn_example, taking the release package from https://github.com/Tencent/ncnn/releases

Step 1: Create an Android Studio project and import the OpenCV 4.4 module (both the Java side and the C++ side; for details see https://blog.csdn.net/sxj731533730/article/details/108380540)

Then download ncnn-android-lib.zip from the official releases page https://github.com/Tencent/ncnn/releases and unzip it:

ubuntu@ubuntu:$ tree -L 2
.
├── arm64-v8a
│   └── libncnn.a
├── armeabi-v7a
│   └── libncnn.a
├── include
│   └── ncnn
├── x86
│   └── libncnn.a
└── x86_64
    └── libncnn.a

6 directories, 4 files

Then import it into the Android Studio project. I kept the same layout as the OpenCV 4.4 import and did not change the directory structure, only copied files: the ncnn headers go into cpp/include and the per-ABI libncnn.a files go into jniLibs/libs/<abi>/ alongside the OpenCV libraries:

ubuntu@ubuntu:~/AndroidStudioProjects/ncnn/app/src/main$ tree -L 3
.
├── AndroidManifest.xml
├── cpp
│   ├── CMakeLists.txt
│   ├── include
│   │   ├── glslang
│   │   ├── ncnn
│   │   ├── opencv2
│   │   └── SPIRV
│   └── native-lib.cpp
├── java
│   └── com
│       └── yolofastest
├── jniLibs
│   └── libs
│       ├── arm64-v8a
│       ├── armeabi-v7a
│       ├── x86
│       └── x86_64
└── res
    ├── drawable
    │   └── ic_launcher_background.xml
    ├── drawable-v24
    │   └── ic_launcher_foreground.xml
    ├── layout
    │   └── activity_main.xml
    ├── mipmap-anydpi-v26
    │   ├── ic_launcher_round.xml
    │   └── ic_launcher.xml
    ├── mipmap-hdpi
    │   ├── ic_launcher.png
    │   └── ic_launcher_round.png
    ├── mipmap-mdpi
    │   ├── ic_launcher.png
    │   └── ic_launcher_round.png
    ├── mipmap-xhdpi
    │   ├── ic_launcher.png
    │   └── ic_launcher_round.png
    ├── mipmap-xxhdpi
    │   ├── ic_launcher.png
    │   └── ic_launcher_round.png
    ├── mipmap-xxxhdpi
    │   ├── ic_launcher.png
    │   └── ic_launcher_round.png
    └── values
        ├── colors.xml
        ├── strings.xml
        └── styles.xml

26 directories, 21 files

The corresponding jniLibs directory structure is:

ubuntu@ubuntu:~/AndroidStudioProjects/ncnn/app/src/main/jniLibs$ tree -L 3
.
└── libs
    ├── arm64-v8a
    │   ├── libglslang.a
    │   ├── libncnn.a
    │   ├── libOGLCompiler.a
    │   ├── libopencv_java4.so
    │   ├── libOSDependent.a
    │   └── libSPIRV.a
    ├── armeabi-v7a
    │   ├── libglslang.a
    │   ├── libncnn.a
    │   ├── libOGLCompiler.a
    │   ├── libopencv_java4.so
    │   ├── libOSDependent.a
    │   └── libSPIRV.a
    ├── x86
    │   ├── libglslang.a
    │   ├── libncnn.a
    │   ├── libOGLCompiler.a
    │   ├── libopencv_java4.so
    │   ├── libOSDependent.a
    │   └── libSPIRV.a
    └── x86_64
        ├── libglslang.a
        ├── libncnn.a
        ├── libOGLCompiler.a
        ├── libopencv_java4.so
        ├── libOSDependent.a
        └── libSPIRV.a

5 directories, 24 files

Modify the corresponding build.gradle (the externalNativeBuild and ndk blocks below sit inside android { defaultConfig { ... } }, while sourceSets sits directly under android { }):

        externalNativeBuild {
            cmake {
                cppFlags "-std=c++11"
                //arguments '-DANDROID_STL=c++_shared'
                //abiFilters 'armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'
            }
            ndk {
                abiFilters 'armeabi-v7a'  // x86 armeabi arm64-v8a x86_64
            }
        }
    }
    sourceSets {
        main {
            jniLibs.srcDirs = ["src/main/jniLibs/libs"]
        }
    }

Then configure the corresponding CMakeLists.txt:

# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html

# Sets the minimum version of CMake required to build the native library.

cmake_minimum_required(VERSION 3.4.1)

# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.

# Add the header search path (the OpenCV and ncnn headers live under cpp/include)
include_directories(${CMAKE_SOURCE_DIR}/include)

# Import the prebuilt OpenCV shared library (.so)
add_library(libopencv_java4 SHARED IMPORTED)
set_target_properties(libopencv_java4 PROPERTIES IMPORTED_LOCATION
        ${CMAKE_SOURCE_DIR}/../jniLibs/libs/${ANDROID_ABI}/libopencv_java4.so)

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fopenmp")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fopenmp")
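# ncnn uses OpenMP; on NDK r21 and newer the OpenMP runtime has to be linked statically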
if(DEFINED ANDROID_NDK_MAJOR AND ${ANDROID_NDK_MAJOR} GREATER 20)
    set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -static-openmp")
endif()

# Import the prebuilt ncnn static library (.a)
add_library(libncnn STATIC IMPORTED)
set_target_properties(libncnn PROPERTIES IMPORTED_LOCATION
        ${CMAKE_SOURCE_DIR}/../jniLibs/libs/${ANDROID_ABI}/libncnn.a)

find_library(android-lib android)
add_library( # Sets the name of the library.
        native-lib

        # Sets the library as a shared library.
        SHARED

        # Provides a relative path to your source file(s).
        native-lib.cpp )

# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.

find_library( # Sets the name of the path variable.
        log-lib

        # Specifies the name of the NDK library that
        # you want CMake to locate.
        log )

# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.

target_link_libraries( # Specifies the target library.
        native-lib
        jnigraphics
        libopencv_java4 # link the prebuilt OpenCV .so
        libncnn         # link the static ncnn .a
        # Links the target library to the log library
        # included in the NDK.
        ${log-lib}
        ${android-lib}
        )

Then write native-lib.cpp, which uses Tencent's ncnn to load the Yolo-Fastest model and run detection:

#include <jni.h>
#include <string>
#include <iostream>
#include <opencv2/core/core.hpp>
#include <opencv2/opencv.hpp>
#include <ncnn/benchmark.h>
#include <ncnn/cpu.h>
#include <ncnn/datareader.h>
#include <ncnn/net.h>
#include <ncnn/gpu.h>

#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <cstdio>
#include <vector>
#include <algorithm>


using namespace cv;
using namespace std;

extern "C" JNIEXPORT jstring JNICALL
Java_com_yolofastest_ncnn_MainActivity_stringFromJNI(
        JNIEnv* env,
        jobject /* this */) {
    std::string hello = "Hello from C++";

    static const char* class_names[] = {"background",
                                        "aeroplane", "bicycle", "bird", "boat",
                                        "bottle", "bus", "car", "cat", "chair",
                                        "cow", "diningtable", "dog", "horse",
                                        "motorbike", "person", "pottedplant",
                                        "sheep", "sofa", "train", "tvmonitor"
    };
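    // (the labels above are the 20 Pascal VOC categories plus a background entry)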

    ncnn::Net detector;
    detector.load_param("/sdcard/1/yolo-fastest.param");
    detector.load_model("/sdcard/1/yolo-fastest.bin");
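    // NOTE: the model files (and the test image below) are loaded from /sdcard/1/,
    // so the app needs external-storage read/write permission granted before this runs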
    int detector_size_width  = 320;
    int detector_size_height = 320;
    cv::Mat image=imread("/sdcard/1/dog.jpg");
    cv::Mat bgr = image.clone();
    int img_w = bgr.cols;
    int img_h = bgr.rows;

    ncnn::Mat in = ncnn::Mat::from_pixels_resize(bgr.data, ncnn::Mat::PIXEL_BGR2RGB,\
                                                 bgr.cols, bgr.rows, detector_size_width, detector_size_height);

    // preprocessing: no mean subtraction, just scale pixel values to [0, 1]
    const float mean_vals[3] = {0.f, 0.f, 0.f};
    const float norm_vals[3] = {1/255.f, 1/255.f, 1/255.f};
    in.substract_mean_normalize(mean_vals, norm_vals);

    ncnn::Extractor ex = detector.create_extractor();

    ex.set_num_threads(8);
    ex.input("data", in);
    ncnn::Mat out;
    ex.extract("output", out);
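    // each row of the output blob is one detection:
    // [class id, confidence, x1, y1, x2, y2], with box coordinates normalized to [0, 1]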

    for (int i = 0; i < out.h; i++)
    {
        int label;
        float x1, y1, x2, y2, score;
        float pw,ph,cx,cy;
        const float* values = out.row(i);

        x1 = values[2] * img_w;
        y1 = values[3] * img_h;
        x2 = values[4] * img_w;
        y2 = values[5] * img_h;

        score = values[1];
        label = values[0];

        // clamp the box coordinates to the image bounds
        if(x1<0) x1=0;
        if(y1<0) y1=0;
        if(x2<0) x2=0;
        if(y2<0) y2=0;

        if(x1>img_w) x1=img_w;
        if(y1>img_h) y1=img_h;
        if(x2>img_w) x2=img_w;
        if(y2>img_h) y2=img_h;
        cv::rectangle (image, cv::Point(x1, y1), cv::Point(x2, y2), cv::Scalar(255, 255, 0), 1, 1, 0);

        char text[256];
        sprintf(text, "%s %.1f%%", class_names[label], score * 100);
        int baseLine = 0;
        cv::Size label_size = cv::getTextSize(text, cv::FONT_HERSHEY_SIMPLEX, 0.5, 1, &baseLine);
        cv::putText(image, text, cv::Point(x1, y1 + label_size.height),
                    cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 0));
    }

    cv::imwrite("/sdcard/1/demo.jpg", image);
    return env->NewStringUTF(hello.c_str());
}
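
Since the whole point was to measure Yolo-Fastest's runtime on the phone, below is a minimal timing sketch (my own addition, not part of the original demo). It reuses ncnn::get_current_time() from benchmark.h, which is already included above, and prints the average latency to logcat via <android/log.h>; the log tag and the loop count are arbitrary choices.

#include <android/log.h>

// run the extractor several times and log the average inference latency in milliseconds
static double benchmark_yolo_fastest(ncnn::Net &detector, const ncnn::Mat &in, int loops = 10)
{
    double total_ms = 0.0;
    for (int i = 0; i < loops; i++)
    {
        // a fresh extractor per run so each forward pass is actually re-executed
        ncnn::Extractor ex = detector.create_extractor();
        ex.set_num_threads(8);
        ex.input("data", in);

        double start = ncnn::get_current_time();
        ncnn::Mat out;
        ex.extract("output", out);   // the forward pass happens inside extract()
        total_ms += ncnn::get_current_time() - start;
    }
    double avg_ms = total_ms / loops;
    __android_log_print(ANDROID_LOG_INFO, "yolo-fastest", "average inference time: %.2f ms", avg_ms);
    return avg_ms;
}

Calling benchmark_yolo_fastest(detector, in) right after the preprocessing step above gives a rough per-frame latency number for the device.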

With all the changes done, I ran the app from Android Studio on my OPPO R9 phone to see Yolo-Fastest detect our watchdog (the canine kind, not the software kind), haha.

Source code for this project:

Link: https://pan.baidu.com/s/1Ot_JjBQ75zks_d40SJ-YEQ
Extraction code: msev

And the model/configuration files:

Link: https://pan.baidu.com/s/1VyVUfByxotiTAwQ6BrCiYA
Extraction code: 0oca


Reposted from blog.csdn.net/sxj731533730/article/details/108616226