本章实现在上一节Android音视频-视频采集(OpenGL ES渲染)的基础上实现硬编码(MediaCodec)和软编码(FFMpeg)的功能。之前有写过一篇在应用层使用MediaCodec的硬编码,可以结合参考看一下Android音视频-视频编解码(H.264视频硬编硬解)
整体框架设计
这个的实现基于上一篇的代码基础,代码链接在文末列出,先看整体设计:
主要的调度类文件为mv_recording_preview_controller.cpp
主要控制调用逻辑方法以及调用步骤为上面的两个类的注释部分。
通过下面的方法选择是硬件编码还是软件编码方式:
/**
 * Create the requested encoder adapter and start recording.
 *
 * @param h264FilePath        destination path for the encoded H.264 output
 * @param width               frame width in pixels
 * @param height              frame height in pixels
 * @param videoBitRate        target bitrate (presumably bits per second — confirm with caller)
 * @param frameRate           target frames per second
 * @param useHardWareEncoding true  -> HWEncoderAdapter (MediaCodec, hardware path)
 *                            false -> SoftEncoderAdapter (FFmpeg/x264, software path)
 */
void MVRecordingPreviewController::startEncoding(const char* h264FilePath, int width, int height, int videoBitRate, float frameRate, bool useHardWareEncoding) {
    // Tear down any previous encoder before installing a new one.
    if (nullptr != encoder) {
        delete encoder;
        encoder = nullptr;
    }
    if (useHardWareEncoding) {
        // Hardware path needs the JavaVM and the Java object to call back
        // into MediaCodec on the Java side.
        encoder = new HWEncoderAdapter(g_jvm, obj);
    } else {
        encoder = new SoftEncoderAdapter();
    }
    encoder->init(h264FilePath, width, height, videoBitRate, frameRate);
    // Post the start message only if the handler thread exists.
    if (handler)
        handler->postMessage(new Message(MSG_START_RECORDING));
}
上层调用的时候传递想要的编码方式即可。
底层代码导入
底层使用C++的实现,先看一下项目的主要整体结构
其中导入的libffmpeg.so库包含了libx264库,因为FFmpeg支持H264的解码但不支持H264的编码,所以还得结合之前编译FFmpeg的例子把libx264整合到libffmpeg.so中,步骤如下:
#!/bin/bash
# Cross-compile libx264 as a static, position-independent library for
# Android (ARM, API level 16), so it can later be linked into libffmpeg.so.

# Path to the Android NDK — adjust to your local install location.
NDK=/Users/yehu/Downloads/android-ndk-r15c
# Sysroot: Android platform headers and libraries for API 16 / ARM.
PLATFORM=$NDK/platforms/android-16/arch-arm
# Prebuilt GCC 4.9 cross toolchain (darwin-x86_64 = macOS host).
TOOLCHAIN=$NDK/toolchains/arm-linux-androideabi-4.9/prebuilt/darwin-x86_64
# Install prefix: headers land in ./android/arm/include, libs in ./android/arm/lib.
PREFIX=./android/arm

# Configure and build x264: static-only, with PIC enabled (required when the
# archive is later linked into a shared library). ADDITIONAL_CONFIGURE_FLAG is
# empty unless exported by the caller.
function build_one
{
./configure \
--prefix=$PREFIX \
--disable-shared \
--enable-static \
--enable-pic \
--enable-strip \
--enable-thread \
--enable-asm \
--host=arm-linux-androideabi \
--cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
--sysroot=$PLATFORM \
--extra-cflags="-Os -fpic" \
--extra-ldflags="" \
$ADDITIONAL_CONFIGURE_FLAG
# Clean, build in parallel, then install into $PREFIX.
make clean
make -j4
make install
}
build_one
编译完成,会出现错误:
更改libx264下面的config.h文件
#define HAVE_LOG2F 1
为#define HAVE_LOG2F 0
然后命令行执行
make && make install
- 在ffmpeg根目录文件下建立编译文件build_android.sh,内容如下:
#!/bin/bash
# Cross-compile FFmpeg for Android (armeabi-v7a) with libx264 statically
# linked in, then merge all component archives into a single libffmpeg.so.

# NDK的路径,根据自己的安装位置进行设置
# (Path to the NDK — set according to your own install location.)
NDK=/Users/yehu/Downloads/android-ndk-r15c
# Sysroot: Android platform headers and libraries for API 16 / ARM.
PLATFORM=$NDK/platforms/android-16/arch-arm
# Prebuilt GCC 4.9 cross toolchain (macOS host).
TOOLCHAIN=$NDK/toolchains/arm-linux-androideabi-4.9/prebuilt/darwin-x86_64
# Absolute path of this script's directory, used to locate the x264 output
# produced by the previous build step.
basepath=$(cd `dirname $0`; pwd)
X264_INCLUDE=$basepath/libx264/android/arm/include
X264_LIB=$basepath/libx264/android/arm/lib

# Configure FFmpeg: static libs only, GPL (required for libx264), with an
# allowlist of encoders/decoders/muxers/demuxers/parsers/protocols to keep
# the binary small. ADDI_CFLAGS/PREFIX are set by the caller below.
function build_one
{
./configure \
--prefix=$PREFIX \
--arch=arm \
--cpu=armv7-a \
--target-os=android \
--enable-cross-compile \
--cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
--sysroot=$PLATFORM \
--extra-cflags="-I$X264_INCLUDE -I$PLATFORM/usr/include" \
--extra-ldflags="-L$X264_LIB" \
--cc=$TOOLCHAIN/bin/arm-linux-androideabi-gcc \
--nm=$TOOLCHAIN/bin/arm-linux-androideabi-nm \
--disable-shared \
--enable-static \
--enable-gpl \
--enable-version3 \
--enable-pthreads \
--enable-runtime-cpudetect \
--disable-small \
--disable-network \
--disable-vda \
--disable-iconv \
--enable-asm \
--enable-neon \
--enable-yasm \
--disable-encoders \
--enable-libx264 \
--enable-encoder=libx264 \
--enable-encoder=aac \
--enable-encoder=mpeg4 \
--enable-encoder=mjpeg \
--enable-encoder=png \
--disable-muxers \
--enable-muxer=mov \
--enable-muxer=mp4 \
--enable-muxer=adts \
--enable-muxer=h264 \
--enable-muxer=mjpeg \
--disable-decoders \
--enable-decoder=aac \
--enable-decoder=aac_latm \
--enable-decoder=mp3 \
--enable-decoder=h264 \
--enable-decoder=mpeg4 \
--enable-decoder=mjpeg \
--enable-decoder=png \
--disable-demuxers \
--enable-demuxer=image2 \
--enable-demuxer=h264 \
--enable-demuxer=aac \
--enable-demuxer=mp3 \
--enable-demuxer=mpc \
--enable-demuxer=mpegts \
--enable-demuxer=mov \
--disable-parsers \
--enable-parser=aac \
--enable-parser=ac3 \
--enable-parser=h264 \
--disable-protocols \
--enable-protocol=file \
--enable-protocol=concat \
--enable-filters \
--enable-zlib \
--disable-outdevs \
--disable-doc \
--disable-ffplay \
--disable-ffmpeg \
--disable-ffserver \
--disable-debug \
--disable-ffprobe \
--disable-postproc \
--disable-avdevice \
--disable-symver \
--disable-stripping \
--extra-cflags="-Os -fpic $ADDI_CFLAGS" \
--extra-ldflags="$ADDI_LDFLAGS" \
$ADDITIONAL_CONFIGURE_FLAG
make clean
make -j8
make install
# Invoke the cross ld directly to merge every FFmpeg component archive plus
# libx264.a into one shared library (libffmpeg.so). --whole-archive keeps all
# symbols; the Android system linker and Bionic libc/libm/libz/liblog are
# supplied explicitly because -nostdlib is used.
$TOOLCHAIN/bin/arm-linux-androideabi-ld \
-rpath-link=$PLATFORM/usr/lib \
-L$PLATFORM/usr/lib \
-L$PREFIX/lib \
-L$X264_LIB \
-soname libffmpeg.so -shared -nostdlib -Bsymbolic --whole-archive --no-undefined -o \
$PREFIX/libffmpeg.so \
libavcodec/libavcodec.a \
libavfilter/libavfilter.a \
libswresample/libswresample.a \
libavformat/libavformat.a \
libavutil/libavutil.a \
libswscale/libswscale.a \
libx264/libx264.a \
-lc -lm -lz -ldl -llog --dynamic-linker=/system/bin/linker \
$TOOLCHAIN/lib/gcc/arm-linux-androideabi/4.9.x/libgcc.a
}
# arm v7vfp
# Target-specific flags for armeabi-v7a with VFP, then run the build.
CPU=arm-v7a
OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=vfp -marm -march=armv7-a "
ADDI_CFLAGS="-marm"
PREFIX=./android/$CPU
build_one
编译完成可以拿到我们要的带libx264的libffmpeg.so了
- 配置项目的CMakeLists.txt文件如下:
# Native build for the camera preview + video encoder module.
# Produces three shared libraries layered as:
#   camerapreview -> videoencoder -> commontool (+ imported libffmpeg).
cmake_minimum_required(VERSION 3.4.1)
find_library( # Sets the name of the path variable.
log-lib
# Specifies the name of the NDK library that
# you want CMake to locate.
log )
# Root directory of the C++ sources inside the module.
set(PATH_TO_ROOT ${CMAKE_SOURCE_DIR}/src/main/cpp)
include_directories(${PATH_TO_ROOT}/libcommon/)
# Gather the shared utility sources: EGL core, message queue, and the
# OpenGL/OpenSL media helpers (render, texture, texture copier).
file(GLOB FILES_LIB_COMMON "${PATH_TO_ROOT}/libcommon/*.cpp")
file(GLOB FILES_LIB_COMMON_EGL_CORE "${PATH_TO_ROOT}/libcommon/egl_core/*.cpp")
file(GLOB FILES_LIB_COMMON_MSG_Q "${PATH_TO_ROOT}/libcommon/message_queue/*.cpp")
file(GLOB FILES_LIB_COMMON_GL_MEDIA "${PATH_TO_ROOT}/libcommon/opengl_media/*.cpp")
file(GLOB FILES_LIB_COMMON_GL_MEDIA_RENDER "${PATH_TO_ROOT}/libcommon/opengl_media/render/*.cpp")
file(GLOB FILES_LIB_COMMON_GL_MEDIA_TEXTURE "${PATH_TO_ROOT}/libcommon/opengl_media/texture/*.cpp")
file(GLOB FILES_LIB_COMMON_GL_MEDIA_TEXTURE_COPIER "${PATH_TO_ROOT}/libcommon/opengl_media/texture_copier/*.cpp")
file(GLOB FILES_LIB_COMMON_SL_MEDIA "${PATH_TO_ROOT}/libcommon/opensl_media/*.cpp")
add_library( # Sets the name of the library.
commontool
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
${FILES_LIB_COMMON}
${FILES_LIB_COMMON_EGL_CORE}
${FILES_LIB_COMMON_MSG_Q}
${FILES_LIB_COMMON_GL_MEDIA}
${FILES_LIB_COMMON_GL_MEDIA_RENDER}
${FILES_LIB_COMMON_GL_MEDIA_TEXTURE}
${FILES_LIB_COMMON_GL_MEDIA_TEXTURE_COPIER}
${FILES_LIB_COMMON_SL_MEDIA}
)
# Include libraries needed for renderer lib
target_link_libraries(
commontool
${log-lib}
android
GLESv2
EGL
OpenSLES)
# Prebuilt FFmpeg (with libx264 baked in) is pulled in as an IMPORTED target
# pointing at the .so under jniLibs/armeabi-v7a.
include_directories(${PATH_TO_ROOT}/3rdparty/ffmpeg/include)
add_library(
libffmpeg
SHARED
IMPORTED
)
set_target_properties(libffmpeg
PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/src/main/jniLibs/armeabi-v7a/libffmpeg.so
)
# Encoder sources: common adapter code, hardware (MediaCodec) path, and
# software (FFmpeg/x264) path including its color-conversion helpers.
file(GLOB FILES_LIB_VIDEO_ENCODER "${PATH_TO_ROOT}/video_encoder/*.cpp")
file(GLOB FILES_LIB_VIDEO_ENCODER_HW "${PATH_TO_ROOT}/video_encoder/hw_encoder/*.cpp")
file(GLOB FILES_LIB_VIDEO_ENCODER_SOFT "${PATH_TO_ROOT}/video_encoder/soft_encoder/*.cpp")
file(GLOB FILES_LIB_VIDEO_ENCODER_SOFT_COLOR "${PATH_TO_ROOT}/video_encoder/soft_encoder/color_conversion/*.cpp")
add_library( # Sets the name of the library.
videoencoder
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
${FILES_LIB_VIDEO_ENCODER}
${FILES_LIB_VIDEO_ENCODER_HW}
${FILES_LIB_VIDEO_ENCODER_SOFT}
${FILES_LIB_VIDEO_ENCODER_SOFT_COLOR}
)
target_link_libraries(
videoencoder
commontool
libffmpeg)
# Top-level preview library: camera preview sources plus the JNI scheduler
# entry point; links against the encoder layer.
include_directories(${PATH_TO_ROOT}/camera_preview/)
file(GLOB FILES_LIB_CAMERA_PREVIEW "${PATH_TO_ROOT}/camera_preview/*.cpp")
add_library( # Sets the name of the library.
camerapreview
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
${FILES_LIB_CAMERA_PREVIEW}
${PATH_TO_ROOT}/LPreviewScheduler.cpp
)
# Include libraries needed for renderer lib
target_link_libraries(
camerapreview
videoencoder)
软编码实现
软编码的主要实现类文件为soft_encoder_adapter.cpp
该类实现的主要逻辑如下图:
对照这个图片再去看代码可以很轻松的梳理出执行的大体逻辑
有一个关键难点的部分是Camera的数据是如何通过纹理拷贝线程过来的。
扫描二维码关注公众号,回复:
2416947 查看本文章
- 初始化OpenGL ES的上下文环境,绑定到纹理拷贝线程(通过createEncoder方法传入EGLCore)
- 拷贝线程的纹理使用共享的上下文EGLContext,它可以包含纹理对象、帧缓存对象等等
- 根据传递的上下文创建拷贝纹理Surface,初始化新线程的copyTexSurface
// Set up the off-screen EGL/GL ES environment for the texture-copy thread of
// the software-encoding path: a shared-context EGLCore, an off-screen surface,
// a renderer, an FBO, and the RGBA target texture (outputTexId).
// Returns true unconditionally — GL/EGL errors are not checked here.
bool SoftEncoderAdapter::initialize() {
// Bytes for one frame: width * height * bytes-per-pixel.
pixelSize = videoWidth * videoHeight * PIXEL_BYTE_SIZE;
hostGPUCopier = new HostGPUCopier();
// Create an EGL context sharing objects (textures, FBOs, ...) with the
// preview thread's context, passed in as loadTextureContext.
eglCore = new EGLCore();
eglCore->init(loadTextureContext);
// Off-screen surface at the encode resolution; make it current on this
// thread before any GL calls below.
copyTexSurface = eglCore->createOffscreenSurface(videoWidth, videoHeight);
eglCore->makeCurrent(copyTexSurface);
renderer = new VideoGLSurfaceRender();
renderer->init(videoWidth, videoHeight);
glGenFramebuffers(1, &mFBO);
// Initialize outputTexId: an RGBA texture that receives the copied camera
// frame — linear filtering, edge-clamped, storage allocated with no data.
glGenTextures(1, &outputTexId);
glBindTexture(GL_TEXTURE_2D, outputTexId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, videoWidth, videoHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glBindTexture(GL_TEXTURE_2D, 0);
return true;
}
后面的关键部分就是OpenGL ES的操作了,现在暂时不仔细研究那部分的代码了,以后再补充,另外该项目的代码运行有一点问题,之后一起处理
硬编码实现
硬编码在这里的实现主要类文件为hw_encoder_adapter.cpp,在底层和类MVRecordingPreviewController的交互逻辑和软编码一样,这里主要梳理它的类本身的调用逻辑。如下图所示:
上层首先通过底层的纹理ID来绑定到Camera上,然后把Camera的数据传递给底层,底层再通过回调上层的硬编码方法编码数据,底层拿到编码好的数据再存储到文件。
硬编码的代码执行起来没有任何问题,但是生成的文件却无法播放,问题还不知道在哪,以后可以再排查
总结
这个代码的学习有几个没有处理的问题,软编码代码运行出错,硬编码生成文件无法播放,应该是编码的时候代码逻辑出了问题,暂时没有解决这个问题,后面再处理。代码的逻辑梳理对照还是不难,这里的难点是OpenGL ES部分的代码,这个还得加强自己对OpenGL ES的知识,才能完全消化这个代码。
本文代码:
camerapreviewrecord
参考代码:
Android-CameraPreviewRecorder