FFmpeg In Android - Decoding an H264 Stream / Rendering with OpenGL ES

The basic idea: FFmpeg decodes the H264 stream into a YUV420P frame, which is handed to OpenGL ES; the YUV -> RGB conversion is done inside the fragment shader, and the result is rendered as a texture. Doing the conversion on the GPU this way is more efficient than converting on the CPU. The core code is given below, after a short note on the plane layout it relies on.
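The texture uploads in videoFrameRender() below assume the three YUV420P planes sit contiguously in a single buffer, which is what av_image_alloc() with align = 1 produces. A minimal sketch of that layout, where buf, w and h are placeholder names for the frame buffer and its dimensions:

uint8_t *y_plane = buf;                         // Y: w * h bytes
uint8_t *u_plane = buf + w * h;                 // U: (w/2) * (h/2) bytes
uint8_t *v_plane = buf + w * h + (w * h) / 4;   // V: same size, i.e. buf + w*h*5/4

These are exactly the offsets the glTexImage2D() calls below compute from yuvDataBuf.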

#include "common.h"
#include "gl_util.h"

#ifdef __cplusplus
extern "C"
{
#endif
    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libavutil/imgutils.h>
    #include <libavutil/pixfmt.h>

#ifdef __cplusplus
};
#endif

auto gVertexShader =
        "attribute vec4 aPosition;\n"
        "attribute vec2 a_TexCoord;\n"
        "varying vec2 v_TexCoord;\n"
        "void main() {\n"
        "  gl_Position = aPosition;\n"
        "  v_TexCoord = a_TexCoord;\n"
        "}\n";

auto gFragmentShader =
        "precision mediump float;\n"
        "uniform sampler2D samplerY;\n"
        "uniform sampler2D samplerU;\n"
        "uniform sampler2D samplerV;\n"
        "varying vec2 v_TexCoord;\n"
        "void main() {\n"
        "   vec3 yuv;\n"
        "   vec3 rgb;\n"
        "   yuv.x = texture2D(samplerY, v_TexCoord).r;\n"
        "   yuv.y = texture2D(samplerU, v_TexCoord).r - 0.5;\n"
        "   yuv.z = texture2D(samplerV, v_TexCoord).r - 0.5;\n"
        "   rgb = mat3(1, 1, 1, 0, -0.39465,  2.03211, 1.13983, -0.58060, 0) * yuv;\n"
        "   gl_FragColor = vec4(rgb, 1);\n"
        "}\n";


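// A note on the fragment shader above: U and V are uploaded at half the width and
// height of Y (4:2:0 chroma subsampling), but the same v_TexCoord can sample all
// three textures, since GL texture coordinates are normalized and independent of
// texture resolution. Also, GLSL mat3 constructors are column-major: the columns
// here are (1, 1, 1), (0, -0.39465, 2.03211) and (1.13983, -0.58060, 0), i.e. the
// BT.601 full-range YUV -> RGB coefficients spelled out after the listing.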
AVPacket avpkt;
AVCodecContext *avctx;
const AVCodec *codec;
AVFrame *frame;
uint8_t *video_dst_data[4] = {NULL};
int      video_dst_linesize[4];
int video_dst_bufsize;


// Interleaved vertex data for a full-quad triangle strip: x, y, s, t per vertex.
// The t coordinate is written as 1 - t to flip the image vertically, since video
// rows are stored top-to-bottom while GL texture coordinates start at the bottom.
const GLfloat gTriangleVertices[] = {
        -0.5f, 0.5f,  0.0f, 1 - 1.0f,
        -0.5f, -0.5f, 0.0f, 1 - 0.0f,
        0.5f,  0.5f,  1.0f, 1 - 1.0f,
        0.5f,  -0.5f, 1.0f, 1 - 0.0f};

GLuint gProgram;
GLuint gaPositionHandle;
GLuint gaTexCoordHandle;
GLuint uTextureSamplerYHandle;
GLuint uTextureSamplerUHandle;
GLuint uTextureSamplerVHandle;


void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl)
{
    va_list vl2;
    char line[1024];
    static int print_prefix = 1;


    va_copy(vl2, vl);
    // av_log_default_callback(ptr, level, fmt, vl);
    av_log_format_line(ptr, level, fmt, vl2, line, sizeof(line), &print_prefix);
    va_end(vl2);

    LOGD("%s", line);

}


extern "C" JNIEXPORT void JNICALL
Java_com_example_king_ffmpegexample_GLESRenderUtil_init(
        JNIEnv* env,
        jobject obj) {

    av_register_all();
    av_log_set_callback(ffmpeg_log_callback);

    av_init_packet(&avpkt);

    codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (!codec) {
        LOGE("Codec not found");
        return;
    }

    avctx = avcodec_alloc_context3(codec);
    if (!avctx) {
        LOGE("Could not allocate video codec context\n");
        return;
    }

    if (codec->capabilities & AV_CODEC_CAP_TRUNCATED)
        avctx->flags |= AV_CODEC_FLAG_TRUNCATED; // we do not send complete frames

    // Initial dimensions for the test stream; the decoder overrides them from
    // the SPS in the bitstream.
    avctx->width = 240;
    avctx->height = 320;
    avctx->time_base.num = 1;
    avctx->codec_type = AVMEDIA_TYPE_VIDEO;
    avctx->bit_rate = 0;

    /* open it */
    if (avcodec_open2(avctx, codec, NULL) < 0) {
        LOGE("Could not open codec\n");
        return;
    }

    frame = av_frame_alloc();
    if (!frame) {
        LOGE("Could not allocate video frame\n");
        return;
    }

    int ret = av_image_alloc(video_dst_data, video_dst_linesize,
                             avctx->width, avctx->height, AV_PIX_FMT_YUV420P, 1);

    if (ret < 0) {
        LOGE("Could not allocate raw video buffer\n");
        return;
    }
    video_dst_bufsize = ret;
}

extern "C" JNIEXPORT void JNICALL
Java_com_example_king_ffmpegexample_GLESRenderUtil_resize(
        JNIEnv* env,
        jobject obj,
        jint width,
        jint height) {

    glViewport(0, 0, width, height);

    gProgram = linkProgram(gVertexShader, gFragmentShader);
    if (!gProgram) {
        LOGE("Could not create program.");
        return;
    }

    glUseProgram(gProgram);

    gaPositionHandle = glGetAttribLocation(gProgram, "aPosition");
    glVertexAttribPointer(gaPositionHandle, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), gTriangleVertices);
    glEnableVertexAttribArray(gaPositionHandle);

    gaTexCoordHandle = glGetAttribLocation(gProgram, "a_TexCoord");
    glVertexAttribPointer(gaTexCoordHandle, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), gTriangleVertices + 2);
    glEnableVertexAttribArray(gaTexCoordHandle);


    uTextureSamplerYHandle = glGetUniformLocation(gProgram, "samplerY");
    uTextureSamplerUHandle = glGetUniformLocation(gProgram, "samplerU");
    uTextureSamplerVHandle = glGetUniformLocation(gProgram, "samplerV");
}

void videoFrameRender(char * yuvDataBuf, int yuv_width, int yuv_height) {
    GLuint textureYID;
    GLuint textureUID;
    GLuint textureVID;

    glGenTextures(1, &textureYID);
    glGenTextures(1, &textureUID);
    glGenTextures(1, &textureVID);

    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, textureYID);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, yuv_width, yuv_height,
                 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, yuvDataBuf);
    glGenerateMipmap(GL_TEXTURE_2D);
    glUniform1i(uTextureSamplerYHandle, 0);


    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, textureUID);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, yuv_width/2, yuv_height/2,
                 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, yuvDataBuf + yuv_width * yuv_height);
    glGenerateMipmap(GL_TEXTURE_2D);
    glUniform1i(uTextureSamplerUHandle, 1);


    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, textureVID);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, yuv_width/2, yuv_height/2,
                 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, yuvDataBuf + yuv_width * yuv_height * 5 / 4);
    glGenerateMipmap(GL_TEXTURE_2D);
    glUniform1i(uTextureSamplerVHandle, 2);


    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    glBindTexture(GL_TEXTURE_2D, 0);
    glDeleteTextures(1, &textureYID);
    glDeleteTextures(1, &textureUID);
    glDeleteTextures(1, &textureVID);

}
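// Performance note (a sketch, not part of the original example): generating and
// deleting three textures on every frame, and rebuilding mipmaps for a
// GL_LINEAR_MIPMAP_LINEAR filter, is expensive. A common alternative is to create
// the textures once with plain GL_LINEAR filtering and update them per frame with
// glTexSubImage2D(). For widths not divisible by 4 the unpack alignment must also
// be relaxed. A hypothetical update path for the Y texture:
//
//     glPixelStorei(GL_UNPACK_ALIGNMENT, 1);       // rows are tightly packed
//     glBindTexture(GL_TEXTURE_2D, textureYID);    // now created once, at init
//     glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, yuv_width, yuv_height,
//                     GL_LUMINANCE, GL_UNSIGNED_BYTE, yuvDataBuf);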

extern "C" JNIEXPORT void JNICALL
Java_com_example_king_ffmpegexample_GLESRenderUtil_step(
        JNIEnv* env,
        jobject obj,
        jbyteArray in,
        jint len) {

    if (len > 0) {
        jbyte *buf = env->GetByteArrayElements(in, NULL);
        if (buf) {
            avpkt.size = len;
            avpkt.data = (uint8_t *) buf;
            int got_frame = 0;

            int decodedLen = avcodec_decode_video2(avctx, frame, &got_frame, &avpkt);
            if (decodedLen < 0) {
                LOGE("Error while decoding frame");
            }
            if (got_frame) {

                LOGD("got_frame----------------------step");
                av_image_copy(video_dst_data, video_dst_linesize,
                              (const uint8_t **) (frame->data), frame->linesize,
                              AV_PIX_FMT_YUV420P, avctx->width, avctx->height);

                videoFrameRender((char*)video_dst_data[0], avctx->width, avctx->height);
            }

            env->ReleaseByteArrayElements(in, buf, 0);
        }
    }
}

extern "C" JNIEXPORT void JNICALL
Java_com_example_king_ffmpegexample_GLESRenderUtil_release(
        JNIEnv* env,
        jobject obj) {

    avcodec_free_context(&avctx);
    av_frame_free(&frame);
    av_free(video_dst_data[0]);
}
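A caveat on the decode path: avcodec_decode_video2() has been deprecated since FFmpeg 3.1, and av_register_all() and the TRUNCATED flag were dropped in later releases. A minimal sketch of the equivalent decode step in step() using the send/receive API, reusing the same globals (an untested adaptation, not part of the original example):

    if (avcodec_send_packet(avctx, &avpkt) < 0) {
        LOGE("Error while sending packet");
    }
    // One packet can produce zero or more frames; drain them all.
    while (avcodec_receive_frame(avctx, frame) == 0) {
        av_image_copy(video_dst_data, video_dst_linesize,
                      (const uint8_t **) frame->data, frame->linesize,
                      AV_PIX_FMT_YUV420P, avctx->width, avctx->height);
        videoFrameRender((char *) video_dst_data[0], avctx->width, avctx->height);
    }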

The YUV -> RGB formulas used in the shader, implemented there as a single matrix multiply, are:

R = Y + 1.13983 * V
G = Y - 0.39465 * U - 0.58060 * V
B = Y + 2.03211 * U
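As a sanity check of the shader matrix, the same conversion for one pixel on the CPU (a standalone sketch; y, u and v are samples normalized to [0, 1], with u and v already shifted by -0.5 exactly as in the shader):

    float r = y + 1.13983f * v;
    float g = y - 0.39465f * u - 0.58060f * v;
    float b = y + 2.03211f * u;
    // The GPU clamps gl_FragColor to [0, 1] when writing to a fixed-point
    // framebuffer; on the CPU the clamp has to be explicit.
    r = fminf(fmaxf(r, 0.0f), 1.0f);
    g = fminf(fmaxf(g, 0.0f), 1.0f);
    b = fminf(fmaxf(b, 0.0f), 1.0f);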

The complete source for this example: H264AndroidGLActivity, gl_render.cpp

Reposted from blog.csdn.net/kingdam578/article/details/84070826