六、音视频同步

版权声明:本文为博主原创文章,未经博主允许不得转载。 https://blog.csdn.net/likuan0214/article/details/78471680

FFmpegAudio.h

//
// Created by ygdx_lk on 17/11/4.
//

#ifndef VIDEOPLAYER_FFMPEGAUDIO_H
#define VIDEOPLAYER_FFMPEGAUDIO_H


#include <queue>
#include <jni.h>
#include <unistd.h>
#include <string>
#include <SLES/OpenSLES_Android.h>

extern "C"{
#include "Log.h"
#include <pthread.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswresample/swresample.h>

class FFmpegAudio{
public:
    FFmpegAudio();

    ~FFmpegAudio();

    int enQueue(const AVPacket *avPacket);

    int deQueue(AVPacket *avPacket);

    void play();

    void setCodec(AVCodecContext *avCodecContext);

    double getClock();

    void setTimeBase(AVRational avRational);

    void stop();

    int createPlayer();

public:
    AVCodecContext *avCodecContext;

    int isPlay;
    //音频播放线程
    pthread_t p_playid;
    //音频解码队列
    std::queue<AVPacket*> queue;

    AVRational avRational;

    int index;
    JavaVM *vm;

    //同步锁
    pthread_mutex_t mutex;
    //条件变量
    pthread_cond_t cond;

    SwrContext *swrContext;
    uint8_t *out_buffer;
    int out_channer_nb;
//    相对于第一帧时间
    double clock;

    AVRational time_base;

    SLObjectItf engineObject;
    SLEngineItf engineEngine;
    SLEnvironmentalReverbItf outputMixEnvironmentalReverb;
    SLObjectItf outputMixObject;
    SLObjectItf bqPlayerObject;
    SLEffectSendItf bqPlayerEffectSend;
    SLVolumeItf bqPlayerVolume;
    SLPlayItf bqPlayerPlay;
    SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;

};

};

void *play_audio(void *arg);
int getPcm(FFmpegAudio *audio);
int createFFmpeg(FFmpegAudio *audio);
void bqPlayerCallBack(SLAndroidSimpleBufferQueueItf bq, void *context);

#endif //VIDEOPLAYER_FFMPEGAUDIO_H

FFmpegAudio.cpp

//
// Created by ygdx_lk on 17/11/4.
//

#include "FFmpegAudio.h"

FFmpegAudio::FFmpegAudio() {
    clock = 0;
    pthread_mutex_init(&mutex, NULL);
    pthread_cond_init(&cond, NULL);
}

int getPcm(FFmpegAudio *audio){
    int got_frame;
    int size;
    AVPacket *packet = (AVPacket *)av_mallocz(sizeof(AVPacket));
    AVFrame *avFrame = av_frame_alloc();
    while (audio->isPlay){
        size = 0;
        audio->deQueue(packet);

        if(packet->pts != AV_NOPTS_VALUE){
            audio->clock = av_q2d(audio->time_base) * packet->pts;
        }

//            int avcodec_decode_audio4(AVCodecContext *avctx, AVFrame *frame,
//                                      int *got_frame_ptr, const AVPacket *avpkt);
        avcodec_decode_audio4(audio->avCodecContext, avFrame, &got_frame, packet);
        if(got_frame){
//                int swr_convert(struct SwrContext *s, uint8_t **out, int out_count,
//                                const uint8_t **in , int in_count);
            swr_convert(audio->swrContext, &audio->out_buffer, 44100 * 2, (const uint8_t **) avFrame->data, avFrame->nb_samples);
//                int av_samples_get_buffer_size(int *linesize, int nb_channels, int nb_samples,
//                                               enum AVSampleFormat sample_fmt, int align);
            size = av_samples_get_buffer_size(NULL, audio->out_channer_nb, avFrame->nb_samples, AV_SAMPLE_FMT_S16, 1);
            break;
        }
    }
    av_free_packet(packet);
    av_frame_free(&avFrame);
    return size;
}

FFmpegAudio::~FFmpegAudio(){
    if(out_buffer){
        free(out_buffer);
    }
    for (int i = 0; i < queue.size(); ++i) {
        AVPacket *packet = queue.front();
        queue.pop();
        av_free_packet(packet);
    }
    pthread_cond_destroy(&cond);
    pthread_mutex_destroy(&mutex);
}


/**
 * 压入一帧数据
 * @param avPacket
 * @return
 */
int FFmpegAudio::enQueue(const AVPacket *avPacket) {
    AVPacket *packet = (AVPacket *)av_mallocz(sizeof(AVPacket));
    //拷贝
    if(av_packet_ref(packet, avPacket)){
        return 0;
    }
    pthread_mutex_lock(&mutex);
    queue.push(packet);
    //通知播放音频
    pthread_cond_signal(&cond);
    pthread_mutex_unlock(&mutex);
    return 1;
}


int FFmpegAudio::deQueue(AVPacket *avPacket){
    pthread_mutex_lock(&mutex);
    while (isPlay){
        if(!queue.empty()){
            //从队列取出一个packet,clone一个给入参对象
            if(av_packet_ref(avPacket, queue.front())){
                break;
            }

            //取成功了 弹出队列 销毁packet
            AVPacket *packet = queue.front();
            queue.pop();
            av_free(packet);
            break;
        } else{
            //如果队列里面没有数据的话,一直等待,阻塞。
            pthread_cond_wait(&cond, &mutex);
        }
    }
    pthread_mutex_unlock(&mutex);
    return 0;
}

void FFmpegAudio::play(){
    isPlay = 1;
    pthread_create(&p_playid, NULL, play_audio, this);
}

void FFmpegAudio::setCodec(AVCodecContext *codecContext) {
    this->avCodecContext = codecContext;
    createFFmpeg(this);
}


double FFmpegAudio::getClock(){
    return this->clock;
}

void FFmpegAudio::setTimeBase(AVRational avRational){
    this->avRational = avRational;
}

void FFmpegAudio::stop(){
    LOGI("停止声音");
    //可能卡在dequeue
    pthread_mutex_lock(&mutex);
    isPlay = 0;
    pthread_cond_signal(&cond);
    pthread_mutex_unlock(&mutex);
    pthread_join(p_playid, 0);
    if(bqPlayerPlay){
        (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_STOPPED);
        bqPlayerPlay = 0;
    }

    if(bqPlayerObject){
        (*bqPlayerObject)->Destroy(bqPlayerObject);
        bqPlayerObject = 0;

        bqPlayerBufferQueue = 0;
        bqPlayerVolume = 0;
    }

    if(outputMixObject){
        (*outputMixObject)->Destroy(outputMixObject);
        outputMixObject = 0;
    }

    if(engineObject){
        (*engineObject)->Destroy(engineObject);
        engineObject = 0;
        engineEngine = 0;
    }

    if(swrContext){
        swr_free(&swrContext);
    }

    if(this->avCodecContext){
        if(avcodec_is_open(this->avCodecContext)){
            avcodec_close(this->avCodecContext);
        }
        avcodec_free_context(&this->avCodecContext);
        this->avCodecContext = 0;
    }

    LOGI("audio clear");

}

void *play_audio(void *args){
    LOGI("%s", "开启音频");
    FFmpegAudio* audio = (FFmpegAudio *) args;
    audio->createPlayer();
    pthread_exit(0);
}


//第一次主动调用在调用线程
//之后在新线程中回调
void bqPlayerCallBack(SLAndroidSimpleBufferQueueItf bq, void *context){
    FFmpegAudio *audio = (FFmpegAudio *) context;
    int datalen = getPcm(audio);
    if(datalen > 0){
        double time = datalen/((double)44100 * 2 * 2);
        audio->clock  = audio->clock + time;
        (*bq)->Enqueue(bq, audio->out_buffer, datalen);
    } else{
        LOGE("解码错误");
    }
};


int createFFmpeg(FFmpegAudio *audio){
//    mp3  里面所包含的编码格式   转换成  pcm   SwcContext
    audio->swrContext = swr_alloc();

//    44100*2
    audio->out_buffer = (uint8_t *) av_mallocz(44100 * 2);
    uint64_t  out_ch_layout=AV_CH_LAYOUT_STEREO;
//    输出采样位数  16位
    enum AVSampleFormat out_formart=AV_SAMPLE_FMT_S16;
//输出的采样率必须与输入相同
    int out_sample_rate = 44100;


    swr_alloc_set_opts( audio->swrContext, out_ch_layout, out_formart, out_sample_rate,
                        audio->avCodecContext->channel_layout, audio->avCodecContext->sample_fmt,  audio->avCodecContext->sample_rate, 0,
                        NULL);

    swr_init( audio->swrContext);
//    获取通道数  2
    audio->out_channer_nb = av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO);
    LOGE("------>通道数%d  ", audio->out_channer_nb);
    return 0;
}

int FFmpegAudio::createPlayer() {
    SLresult result;
    // 创建引擎engineObject
    result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
    if (SL_RESULT_SUCCESS != result) {
        return 0;
    }
    // 实现引擎engineObject
    result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
    if (SL_RESULT_SUCCESS != result) {
        return 0;
    }
    // 获取引擎接口engineEngine
    result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE,
                                           &engineEngine);
    if (SL_RESULT_SUCCESS != result) {
        return 0;
    }
    // 创建混音器outputMixObject
    result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0,
                                              0, 0);
    if (SL_RESULT_SUCCESS != result) {
        return 0;
    }
    // 实现混音器outputMixObject
    result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
    if (SL_RESULT_SUCCESS != result) {
        return 0;
    }
    result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
                                              &outputMixEnvironmentalReverb);
    const SLEnvironmentalReverbSettings settings = SL_I3DL2_ENVIRONMENT_PRESET_DEFAULT;
    if (SL_RESULT_SUCCESS == result) {
        (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
                outputMixEnvironmentalReverb, &settings);
    }


    //======================
    SLDataLocator_AndroidSimpleBufferQueue android_queue = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
    SLDataFormat_PCM pcm = {SL_DATAFORMAT_PCM, 2, SL_SAMPLINGRATE_44_1, SL_PCMSAMPLEFORMAT_FIXED_16,
                            SL_PCMSAMPLEFORMAT_FIXED_16,
                            SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,
                            SL_BYTEORDER_LITTLEENDIAN};
//   新建一个数据源 将上述配置信息放到这个数据源中
    SLDataSource slDataSource = {&android_queue, &pcm};
//    设置混音器
    SLDataLocator_OutputMix outputMix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};

    SLDataSink audioSnk = {&outputMix, NULL};
    const SLInterfaceID ids[3] = {SL_IID_BUFFERQUEUE, SL_IID_EFFECTSEND,
            /*SL_IID_MUTESOLO,*/ SL_IID_VOLUME};
    const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE,
            /*SL_BOOLEAN_TRUE,*/ SL_BOOLEAN_TRUE};
    //先讲这个
    (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &slDataSource,
                                       &audioSnk, 2,
                                       ids, req);
    //初始化播放器
    (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);

//    得到接口后调用  获取Player接口
    (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);

//    注册回调缓冲区 //获取缓冲队列接口
    (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE,
                                    &bqPlayerBufferQueue);
    //缓冲接口回调
    (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallBack, this);
//    获取音量接口
    (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_VOLUME, &bqPlayerVolume);

//    获取播放状态接口
    (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);

    bqPlayerCallBack(bqPlayerBufferQueue, this);
    return 1;
}

FFmpegVideo.h

//
// Created by ygdx_lk on 17/11/4.
//

#ifndef VIDEOPLAYER_FFMPEGVEDIO_H
#define VIDEOPLAYER_FFMPEGVEDIO_H

#include "FFmpegVideo.h"
#include "Log.h"
#include "FFmpegAudio.h"

#include <unistd.h>
extern "C"{
#include <pthread.h>
#include <libswscale/swscale.h>
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/time.h>
#include "Log.h"

class FFmpegVideo{
public:
    FFmpegVideo();

    ~FFmpegVideo();

    int enQueue(AVPacket *avPacket);

    int deQueue(AVPacket *avPacket);

    void play();

    void stop();

    void setAvCodecContext(AVCodecContext *codecContext);

    void setTimeBase(AVRational avRational);
    /**
     * 设置回调接口
     * @param call
     */
    void setPlayCall(void (*call)(AVFrame* frame));

    double synchronize(AVFrame *frame, double play);

    void setAudio(FFmpegAudio *audio);

public:
    //解码器上下文
    AVCodecContext *codec;
    //是否正在播放
    int isPlay;
    //流索引
    int index;
    //音频队列
    std::queue<AVPacket *> queue;
    //处理线程
    pthread_t p_id;
    //同步锁
    pthread_mutex_t mutex;
    //条件变量
    pthread_cond_t cond;
    //头文件写在"extend"C外
    FFmpegAudio* audio;
    AVRational time_base;
    double  clock;
    int videoWidth, videoHeight;
};

};

void *play_video(void *arg);

#endif //VIDEOPLAYER_FFMPEGVEDIO_H

FFmpegVideo.cpp

//
// Created by ygdx_lk on 17/11/4.
//

#include "FFmpegVideo.h"
FFmpegVideo::FFmpegVideo(){
    clock = 0;
    pthread_mutex_init(&mutex, NULL);
    pthread_cond_init(&cond, NULL);
}

static void (*video_call)(AVFrame *frame);


int FFmpegVideo::enQueue(AVPacket *avPacket){
    AVPacket *packet = (AVPacket *)av_mallocz(sizeof(AVPacket));
    //拷贝
    if(av_packet_ref(packet, avPacket)){
        return 0;
    }
    pthread_mutex_lock(&mutex);
    queue.push(packet);
    //通知播放音频
    pthread_cond_signal(&cond);
    pthread_mutex_unlock(&mutex);
    return 1;
}

int FFmpegVideo::deQueue(AVPacket *avPacket){
    pthread_mutex_lock(&mutex);
    while (isPlay){
        if(!queue.empty()){
            //从队列取出一个packet,clone一个给入参对象
            if(av_packet_ref(avPacket, queue.front())){
                break;
            }

            //取成功了 弹出队列 销毁packet
            AVPacket *packet = queue.front();
            queue.pop();
            av_free(packet);
            break;
        } else{
            //如果队列里面没有数据的话,一直等待,阻塞。
            pthread_cond_wait(&cond, &mutex);
        }
    }
    pthread_mutex_unlock(&mutex);
    return 0;
}



void *play_video(void *arg){
    FFmpegVideo *vedio = (FFmpegVideo *) arg;
    //像素数据(解码数据)
    AVFrame *frame = av_frame_alloc();

    //转换rgba
    SwsContext *sws_ctx;

    int type = 0;
    LOGE("TYPE  %d", type);
    sws_ctx = sws_getContext(
            vedio->codec->width, vedio->codec->height,vedio->codec->pix_fmt,
            vedio->videoWidth,vedio->videoHeight, AV_PIX_FMT_RGBA,
            SWS_BILINEAR, 0, 0, 0);


    AVFrame *rgb_frame = av_frame_alloc();
//    给缓冲区分配内存
    //只有指定了AVFrame的像素格式、画面大小才能真正分配内存
    //缓冲区分配内存
    uint8_t   *out_buffer= (uint8_t *) av_mallocz(avpicture_get_size(AV_PIX_FMT_RGBA, vedio->videoWidth,vedio->videoHeight));
//设置yuvFrame的缓冲区,像素格式
    avpicture_fill((AVPicture *) rgb_frame, out_buffer, AV_PIX_FMT_RGBA,vedio->videoWidth, vedio->videoHeight);

    int got_frame;
    //编码数据
    AVPacket *packet = (AVPacket *)av_mallocz(sizeof(AVPacket));
    //6.一阵一阵读取压缩的视频数据AVPacket
    double  last_play  //上一帧的播放时间
    ,play             //当前帧的播放时间
    , last_delay    // 上一次播放视频的两帧视频间隔时间
    ,delay         //两帧视频间隔时间
    ,audio_clock //音频轨道 实际播放时间
    ,diff   //音频帧与视频帧相差时间
    ,sync_threshold
    ,start_time  //从第一帧开始的绝对时间
    ,pts
    ,actual_delay//真正需要延迟时间
    ;//两帧间隔合理间隔时间
    start_time = av_gettime() / 1000000.0;
    while (vedio->isPlay) {
        LOGE("视频 解码  一帧 %d",vedio->queue.size());
//        消费者取到一帧数据  没有 阻塞
        vedio->deQueue(packet);
        avcodec_decode_video2(vedio->codec,frame, &got_frame, packet);
        if (!got_frame) {
            continue;
        }

//        转码成rgb
        sws_scale(sws_ctx, (const uint8_t *const *)frame->data, frame->linesize, 0,
                           frame->height,
                           rgb_frame->data, rgb_frame->linesize);

        if ((pts = av_frame_get_best_effort_timestamp(frame)) == AV_NOPTS_VALUE) {
            pts = 0;
        }
        play = pts * av_q2d(vedio->time_base);
        //纠正时间
        play = vedio->synchronize(frame, play);
        delay = play - last_play;
        if (delay <= 0 || delay > 1) {
            delay = last_delay;
        }
        audio_clock = vedio->audio->clock;
        last_delay = delay;
        last_play = play;
        //音频与视频的时间差
        diff = vedio->clock - audio_clock;
        //在合理范围外  才会延迟  加快
        sync_threshold = (delay > 0.01 ? 0.01 : delay);

        if (fabs(diff) < 10) {
            if (diff <= -sync_threshold) {
                delay = 0;
            } else if (diff >=sync_threshold) {
                delay = 2 * delay;
            }
        }
        start_time += delay;
        actual_delay=start_time-av_gettime()/1000000.0;
        if (actual_delay < 0.01) {
            actual_delay = 0.01;
        }
//           av_usleep(actual_delay * 1000000.0 + 5000);
        av_usleep(actual_delay * 1000000.0);
        video_call(rgb_frame);
    }
    av_free(packet);
    av_frame_free(&frame);
    av_frame_free(&rgb_frame);
    sws_freeContext(sws_ctx);
    size_t size = vedio->queue.size();
    for (int i = 0; i < size; ++i) {
        AVPacket *pkt = vedio->queue.front();
        av_free(pkt);
        vedio->queue.pop();
    }
    LOGE("VIDEO EXIT");
    pthread_exit(0);
}


void FFmpegVideo::play(){
    isPlay = 1;
    pthread_create(&p_id, 0, play_video, this);
}


void FFmpegVideo::stop(){
    pthread_mutex_lock(&mutex);
    pthread_cond_signal(&cond);
    pthread_mutex_unlock(&mutex);

    pthread_join(p_id, 0);
    if(this->codec){
        if(avcodec_is_open(this->codec)){
            avcodec_close(this->codec);
        }
        avcodec_free_context(&this->codec);
        this->codec = 0;
    }
    LOGI("VIDEO close")
}

void FFmpegVideo::setAvCodecContext(AVCodecContext *codecContext){
    this->codec = codecContext;
}

void FFmpegVideo::setPlayCall(void (*call)(AVFrame *)) {
    video_call = call;
}

void FFmpegVideo::setAudio(FFmpegAudio *audio) {
    this->audio = audio;
}

void FFmpegVideo::setTimeBase(AVRational avRational) {
    this->time_base = avRational;
}


double FFmpegVideo::synchronize(AVFrame *frame, double play) {
    //clock是当前播放的时间位置
    if (play != 0)
        clock=play;
    else //pst为0 则先把pts设为上一帧时间
        play = clock;
    //可能有pts为0 则主动增加clock
    //frame->repeat_pict = 当解码时,这张图片需要要延迟多少
    //需要求出扩展延时:
    //extra_delay = repeat_pict / (2*fps) 显示这样图片需要延迟这么久来显示
    double repeat_pict = frame->repeat_pict;
    //使用AvCodecContext的而不是stream的
    double frame_delay = av_q2d(codec->time_base);
    //如果time_base是1,25 把1s分成25份,则fps为25
    //fps = 1/(1/25)
    double fps = 1 / frame_delay;
    //pts 加上 这个延迟 是显示时间
    double extra_delay = repeat_pict / (2 * fps);
    double delay = extra_delay + frame_delay;
//    LOGI("extra_delay:%f",extra_delay);
    clock += delay;
    return play;
}

FFmpegVideo::~FFmpegVideo(){
    pthread_cond_destroy(&cond);
    pthread_mutex_destroy(&mutex);
}

Log.h

//
// Created by ygdx_lk on 17/11/4.
//

#ifndef VIDEOPLAYER_LOG_H
#define VIDEOPLAYER_LOG_H

#include <string>
#include <android/log.h>

#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR, "videoplayer", FORMAT, ##__VA_ARGS__);
#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO, "videoplayer", FORMAT,##__VA_ARGS__);

#endif //VIDEOPLAYER_LOG_H

native-lib.h

//
// Created by ygdx_lk on 17/11/4.
//

#ifndef VIDEOPLAYER_NATIVE_LIB_H_H
#define VIDEOPLAYER_NATIVE_LIB_H_H

#include <jni.h>
#include <string>
#include "Log.h"
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include "FFmpegAudio.h"
#include "FFmpegVideo.h"

extern "C" {
//编码
#include "libavcodec/avcodec.h"
//封装格式处理
#include "libavformat/avformat.h"
#include "libswresample/swresample.h"
//像素处理
#include "libswscale/swscale.h"
#include "unistd.h"
#include <android/native_window_jni.h>
#include "libavutil/time.h"

ANativeWindow *window = 0;
const char *path;
FFmpegAudio *audio;
FFmpegVideo *video;
pthread_t  p_tid;
int isPlay = -1;
int windowWidth, windowHeight;

JNIEXPORT jstring JNICALL Java_com_test_videoplayer_MainActivity_stringFromJNI(JNIEnv *env, jobject /* this */);
JNIEXPORT void JNICALL Java_com_test_videoplayer_VideoPlayer_play(JNIEnv *env, jobject instance, jstring path);
JNIEXPORT void JNICALL Java_com_test_videoplayer_VideoPlayer_dispaley(JNIEnv *env, jobject instance, jobject surface);
JNIEXPORT void JNICALL Java_com_test_videoplayer_VideoPlayer_release(JNIEnv *env, jobject instance);
}

void *process(void*);

#endif //VIDEOPLAYER_NATIVE_LIB_H_H

native-lib.cpp

#include "native-lib.h"


void call_video_play(AVFrame *frame){
    if(!window){
        return;
    }
    ANativeWindow_Buffer window_buffer;
    if(ANativeWindow_lock(window, &window_buffer, 0)){
        return;
    }
    // 缓冲区的地址
    uint8_t *dst = (uint8_t *) window_buffer.bits;
    // 每行的内存大小
    int dstStride = window_buffer.stride * 4;
    // 像素区的地址
    uint8_t *src = frame->data[0];
    int srcStride = frame->linesize[0];
    for (int i = 0; i < video->videoHeight; i++) {
        // 逐行拷贝内存数据,但要进行偏移,否则视频会拉伸变形
        // (i + (windowHeight - videoHeight) / 2) * dstStride 纵向偏移,确保视频纵向居中播放
        //(dstStride - srcStride) / 2 横向偏移,确保视频横向居中播放
//        void* memcpy(void* _Nonnull __restrict, const void* _Nonnull __restrict, size_t)
        memcpy(dst + (i + (windowHeight - video->videoHeight) / 2) * dstStride + (dstStride - srcStride) / 2, src + i * srcStride, srcStride);
//        memcpy(dst + i * dstStride, src + i * srcStride, srcStride);
    }
    //解锁
    ANativeWindow_unlockAndPost(window);
}


void Java_com_test_videoplayer_VideoPlayer_play(JNIEnv *env, jobject instance, jstring path_) {
    path = env->GetStringUTFChars(path_, 0);
    audio = new FFmpegAudio;
    video = new FFmpegVideo;
    video->setPlayCall(call_video_play);
    pthread_create(&p_tid, NULL, process, NULL);
}

//void av_log_set_callback(void (*callback)(void*, int, const char*, va_list));
void callback(void *, int, const char* pmt, va_list list){
    __android_log_vprint(ANDROID_LOG_ERROR, "AAAAAA", pmt, list);
}
void *process(void *args){

    //开启log
    av_log_set_level(0);
    av_log_set_callback(callback);

    LOGI("%s", "开启线程");
    av_register_all();
    //播放网络的话,需要初始化
    avformat_network_init();
    AVFormatContext *avFormatContext = avformat_alloc_context();
    if(avformat_open_input(&avFormatContext, path, NULL, NULL) < 0){
        LOGE("%s", "打开输入视频文件失败");
        return ((void *)0);
    }
    if(avformat_find_stream_info(avFormatContext, NULL) < 0){
        LOGE("%s", "获取视频信息失败");
        return ((void *)0);
    }

    for (int i = 0; i < avFormatContext->nb_streams; ++i) {
        AVCodecContext *avCodecContext = avFormatContext->streams[i]->codec;
        AVCodec *decoder = avcodec_find_decoder(avCodecContext->codec_id);
        if(avcodec_open2(avCodecContext, decoder, NULL) < 0){
            LOGE("%s", "解码器无法打开");
            continue;
        };
        if(avCodecContext->codec_type == AVMEDIA_TYPE_AUDIO){//音频
            audio->setCodec(avCodecContext);
            audio->setTimeBase(avFormatContext->streams[i]->time_base);
            audio->index = i;
        } else if(avCodecContext->codec_type == AVMEDIA_TYPE_VIDEO){//视频
            video->setAvCodecContext(avCodecContext);
            video->time_base = (avFormatContext->streams[i]->time_base);
            video->index = i;
            if(window && avCodecContext){
                // 绘制区域的宽高
                windowWidth = ANativeWindow_getWidth(window);
                windowHeight = ANativeWindow_getHeight(window);
                ANativeWindow_setBuffersGeometry(window, windowWidth, windowHeight, WINDOW_FORMAT_RGBA_8888);
                // 确定视频的大小,有可能视频的会大于当前的绘制区域,需要缩放
//                int videoWidth = windowWidth > avCodecContext->width ? avCodecContext->width : windowWidth;
                //设置播放区域,宽度铺满
                video->videoWidth = windowWidth;
                //计算高度
                video->videoHeight = video->videoWidth * avCodecContext->height / avCodecContext->width;
                //如果高度大于绘制区域的高度,需要重新计算宽高
                if (video->videoHeight > windowHeight) {
                    video->videoHeight = windowHeight;
                    video->videoWidth = windowHeight * avCodecContext->width / avCodecContext->height;
                }
            }
        }
    }

    video->setAudio(audio);
    video->play();
    audio->play();
    isPlay = 1;
    AVPacket* avPacket = (AVPacket *)av_mallocz(sizeof(AVPacket));
    //解码完整视频, 子线程
    int ret;
    while (isPlay){
        ret = av_read_frame(avFormatContext, avPacket);
        if(ret == 0) {
//        LOGI("从流中读取播放数据  %#x", avPacket)
            if (audio && audio->isPlay && avPacket->stream_index == audio->index) {
                audio->enQueue(avPacket);
            } else if (video && video->isPlay && avPacket->stream_index == video->index) {
                video->enQueue(avPacket);
            }
            av_packet_unref(avPacket);
        } else if(ret == AVERROR_EOF){
            //读取完毕,但是可能未播放完毕
            while (isPlay){
                if(video->queue.empty() && audio->queue.empty()){
                    break;
                }
                //等待播放完成
                av_usleep(10000);
            }
        }
    }


    //视频解码完, 可能视频播放完了,也可能视频没播放完成
    isPlay = 0;

    if(audio && audio->isPlay){
        audio->stop();
    }

    if(video && video->isPlay){
        video->stop();
    }

    //释放内存
    av_free_packet(avPacket);
    avformat_free_context(avFormatContext);
    pthread_exit(0);
}

void JNICALL Java_com_test_videoplayer_VideoPlayer_dispaley(JNIEnv *env, jobject instance, jobject surface){
    if(window){
        ANativeWindow_release(window);
        window = 0;
    }
    window = ANativeWindow_fromSurface(env, surface);
    if(video && video->codec){
        // 绘制区域的宽高
        windowWidth = ANativeWindow_getWidth(window);
        windowHeight = ANativeWindow_getHeight(window);
        ANativeWindow_setBuffersGeometry(window, windowWidth, windowHeight, WINDOW_FORMAT_RGBA_8888);
        // 确定视频的大小,有可能视频的会大于当前的绘制区域,需要缩放
//                int videoWidth = windowWidth > avCodecContext->width ? avCodecContext->width : windowWidth;
        //设置播放区域,宽度铺满
        video->videoWidth = windowWidth;
        //计算高度
        video->videoHeight = video->videoWidth * video->codec->height / video->codec->width;
        //如果高度大于绘制区域的高度,需要重新计算宽高
        if (video->videoHeight > windowHeight) {
            video->videoHeight = windowHeight;
            video->videoWidth = windowHeight * video->codec->width / video->codec->height;
        }
    }
}

void JNICALL Java_com_test_videoplayer_VideoPlayer_release(JNIEnv *env, jobject instance){
    if(isPlay){
        isPlay = 0;
        pthread_join(p_tid, 0);
    }
    if(video){
        if(video->isPlay){
            video->stop();
        }
        delete(video);
        video = 0;
    }
    if(audio){
        if(audio->isPlay){
            audio->stop();
        }
        delete(audio);
        audio = 0;
    }
}

猜你喜欢

转载自blog.csdn.net/likuan0214/article/details/78471680