simplest_ffmpeg_streamer (annotated version)


https://blog.csdn.net/czc1009/article/details/12913759

https://blog.csdn.net/ytaosky/article/details/72820329

Original code by 雷神 (Lei Xiaohua):

Project download: https://download.csdn.net/download/mao0514/10909202
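The program below opens a local FLV file, remuxes it packet by packet (no re-encoding), and pushes it to an RTMP URL at roughly real-time speed. As a rough point of reference (not part of the original post), the same effect can usually be obtained with the command-line tool: ffmpeg -re -i test.flv -c copy -f flv rtmp://localhost/publishlive/livestream, where -re makes ffmpeg read the input at its native frame rate, which is exactly what the code below does by hand with av_gettime()/av_usleep(). To actually see the stream, an RTMP server (for example nginx with the RTMP module) has to be listening at that URL, and the result can then be played with something like ffplay rtmp://localhost/publishlive/livestream.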

// simplest_ffmpeg_streamer.cpp : defines the entry point for the console application.
//

#include "stdafx.h"
#include <stdio.h>
// Compatibility shims: these macros were renamed or removed in newer FFmpeg
// releases, so they are redefined here so the old-style code below still compiles.
#define AV_CODEC_FLAG_GLOBAL_HEADER (1 << 22)
#define CODEC_FLAG_GLOBAL_HEADER AV_CODEC_FLAG_GLOBAL_HEADER
#define AVFMT_RAWPICTURE 0x0020


#define __STDC_CONSTANT_MACROS

#ifdef _WIN32
//Windows
extern "C"
{
#include "libavformat/avformat.h"
#include "libavutil/mathematics.h"
#include "libavutil/time.h"
};
#endif
int main(int argc, char *argv[])
{
    AVOutputFormat *ofmt = NULL;
    AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
    AVPacket pkt;
    const char *in_filename, *out_filename;
    int ret, i;
    int videoindex = -1;
    int frame_index = 0;
    int64_t start_time = 0;
    in_filename = "test.flv";

    out_filename = "rtmp://localhost/publishlive/livestream";
    //Register all muxers, demuxers and protocols
    av_register_all();
    //Initialize networking (needed for the RTMP output)
    avformat_network_init();
    /*
    Open the input file and read its header into an AVFormatContext:
    - &ifmt_ctx: the AVFormatContext to fill in (note the double pointer)
    - in_filename: input file name
    - fmt: forces a specific AVInputFormat; normally NULL so FFmpeg auto-detects it
    - options: extra options, normally NULL
    */
    if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, 0)) < 0)
    {
        printf("Could not open input file.");
        goto end;
    }
    //Read stream information from the input; returns a negative error code on failure
    if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {
        printf("Failed to retrieve input stream information");
        goto end;
    }
    //nb_streams: number of AVStreams in the input file
    for (i = 0; i < ifmt_ctx->nb_streams; i++) {
        //ifmt_ctx->streams[i] is the input's AVStream[] array (see the slides for the structure details); remember the index of the video stream
        if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoindex = i;
            break;
        }
    }
    //Print detailed information in ifmt_ctx; the last parameter 0 means input, 1 means output
    av_dump_format(ifmt_ctx, 0, in_filename, 0);
    /*
    avformat_alloc_output_context2() initializes an output AVFormatContext:
    - ctx: on success, receives the newly created AVFormatContext
    - oformat: explicitly specifies the AVOutputFormat of the context. If NULL, FFmpeg
      guesses the output format from one of the next two parameters.
      (Passing it explicitly means obtaining an AVOutputFormat yourself, which is more
      work than using the next two parameters.)
    - format_name: name of the output format, e.g. "flv" or "mkv"; FFmpeg infers the format from it
    - filename: name of the output file, e.g. "xx.flv" or "yy.mkv"; FFmpeg infers the format from it
    The return value is >= 0 on success.
    */
    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_filename);
    if (!ofmt_ctx) {
        printf("Could not create output context\n");
        ret = AVERROR_UNKNOWN;
        goto end;
    }
    //The output's AVOutputFormat
    ofmt = ofmt_ctx->oformat;
    for (i = 0; i < ifmt_ctx->nb_streams; i++) {
        //Create an output stream for each input stream
        AVStream *in_stream = ifmt_ctx->streams[i];
        //Allocate the AVStream for the output
        AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
        if (!out_stream) {
            printf("Failed allocating output stream\n");
            ret = AVERROR_UNKNOWN;
            goto end;
        }
        //Copy the AVCodecContext settings from the input stream to the output stream
        ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
        //Returns a negative value on failure
        if (ret < 0) {
            printf("Failed to copy context from input to output stream codec context\n");
            goto end;
        }
        //Clear codec_tag so the output muxer can pick an appropriate one
        out_stream->codec->codec_tag = 0;
        //Some output formats (such as FLV) require global headers
        if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
            out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }
    //Print detailed information in ofmt_ctx; the last parameter 1 means output, 0 means input
    av_dump_format(ofmt_ctx, 0, out_filename, 1);
    //Open the output URL
    if (!(ofmt->flags & AVFMT_NOFILE)) {
        //Open the output file/URL for writing
        ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
        if (ret < 0) {
            printf("Could not open output URL '%s'", out_filename);
            goto end;
        }
    }
    //Write the file header
    ret = avformat_write_header(ofmt_ctx, NULL);
    if (ret < 0) {
        printf("Error occurred when opening output URL\n");
        goto end;
    }
    //Record the wall-clock start time; it is used below to pace the stream
    start_time = av_gettime();
    //Main remuxing loop: read packets from the input and send them to the output
    while (1) {
        AVStream *in_stream, *out_stream;
        //Read one AVPacket from the input file
        ret = av_read_frame(ifmt_ctx, &pkt);
        if (ret < 0)
            break;
        //If the packet has no PTS (presentation timestamp), generate one
        if (pkt.pts == AV_NOPTS_VALUE) {
            //time_base: the AVStream's time base
            AVRational time_base1 = ifmt_ctx->streams[videoindex]->time_base;
            //r_frame_rate: the AVStream's frame rate; calc_duration is the time between two frames, in microseconds
            int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
            pkt.pts = (double)(frame_index*calc_duration) / (double)(av_q2d(time_base1)*AV_TIME_BASE);
            //dts: decoding timestamp
            pkt.dts = pkt.pts;
            //duration of this packet, expressed in the stream's time_base
            pkt.duration = (double)calc_duration / (double)(av_q2d(time_base1)*AV_TIME_BASE);
        }
        //Important: delay, so packets are sent at roughly the playback rate
        if (pkt.stream_index == videoindex) {
            AVRational time_base = ifmt_ctx->streams[videoindex]->time_base;
            AVRational time_base_q = { 1, AV_TIME_BASE };
            int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
            int64_t now_time = av_gettime() - start_time;
            if (pts_time > now_time)
                //Sleep so that sending keeps pace with the packet timestamps
                av_usleep(pts_time - now_time);
        }
        in_stream = ifmt_ctx->streams[pkt.stream_index];
        out_stream = ofmt_ctx->streams[pkt.stream_index];
        //copy packet
        //Convert PTS/DTS from the input stream's time base to the output stream's
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;
        //Print to Screen
        if (pkt.stream_index == videoindex) {
            printf("Send %8d video frames to output URL\n", frame_index);
            frame_index++;
        }
        //Write the AVPacket (compressed stream data) to the output
        ret = av_interleaved_write_frame(ofmt_ctx, &pkt);

        if (ret < 0) {
            printf("Error muxing packet\n");
            break;
        }
        //Unreference the packet's data
        av_free_packet(&pkt);
    }
    //Write the file trailer
    av_write_trailer(ofmt_ctx);
end:
    //Close the input and free its AVFormatContext
    avformat_close_input(&ifmt_ctx);
    //close output
    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE)) {
        avio_close(ofmt_ctx->pb);
    }
    //Free the output AVFormatContext
    avformat_free_context(ofmt_ctx);
    if (ret < 0 && ret != AVERROR_EOF) {
        printf("ERROR occurred.\n");
        return -1;
    }
    return 0;
}
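A note on API versions: several calls used above (av_register_all, AVStream::codec, avcodec_copy_context, av_free_packet) are deprecated or removed in FFmpeg 4.x and later, which is presumably why the compatibility #defines appear at the top of the file. A minimal sketch of the stream-copy step written against the newer codecpar API is shown below; it is an illustration under the assumption that ifmt_ctx and ofmt_ctx have been opened exactly as above, not part of the original code:

for (i = 0; i < ifmt_ctx->nb_streams; i++) {
    AVStream *in_stream = ifmt_ctx->streams[i];
    //Pass NULL instead of a codec; the stream is described by codecpar below
    AVStream *out_stream = avformat_new_stream(ofmt_ctx, NULL);
    if (!out_stream) {
        ret = AVERROR_UNKNOWN;
        goto end;
    }
    //Copy codec parameters instead of the whole (deprecated) AVCodecContext
    ret = avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar);
    if (ret < 0)
        goto end;
    //Clear codec_tag so the FLV muxer can choose its own
    out_stream->codecpar->codec_tag = 0;
}

In the packet loop, av_packet_unref(&pkt) replaces the removed av_free_packet(); with codecpar there is no per-stream flags field to set, since the extradata copied by avcodec_parameters_copy is what the muxer uses.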

Modified version (adapted for pushing a raw H.264 file: the input and output paths are taken as parameters, non-video packets are dropped, and DTS is simply set equal to PTS):


//push( "d:\\temp\\test.264","rtmp://192.168.1.105:1935/live/test");
int push(char* input_str, char* output_str)
{
    AVOutputFormat *ofmt = NULL;
    AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
    AVStream *in_stream = NULL, *out_stream = NULL;
    AVPacket pkt;
    int ret, i;
    int videoindex = -1;
    int frame_index = 0;
    int64_t start_time = 0;

    //FFmpeg av_log() callback
    //av_log_set_callback(custom_log);

    av_register_all();
    //Network
    avformat_network_init();
    //Input
    if ((ret = avformat_open_input(&ifmt_ctx, input_str, 0, 0)) < 0)
    {
        printf("Could not open input file.");
        goto end;
    }
    //LOGI("avformat_find_stream_info");
    if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0)
    {
        printf("Failed to retrieve input stream information");
        goto end;
    }
 
    //Output
    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", output_str); //RTMP
    //avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", output_str); //UDP
    if (!ofmt_ctx)
    {
        printf("Could not create output context\n");
        ret = AVERROR_UNKNOWN;
        goto end;
    }
    ofmt = ofmt_ctx->oformat;
    for (i = 0; i < ifmt_ctx->nb_streams; i++)
        if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            videoindex = i;
            break;
        }
    //Create the output AVStream according to the input AVStream
    in_stream = ifmt_ctx->streams[videoindex];
    out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
    if (!out_stream)
    {
        printf("Error occurred when allocating output stream\n");
        ret = AVERROR_UNKNOWN;
        goto end;
    }
    //Copy the settings of AVCodecContext
    ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
    if (ret < 0)
    {
        printf("Failed to copy context from input to output stream codec context\n");
        goto end;
    }
    out_stream->codec->codec_tag = 0;
    if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
        out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
 
    //Open output URL
    if (!(ofmt->flags & AVFMT_NOFILE))
    {
        ret = avio_open(&ofmt_ctx->pb, output_str, AVIO_FLAG_WRITE);
        if (ret < 0)
        {
            printf("Could not open output URL '%s'", output_str);
            goto end;
        }
    }
    //Write file header
    ret = avformat_write_header(ofmt_ctx, NULL);
    if (ret < 0)
    {
        printf("Error occurred when opening output URL : %d\n", ret);
        goto end;
    }

    start_time = av_gettime();
    while (1)
    {
        AVStream *in_stream, *out_stream;
        //Get an AVPacket
        ret = av_read_frame(ifmt_ctx, &pkt);
        if (ret < 0)
            break;
        //Only the video stream is pushed; drop everything else
        if (pkt.stream_index != videoindex)
        {
            av_free_packet(&pkt);
            continue;
        }
        //FIX: No PTS (Example: Raw H.264)
        //Simple Write PTS
        if (pkt.pts == AV_NOPTS_VALUE)
        {
            //Write PTS
            AVRational time_base1 = ifmt_ctx->streams[videoindex]->time_base;
            //Duration between 2 frames (us)
            int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
            //Parameters
            pkt.pts = (double)(frame_index*calc_duration) / (double)(av_q2d(time_base1)*AV_TIME_BASE);
            pkt.dts = pkt.pts;
            pkt.duration = (double)calc_duration / (double)(av_q2d(time_base1)*AV_TIME_BASE);
        }
        //Important: Delay
        if (pkt.stream_index == videoindex)
        {
            AVRational time_base = ifmt_ctx->streams[videoindex]->time_base;
            AVRational time_base_q = { 1, AV_TIME_BASE };
            int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
            int64_t now_time = av_gettime() - start_time;
            if (pts_time > now_time)
                av_usleep(pts_time - now_time);
        }

        in_stream = ifmt_ctx->streams[pkt.stream_index];
        out_stream = ofmt_ctx->streams[pkt.stream_index];
        /* copy packet */
        //Convert PTS/DTS
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.dts = pkt.pts; //av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX);
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;
        //Print to Screen
        if (pkt.stream_index == videoindex)
        {
            printf("Send %8d video frames to output URL\n", frame_index);
            frame_index++;
        }
        ret = av_interleaved_write_frame(ofmt_ctx, &pkt);

        if (ret < 0)
        {
            printf("Error muxing packet\n");
            break;
        }
        av_free_packet(&pkt);
    }
    //Write file trailer
    av_write_trailer(ofmt_ctx);
end:
    avformat_close_input(&ifmt_ctx);
    /* close output */
    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
        avio_close(ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);
    if (ret < 0 && ret != AVERROR_EOF)
    {
        printf("Error occurred.\n");
        return -1;
    }
    return 0;
}
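To make the timestamp arithmetic above concrete, take an assumed 25 fps input with a stream time_base of 1/1000 (illustrative numbers, not from the post): calc_duration = AV_TIME_BASE / 25 = 40000 microseconds between frames; pkt.pts for frame N then becomes N * 40000 / (0.001 * 1000000) = N * 40 in time_base ticks; av_rescale_q converts that back to microseconds, giving pts_time = N * 40000, so av_usleep() holds frame N back until roughly N * 40 ms of wall-clock time have passed since start_time. That pacing is what makes the push run at playback speed instead of as fast as the file can be read.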

Reposted from https://blog.csdn.net/mao0514/article/details/86211075