FFMPEG (1) Capture camera data from V4L2

     Related blog posts in the series:

            FFMPEG (1) Capture camera data from V4L2

            FFMPEG (2) v4l2 data format conversion

            FFMPEG (3) Encoding v4l2 data to H264

    I have recently been learning FFMPEG and noticed that most examples on the Internet read from files; very few read data directly from a camera. I have written an example that captures camera data through V4L2 and then calls x264 to encode it into a video file. FFMPEG already wraps many of the V4L2 operations, and below is the simplest possible routine. Note that I am using ffmpeg-3.2.4, the latest version at the time of writing; the interface functions differ somewhat between library versions.

/*=============================================================================
#     FileName: read_device.c
#         Desc: use ffmpeg read a frame data from v4l2
#       Author: licaibiao
#   LastChange: 2017-03-15
=============================================================================*/
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <unistd.h>
#include "avformat.h"
#include "avcodec.h"
#include "avdevice.h"

const char *input_name = "video4linux2";
const char *file_name  = "/dev/video0";
const char *out_file   = "test.jpeg";

void captureOneFrame(void){
    AVFormatContext *fmtCtx = NULL;
    AVPacket        *packet;
    AVInputFormat   *inputFmt;
    FILE            *fp;

    /* look up the v4l2 input device demuxer */
    inputFmt = av_find_input_format(input_name);
    if (inputFmt == NULL) {
        printf("can not find_input_format\n");
        return;
    }

    /* open the camera device */
    if (avformat_open_input(&fmtCtx, file_name, inputFmt, NULL) < 0) {
        printf("can not open_input_file\n");
        return;
    }

    /* print device information */
    av_dump_format(fmtCtx, 0, file_name, 0);

    /* read one frame from the camera */
    packet = (AVPacket *)av_malloc(sizeof(AVPacket));
    av_init_packet(packet);
    if (av_read_frame(fmtCtx, packet) < 0) {
        printf("read frame failed\n");
        av_free(packet);
        avformat_close_input(&fmtCtx);
        return;
    }
    printf("data length = %d\n", packet->size);

    /* dump the raw packet data to a file */
    fp = fopen(out_file, "wb");
    if (fp == NULL) {
        printf("open frame data file failed\n");
        return;
    }
    fwrite(packet->data, 1, packet->size, fp);
    fclose(fp);

    av_free_packet(packet);
    av_free(packet);
    avformat_close_input(&fmtCtx);
}

int main(void){
    avcodec_register_all();
    avdevice_register_all();
    captureOneFrame();
    return 0;
}
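
    The routine above opens the device with its default settings. If a specific resolution, frame rate, or pixel format is needed, options can be passed to avformat_open_input() through an AVDictionary. The snippet below is only a sketch of that idea: the option names are the standard libavdevice v4l2 options, and the 640x480 / 30 fps / mjpeg values are example choices for my camera, not something every device supports.

AVDictionary *options = NULL;

/* Standard libavdevice v4l2 options; the concrete values are only an
 * example and must match what the camera actually supports. */
av_dict_set(&options, "video_size",   "640x480", 0);   /* capture resolution */
av_dict_set(&options, "framerate",    "30",      0);   /* frames per second  */
av_dict_set(&options, "input_format", "mjpeg",   0);   /* request MJPEG data */

if (avformat_open_input(&fmtCtx, file_name, inputFmt, &options) < 0) {
    printf("can not open_input_file\n");
    return;
}
/* options the demuxer did not consume remain in the dictionary */
av_dict_free(&options);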

    The Makefile is as follows:

OUT_APP		 = test
INCLUDE_PATH = /usr/local/include/
INCLUDE = -I$(INCLUDE_PATH)libavutil/ -I$(INCLUDE_PATH)libavdevice/ \
			-I$(INCLUDE_PATH)libavcodec/ -I$(INCLUDE_PATH)libswresample \
			-I$(INCLUDE_PATH)libavfilter/ -I$(INCLUDE_PATH)libavformat \
			-I$(INCLUDE_PATH)libswscale/

FFMPEG_LIBS = -lavformat -lavutil -lavdevice -lavcodec -lswresample -lavfilter -lswscale
SDL_LIBS	=
LIBS		= $(FFMPEG_LIBS) $(SDL_LIBS)

COMPILE_OPTS = $(INCLUDE)
C 			 = c
OBJ = o
C_COMPILER   = cc
C_FLAGS 	 = $(COMPILE_OPTS) $(CPPFLAGS) $(CFLAGS)

LINK 		 = cc -o
LINK_OPTS    = -lz -lm  -lpthread
LINK_OBJ	 = read_device.o

.$(C).$(OBJ):
	$(C_COMPILER) -c -g $(C_FLAGS) $<


$(OUT_APP): $(LINK_OBJ)
	$(LINK)$@  $(LINK_OBJ)  $(LIBS) $(LINK_OPTS)

clean:
		-rm -rf *.$(OBJ) $(OUT_APP) core *.core *~  *.jpeg

    The compilation and running results are as follows:

licaibiao@ubuntu:~/test/FFMPEG/device$ ./test
[video4linux2,v4l2 @ 0x1d18040] Time per frame unknown
Input #0, video4linux2,v4l2, from '/dev/video0':
  Duration: N/A, bitrate: N/A
    Stream #0:0: Video: mjpeg, none, 640x480, 1000k tbn
data length = 27697
licaibiao@ubuntu:~/test/FFMPEG/device$ ls
Makefile  read_device.c  read_device.o  test  test.jpeg  webcm.c
licaibiao@ubuntu:~/test/FFMPEG/device$
    The camera I use outputs data in MJPEG format, so each captured packet is a complete JPEG image; the frame saved to test.jpeg can be opened with an ordinary image viewer.
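
    This only works because the stream really is MJPEG. A camera that delivers raw YUYV would produce a file that no image viewer can open. The fragment below is a small sketch, reusing fmtCtx from the routine above, of how to check what the camera actually delivers before deciding how to save the data:

/* Query stream information and inspect the codec of stream 0.
 * codecpar is available from FFmpeg 3.1 onwards. */
avformat_find_stream_info(fmtCtx, NULL);

enum AVCodecID codec_id = fmtCtx->streams[0]->codecpar->codec_id;
if (codec_id == AV_CODEC_ID_MJPEG) {
    printf("camera delivers MJPEG, packets can be written out as .jpeg files\n");
} else {
    printf("camera delivers codec id %d, packets are not standalone JPEGs\n", (int)codec_id);
}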




