在Ubuntu 16.04中使用V4L2采集视频数据(YUV422)并转换成YUV420数据,再使用ALSA采集pcm数据。
在两个线程中分别播放音频视频。
其中音频使用QT自带的multimedia播放
YUV420数据由Opengl实时渲染显示。
YUV422转420代码
// Convert packed YUYV (YUV422: Y0 U0 Y1 V0 ...) to planar I420 (YUV420p).
// 2x2 chroma subsampling is done by taking U from even rows and V from odd
// rows. Fix vs. original: the per-pixel bound checks (`4*j+1 > 2*width` etc.)
// were unreachable dead code (j < width/2 guarantees 4*j+3 <= 2*width-1) and
// have been removed; the opaque `ynum + k*2*width/4 + j` destination indexing
// is replaced by sequential plane write pointers. Output bytes are identical.
//
// yuv422: source buffer, width*height*2 bytes (packed YUYV)
// yuv420: destination buffer, width*height*3/2 bytes (Y plane, U plane, V plane)
// width/height: frame dimensions; both assumed even (640x480 here)
// returns 1 (always; kept for interface compatibility)
int YuvCapture::Yuv422ToYuv420(unsigned char *yuv422, unsigned char *yuv420, int width, int height)
{
    const int ynum = width * height;

    // Y plane: luma is every second byte of the packed stream.
    for (int i = 0; i < ynum; i++)
        yuv420[i] = yuv422[i * 2];

    unsigned char *uDst = yuv420 + ynum;            // U plane starts after Y
    unsigned char *vDst = yuv420 + ynum + ynum / 4; // V plane after U (ynum/4 bytes each)

    for (int row = 0; row < height; row++) {
        const unsigned char *src = yuv422 + row * width * 2; // packed row: 2 bytes/pixel
        if ((row % 2) == 0) {
            // Even row: keep its U samples (offset 1 within each Y0 U Y1 V quad).
            for (int j = 0; j < width / 2; j++)
                *uDst++ = src[4 * j + 1];
        } else {
            // Odd row: keep its V samples (offset 3 within each quad).
            for (int j = 0; j < width / 2; j++)
                *vDst++ = src[4 * j + 3];
        }
    }
    return 1;
}
音频采集与视频采集在我的另外两篇博客中。
OpenGL渲染代码(使用Qt中的QOpenGLWidget)
#ifndef VIDEOPLAY_H
#define VIDEOPLAY_H
#include <QWidget>
#include <QOpenGLFunctions>
#include <QOpenGLWidget>
#include <QGLShaderProgram>
#include "ivideocall.h"
#include <mutex>
// Video rendering widget: receives I420 (YUV420p) frames via the IVideoCall
// interface (Repaint) and renders them with an OpenGL fragment shader that
// does the YUV->RGB conversion on the GPU.
// Thread-safety: `mux` guards the plane buffers `datas[]`, which are written
// by the capture thread (Repaint) and read by the GUI thread (paintGL).
class VideoPlay : public QOpenGLWidget, protected QOpenGLFunctions, public IVideoCall
{
    Q_OBJECT
public:
    VideoPlay(QWidget *parent);
    ~VideoPlay();
    // Allocate the Y/U/V staging buffers (fixed 640x480 in this demo).
    virtual void Init();
    // Copy one I420 frame (Y + U + V planes, contiguous) and request a repaint.
    virtual void Repaint(unsigned char *frame);
protected:
    void paintGL();
    void initializeGL();
    void resizeGL(int width, int height);
private:
    // Shader program (vertex + fragment)
    QGLShaderProgram progma;
    // Uniform locations of the three samplers (tex_y / tex_u / tex_v)
    GLuint uint_yuv[3] = { 0 };
    // OpenGL texture object ids for the Y, U and V planes
    GLuint opengl_texs[3] = { 0 };
    int width = 640;
    int height = 480;
    // CPU-side staging buffers for the three planes (allocated in Init)
    unsigned char * datas[3] = { 0 };
    std::mutex mux;
};
#endif // VIDEOPLAY_H
CPP文件
#include "videoplay.h"
#include <QDebug>
#include <string.h>
// Construct the widget; the plane buffers in datas[] stay null until Init().
VideoPlay::VideoPlay(QWidget *parent) :QOpenGLWidget(parent)
{}
// Release the plane buffers. Fix vs. original: they are allocated with
// `new unsigned char[...]` in Init(), so they must be freed with `delete[]`
// (scalar `delete` on an array is undefined behavior). `delete[] nullptr`
// is a no-op, so this is safe even if Init() was never called.
VideoPlay::~VideoPlay()
{
    delete[] datas[0];
    delete[] datas[1];
    delete[] datas[2];
}
// Stringify macro: wraps the GLSL source below in quotes at compile time.
#define GET_STR(x) #x
// Fixed attribute locations for vertex position and texture coordinate,
// bound explicitly with bindAttributeLocation() before linking.
#define A_VER 3
#define T_VER 4
// Vertex shader: pass-through — forwards the clip-space position and hands
// the texture coordinate to the fragment shader unchanged.
const char *vString = GET_STR(
    attribute vec4 vertexIn;
    attribute vec2 textureIn;
    varying vec2 textureOut;
    void main(void)
    {
        gl_Position = vertexIn;
        textureOut = textureIn;
    }
);
// Fragment shader: samples the three single-channel plane textures and
// converts YUV -> RGB. GLSL mat3 is column-major, so the columns below are
// (1,1,1), (0,-0.39465,2.03211), (1.13983,-0.5806,0) — i.e. the standard
// BT.601 full-range conversion: R = Y + 1.13983 V, G = Y - 0.39465 U
// - 0.5806 V, B = Y + 2.03211 U.
const char *tString = GET_STR(
    varying vec2 textureOut;
    uniform sampler2D tex_y;
    uniform sampler2D tex_u;
    uniform sampler2D tex_v;
    void main(void)
    {
        vec3 yuv;
        vec3 rgb;
        yuv.x = texture2D(tex_y, textureOut).r;
        yuv.y = texture2D(tex_u, textureOut).r - 0.5;
        yuv.z = texture2D(tex_v, textureOut).r - 0.5;
        rgb = mat3(1.0, 1.0, 1.0,
                   0.0, -0.39465, 2.03211,
                   1.13983, -0.58060, 0.0) * yuv;
        gl_FragColor = vec4(rgb, 1.0);
    }
);
// Allocate the CPU-side staging buffers for the Y, U and V planes
// (fixed 640x480 in this demo). Fix vs. original: if Init() is called more
// than once, the previous buffers were leaked — free them first
// (`delete[] nullptr` is a no-op on first call).
void VideoPlay::Init()
{
    qDebug() << "Init done";
    mux.lock();
    this->width = 640;
    this->height = 480;
    // Release any buffers from a previous Init() so repeated calls don't leak.
    delete[] datas[0];
    delete[] datas[1];
    delete[] datas[2];
    // Y at full resolution; U and V at quarter size (2x2 subsampled).
    datas[0] = new unsigned char[width * height];
    datas[1] = new unsigned char[width * height / 4];
    datas[2] = new unsigned char[width * height / 4];
    mux.unlock();
}
// GUI-thread draw: upload the three planes from datas[] into the textures
// created in initializeGL() and draw the full-screen quad. The mutex keeps
// the upload consistent with the capture thread writing datas[] in Repaint().
// NOTE(review): assumes Init() and initializeGL() ran first — datas[] and
// opengl_texs[] are used unchecked here.
void VideoPlay::paintGL()
{
    mux.lock();
    // Texture unit 0: Y plane, full resolution.
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, opengl_texs[0]);
    // Replace the texture contents with the latest frame data.
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RED, GL_UNSIGNED_BYTE, datas[0]);
    // Point the tex_y sampler uniform at unit 0.
    glUniform1i(uint_yuv[0], 0);
    // Texture unit 1: U plane, half resolution in each dimension.
    glActiveTexture(GL_TEXTURE0 +1);
    glBindTexture(GL_TEXTURE_2D, opengl_texs[1]);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width/2, height/2, GL_RED, GL_UNSIGNED_BYTE, datas[1]);
    glUniform1i(uint_yuv[1], 1);
    // Texture unit 2: V plane, half resolution in each dimension.
    glActiveTexture(GL_TEXTURE0 +2);
    glBindTexture(GL_TEXTURE_2D, opengl_texs[2]);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width/2, height/2, GL_RED, GL_UNSIGNED_BYTE, datas[2]);
    glUniform1i(uint_yuv[2], 2);
    // Draw the two-triangle strip set up in initializeGL().
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    qDebug() << "Paint done";
    mux.unlock();
}
// One-time GL setup (called by Qt with the context current): compile/link the
// shaders, upload the static quad geometry, and create the three single-
// channel (GL_RED) textures that receive the Y, U and V planes.
// NOTE(review): GL_RED textures require a desktop GL 3+ context; on GLES2 the
// equivalent would be GL_LUMINANCE — confirm the context version in use.
void VideoPlay::initializeGL()
{
    mux.lock();
    qDebug() << "initializeGL()";
    // Resolve the QOpenGLFunctions entry points for the current context.
    initializeOpenGLFunctions();
    // Compile the vertex shader.
    qDebug() << progma.addShaderFromSourceCode(QGLShader::Vertex, vString);
    // Compile the fragment shader.
    qDebug() << progma.addShaderFromSourceCode(QGLShader::Fragment, tString);
    // Bind the attribute names to fixed locations (must happen before link()).
    progma.bindAttributeLocation("vertexIn", A_VER);
    progma.bindAttributeLocation("textureIn", T_VER);
    // Link and activate the program.
    qDebug() << "progma.link()" << progma.link();
    qDebug() << "progma.bind()" << progma.bind();
    // Full-screen quad as a triangle strip (clip-space corners).
    static const GLfloat ver[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f, 1.0f,
        1.0f, 1.0f
    };
    // Texture coordinates; V is flipped (top row of image maps to top of quad).
    static const GLfloat tex[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f
    };
    // Hand the (static) vertex data to GL via client-side attribute arrays.
    glVertexAttribPointer(A_VER, 2, GL_FLOAT, 0, 0, ver);
    glEnableVertexAttribArray(A_VER);
    glVertexAttribPointer(T_VER, 2, GL_FLOAT, 0, 0, tex);
    glEnableVertexAttribArray(T_VER);
    // Look up the sampler uniform locations used in paintGL().
    uint_yuv[0] = progma.uniformLocation("tex_y");
    uint_yuv[1] = progma.uniformLocation("tex_u");
    uint_yuv[2] = progma.uniformLocation("tex_v");
    // Create the three plane textures.
    glGenTextures(3, opengl_texs);
    // Y plane: full resolution, linear filtering both ways.
    glBindTexture(GL_TEXTURE_2D, opengl_texs[0]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    // Allocate storage only (data uploaded each frame in paintGL).
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, width, height, 0, GL_RED, GL_UNSIGNED_BYTE, 0);
    // U plane: half resolution in each dimension.
    glBindTexture(GL_TEXTURE_2D, opengl_texs[1]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, width/2, height/2, 0, GL_RED, GL_UNSIGNED_BYTE, 0);
    // V plane: half resolution in each dimension.
    glBindTexture(GL_TEXTURE_2D, opengl_texs[2]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, width/2, height/2, 0, GL_RED, GL_UNSIGNED_BYTE, 0);
    mux.unlock();
}
// Called from the capture thread with one contiguous I420 frame
// (Y plane, then U, then V). Copies the planes into the staging buffers and
// schedules a repaint on the GUI thread.
// Fix vs. original: the null-frame early return happened AFTER mux.lock()
// and returned with the mutex still held, deadlocking every later lock.
// Also guard against Repaint() arriving before Init() allocated datas[]
// (the original would memcpy into null pointers).
void VideoPlay::Repaint(unsigned char *frame)
{
    if (!frame) return; // check before taking the lock
    mux.lock();
    if (datas[0] && datas[1] && datas[2]) {
        int len = width * height;
        memcpy(datas[0], frame, len);           // Y plane
        frame = frame + len;
        memcpy(datas[1], frame, (len / 4));     // U plane
        frame = frame + (len / 4);
        memcpy(datas[2], frame, (len / 4));     // V plane
    }
    mux.unlock();
    // Request paintGL() on the GUI thread (outside the lock).
    update();
}
// Qt resize hook: only logs the new size. No viewport handling is needed here
// because the quad covers all of clip space and QOpenGLWidget manages the
// default viewport itself.
void VideoPlay::resizeGL(int width, int height)
{
    qDebug() << "resizeGL:" << width << height;
}
最后效果 实时渲染视频播放音频
本代码仅供学习使用,代码没有做封装,退出时候还有BUG。
附上整个代码链接
链接:https://pan.baidu.com/s/1SqmKrhXUEZFwvqqwNUkGWQ
提取码:cco3
编译需要安装alsa库,还有V4L库