Android OpenGL ES 3.0 LUT filter

1. What is a LUT?

LUT stands for Look Up Table, also called a color lookup table, and is a technique for managing and converting color spaces. LUTs come in one-dimensional (1D LUT) and three-dimensional (3D LUT) variants, of which the three-dimensional LUT is the more commonly used. Simply put, a LUT is a table that maps one RGB combination to another RGB combination.

The LUT filter is a classic filter that is essentially an independent per-pixel replacement: for each pixel sampled from the texture by the OpenGL sampler, its (R, G, B) components are used to look up the table, the mapped (R1, G1, B1) is retrieved, and that value replaces the original output.
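Conceptually, this replacement can be written as a plain table lookup on the CPU. The following is a minimal C++ sketch of the idea; the RGB struct, Lut3D type and ApplyLut helper are illustrative only and not part of the project code.

#include <array>
#include <cstddef>
#include <cstdint>

// Illustrative pixel type and full-resolution 3D lookup table.
struct RGB { uint8_t r, g, b; };

// lut[r][g][b] holds the replacement color for the input combination (r, g, b).
using Lut3D = std::array<std::array<std::array<RGB, 256>, 256>, 256>;

// Replace every pixel independently by looking up its (R, G, B) combination.
void ApplyLut(RGB *pixels, std::size_t count, const Lut3D &lut) {
    for (std::size_t i = 0; i < count; ++i) {
        const RGB &in = pixels[i];
        pixels[i] = lut[in.r][in.g][in.b];
    }
}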

2. LUT map

Generally an RGB pixel occupies 3 bytes, one per component, and each component has 256 possible values, so a full 3D LUT would have to cover 256 X 256 X 256 combinations: 256 X 256 X 256 entries X 3 bytes = 50,331,648 bytes, about 48 MB of memory. Such a LUT consumes too much memory and also slows down the lookup, so down-sampling is usually used to reduce the amount of data.

Each component of the three-dimensional LUT can instead be sampled at 64 levels, giving a mapping table of size 64 X 64 X 64; color values that are not in the table are interpolated from the nearest entries, which gives a close approximation.
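A minimal sketch of this down-sampling, assuming 8-bit components (the function names below are illustrative, not from the original code): each component is quantized from 256 levels to 64, and the fractional remainder is what the interpolation later blends between neighbouring entries.

#include <cstdint>

// Quantize an 8-bit component (0..255) to one of 64 sampled levels (0..63).
inline int QuantizeTo64(uint8_t component) {
    return component >> 2;               // 256 levels / 4 = 64 levels
}

// Fractional position between the two nearest sampled levels,
// used to interpolate between neighbouring LUT entries.
inline float FractionBetweenLevels(uint8_t component) {
    return (component & 0x3) / 4.0f;
}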

This 3D LUT, i.e. the 64 X 64 X 64 mapping table, is usually stored as a two-dimensional image with a resolution of 512 X 512, called a LUT map.

The LUT image is divided into 8 X 8 small squares horizontally and vertically, 64 squares in total. Within each square the B (Blue) component is a fixed value, so the 64 squares represent the 64 possible values of the B component.

Each small square is in turn divided into 64 steps horizontally and 64 steps vertically. Taking the lower-left corner as the origin, the R (Red) component increases from left to right and the G (Green) component increases from bottom to top.

So, for an original sampled pixel, its B component determines which square of the LUT image to use, and its (R, G) components then determine the horizontal and vertical coordinates inside that square, which give the mapped RGB combination.
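Putting the layout together, the texel position inside the 512 X 512 LUT map can be computed from a normalized (r, g, b) color as follows. This is a CPU-side sketch of the same math that the fragment shader below performs (only the lower of the two neighbouring B cells is shown); the function name and parameters are illustrative.

#include <cmath>

struct Vec2 { float x, y; };

// Map a normalized color (r, g, b in 0..1) to a texture coordinate
// in the 512 x 512 LUT map.
Vec2 LutTexCoord(float r, float g, float b) {
    float blue  = b * 63.0f;                 // B selects one of 64 cells
    float cell  = std::floor(blue);
    float cellY = std::floor(cell / 8.0f);   // cells are laid out in an 8 x 8 grid
    float cellX = cell - cellY * 8.0f;

    Vec2 uv;
    // Each cell covers 1/8 of the image; R and G select the texel inside it.
    uv.x = cellX * 0.125f + 0.5f / 512.0f + (0.125f - 1.0f / 512.0f) * r;
    uv.y = cellY * 0.125f + 0.5f / 512.0f + (0.125f - 1.0f / 512.0f) * g;
    return uv;
}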

3. LUT filter implementation

3.1 Vertex shader
#version 300 es
layout(location = 0) in vec3 attr_position;
layout(location = 1) in vec2 attr_uv;

uniform mat4   uni_mat;
out vec2   v_texcoord;

void main(void)
{
    v_texcoord = attr_uv;
    gl_Position = uni_mat * vec4(attr_position, 1.0);
}

The vertex shader simply applies the MVP matrix to the quad vertices and passes the texture coordinates through.

3.2 Fragment shader
#version 300 es
precision mediump float;
//precision highp float;

// LUT sampler
uniform sampler2D s_LutTexture;

uniform sampler2D uni_textureY;
uniform sampler2D uni_textureU;
uniform sampler2D uni_textureV;

in vec2 v_texcoord;
out vec4 fragColor;

uniform float u_offset; // offset
uniform vec2 texSize;   // texture size

vec4 YuvToRgb(vec2 uv){
    vec3 yuv;
    vec3 rgb;
    yuv.x = texture(uni_textureY, uv).r;
    yuv.y = texture(uni_textureU, uv).r - 0.5;
    yuv.z = texture(uni_textureV, uv).r - 0.5;
    rgb = mat3( 1,1,1, 0,-0.39465,2.03211,1.13983,-0.58060,0) * yuv;
    return vec4(rgb, 1);
}

vec4 LutFilter(vec2 texCoord)
{
    // RGBA value of the original sampled pixel
    vec4 textureColor = YuvToRgb(texCoord);

    // Take the B component and map it to the LUT cell index range 0~63
    float blueColor = textureColor.b * 63.0;

    // Coordinates of the two cells whose B values are closest to the pixel's B component
    vec2 quad1;
    quad1.y = floor(floor(blueColor) / 8.0);
    quad1.x = floor(blueColor) - (quad1.y * 8.0);

    vec2 quad2;
    quad2.y = floor(ceil(blueColor) / 7.9999);
    quad2.x = ceil(blueColor) - (quad2.y * 8.0);

    // Use the R and G components to locate the target RGB combination inside each cell, then normalize to texture coordinates
    vec2 texPos1;
    texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
    texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);

    vec2 texPos2;
    texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
    texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);

    // Sample the mapped pixel values
    vec4 newColor1 = texture(s_LutTexture, texPos1);
    vec4 newColor2 = texture(s_LutTexture, texPos2);

    // Blend the two neighbouring samples with mix()
    vec4 newColor = mix(newColor1, newColor2, fract(blueColor));
    return mix(textureColor, vec4(newColor.rgb, textureColor.w), 1.0);
}


void main(void)
{

	// Split the screen to make the effect easy to compare
    if(v_texcoord.x > 0.5)
    {
        fragColor = LutFilter(v_texcoord);
    }
    else
    {
        fragColor = YuvToRgb(v_texcoord);
    }

}

The fragment shader first converts YUV to the original RGB color, then uses the B component to pick the two nearest LUT cells, uses the R and G components to compute the sampling coordinates inside those cells, samples the LUT map at both positions, and blends the two results into the new RGB color that is returned.

To make the effect easy to compare, the output is split left and right: the left half shows the original image and the right half shows the LUT-filtered result.

3.3 Rendering part
void MSDynamicGridLine::Render(MSGLCamera *pCamera) {
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    if(m_bUpdateData == false){
        return;
    }



    static MSVertex triangleVert[] = {
            {-1, 1,  1,     0,0},
            {-1, -1,  1,    0,1},
            {1,  1,  1,     1,0},
            {1,  -1,  1,    1,1},
    };


    glm::mat4x4  objectMat = glm::mat4x4(1.0);
    glm::mat4x4  objectTransMat = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 0.0f, -5));
    objectMat = objectMat * objectTransMat;

    objectMat = pCamera->projectionMatrix * pCamera->viewMatrix * objectMat ;


    m_pOpenGLShader->Bind();

    m_pOpenGLShader->SetUniformValue("uni_mat",objectMat);


    m_pOpenGLShader->EnableAttributeArray("attr_position");
    m_pOpenGLShader->SetAttributeBuffer("attr_position",GL_FLOAT,triangleVert,3,sizeof(MSVertex));

    m_pOpenGLShader->EnableAttributeArray("attr_uv");
    m_pOpenGLShader->SetAttributeBuffer("attr_uv",GL_FLOAT,&triangleVert[0].u,2,sizeof(MSVertex));

    m_PeriodicFrameIndex++;
    float progress = GetFrameProgress();
    m_pOpenGLShader->SetUniformValue("u_offset",0.2f * progress);

//    LOGD("m_nVideoW is %d,progress is %f",m_nVideoW,progress);

    if (m_nVideoW>0){
        m_pOpenGLShader->SetUniformValue("texSize",glm::vec2(m_nVideoW,m_nVideoH));
    }else{
        m_pOpenGLShader->SetUniformValue("texSize",glm::vec2(720,1280));
    }


    m_pOpenGLShader->SetUniformValue("uni_textureY",0);

    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, m_textures[0]);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, m_nVideoW, m_nVideoH, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pBufYuv420p);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    m_pOpenGLShader->SetUniformValue("uni_textureU",1);
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, m_textures[1]);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE,m_nVideoW/2, m_nVideoH/2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, (char*)(m_pBufYuv420p+m_yFrameLength));
    glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    m_pOpenGLShader->SetUniformValue("uni_textureV",2);
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, m_textures[2]);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, m_nVideoW/2, m_nVideoH/2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, (char*)(m_pBufYuv420p+m_yFrameLength+m_uFrameLength));
    glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	
    // Add the LUT filter sampler
    m_pOpenGLShader->SetUniformValue("s_LutTexture",3);
    glActiveTexture(GL_TEXTURE3);
    glBindTexture(GL_TEXTURE_2D,m_texID);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    glBindTexture(GL_TEXTURE_2D, 0);
    m_pOpenGLShader->DisableAttributeArray("attr_position");
    m_pOpenGLShader->DisableAttributeArray("attr_uv");

    m_pOpenGLShader->Release();

    return;
}

In the rendering part, apart from binding the extra LUT sampler on texture unit 3, everything else is the usual YUV texture upload and draw.
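The LUT map itself (bound above as m_texID) has to be uploaded once before rendering. A minimal sketch, assuming the 512 X 512 LUT image has already been decoded to RGBA bytes; the lutPixels parameter and CreateLutTexture helper are assumptions, not part of the original code.

GLuint CreateLutTexture(const unsigned char *lutPixels /* 512 x 512 RGBA */) {
    GLuint texId = 0;
    glGenTextures(1, &texId);
    glBindTexture(GL_TEXTURE_2D, texId);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 512, 512, 0,
                 GL_RGBA, GL_UNSIGNED_BYTE, lutPixels);
    // Linear filtering and edge clamping keep samples inside each cell.
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glBindTexture(GL_TEXTURE_2D, 0);
    return texId;
}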


Origin: blog.csdn.net/u014078003/article/details/128001009