OpenGL.Shader: Zhige teaches you to write a live filter client (5) Visual filters: contrast, exposure, mosaic

OpenGL.Shader: Zhige teaches you to write a live filter client(5)

The previous chapter showed how to switch filters seamlessly while rendering NV21 streams. This chapter builds on it and introduces three filter effects — contrast, color inversion (mislabeled "exposure" below), and mosaic — as well as how to adjust a filter's strength dynamically. Without further ado, let's look at the code!

Filter 1: Contrast

#include "GpuBaseFilter.hpp"
/**
 * 更改图像的对比度。
 * 对比度值在0.0到4.0之间,正常值为1.0
 */
class GpuContrastFilter : public GpuBaseFilter {
public:
    int getTypeId() { return FILTER_TYPE_CONTRAST; }

    GpuContrastFilter()
    {
        CONTRAST_FRAGMENT_SHADER  ="precision mediump float;\n\
                                    varying highp vec2 textureCoordinate;\n\
                                    uniform sampler2D SamplerRGB;\n\
                                    uniform sampler2D SamplerY;\n\
                                    uniform sampler2D SamplerU;\n\
                                    uniform sampler2D SamplerV;\n\
                                    uniform lowp float contrast;\n\
                                    mat3 colorConversionMatrix = mat3(\n\
                                                       1.0, 1.0, 1.0,\n\
                                                       0.0, -0.39465, 2.03211,\n\
                                                       1.13983, -0.58060, 0.0);\n\
                                    vec3 yuv2rgb(vec2 pos)\n\
                                    {\n\
                                       vec3 yuv;\n\
                                       yuv.x = texture2D(SamplerY, pos).r;\n\
                                       yuv.y = texture2D(SamplerU, pos).r - 0.5;\n\
                                       yuv.z = texture2D(SamplerV, pos).r - 0.5;\n\
                                       return colorConversionMatrix * yuv;\n\
                                    }\n\
                                    void main()\n\
                                    {\n\
                                       vec4 textureColor = vec4(yuv2rgb(textureCoordinate), 1.0);\n\
                                       gl_FragColor = vec4((contrast*(textureColor.rgb - vec3(0.5)) + vec3(0.5)), textureColor.w);\n\
                                    }";
    }
    ~GpuContrastFilter() {
        if(!CONTRAST_FRAGMENT_SHADER.empty()) CONTRAST_FRAGMENT_SHADER.clear();
    }
    void init() {
        GpuBaseFilter::init(NO_FILTER_VERTEX_SHADER.c_str(), CONTRAST_FRAGMENT_SHADER.c_str());
        mContrastLocation = glGetUniformLocation(mGLProgId, "contrast");
        mContrastValue = 1.0f;
    }

    void setAdjustEffect(float percent) {
        mContrastValue = percent * 4.0f;
    } // (经验范围控制在0~4)

    void onDraw(GLuint SamplerY_texId, GLuint SamplerU_texId, GLuint SamplerV_texId,
                        void* positionCords, void* textureCords)
    { // ...}

private:
    std::string CONTRAST_FRAGMENT_SHADER;

    GLint   mContrastLocation;
    float   mContrastValue;
};

The vertex shader reuses the base class's GpuBaseFilter.NO_FILTER_VERTEX_SHADER. Looking at the fragment shader, the contrast principle is easy to spot:

gl_FragColor = vec4((contrast*(textureColor.rgb - vec3(0.5)) + vec3(0.5)), textureColor.w);

Subtracting vec3(0.5) recenters each color channel around zero; contrast is the contrast factor; multiplying by it stretches (or compresses) the spread of color values around the midpoint before 0.5 is added back. (In practice the factor is kept in the range 0–4.)

 This contrast factor clearly needs to be adjustable at runtime. Recalling the design from the previous chapter and tracing setAdjustEffect(float percent) in GpuFilterRender, we find it is called from renderOnDraw. Part of the code follows:

void GpuFilterRender::renderOnDraw(double elpasedInMilliSec)
{
        // Render one frame: upload the three NV21 planes as textures, then
        // draw with the currently selected filter.
        mWindowSurface->makeCurrent();
        // Y plane is full resolution; U and V planes are subsampled by 2 in
        // each dimension (4:2:0), hence width/2 x height/2.
        yTextureId = updateTexture(dst_y, yTextureId, mFrameWidth, mFrameHeight);
        uTextureId = updateTexture(dst_u, uTextureId, mFrameWidth/2, mFrameHeight/2);
        vTextureId = updateTexture(dst_v, vTextureId, mFrameWidth/2, mFrameHeight/2);
        // Swap in a newly requested filter, if any (see the previous chapter).
        checkFilterChange();
        if( mFilter!=NULL) {
            // Push the latest slider percentage every frame so adjustments
            // take effect immediately on the GL thread.
            mFilter->setAdjustEffect(mFilterEffectPercent);
            mFilter->onDraw(yTextureId, uTextureId, vTextureId, positionCords, textureCords);
        }
        // ...    
}
/**
 * Converts a SeekBar position into a 0..1 percentage that the active
 * filter maps onto its own effect range (see setAdjustEffect).
 * @param value current SeekBar position
 * @param max   SeekBar maximum; must be > 0
 */
void GpuFilterRender::adjustFilterValue(int value, int max) {
    if (max <= 0) return; // guard against division by zero from a misconfigured SeekBar
    mFilterEffectPercent = (float)value / (float)max;
    //LOGD("GpuFilterRender adjust %f", mFilterEffectPercent);
}
///gpu_filter_jni//
// JNI entry point: forwards the SeekBar (value, max) pair from Java to the
// native renderer.
// NOTE(review): lazily constructs the global renderer if it does not exist
// yet; presumably other JNI entry points share this `render` global — confirm
// there is no race between the Java UI thread and the GL render thread.
JNIEXPORT void JNICALL
Java_org_zzrblog_gpufilter_GpuFilterRender_adjustFilterValue(JNIEnv *env, jobject instance, jint value, jint max) {
    if (render == NULL)
        render = new GpuFilterRender();
    render->adjustFilterValue(value, max);
}

 Tracing further back: the SeekBar in the Activity calls CfeScheduler.adjustFilterValue(value, max), which ultimately adjusts the contrast factor dynamically. The effect looks like this:

 

Filter 2: Color Inversion (photographic negative)

#include "GpuBaseFilter.hpp"
/**
 * Inverts all colors in the image (photographic negative).
 */
class GpuColorInvertFilter : public GpuBaseFilter {
public:
    // Identifies this filter type to the filter-switching logic.
    int getTypeId() { return FILTER_TYPE_COLOR_INVERT; }

    GpuColorInvertFilter()
    {
        // Fragment shader: converts the NV21 Y/U/V planes to RGB, then
        // outputs 1.0 - rgb, i.e. the color negative.
        COLOR_INVERT_FRAGMENT_SHADER="precision mediump float;\n\
                                    varying highp vec2 textureCoordinate;\n\
                                    uniform sampler2D SamplerRGB;\n\
                                    uniform sampler2D SamplerY;\n\
                                    uniform sampler2D SamplerU;\n\
                                    uniform sampler2D SamplerV;\n\
                                    mat3 colorConversionMatrix = mat3(\n\
                                                       1.0, 1.0, 1.0,\n\
                                                       0.0, -0.39465, 2.03211,\n\
                                                       1.13983, -0.58060, 0.0);\n\
                                    vec3 yuv2rgb(vec2 pos)\n\
                                    {\n\
                                       vec3 yuv;\n\
                                       yuv.x = texture2D(SamplerY, pos).r;\n\
                                       yuv.y = texture2D(SamplerU, pos).r - 0.5;\n\
                                       yuv.z = texture2D(SamplerV, pos).r - 0.5;\n\
                                       return colorConversionMatrix * yuv;\n\
                                    }\n\
                                    void main()\n\
                                    {\n\
                                       vec4 textureColor = vec4(yuv2rgb(textureCoordinate), 1.0);\n\
                                       gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);\n\
                                    }";
    }
    // NOTE(review): no user-declared destructor is needed — std::string
    // releases its own storage (Rule of Zero). The original destructor's
    // manual empty()/clear() was redundant.

    void init() {
        GpuBaseFilter::init(NO_FILTER_VERTEX_SHADER.c_str(), COLOR_INVERT_FRAGMENT_SHADER.c_str());
    }

private:
    std::string COLOR_INVERT_FRAGMENT_SHADER;
};

The vertex shader again uses the base class's GpuBaseFilter.NO_FILTER_VERTEX_SHADER. A look at the fragment shader reveals the inversion principle:

gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w); 

It is simply a color negation — and since there is nothing to adjust, no dynamic parameter is needed.

The resulting effect:

 

Filter 3: Mosaic

The last — and most entertaining — filter effect: the mosaic.

#include "GpuBaseFilter.hpp"
/**
 * 对图像应用格仔化效果。
 */
class GpuPixelationFilter : public GpuBaseFilter {
public:
    int getTypeId() { return FILTER_TYPE_PIXELATION; }

    GpuPixelationFilter()
    {
        PIXELATION_FRAGMENT_SHADER="precision highp float;\n\
                                    varying highp vec2 textureCoordinate;\n\
                                    uniform sampler2D SamplerRGB;\n\
                                    uniform sampler2D SamplerY;\n\
                                    uniform sampler2D SamplerU;\n\
                                    uniform sampler2D SamplerV;\n\
                                    mat3 colorConversionMatrix = mat3(\n\
                                                       1.0, 1.0, 1.0,\n\
                                                       0.0, -0.39465, 2.03211,\n\
                                                       1.13983, -0.58060, 0.0);\n\
                                    uniform float imageWidthFactor;\n\
                                    uniform float imageHeightFactor;\n\
                                    uniform float pixel;\n\
                                    vec3 yuv2rgb(vec2 pos)\n\
                                    {\n\
                                        vec3 yuv;\n\
                                        yuv.x = texture2D(SamplerY, pos).r;\n\
                                        yuv.y = texture2D(SamplerU, pos).r-0.5;\n\
                                        yuv.z = texture2D(SamplerV, pos).r-0.5;\n\
                                        return colorConversionMatrix * yuv;\n\
                                    }\n\
                                    void main()\n\
                                    {\n\
                                        vec2 uv  = textureCoordinate.xy;\n\
                                        float dx = pixel * imageWidthFactor;\n\
                                        float dy = pixel * imageHeightFactor;\n\
                                        vec2 coord = vec2(dx*floor(uv.x / dx), dy*floor(uv.y / dy));\n\
                                        gl_FragColor = vec4(yuv2rgb(coord), 1.0);\n\
                                    }";
    }
    ~GpuPixelationFilter() {
        if(!PIXELATION_FRAGMENT_SHADER.empty()) PIXELATION_FRAGMENT_SHADER.clear();
    }

    void init() {
        GpuBaseFilter::init(NO_FILTER_VERTEX_SHADER.c_str(), PIXELATION_FRAGMENT_SHADER.c_str());
        mPixelLocation = glGetUniformLocation(mGLProgId, "pixel");
        mImageWidthFactorLocation = glGetUniformLocation(mGLProgId, "imageWidthFactor");
        mImageHeightFactorLocation = glGetUniformLocation(mGLProgId, "imageHeightFactor");
        mPixelValue = 1.0f;
    }
    void setAdjustEffect(float percent) {
        if(percent==0.0f) percent=0.01f;
        mPixelValue = percent * 100.0f;
    }
    void onOutputSizeChanged(int width, int height) {
        GpuBaseFilter::onOutputSizeChanged(width, height);
        glUniform1f(mImageWidthFactorLocation, 1.0f / width);
        glUniform1f(mImageHeightFactorLocation, 1.0f / height);
    }
    // ...
    void onDraw(GLuint SamplerY_texId, GLuint SamplerU_texId, GLuint SamplerV_texId,
                void* positionCords, void* textureCords)
    {
        if (!mIsInitialized)
            return;
        glUseProgram(mGLProgId);

        glUniform1f(mPixelLocation, mPixelValue);
        glUniform1f(mImageWidthFactorLocation, 1.0f / mOutputWidth);
        glUniform1f(mImageHeightFactorLocation, 1.0f / mOutputHeight);
        // 绘制的模板代码,此处省略
    }
};

There is quite a bit going on here, so let's analyze it:

uniform float imageWidthFactor;  // reciprocal of the output width, i.e. 1.0/width (set from C++)
uniform float imageHeightFactor; // reciprocal of the output height, i.e. 1.0/height
uniform float pixel; // mosaic block size in pixels (sampling stride)

void main()
{
    vec2 uv  = textureCoordinate.xy; // current texture coordinate, in [0,1]
    float dx = pixel * imageWidthFactor;  // block width in texture-coordinate units
    float dy = pixel * imageHeightFactor; // block height in texture-coordinate units
    // floor(uv.x / dx) rounds down to the block index. Concrete numbers:
    // with a 720x1280 output and pixel = 72, dx = 72/720 = 0.1, so
    //   uv.x in [0.0, 0.1) -> dx*floor(uv.x/dx) = 0.0
    //   uv.x in [0.1, 0.2) -> 0.1, and so on.
    // Every fragment inside a block is therefore snapped to the block's first
    // texel, so the whole block samples the same color — the mosaic effect.
    vec2 coord = vec2(dx*floor(uv.x / dx), dy*floor(uv.y / dy));
    gl_FragColor = vec4(yuv2rgb(coord), 1.0);
}";

After the analysis is completed, take a look at the effect diagram.

       

 Project address:  https://github.com/MrZhaozhirong/NativeCppApp     shader is centrally placed in src\main\cpp\gpufilter\filter

Guess you like

Origin blog.csdn.net/a360940265a/article/details/104636475