抽取WebRtc Android里texture转I420代码

简单地把WebRtc里面texture转I420的代码抽取出来。

首先是MainActivity:

package webrtc.example.com.texture2i420;

import android.Manifest;
import android.content.pm.PackageManager;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.camera2.CameraManager;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import java.io.IOException;
import java.util.Arrays;

public class MainActivity extends AppCompatActivity implements  SurfaceTexture.OnFrameAvailableListener, SurfaceHolder.Callback {

    private SurfaceView mSurfaceView;
    private Surface mSurface;
    private YuvConverter yuvConverter;
    private Camera camera;
    float[] mtx = new float[16];
    private int mTextureId;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mSurfaceView=findViewById(R.id.surfaceView);
        mSurfaceView.getHolder().addCallback(this);
    }


    @Override
    public void onAttachedToWindow() {
        super.onAttachedToWindow();
    }


    @Override
    protected void onDestroy() {
        super.onDestroy();
        camera.stopPreview();
        camera.release();
        yuvConverter.release();

    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        surfaceTexture.updateTexImage();
        surfaceTexture.getTransformMatrix(mtx);
        Log.w("yangTest", "mtx:" + Arrays.toString(mtx));
        yuvConverter.convert(720, 1280,  mtx);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        yuvConverter = new YuvConverter(holder.getSurface());

        camera = Camera.open(1);
        try {
            yuvConverter.getmSurfaceTexture().setDefaultBufferSize(720, 1280);
            camera.setPreviewTexture(yuvConverter.getmSurfaceTexture());
            yuvConverter.getmSurfaceTexture().setOnFrameAvailableListener(this);
            camera.setDisplayOrientation(90);
            camera.startPreview();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {

    }

}

然后是GlShader:

/*
 *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package webrtc.example.com.texture2i420;

import android.opengl.GLES20;
import android.util.Log;

import java.nio.FloatBuffer;

// Helper class for handling OpenGL shaders and shader programs.
public class GlShader {
  private static final String TAG = "GlShader";

  private static int compileShader(int shaderType, String source) {
    final int shader = GLES20.glCreateShader(shaderType);
    GlUtil.checkNoGLES2Error("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
    if (shader == 0) {
      throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
    }
    GLES20.glShaderSource(shader, source);
    GLES20.glCompileShader(shader);
    int[] compileStatus = new int[] {GLES20.GL_FALSE};
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
    if (compileStatus[0] != GLES20.GL_TRUE) {
      Log.e(
          TAG, "Compile error " + GLES20.glGetShaderInfoLog(shader) + " in shader:\n" + source);
      throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
    }
    GlUtil.checkNoGLES2Error("compileShader");
    return shader;
  }

  private int program;

  public GlShader(String vertexSource, String fragmentSource) {
    final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
    final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
    program = GLES20.glCreateProgram();
    if (program == 0) {
      throw new RuntimeException("glCreateProgram() failed. GLES20 error: " + GLES20.glGetError());
    }
    GLES20.glAttachShader(program, vertexShader);
    GLES20.glAttachShader(program, fragmentShader);
    GLES20.glLinkProgram(program);
    int[] linkStatus = new int[] {GLES20.GL_FALSE};
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
    if (linkStatus[0] != GLES20.GL_TRUE) {
      Log.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
      throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
    }
    // According to the documentation of glLinkProgram():
    // "After the link operation, applications are free to modify attached shader objects, compile
    // attached shader objects, detach shader objects, delete shader objects, and attach additional
    // shader objects. None of these operations affects the information log or the program that is
    // part of the program object."
    // But in practice, detaching shaders from the program seems to break some devices. Deleting the
    // shaders are fine however - it will delete them when they are no longer attached to a program.
    GLES20.glDeleteShader(vertexShader);
    GLES20.glDeleteShader(fragmentShader);
    GlUtil.checkNoGLES2Error("Creating GlShader");
  }

  public int getAttribLocation(String label) {
    if (program == -1) {
      throw new RuntimeException("The program has been released");
    }
    int location = GLES20.glGetAttribLocation(program, label);
    if (location < 0) {
      throw new RuntimeException("Could not locate '" + label + "' in program");
    }
    return location;
  }

  /**
   * Enable and upload a vertex array for attribute |label|. The vertex data is specified in
   * |buffer| with |dimension| number of components per vertex.
   */
  public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
    setVertexAttribArray(label, dimension, 0 /* stride */, buffer);
  }

  /**
   * Enable and upload a vertex array for attribute |label|. The vertex data is specified in
   * |buffer| with |dimension| number of components per vertex and specified |stride|.
   */
  public void setVertexAttribArray(String label, int dimension, int stride, FloatBuffer buffer) {
    if (program == -1) {
      throw new RuntimeException("The program has been released");
    }
    int location = getAttribLocation(label);
    GLES20.glEnableVertexAttribArray(location);
    GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, stride, buffer);
    GlUtil.checkNoGLES2Error("setVertexAttribArray");
  }

  public int getUniformLocation(String label) {
    if (program == -1) {
      throw new RuntimeException("The program has been released");
    }
    int location = GLES20.glGetUniformLocation(program, label);
    if (location < 0) {
      throw new RuntimeException("Could not locate uniform '" + label + "' in program");
    }
    return location;
  }

  public void useProgram() {
    if (program == -1) {
      throw new RuntimeException("The program has been released");
    }
    GLES20.glUseProgram(program);
    GlUtil.checkNoGLES2Error("glUseProgram");
  }

  public void release() {
    Log.d(TAG, "Deleting shader.");
    // Delete program, automatically detaching any shaders from it.
    if (program != -1) {
      GLES20.glDeleteProgram(program);
      program = -1;
    }
  }
}

然后是GlTextureFrameBuffer:

/*
 *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package webrtc.example.com.texture2i420;

import android.opengl.GLES20;

/**
 * Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil
 * buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color
 * conversion. This class is not thread safe and must be used by a thread with an active GL context.
 */
// TODO(magjed): Add unittests for this class.
/**
 * OpenGL framebuffer helper with a single color attachment and no depth or stencil
 * buffer. Intended for simple render-to-texture tasks such as texture copy,
 * downscaling and color conversion. Not thread safe; every call requires a current
 * GL context on the calling thread.
 */
public class GlTextureFrameBuffer {
  // Texture format of the color attachment (GL_LUMINANCE, GL_RGB or GL_RGBA).
  private final int pixelFormat;
  private int frameBufferId;
  private int textureId;
  private int width;
  private int height;

  /**
   * Validates the pixel format; GL resources themselves are created lazily, so the
   * framebuffer is not complete until setSize() has been called at least once.
   * An EGLContext must be bound on the current thread when calling this.
   */
  public GlTextureFrameBuffer(int pixelFormat) {
    final boolean supported = pixelFormat == GLES20.GL_LUMINANCE
        || pixelFormat == GLES20.GL_RGB
        || pixelFormat == GLES20.GL_RGBA;
    if (!supported) {
      throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
    }
    this.pixelFormat = pixelFormat;
    this.width = 0;
    this.height = 0;
  }

  /**
   * (Re)allocates the backing texture. Does nothing when the requested size equals
   * the current one. Requires a bound EGLContext. Must run at least once before the
   * framebuffer is used; may be called repeatedly to resize.
   */
  public void setSize(int width, int height) {
    if (width <= 0 || height <= 0) {
      throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
    }
    if (width == this.width && height == this.height) {
      return; // Already allocated at this size.
    }
    this.width = width;
    this.height = height;
    // Create GL objects only on first use.
    if (textureId == 0) {
      textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
    }
    if (frameBufferId == 0) {
      final int[] ids = new int[1];
      GLES20.glGenFramebuffers(1, ids, 0);
      frameBufferId = ids[0];
    }

    // (Re)allocate texture storage at the new size.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
        GLES20.GL_UNSIGNED_BYTE, null);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
    GlUtil.checkNoGLES2Error("GlTextureFrameBuffer setSize");

    // Use the texture as the framebuffer's color attachment.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
    GLES20.glFramebufferTexture2D(
        GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);

    // Fail fast if the driver rejects this attachment combination.
    final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
    if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
      throw new IllegalStateException("Framebuffer not complete, status: " + status);
    }

    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
  }

  public int getWidth() {
    return width;
  }

  public int getHeight() {
    return height;
  }

  /** OpenGL framebuffer id. Only valid after setSize() has been called. */
  public int getFrameBufferId() {
    return frameBufferId;
  }

  /** OpenGL texture id. Only valid after setSize() has been called. */
  public int getTextureId() {
    return textureId;
  }

  /**
   * Deletes the texture and framebuffer. Requires a bound EGLContext on the current
   * thread. This object must not be used after this call.
   */
  public void release() {
    GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
    textureId = 0;
    GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
    frameBufferId = 0;
    width = 0;
    height = 0;
  }
}

然后是GlUtil:

/*
 *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package webrtc.example.com.texture2i420;

import android.opengl.GLES20;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

/**
 * Some OpenGL static utility functions.
 */
public class GlUtil {
  private GlUtil() {}

  // Assert that no OpenGL ES 2.0 error has been raised.
  public static void checkNoGLES2Error(String msg) {
    int error = GLES20.glGetError();
    if (error != GLES20.GL_NO_ERROR) {
      throw new RuntimeException(msg + ": GLES20 error: " + error);
    }
  }

  public static FloatBuffer createFloatBuffer(float[] coords) {
    // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
    ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
    bb.order(ByteOrder.nativeOrder());
    FloatBuffer fb = bb.asFloatBuffer();
    fb.put(coords);
    fb.position(0);
    return fb;
  }

  /**
   * Generate texture with standard parameters.
   */
  public static int generateTexture(int target) {
    final int textureArray[] = new int[1];
    GLES20.glGenTextures(1, textureArray, 0);
    final int textureId = textureArray[0];
    GLES20.glBindTexture(target, textureId);
    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    checkNoGLES2Error("generateTexture");
    return textureId;
  }
}

最后是YuvConverter:

package webrtc.example.com.texture2i420;

import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Environment;
import android.util.Log;
import android.view.Surface;

import java.io.File;
import java.io.FileOutputStream;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;

/**
 * Converts camera frames from an external OES texture into an I420 ByteBuffer using
 * the render-to-RGBA-framebuffer trick from WebRTC's YuvConverter: the Y, U and V
 * planes are drawn into one RGBA framebuffer (4 samples packed per RGBA "pixel") and
 * read back with glReadPixels.
 *
 * The constructor makes its EGL context current on the calling thread; all later
 * calls (convert(), release()) must happen on that same thread. Not thread safe.
 */
public class YuvConverter {

    // Full-viewport quad in normalized device coordinates, drawn as a triangle strip.
    private static final FloatBuffer DEVICE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
            -1.0f, -1.0f, // Bottom left.
            1.0f, -1.0f, // Bottom right.
            -1.0f, 1.0f, // Top left.
            1.0f, 1.0f, // Top right.
    });

    // Matching texture coordinates for the quad above.
    private static final FloatBuffer TEXTURE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
            0.0f, 0.0f, // Bottom left.
            1.0f, 0.0f, // Bottom right.
            0.0f, 1.0f, // Top left.
            1.0f, 1.0f // Top right.
    });

    private static final String VERTEX_SHADER =
            "varying vec2 interp_tc;\n"
                    + "attribute vec4 in_pos;\n"
                    + "attribute vec4 in_tc;\n"
                    + "\n"
                    + "uniform mat4 texMatrix;\n"
                    + "\n"
                    + "void main() {\n"
                    + "    gl_Position = in_pos;\n"
                    + "    interp_tc = (texMatrix * in_tc).xy;\n"
                    + "}\n";

    private static final String OES_FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n"
                    + "precision mediump float;\n"
                    + "varying vec2 interp_tc;\n"
                    + "\n"
                    + "uniform samplerExternalOES tex;\n"
                    // Difference in texture coordinate corresponding to one
                    // sub-pixel in the x direction.
                    + "uniform vec2 xUnit;\n"
                    // Color conversion coefficients, including constant term
                    + "uniform vec4 coeffs;\n"
                    + "\n"
                    + "void main() {\n"
                    // Since the alpha read from the texture is always 1, this could
                    // be written as a mat4 x vec4 multiply. However, that seems to
                    // give a worse framerate, possibly because the additional
                    // multiplies by 1.0 consume resources.
                    + "  gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
                    + "      texture2D(tex, interp_tc - 1.5 * xUnit).rgb);\n"
                    + "  gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
                    + "      texture2D(tex, interp_tc - 0.5 * xUnit).rgb);\n"
                    + "  gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
                    + "      texture2D(tex, interp_tc + 0.5 * xUnit).rgb);\n"
                    + "  gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
                    + "      texture2D(tex, interp_tc + 1.5 * xUnit).rgb);\n"
                    + "}\n";

    // OES texture the camera renders into, wrapped by mSurfaceTexture.
    private final int mTextureId;

    /** Returns the SurfaceTexture the camera preview should be attached to. */
    public SurfaceTexture getmSurfaceTexture() {
        return mSurfaceTexture;
    }

    private final SurfaceTexture mSurfaceTexture;
    private EGLSurface eglSurface;
    private EGLContext mEglContext;
    private EGLDisplay mEglDisplay;
    private GlShader shader;
    private int texMatrixLoc;
    private int xUnitLoc;
    private int coeffsLoc;
    private final GlTextureFrameBuffer textureFrameBuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
    // Read-back buffer reused across frames. The original code allocated a fresh
    // direct buffer on every frame, which is needless GC/native churn.
    private ByteBuffer outputBuffer;
    // Dump exactly one frame to /sdcard/yuv.i420, then stop.
    private boolean isSaveFile = true;

    /**
     * Sets up an EGL14 context and a window surface on |surface|, creates the OES
     * texture / SurfaceTexture pair for the camera, makes the context current on the
     * calling thread, and compiles the conversion shader.
     *
     * @param surface window surface to render to (here: the preview SurfaceView).
     * @throws RuntimeException if any EGL setup step fails.
     */
    public YuvConverter(Surface surface) {
        mEglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (mEglDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("unable to get EGL14 display");
        }
        int[] version = new int[2];
        if (!EGL14.eglInitialize(mEglDisplay, version, 0, version, 1)) {
            mEglDisplay = null;
            throw new RuntimeException("unable to initialize EGL14");
        }

        // RGBA8888, ES2-renderable config. The original requested only
        // EGL_PBUFFER_BIT although a *window* surface is created below, which can
        // fail with EGL_BAD_MATCH on strict drivers; request EGL_WINDOW_BIT too.
        int[] configAttribs = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL14.EGL_SURFACE_TYPE, EGL14.EGL_WINDOW_BIT | EGL14.EGL_PBUFFER_BIT,
                EGL14.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(mEglDisplay, configAttribs, 0, configs, 0, configs.length,
                numConfigs, 0)) {
            throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
        }
        if (numConfigs[0] <= 0) {
            throw new RuntimeException("Unable to find any matching EGL config");
        }
        final EGLConfig eglConfig = configs[0];
        if (eglConfig == null) {
            throw new RuntimeException("eglChooseConfig returned null");
        }

        // Request an OpenGL ES 2.0 context.
        int[] contextAttribs = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL14.EGL_NONE
        };
        mEglContext = EGL14.eglCreateContext(
                mEglDisplay, eglConfig, EGL14.EGL_NO_CONTEXT, contextAttribs, 0);

        int[] surfaceAttribs = {EGL14.EGL_NONE};

        // OES texture that receives camera frames via the SurfaceTexture.
        mTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
        mSurfaceTexture = new SurfaceTexture(mTextureId);

        eglSurface = EGL14.eglCreateWindowSurface(mEglDisplay, eglConfig, surface, surfaceAttribs, 0);
        if (eglSurface == EGL14.EGL_NO_SURFACE) {
            throw new RuntimeException(
                    "Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError()));
        }

        if (!EGL14.eglMakeCurrent(mEglDisplay, eglSurface, eglSurface, mEglContext)) {
            throw new RuntimeException("eglMakeCurrent failed");
        }

        initShader();
    }

    /** Compiles the OES->YUV shader and caches its attribute/uniform locations. */
    private void initShader() {
        if (shader != null) {
            shader.release();
        }

        shader = new GlShader(VERTEX_SHADER, OES_FRAGMENT_SHADER);
        shader.useProgram();
        texMatrixLoc = shader.getUniformLocation("texMatrix");
        xUnitLoc = shader.getUniformLocation("xUnit");
        coeffsLoc = shader.getUniformLocation("coeffs");
        GLES20.glUniform1i(shader.getUniformLocation("tex"), 0);
        GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
        // Initialize vertex shader attributes.
        shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
        // If the width is not a multiple of 4 pixels, the texture
        // will be scaled up slightly and clipped at the right border.
        shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
    }

    /**
     * Converts the current content of the camera OES texture into I420 and, for the
     * first frame only, dumps it to /sdcard/yuv.i420.
     *
     * @param width  frame width in pixels.
     * @param height frame height in pixels.
     * @param transformMatrix texture transform from SurfaceTexture.getTransformMatrix().
     */
    public void convert(int width, int height, float[] transformMatrix) {
        // SurfaceTextureHelper requires a stride that is divisible by 8. Round width up.
        final int stride = ((width + 7) / 8) * 8;
        final int uvHeight = (height + 1) / 2;
        // Due to the plane layout, vPos + stride * uvHeight would overrun the buffer;
        // add one extra row at the bottom so downstream code never reads out of bounds.
        final int size = stride * (height + uvHeight + 1);
        if (outputBuffer == null || outputBuffer.capacity() < size) {
            outputBuffer = ByteBuffer.allocateDirect(size);
        }
        final ByteBuffer buffer = outputBuffer;
        buffer.clear();
        convert(buffer, width, height, stride, mTextureId, transformMatrix);

        final int yPos = 0;
        final int uPos = yPos + stride * height;
        // Rows of U and V alternate in the buffer, so V data starts after the first row of U.
        final int vPos = uPos + stride / 2;

        buffer.position(yPos);
        buffer.limit(yPos + stride * height);
        ByteBuffer dataY = buffer.slice();

        buffer.position(uPos);
        buffer.limit(uPos + stride * uvHeight);
        ByteBuffer dataU = buffer.slice();

        buffer.position(vPos);
        buffer.limit(vPos + stride * uvHeight);
        ByteBuffer dataV = buffer.slice();

        // Direct buffers have no backing array, so the original log's array() calls
        // threw UnsupportedOperationException at runtime; log remaining() instead.
        Log.w("yangTest", "dataY:" + dataY.remaining() + "  dataU:" + dataU.remaining()
                + "  dataV:" + dataV.remaining() + "  size:" + size);

        if (isSaveFile) {
            saveAsI420(dataY, dataU, dataV, width, height, stride);
            isSaveFile = false;
        }
    }

    /**
     * Writes one tightly packed I420 frame to /sdcard/yuv.i420. Generalized from the
     * original hard-coded 1280x720 loops (921600/230400/720/640/360 constants) to use
     * the actual width/height/stride. Within each stride-wide chroma row the first
     * stride/2 bytes are U (or V) samples; only width/2 of them carry data.
     */
    private void saveAsI420(ByteBuffer dataY, ByteBuffer dataU, ByteBuffer dataV,
                            int width, int height, int stride) {
        final int chromaWidth = (width + 1) / 2;
        final int chromaHeight = (height + 1) / 2;
        final byte[] data = new byte[width * height + 2 * chromaWidth * chromaHeight];
        int pos = 0;
        // Y plane: copy width bytes out of each stride-wide row.
        for (int row = 0; row < height; row++) {
            for (int col = 0; col < width; col++) {
                data[pos++] = dataY.get(row * stride + col);
            }
        }
        // U plane: each stride-wide row holds a U half-row followed by a V half-row.
        for (int row = 0; row < chromaHeight; row++) {
            for (int col = 0; col < chromaWidth; col++) {
                data[pos++] = dataU.get(row * stride + col);
            }
        }
        // V plane, same layout as U.
        for (int row = 0; row < chromaHeight; row++) {
            for (int col = 0; col < chromaWidth; col++) {
                data[pos++] = dataV.get(row * stride + col);
            }
        }
        final File file = new File(Environment.getExternalStorageDirectory(), "yuv.i420");
        FileOutputStream outStream = null;
        try {
            outStream = new FileOutputStream(file);
            outStream.write(data);
        } catch (Exception e) {
            Log.e("yangTest", "Failed to write " + file, e);
        } finally {
            // The original leaked the stream; always close it.
            if (outStream != null) {
                try {
                    outStream.close();
                } catch (Exception ignored) {
                    // Best-effort close; nothing more to do.
                }
            }
        }
    }

    /**
     * Renders the OES texture |srcTextureId| into |buf| as Y, U and V planes.
     *
     * We draw into a buffer laid out like
     *
     *    +---------+
     *    |         |
     *    |  Y      |
     *    |         |
     *    |         |
     *    +----+----+
     *    | U  | V  |
     *    |    |    |
     *    +----+----+
     *
     * In memory, the same stride is used for all of Y, U and V. The U data starts at
     * offset |height| * |stride| from the Y data, and the V data starts at offset
     * |stride/2| from the U data, with rows of U and V data alternating.
     *
     * A single-byte-per-pixel pixel buffer would have been natural, but is widely
     * unsupported, so instead an RGBA buffer of width |stride|/4 is used: each large
     * RGBA pixel holds 4 consecutive luma/chroma samples, produced by the fragment
     * shader sampling at 4 x offsets. Since the V data must start on a boundary of
     * such a large pixel, |stride| has to be a multiple of 8 pixels, not merely even.
     */
    private void convert(ByteBuffer buf, int width, int height, int stride, int srcTextureId,
                         float[] transformMatrix) {

        shader.useProgram();

        if (stride % 8 != 0) {
            throw new IllegalArgumentException("Invalid stride, must be a multiple of 8");
        }
        if (stride < width) {
            throw new IllegalArgumentException("Invalid stride, must >= width");
        }

        // Widths in units of the 4-samples-per-pixel RGBA framebuffer.
        int y_width = (width + 3) / 4;
        int uv_width = (width + 7) / 8;
        int uv_height = (height + 1) / 2;
        int total_height = height + uv_height;
        int size = stride * total_height;

        if (buf.capacity() < size) {
            throw new IllegalArgumentException("YuvConverter.convert called with too small buffer");
        }
        // NOTE(review): WebRTC multiplies in a vertical-flip matrix here so the frame
        // starts at the top-left corner; without it the dumped frame is presumably
        // upside down — confirm against the saved file.
        // transformMatrix =
        //         RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.verticalFlipMatrix());

        final int frameBufferWidth = stride / 4;
        final int frameBufferHeight = total_height;
        textureFrameBuffer.setSize(frameBufferWidth, frameBufferHeight);

        // Bind our framebuffer.
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureFrameBuffer.getFrameBufferId());
        GlUtil.checkNoGLES2Error("glBindFramebuffer");

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, srcTextureId);
        GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);

        // Draw Y.
        GLES20.glViewport(0, 0, y_width, height);
        // Matrix * (1;0;0;0) / width. Note that opengl uses column major order.
        GLES20.glUniform2f(xUnitLoc, transformMatrix[0] / width, transformMatrix[1] / width);
        // Y'UV444 to RGB888, see
        // https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
        // We use the ITU-R coefficients for U and V.
        GLES20.glUniform4f(coeffsLoc, 0.299f, 0.587f, 0.114f, 0.0f);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        // Draw U into the left half of the chroma region.
        GLES20.glViewport(0, height, uv_width, uv_height);
        // Matrix * (1;0;0;0) / (width / 2). Note that opengl uses column major order.
        GLES20.glUniform2f(
                xUnitLoc, 2.0f * transformMatrix[0] / width, 2.0f * transformMatrix[1] / width);
        GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        // Draw V into the right half of the chroma region (same xUnit as U).
        GLES20.glViewport(stride / 8, height, uv_width, uv_height);
        GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        GLES20.glReadPixels(
                0, 0, frameBufferWidth, frameBufferHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);

        GlUtil.checkNoGLES2Error("YuvConverter.convert");

        // Restore normal framebuffer.
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);

        // Unbind texture. Reportedly needed on some devices to get
        // the texture updated from the camera.
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    }

    /**
     * Releases all GL/EGL resources. Must be called on the thread the converter was
     * created on; the object must not be used afterwards.
     */
    public void release() {
        if (shader != null) {
            shader.release();
            shader = null;
        }
        textureFrameBuffer.release();
        if (eglSurface != EGL14.EGL_NO_SURFACE) {
            EGL14.eglDestroySurface(mEglDisplay, eglSurface);
            eglSurface = EGL14.EGL_NO_SURFACE;
        }
        // Detach the context before destroying it.
        if (!EGL14.eglMakeCurrent(
                mEglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
            throw new RuntimeException(
                    "eglDetachCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
        }
        EGL14.eglDestroyContext(mEglDisplay, mEglContext);
        EGL14.eglReleaseThread();
        EGL14.eglTerminate(mEglDisplay);
        mSurfaceTexture.release();
        mEglContext = EGL14.EGL_NO_CONTEXT;
        mEglDisplay = EGL14.EGL_NO_DISPLAY;
    }
}

最后布局文件

<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".MainActivity">

    <!-- Full-screen preview surface; its Surface is also handed to YuvConverter
         as the EGL window surface, so conversion output is rendered on screen. -->
    <SurfaceView
        android:id="@+id/surfaceView"
        android:layout_width="0dp"
        android:layout_height="0dp"
        app:layout_constraintBottom_toBottomOf="parent"
        app:layout_constraintEnd_toEndOf="parent"
        app:layout_constraintStart_toStartOf="parent"
        app:layout_constraintTop_toTopOf="parent" />

</android.support.constraint.ConstraintLayout>

布局文件比较简单

代码里没有请求相机运行时权限,请自行加上(别忘了在 AndroidManifest.xml 中声明 CAMERA 权限)。

运行后会在/sdcard目录存一帧I420的camera数据

效果如下图:

猜你喜欢

转载自blog.csdn.net/qq_34557284/article/details/87932378