yuv分量提取(NDK:libyuv)

这里为已经打包好的yuv.so
通过 ImageReader 获得数据来源,再借助 libyuv 做格式转换。
Image image = mImageReader.acquireLatestImage()
图片格式决定了image里面planes有几个数组
如下:

    /**
     * Only a subset of the formats defined in
     * {@link android.graphics.ImageFormat ImageFormat} and
     * {@link android.graphics.PixelFormat PixelFormat} are supported by
     * ImageReader. When reading RGB data from a surface, the formats defined in
     * {@link android.graphics.PixelFormat PixelFormat} can be used; when
     * reading YUV, JPEG, HEIC or raw sensor data (for example, from the camera
     * or video decoder), formats from {@link android.graphics.ImageFormat ImageFormat}
     * are used.
     */
    /**
     * Returns how many {@code Image.Plane}s an {@code Image} of the given
     * format exposes.
     *
     * <p>Only a subset of the formats defined in
     * {@link android.graphics.ImageFormat ImageFormat} and
     * {@link android.graphics.PixelFormat PixelFormat} are supported by
     * ImageReader. When reading RGB data from a surface, the formats defined in
     * {@link android.graphics.PixelFormat PixelFormat} can be used; when
     * reading YUV, JPEG, HEIC or raw sensor data (for example, from the camera
     * or video decoder), formats from {@link android.graphics.ImageFormat ImageFormat}
     * are used.</p>
     *
     * @param format an {@code ImageFormat} or {@code PixelFormat} constant
     * @return the number of planes (0 for {@code PRIVATE})
     * @throws UnsupportedOperationException if the format is not supported
     */
    public static int getNumPlanesForFormat(int format) {
        // Planar 4:2:0 YUV: separate Y, U and V planes.
        if (format == ImageFormat.YV12
                || format == ImageFormat.YUV_420_888
                || format == ImageFormat.NV21) {
            return 3;
        }
        // Semi-planar 4:2:2: one Y plane plus one interleaved chroma plane.
        if (format == ImageFormat.NV16) {
            return 2;
        }
        // Packed RGB, compressed, single-channel and raw formats: one plane.
        if (format == PixelFormat.RGB_565
                || format == PixelFormat.RGBA_8888
                || format == PixelFormat.RGBX_8888
                || format == PixelFormat.RGB_888
                || format == ImageFormat.JPEG
                || format == ImageFormat.YUY2
                || format == ImageFormat.Y8
                || format == ImageFormat.Y16
                || format == ImageFormat.RAW_SENSOR
                || format == ImageFormat.RAW_PRIVATE
                || format == ImageFormat.RAW10
                || format == ImageFormat.RAW12
                || format == ImageFormat.DEPTH16
                || format == ImageFormat.DEPTH_POINT_CLOUD
                || format == ImageFormat.RAW_DEPTH
                || format == ImageFormat.DEPTH_JPEG
                || format == ImageFormat.HEIC) {
            return 1;
        }
        // Opaque implementation-defined format: CPU access is not possible.
        if (format == ImageFormat.PRIVATE) {
            return 0;
        }
        throw new UnsupportedOperationException(
                String.format("Invalid format specified %d", format));
    }

以上为 ImageUtils(android.media)里面给出的线索,故从此之后再也不愁应该在image的planes里面取几组数据了。

RGBA_8888

如上,只需要取planes[0]就好。

if (image.getFormat() == PixelFormat.RGBA_8888) {
    // RGBA_8888 exposes exactly one plane (see getNumPlanesForFormat),
    // so the whole frame lives in planes[0].
    // NOTE(review): this assumes rowStride == width * 4 (no per-row padding);
    // on devices that pad rows, limit() > width * height * 4 and the native
    // side would mis-read the frame — confirm with planes[0].getRowStride().
    ByteBuffer byteBuffer = image.getPlanes()[0].getBuffer();
    byte[] bytes = new byte[byteBuffer.limit()];
    byteBuffer.get(bytes);
    // Hand the raw RGBA bytes to the native libyuv conversion.
    enCodeSource(bytes, image.getWidth(), image.getHeight(), bytes.length);
    return super.loop();
}

// Native entry point implemented in the yuv.so library (see below):
// converts the RGBA buffer to I420 via libyuv.
public native void enCodeSource(byte[] buffer, int width, int height, int size);

native层

extern "C"
JNIEXPORT void JNICALL
Java_com_freeme_cloudmirror_daemon_VideoServer3_enCodeSource(JNIEnv *env, jobject thiz,
                                                             jbyteArray buffer, jint width,
                                                             jint height, jint size) {
    // Pin (or copy) the Java byte[] that holds the RGBA pixels.
    uint8_t *rgbBuffer = (unsigned char *) env->GetByteArrayElements(buffer, NULL);
    // I420 layout: full-resolution Y plane plus quarter-size U and V planes.
    int y_length = width * height;
    int u_length = (width * height) / 4;
    int v_length = u_length;
    uint8_t *y = (unsigned char *) malloc(y_length);
    uint8_t *u = (unsigned char *) malloc(u_length);
    uint8_t *v = (unsigned char *) malloc(v_length);
    LOGE("y:%d \r u:%d \r v:%d \r size:%d",y_length,u_length,v_length,size);
    // NOTE(review): libyuv's ARGBToI420 expects B,G,R,A byte order in memory
    // ("ARGB little endian"); Android RGBA_8888 stores R,G,B,A, which matches
    // libyuv's ABGR — ABGRToI420 is likely the correct call here. Confirm
    // against the actual colors before relying on this conversion.
    libyuv::ARGBToI420(rgbBuffer, width * 4, y, width, u, (width + 1) / 2, v,
                       (width + 1) / 2, width, height);
    // Fix: the original leaked on every frame. Release the pinned array
    // (JNI_ABORT: the pixels were only read, nothing to copy back) and free
    // the three plane buffers.
    env->ReleaseByteArrayElements(buffer, (jbyte *) rgbBuffer, JNI_ABORT);
    free(y);
    free(u);
    free(v);
}

参数记录

/**
ARGB little endian (bgra in memory) to I420.
		src_argb:        the ARGB pixel data to convert.
		src_stride_argb: size in bytes of one row of ARGB data; width * 4 for
		                 ARGB_8888, width * 2 for ARGB_4444.
		dst_y:           receives the Y plane.
		dst_stride_y:    Y row stride in bytes — this is width, NOT
		                 width * height (the original note was wrong).
		dst_u:           receives the U plane.
		dst_stride_u:    U row stride: (width + 1) / 2.
		dst_v:           receives the V plane.
		dst_stride_v:    V row stride: (width + 1) / 2.
		width:           bitmap width in pixels.
		height:          bitmap height in pixels.
*/
LIBYUV_API
int ARGBToI420(const uint8* src_frame, int src_stride_frame,
               uint8* dst_y, int dst_stride_y,
               uint8* dst_u, int dst_stride_u,
               uint8* dst_v, int dst_stride_v,
               int width, int height);

YUV_420_888

     final Image.Plane[] planes = image.getPlanes();

     int width = image.getWidth();
     int height = image.getHeight();

     // Destination buffers in I420 layout: full-size Y, quarter-size U and V.
     byte[] yBytes = new byte[width * height];
     int dstIndex = 0;

     byte uBytes[] = new byte[width * height / 4];
     byte vBytes[] = new byte[width * height / 4];
     int uIndex = 0;
     int vIndex = 0;

     int pixelsStride, rowStride;
     for (int i = 0; i < planes.length; i++) {
         pixelsStride = planes[i].getPixelStride();
         rowStride = planes[i].getRowStride();

         ByteBuffer buffer = planes[i].getBuffer();

         // Fix: size the copy by remaining(), not capacity(). A plane
         // buffer's limit can be smaller than its capacity, and reading
         // capacity() bytes throws BufferUnderflowException on such devices.
         byte[] bytes = new byte[buffer.remaining()];
         buffer.get(bytes);

         int srcIndex = 0;
         if (i == 0) {
             // Y plane: copy 'width' useful bytes per row; rowStride may
             // include padding, so advance the source by rowStride.
             for (int j = 0; j < height; j++) {
                 System.arraycopy(bytes, srcIndex, yBytes, dstIndex, width);
                 srcIndex += rowStride;
                 dstIndex += width;
             }
         } else if (i == 1) {
             // U plane: width/2 samples per row, spaced pixelStride bytes
             // apart (pixelStride == 2 means U and V are interleaved).
             for (int j = 0; j < height / 2; j++) {
                 for (int k = 0; k < width / 2; k++) {
                     uBytes[uIndex++] = bytes[srcIndex];
                     srcIndex += pixelsStride;
                 }
                 // Skip to the next chroma row: a row consumed
                 // (width/2) * pixelStride bytes so far.
                 if (pixelsStride == 2) {
                     srcIndex += rowStride - width;
                 } else if (pixelsStride == 1) {
                     srcIndex += rowStride - width / 2;
                 }
             }
         } else if (i == 2) {
             // V plane: identical walk to the U plane.
             for (int j = 0; j < height / 2; j++) {
                 for (int k = 0; k < width / 2; k++) {
                     vBytes[vIndex++] = bytes[srcIndex];
                     srcIndex += pixelsStride;
                 }
                 if (pixelsStride == 2) {
                     srcIndex += rowStride - width;
                 } else if (pixelsStride == 1) {
                     srcIndex += rowStride - width / 2;
                 }
             }
         }
     }

猜你喜欢

转载自blog.csdn.net/qq_33717425/article/details/107180621
YUV