使用MediaExtractor+MediaCodec+MediaMuxer实现视频截取和拼接

Android中使用MediaExtractor+MediaCodec+MediaMuxer实现将本地视频解码截取再和另外的视频进行拼接编码合成一个视频,主要有两个类。

下面是解码相关的类VideoDecoder.java

package com.audiovideo.camera.decoder;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.util.Log;
import android.widget.Toast;

import com.audiovideo.camera.MyApplication;
import com.audiovideo.camera.constant.Constants;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Date;
import java.util.HashMap;

/**
 * This class use for Decode Video Frame Data and show to SurfaceTexture
 * Created by fenghaitao on 2019年10月10日09:28:15
 */
/**
 * Decodes video frames from a local file with MediaExtractor + MediaCodec (async callback
 * mode), hands each raw decoded frame to {@link VideoEncoderFromBuffer} for re-encoding
 * into an MP4, and can optionally append (the first 6 seconds of) a second clip after the
 * first one finishes.
 *
 * Created by fenghaitao on 2019-10-10.
 */
public class VideoDecoder {
    // Fix: TAG previously said "VideoEncoder" (copy/paste from the encoder class),
    // which made logcat output from this class misleading.
    private final static String TAG = "VideoDecoder";
    private final static int CONFIGURE_FLAG_DECODE = 0; // 0 == configure as a decoder
    private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding

    // Only the first 6 seconds of the appended (second) clip are kept. The original
    // inline comment claimed "8 seconds" while the code used 6 — the code's value wins.
    private static final long SECOND_CLIP_LIMIT_US = 6 * 1000000L;
    private static final long MICROS_PER_SECOND = 1000000L;

    private MediaCodec mMediaCodec;
    private MediaFormat mMediaFormat;

    private Handler mVideoDecoderHandler;
    private HandlerThread mVideoDecoderHandlerThread = new HandlerThread("VideoDecoder");
    private MediaExtractor videoExtractor = null;
    private VideoEncoderFromBuffer videoEncoder = null;

    private int mPreviewWidth = 640;   // fallback; overwritten from metadata in getPlayTime()
    private int mPreviewHeight = 480;
    private long duration;             // duration of the current video track, in microseconds

    private Date startTime;            // wall-clock start of the whole job, for the timing log

    private boolean flag;              // true -> append a second video after the first one
    private boolean hasMerge = true;   // true while decoding the first clip, false for the second
    private String input_path1;        // path of the clip to append (used only when flag is true)

    private MediaCodec.Callback mCallback = new MediaCodec.Callback() {
        @Override
        public void onInputBufferAvailable(@NonNull MediaCodec mediaCodec, int id) {
            ByteBuffer inputBuffer = mediaCodec.getInputBuffer(id);
            if (inputBuffer == null) {
                return; // buffer no longer valid (codec flushed/stopped); skip this round
            }
            inputBuffer.clear();
            int sampleSize = videoExtractor.readSampleData(inputBuffer, 0);
            if (sampleSize < 0) {
                // Current clip is exhausted: signal end-of-stream to the decoder.
                mediaCodec.queueInputBuffer(id, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                if (flag) {
                    // A second clip must be appended: replace the extractor and decoder.
                    hasMerge = false;
                    videoExtractor.release();
                    videoExtractor = getVideoExtractor(input_path1, 0);
                    mMediaCodec.stop();
                    mMediaCodec.release();
                    try {
                        mMediaCodec = MediaCodec.createDecoderByType(MIME_TYPE);
                    } catch (IOException e) {
                        // Fix: the original only printed the trace and then called
                        // startDecoder() on the already-released codec. Bail out instead.
                        Log.e(TAG, "failed to create decoder for the second clip", e);
                        return;
                    }
                    // Start decoding the appended clip.
                    startDecoder();
                }
            } else {
                Log.e("fht","SampleTime 为:" + videoExtractor.getSampleTime());
                if (hasMerge) {
                    // First clip: feed every sample through unmodified.
                    mediaCodec.queueInputBuffer(id, 0, sampleSize, videoExtractor.getSampleTime(), 0);
                    videoExtractor.advance();
                } else {
                    // Second clip: stop after SECOND_CLIP_LIMIT_US worth of material.
                    if (videoExtractor.getSampleTime() > SECOND_CLIP_LIMIT_US) {
                        mediaCodec.queueInputBuffer(id, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    } else {
                        mediaCodec.queueInputBuffer(id, 0, sampleSize, videoExtractor.getSampleTime(), 0);
                        videoExtractor.advance();
                    }
                }
            }
        }

        @Override
        public void onOutputBufferAvailable(@NonNull MediaCodec mediaCodec, int id, @NonNull MediaCodec.BufferInfo bufferInfo) {
            ByteBuffer outputBuffer = mMediaCodec.getOutputBuffer(id);
            if (outputBuffer != null && bufferInfo.size > 0) {
                byte[] buffer = new byte[outputBuffer.remaining()];
                outputBuffer.get(buffer);
                // Hand the decoded raw frame to the encoder for re-encoding/muxing.
                videoEncoder.encodeFrame(buffer);
                mMediaCodec.releaseOutputBuffer(id, false);
            } else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Date endDate = new Date(System.currentTimeMillis());
                long diff = (endDate.getTime() - startTime.getTime()) / 1000;
                Log.e("fht","编解码完成!用时:" + diff + "秒");
                // NOTE(review): this callback runs on the decoder's HandlerThread, not the
                // main thread. Toast from a background looper thread works on most devices
                // but is fragile — consider posting to the main thread.
                Toast.makeText(MyApplication.getContext(),"编解码完成!用时:" + diff + "秒", Toast.LENGTH_LONG).show();
                // NOTE(review): release() stops/releases the codec from inside its own
                // callback; kept as in the original, but a deferred release would be safer.
                release();
            }
        }

        @Override
        public void onError(@NonNull MediaCodec mediaCodec, @NonNull MediaCodec.CodecException e) {
            Log.d(TAG, "------> onError");
        }

        @Override
        public void onOutputFormatChanged(@NonNull MediaCodec mediaCodec, @NonNull MediaFormat mediaFormat) {
            Log.d(TAG, "------> onOutputFormatChanged");
        }
    };

    /**
     * @param input_path  path of the first video
     * @param input_path1 path of the video to append after the first (ignored when !flag)
     * @param mimeType    decoder MIME type, e.g. MediaFormat.MIMETYPE_VIDEO_AVC
     * @param time        start offset (in seconds) at which to cut the first video
     * @param flag        whether a second video should be appended
     */
    public VideoDecoder(String input_path, String input_path1, String mimeType, int time, boolean flag) {
        this.flag = flag;
        this.input_path1 = input_path1;
        try {
            getPlayTime(input_path); // fills mPreviewWidth / mPreviewHeight from metadata
            mMediaCodec = MediaCodec.createDecoderByType(mimeType);
            if (videoEncoder == null) {
                videoEncoder = new VideoEncoderFromBuffer(mPreviewWidth, mPreviewHeight);
            }
        } catch (IOException e) {
            Log.e(TAG, Log.getStackTraceString(e));
            mMediaCodec = null;
            return;
        }
        mVideoDecoderHandlerThread.start();
        mVideoDecoderHandler = new Handler(mVideoDecoderHandlerThread.getLooper());
        videoExtractor = getVideoExtractor(input_path, time);
    }

    /**
     * Creates a MediaExtractor for {@code input_path}, selects its video track, prepares
     * {@link #mMediaFormat} for the decoder, and optionally seeks {@code time} seconds in.
     *
     * @return the extractor (also stored in {@link #videoExtractor})
     */
    private MediaExtractor getVideoExtractor(String input_path, int time) {
        videoExtractor = new MediaExtractor();
        try {
            videoExtractor.setDataSource(input_path);
        } catch (IOException e) {
            Log.e(TAG, "setDataSource failed for " + input_path, e);
        }
        int videoTrackIndex = getMediaTrackIndex(videoExtractor, "video/");
        if (videoTrackIndex >= 0) {
            mMediaFormat = videoExtractor.getTrackFormat(videoTrackIndex);
            mMediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar);
            mMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mPreviewWidth * mPreviewHeight);
            mMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            mMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
            videoExtractor.selectTrack(videoTrackIndex);
            // Skip ahead when a cut point was requested and it lies inside the clip.
            // Fix: the original `time * 1000000` was int arithmetic and overflowed for
            // time > ~2147 seconds; multiply in long instead.
            long seekUs = time * MICROS_PER_SECOND;
            if (time > 0 && seekUs < duration) {
                videoExtractor.seekTo(seekUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
            }
        }
        return videoExtractor;
    }

    /**
     * Reads the video's width and height from its metadata via MediaMetadataRetriever.
     * (The dead, never-used "headers" map from the original was removed.)
     */
    private void getPlayTime(String mUri) {
        android.media.MediaMetadataRetriever mmr = new android.media.MediaMetadataRetriever();
        try {
            if (mUri != null) {
                mmr.setDataSource(mUri);
                mPreviewWidth = Integer.parseInt(mmr.extractMetadata(android.media.MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH));
                mPreviewHeight = Integer.parseInt(mmr.extractMetadata(android.media.MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT));
            }
        } catch (Exception ex) {
            // Fix: the original logged with the literal string "TAG" instead of the constant.
            Log.e(TAG, "MediaMetadataRetriever exception " + ex);
        } finally {
            mmr.release();
        }
    }

    /**
     * Returns the index of the first track whose MIME type starts with {@code MEDIA_TYPE}
     * (e.g. "video/"), or -1 when no such track exists. Also records the track duration.
     */
    private int getMediaTrackIndex(MediaExtractor videoExtractor, String MEDIA_TYPE) {
        int trackIndex = -1;
        for (int i = 0; i < videoExtractor.getTrackCount(); i++) {
            MediaFormat mediaFormat = videoExtractor.getTrackFormat(i);
            // KEY_DURATION is not guaranteed to be present on every track; guard the read
            // (the original unguarded getLong() throws on a track without it).
            if (mediaFormat.containsKey(MediaFormat.KEY_DURATION)) {
                duration = mediaFormat.getLong(MediaFormat.KEY_DURATION);
            }
            Log.e("fht","mPreviewWidth:" + mPreviewWidth + "; mPreviewHeight:" + mPreviewHeight + "; duration:" + duration);
            String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
            if (mime != null && mime.startsWith(MEDIA_TYPE)) {
                trackIndex = i;
                break;
            }
        }
        return trackIndex;
    }

    /**
     * Configures the codec with the current extractor's track format and starts async
     * decoding. Records the start time only for the first clip so the timing log covers
     * the whole job.
     *
     * @throws IllegalArgumentException if the codec was never created successfully
     */
    public void startDecoder() {
        if (mMediaCodec != null) {
            mMediaCodec.setCallback(mCallback, mVideoDecoderHandler);
            mMediaCodec.configure(mMediaFormat, null, null, CONFIGURE_FLAG_DECODE);
            mMediaCodec.start();
            if (hasMerge) {
                startTime = new Date(System.currentTimeMillis());
            }
        } else {
            throw new IllegalArgumentException("startDecoder failed, please check the MediaCodec is init correct");
        }
    }

    /** Stops the decoder without releasing it; startDecoder() may be called again. */
    public void stopDecoder() {
        if (mMediaCodec != null) {
            mMediaCodec.stop();
        }
    }

    /**
     * Releases all resources used by the decoder: codec, extractor, encoder, and the
     * decoder's HandlerThread.
     */
    public void release() {
        if (mMediaCodec != null) {
            mMediaCodec.stop();
            mMediaCodec.release();
            mMediaCodec = null;
        }
        if (videoExtractor != null) {
            videoExtractor.release();
            videoExtractor = null;
        }
        // Fix: the HandlerThread was never quit, leaking its looper thread.
        mVideoDecoderHandlerThread.quitSafely();
        mVideoDecoderHandler = null;
        if (videoEncoder != null) {
            videoEncoder.close();
            videoEncoder = null;
        }
    }

    /**
     * Returns the first codec capable of encoding the specified MIME type, or
     * null if no match was found.
     */
    private static MediaCodecInfo selectCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (!codecInfo.isEncoder()) {
                continue;
            }
            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    return codecInfo;
                }
            }
        }
        return null;
    }
}

下面是将解码数据编码合成MP4文件的类 VideoEncoderFromBuffer.java

import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.util.Log;

import com.audiovideo.camera.constant.Constants;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Random;

/**
 * 将YUV数据编码合成MP4文件
 * create by  fenghaitao 2019年10月10日09:29:37
 */

/**
 * Encodes raw YUV frames (as delivered by VideoDecoder) back into H.264 with MediaCodec
 * (synchronous buffer API) and writes the result into an MP4 file via MediaMuxer.
 *
 * create by fenghaitao 2019-10-10
 */
public class VideoEncoderFromBuffer {
    private static final String TAG = "VideoEncoderFromBuffer";
    private static final boolean VERBOSE = true; // lots of logging
    // parameters for the encoder
    private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
    private static final int FRAME_RATE = 25;            // frames per second
    private static final int IFRAME_INTERVAL = 10;       // seconds between I-frames
    private static final int TIMEOUT_USEC = 10000;       // dequeue timeout, microseconds
    private static final int COMPRESS_RATIO = 256;       // unused; kept from the original
    private static final float BPP = 0.25f;              // bits-per-pixel used by calcBitRate()

    private int mWidth;
    private int mHeight;
    private MediaCodec mMediaCodec;
    private MediaMuxer mMuxer;
    private BufferInfo mBufferInfo;
    private int mTrackIndex = -1;
    private boolean mMuxerStarted;
    byte[] mFrameData;           // scratch buffer holding the planar-converted frame
    private int mColorFormat;
    private long mStartTime = 0; // ns timestamp of construction (used only for the pts log)

    // Last presentation timestamp handed to the encoder, to keep pts monotonic.
    private long prevOutputPTSUs = 0;

    @SuppressLint("NewApi")
    public VideoEncoderFromBuffer(int width, int height) {
        Log.i(TAG, "VideoEncoder()");
        this.mWidth = width;
        this.mHeight = height;
        mFrameData = new byte[this.mWidth * this.mHeight * 3 / 2]; // YUV420: 1.5 bytes/pixel

        mBufferInfo = new BufferInfo();
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        if (VERBOSE)
            Log.d(TAG, "found codec: " + codecInfo.getName());
        mColorFormat = selectColorFormat(codecInfo, MIME_TYPE);
        if (VERBOSE)
            Log.d(TAG, "found colorFormat: " + mColorFormat);
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, this.mWidth, this.mHeight);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mColorFormat);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE)
            Log.d(TAG, "format: " + mediaFormat);
        try {
            mMediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
        } catch (IOException e) {
            // Fix: the original only printed the stack trace and then NPE'd on
            // configure() below; leave the encoder unusable instead of crashing here.
            Log.e(TAG, "createByCodecName failed for " + codecInfo.getName(), e);
            return;
        }
        mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mMediaCodec.start();

        String fileName = Constants.output3 + new Random().nextInt() + ".mp4";
        Log.i(TAG, "videofile: " + fileName);

        mStartTime = System.nanoTime();

        // The muxer track can only be added once the encoder reports its actual output
        // format (INFO_OUTPUT_FORMAT_CHANGED), so only create the muxer here — it is
        // started lazily in encodeFrame().
        try {
            mMuxer = new MediaMuxer(fileName, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException ioe) {
            throw new RuntimeException("MediaMuxer creation failed", ioe);
        }
        mTrackIndex = -1;
        mMuxerStarted = false;
    }

    /**
     * Converts one NV21 frame into the encoder's planar layout, queues it on the encoder,
     * then drains every pending encoded buffer into the muxer.
     *
     * @param input one full NV21 frame of mWidth x mHeight pixels
     */
    public void encodeFrame(byte[] input) {
        Log.i(TAG, "encodeFrame()");
        NV21toI420SemiPlanar(input, mFrameData, this.mWidth, this.mHeight);

        ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
        ByteBuffer[] outputBuffers = mMediaCodec.getOutputBuffers();
        int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
        if (VERBOSE)
            Log.i(TAG, "inputBufferIndex-->" + inputBufferIndex);
        if (inputBufferIndex >= 0) {
            long endTime = System.nanoTime();
            long ptsUsec = (endTime - mStartTime) / 1000;
            Log.i(TAG, "resentationTime: " + ptsUsec);
            ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            inputBuffer.put(mFrameData);
            mMediaCodec.queueInputBuffer(inputBufferIndex, 0, mFrameData.length, getPTSUs(), 0);
        } else {
            // either all input buffers in use, or we timed out during initial setup
            if (VERBOSE)
                Log.d(TAG, "input buffer not available");
        }

        int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        Log.i(TAG, "outputBufferIndex-->" + outputBufferIndex);
        do {
            if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE)
                    Log.d(TAG, "no output from encoder available");
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // refresh the cached output buffer array
                outputBuffers = mMediaCodec.getOutputBuffers();
                if (VERBOSE)
                    Log.d(TAG, "encoder output buffers changed");
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // The encoder's real output format is now known: start the muxer.
                MediaFormat newFormat = mMediaCodec.getOutputFormat();
                Log.d(TAG, "encoder output format changed: " + newFormat);
                mTrackIndex = mMuxer.addTrack(newFormat);
                mMuxer.start();
                mMuxerStarted = true;
            } else if (outputBufferIndex < 0) {
                Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + outputBufferIndex);
                // let's ignore it
            } else {
                if (VERBOSE)
                    Log.d(TAG, "perform encoding");
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                if (outputBuffer == null) {
                    throw new RuntimeException("encoderOutputBuffer " + outputBufferIndex + " was null");
                }

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // Codec config data was already delivered to the muxer via the
                    // INFO_OUTPUT_FORMAT_CHANGED path; skip it here.
                    if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                    mBufferInfo.size = 0;
                }

                if (mBufferInfo.size != 0) {
                    if (!mMuxerStarted) {
                        // Some codecs deliver data before INFO_OUTPUT_FORMAT_CHANGED;
                        // start the muxer from the current output format instead of crashing.
                        MediaFormat newFormat = mMediaCodec.getOutputFormat();
                        mTrackIndex = mMuxer.addTrack(newFormat);
                        mMuxer.start();
                        mMuxerStarted = true;
                    }

                    // Constrain the ByteBuffer to the exact range BufferInfo describes.
                    outputBuffer.position(mBufferInfo.offset);
                    outputBuffer.limit(mBufferInfo.offset + mBufferInfo.size);
                    mMuxer.writeSampleData(mTrackIndex, outputBuffer, mBufferInfo);
                    if (VERBOSE) {
                        Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
                    }
                }

                mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
            }
            outputBufferIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        } while (outputBufferIndex >= 0);
    }

    /**
     * Releases the encoder and finalizes the MP4 file.
     */
    @SuppressLint("NewApi")
    public void close() {
        Log.i(TAG, "close()");
        try {
            if (mMediaCodec != null) {
                mMediaCodec.stop();
                mMediaCodec.release();
                mMediaCodec = null;
            }
        } catch (Exception e) {
            Log.e(TAG, "releasing encoder failed", e);
        }
        if (mMuxer != null) {
            try {
                // Fix (was a TODO in the original): MediaMuxer.stop() throws if the muxer
                // was never started / never received data, so only stop when started.
                if (mMuxerStarted) {
                    mMuxer.stop();
                }
            } catch (Exception e) {
                Log.e(TAG, "stopping muxer failed", e);
            }
            mMuxer.release();
            mMuxer = null;
            mMuxerStarted = false;
        }
    }

    /**
     * De-interleaves an NV21 frame (Y plane followed by interleaved V/U pairs) into a
     * fully planar layout: Y plane, then all V samples, then all U samples.
     * NOTE(review): despite the method name, the output is planar (YV12-style ordering),
     * not semi-planar NV12; behavior is kept exactly as the encoder was tuned for it.
     * Refer to https://wiki.videolan.org/YUV/
     */
    private void NV21toI420SemiPlanar(byte[] nv21bytes, byte[] i420bytes, int width, int height) {
        int ySize = width * height;  // length of the Y plane
        int chromaLen = ySize / 4;   // length of each chroma plane
        System.arraycopy(nv21bytes, 0, i420bytes, 0, ySize);
        for (int i = 0; i < chromaLen; i++) {
            i420bytes[ySize + i] = nv21bytes[ySize + 2 * i];                 // V samples
            i420bytes[ySize + chromaLen + i] = nv21bytes[ySize + 2 * i + 1]; // U samples
        }
    }

    /**
     * Returns a color format that is supported by the codec and understood by this class,
     * or 0 when none of the codec's formats is recognized.
     */
    public static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
        for (int i = 0; i < capabilities.colorFormats.length; i++) {
            int colorFormat = capabilities.colorFormats[i];
            if (isRecognizedFormat(colorFormat)) {
                return colorFormat;
            }
        }
        Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
        return 0; // caller must treat 0 as "no usable format"
    }

    /**
     * Returns true if this is a color format that this class knows how to produce
     * frames in (see NV21toI420SemiPlanar).
     */
    private static boolean isRecognizedFormat(int colorFormat) {
        switch (colorFormat) {
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
            return true;
        default:
            return false;
        }
    }

    /**
     * Returns the first codec capable of encoding the specified MIME type, or
     * null if no match was found.
     */
    private static MediaCodecInfo selectCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (!codecInfo.isEncoder()) {
                continue;
            }
            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    return codecInfo;
                }
            }
        }
        return null;
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
     * Fix: multiply in long — the original `frameIndex * 1000000` overflowed int for
     * frameIndex > ~2147.
     */
    private static long computePresentationTime(int frameIndex) {
        return 132 + frameIndex * 1000000L / FRAME_RATE;
    }

    /**
     * Returns true if the specified color format is semi-planar YUV. Throws an
     * exception if the color format is not recognized (e.g. not YUV).
     */
    private static boolean isSemiPlanarYUV(int colorFormat) {
        switch (colorFormat) {
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            return false;
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
            return true;
        default:
            throw new RuntimeException("unknown format " + colorFormat);
        }
    }

    /**
     * Returns the next presentation timestamp in microseconds, guaranteed monotonically
     * increasing (MediaMuxer rejects non-monotonic pts).
     * Fix: the original "correction" `(prevOutputPTSUs - result) + result` was a no-op,
     * and prevOutputPTSUs was never updated so the guard could never fire anyway.
     */
    protected long getPTSUs() {
        long result = System.nanoTime() / 1000L;
        if (result < prevOutputPTSUs) {
            result = prevOutputPTSUs + 1; // keep strictly increasing
        }
        prevOutputPTSUs = result;
        return result;
    }

    /** Target bit rate in bits/second: BPP bits per pixel at 25 fps. */
    private int calcBitRate() {
        return (int) (BPP * 25 * mWidth * mHeight);
    }
}

调用方法也很简单,如下:

VideoDecoder mVideoDecoder = new VideoDecoder(Constants.test_path, Constants.test_path2, MediaFormat.MIMETYPE_VIDEO_AVC, 0, false); // 注意:构造方法共有5个参数(第一个视频路径、待拼接视频路径、编码格式、截取起始秒数、是否拼接),原示例少传了待拼接视频的路径,无法编译通过
mVideoDecoder.startDecoder();

到时候注意把里面的视频文件路径替换一下就可以了

发布了20 篇原创文章 · 获赞 26 · 访问量 9439

猜你喜欢

转载自blog.csdn.net/weixin_42574892/article/details/102724651