Capturing Audio and Video on iOS with AVCaptureSession

AVCaptureSession configures capture behavior and coordinates the flow of data from input devices to capture outputs. To perform real-time audio/video capture, instantiate a capture session and add the appropriate inputs and outputs. The main classes involved are listed below, followed by a sketch of the supporting declarations the rest of the code assumes.

  • AVCaptureSession: manages the input and output audio/video streams
  • AVCaptureDevice: the interface to the camera hardware, used to control hardware features such as lens position (front/back camera), exposure, flash, and so on
  • AVCaptureInput: configures an input device and supplies the data coming from it
  • AVCaptureOutput: manages the outgoing audio/video data streams
  • AVCaptureConnection: the connection between an input and an output
  • AVCaptureVideoPreviewLayer: displays what the camera is currently capturing
  • AVAssetWriter: writes media data to a container file
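
All of the snippets below live in a single capture class. The original post does not show its interface, so here is a minimal sketch of the class extension they assume (the class name VideoCaptureManager is a placeholder; the property names match the identifiers used in the code):

#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>

@interface VideoCaptureManager () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) dispatch_queue_t videoQueue;
@property (nonatomic, strong) AVCaptureDeviceInput *videoInput;
@property (nonatomic, strong) AVCaptureDeviceInput *audioInput;
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoOutput;
@property (nonatomic, strong) AVCaptureAudioDataOutput *audioOutput;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
@property (nonatomic, strong) AVAssetWriter *assetWriter;
@property (nonatomic, strong) AVAssetWriterInput *assetWriterVideoInput;
@property (nonatomic, strong) AVAssetWriterInput *assetWriterAudioInput;
@property (nonatomic, copy) NSDictionary *videoCompressionSettings;
@property (nonatomic, copy) NSDictionary *audioCompressionSettings;
@property (nonatomic, strong) NSURL *videoURL;
@property (nonatomic, assign) BOOL canWrite;
@end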

Initializing AVCaptureSession

- (AVCaptureSession *)captureSession {
    if (_captureSession == nil) {
        _captureSession = [[AVCaptureSession alloc] init];
        // Check the same preset that is about to be applied.
        if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
            _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
        }
    }
    return _captureSession;
}

- (dispatch_queue_t)videoQueue {
    if (!_videoQueue) {
        _videoQueue = dispatch_queue_create("VideoCapture", DISPATCH_QUEUE_SERIAL);
    }
    return _videoQueue;
}

Adding the Video Input

- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    AVCaptureDeviceDiscoverySession *deviceDiscoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:position];
    for (AVCaptureDevice *device in deviceDiscoverySession.devices) {
        if ([device position] == position) {
            return device;
        }
    }
    return nil;
}

- (void)setupVideoInput {
    AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
    if (!captureDevice){
        NSLog(@"captureDevice failed");
        return;
    }
    NSError *error = nil;
    self.videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
    if (error) {
        NSLog(@"videoInput error:%@", error);
        return;
    }
    if ([self.captureSession canAddInput:self.videoInput]) {
        [self.captureSession addInput:self.videoInput];
    }
}
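
With the getCameraDeviceWithPosition: helper, switching cameras is a matter of swapping the device input inside a configuration block. A hypothetical helper, not in the original post:

- (void)switchToFrontCamera {
    AVCaptureDevice *frontDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionFront];
    if (!frontDevice) {
        return;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontInput = [[AVCaptureDeviceInput alloc] initWithDevice:frontDevice error:&error];
    if (!frontInput) {
        NSLog(@"front camera input error:%@", error);
        return;
    }
    [self.captureSession beginConfiguration];
    [self.captureSession removeInput:self.videoInput];
    if ([self.captureSession canAddInput:frontInput]) {
        [self.captureSession addInput:frontInput];
        self.videoInput = frontInput;
    } else {
        // Roll back if the new input cannot be added.
        [self.captureSession addInput:self.videoInput];
    }
    [self.captureSession commitConfiguration];
}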

Adding the Audio Input

- (void)setupAudioInput {
    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    self.audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
    if (error) {
        NSLog(@"audioInput error:%@", error);
        return;
    }
    if ([self.captureSession canAddInput:self.audioInput]) {
        [self.captureSession addInput:self.audioInput];
    }
}
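
Camera and microphone access both require user permission (and NSCameraUsageDescription / NSMicrophoneUsageDescription entries in Info.plist). A minimal sketch of requesting access before configuring the inputs, not shown in the original post:

// Request camera permission; repeat with AVMediaTypeAudio for the microphone.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    if (!granted) {
        NSLog(@"camera access denied");
    }
}];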

Adding the Video Output

- (void)setupVideoOutput {
    self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    self.videoOutput.alwaysDiscardsLateVideoFrames = YES;
    [self.videoOutput setSampleBufferDelegate:self queue:self.videoQueue];
    if ([self.captureSession canAddOutput:self.videoOutput]) {
        [self.captureSession addOutput:self.videoOutput];
    }
}
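
The output's videoSettings are left at their default here. If downstream processing needs BGRA pixel buffers (for example Core Graphics or Metal work), one might add this inside setupVideoOutput (optional, not in the original post):

// Ask the video output to deliver 32BGRA pixel buffers.
self.videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };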

Adding the Audio Output

- (void)setupAudioOutput {
    self.audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    [self.audioOutput setSampleBufferDelegate:self queue:self.videoQueue];
    if ([self.captureSession canAddOutput:self.audioOutput]) {
        [self.captureSession addOutput:self.audioOutput];
    }
}
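
Both outputs share videoQueue here, which keeps sample ordering simple. An optional refinement (not in the original post) is to give audio its own serial queue so heavy per-frame video work cannot delay audio callbacks:

// Hypothetical separate queue for audio callbacks (the _audioQueue property is not in the original post).
_audioQueue = dispatch_queue_create("AudioCapture", DISPATCH_QUEUE_SERIAL);
[self.audioOutput setSampleBufferDelegate:self queue:_audioQueue];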

Setting Up the Video Preview

- (void)setupCaptureVideoPreviewLayer:(UIView *)previewView {
    _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
    _captureVideoPreviewLayer.frame = previewView.bounds;
    _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    _captureVideoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
    [previewView.layer insertSublayer:_captureVideoPreviewLayer atIndex:0];
}
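
The layer's frame is only set once here. If the hosting view can change size (rotation, layout passes), one way to keep the layer in sync is to expose a small update method and call it from the owner's layout pass, for example from viewDidLayoutSubviews. A hypothetical helper, not in the original post:

// Resize the preview layer whenever the host view's bounds change.
- (void)updatePreviewLayerFrame:(CGRect)bounds {
    self.captureVideoPreviewLayer.frame = bounds;
}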

Starting and Stopping the Capture Session

- (void)startSession {
    if (![self.captureSession isRunning]) {
        [self.captureSession startRunning];
    }
}

- (void)stopSession {
    if ([self.captureSession isRunning]) {
        [self.captureSession stopRunning];
    }
}
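
startRunning (and stopRunning) are blocking calls that can take a noticeable amount of time, so Apple recommends keeping them off the main thread. A small sketch reusing the serial videoQueue, not shown in the original post:

// Start the session on a background queue so the UI is not blocked.
dispatch_async(self.videoQueue, ^{
    [self startSession];
});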

Initializing AVAssetWriter to Save Audio and Video to a File

- (void)setUpWriter {
    if (self.videoURL == nil) {
        return;
    }
    NSError *error = nil;
    self.assetWriter = [AVAssetWriter assetWriterWithURL:self.videoURL fileType:AVFileTypeMPEG4 error:&error];
    if (error) {
        NSLog(@"assetWriter error:%@", error);
        return;
    }
    NSInteger numPixels = kScreenWidth * kScreenHeight;
    
    CGFloat bitsPerPixel = 12.0;
    NSInteger bitsPerSecond = numPixels * bitsPerPixel;
    
    NSDictionary *compressionProperties = @{ AVVideoAverageBitRateKey : @(bitsPerSecond),
                                             AVVideoExpectedSourceFrameRateKey : @(15),
                                             AVVideoMaxKeyFrameIntervalKey : @(15),
                                             AVVideoProfileLevelKey : AVVideoProfileLevelH264BaselineAutoLevel };
    self.videoCompressionSettings = @{ AVVideoCodecKey : AVVideoCodecTypeH264,
                                       AVVideoWidthKey : @(kScreenWidth * 2),
                                       AVVideoHeightKey : @(kScreenHeight * 2),
                                       AVVideoScalingModeKey : AVVideoScalingModeResizeAspect,
                                       AVVideoCompressionPropertiesKey : compressionProperties };
    _assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:self.videoCompressionSettings];
    _assetWriterVideoInput.expectsMediaDataInRealTime = YES;
    self.audioCompressionSettings = @{ AVEncoderBitRatePerChannelKey : @(28000),
                                       AVFormatIDKey : @(kAudioFormatMPEG4AAC),
                                       AVNumberOfChannelsKey : @(1),
                                       AVSampleRateKey : @(22050) };

    _assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:self.audioCompressionSettings];
    _assetWriterAudioInput.expectsMediaDataInRealTime = YES;
    
    if ([_assetWriter canAddInput:_assetWriterVideoInput]){
        [_assetWriter addInput:_assetWriterVideoInput];
    }
    else{
        NSLog(@"AssetWriter videoInput append Failed");
    }
    
    if ([_assetWriter canAddInput:_assetWriterAudioInput]){
        [_assetWriter addInput:_assetWriterAudioInput];
    }
    else{
        NSLog(@"AssetWriter audioInput Append Failed");
    }
    _canWrite = NO;
}
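
kScreenWidth and kScreenHeight above are assumed to be screen-size macros from the original project (the ×2 presumably converts points to pixels on a @2x screen), and videoURL must be set before setUpWriter runs. A hypothetical sketch of both, not in the original post:

// Assumed screen-size macros (in points).
#define kScreenWidth  [UIScreen mainScreen].bounds.size.width
#define kScreenHeight [UIScreen mainScreen].bounds.size.height

// Create a unique output file in the temporary directory, then build the writer.
- (void)prepareWriter {
    NSString *fileName = [NSString stringWithFormat:@"%@.mp4", [[NSUUID UUID] UUIDString]];
    self.videoURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:fileName]];
    [self setUpWriter];
}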

Processing Audio and Video in AVCaptureVideoDataOutputSampleBufferDelegate and AVCaptureAudioDataOutputSampleBufferDelegate

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate|AVCaptureAudioDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    @autoreleasepool{
        if (connection == [self.videoOutput connectionWithMediaType:AVMediaTypeVideo]) {
            @synchronized(self){
                [self appendSampleBuffer:sampleBuffer ofMediaType:AVMediaTypeVideo];
            }
        }
        if (connection == [self.audioOutput connectionWithMediaType:AVMediaTypeAudio]) {
            @synchronized(self) {
                [self appendSampleBuffer:sampleBuffer ofMediaType:AVMediaTypeAudio];
            }
        }
    }
}


- (void)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer ofMediaType:(NSString *)mediaType {
    if (sampleBuffer == NULL) {
        NSLog(@"empty sampleBuffer");
        return;
    }
    @autoreleasepool{
        // The writer session starts on the first video frame so the file begins with video.
        if (!self.canWrite && [mediaType isEqualToString:AVMediaTypeVideo]) {
            [self.assetWriter startWriting];
            [self.assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            self.canWrite = YES;
        }
        if ([mediaType isEqualToString:AVMediaTypeVideo]) {
            if (self.assetWriterVideoInput.readyForMoreMediaData){
                BOOL success = [self.assetWriterVideoInput appendSampleBuffer:sampleBuffer];
                if (!success){
                    NSLog(@"assetWriterVideoInput appendSampleBuffer fail");
                    @synchronized (self){
                        [self stopVideoRecorder];
                    }
                }
            }
        }
        if ([mediaType isEqualToString:AVMediaTypeAudio]) {
            if (self.assetWriterAudioInput.readyForMoreMediaData){
                BOOL success = [self.assetWriterAudioInput appendSampleBuffer:sampleBuffer];
                if (!success){
                    NSLog(@"assetWriterAudioInput appendSampleBuffer fail");
                    @synchronized (self){
                        [self stopVideoRecorder];
                    }
                }
            }
        }
    }
}
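
Note that the capture callbacks start firing as soon as the session runs, even before setUpWriter has been called, so a defensive guard at the top of appendSampleBuffer:ofMediaType: can be worthwhile (not in the original code):

// Hypothetical guard: ignore samples until a writer exists, and stop appending once it is no longer writing.
if (self.assetWriter == nil) {
    return;
}
if (self.canWrite && self.assetWriter.status != AVAssetWriterStatusWriting) {
    return;
}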

Stopping Video Recording

- (void)stopVideoRecorder {
    __weak __typeof(self)weakSelf = self;
    if(_assetWriter && _assetWriter.status == AVAssetWriterStatusWriting) {
        [_assetWriter finishWritingWithCompletionHandler:^{
            weakSelf.canWrite = NO;
            weakSelf.assetWriter = nil;
            weakSelf.assetWriterAudioInput = nil;
            weakSelf.assetWriterVideoInput = nil;
        }];
    }
}
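
Putting it together, a rough usage sketch (method names such as startPreviewInView: and startRecording are hypothetical wrappers, not from the original post; prepareWriter is the helper sketched earlier):

// Hypothetical convenience wrappers around the methods shown above.
- (void)startPreviewInView:(UIView *)previewView {
    [self.captureSession beginConfiguration];
    [self setupVideoInput];
    [self setupAudioInput];
    [self setupVideoOutput];
    [self setupAudioOutput];
    [self.captureSession commitConfiguration];
    [self setupCaptureVideoPreviewLayer:previewView];
    dispatch_async(self.videoQueue, ^{
        [self startSession];   // blocking call, so keep it off the main thread
    });
}

- (void)startRecording {
    [self prepareWriter];   // builds videoURL and the AVAssetWriter; the delegate callbacks then append samples
}

- (void)stopRecording {
    [self stopVideoRecorder];
}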

Reposted from blog.csdn.net/watson2017/article/details/134125113