本文主要内容
一.AVFoundation核心功能和核心类的介绍
二.视频预览图层
三.视频捕捉关于AVCaptureSession的配置
四.实现前后摄像头的改变
五.实现摄像头自动聚焦功能
六.实现摄像头自动曝光以及锁定曝光
七.实现摄像头手电筒和闪光等模式的开启关闭
八.静态图片的拍摄
九.视频录制实现及视频拍摄缩略图实现
一.AVFoundation核心功能和核心类的介绍
AVFoundation核心功能
iOS 4.0推出的音视频框架(并非iOS 8.0才推出)
照片/视频捕捉功能
小视频/直播
AVFoundation核心类
-
捕捉会话
:AVCaptureSession(最常见的类,类似“排插”的功能,用于输入和输出的连接工作) -
捕捉设备
:AVCaptureDevice(如摄像头、麦克风) -
捕捉设备输入
:AVCaptureDeviceInput(音视频输入) -
捕捉设备输出
:AVCaptureOutput 抽象类(静态图片/音视频等) AVCaptureStillImageOutput AVCaptureMovieFileOutput AVCaptureAudioDataOutput AVCaptureVideoDataOutput -
捕捉连接
:AVCaptureConnection(根据捕捉的输入设备自动建立输入和输出的连接) -
捕捉预览
:AVCaptureVideoPreviewLayer(显示摄像头实时捕捉的内容)
二.视频预览图层
1、视频预览图层
- THPreviewView:拍摄过程中显示预览效果
- 主要用到
AVCaptureVideoPreviewLayer
类
THPreviewView.h
#import <AVFoundation/AVFoundation.h>
@protocol THPreviewViewDelegate <NSObject>
// Tap-to-focus at the given point.
- (void)tappedToFocusAtPoint:(CGPoint)point;
// Tap-to-expose at the given point.
- (void)tappedToExposeAtPoint:(CGPoint)point;
// Tap to reset both focus and exposure.
- (void)tappedToResetFocusAndExposure;
@end
@interface THPreviewView : UIView
// The session links the AVCaptureVideoPreviewLayer to a running AVCaptureSession.
@property (strong, nonatomic) AVCaptureSession *session;
@property (weak, nonatomic) id<THPreviewViewDelegate> delegate;
@property (nonatomic) BOOL tapToFocusEnabled; // whether tap-to-focus is enabled
@property (nonatomic) BOOL tapToExposeEnabled; // whether tap-to-expose is enabled
@end
复制代码
THPreviewView.m重要代码
//私有方法 用于支持该类定义的不同触摸处理方法。 将屏幕坐标系上的触控点转换为摄像头上的坐标系点
// Private helper shared by this class's touch handlers: converts a point in
// the view's (screen) coordinate space into the capture device's coordinate space.
// Assumes self.layer is an AVCaptureVideoPreviewLayer (+layerClass overridden
// elsewhere in this class — confirm).
- (CGPoint)captureDevicePointForPoint:(CGPoint)point {
    AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.layer;
    return [previewLayer captureDevicePointOfInterestForPoint:point];
}
复制代码
AVCaptureVideoPreviewLayer定义了两个方法用于摄像头和屏幕坐标系转换:
1、captureDevicePointOfInterestForPoint:获取屏幕坐标系的CGPoint数据,返回转换得到摄像头设备坐标系的CGPoint数据
2、pointForCaptureDevicePointOfInterest:获取摄像头坐标系的CGPoint数据,返回转换得到屏幕坐标系的CGPoint数据
复制代码
2、捕捉控制器
- THCameraController
THCameraController.h重要代码
#import <AVFoundation/AVFoundation.h>
@protocol THCameraControllerDelegate <NSObject>
// 1. Error callbacks: device configuration / media capture / asset-library write failures.
- (void)deviceConfigurationFailedWithError:(NSError *)error;
- (void)mediaCaptureFailedWithError:(NSError *)error;
- (void)assetLibraryWriteFailedWithError:(NSError *)error;
@end
@interface THCameraController : NSObject
@property (weak, nonatomic) id<THCameraControllerDelegate> delegate;
@property (nonatomic, strong, readonly) AVCaptureSession *captureSession;
// 2. Set up and control the video capture session.
- (BOOL)setupSession:(NSError **)error;
- (void)startSession;
- (void)stopSession;
// 3. Switching between front and back cameras.
- (BOOL)switchCameras; // switch front/back camera
- (BOOL)canSwitchCameras; // whether switching is possible
@property (nonatomic, readonly) NSUInteger cameraCount; // number of cameras
@property (nonatomic, readonly) BOOL cameraHasTorch; // torch available
@property (nonatomic, readonly) BOOL cameraHasFlash; // flash available
@property (nonatomic, readonly) BOOL cameraSupportsTapToFocus; // tap-to-focus supported
@property (nonatomic, readonly) BOOL cameraSupportsTapToExpose;// tap-to-expose supported
@property (nonatomic) AVCaptureTorchMode torchMode; // torch mode
@property (nonatomic) AVCaptureFlashMode flashMode; // flash mode
// 4. Focus, exposure, and reset-focus/exposure methods.
- (void)focusAtPoint:(CGPoint)point;
- (void)exposeAtPoint:(CGPoint)point;
- (void)resetFocusAndExposureModes;
// 5. Still image & movie capture.
// Capture a still image.
- (void)captureStillImage;
// Movie recording
// Start recording.
- (void)startRecording;
// Stop recording.
- (void)stopRecording;
// Whether a recording is in progress.
- (BOOL)isRecording;
// Duration recorded so far.
- (CMTime)recordedDuration;
@end
复制代码
三.AVCaptureSession的配置
THCameraController类功能
:视频/照片的捕捉- 1.初始化
- 2.设置分辨率
- 3.配置输入设备(注意必须转换为AVCaptureDeviceInput对象)
- 4.配置输入设备包括音频输入和视频输入
- 5.配置输出包括静态图像输出和视频文件输出
注意
:在为session添加输入输出时,一定要判断能否添加,因为摄像头并不隶属于某一个APP,而是公共设备。- 涉及摄像头、相册、麦克风,需要给用户提醒,处理隐私需求
1.AVCaptureSession基本配置
THCameraController.m重要代码
@interface THCameraController () <AVCaptureFileOutputRecordingDelegate>
// Serial queue for session start/stop and background thumbnail work.
@property (strong, nonatomic) dispatch_queue_t videoQueue;
// The capture session wiring inputs to outputs.
@property (strong, nonatomic) AVCaptureSession *captureSession;
// Currently active video input. Declared strong (was weak): switchCameras
// removes this input from the session before possibly re-adding it, and a
// weak reference could be zeroed in between, re-adding nil.
@property (strong, nonatomic) AVCaptureDeviceInput *activeVideoInput;
@property (strong, nonatomic) AVCaptureStillImageOutput *imageOutput;
@property (strong, nonatomic) AVCaptureMovieFileOutput *movieOutput;
// Destination URL of the movie currently being recorded.
@property (strong, nonatomic) NSURL *outputURL;
@end
@implementation THCameraController
// Configures the capture session: video + audio inputs, still-image + movie outputs.
// Returns NO (with *error populated by AVFoundation) when an input cannot be created.
- (BOOL)setupSession:(NSError **)error {
    // 1. Create the session.
    self.captureSession = [[AVCaptureSession alloc] init];
    // 2. High-quality preset (resolution/aspect chosen by the system).
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;

#pragma mark - (1) Add the video input device
    // 3. Default video device: on iOS this is the back camera.
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // 4. A device must be wrapped in an AVCaptureDeviceInput before being added to a session.
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:error];
    // 5. Validate the input.
    if (videoInput) {
        // 5.1 The camera is a shared system resource — always check before adding.
        // (fixed: original `if (self.captureSession canAddInput: videoInput)` lacked brackets)
        if ([self.captureSession canAddInput:videoInput]) {
            // 5.2 Attach the input to the session.
            [self.captureSession addInput:videoInput];
            // 5.3 Remember it as the active input.
            // (fixed: `activeVideInput` typo and missing semicolon)
            self.activeVideoInput = videoInput;
        }
    } else {
        return NO; // fixed: was `returen NO;`
    }

#pragma mark - (2) Add the audio input device: built-in microphone
    // fixed: was `AVCaptureDecice` / `DeviceInputWithDevice`
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:error];
    if (audioInput) {
        if ([self.captureSession canAddInput:audioInput]) {
            [self.captureSession addInput:audioInput];
        }
    } else {
        return NO;
    }

#pragma mark - (3) Configure outputs (still image / movie file)
    // Still images via AVCaptureStillImageOutput, stored as JPEG.
    self.imageOutput = [[AVCaptureStillImageOutput alloc] init];
    self.imageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
    if ([self.captureSession canAddOutput:self.imageOutput]) {
        [self.captureSession addOutput:self.imageOutput];
    }
    // Movie files via AVCaptureMovieFileOutput (QuickTime by default).
    self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([self.captureSession canAddOutput:self.movieOutput]) {
        [self.captureSession addOutput:self.movieOutput];
    }
    // Serial queue used to start/stop the session off the main thread.
    self.videoQueue = dispatch_queue_create("cc.videoQueue", NULL);
    return YES;
}
复制代码
2.AVCaptureSession的开启和结束
THCameraController.m重要代码
// 开始捕捉
// Starts the capture session (no-op when it is already running).
- (void)startSession {
    if ([self.captureSession isRunning]) {
        return;
    }
    // startRunning is a blocking call, so dispatch it onto the video queue
    // instead of stalling the caller.
    dispatch_async(self.videoQueue, ^{
        [self.captureSession startRunning];
    });
}
// 结束捕捉
// Stops the capture session (no-op when it is not running).
// fixed: this method was mistakenly also named startSession — a duplicate
// selector that cannot compile; the header declares stopSession.
- (void)stopSession {
    // Only stop when currently running.
    if ([self.captureSession isRunning]) {
        // stopRunning blocks, so perform it asynchronously on the video queue.
        dispatch_async(self.videoQueue, ^{
            [self.captureSession stopRunning];
        });
    }
}
复制代码
四.实现前后摄像头的改变
- 默认后置摄像头
THCameraController.m重要代码
#pragma mark - Device Configuration 配置摄像头支持的方法
// 寻找指定摄像头设备(前置或者后置摄像头)
#pragma mark - Device Configuration

// Returns the video capture device at the given position (front or back),
// or nil when no such device exists.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    // fixed: was `deviceWithMediaType` (no such class method) — plural is correct.
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if (device.position == position) { // fixed: `positon` typo
            return device;
        }
    }
    // fixed: original fell off the end of a non-void method (undefined return).
    return nil;
}
// 获取当前活跃设备
// The device backing the currently active video input.
- (AVCaptureDevice *)activeCamera {
    return self.activeVideoInput.device; // fixed: missing semicolon
}
// 获取当前不活跃设备
// The camera opposite the active one (nil when only one camera exists).
- (AVCaptureDevice *)inactiveCamera {
    AVCaptureDevice *device = nil;
    if (self.cameraCount > 1) {
        // fixed: original compared the device object itself to a position enum;
        // the device's .position must be examined.
        if ([self activeCamera].position == AVCaptureDevicePositionBack) {
            device = [self cameraWithPosition:AVCaptureDevicePositionFront];
        } else {
            device = [self cameraWithPosition:AVCaptureDevicePositionBack];
        }
    }
    return device;
}
// 能否切换摄像头
// Switching is only possible when more than one camera is present.
- (BOOL)canSwitchCameras {
    return [self cameraCount] > 1;
}
// 摄像头个数
// Number of attached video capture devices.
- (NSUInteger)cameraCount {
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    return videoDevices.count;
}
// 切换摄像头
// Switches between the front and back camera. Returns NO when switching is
// impossible or the new input cannot be created (delegate is notified).
- (BOOL)switchCameras {
    // 1. Bail out when there is no second camera.
    // fixed: was `canSwitchcameras` (wrong capitalization — unknown selector).
    if (![self canSwitchCameras]) {
        return NO;
    }
    // 2. The camera opposite the active one.
    AVCaptureDevice *videoDevice = [self inactiveCamera];
    // 3. Wrap it in an AVCaptureDeviceInput.
    NSError *error;
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    // 4. Swap the inputs inside a begin/commitConfiguration transaction.
    if (videoInput) {
        // Mark the start of an atomic configuration change.
        [self.captureSession beginConfiguration];
        // Remove the old input first — a session cannot hold two camera inputs.
        [self.captureSession removeInput:self.activeVideoInput];
        if ([self.captureSession canAddInput:videoInput]) {
            [self.captureSession addInput:videoInput];
            // Track the new active input.
            self.activeVideoInput = videoInput;
        } else {
            // Could not add the new device: restore the original input.
            [self.captureSession addInput:self.activeVideoInput];
        }
        // Apply all changes at once.
        [self.captureSession commitConfiguration];
    } else {
        // Creating the input failed: report a device-configuration error.
        [self.delegate deviceConfigurationFailedWithError:error];
        return NO;
    }
    return YES;
}
复制代码
五.实现摄像头自动聚焦功能
- 在修改设备的动作时,需要判断该设备是否支持
THCameraController.m重要代码
#pragma mark - Focus Methods 点击聚焦方法的实现
// 是否支持聚焦功能
#pragma mark - Focus Methods

// Whether the active camera supports point-of-interest focus.
- (BOOL)cameraSupportsTapToFocus {
    // fixed: was `isFocusPointOfInteresSupported` (missing "t" — unknown selector).
    return [[self activeCamera] isFocusPointOfInterestSupported];
}
// Performs a one-shot autofocus at the given device-space point.
- (void)focusAtPoint:(CGPoint)point {
    AVCaptureDevice *device = [self activeCamera];
    // Both point-of-interest focus and the autofocus mode must be supported.
    if (device.isFocusPointOfInterestSupported && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
        NSError *error;
        // Lock the device: its configuration must not be changed by several
        // clients at once.
        if ([device lockForConfiguration:&error]) {
            // Focus location. fixed: was `focusPointOfInteres` (typo).
            device.focusPointOfInterest = point;
            // Focus mode.
            device.focusMode = AVCaptureFocusModeAutoFocus;
            // Release the lock once configured.
            [device unlockForConfiguration];
        } else {
            // Could not lock the device — report the error.
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }
}
复制代码
六.实现摄像头自动曝光以及锁定曝光
THCameraController.m重要代码
#pragma mark - Exposure Methods 点击曝光的方法实现
// 是否支持曝光
#pragma mark - Exposure Methods

// Whether the active camera supports point-of-interest exposure.
- (BOOL)cameraSupportsTapToExpose {
    // fixed: was `isExpsurePointOfInterestSupported` (typo — unknown selector).
    return [[self activeCamera] isExposurePointOfInterestSupported];
}
// KVO context identifying our `adjustingExposure` observation.
static const NSString *THCameraAdjustingExposureContext;

// Sets continuous auto-exposure at the given device-space point, then observes
// `adjustingExposure` so the exposure can be locked once it settles.
- (void)exposeAtPoint:(CGPoint)point {
    AVCaptureDevice *device = [self activeCamera];
    AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    // Both point-of-interest exposure and the desired mode must be supported.
    // (fixed: removed a stray duplicate `isExposureModeSupported:` call whose
    // result was discarded.)
    if (device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode]) {
        NSError *error; // fixed: was `NSError error;` (missing pointer `*`)
        // Lock the device for configuration.
        if ([device lockForConfiguration:&error]) {
            device.exposurePointOfInterest = point;
            device.exposureMode = exposureMode;
            // If locked exposure is supported, watch adjustingExposure via KVO
            // to know when the exposure has settled.
            if ([device isExposureModeSupported:AVCaptureExposureModeLocked]) {
                [device addObserver:self
                         forKeyPath:@"adjustingExposure"
                            options:NSKeyValueObservingOptionNew
                            context:&THCameraAdjustingExposureContext];
            }
            [device unlockForConfiguration];
        } else {
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }
}
// KVO callback: once the device stops adjusting exposure, lock the exposure
// mode on the main queue and stop observing.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    // Only handle our own observation context.
    if (context == &THCameraAdjustingExposureContext) {
        AVCaptureDevice *device = (AVCaptureDevice *)object;
        // Has the device finished adjusting, and can exposure be locked?
        if (!device.isAdjustingExposure && [device isExposureModeSupported:AVCaptureExposureModeLocked]) {
            // Remove the observer so no further change notifications arrive.
            [object removeObserver:self forKeyPath:@"adjustingExposure" context:&THCameraAdjustingExposureContext];
            // Hop to the main queue to apply the locked mode.
            dispatch_async(dispatch_get_main_queue(), ^{
                NSError *error;
                if ([device lockForConfiguration:&error]) {
                    device.exposureMode = AVCaptureExposureModeLocked;
                    [device unlockForConfiguration];
                } else {
                    [self.delegate deviceConfigurationFailedWithError:error];
                }
            });
        }
    } else {
        // fixed: the super call was (a) attached to the wrong else branch — it
        // fired while exposure was still adjusting and never for foreign
        // contexts — and (b) passed `self` where `keyPath` belongs.
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}
// 重新设置对焦&曝光
// Restores continuous autofocus/auto-exposure centered on the frame.
- (void)resetFocusAndExposureModes {
    AVCaptureDevice *device = [self activeCamera];
    AVCaptureFocusMode focusMode = AVCaptureFocusModeContinuousAutoFocus;
    // Focus can be reset when both point-of-interest focus and continuous
    // autofocus are supported.
    BOOL canResetFocus = [device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode];
    AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    // fixed: original tested isFocusPointOfInterestSupported here; exposure
    // reset must check the *exposure* point-of-interest capability.
    BOOL canResetExposure = [device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode];
    // Device coordinate space: top-left (0,0), bottom-right (1,1), center (0.5,0.5).
    CGPoint centerPoint = CGPointMake(0.5f, 0.5f);
    NSError *error;
    // Lock the device for configuration.
    if ([device lockForConfiguration:&error]) {
        if (canResetFocus) {
            device.focusMode = focusMode;
            device.focusPointOfInterest = centerPoint;
        }
        if (canResetExposure) {
            device.exposureMode = exposureMode;
            device.exposurePointOfInterest = centerPoint;
        }
        [device unlockForConfiguration];
    } else {
        [self.delegate deviceConfigurationFailedWithError:error];
    }
}
复制代码
七.实现摄像头手电筒和闪光等模式的开启关闭
THCameraController.m重要代码
#pragma mark - Flash and Torch Modes 闪光灯 & 手电筒
// 判断是否有闪光灯
#pragma mark - Flash and Torch Modes

// Whether the active camera has a flash.
- (BOOL)cameraHasFlash {
    return [self activeCamera].hasFlash;
}
// 闪光灯模式
// Current flash mode of the active camera.
- (AVCaptureFlashMode)flashMode {
    return [self activeCamera].flashMode;
}
// 设置闪光灯
// Applies a new flash mode to the active camera, locking the device for the change.
- (void)setFlashMode:(AVCaptureFlashMode)flashMode {
    // The active capture device (not the session — the original comment was misleading).
    AVCaptureDevice *device = [self activeCamera];
    // Skip silently when the device does not support this flash mode.
    if (![device isFlashModeSupported:flashMode]) {
        return;
    }
    NSError *error;
    if ([device lockForConfiguration:&error]) {
        device.flashMode = flashMode;
        [device unlockForConfiguration];
    } else {
        [self.delegate deviceConfigurationFailedWithError:error];
    }
}
// 是否支持手电筒
// Whether the active camera has a torch.
- (BOOL)cameraHasTorch {
    return [self activeCamera].hasTorch;
}
// 手电筒模式
// Current torch mode of the active camera.
- (AVCaptureTorchMode)torchMode {
    return [self activeCamera].torchMode;
}
// Applies a new torch mode to the active camera, locking the device for the change.
- (void)setTorchMode:(AVCaptureTorchMode)torchMode {
    AVCaptureDevice *device = [self activeCamera];
    if ([device isTorchModeSupported:torchMode]) {
        NSError *error;
        // fixed: was `if (device lockForConfiguration: &error)` — missing message brackets.
        if ([device lockForConfiguration:&error]) {
            device.torchMode = torchMode;
            [device unlockForConfiguration];
        } else {
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }
}
复制代码
八.静态图片的拍摄
- 配置Session
THCameraController.m重要代码
#pragma mark - Image Capture Methods 拍摄静态图片
/*
AVCaptureStillImageOutput是AVCaptureOutput的子类,用于捕捉图片
*/
#pragma mark - Image Capture Methods

/*
 AVCaptureStillImageOutput is an AVCaptureOutput subclass for capturing stills.
*/
// Captures a still image from the image output and writes it to the Photos library.
- (void)captureStillImage {
    // The connection feeding video frames into the still-image output.
    AVCaptureConnection *connection = [self.imageOutput connectionWithMediaType:AVMediaTypeVideo];
    // The app is portrait-only, but the result photo should honor the physical
    // device orientation when the user shoots in landscape.
    if (connection.isVideoOrientationSupported) {
        connection.videoOrientation = [self currentVideoOrientation];
    }
    // fixed: removed a dead, never-used `handle` block local that merely
    // duplicated the completion handler below.
    // Capture the still image asynchronously.
    [self.imageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef _Nullable imageDataSampleBuffer, NSError *_Nullable error) {
        if (imageDataSampleBuffer != NULL) {
            // Convert the sample buffer to JPEG data, then to a UIImage.
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            UIImage *image = [[UIImage alloc] initWithData:imageData];
            // On success, hand the image off for saving.
            [self writeImageToAssetsLibrary:image];
        } else {
            NSLog(@"sampleBuffer is null!");
        }
    }];
}
// 获取当前设备方向
// Maps the physical device orientation to a capture video orientation.
- (AVCaptureVideoOrientation)currentVideoOrientation {
    // fixed: was `AVcaptureVideoOrientation` (wrong capitalization — unknown type).
    AVCaptureVideoOrientation orientation;
    switch ([UIDevice currentDevice].orientation) {
        case UIDeviceOrientationPortrait:
            orientation = AVCaptureVideoOrientationPortrait;
            break;
        case UIDeviceOrientationLandscapeRight:
            // fixed: device landscape-right corresponds to video landscape-LEFT
            // (the capture orientations are mirrored relative to the device's).
            orientation = AVCaptureVideoOrientationLandscapeLeft;
            break;
        case UIDeviceOrientationPortraitUpsideDown:
            orientation = AVCaptureVideoOrientationPortraitUpsideDown;
            break;
        default:
            // Covers device landscape-left, face up/down, and unknown.
            orientation = AVCaptureVideoOrientationLandscapeRight;
            break;
    }
    return orientation;
    // fixed: removed unreachable `return 0;` after the return above.
}
/*
Assets Library 框架
用来让开发者通过代码方式访问iOS photo
注意:会访问到相册,需要修改plist权限,否则会导致项目崩溃
*/
/*
 Assets Library framework: programmatic access to the iOS photo library.
 NOTE: touching the photo library requires the usage key in Info.plist,
 otherwise the app crashes.
*/
// Saves a captured still to the Saved Photos album and, on success, posts
// the thumbnail notification.
- (void)writeImageToAssetsLibrary:(UIImage *)image {
    // ALAssetsLibrary instance performing the write.
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    // arg 1: the CGImageRef backing the UIImage
    // arg 2: the image orientation cast to NSUInteger (ALAssetOrientation)
    // arg 3: success/failure completion block
    [library writeImageToSavedPhotosAlbum:image.CGImage
                              orientation:(NSUInteger)image.imageOrientation
                          completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            // Log the failure.
            id message = [error localizedDescription];
            NSLog(@"%@",message);
        } else {
            [self postThumbnailNotification: image];
        }
    }];
}
// 发送缩略图通知
// Broadcasts the freshly captured thumbnail image on the main queue.
- (void)postThumbnailNotification:(UIImage *)image {
    dispatch_async(dispatch_get_main_queue(), ^{
        // Notification name is a constant declared elsewhere in the project.
        [[NSNotificationCenter defaultCenter] postNotificationName:THThumbnailCreatedNotification
                                                            object:image];
    });
}
复制代码
九.视频录制实现及视频拍摄缩略图实现
视频内容的捕捉。当设置捕捉会话时,添加一个名为AVCaptureMovieFileOutput
的输出,这个类定义了方法将QuickTime影片捕捉到磁盘。这个类大多数核心功能都继承于超类AVCaptureFileOutput
,比如录制到最长时限或录制到特定文件大小时为止;还可以配置成保留最小可用的磁盘空间,这一点在存储空间有限的移动设备上录制视频时非常重要。 通常当QuickTime影片准备发布时,影片头的元数据处于文件的开始位置,这样可以让视频播放器快速读取头包含信息,来确定文件的内容、结构和其包含的多个样本的位置。不过,当录制一个QuickTime影片时,直到所有的样本都完成捕捉后才能创建信息头。当录制结束后,创建头数据并将它附在文件结尾处。
将创建头的过程放在所有影片样本完成捕捉之后存在一个问题,尤其是在移动设备的情况下。如果遇到崩溃或其他中断,比如有电话拨入,则影片头就不会被正确写入,会在磁盘生成一个不可读的影片文件。AVCaptureMovieFileOutput提供一个核心功能就是分段捕捉QuickTime影片。
当录制开始时,在文件最前面写入一个最小化的头信息,随着录制的进行,片段按照一定的周期写入,创建完整的头信息。默认状态下,每10秒写入一个片段,不过这个时间间隔可以通过修改捕捉设备输出的movieFragmentInterval属性来改变。写入片段的方式可以逐步创建完整的QuickTime影片头。这样确保了当遇到应用程序崩溃或中断时,影片仍然会以最后一个写入的片段为终点进行保存。 THCameraController.m重要代码
#pragma mark - Video Capture Methods 捕捉视频
// 捕捉是否录制状态
#pragma mark - Video Capture Methods

// Whether the movie output is currently recording.
- (BOOL)isRecording {
    return [self.movieOutput isRecording];
}
// 开始录制
// Starts movie recording to a unique temporary file (no-op when already recording).
- (void)startRecording {
    if (![self isRecording]) {
        // 1. The connection feeding video into the movie output — used to
        // configure per-recording properties.
        AVCaptureConnection *videoConnection = [self.movieOutput connectionWithMediaType:AVMediaTypeVideo];
        // Honor the physical device orientation when supported.
        if ([videoConnection isVideoOrientationSupported]) {
            videoConnection.videoOrientation = [self currentVideoOrientation];
        }
        // 2. Video stabilization noticeably improves quality; it only applies
        // while recording to a movie file.
        if ([videoConnection isVideoStabilizationSupported]) {
            videoConnection.enablesVideoStabilizationWhenAvailable = YES;
        }
        // 3. Smooth autofocus: slows the lens movement so focus hunting is
        // less jarring while the user moves the camera.
        AVCaptureDevice *device = [self activeCamera];
        // fixed: original tested isSmoothAutoFocusEnabled (the current state)
        // before enabling — the capability check is isSmoothAutoFocusSupported.
        if (device.isSmoothAutoFocusSupported) {
            NSError *error;
            if ([device lockForConfiguration:&error]) {
                device.smoothAutoFocusEnabled = YES;
                [device unlockForConfiguration];
            } else {
                [self.delegate deviceConfigurationFailedWithError:error];
            }
        }
        // 4. Unique file-system URL to write the captured movie to.
        self.outputURL = [self uniqueURL];
        // 5. Start recording a QuickTime movie (hardware-accelerated encode
        // handled by AVFoundation).
        // fixed: was `startRerdingToOutputFileURL` (typo — unknown selector).
        [self.movieOutput startRecordingToOutputFileURL:self.outputURL recordingDelegate:self];
    }
}
// 录制时间
// Duration recorded so far for the in-progress movie.
- (CMTime)recordedDuration {
    // fixed: was `recordingedDuration` (typo — the property is recordedDuration).
    return self.movieOutput.recordedDuration;
}
// 实现写入视频唯一文件系统URL
// Builds a unique file URL, inside a fresh temporary directory, for the movie.
// NOTE(review): temporaryDirectoryWithTemplateString: is a project-defined
// NSFileManager category method (not part of Foundation) — confirm it is linked.
- (NSURL *)uniqueURL {
    NSString *dirPath =
        [[NSFileManager defaultManager] temporaryDirectoryWithTemplateString:@"kamera.XXXXXX"];
    if (!dirPath) {
        return nil;
    }
    NSString *moviePath = [dirPath stringByAppendingPathComponent:@"kamera_movie.mov"];
    return [NSURL fileURLWithPath:moviePath];
}
// 停止录制
// Stops an in-progress recording (no-op otherwise).
- (void)stopRecording {
    // fixed: was `isRecorfing` (typo — unknown selector).
    if ([self isRecording]) {
        [self.movieOutput stopRecording];
    }
}
#pragma - mark - AVCaptureFileOutputRecordingDelegate
// 完成录制
#pragma mark - AVCaptureFileOutputRecordingDelegate

// Called when recording finishes: report a failure or persist the movie.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
    if (!error) {
        // Success: write the captured movie to the Photos library.
        [self writeVideoToAssetsLibrary:[self.outputURL copy]];
    } else {
        [self.delegate mediaCaptureFailedWithError:error];
    }
    // The URL is single-use; clear it for the next recording.
    self.outputURL = nil;
}
// 写入捕捉到的视频到相册
// Writes the captured movie file to the Photos library, then (on success)
// generates a thumbnail for the UI.
- (void)writeVideoToAssetsLibrary:(NSURL *)videoURL {
    // ALAssetsLibrary provides the video-writing interface.
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    // Check the movie is compatible before attempting the write.
    // fixed: was `videoAtPathIsCompatibleWithSavedPhotoAlbum` — the API name
    // is ...SavedPhotosAlbum (plural).
    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:videoURL]) {
        // Completion: report failure or produce a thumbnail for display.
        ALAssetsLibraryWriteVideoCompletionBlock completionBlock;
        completionBlock = ^(NSURL *assetURL, NSError *error) {
            if (error) {
                [self.delegate assetLibraryWriteFailedWithError:error];
            } else {
                [self generateThumbnailForVideoAtURL:videoURL];
            }
        };
        // Perform the actual write to the library.
        [library writeVideoAtPathToSavedPhotosAlbum:videoURL completionBlock:completionBlock];
    }
}
// 获取视频左下角缩略图
// Generates a small thumbnail from the first frame of the movie and posts it
// on the main queue.
- (void)generateThumbnailForVideoAtURL:(NSURL *)videoURL {
    // Run on the video queue to keep image generation off the main thread.
    dispatch_async(self.videoQueue, ^{
        // Build an AVAsset and an image generator for it.
        AVAsset *asset = [AVAsset assetWithURL:videoURL];
        AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
        // Width 100, height 0: the height is derived from the video's aspect ratio.
        imageGenerator.maximumSize = CGSizeMake(100.0f, 0.0f);
        // Apply the track's preferred transform so rotated videos produce a
        // correctly oriented thumbnail.
        // fixed: was `= YESl` (stray character instead of semicolon).
        imageGenerator.appliesPreferredTrackTransform = YES;
        // copyCGImageAtTime returns a +1 CGImageRef we must release ourselves.
        CGImageRef imageRef = [imageGenerator copyCGImageAtTime:kCMTimeZero actualTime:NULL error:nil];
        // Wrap it in a UIImage...
        UIImage *image = [UIImage imageWithCGImage:imageRef];
        // ...and release the CGImageRef to avoid a leak.
        CGImageRelease(imageRef);
        // Notify listeners on the main queue with the new thumbnail.
        dispatch_async(dispatch_get_main_queue(), ^{
            [self postThumbnailNotification:image];
        });
    });
}
复制代码