swift--获取相机数据流

//所需框架
import AVFoundation

关键

1.输入 AVCaptureDevice

2.控制,调整 AVCaptureSession

3.输出 AVCaptureVideoDataOutput,输出的数据需要实现AVCaptureVideoDataOutputSampleBufferDelegate的captureOutput方法来接收。

4.显示 AVCaptureVideoPreviewLayer

import UIKit
import AVFoundation


class ViewController: UIViewController {

    /// Session coordinating data flow from the camera input to the
    /// video data output and the preview layer.
    var captureSession = AVCaptureSession()

    @IBOutlet weak var imageView: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Check camera availability (fails on the simulator and devices without a camera).
        guard UIImagePickerController.isSourceTypeAvailable(.camera) else {
            print("Camera not supported")
            return
        }

        // Acquire the default video capture device. `default(for:)` returns nil
        // when no such device exists — bail out instead of force-unwrapping.
        guard let device = AVCaptureDevice.default(for: .video) else {
            print("No video capture device available")
            return
        }

        // Configure the session.
        self.captureSession.sessionPreset = .photo
        do {
            let input = try AVCaptureDeviceInput(device: device)
            // Guard with canAddInput: adding an incompatible input raises an exception.
            guard self.captureSession.canAddInput(input) else {
                print("Cannot add camera input to the session")
                return
            }
            self.captureSession.addInput(input)
        } catch {
            print("error: \(error.localizedDescription)")
            return
        }

        // Configure the video data output; frames are delivered to the
        // sample-buffer delegate on a dedicated serial queue.
        let output = AVCaptureVideoDataOutput()
        let cameraQueue = DispatchQueue(label: "cameraQueue")
        output.setSampleBufferDelegate(self, queue: cameraQueue)
        // Request BGRA pixel buffers so frames convert cleanly to CIImage/UIImage.
        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
        // Guard with canAddOutput for the same reason as the input above.
        if self.captureSession.canAddOutput(output) {
            self.captureSession.addOutput(output)
        }

        // Create a preview layer for live display.
        let previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
        // Use the type-safe constant rather than a raw string.
        previewLayer.videoGravity = .resizeAspect
        previewLayer.frame = self.imageView.bounds
        self.imageView.layer.addSublayer(previewLayer)

        // Start the data flow.
        // To stop it later, call: self.captureSession.stopRunning()
        self.captureSession.startRunning()
    }

}



// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {

    /// Called on the camera queue for every captured video frame.
    /// Converts the frame to a UIImage here (still on the camera queue) so the
    /// CMSampleBuffer is not retained past the delegate callback — holding
    /// buffers on the main queue can starve the capture pipeline — then hops
    /// to the main queue only for the UI update.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        let image = self.imageConvert(sampleBuffer: sampleBuffer)
        DispatchQueue.main.async {
            self.imageView.image = image
        }
    }

    /// Converts a CMSampleBuffer into a UIImage.
    /// - Parameter sampleBuffer: A video sample buffer delivered by the capture output.
    /// - Returns: The decoded image, or nil when the buffer is nil, invalid,
    ///   or carries no pixel data (no force-unwraps on any of these paths).
    func imageConvert(sampleBuffer: CMSampleBuffer?) -> UIImage? {
        guard let sampleBuffer = sampleBuffer,
              CMSampleBufferIsValid(sampleBuffer),
              let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return nil
        }
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        return UIImage(ciImage: ciImage)
    }
}

 

Guess you like

Origin blog.csdn.net/weixin_41735943/article/details/104692162