AVFoundation学习笔记之--音视频播放.md

版权声明:本文为博主原创文章,未经博主允许不得转载。 https://blog.csdn.net/wj610671226/article/details/82974485

AVFoundation学习笔记之–音视频播放

AVFoundation是用于处理音视频的框架。它位于Core Audio、Core Video、Core Media、Core Animation框架之上。
Core Audio是处理所有音频事件的框架。为音频提供录制、播放、处理等接口。
Core Video是针对视频处理的框架。为框架Core Media提供图片缓存和缓存池的支持。
Core Media 提供音频样本和视频帧处理需要的数据类型和接口。它还提供了AVFoundation用到的基于CMTime数据类型的时基模型。
Core Animation用于处理动画相关的框架。

AVSpeechSynthesizer

AVSpeechSynthesizer实现文本转语音的功能。

import AVFoundation

class SpeechSynthesizerViewController: UIViewController {

    /// Synthesizer that queues and speaks the utterances in order.
    private let speechSynthesizer = AVSpeechSynthesizer()
    /// Two English voices (US / GB) that are alternated between sentences.
    private let voices: [AVSpeechSynthesisVoice] = ["en-US", "en-GB"].map { AVSpeechSynthesisVoice(language: $0)! }
    /// The sentences to speak, one utterance each.
    private let speechString: [String] = ["The forum, which will bring together over 1,000 delegates, will be attended by special guests including the former French Prime Minister Jean-Pierre Raffarin, the 2011 Nobel Prize winner for Economics Thomas J.", "Sargent, and Zhu Yeyu, the vice3 president of the Hong Kong University of Science and Technology.", " Co-hosted by China Media Group and the People's Government of Guangdong Province, the forum will showcase the achievements of Guangdong Province in becoming a major gateway4 linking China with the world.", "The province also provides an example of the benefits of China's policy of Reform and Opening Up, which celebrates its 40th anniversary this year."]

    override func viewDidLoad() {
        super.viewDidLoad()
        // Dump the full list of voices installed on this device.
        print(AVSpeechSynthesisVoice.speechVoices())
    }

    @IBAction func clickPlay(_ sender: UIButton) {
        for (index, sentence) in speechString.enumerated() {
            let utterance = AVSpeechUtterance(string: sentence)
            // Alternate the two voices sentence by sentence.
            utterance.voice = voices[index % 2]
            utterance.rate = 0.4                // a bit slower than normal speed
            utterance.pitchMultiplier = 0.8     // slightly lower pitch
            utterance.postUtteranceDelay = 0.1  // short pause between sentences
            speechSynthesizer.speak(utterance)
        }
    }
}

播放和录制音频

音频会话分类表:

分类 作用 是否允许混音 音频输入 音频输出
Ambient 游戏、效率应用软件
Solo Ambient(默认) 游戏、效率应用软件
Playback 音频和视频播放器 可选
Record 录音机、音频捕捉
Play and Record VoIP、语音聊天 可选
Audio Processing 离线会话和处理
Multi-Route 使用外部硬件的高级A/V应用程序
  • 使用AVAudioPlayer播放本地音频

AVAudioPlayer可以实现音频的播放、循环、音频计量等,除非需要从网络流中播放音频、需要访问原始音频样本或者需要非常低的时延等,AVAudioPlayer都能胜任。

AVAudioPlayer功能:
1、修改播放器的音量
2、修改播放器的Pan值,允许立体声播放声音,范围(-1.0~1.0)
3、调整播放率,范围(0.5~2.0),半速到2倍速
4、通过设置numberOfLoops属性实现音频无缝循环,n大于0,实现循环n次循环,-1为无限循环
5、进行音频计量,获取播放音频力度的平均值和峰值

/// Configures the shared audio session for playback and activates it.
///
/// For background playback, also add to Info.plist:
/// Required background modes = App plays audio or streams audio/video using AirPlay
private func settingSession() {
    let session = AVAudioSession.sharedInstance()
    do {
        // Playback category: audio continues even with the silent switch on.
        try session.setCategory(AVAudioSessionCategoryPlayback)
        try session.setActive(true)
    } catch {
        print("error = \(error)")
    }
}

// MARK: - AVAudioPlayer
    /// Creates an AVAudioPlayer for the bundled "test.mp3" and registers for
    /// interruption and route-change notifications.
    /// NOTE(review): `url!` force-unwraps the bundle lookup — the resource must
    /// exist in the main bundle or this crashes.
    private func audioPlayer() {
        // Play a local file with AVAudioPlayer.
        let url = Bundle.main.url(forResource: "test", withExtension: "mp3")
        do {
            player = try AVAudioPlayer.init(contentsOf: url!)
            // Handle interruption events, e.g. an incoming phone call.
            NotificationCenter.default.addObserver(self, selector: #selector(handleNotification(_:)), name: .AVAudioSessionInterruption, object: nil)
            // Handle audio-route changes, e.g. headphones plugged in / removed.
            NotificationCenter.default.addObserver(self, selector: #selector(handleRouteNotification(_:)), name: .AVAudioSessionRouteChange, object: nil)
            player?.prepareToPlay()
        } catch let error {
            print("error = \(error)")
        }
    }
    
    /// Starts (or resumes) playback of the prepared player.
    @IBAction func clickPlayMp3(_ sender: Any) {
        player?.play()
    }
    
    /// Responds to AVAudioSession interruption notifications:
    /// pauses playback when the interruption begins, resumes when it ends.
    @objc func handleNotification(_ sender: Notification) {
        // Read the interruption type defensively instead of force-unwrapping
        // userInfo, and use the framework key constant rather than a raw string.
        guard let info = sender.userInfo,
            let type = info[AVAudioSessionInterruptionTypeKey] as? UInt else {
            return
        }
        if type == AVAudioSessionInterruptionType.began.rawValue {
            print("开始")
            player?.pause()
        } else {
            print("结束")
            player?.play()
        }
    }
    
    /// Responds to audio-route changes: pauses playback when the previous
    /// output device became unavailable (e.g. headphones were unplugged).
    @objc func handleRouteNotification(_ sender: Notification) {
        // Safe unwrap + framework key constant instead of force-casting a raw string key.
        guard let info = sender.userInfo,
            let reason = info[AVAudioSessionRouteChangeReasonKey] as? UInt else {
            return
        }
        if AVAudioSessionRouteChangeReason.oldDeviceUnavailable.rawValue == reason {
            print("耳机取出, 暂停播放")
            player?.pause()
        }
    }
    
  • 使用AVAudioRecorder录制音频

AVAudioRecorder 支持无限时长的录制,支持录制一段时间后暂停,再从这个点开始继续录制。

录制音频关键步骤:
1、提供本地存储文件的URL
2、配置录制音频会话的信息
3、容错处理

/// Configures the shared audio session for the recording demo and activates it.
private func settingSession() {
    // 配置音频会话
    let audioSession = AVAudioSession.sharedInstance()
    do {
        // Recording with AVAudioRecorder requires a record-capable category;
        // the original Playback category cannot capture microphone input.
        try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
        try audioSession.setActive(true)
    } catch let error {
        print("error = \(error)")
    }
}

// MARK: - AVAudioRecorder
/// Creates an AVAudioRecorder writing IMA4-compressed audio to
/// Documents/voice.caf and prepares it for recording.
/// NOTE(review): requires the microphone-usage permission key in Info.plist.
private func audioRecorderDemo() {
    // Target file: <Documents>/voice.caf
    let path = NSSearchPathForDirectoriesInDomains(FileManager.SearchPathDirectory.documentDirectory, FileManager.SearchPathDomainMask.allDomainsMask, true).last! + "/voice.caf"
    let url = URL(fileURLWithPath: path)
    do {
        /*
         AVFormatIDKey           audio format (Apple IMA4 here)
         AVSampleRateKey         sample rate (44.1 kHz)
         AVNumberOfChannelsKey   channel count (mono)
         */
        recoder = try AVAudioRecorder.init(url: url, settings: [AVFormatIDKey : kAudioFormatAppleIMA4, AVSampleRateKey: 44100.0, AVNumberOfChannelsKey: 1, AVEncoderBitDepthHintKey: 16, AVEncoderAudioQualityKey: AVAudioQuality.medium])
        recoder?.prepareToRecord()
        recoder?.delegate = self
        
        
        // Enable audio metering:
// //            recoder?.isMeteringEnabled = true
        // Average power per channel, 0 ~ -160 dB:
// //            recoder?.averagePower(forChannel: <#T##Int#>)
        // Peak power per channel:
// //            recoder?.peakPower(forChannel: <#T##Int#>)
    } catch let error {
        print("error = \(error)")
    }
}
    
/// Starts (or resumes) recording.
@IBAction func clickRecoder(_ sender: Any) {
    print("开始录制")
    recoder?.record()
}
    
    
/// Pauses recording; a later record() continues from this point.
@IBAction func pauseRecoder(_ sender: Any) {
    recoder?.pause()
    print("暂停录制")
}
    
/// Stops recording and closes the output file;
/// triggers audioRecorderDidFinishRecording on the delegate.
@IBAction func stopRecoder(_ sender: Any) {
    recoder?.stop()
    print("结束录制")
}

// MARK: - AVAudioRecorderDelegate
extension ViewController: AVAudioRecorderDelegate {
    /// Called when recording finishes or is stopped. `flag` reports whether the
    /// file was written successfully but is currently ignored —
    /// NOTE(review): consider deleting the file when flag == false.
    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        print("录制音频停止 对录制的音频做处理,保存?删除? audioRecorderDidFinishRecording")
    }
}

视频播放

  • 基础知识

AVPlayer是AVFoundation中的核心播放类,但是它是一个不可见的组件,需要AVPlayerLayer来显示播放界面。AVPlayer是一个单独资源的播放,如果需要在一个序列中播放多个条目需要用AVQueuePlayer来实现。

AVPlayerLayer构建在Core Animation上用于视频内容的渲染界面。

AVPlayerItem会建立媒体资源动态数据模型,可以获取播放视频中的currentTime和presentationSize等多个动态属性。

  • 示例代码
// Player stack kept as stored properties so they outlive viewDidLoad.
private var avPlayer: AVPlayer!                     // invisible playback engine
private var playerItem: AVPlayerItem!               // dynamic model of the asset being played
private var asset: AVAsset!                         // static media resource
private var imageGenerator: AVAssetImageGenerator!  // thumbnail generator for the asset
    
/// Builds the AVAsset → AVPlayerItem → AVPlayer chain, observes the item's
/// "status" via KVO, attaches a PlayerView, and starts playback.
/// NOTE(review): `url!` force-unwraps the bundle lookup — the "Test.mov"
/// resource must exist or this crashes.
override func viewDidLoad() {
    super.viewDidLoad()
    let url = Bundle.main.url(forResource: "Test", withExtension: "mov")
    // Remote video alternative:
    // let url = URL.init(string: "http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4")
    asset = AVAsset(url: url!)
    playerItem = AVPlayerItem(asset: asset)
    // Observe readiness; the real setup happens in observeValue(forKeyPath:...).
    playerItem.addObserver(self, forKeyPath:"status", options: [NSKeyValueObservingOptions.old, NSKeyValueObservingOptions.new] , context: nil)
    avPlayer = AVPlayer(playerItem: playerItem)
    let playerView = PlayerView.init(UIScreen.main.bounds, avPlayer)
    view.addSubview(playerView)
    avPlayer.play()
    
}

/// KVO callback for the player item's "status" key; once the item is ready to
/// play, installs the time/end observers and loads thumbnails and subtitles.
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
    // Only react to the key this class registered for; forward anything else.
    guard keyPath == "status" else {
        super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
        return
    }
    if playerItem.status == .readyToPlay {
        print("readyToPlay")
        // Periodic playback-time observer.
        addPlayerItemTimeObserver()
        
        // Play-to-end notification.
        addItemEndObserverForPlayerItem()
        
        // Thumbnails at evenly spaced points in the video.
        getGenerateThumbnails()
        
        // Subtitle (legible media) information.
        loadMediaOptions()
    }
}
    
    
/// Prints and collects the display names of the asset's legible
/// (subtitle/caption) media selection options, if the asset has any.
private func loadMediaOptions() {
    // Ask the asset for its legible selection group; nil means no subtitles.
    guard let group = asset.mediaSelectionGroup(forMediaCharacteristic: AVMediaCharacteristic.legible) else {
        print("gropup = nil, 没有字幕信息")
        return
    }
    let subtitles: [String] = group.options.map { option in
        print("displayName = \(option.displayName)")
        return option.displayName
    }
}
    
/// Asynchronously generates ~20 thumbnails evenly spaced across the asset.
private func getGenerateThumbnails() {
    imageGenerator = AVAssetImageGenerator(asset: asset)
    // Constrain the width; height 0 preserves the aspect ratio.
    imageGenerator.maximumSize = CGSize(width: 200, height: 0)
    
    // Build ~20 evenly spaced sample times.
    let duration = asset.duration
    var times: [NSValue] = Array()
    // Clamp to at least 1: for assets shorter than 20 time units the original
    // integer division yielded 0 and the while-loop below never terminated.
    let increment = max(duration.value / 20, 1)
    var currentValue = kCMTimeZero.value
    while currentValue <= duration.value {
        let time = CMTimeMake(currentValue, duration.timescale)
        times.append(NSValue.init(time: time))
        currentValue += increment
    }
    
    var images: [UIImage] = Array()
    imageGenerator.generateCGImagesAsynchronously(forTimes: times) { (requestedTime, cgImage, actualTime, result, error) in
        // NOTE(review): this handler runs off the main queue; `images` is not
        // thread-safe and any UI update must be dispatched to the main queue.
        if result == AVAssetImageGeneratorResult.succeeded {
            let image = UIImage(cgImage: cgImage!)
            images.append(image)
            // Hand the thumbnail to the UI here (on the main queue).
        } else {
            print("生成缩略图失败")
        }
    }
}
    
/// Observes the "did play to end" notification for this player item only.
/// (The original passed object: nil, which fires for every AVPlayerItem
/// in the process, not just the one being played here.)
private func addItemEndObserverForPlayerItem() {
    NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: playerItem, queue: OperationQueue.main) { (notification) in
        print("播放完成")
    }
}
    
// Playback-time observation
/// Installs a periodic time observer that logs the current time and the total
/// duration every 0.5 seconds on the main queue.
private func addPlayerItemTimeObserver() {
    /*
     Two ways to observe time on AVPlayer:
     1. Periodic:  addPeriodicTimeObserver(forInterval:queue:using:)
     2. Boundary:  addBoundaryTimeObserver(forTimes:queue:using:)
     */
    
    // Fire every 0.5 seconds.
    let time = CMTimeMakeWithSeconds(0.5, Int32(NSEC_PER_SEC))
    avPlayer.addPeriodicTimeObserver(forInterval: time, queue: DispatchQueue.main) { [weak self] (time) in
        // Bail out instead of force-unwrapping: the original
        // `(self?.playerItem.duration)!` crashes if self is deallocated
        // while the observer is still installed.
        guard let self = self else { return }
        let currentTime = CMTimeGetSeconds(time)
        let duration = CMTimeGetSeconds(self.playerItem.duration)
        print("更新当前播放的时间 = \(currentTime), 视频总时长 = \(duration)")
    }
}
    
deinit {
    // Remove the selector-based notification observers registered by this object.
    // NOTE(review): block-based observers and the periodic time observer return
    // tokens that are not retained here, so they are not removed by this call.
    NotificationCenter.default.removeObserver(self)
}
  • 利用AVKit播放视频

AVKit是iOS8新出的一个框架,可用于快速构建一个简单的播放功能。在MediaPlayer框架中的MPMoviePlayerViewController也有类似的功能,只不过在iOS9.0已经废弃了。

// Playing a video with AVKit takes just a few lines.
let avplayer = AVPlayerViewController()
// Hide the built-in playback control bar.
avplayer.showsPlaybackControls = false
// NOTE(review): `url!` force-unwraps the bundle lookup — "Test.mov" must exist.
let url = Bundle.main.url(forResource: "Test", withExtension: "mov")
avplayer.player = AVPlayer(url: url!)
avplayer.view.frame = UIScreen.main.bounds
view.addSubview(avplayer.view)
avplayer.player?.play()

猜你喜欢

转载自blog.csdn.net/wj610671226/article/details/82974485
今日推荐