iOS Video and Audio Capture

Copyright notice: this is an original article by the blogger and may not be reposted without permission. https://blog.csdn.net/zhuweigangzwg/article/details/52701669

iOS video and audio capture, with preview

This article shows how to capture video and audio on iOS and how to preview the camera. The preview uses the system-provided AVCaptureVideoPreviewLayer on a UIView, video capture uses AVCaptureSession, and audio capture uses AudioQueue, because configuring audio parameters through AVCaptureSession is troublesome. The full code follows.
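Before the listing, here is a minimal driver sketch (my addition, not from the original post) showing how the two classes below are meant to be wired together from a view controller. It assumes MRC (the listing uses release/dispatch_release), that get_list_session: and get_AQRecorderState (defined in the .m but not declared in the header) are exposed, and that stream_id 0 is the back camera, as laid out in device_init.

Lvs_Ios_Device_Collection *collector = [[Lvs_Ios_Device_Collection alloc] init];
Lvs_Ios_Device_Preview *preview = [[Lvs_Ios_Device_Preview alloc] init];

//Enumerate devices and build the Module_Info array (see device_init below)
Module_Info *modules = NULL;
int stream_count = 0;
[collector device_init:&modules andCurrentLength:&stream_count];

//Preview and start the back camera (stream_id 0); UIKit work stays on the main thread
AVCaptureSession *backSession = [collector get_list_session:0];
[preview ShowPreview:backSession andImageView:(void *)self.view];
[backSession startRunning];

//Start pulling PCM from the microphone
AudioQueueStart([collector get_AQRecorderState]->mQueue, NULL);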

//
//  Lvs_Ios_Device_Collection.h
//  LvsIos
//
//  Created by mx on 16/9/5.
//  Copyright © 2016 lvs.zwg All rights reserved.
//

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import "Lvs_Info.h"

//Audio capture uses AudioQueue

//Adopt the AVFoundation delegate protocols for video and audio data output; the protocol names and callback methods are fixed by the framework
@interface Lvs_Ios_Device_Collection : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>


//Number of audio capture buffers
#define kNumberBuffers 3

//Audio capture state (AudioQueue)
typedef struct AQRecorderState {
    AudioStreamBasicDescription  mDataFormat;                          //format
    AudioQueueRef                mQueue;                               //audio queue
    AudioQueueBufferRef          mBuffers[kNumberBuffers];             //data buffers
    UInt32                       bufferByteSize;                       //size of each buffer
    Float64                      audio_seconde_time;                   //seconds of audio buffered per callback (must be long enough for downstream processing or data is dropped; tentatively 0.5 s)
    Module_StreamInfo *          ModuleStreamInfo_Out_Audio_data;      //output stream info (the struct used to hand data out)
} AQRecorderState;


//Delegate method for video data output
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;

//init
- (int)device_init: (Module_Info **) ModuleInfo_Current andCurrentLength: (int *) CurrentLength;
//uninit
- (void)device_uinit: (Module_Info *) ModuleInfo_Current;

//write
- (int)device_write: (Module_Info *) ModuleInfo_Current andUpperLength: (int) UpperLength
              andModuleInfo_Next_video: (Module_Info *) ModuleInfo_Next_video andNextLength_video: (int) NextLength_video
              andModuleInfo_Next_audio: (Module_Info *) ModuleInfo_Next_audio andNextLength_audio: (int)NextLength_audio;


@end


//
//  Lvs_Ios_Device_Collection.m
//  LvsIos
//
//  Created by mx on 16/9/5.
//  Copyright © 2016 lvs. All rights reserved.
//

#import "Lvs_Ios_Device_Collection.h"


@implementation Lvs_Ios_Device_Collection

int m_stream_num = 3;                                        //only three streams here: back camera, front camera, microphone (audio actually uses AudioQueue; this slot is just reserved)
//video
std::map<int,AVCaptureSession *> m_list_capture_session;     //sessions keyed by stream_id
AVCaptureConnection *videoCaptureConnection_back = nil;      //connection for the back camera's video output
AVCaptureConnection *videoCaptureConnection_front = nil;     //connection for the front camera's video output
AVCaptureConnection *audioCaptureConnection = nil;           //connection for the audio output
long long up_time_video_front = 0;                           //timestamp of the previous front-camera frame
long long up_time_video_back = 0;                            //timestamp of the previous back-camera frame
//audio
static AQRecorderState m_aqData = {0};                       //audio capture state (AudioQueue)
long long up_time_audio = 0;                                 //timestamp of the previous audio buffer

//pointers used to hand captured data out
Module_Info * m_device_module_info_collection;               //0: back camera 1: front camera 2: microphone
int m_device_module_length_collection;

//Look up the session for a given stream_id
-(AVCaptureSession *)get_list_session: (int)stream_id
{
    return m_list_capture_session[stream_id];
}

-(AQRecorderState *)get_AQRecorderState
{
    return &m_aqData;
}

//Get a connection (type 0->video 1->audio) (backorfront 0->back 1->front)
-(AVCaptureConnection *)get_connection: (int)type andbackorfront: (int) backorfront
{
    //video
    if (type == 0)
    {
        if (backorfront == 0)
        {
            return videoCaptureConnection_back;
        }
        else if(backorfront == 1)
        {
            return videoCaptureConnection_front;
        }
    }
    //audio
    else if(type ==1)
    {
        return audioCaptureConnection;
    }
    return nil;
}

//Delegate method for video data output
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    //NSLog(@"delegate");
    if (connection == videoCaptureConnection_back) //0
    {
        //NSLog(@"videoCaptureConnection_back");
        
        CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        
        //LOCK
        if (CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess)
        {
            //buf
            UInt8 *bufferPtr_y = (UInt8*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            UInt8 *bufferPtr_uv = (UInt8*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
            //bufsize
            size_t buffSize = CVPixelBufferGetDataSize(imageBuffer);   //this value is unreliable; compute the size yourself
            //width
            size_t width = CVPixelBufferGetWidth(imageBuffer);
            //height
            size_t height = CVPixelBufferGetHeight(imageBuffer);
            //PlaneCount
            size_t PlaneCount = CVPixelBufferGetPlaneCount(imageBuffer);
            //NSLog(@"buffSize %d",buffSize);
            //NSLog(@"width %d",width);
            //NSLog(@"height %d",height);
            //NSLog(@"PlaneCount %d",PlaneCount);
            
            //milliseconds since 1970
            NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970]*1000;
            long long theTime = [[NSNumber numberWithDouble:nowtime] longLongValue];
            
            if(theTime > up_time_video_back)
            {
                //copy the data
                m_device_module_info_collection[0].ModuleStreamInfo_Out->ActualLen = width * height + width * height /2;
                memcpy(m_device_module_info_collection[0].ModuleStreamInfo_Out->Buf,bufferPtr_y, width * height);
                memcpy(m_device_module_info_collection[0].ModuleStreamInfo_Out->Buf + width * height, bufferPtr_uv, width * height /2);
                
                m_device_module_info_collection[0].ModuleStreamInfo_Out->VideoInfo.pts = theTime;
                m_device_module_info_collection[0].ModuleStreamInfo_Out->VideoInfo.dts = theTime;
                up_time_video_back = theTime;
            }
            else
            {
                m_device_module_info_collection[0].ModuleStreamInfo_Out->ActualLen = 0;
            }
            //Unlock (matches the successful lock above)
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        }
    }
    else if(connection == videoCaptureConnection_front)  //1
    {
        //NSLog(@"videoCaptureConnection_front");
        
        CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        
        //LOCK
        if (CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess)
        {
            //buf
            UInt8 *bufferPtr_y = (UInt8*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            UInt8 *bufferPtr_uv = (UInt8*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
            //bufsize
            size_t buffSize = CVPixelBufferGetDataSize(imageBuffer);  //this value is unreliable; compute the size yourself
            //width
            size_t width = CVPixelBufferGetWidth(imageBuffer);
            //height
            size_t height = CVPixelBufferGetHeight(imageBuffer);
            //PlaneCount
            size_t PlaneCount = CVPixelBufferGetPlaneCount(imageBuffer);
            //NSLog(@"buffSize %d",buffSize);
            //NSLog(@"width %d",width);
            //NSLog(@"height %d",height);
            //NSLog(@"PlaneCount %d",PlaneCount);
            
            //milliseconds since 1970
            NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970]*1000;
            long long theTime = [[NSNumber numberWithDouble:nowtime] longLongValue];
            
            if (theTime > up_time_video_front) {
                //copy the data
                m_device_module_info_collection[1].ModuleStreamInfo_Out->ActualLen = width * height + width * height /2;
                memcpy(m_device_module_info_collection[1].ModuleStreamInfo_Out->Buf,bufferPtr_y, width * height);
                memcpy(m_device_module_info_collection[1].ModuleStreamInfo_Out->Buf + width * height, bufferPtr_uv, width * height /2);
                
                m_device_module_info_collection[1].ModuleStreamInfo_Out->VideoInfo.pts = theTime;
                m_device_module_info_collection[1].ModuleStreamInfo_Out->VideoInfo.dts = theTime;
                up_time_video_front = theTime;
            }
            else
            {
                m_device_module_info_collection[1].ModuleStreamInfo_Out->ActualLen = 0;
            }

            //Unlock (matches the successful lock above)
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        }
    }
    else if(connection == audioCaptureConnection)
    {
        NSLog(@"audioCaptureConnection");
    }
    else
    {
        NSLog(@"otherCaptureConnection");
    }
}
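One caveat about the plane copies above: the memcpy calls assume each row is exactly width bytes, but CoreVideo may pad rows, so the per-plane bytes-per-row can be larger than the pixel width. A stride-aware copy sketch (my addition; same variable names as the back-camera branch above):

//Row-by-row NV12 copy that respects per-plane bytes-per-row (row padding)
size_t stride_y  = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
size_t stride_uv = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);
UInt8 *dst = (UInt8 *)m_device_module_info_collection[0].ModuleStreamInfo_Out->Buf;
for (size_t row = 0; row < height; row++)         //Y plane: height rows of width bytes
    memcpy(dst + row * width, bufferPtr_y + row * stride_y, width);
dst += width * height;
for (size_t row = 0; row < height / 2; row++)     //interleaved UV plane: height/2 rows
    memcpy(dst + row * width, bufferPtr_uv + row * stride_uv, width);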

//Audio capture data callback
static void lvsAudioQueueInputCallback(void                        *aqData,
                               AudioQueueRef                       inAQ,
                               AudioQueueBufferRef                 inBuffer,
                               const AudioTimeStamp                *inStartTime,
                               UInt32                              inNumPackets,
                               const AudioStreamPacketDescription  *inPacketDesc)
{
    AQRecorderState *pAqData = (AQRecorderState *) aqData;
    
    //milliseconds since 1970
    NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970]*1000;
    long long theTime = [[NSNumber numberWithDouble:nowtime] longLongValue];
    
    if (inNumPackets > 0)
    {
        if (theTime > up_time_audio)
        {
            pAqData->ModuleStreamInfo_Out_Audio_data->ActualLen = inBuffer->mAudioDataByteSize;
            //copy the data
            memcpy(pAqData->ModuleStreamInfo_Out_Audio_data->Buf,(char*)inBuffer->mAudioData,pAqData->ModuleStreamInfo_Out_Audio_data->ActualLen);
            
            
            pAqData->ModuleStreamInfo_Out_Audio_data->AudioInfo.pts = theTime;
            pAqData->ModuleStreamInfo_Out_Audio_data->AudioInfo.dts = theTime;
        }
        else{
            pAqData->ModuleStreamInfo_Out_Audio_data->ActualLen = 0;
        }
    }


    //re-enqueue the buffer so the queue can fill it again
    AudioQueueEnqueueBuffer(pAqData->mQueue,inBuffer,0,NULL);
}

//Compute the audio buffer size
void DeriveBufferSize (AudioQueueRef audioQueue,AudioStreamBasicDescription *ASBDescription, Float64 seconds,UInt32 *outBufferSize)
{
    static const int maxBufferSize = 0x50000;
    
    int maxPacketSize = (*ASBDescription).mBytesPerPacket;
    if (maxPacketSize == 0)
    {
        UInt32 maxVBRPacketSize = sizeof(maxPacketSize);
        AudioQueueGetProperty(audioQueue,kAudioQueueProperty_MaximumOutputPacketSize,&maxPacketSize,&maxVBRPacketSize);
    }
    
    Float64 numBytesForTime =
    (*ASBDescription).mSampleRate * maxPacketSize * seconds;
    *outBufferSize = numBytesForTime < maxBufferSize ? numBytesForTime : maxBufferSize;
}
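For the format configured in device_init below (44.1 kHz, 2 channels, 16-bit PCM, so mBytesPerPacket = 4), the 0.4-second buffer used later works out to 44100 × 4 × 0.4 = 70,560 bytes per buffer, well under the 0x50000 (327,680-byte) cap.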


- (int)device_init: (Module_Info **) ModuleInfo_Current andCurrentLength: (int *) CurrentLength
{
    int device_length = 0; //total number of devices, cameras and microphone together
    NSArray * devices = [AVCaptureDevice devices];
    NSError *error = nil;
    
    Module_Info * pmodule_info = nil;
    pmodule_info = *ModuleInfo_Current;
    pmodule_info = (Module_Info*)realloc(pmodule_info, m_stream_num * sizeof(Module_Info));
    for (int i = 0; i< m_stream_num;i++)
    {
        pmodule_info[i].ModuleStreamInfo_In = nil;
        pmodule_info[i].ModuleStreamInfo_Out = nil;
    }
    
    for (AVCaptureDevice *device in devices)
    {
        NSLog(@"Device name: %@", [device localizedName]);
        
        //session init
        AVCaptureSession *session = [[AVCaptureSession alloc] init];
        
        
        if ([device hasMediaType:AVMediaTypeVideo])
        {
            if ([device position] == AVCaptureDevicePositionBack)
            {
                NSLog(@"Device position : back");
                
                //Set the capture resolution
                if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
                {
                    session.sessionPreset = AVCaptureSessionPreset640x480;
                }
                //Create and Configure the Device and Device Input
                AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
                
                if (!input)
                {
                    // Handle the error appropriately
                }
                if ([session canAddInput:input])
                {
                    [session addInput:input];
                }
                
                //Create and Configure the Data Output
                AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
                
                
                //AVCaptureVideoOrientation
                
                //Dispatch queue for the output callbacks
                dispatch_queue_t queue = dispatch_queue_create("video_back_queue", NULL);
                [output setSampleBufferDelegate:self queue:queue]; //setSampleBufferDelegate sets the data-output delegate
                dispatch_release(queue);
                //Output settings (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange is NV12)
                NSDictionary *setting =[[NSDictionary alloc] initWithObjectsAndKeys:
                                        [NSNumber numberWithInt:640], (id)kCVPixelBufferWidthKey,
                                        [NSNumber numberWithInt:480], (id)kCVPixelBufferHeightKey,
                                        [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],(id)kCVPixelBufferPixelFormatTypeKey,
                                        nil];
                output.videoSettings = setting;
                output.alwaysDiscardsLateVideoFrames = YES;
                output.minFrameDuration = CMTimeMake(1, 25); //cap at 25 fps (minFrameDuration is deprecated; newer SDKs use AVCaptureConnection.videoMinFrameDuration)
                
                if ([session canAddOutput:output])
                {
                    [session addOutput:output];
                }
                
                [setting release];
                
                //get connection
                videoCaptureConnection_back = [output connectionWithMediaType:AVMediaTypeVideo];
                //Set the video orientation
                [videoCaptureConnection_back setVideoOrientation:AVCaptureVideoOrientationPortrait];
                
                //put session
                m_list_capture_session[device_length] = (session);
                
                //put info
                pmodule_info[device_length].Is_ToNext_Module = 1;
                //header
                Module_HeaderInfo_In_Video_Dispose * pheaderinfo_device_video = nil; //reusing the In_Video_Dispose header type here
                pheaderinfo_device_video = new Module_HeaderInfo_In_Video_Dispose();
                pheaderinfo_device_video->stream_id = device_length;
                pmodule_info[device_length].ModuleHeaderInfo.HeaderInfo = pheaderinfo_device_video;
                //streaminfo
                Module_StreamInfo *pmodulestreaminfo = nil;
                pmodulestreaminfo = new Module_StreamInfo();
                pmodulestreaminfo->stream_id = device_length;
                pmodulestreaminfo->CodecType = CodecType_Video;
                pmodulestreaminfo->VideoInfo.PixFormat = LVS_PIX_FMT_NV12;
                pmodulestreaminfo->VideoInfo.Width = 480;  //width/height swapped for portrait orientation
                pmodulestreaminfo->VideoInfo.Height = 640; //width/height swapped for portrait orientation
                pmodulestreaminfo->VideoInfo.pts = -1;
                pmodulestreaminfo->VideoInfo.dts = -1;
                pmodulestreaminfo->VideoInfo.timebase_num = 1;
                pmodulestreaminfo->VideoInfo.timebase_den = 1000;
                pmodulestreaminfo->BufLen = 640*480 *3 +100;
                pmodulestreaminfo->Buf = (char *)calloc(pmodulestreaminfo->BufLen, sizeof(char));
                pmodulestreaminfo->ActualLen = 0;
                pmodule_info[device_length].ModuleStreamInfo_Out =(pmodulestreaminfo);
            }
            else
            {
                NSLog(@"Device position : front");
                
                //Set the capture resolution
                if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
                {
                    session.sessionPreset = AVCaptureSessionPreset640x480;
                }
                //Create and Configure the Device and Device Input
                AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
                if (!input)
                {
                    // Handle the error appropriately
                }
                if ([session canAddInput:input])
                {
                    [session addInput:input];
                }
                
                //Create and Configure the Data Output
                AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
                
                //Dispatch queue for the output callbacks
                dispatch_queue_t queue = dispatch_queue_create("video_front_queue", NULL);
                [output setSampleBufferDelegate:self queue:queue];    //setSampleBufferDelegate sets the data-output delegate
                dispatch_release(queue);
                
                //Output settings (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange is NV12)
                NSDictionary *setting =[[NSDictionary alloc] initWithObjectsAndKeys:
                                        [NSNumber numberWithInt:640], (id)kCVPixelBufferWidthKey,
                                        [NSNumber numberWithInt:480], (id)kCVPixelBufferHeightKey,
                                        [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],(id)kCVPixelBufferPixelFormatTypeKey,
                                        nil];
                output.videoSettings = setting;
                output.alwaysDiscardsLateVideoFrames = YES;
                output.minFrameDuration = CMTimeMake(1, 25); //cap at 25 fps (minFrameDuration is deprecated; newer SDKs use AVCaptureConnection.videoMinFrameDuration)
                
                if ([session canAddOutput:output])
                {
                    [session addOutput:output];
                }
                
                [setting release];
                
                //get connection
                videoCaptureConnection_front = [output connectionWithMediaType:AVMediaTypeVideo];
                //Set the video orientation
                [videoCaptureConnection_front setVideoOrientation:AVCaptureVideoOrientationPortrait];
                
                //put session
                m_list_capture_session[device_length] = (session);
                
                //put info
                pmodule_info[device_length].Is_ToNext_Module = 1;
                //header
                Module_HeaderInfo_In_Video_Dispose * pheaderinfo_device_video = nil; //reusing the In_Video_Dispose header type here
                pheaderinfo_device_video = new Module_HeaderInfo_In_Video_Dispose();
                pheaderinfo_device_video->stream_id = device_length;
                pmodule_info[device_length].ModuleHeaderInfo.HeaderInfo = pheaderinfo_device_video;
                //streaminfo
                Module_StreamInfo *pmodulestreaminfo = nil;
                pmodulestreaminfo = new Module_StreamInfo();
                pmodulestreaminfo->stream_id = device_length;
                pmodulestreaminfo->CodecType = CodecType_Video;
                pmodulestreaminfo->VideoInfo.PixFormat = LVS_PIX_FMT_NV12;
                pmodulestreaminfo->VideoInfo.Width = 480;  //width/height swapped for portrait orientation
                pmodulestreaminfo->VideoInfo.Height = 640; //width/height swapped for portrait orientation
                pmodulestreaminfo->VideoInfo.pts = -1;
                pmodulestreaminfo->VideoInfo.dts = -1;
                pmodulestreaminfo->VideoInfo.timebase_num = 1;
                pmodulestreaminfo->VideoInfo.timebase_den = 1000;
                pmodulestreaminfo->BufLen = 640*480 *3 +100;
                pmodulestreaminfo->Buf = (char *)calloc(pmodulestreaminfo->BufLen, sizeof(char));
                pmodulestreaminfo->ActualLen = 0;
                pmodule_info[device_length].ModuleStreamInfo_Out =(pmodulestreaminfo);
            }
        }
        if ([device hasMediaType:AVMediaTypeAudio])
        {
            /*
            //Configuring audio parameters through AVCaptureSession is problematic, so AudioQueue is used for audio capture instead
             
            //input
            AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
            if (error)
            {
                NSLog(@"Error getting video input device: %@", error.description);
            }
            
            if ([session canAddInput:audioInput])
            {
                [session addInput:audioInput];
            }
            else
            {
                NSLog(@"Error: %@", error);
            }
        
            //output
            AVCaptureAudioDataOutput * audioOutput = [[AVCaptureAudioDataOutput alloc] init];
            
            //Dispatch queue for the output callbacks
            dispatch_queue_t audioqueue = dispatch_queue_create("audio_queue", DISPATCH_QUEUE_SERIAL);
            [audioOutput setSampleBufferDelegate:self queue:audioqueue];   //setSampleBufferDelegate sets the data-output delegate
            dispatch_release(audioqueue);
            
            //settings
            NSDictionary *setting = [[NSMutableDictionary alloc] initWithObjectsAndKeys:
                                     [NSNumber numberWithInt: kAudioFormatLinearPCM], (id)AVFormatIDKey,
                                     [NSNumber numberWithFloat:44100], (id)AVSampleRateKey,
                                     [NSNumber numberWithInt:2], (id)AVNumberOfChannelsKey,
                                     [NSNumber numberWithInt:128000], (id)AVEncoderBitRateKey,
                                     [NSNumber numberWithInt:16], (id)AVLinearPCMBitDepthKey,
                                     nil];
            
            //[audioOutput setAudioSettings:setting]; //neither this nor the property below works reliably
            //audioOutput.audioSettings = setting;
            
            
            if ([session canAddOutput:audioOutput])
            {
                [session addOutput:audioOutput];
            }
            else
            {
                NSLog(@"error: %@", error);
            }
            
            [setting release];
            
            //get connection
            audioCaptureConnection= [audioOutput connectionWithMediaType:AVMediaTypeAudio];
            
            //put session
            m_list_capture_session[device_length] = (session);
            */
            
            //Configure the audio format
            m_aqData.mDataFormat.mFormatID         = kAudioFormatLinearPCM;
            m_aqData.mDataFormat.mSampleRate       = 44100.0;
            m_aqData.mDataFormat.mChannelsPerFrame = 2;
            m_aqData.mDataFormat.mBitsPerChannel   = 16;
            m_aqData.mDataFormat.mBytesPerPacket   = m_aqData.mDataFormat.mBytesPerFrame =  m_aqData.mDataFormat.mChannelsPerFrame * sizeof (SInt16);
            m_aqData.mDataFormat.mFramesPerPacket  = 1;
            //do not set kLinearPCMFormatFlagIsBigEndian here, or the PCM comes out big-endian and the encoded data is broken
            m_aqData.mDataFormat.mFormatFlags = /*kLinearPCMFormatFlagIsBigEndian | */kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
            
            //Create the capture queue and set its callback: lvsAudioQueueInputCallback
            AudioQueueNewInput (&m_aqData.mDataFormat,lvsAudioQueueInputCallback,&m_aqData,NULL,kCFRunLoopCommonModes,0,&m_aqData.mQueue);
            
            UInt32 dataFormatSize = sizeof (m_aqData.mDataFormat);
            AudioQueueGetProperty(m_aqData.mQueue,kAudioQueueProperty_StreamDescription,&m_aqData.mDataFormat,&dataFormatSize);
            
            //Compute the audio buffer size
            m_aqData.audio_seconde_time = 0.4;  //0.4 seconds
            DeriveBufferSize(m_aqData.mQueue,&m_aqData.mDataFormat,m_aqData.audio_seconde_time,&m_aqData.bufferByteSize);
            
            //Prepare a set of audio queue buffers
            for (int i = 0; i < kNumberBuffers; ++i)
            {
                AudioQueueAllocateBuffer(m_aqData.mQueue,m_aqData.bufferByteSize,&m_aqData.mBuffers[i]);
                //enqueue the empty buffer so it can be filled
                AudioQueueEnqueueBuffer (m_aqData.mQueue,m_aqData.mBuffers[i], 0, NULL);
            }
            
            //put info
            pmodule_info[device_length].Is_ToNext_Module = 1;
            //header
            Module_HeaderInfo_In_Audio_Dispose * pheaderinfo_device_audio = nil; //reusing the In_Audio_Dispose header type here
            pheaderinfo_device_audio = new Module_HeaderInfo_In_Audio_Dispose();
            pheaderinfo_device_audio->stream_id = device_length;
            pmodule_info[device_length].ModuleHeaderInfo.HeaderInfo = pheaderinfo_device_audio;
            //streaminfo
            Module_StreamInfo *pmodulestreaminfo = nil;
            pmodulestreaminfo = new Module_StreamInfo();
            pmodulestreaminfo->stream_id = device_length;
            pmodulestreaminfo->CodecType = CodecType_Audio;
            pmodulestreaminfo->AudioInfo.Channel = 2;
            pmodulestreaminfo->AudioInfo.SampleFormat = LVS_SAMPLE_FMT_S16;
            pmodulestreaminfo->AudioInfo.SampleRate = SampleRate_44100;
            pmodulestreaminfo->AudioInfo.pts = -1;
            pmodulestreaminfo->AudioInfo.dts = -1;
            pmodulestreaminfo->AudioInfo.timebase_num = 1;
            pmodulestreaminfo->AudioInfo.timebase_den = 1000;
            pmodulestreaminfo->BufLen = 100 * 1024 * 2 * 2;
            pmodulestreaminfo->Buf = (char *)calloc(pmodulestreaminfo->BufLen, sizeof(char));
            pmodulestreaminfo->ActualLen = 0;
            pmodule_info[device_length].ModuleStreamInfo_Out =(pmodulestreaminfo);
            
            //hand this struct to the data callback so it can write captured data out
            m_aqData.ModuleStreamInfo_Out_Audio_data = pmodulestreaminfo;
        }
        
        //next device stream_id
        device_length ++;
    }
    
    *CurrentLength = device_length;
    *ModuleInfo_Current = pmodule_info;
    
    m_device_module_info_collection = pmodule_info;
    m_device_module_length_collection = device_length;
    
    return 1;
}

- (void)device_uinit: (Module_Info *) ModuleInfo_Current
{
    if (ModuleInfo_Current !=nil)
    {
        for (int i = 0; i <m_stream_num; i++)
        {
            //header
            if (ModuleInfo_Current[i].ModuleHeaderInfo.HeaderInfo != nil)
            {
                delete (ModuleInfo_Current[i].ModuleHeaderInfo.HeaderInfo);
                ModuleInfo_Current[i].ModuleHeaderInfo.HeaderInfo = nil;
            }
            //streaminfo_in
            if(ModuleInfo_Current[i].ModuleStreamInfo_In != nil)
            {
                if(ModuleInfo_Current[i].ModuleStreamInfo_In->Buf != nil)
                {
                    free(ModuleInfo_Current[i].ModuleStreamInfo_In->Buf);
                    ModuleInfo_Current[i].ModuleStreamInfo_In->Buf = nil;
                    ModuleInfo_Current[i].ModuleStreamInfo_In->ActualLen = 0;
                    ModuleInfo_Current[i].ModuleStreamInfo_In->BufLen = 0;
                }
                delete (ModuleInfo_Current[i].ModuleStreamInfo_In);
                ModuleInfo_Current[i].ModuleStreamInfo_In = nil;
            }
            //streaminfo_out
            if(ModuleInfo_Current[i].ModuleStreamInfo_Out != nil)
            {
                if(ModuleInfo_Current[i].ModuleStreamInfo_Out->Buf != nil)
                {
                    free(ModuleInfo_Current[i].ModuleStreamInfo_Out->Buf);
                    ModuleInfo_Current[i].ModuleStreamInfo_Out->Buf = nil;
                    ModuleInfo_Current[i].ModuleStreamInfo_Out->ActualLen = 0;
                    ModuleInfo_Current[i].ModuleStreamInfo_Out->BufLen = 0;
                }
                delete (ModuleInfo_Current[i].ModuleStreamInfo_Out);
                ModuleInfo_Current[i].ModuleStreamInfo_Out = nil;
            }
        }
        free(ModuleInfo_Current);
        ModuleInfo_Current = nil;
    }
    
    if(m_list_capture_session.size() >0)
    {
        m_list_capture_session.clear();
    }
    
    return;
}
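Note that device_uinit only frees the module structures; the audio queue created in device_init is never stopped or disposed, and the capture sessions are never stopped. A minimal teardown sketch (my addition, not in the original post) that would pair with the AudioQueueNewInput call above:

//Stop and dispose of the audio queue; passing true stops immediately instead of
//draining queued buffers. Disposing the queue also frees its allocated buffers.
if (m_aqData.mQueue != NULL)
{
    AudioQueueStop(m_aqData.mQueue, true);
    AudioQueueDispose(m_aqData.mQueue, true);
    m_aqData.mQueue = NULL;
}
//Each AVCaptureSession should likewise get [session stopRunning] before the map is cleared.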

- (int)device_write: (Module_Info *) ModuleInfo_Current andUpperLength: (int) UpperLength
andModuleInfo_Next_video: (Module_Info *) ModuleInfo_Next_video andNextLength_video: (int) NextLength_video
andModuleInfo_Next_audio: (Module_Info *) ModuleInfo_Next_audio andNextLength_audio: (int)NextLength_audio
{
    return 1;
}

@end

//
//  Lvs_Ios_Device_Preview.h
//  LvsIos
//
//  Created by mx on 16/9/5.
//  Copyright © 2016 lvs.zwg All rights reserved.
//

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

@interface Lvs_Ios_Device_Preview : NSObject

//Show the video preview
- (void) ShowPreview: (AVCaptureSession *) session andImageView: (void *) imageview;

@end

//
//  Lvs_Ios_Device_Preview.m
//  LvsIos
//
//  Created by mx on 16/9/5.
//  Copyright © 2016 lvs. All rights reserved.
//

#import "Lvs_Ios_Device_Preview.h"

@implementation Lvs_Ios_Device_Preview


- (void) ShowPreview: (AVCaptureSession *) session andImageView: (void *) imageview
{
    //Cast the incoming window handle back to the UIView it points at
    UIView * view = (UIView *)imageview;
    
    // Attach the preview layer to the view
    AVCaptureVideoPreviewLayer *previewLayer = nil;
    previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
    previewLayer.frame = [[view layer] bounds];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [[previewLayer connection] setVideoOrientation:AVCaptureVideoOrientationPortrait];
    [view.layer addSublayer:previewLayer];
}

@end
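ShowPreview touches UIKit, so it must be called on the main thread. A call site from inside a view controller might look like this (a sketch; preview and session are assumed to exist as in the driver sketch at the top):

dispatch_async(dispatch_get_main_queue(), ^{
    [preview ShowPreview:session andImageView:(void *)self.view];
});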

Please point out any mistakes.

For discussion, join QQ group: 62054820
QQ: 379969650.


