iOS Audio Capture: Converting an AudioBufferList to a CMSampleBufferRef

The snippets below capture microphone audio with a RemoteIO Audio Unit and wrap each rendered AudioBufferList in a CMSampleBufferRef.

// The RemoteIO audio component and its instance, used for microphone capture.
@property (nonatomic, assign) AudioComponent         component;
@property (nonatomic, assign) AudioComponentInstance componentInstance;


static OSStatus handleInputBuffer(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData) {
    @autoreleasepool {
        <ClassName> *ref = (__bridge <ClassName> *)inRefCon;

        AudioStreamBasicDescription asbd = [ref asbd];

        CMSampleBufferRef buff = NULL;
        CMFormatDescriptionRef format = NULL;

        // Describe the PCM format so Core Media knows how to interpret the samples.
        OSStatus status = CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &asbd, 0, NULL, 0, NULL, NULL, &format);
        if (status != noErr) {
            return status;
        }

        // One timing entry: each sample lasts 1/48000 s, matching the ASBD's sample
        // rate. The presentation timestamp is fixed at zero here.
        CMSampleTimingInfo timing = { CMTimeMake(1, 48000), kCMTimeZero, kCMTimeInvalid };

        // Create an empty sample buffer; its data buffer is attached below.
        // The sample buffer retains the format description, so release our reference.
        status = CMSampleBufferCreate(kCFAllocatorDefault, NULL, false, NULL, NULL, format, (CMItemCount)inNumberFrames, 1, &timing, 0, NULL, &buff);
        CFRelease(format);
        if (status != noErr) { // creation failed
            return status;
        }

        // mData == NULL lets AudioUnitRender supply its own buffer for the rendered audio.
        AudioBuffer buffer;
        buffer.mData = NULL;
        buffer.mDataByteSize = 0;
        buffer.mNumberChannels = 2;

        AudioBufferList buffers;
        buffers.mNumberBuffers = 1;
        buffers.mBuffers[0] = buffer;

        // Pull the captured frames from the input bus into `buffers`.
        status = AudioUnitRender(ref.componentInstance,
                                 ioActionFlags,
                                 inTimeStamp,
                                 inBusNumber,
                                 inNumberFrames,
                                 &buffers);
        if (status != noErr) {
            CFRelease(buff);
            return status;
        }

        // Copy the rendered AudioBufferList into the sample buffer's block buffer.
        status = CMSampleBufferSetDataBufferFromAudioBufferList(buff, kCFAllocatorDefault, kCFAllocatorDefault, 0, &buffers);
        if (status == noErr) {
            // buff now holds one render cycle of PCM; hand it off here
            // (to an encoder, a writer, a delegate, ...).
        }
        CFRelease(buff);

        return status;
    }
}
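
The original post never shows where `buff` goes; the success branch above is left empty. As a hedged sketch only: one common consumer is an AVAssetWriterInput. The `audioInput` property and the surrounding writer setup are assumptions, not part of the original code.

// Sketch: one possible consumer for the converted sample buffer (assumes a
// configured AVAssetWriterInput property named `audioInput`; hypothetical).
- (void)appendAudioSampleBuffer:(CMSampleBufferRef)buff
{
    if (self.audioInput.isReadyForMoreMediaData) {
        if (![self.audioInput appendSampleBuffer:buff]) {
            NSLog(@"Failed to append audio sample buffer.");
        }
    }
}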


- (AudioStreamBasicDescription)asbd
{
    // Interleaved 16-bit signed-integer PCM, stereo, 48 kHz.
    AudioStreamBasicDescription desc = {0};
    desc.mSampleRate       = 48000;
    desc.mFormatID         = kAudioFormatLinearPCM;
    desc.mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    desc.mChannelsPerFrame = 2;
    desc.mFramesPerPacket  = 1;
    desc.mBitsPerChannel   = 16;
    desc.mBytesPerFrame    = desc.mBitsPerChannel / 8 * desc.mChannelsPerFrame; // 2 bytes * 2 channels = 4
    desc.mBytesPerPacket   = desc.mBytesPerFrame * desc.mFramesPerPacket;       // 4
    return desc;
}
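
As a quick sanity check (not in the original post), the same format can be expressed with AVAudioFormat and compared against the ASBD above; the snippet below is a sketch assuming AVFoundation is linked.

// Sketch: AVAudioFormat equivalent of the ASBD returned by -asbd.
AVAudioFormat *fmt = [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatInt16
                                                      sampleRate:48000
                                                        channels:2
                                                     interleaved:YES];
const AudioStreamBasicDescription *check = fmt.streamDescription; // mBytesPerFrame == 4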



- (void)initMicrophoneSource
{
    AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
    if (authStatus == AVAuthorizationStatusAuthorized) {
        AVAudioSession *session = [AVAudioSession sharedInstance];
        NSError *error = nil;

        // Recording category: route to the speaker, mix with other audio, allow Bluetooth input.
        [session setCategory:AVAudioSessionCategoryPlayAndRecord
                 withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionMixWithOthers | AVAudioSessionCategoryOptionAllowBluetooth
                       error:&error];

        if (![session setActive:YES error:&error]) {
            NSLog(@"Failed to set audio session active.");
            return;
        }

        // Describe and look up the RemoteIO audio unit.
        AudioComponentDescription acd;
        acd.componentType = kAudioUnitType_Output;
        acd.componentSubType = kAudioUnitSubType_RemoteIO;
        acd.componentManufacturer = kAudioUnitManufacturer_Apple;
        acd.componentFlags = 0;
        acd.componentFlagsMask = 0;

        self.component = AudioComponentFindNext(NULL, &acd);

        OSStatus status = AudioComponentInstanceNew(self.component, &_componentInstance);
        if (noErr != status) {
            NSLog(@"Failed to create an audio component instance.");
            return;
        }

        // Enable input on bus 1, the microphone side of RemoteIO.
        UInt32 flagOne = 1;
        AudioUnitSetProperty(self.componentInstance, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &flagOne, sizeof(flagOne));

        // Set the PCM format on the output scope of bus 1, where captured audio leaves the unit.
        AudioStreamBasicDescription desc = [self asbd];
        AudioUnitSetProperty(self.componentInstance, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &desc, sizeof(desc));

        // Install the input callback that wraps each rendered buffer in a CMSampleBufferRef.
        AURenderCallbackStruct cb;
        cb.inputProcRefCon = (__bridge void *)(self);
        cb.inputProc = handleInputBuffer;
        AudioUnitSetProperty(self.componentInstance, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, 1, &cb, sizeof(cb));

        status = AudioUnitInitialize(self.componentInstance);
        if (noErr != status) {
            NSLog(@"Failed to initialize the audio unit.");
            return;
        }

        AudioOutputUnitStart(self.componentInstance);
    }
}
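
The post only covers startup. A plausible teardown counterpart, sketched under the assumption that the same `componentInstance` property is used, might look like this:

// Sketch: stop and dispose of the RemoteIO unit (counterpart to initMicrophoneSource).
- (void)stopMicrophoneSource
{
    AudioOutputUnitStop(self.componentInstance);
    AudioUnitUninitialize(self.componentInstance);
    AudioComponentInstanceDispose(self.componentInstance);
    self.componentInstance = NULL;
    [[AVAudioSession sharedInstance] setActive:NO error:nil];
}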



Reposted from blog.csdn.net/wnnvv/article/details/50865201