How to encode AAC data from PCM data in the iPhone SDK? (iPhone dev/Audio)

workingman317 · Jan 13, 2012 · Viewed 6.9k times

I guess "AudioConverterFillComplexBuffer" is the solution.

But I don't know this way is right.

+1. AudioUnit

initialize AudioUnit : "recordingCallback" is callback method. the output format is PCM. record to file.( I played the recorded file).

+2. AudioConverter

add "AudioConverterFillComplexBuffer" I don't know about it well. added,

+3. problem

"audioConverterComplexInputDataProc" method called only one time.

How can I use AudioConverter api?

Attached my code

#import "AACAudioRecorder.h"
#define kOutputBus 0
#define kInputBus 1
@implementation AACAudioRecorder

This is AudioConverterFillComplexBuffer's callback method.

static OSStatus audioConverterComplexInputDataProc(  AudioConverterRef               inAudioConverter,
                                      UInt32*                         ioNumberDataPackets,
                                      AudioBufferList*                ioData,
                                      AudioStreamPacketDescription**  outDataPacketDescription,
                                      void*                           inUserData){
    ioData = (AudioBufferList*)inUserData;
    return 0;
}

This is the AudioUnit's recording callback.

static OSStatus recordingCallback(void *inRefCon, 
                                        AudioUnitRenderActionFlags *ioActionFlags, 
                                        const AudioTimeStamp *inTimeStamp, 
                                        UInt32 inBusNumber, 
                                        UInt32 inNumberFrames, 
                                        AudioBufferList *ioData) {
   @autoreleasepool {

       AudioBufferList *bufferList;
       
       AACAudioRecorder *THIS = (AACAudioRecorder *)inRefCon;
       OSStatus err = AudioUnitRender(THIS-> m_audioUnit , 
                                             ioActionFlags, 
                                             inTimeStamp, 1, inNumberFrames, bufferList);    

       if (err) { NSLog(@"%s AudioUnitRender error %d\n",__FUNCTION__, (int)err); return err; }
       
       NSString *recordFile = 
                       [NSTemporaryDirectory() stringByAppendingPathComponent: @"auioBuffer.pcm"];
       FILE *fp;
       fp = fopen([recordFile UTF8String], "a+");
       fwrite(bufferList->mBuffers[0].mData, sizeof(Byte),
              bufferList->mBuffers[0].mDataByteSize, fp);
       fclose(fp);    

       [THIS convert:bufferList ioOutputDataPacketSize:&inNumberFrames];     

   if (err) {NSLog(@"%s : AudioFormat Convert error %d\n",__FUNCTION__, (int)err);  }
    }
    return noErr;
}

Status check method

static void checkStatus(OSStatus status, const char* str){
    if (status != noErr) {
        NSLog(@"%s %s error : %ld ",__FUNCTION__, str, status);
    }
}

Convert method: PCM -> AAC

- (void)convert:(AudioBufferList*)input_bufferList ioOutputDataPacketSize:(UInt32*)packetSizeRef 
{
    UInt32 size = sizeof(UInt32);
    UInt32 maxOutputSize;
    AudioConverterGetProperty(m_audioConverterRef, 
                              kAudioConverterPropertyMaximumOutputPacketSize, 
                              &size, 
                              &maxOutputSize);
    
    AudioBufferList *output_bufferList = (AudioBufferList *)malloc(sizeof(AudioBufferList));

    output_bufferList->mNumberBuffers               = 1;
    output_bufferList->mBuffers[0].mNumberChannels  = 1;
    output_bufferList->mBuffers[0].mDataByteSize    = *packetSizeRef * 2;
    output_bufferList->mBuffers[0].mData  = (AudioUnitSampleType *)malloc(*packetSizeRef * 2);

    OSStatus        err;
    err = AudioConverterFillComplexBuffer(
                                          m_audioConverterRef,
                                          audioConverterComplexInputDataProc,
                                          input_bufferList,
                                          packetSizeRef,
                                          output_bufferList,
                                          NULL
                                          );


    if (err) {NSLog(@"%s : AudioFormat Convert error %d\n",__FUNCTION__, (int)err);  }
}

This is the initialize method.

- (void)initialize
{ 
    // ...
    
    
    OSStatus status;
    
    // Describe audio component
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    
    // Get component
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);
    
    // Get audio units
    status = AudioComponentInstanceNew(inputComponent, &m_audioUnit);
    checkStatus(status,"AudioComponentInstanceNew");
    
    // Enable IO for recording
    UInt32 flag = 1;
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioOutputUnitProperty_EnableIO, 
                                  kAudioUnitScope_Input, 
                                  kInputBus,
                                  &flag, 
                                  sizeof(flag));
    checkStatus(status,"Enable IO for recording");
    
    // Enable IO for playback
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioOutputUnitProperty_EnableIO, 
                                  kAudioUnitScope_Output, 
                                  kOutputBus,
                                  &flag, 
                                  sizeof(flag));
    checkStatus(status,"Enable IO for playback");
    
    // Describe format
    AudioStreamBasicDescription audioFormat;
    audioFormat.mSampleRate   = 44100.00;
    audioFormat.mFormatID   = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags  = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel  = 16;
    audioFormat.mBytesPerPacket  = 2;
    audioFormat.mBytesPerFrame  = 2;
    
    // Apply format
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioUnitProperty_StreamFormat, 
                                  kAudioUnitScope_Output, 
                                  kInputBus, 
                                  &audioFormat, 
                                  sizeof(audioFormat));
    checkStatus(status,"Apply format1");
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioUnitProperty_StreamFormat, 
                                  kAudioUnitScope_Input, 
                                  kOutputBus, 
                                  &audioFormat, 
                                  sizeof(audioFormat));
    checkStatus(status,"Apply format2");
    
    
    // Set input callback
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = recordingCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioOutputUnitProperty_SetInputCallback, 
                                  kAudioUnitScope_Global, 
                                  kInputBus, 
                                  &callbackStruct, 
                                  sizeof(callbackStruct));
    checkStatus(status,"Set input callback");

    // Initialise
    status = AudioUnitInitialize(m_audioUnit);
    checkStatus(status,"AudioUnitInitialize");

    // Set ASBD For converting Output Stream
    
    AudioStreamBasicDescription outputFormat;
    memset(&outputFormat, 0, sizeof(outputFormat));
    outputFormat.mSampleRate  = 44100.00;
    outputFormat.mFormatID   = kAudioFormatMPEG4AAC;
    outputFormat.mFormatFlags       = kMPEG4Object_AAC_Main; 
    outputFormat.mFramesPerPacket = 1024;
    outputFormat.mChannelsPerFrame = 1;
    outputFormat.mBitsPerChannel = 0;
    outputFormat.mBytesPerFrame = 0;
    outputFormat.mBytesPerPacket = 0;

    //Create An Audio Converter
    status = AudioConverterNew( &audioFormat, &outputFormat, &m_audioConverterRef );
    checkStatus(status,"Create An Audio Converter");
    if(m_audioConverterRef) NSLog(@"m_audioConverterRef is created");

}

AudioOutputUnitStart

- (void)StartRecord
{
    OSStatus status = AudioOutputUnitStart(m_audioUnit);
    checkStatus(status,"AudioOutputUnitStart");
}

AudioOutputUnitStop

- (void)StopRecord
{
    OSStatus status = AudioOutputUnitStop(m_audioUnit);
    checkStatus(status,"AudioOutputUnitStop");
}

finish

- (void)finish
{
    AudioUnitUninitialize(m_audioUnit);
}


@end

Answer

nevyn · May 16, 2015

It took me a long time to understand AudioConverterFillComplexBuffer, and especially how to use it to convert audio in real-time. I've posted my approach here: How do I use CoreAudio's AudioConverter to encode AAC in real-time?
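
For reference, here is a minimal sketch of how the pieces usually fit together (this is not the code from the linked post). The crucial details missing from the question's code are that the input data proc must set *ioNumberDataPackets to the number of PCM packets it actually supplies, and must return a non-noErr status once it has nothing left; otherwise the converter cannot tell how much input it received. The PCMFeed struct, kNoMoreDataErr value, and encodeBuffer function below are illustrative names, not part of any Core Audio API.

#import <AudioToolbox/AudioToolbox.h>

// State handed to the input data proc. PCMFeed is an illustrative name.
typedef struct {
    AudioBufferList *pcm;              // buffer filled by the render callback
    UInt32           packetsAvailable; // PCM packets (= frames here) not yet handed over
} PCMFeed;

// Any non-noErr value works; it just tells the converter to stop asking for input.
static const OSStatus kNoMoreDataErr = 1;

static OSStatus pcmInputDataProc(AudioConverterRef              inConverter,
                                 UInt32                        *ioNumberDataPackets,
                                 AudioBufferList               *ioData,
                                 AudioStreamPacketDescription **outPacketDesc,
                                 void                          *inUserData)
{
    PCMFeed *feed = (PCMFeed *)inUserData;

    if (feed->packetsAvailable == 0) {
        // Nothing left: report zero packets and return an error so
        // AudioConverterFillComplexBuffer returns with whatever it produced.
        *ioNumberDataPackets = 0;
        return kNoMoreDataErr;
    }

    // Point the converter at our PCM and say how many packets it holds.
    ioData->mNumberBuffers = 1;
    ioData->mBuffers[0]    = feed->pcm->mBuffers[0];
    *ioNumberDataPackets   = feed->packetsAvailable;
    feed->packetsAvailable = 0;
    return noErr;
}

// Convert one render callback's worth of PCM into at most one AAC packet.
static void encodeBuffer(AudioConverterRef converter, AudioBufferList *pcm, UInt32 pcmPackets)
{
    UInt32 maxPacketSize = 0;
    UInt32 propSize = sizeof(maxPacketSize);
    AudioConverterGetProperty(converter,
                              kAudioConverterPropertyMaximumOutputPacketSize,
                              &propSize, &maxPacketSize);

    PCMFeed feed = { pcm, pcmPackets };

    void *outBytes = malloc(maxPacketSize);
    AudioBufferList outList;
    outList.mNumberBuffers              = 1;
    outList.mBuffers[0].mNumberChannels = 1;
    outList.mBuffers[0].mDataByteSize   = maxPacketSize;
    outList.mBuffers[0].mData           = outBytes;

    AudioStreamPacketDescription outDesc = {0};
    UInt32 outPackets = 1;  // ask for one AAC packet (consumes 1024 source frames)

    OSStatus err = AudioConverterFillComplexBuffer(converter,
                                                   pcmInputDataProc,
                                                   &feed,
                                                   &outPackets,
                                                   &outList,
                                                   &outDesc);

    if ((err == noErr || err == kNoMoreDataErr) && outPackets > 0) {
        // outList.mBuffers[0].mData now holds outDesc.mDataByteSize bytes of
        // raw AAC; prepend an ADTS header or hand it to AudioFileWritePackets.
    }
    free(outBytes);
}

Because one AAC packet consumes 1024 PCM frames, a single render callback may not deliver enough data for a full packet; in that case outPackets comes back as 0. In practice you either arrange for each callback to deliver 1024 frames, or accumulate PCM in a ring buffer and run the converter from a separate thread, which is roughly the approach the linked answer describes.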