
In my iPhone application I want to record sounds produced internally by my own application, as opposed to external sounds captured by the mic. Another way of saying this is that I want to record sound straight off the sound card as it's being played. From there I want to save the newly recorded sound file to a specified, local URL. A similar question was posted here. I've read a few tutorials and have some code, but there are a few things I need help with. Here is my code:

header file

OSStatus status;

implementation file

#define kOutputBus 0
#define kInputBus 1

static AudioComponentInstance audioUnit;

static OSStatus recordingCallback(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData) {

    // TODO: Use inRefCon to access our interface object to do stuff
    // Then, use inNumberFrames to figure out how much data is available, and make
    // that much space available in buffers in an AudioBufferList.

    AudioBufferList *bufferList; // <- Fill this up with buffers (you will want to malloc it, as it's a dynamic-length list)

    // Then:
    // Obtain recorded samples

    OSStatus status;

    status = AudioUnitRender([audioInterface audioUnit],
                             ioActionFlags,
                             inTimeStamp,
                             inBusNumber,
                             inNumberFrames,
                             bufferList);
    checkStatus(status);

    // Now, we have the samples we just read sitting in buffers in bufferList
    DoStuffWithTheRecordedAudio(bufferList);
    return noErr;
}

static OSStatus playbackCallback(void *inRefCon,
                                 AudioUnitRenderActionFlags *ioActionFlags,
                                 const AudioTimeStamp *inTimeStamp,
                                 UInt32 inBusNumber,
                                 UInt32 inNumberFrames,
                                 AudioBufferList *ioData) {
    // Notes: ioData contains buffers (may be more than one!)
    // Fill them up as much as you can. Remember to set the size value in each buffer to match how
    // much data is in the buffer.
    return noErr;
}


void initializeInternalAudioRecorder() {
    AudioStreamBasicDescription audioFormat; // currently declared as a local variable; try making it a global variable if this doesn't work
    OSStatus status;


    // Describe audio component
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Get component
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);

    // Get audio units
    status = AudioComponentInstanceNew(inputComponent, &audioUnit);
    checkStatus(status);

    // Enable IO for recording
    UInt32 flag = 1;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  kInputBus,
                                  &flag,
                                  sizeof(flag));
    checkStatus(status);

    // Enable IO for playback
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Output,
                                  kOutputBus,
                                  &flag,
                                  sizeof(flag));
    checkStatus(status);

    // Describe format
    audioFormat.mSampleRate         = 44100.00;
    audioFormat.mFormatID           = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags        = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket    = 1;
    audioFormat.mChannelsPerFrame   = 1;
    audioFormat.mBitsPerChannel     = 16;
    audioFormat.mBytesPerPacket     = 2;
    audioFormat.mBytesPerFrame      = 2;

    // Apply format
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));
    checkStatus(status);
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  kOutputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));
    checkStatus(status);


    // Set input callback
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = recordingCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_SetInputCallback,
                                  kAudioUnitScope_Global,
                                  kInputBus,
                                  &callbackStruct,
                                  sizeof(callbackStruct));
    checkStatus(status);

    // Set output callback
    callbackStruct.inputProc = playbackCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_SetRenderCallback,
                                  kAudioUnitScope_Global,
                                  kOutputBus,
                                  &callbackStruct,
                                  sizeof(callbackStruct));
    checkStatus(status);

    // Disable buffer allocation for the recorder (optional - do this if we want to pass in our own)
    flag = 0;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_ShouldAllocateBuffer,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &flag,
                                  sizeof(flag));

    // TODO: Allocate our own buffers if we want

    // Initialise
    status = AudioUnitInitialize(audioUnit);
    checkStatus(status);
}

-(void)startInternalRecorder {
    OSStatus status = AudioOutputUnitStart(audioUnit);
    checkStatus(status);
}

-(void)stopInternalRecorder {
    OSStatus status = AudioOutputUnitStop(audioUnit);
    checkStatus(status);
    AudioComponentInstanceDispose(audioUnit);
}

Now I'm getting the following errors in my implementation:

  • 'audioInterface' undeclared
  • 'self' undeclared

So my questions are how to fix those errors, and how to specify a URL to save the recorded sound file to.

Here is where I got my code: http://atastypixel.com/blog/using-remoteio-audio-unit/comment-page-6/#comment-6734

I know a lot of my problems have to do with my lack of understanding of Audio Units, but I would be very grateful to anyone who can help out. Thank you.

luca590
  • Based on a comment of yours below, it sounds like you want to record audio and save it to a local file on the device. It's natural to phrase it this way since that's how the API declares it, but in the case of your question it might be a bit misleading, as it sort of implies you want to save it on a server somewhere. If you do just want to save the file locally, `AVAudioRecorder` is a much simpler way to do it. – MusiGenesis Aug 15 '13 at 21:12
  • I just want to save the file locally, but the reason I can't use AVAudioRecorder is that it can only record external sounds (e.g. through the microphone). I want to record internal sounds right off the sound card on the iPhone. For example, if someone is listening to something with headphones in, I want the ability to record whatever the app is playing to their headphones without them having to take the headphones off. – luca590 Aug 16 '13 at 00:28
  • Sorry for the confusion – luca590 Aug 16 '13 at 03:44
  • My bad, I didn't read your question carefully enough. – MusiGenesis Aug 16 '13 at 13:24

1 Answer


Hmm, the code you have copy/pasted seems woefully incomplete. I'd be careful with it. :) Also, it seems that you have copy/pasted it without keeping the structure it is supposed to have.

Anyways, audioFormat should be declared as a local variable; its type is AudioStreamBasicDescription. The top part of the code (i.e., everything above the recordingCallback function declaration) is actually meant to be an initialization function, though the original author wasn't so explicit about this. So that code needs to be wrapped in something like this:

void initializeMyStuff() {
  // Describe audio component
  AudioComponentDescription desc;
  desc.componentType = kAudioUnitType_Output;

  ... lots more code ...

  // Initialise
  status = AudioUnitInitialize(audioUnit);
  checkStatus(status);
} // <-- you were missing this closing brace, which caused the compilation errors

static OSStatus recordingCallback(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp, ... etc

... and call this when you start the audio portion of your app. If you wrap the first part of this code in a proper C function, the nested-function error will disappear. As for the undeclared functions, you should either move recordingCallback and playbackCallback above initializeMyStuff, or declare them at the top of the file.

You should also get rid of -(void)testMethod and just call the C function initializeMyStuff() instead. Does this make sense?
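
For reference, declaring them at the top of the implementation file could look like the following sketch (the signatures are simply the ones your callbacks already use):

// Forward declarations so initializeMyStuff() can refer to the callbacks
// even though their definitions appear later in the file.
static OSStatus recordingCallback(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData);

static OSStatus playbackCallback(void *inRefCon,
                                 AudioUnitRenderActionFlags *ioActionFlags,
                                 const AudioTimeStamp *inTimeStamp,
                                 UInt32 inBusNumber,
                                 UInt32 inNumberFrames,
                                 AudioBufferList *ioData);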

Nik Reiman
  • Yeah, it does make sense. So then how do I save the audio file I create by recording to a URL that I specify on disk? I can create the URL, and with AVAudioRecorder I simply initialize the recorder with the URL I want to save the file to, but it doesn't look like I can do that here... – luca590 Aug 12 '13 at 20:12
  • So is it still OK if I declare `OSStatus status; AudioComponentInstance audioUnit;` in the header file, or do I need to declare them at the top of initializeMyStuff()? – luca590 Aug 12 '13 at 20:50
  • The `audioUnit` might need to be global (in which case it should be declared `extern` in the header and then actually defined at the top of the .c file). – Nik Reiman Aug 13 '13 at 07:26
  • OK, I have updated my code in my question. I declared audioUnit extern in my header file, but I don't understand how to actually declare it at the top of the .c file. I'm actually not using a C file; all of my program is in Objective-C with the exception of this part. – luca590 Aug 13 '13 at 16:12
  • In the header file, you have `extern AudioComponentInstance audioUnit;`, and at the top of the implementation file (I guess it doesn't have to be pure C), you have `AudioComponentInstance audioUnit;`. If that doesn't work, try removing the declaration from the header file and simply having `static AudioComponentInstance audioUnit;` at the top of the implementation file. – Nik Reiman Aug 13 '13 at 18:03
  • That definitely brought the errors down, but I still have 2 more. – luca590 Aug 16 '13 at 00:30
  • I updated my question. Once those two errors are fixed, the only thing left that I don't understand is how to save the recording locally on the device to the URL I specify. – luca590 Aug 16 '13 at 00:33
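
On that remaining point (saving the recorded audio to a local URL), which the answer above doesn't address, here is a minimal sketch of one possible approach, assuming the ExtAudioFile API from AudioToolbox: open a file at the URL you choose and stream the buffers rendered in recordingCallback into it. The names recordingFileRef, startWritingToURL, and stopWritingToFile are made up for illustration, and the AudioStreamBasicDescription passed in is assumed to be the same one used for the stream format above.

#include <AudioToolbox/AudioToolbox.h>

static ExtAudioFileRef recordingFileRef = NULL; // hypothetical handle for the output file

// Call once after initializeInternalAudioRecorder(), from a normal (non-render) thread.
void startWritingToURL(CFURLRef fileURL, const AudioStreamBasicDescription *format) {
    OSStatus status = ExtAudioFileCreateWithURL(fileURL,
                                                kAudioFileCAFType,        // CAF can hold linear PCM directly
                                                format,
                                                NULL,
                                                kAudioFileFlags_EraseFile,
                                                &recordingFileRef);
    checkStatus(status);

    // Prime the async writer before the first call from the render callback.
    status = ExtAudioFileWriteAsync(recordingFileRef, 0, NULL);
    checkStatus(status);
}

// Inside recordingCallback, after AudioUnitRender() has filled bufferList:
//     if (recordingFileRef != NULL) {
//         ExtAudioFileWriteAsync(recordingFileRef, inNumberFrames, bufferList);
//     }

// Call from stopInternalRecorder (before disposing the audio unit) to flush and close the file.
void stopWritingToFile(void) {
    if (recordingFileRef != NULL) {
        ExtAudioFileDispose(recordingFileRef);
        recordingFileRef = NULL;
    }
}

The CFURLRef can point at any location the app is allowed to write to, for example a file inside its Documents directory.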