
I have reduced this question quite a bit and am hoping for some help.

Basically, this class has two methods: one to start recording audio (-recordMode) and one to play it back (-playMode). I currently have this class in a project with a single view controller that has two buttons (rec, play) calling the corresponding methods. There are no other variables; the class is self-contained.

However, it will not record or play anything, and I cannot figure out why. When I try to play the file, I get a file size of 0 and an error, because of course you can't init an AVAudioPlayer with a nil reference. What I don't understand is why the file is empty, or why self.outputPath is nil.

.h file:

#import <AVFoundation/AVFoundation.h>

@interface MicCommunicator : NSObject<AVCaptureAudioDataOutputSampleBufferDelegate>

@property(nonatomic,retain) NSURL *outputPath;
@property(nonatomic,retain) AVCaptureSession * captureSession;
@property(nonatomic,retain) AVCaptureAudioDataOutput * output;

-(void)beginStreaming;
-(void)playMode;
-(void)recordMode;

@end

.m file:

@implementation MicCommunicator {
    AVAssetWriter *assetWriter;
    AVAssetWriterInput *assetWriterInput;
}

@synthesize captureSession = _captureSession;
@synthesize output = _output;
@synthesize outputPath = _outputPath;

-(id)init {
    if ((self = [super init])) {
        NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        self.outputPath = [NSURL fileURLWithPath:[[searchPaths objectAtIndex:0] stringByAppendingPathComponent:@"micOutput.output"]];

        AudioChannelLayout acl;
        bzero(&acl, sizeof(acl));
        acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; //kAudioChannelLayoutTag_Stereo;
        NSDictionary *audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                             [NSNumber numberWithInt: kAudioFormatULaw],AVFormatIDKey,        
                                             [NSNumber numberWithFloat:8000.0],AVSampleRateKey,//was 44100.0
                                             [NSData dataWithBytes: &acl length: sizeof( AudioChannelLayout ) ], AVChannelLayoutKey,
                                             [NSNumber numberWithInt:1],AVNumberOfChannelsKey,
                                             [NSNumber numberWithInt:8000.0],AVEncoderBitRateKey,
                                             nil];

        assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
        [assetWriterInput setExpectsMediaDataInRealTime:YES];

        assetWriter = [[AVAssetWriter assetWriterWithURL:_outputPath fileType:AVFileTypeWAVE error:nil] retain];
        [assetWriter addInput:assetWriterInput];
    }
    return self;
}

-(void)dealloc {
    [assetWriter release];
    [super dealloc];
}

//convenience methods

-(void)playMode
{
    [self stopRecording];

    NSError *error;
    AVAudioPlayer * audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:self.outputPath error:&error];
    audioPlayer.numberOfLoops = -1;

    if (audioPlayer == nil){
        NSLog(@"error: %@",[error description]);        
    }else{ 
        NSLog(@"playing");  
        [audioPlayer play];
    }
}

-(void)recordMode
{
    [self beginStreaming];
}

-(void)stopRecording
{
    [self.captureSession stopRunning];
    [assetWriterInput markAsFinished];
    [assetWriter finishWriting];

    NSDictionary *outputFileAttributes = [[NSFileManager defaultManager] attributesOfItemAtPath:[NSString stringWithFormat:@"%@",self.outputPath] error:nil];
    NSLog (@"done. file size is %llu", [outputFileAttributes fileSize]);
}

//starts audio recording
-(void)beginStreaming {
    self.captureSession = [[AVCaptureSession alloc] init];
    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
    if (audioInput)
        [self.captureSession addInput:audioInput];
    else {
        NSLog(@"No audio input found.");
        return;
    }

    AVCaptureAudioDataOutput *output = [[AVCaptureAudioDataOutput alloc] init];

    dispatch_queue_t outputQueue = dispatch_queue_create("micOutputDispatchQueue", NULL);
    [output setSampleBufferDelegate:self queue:outputQueue];
    dispatch_release(outputQueue);

    [self.captureSession addOutput:output];
    [assetWriter startWriting];
    [self.captureSession startRunning];
}

//callback
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    AudioBufferList audioBufferList;
    NSMutableData *data= [[NSMutableData alloc] init];
    CMBlockBufferRef blockBuffer;
    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, NULL, &audioBufferList, sizeof(audioBufferList), NULL, NULL, 0, &blockBuffer);

    //for (int y = 0; y < audioBufferList.mNumberBuffers; y++) {
    //  AudioBuffer audioBuffer = audioBufferList.mBuffers[y];
    //  Float32 *frame = (Float32*)audioBuffer.mData;
    //          
    //  [data appendBytes:frame length:audioBuffer.mDataByteSize];
    //}

    // append [data bytes] to your NSOutputStream 


    // These two lines write to disk, you may not need this, just providing an example
    [assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    [assetWriterInput appendSampleBuffer:sampleBuffer];

    CFRelease(blockBuffer);
    blockBuffer=NULL;
    [data release];
}

@end
owen gerig
  • Please go through the code once more and if a call returns some error information, like a `BOOL` status or an `NSError`, check or log it. That will take you closer to the source of the problem. Is your callback getting called? – zoul Jun 26 '12 at 14:40
  • Yes, I do know that much, because I used to have it send the audio data to another class that streamed it, and the callback would get called consistently. Like I said, this is kind of a dumbed-down version, so I removed that. Either way, I will go through this and add some checks like you're saying. Thanks for the suggestion – owen gerig Jun 26 '12 at 14:42
  • Huh, if I put NSLog(@"number of buffers %d",y); in the for loop within the callback, it's always 0, meaning mNumberBuffers is 0 as well – owen gerig Jun 26 '12 at 14:48
  • Seems like this (http://stackoverflow.com/a/4299665/530933) might be where my problem is. Seems like the samples are 8-bit, right? – owen gerig Jun 26 '12 at 14:54
  • Well, it's not 8 bits: NSLog(@"size of buffer %lu",audioBuffer.mDataByteSize); prints 2048 – owen gerig Jun 26 '12 at 15:14
  • I take that statement back; the loop doesn't do anything in terms of saving the audio to a file – owen gerig Jun 27 '12 at 14:22
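
For reference, the kind of checks zoul suggests could look roughly like this (a sketch only: it logs the writer's status and error via the BOOL returns, and it passes [self.outputPath path] to attributesOfItemAtPath: since that method expects a filesystem path rather than a file:// URL string):

-(void)stopRecording
{
    [self.captureSession stopRunning];
    [assetWriterInput markAsFinished];

    if (![assetWriter finishWriting]) {
        // the writer's status and error say why it gave up
        NSLog(@"finishWriting failed, status: %d error: %@",
              (int)assetWriter.status, assetWriter.error);
    }

    // attributesOfItemAtPath: wants a path, not the URL's description
    NSDictionary *outputFileAttributes = [[NSFileManager defaultManager]
        attributesOfItemAtPath:[self.outputPath path] error:nil];
    NSLog(@"done. file size is %llu", [outputFileAttributes fileSize]);
}

and in the capture callback:

    if (![assetWriterInput appendSampleBuffer:sampleBuffer]) {
        NSLog(@"append failed, writer status: %d error: %@",
              (int)assetWriter.status, assetWriter.error);
    }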

1 Answer


Per Apple Support:

"This is therefore the bug: the file is created, a number of samples are written successfully, then append starts failing for some unknown reason."

It seems that AVAssetWriter fails only with these µLaw settings.
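
For what it's worth, swapping the µLaw settings for a plain linear PCM dictionary along these lines is the sort of configuration commonly reported to work with AVAssetWriter and AVFileTypeWAVE (just a sketch, not verified against this exact project):

AudioChannelLayout acl;
bzero(&acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary *lpcmSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                              [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
                              [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                              [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                              [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                              [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
                              [NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
                              [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
                              [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
                              nil];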

An Audio Queue is what should be used for µLaw audio.
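
A minimal Audio Queue recording sketch for 8 kHz mono µLaw, loosely following the pattern in Apple's Audio Queue Services Programming Guide, could look like this (untested as posted; the CAF container, buffer sizes, and error handling are placeholder choices):

#import <AudioToolbox/AudioToolbox.h>

typedef struct {
    AudioFileID file;        // destination audio file
    SInt64      packetIndex; // next packet to write
} RecorderState;

// Called by the queue whenever a buffer of captured µLaw bytes is ready.
static void HandleInputBuffer(void *inUserData, AudioQueueRef inAQ,
                              AudioQueueBufferRef inBuffer,
                              const AudioTimeStamp *inStartTime,
                              UInt32 inNumPackets,
                              const AudioStreamPacketDescription *inPacketDesc)
{
    RecorderState *state = (RecorderState *)inUserData;

    // For a CBR format like µLaw the packet count can arrive as 0;
    // with 1 byte per packet it is simply the byte count.
    if (inNumPackets == 0)
        inNumPackets = inBuffer->mAudioDataByteSize;

    if (inNumPackets > 0) {
        AudioFileWritePackets(state->file, false, inBuffer->mAudioDataByteSize,
                              NULL, state->packetIndex, &inNumPackets,
                              inBuffer->mAudioData);
        state->packetIndex += inNumPackets;
    }
    AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL); // recycle the buffer
}

// Rough setup: record 8 kHz mono µLaw into a CAF file at fileURL.
static void StartULawRecording(CFURLRef fileURL, AudioQueueRef *outQueue,
                               RecorderState *state)
{
    AudioStreamBasicDescription fmt = {0};
    fmt.mSampleRate       = 8000.0;
    fmt.mFormatID         = kAudioFormatULaw;
    fmt.mChannelsPerFrame = 1;
    fmt.mBitsPerChannel   = 8;
    fmt.mFramesPerPacket  = 1;
    fmt.mBytesPerFrame    = 1;
    fmt.mBytesPerPacket   = 1;

    AudioFileCreateWithURL(fileURL, kAudioFileCAFType, &fmt,
                           kAudioFileFlags_EraseFile, &state->file);
    state->packetIndex = 0;

    AudioQueueNewInput(&fmt, HandleInputBuffer, state, NULL, NULL, 0, outQueue);

    // Hand the queue a few buffers to fill (sizes here are arbitrary).
    for (int i = 0; i < 3; i++) {
        AudioQueueBufferRef buffer;
        AudioQueueAllocateBuffer(*outQueue, 4096, &buffer);
        AudioQueueEnqueueBuffer(*outQueue, buffer, 0, NULL);
    }

    AudioQueueStart(*outQueue, NULL);
}

// To stop: AudioQueueStop(queue, true); AudioQueueDispose(queue, true);
// then AudioFileClose(state.file);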

owen gerig