I've tried to make a video app like a vine by using "AVFoundation". Now I can save video through AVCaptureVideoDataOutput
and can play.But somehow audio doesn't work and I don't know why.
I'm beginner of the iOS app so it may be not clear to explain. Hope you understand what I'm trying to say and give me some tips.
This is the code I'm using.
Setting up AVCaptureVideoDataOutput
and AVCaptureAudioDataOutput
:
AVCaptureVideoDataOutput* videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
[CaptureSession addOutput:videoDataOutput];
videoDataOutput.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA],kCVPixelBufferPixelFormatTypeKey,
nil];
dispatch_queue_t videoQueue = dispatch_queue_create("VideoQueue", NULL);
[videoDataOutput setSampleBufferDelegate:self queue:videoQueue];
AVCaptureAudioDataOutput *audioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
[CaptureSession addOutput:audioDataOutput];
dispatch_queue_t audioQueue = dispatch_queue_create("AudioQueue", NULL);
[audioDataOutput setSampleBufferDelegate:self queue:audioQueue];
Setting up AVAssetWrite
and AVAssetWriterInput
:
- (void)makeWriter{
pathString = [NSHomeDirectory()stringByAppendingPathComponent:@"Documents/capture.mov"];
exportURL = [NSURL fileURLWithPath:pathString];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportURL.path])
{
[[NSFileManager defaultManager] removeItemAtPath:exportURL.path error:nil];
}
NSError* error;
writer = [[AVAssetWriter alloc] initWithURL:exportURL
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSDictionary* videoSetting = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:1280], AVVideoWidthKey,
[NSNumber numberWithInt:720], AVVideoHeightKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSetting];
videoWriterInput.expectsMediaDataInRealTime = YES;
// Add the audio input
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary* audioOutputSettings = nil;
// Both type of audio inputs causes output video file to be corrupted.
if( NO ) {
// should work from iphone 3GS on and from ipod 3rd generation
audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil];
} else {
// should work on any device requires more space
audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
[ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil ];
}
audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType: AVMediaTypeAudio
outputSettings: audioOutputSettings ];
audioWriterInput.expectsMediaDataInRealTime = YES;
// add input
[writer addInput:videoWriterInput];
[writer addInput:audioWriterInput];
}
And finally the CaptureOutput
code:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
if ((isPause) && (isRecording)) { return; }
if( !CMSampleBufferDataIsReady(sampleBuffer) ){return;}
if( isRecording == YES ) {
isWritting = YES;
if( writer.status != AVAssetWriterStatusWriting ) {
[writer startWriting];
[writer startSessionAtSourceTime:kCMTimeZero];
}
if( [videoWriterInput isReadyForMoreMediaData] ) {
CFRetain(sampleBuffer);
CMSampleBufferRef newSampleBuffer = [self offsetTimmingWithSampleBufferForVideo:sampleBuffer];
[videoWriterInput appendSampleBuffer:newSampleBuffer];
CFRelease(sampleBuffer);
CFRelease(newSampleBuffer);
}
writeFrames++;
}
}
- (CMSampleBufferRef)offsetTimmingWithSampleBufferForVideo:(CMSampleBufferRef)sampleBuffer
{
CMSampleBufferRef newSampleBuffer;
CMSampleTimingInfo sampleTimingInfo;
sampleTimingInfo.duration = CMTimeMake(1, 30);
sampleTimingInfo.presentationTimeStamp = CMTimeMake(writeFrames, 30);
sampleTimingInfo.decodeTimeStamp = kCMTimeInvalid;
CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault,
sampleBuffer,
1,
&sampleTimingInfo,
&newSampleBuffer);
return newSampleBuffer;
}