I have a function that is supposed to re-encode a video to a manageable bitrate on iPhone/iPad. Here it is: *UPDATED WORKING CODE, NOW WITH AUDIO!* :)
-(void)resizeVideo:(NSString*)pathy {
    NSString *newName = [pathy stringByAppendingString:@".down.mov"];
    NSURL *fullPath = [NSURL fileURLWithPath:newName];
    NSURL *path = [NSURL fileURLWithPath:pathy];
    NSLog(@"Write Started");

    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:fullPath fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);
    AVAsset *avAsset = [[[AVURLAsset alloc] initWithURL:path options:nil] autorelease];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:1280], AVVideoWidthKey,
                                   [NSNumber numberWithInt:720], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *videoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                               outputSettings:videoSettings] retain];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    NSError *aerror = nil;
    AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:avAsset error:&aerror];
    AVAssetTrack *videoTrack = [[avAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    videoWriterInput.transform = videoTrack.preferredTransform;
    NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
                                                             forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    AVAssetReaderTrackOutput *asset_reader_output = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoOptions];
    [reader addOutput:asset_reader_output];

    // audio setup
    AVAssetWriterInput *audioWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                               outputSettings:nil] retain];
    AVAssetReader *audioReader = [[AVAssetReader assetReaderWithAsset:avAsset error:&error] retain];
    AVAssetTrack *audioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
    [audioReader addOutput:readerOutput];
    NSParameterAssert(audioWriterInput);
    NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
    audioWriterInput.expectsMediaDataInRealTime = NO;
    [videoWriter addInput:audioWriterInput];

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    [reader startReading];

    dispatch_queue_t _processingQueue = dispatch_queue_create("assetAudioWriterQueue", NULL);
    [videoWriterInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:^{
        [self retain];
        while ([videoWriterInput isReadyForMoreMediaData]) {
            CMSampleBufferRef sampleBuffer;
            if ([reader status] == AVAssetReaderStatusReading &&
                (sampleBuffer = [asset_reader_output copyNextSampleBuffer])) {
                BOOL result = [videoWriterInput appendSampleBuffer:sampleBuffer];
                CFRelease(sampleBuffer);
                if (!result) {
                    [reader cancelReading];
                    break;
                }
            } else {
                [videoWriterInput markAsFinished];
                switch ([reader status]) {
                    case AVAssetReaderStatusReading:
                        // the reader has more for other tracks, even if this one is done
                        break;
                    case AVAssetReaderStatusCompleted:
                        // your method for when the conversion is done
                        // should call finishWriting on the writer
                        // hook up audio track
                        [audioReader startReading];
                        [videoWriter startSessionAtSourceTime:kCMTimeZero];
                        dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
                        [audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
                            NSLog(@"Request");
                            NSLog(@"Asset Writer ready :%d", audioWriterInput.readyForMoreMediaData);
                            while (audioWriterInput.readyForMoreMediaData) {
                                CMSampleBufferRef nextBuffer;
                                if ([audioReader status] == AVAssetReaderStatusReading &&
                                    (nextBuffer = [readerOutput copyNextSampleBuffer])) {
                                    NSLog(@"Ready");
                                    if (nextBuffer) {
                                        NSLog(@"NextBuffer");
                                        [audioWriterInput appendSampleBuffer:nextBuffer];
                                    }
                                } else {
                                    [audioWriterInput markAsFinished];
                                    switch ([audioReader status]) {
                                        case AVAssetReaderStatusCompleted:
                                            [videoWriter finishWriting];
                                            [self hookUpVideo:newName];
                                            break;
                                    }
                                }
                            }
                        }];
                        break;
                    case AVAssetReaderStatusFailed:
                        [videoWriter cancelWriting];
                        break;
                }
                break;
            }
        }
    }];
    NSLog(@"Write Ended");
}
Unfortunately, if I pass in a video any longer than 2 seconds, the app sucks up memory like crazy and crashes! The code seems fairly simple, but I cannot seem to get it to work!
Am I supposed to release the buffer in there somewhere after it is written? I would be most grateful to anyone who has any input.
-copyNextSampleBuffer is returning a CMSampleBufferRef with a +1 retain count (copy methods do that), which means you must release the object. Your video loop does CFRelease() its sampleBuffer, but the audio loop never releases nextBuffer, so you're going to leak a copy on every pass through that while() loop.
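For example, the audio branch could release the buffer as soon as it has been appended; a minimal sketch of just that part of the loop body, reusing the readerOutput and audioWriterInput variables from your code:

    CMSampleBufferRef nextBuffer = [readerOutput copyNextSampleBuffer];
    if (nextBuffer) {
        [audioWriterInput appendSampleBuffer:nextBuffer];
        CFRelease(nextBuffer); // balances the +1 retain from copyNextSampleBuffer
    }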
Additionally, you're running that loop tightly without managing an autorelease pool. If there are objects being autoreleased in any of the routines you're calling, they will not be released until the autorelease pool above you drains. Since your while() loop duration is based on input, it's a good candidate for adding a manual autorelease pool.
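Because your code is pre-ARC, a plain NSAutoreleasePool per pass is enough; a sketch, with the copy/append/release work elided:

    while ([videoWriterInput isReadyForMoreMediaData]) {
        NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
        // ... copy the next sample buffer, append it, CFRelease it ...
        [pool drain]; // drain every pass, and before any break out of the loop
    }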
One other thing to consider: because you're running this synchronously with a while() loop, you'll block the thread and possibly spin unnecessarily over your continue condition several times. AVAssetWriterInput provides an alternative mechanism to use libdispatch to process data asynchronously as resources become available: -requestMediaDataWhenReadyOnQueue:usingBlock:
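The general shape of that pattern is roughly the following; a condensed sketch with placeholder names (writerInput, trackOutput and writerQueue are generic, not your exact variables), folding in the buffer release and the per-pass pool from above:

    dispatch_queue_t writerQueue = dispatch_queue_create("writerQueue", NULL);
    [writerInput requestMediaDataWhenReadyOnQueue:writerQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData]) {
            NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
            CMSampleBufferRef buffer = [trackOutput copyNextSampleBuffer];
            if (buffer) {
                [writerInput appendSampleBuffer:buffer];
                CFRelease(buffer);            // balance the copy
                [pool drain];
            } else {
                [writerInput markAsFinished]; // no more samples for this track
                [pool drain];
                break;
            }
        }
    }];

The block simply returns when the input stops being ready, and AVFoundation invokes it again once the input can accept more data, so nothing spins while you wait.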