I am capturing video using UIImagePickerController, and I can crop it with the following code:
AVAsset *asset = [AVAsset assetWithURL:url];
//create an avassetrack with our asset
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
//create a video composition and preset some settings
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
//here we are setting its render size to its height x height (Square)
videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height, clipVideoTrack.naturalSize.height);
//create a video instruction
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30));
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
//Here we shift the viewing square up to the TOP of the video so we only see the top
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -20);
//Use this code if you want the viewing square to be in the middle of the video
//CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) /2 );
//Make sure the square is portrait
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
//add the transformer layer instructions, then add to video composition
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
//Create an Export Path to store the cropped video
NSString *outputPath = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"video.mp4"];
NSURL *exportUrl = [NSURL fileURLWithPath:outputPath];
//Remove any previous video at that path
[[NSFileManager defaultManager] removeItemAtURL:exportUrl error:nil];
//Export
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetLowQuality] ;
exporter.videoComposition = videoComposition;
exporter.outputURL = exportUrl;
exporter.outputFileType = AVFileTypeMPEG4;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        //Call when finished
        [self exportDidFinish:exporter];
    });
}];
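For reference, exportDidFinish: is just a completion callback; a minimal sketch of such a callback (simplified, not my exact code) is:
- (void)exportDidFinish:(AVAssetExportSession *)session
{
    if (session.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"cropped video written to %@", session.outputURL);
    } else {
        NSLog(@"export failed: %@", session.error.localizedDescription);
    }
}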
But I don't know how to fix the orientation issue. Like the Instagram and Vine apps, even if I capture the video in landscape mode it should end up in portrait mode, cropped to a square. Please give me a solution; I am struggling with this issue.
I suppose the source code comes from this link (project code included):
http://www.one-dreamer.com/cropping-video-square-like-vine-instagram-xcode/
First, you need to know the REAL video orientation:
- (UIImageOrientation)getVideoOrientationFromAsset:(AVAsset *)asset
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize size = [videoTrack naturalSize];
CGAffineTransform txf = [videoTrack preferredTransform];
if (size.width == txf.tx && size.height == txf.ty)
    return UIImageOrientationLeft;  //return UIInterfaceOrientationLandscapeLeft;
else if (txf.tx == 0 && txf.ty == 0)
    return UIImageOrientationRight; //return UIInterfaceOrientationLandscapeRight;
else if (txf.tx == 0 && txf.ty == size.width)
    return UIImageOrientationDown;  //return UIInterfaceOrientationPortraitUpsideDown;
else
    return UIImageOrientationUp;    //return UIInterfaceOrientationPortrait;
}
I wrote that function so that it returns the right orientation as if the video were an image.
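For example, a minimal sketch of calling it (videoURL here is assumed to be the URL of the recorded clip):
AVAsset *asset = [AVAsset assetWithURL:videoURL];
UIImageOrientation orientation = [self getVideoOrientationFromAsset:asset];
NSLog(@"detected video orientation: %ld", (long)orientation);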
Then I modified the cropping function to apply the correct orientation, supporting any crop region and not just a square, like this:
// apply the crop to the passed video asset (pass a nil outputUrl to skip saving to disk). Returns the exporter session object
- (AVAssetExportSession*)applyCropToVideoWithAsset:(AVAsset*)asset AtRect:(CGRect)cropRect OnTimeRange:(CMTimeRange)cropTimeRange ExportToUrl:(NSURL*)outputUrl ExistingExportSession:(AVAssetExportSession*)exporter WithCompletion:(void(^)(BOOL success, NSError* error, NSURL* videoUrl))completion
{
//create an avassetrack with our asset
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
//create a video composition and preset some settings
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
CGFloat cropOffX = cropRect.origin.x;
CGFloat cropOffY = cropRect.origin.y;
CGFloat cropWidth = cropRect.size.width;
CGFloat cropHeight = cropRect.size.height;
videoComposition.renderSize = CGSizeMake(cropWidth, cropHeight);
//create a video instruction
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = cropTimeRange;
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
UIImageOrientation videoOrientation = [self getVideoOrientationFromAsset:asset];
CGAffineTransform t1 = CGAffineTransformIdentity;
CGAffineTransform t2 = CGAffineTransformIdentity;
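// Each orientation case builds a translate + rotate transform that maps the track's natural (sensor-space) frame into the cropped renderSize so the output appears upright.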
switch (videoOrientation) {
    case UIImageOrientationUp:
        t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height - cropOffX, 0 - cropOffY);
        t2 = CGAffineTransformRotate(t1, M_PI_2);
        break;
    case UIImageOrientationDown:
        t1 = CGAffineTransformMakeTranslation(0 - cropOffX, clipVideoTrack.naturalSize.width - cropOffY); // note: naturalSize.width is the displayed height when the video is upside down
        t2 = CGAffineTransformRotate(t1, -M_PI_2);
        break;
    case UIImageOrientationRight:
        t1 = CGAffineTransformMakeTranslation(0 - cropOffX, 0 - cropOffY);
        t2 = CGAffineTransformRotate(t1, 0);
        break;
    case UIImageOrientationLeft:
        t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.width - cropOffX, clipVideoTrack.naturalSize.height - cropOffY);
        t2 = CGAffineTransformRotate(t1, M_PI);
        break;
    default:
        NSLog(@"no supported orientation has been found in this video");
        break;
}
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
//add the transformer layer instructions, then add to video composition
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
//Remove any previous video at that path
[[NSFileManager defaultManager] removeItemAtURL:outputUrl error:nil];
if (!exporter){
    exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
}
// assign all instructions for the video processing (in this case the transformation for cropping the video)
exporter.videoComposition = videoComposition;
//exporter.outputFileType = AVFileTypeQuickTimeMovie;
if (outputUrl){
    exporter.outputURL = outputUrl;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        switch ([exporter status]) {
            case AVAssetExportSessionStatusFailed:
                NSLog(@"crop Export failed: %@", [[exporter error] localizedDescription]);
                if (completion){
                    dispatch_async(dispatch_get_main_queue(), ^{
                        completion(NO, [exporter error], nil);
                    });
                    return;
                }
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"crop Export canceled");
                if (completion){
                    dispatch_async(dispatch_get_main_queue(), ^{
                        completion(NO, nil, nil);
                    });
                    return;
                }
                break;
            default:
                break;
        }
        if (completion){
            dispatch_async(dispatch_get_main_queue(), ^{
                completion(YES, nil, outputUrl);
            });
        }
    }];
}
return exporter;
}
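For example, to get an Instagram/Vine-style centered square regardless of how the video was recorded, you could call it like this (a minimal sketch: videoURL and the output path are assumptions, and the crop rect is expressed in the upright/display coordinate space that the transforms above expect):
AVAsset *asset = [AVAsset assetWithURL:videoURL];
AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation orientation = [self getVideoOrientationFromAsset:asset];

// For portrait-style recordings the displayed width/height are the naturalSize swapped
BOOL isPortrait = (orientation == UIImageOrientationUp || orientation == UIImageOrientationDown);
CGSize naturalSize = track.naturalSize;
CGSize displaySize = isPortrait ? CGSizeMake(naturalSize.height, naturalSize.width) : naturalSize;

// Centered square in display coordinates
CGFloat side = MIN(displaySize.width, displaySize.height);
CGRect squareRect = CGRectMake((displaySize.width - side) / 2.0,
                               (displaySize.height - side) / 2.0,
                               side, side);

NSURL *outputUrl = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"cropped.mp4"]];

[self applyCropToVideoWithAsset:asset
                         AtRect:squareRect
                    OnTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                    ExportToUrl:outputUrl
          ExistingExportSession:nil
                 WithCompletion:^(BOOL success, NSError *error, NSURL *videoUrl) {
                     if (success) {
                         NSLog(@"square video exported to %@", videoUrl);
                     } else {
                         NSLog(@"square crop failed: %@", error.localizedDescription);
                     }
                 }];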
Tested with all recorded video orientations (Up, Down, Landscape R, Landscape L), with both the back and front cameras. I tested it on an iPhone 5S (iOS 8.1) and an iPhone 6 Plus (iOS 8.1).
Hope it helps