I've been struggling with several dimensions of the problem of controlling video orientation during and after capture on an iOS device. Thanks to previous answers and documentation here, I was able to figure it out. Finally, based on the answers from @Aaron Vegh and @Prince, I arrived at my solution:

// Converting video
+ (void)convertMOVToMp4:(NSString *)movFilePath completion:(void (^)(NSString *mp4FilePath))block {
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:movFilePath] options:nil];
    AVMutableComposition *composition = [AVMutableComposition composition];

    // Copy the audio track into the composition (guarded: a clip with no audio
    // would otherwise crash on objectAtIndex:0).
    NSArray *audioTracks = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
    if ([audioTracks count] > 0) {
        AVAssetTrack *sourceAudioTrack = [audioTracks objectAtIndex:0];
        AVMutableCompositionTrack *compositionAudioTrack =
            [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                     preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                       ofTrack:sourceAudioTrack
                                        atTime:kCMTimeZero
                                         error:nil];
    }

    AVAssetExportSession *assetExport =
        [[AVAssetExportSession alloc] initWithAsset:composition
                                         presetName:AVAssetExportPresetMediumQuality];

    // Derive the output path; stringByDeletingPathExtension handles ".MOV" and
    // ".mov" alike, which a case-sensitive string replacement would not.
    NSString *exportPath = [[movFilePath stringByDeletingPathExtension]
                               stringByAppendingPathExtension:@"mp4"];
    // The export fails if a file already exists at the output URL.
    [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];

    assetExport.outputFileType = AVFileTypeMPEG4;
    assetExport.outputURL = [NSURL fileURLWithPath:exportPath];
    assetExport.shouldOptimizeForNetworkUse = YES;
    assetExport.videoComposition = [self getVideoComposition:videoAsset composition:composition];

    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        switch (assetExport.status) {
            case AVAssetExportSessionStatusCompleted:
                if (block) {
                    block(exportPath);
                }
                break;
            case AVAssetExportSessionStatusFailed:
            case AVAssetExportSessionStatusCancelled:
                if (block) {
                    block(nil);
                }
                break;
            default:
                break;
        }
    }];
}
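For completeness, here is roughly how I call it. `VideoConverter` is just a placeholder for whatever class hosts these methods, and the path is a made-up example; the dispatch to the main queue is there because the export completion handler runs on a background thread:

// Hypothetical call site; VideoConverter stands in for the class holding the methods above.
NSString *movPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"capture.MOV"];
[VideoConverter convertMOVToMp4:movPath completion:^(NSString *mp4FilePath) {
    // Hop back to the main queue before touching any UI.
    dispatch_async(dispatch_get_main_queue(), ^{
        if (mp4FilePath) {
            NSLog(@"exported mp4 at %@", mp4FilePath);
        } else {
            NSLog(@"export failed or was cancelled");
        }
    });
}];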
// Build a video composition that applies the track's capture orientation
+ (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset composition:(AVMutableComposition *)composition {
    BOOL isPortrait_ = [self isVideoPortrait:asset];

    // Copy the video track into the composition.
    AVMutableCompositionTrack *compositionVideoTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                   ofTrack:videoTrack
                                    atTime:kCMTimeZero
                                     error:nil];

    // The capture-time orientation lives in preferredTransform; apply it via a
    // layer instruction so the exported frames are actually rotated.
    AVMutableVideoCompositionLayerInstruction *layerInst =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
    [layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];

    AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    inst.layerInstructions = [NSArray arrayWithObject:layerInst];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.instructions = [NSArray arrayWithObject:inst];

    // naturalSize ignores the transform, so swap width and height for portrait video.
    CGSize videoSize = videoTrack.naturalSize;
    if (isPortrait_) {
        NSLog(@"video is portrait");
        videoSize = CGSizeMake(videoSize.height, videoSize.width);
    }
    videoComposition.renderSize = videoSize;
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.renderScale = 1.0;
    return videoComposition;
}
// Determine whether the video was recorded in portrait by inspecting its preferred transform
+ (BOOL)isVideoPortrait:(AVAsset *)asset {
    BOOL isPortrait = NO;
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] > 0) {
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
        CGAffineTransform t = videoTrack.preferredTransform;
        if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
            // Portrait
            isPortrait = YES;
        } else if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
            // PortraitUpsideDown
            isPortrait = YES;
        } else if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
            // LandscapeRight
            isPortrait = NO;
        } else if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
            // LandscapeLeft
            isPortrait = NO;
        }
    }
    return isPortrait;
}
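If the exact float comparisons above feel fragile (a preferred transform that also carries a translation, as some capture pipelines produce, won't match any branch), one alternative is to derive the rotation angle from the transform instead. This is just a sketch of that idea, not part of the original answer; `isVideoPortraitByAngle:` is a hypothetical method name on the same class:

// Sketch: classify orientation by the rotation angle encoded in the transform.
// Works for pure rotations, with or without an added translation.
+ (BOOL)isVideoPortraitByAngle:(AVAsset *)asset {
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] == 0) {
        return NO;
    }
    CGAffineTransform t = [[tracks objectAtIndex:0] preferredTransform];
    // atan2(b, a) recovers the rotation in radians:
    // 0 = landscape-right, +/- M_PI_2 = portrait variants, M_PI = landscape-left.
    CGFloat angle = atan2(t.b, t.a);
    // Portrait if the rotation is within a small tolerance of +/- 90 degrees.
    return fabs(fabs(angle) - M_PI_2) < 0.01;
}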