I've been struggling with several dimensions of the problem of controlling video orientation during and after capture on an iOS device. Thanks to previous answers and documentation, I was able to figure it out.
In case anyone else is looking for this answer as well, this is the method that I cooked up (modified a bit to simplify):
- (void)encodeVideoOrientation:(NSURL *)anOutputFileURL
{
    CGAffineTransform rotationTransform;
    CGAffineTransform rotateTranslate;
    CGSize renderSize;

    switch (self.recordingOrientation)
    {
        // set these 3 values based on orientation
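        // The cases below are illustrative only -- they assume recordingOrientation
        // is a UIInterfaceOrientation and a 1920x1080 back-camera source; real code
        // should derive these sizes from the video track's naturalSize.
        case UIInterfaceOrientationLandscapeRight:
            rotationTransform = CGAffineTransformIdentity;
            rotateTranslate = rotationTransform;
            renderSize = CGSizeMake(1920.0, 1080.0);
            break;
        case UIInterfaceOrientationPortrait:
            // Rotate 90 degrees, then translate so the frame lands back at the origin.
            rotationTransform = CGAffineTransformMakeRotation(M_PI_2);
            rotateTranslate = CGAffineTransformConcat(rotationTransform,
                                                      CGAffineTransformMakeTranslation(1080.0, 0.0));
            renderSize = CGSizeMake(1080.0, 1920.0);
            break;
        default:
            rotationTransform = CGAffineTransformIdentity;
            rotateTranslate = rotationTransform;
            renderSize = CGSizeMake(1920.0, 1080.0);
            break;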
    }

    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:anOutputFileURL options:nil];
    AVAssetTrack *sourceVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVAssetTrack *sourceAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];

    AVMutableComposition *composition = [AVMutableComposition composition];

    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:sourceVideoTrack
                                    atTime:kCMTimeZero error:nil];
    [compositionVideoTrack setPreferredTransform:sourceVideoTrack.preferredTransform];

    // Guard against assets with no audio track; objectAtIndex:0 would crash here.
    if (sourceAudioTrack)
    {
        AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                       ofTrack:sourceAudioTrack
                                        atTime:kCMTimeZero error:nil];
    }

    // The layer instruction's transform is what actually rotates the frames.
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
    [layerInstruction setTransform:rotateTranslate atTime:kCMTimeZero];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.renderScale = 1.0;
    videoComposition.renderSize = renderSize;
    instruction.layerInstructions = @[ layerInstruction ];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    videoComposition.instructions = @[ instruction ];

    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                         presetName:AVAssetExportPresetMediumQuality];
    // Use an .mp4 extension to match the AVFileTypeMPEG4 output type set below.
    NSString *videoName = @"export.mp4";
    NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
    NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];

    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
    {
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
    }

    assetExport.outputFileType = AVFileTypeMPEG4;
    assetExport.outputURL = exportUrl;
    assetExport.shouldOptimizeForNetworkUse = YES;
    assetExport.videoComposition = videoComposition;

    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        switch (assetExport.status)
        {
            case AVAssetExportSessionStatusCompleted:
                // export complete
                NSLog(@"Export Complete");
                break;
            case AVAssetExportSessionStatusFailed:
                // export error (see assetExport.error)
                NSLog(@"Export Failed");
                NSLog(@"ExportSessionError: %@", [assetExport.error localizedDescription]);
                break;
            case AVAssetExportSessionStatusCancelled:
                // export cancelled
                NSLog(@"Export Cancelled");
                break;
            default:
                break;
        }
    }];
}
This stuff is unfortunately poorly documented, but by stringing together examples from other SO questions and reading the header files, I was able to get it working. Hope this helps someone else!
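For context, here is a rough sketch of how a method like this gets wired up: grab the orientation when recording starts, then hand the finished file to encodeVideoOrientation: from the AVCaptureFileOutputRecordingDelegate callback. The movieFileOutput, tempFileURL, and recordingOrientation properties are placeholders for whatever your capture setup already has, so treat this as a sketch rather than drop-in code:

// Sketch: movieFileOutput, tempFileURL, and recordingOrientation are
// placeholder properties, not part of the method above.
- (void)startRecording
{
    // Remember the interface orientation at the moment recording begins.
    self.recordingOrientation = [[UIApplication sharedApplication] statusBarOrientation];
    [self.movieFileOutput startRecordingToOutputFileURL:self.tempFileURL
                                      recordingDelegate:self];
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error
{
    if (!error)
    {
        // Re-encode the freshly recorded file with the remembered orientation.
        [self encodeVideoOrientation:outputFileURL];
    }
}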