I am capturing the video using UIImagePickerController, and I can crop the video using the following code:
AVAsset *asset = [AVAsset assetWithURL:url];
//create
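For context, the url above is typically the movie URL handed back by the picker's delegate. Below is a minimal sketch of the capture side, assuming a view controller that adopts UIImagePickerControllerDelegate and UINavigationControllerDelegate (the method and variable names here are illustrative, not from the original code):

#import <MobileCoreServices/MobileCoreServices.h> // for kUTTypeMovie
#import <AVFoundation/AVFoundation.h>

- (void)startVideoCapture
{
    UIImagePickerController *picker = [[UIImagePickerController alloc] init];
    picker.sourceType = UIImagePickerControllerSourceTypeCamera;
    picker.mediaTypes = @[(NSString *)kUTTypeMovie]; // record video, not photos
    picker.delegate = self;
    [self presentViewController:picker animated:YES completion:nil];
}

- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    // File URL of the recorded movie; this is the url passed to assetWithURL:
    NSURL *url = info[UIImagePickerControllerMediaURL];
    [picker dismissViewControllerAnimated:YES completion:nil];

    AVAsset *asset = [AVAsset assetWithURL:url];
    // ...continue with the cropping code
}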
I know this question is old, but some people may still be wondering why some videos from the camera roll zoom in after they're cropped. I ran into this problem and realized that the cropRect I was using as a frame was not scaled for the different aspect ratios of the video. To fix it, I simply added the code below to crop the very top of the video into a square. If you want to change the position, just change the y value, but make sure to scale it according to the video. Luca Iaco provided some great code to get started with. I appreciate it!
CGSize videoSize = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize];

// Scale relative to the shorter side so a 320-point square maps to the
// same region of the video regardless of its aspect ratio.
float scaleFactor;
if (videoSize.width >= videoSize.height) {
    scaleFactor = videoSize.height / 320;
} else {
    scaleFactor = videoSize.width / 320;
}

// Crop a square from the very top of the video. The offsets and sizes are
// in the video's pixel space, hence the scale factor on the dimensions.
CGFloat cropOffX = 0;
CGFloat cropOffY = 0;
CGFloat cropWidth = 320 * scaleFactor;
CGFloat cropHeight = 320 * scaleFactor;
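For example, to center the square vertically instead of pinning it to the top (the "change the y value" case mentioned above), the offset can be computed from the same scaled values. A small sketch that reuses the variables from the snippet:

// Center the square vertically; cropHeight is already in the video's
// pixel space (it was multiplied by scaleFactor), so the offset can be
// taken directly from the natural size.
cropOffY = (videoSize.height - cropHeight) / 2;

// The resulting rect is what gets passed along as the cropRect when
// building the video composition.
CGRect cropRect = CGRectMake(cropOffX, cropOffY, cropWidth, cropHeight);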
This is my code to create a Vine-like (square) video from a video on disk. It is written in Swift:
static let MaxDuration: CMTimeValue = 12 // maximum output length, in seconds

class func compressVideoAsset(_ asset: AVAsset, output: URL, completion: @escaping (_ data: Data?) -> Void)
{
    let session = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetMediumQuality)!
    session.videoComposition = self.squareVideoCompositionForAsset(asset)
    session.outputURL = output
    session.outputFileType = AVFileTypeMPEG4
    session.shouldOptimizeForNetworkUse = true
    session.canPerformMultiplePassesOverSourceMediaData = true

    // Cap the export at MaxDuration seconds, expressed in a 30 fps timescale.
    let duration = CMTimeValue(CGFloat(asset.duration.value) / CGFloat(asset.duration.timescale) * 30)
    session.timeRange = CMTimeRange(start: kCMTimeZero, duration: CMTime(value: min(duration, VideoCompressor.MaxDuration * 30), timescale: 30))

    session.exportAsynchronously(completionHandler: { () -> Void in
        // Read the exported file back and hand it to the caller on the main queue.
        let data = try? Data(contentsOf: output)
        DispatchQueue.main.async(execute: { () -> Void in
            completion(data)
        })
    })
}
private class func squareVideoCompositionForAsset(_ asset: AVAsset) -> AVVideoComposition
{
    let track = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
    // The square's side is the shorter of the track's natural dimensions.
    let length = min(track.naturalSize.width, track.naturalSize.height)

    var transform = track.preferredTransform
    let size = track.naturalSize
    // A preferred transform of (-1, 0, 0, -1) means the track is rotated 180 degrees,
    // so the centering translation below has to run in the opposite direction.
    let scale: CGFloat = (transform.a == -1 && transform.b == 0 && transform.c == 0 && transform.d == -1) ? -1 : 1 // check for inversion
    // Shift the track so the square crop is centered along the longer dimension.
    transform = transform.translatedBy(x: scale * -(size.width - length) / 2, y: scale * -(size.height - length) / 2)

    let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    transformer.setTransform(transform, at: kCMTimeZero)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: kCMTimeZero, duration: kCMTimePositiveInfinity)
    instruction.layerInstructions = [transformer]

    let composition = AVMutableVideoComposition()
    composition.frameDuration = CMTime(value: 1, timescale: 30)
    composition.renderSize = CGSize(width: length, height: length)
    composition.instructions = [instruction]

    return composition
}
I suppose the source code comes from this link (project code included):
http://www.one-dreamer.com/cropping-video-square-like-vine-instagram-xcode/
You first need to know the REAL video orientation:
- (UIImageOrientation)getVideoOrientationFromAsset:(AVAsset *)asset
{
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGSize size = [videoTrack naturalSize];
    CGAffineTransform txf = [videoTrack preferredTransform];

    if (size.width == txf.tx && size.height == txf.ty)
        return UIImageOrientationLeft;  //return UIInterfaceOrientationLandscapeLeft;
    else if (txf.tx == 0 && txf.ty == 0)
        return UIImageOrientationRight; //return UIInterfaceOrientationLandscapeRight;
    else if (txf.tx == 0 && txf.ty == size.width)
        return UIImageOrientationDown;  //return UIInterfaceOrientationPortraitUpsideDown;
    else
        return UIImageOrientationUp;    //return UIInterfaceOrientationPortrait;
}
I wrote that function so that it returns the orientation as if the asset were an image.
Then I modified the crop function to apply the correct orientation, supporting any crop region, not just a square, like this:
// Apply the crop to the passed video asset (pass a nil outputUrl to skip exporting to disk).
// Returns the export session object.
- (AVAssetExportSession *)applyCropToVideoWithAsset:(AVAsset *)asset AtRect:(CGRect)cropRect OnTimeRange:(CMTimeRange)cropTimeRange ExportToUrl:(NSURL *)outputUrl ExistingExportSession:(AVAssetExportSession *)exporter WithCompletion:(void (^)(BOOL success, NSError *error, NSURL *videoUrl))completion
{
    //create an AVAssetTrack with our asset
    AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    //create a video composition and preset some settings
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);

    CGFloat cropOffX = cropRect.origin.x;
    CGFloat cropOffY = cropRect.origin.y;
    CGFloat cropWidth = cropRect.size.width;
    CGFloat cropHeight = cropRect.size.height;

    videoComposition.renderSize = CGSizeMake(cropWidth, cropHeight);

    //create a video instruction
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = cropTimeRange;

    AVMutableVideoCompositionLayerInstruction *transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];

    UIImageOrientation videoOrientation = [self getVideoOrientationFromAsset:asset];

    CGAffineTransform t1 = CGAffineTransformIdentity;
    CGAffineTransform t2 = CGAffineTransformIdentity;

    switch (videoOrientation) {
        case UIImageOrientationUp:
            t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height - cropOffX, 0 - cropOffY);
            t2 = CGAffineTransformRotate(t1, M_PI_2);
            break;
        case UIImageOrientationDown:
            t1 = CGAffineTransformMakeTranslation(0 - cropOffX, clipVideoTrack.naturalSize.width - cropOffY); // note: naturalSize.width is the real height when the video is upside down
            t2 = CGAffineTransformRotate(t1, -M_PI_2);
            break;
        case UIImageOrientationRight:
            t1 = CGAffineTransformMakeTranslation(0 - cropOffX, 0 - cropOffY);
            t2 = CGAffineTransformRotate(t1, 0);
            break;
        case UIImageOrientationLeft:
            t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.width - cropOffX, clipVideoTrack.naturalSize.height - cropOffY);
            t2 = CGAffineTransformRotate(t1, M_PI);
            break;
        default:
            NSLog(@"no supported orientation has been found in this video");
            break;
    }

    CGAffineTransform finalTransform = t2;
    [transformer setTransform:finalTransform atTime:kCMTimeZero];

    //add the transformer layer instruction, then add it to the video composition
    instruction.layerInstructions = [NSArray arrayWithObject:transformer];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];

    //remove any previous video at that path
    [[NSFileManager defaultManager] removeItemAtURL:outputUrl error:nil];

    if (!exporter) {
        exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
    }

    // assign all instructions for the video processing (in this case, the transformation for cropping the video)
    exporter.videoComposition = videoComposition;
    //exporter.outputFileType = AVFileTypeQuickTimeMovie;

    if (outputUrl) {
        exporter.outputURL = outputUrl;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            switch ([exporter status]) {
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"crop Export failed: %@", [[exporter error] localizedDescription]);
                    if (completion) {
                        dispatch_async(dispatch_get_main_queue(), ^{
                            completion(NO, [exporter error], nil);
                        });
                        return;
                    }
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"crop Export canceled");
                    if (completion) {
                        dispatch_async(dispatch_get_main_queue(), ^{
                            completion(NO, nil, nil);
                        });
                        return;
                    }
                    break;
                default:
                    break;
            }

            if (completion) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(YES, nil, outputUrl);
                });
            }
        }];
    }

    return exporter;
}
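For reference, a call site could look like the following. This is only a sketch: sourceURL, the output path, and the 300-point square are placeholders, and it assumes the method above lives on the same class (hence the call on self); the time range simply covers the whole clip:

// sourceURL: the URL of the recorded or picked video (placeholder)
AVAsset *asset = [AVAsset assetWithURL:sourceURL];
NSURL *outputUrl = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"cropped.mov"]];
CMTimeRange fullRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
CGRect squareRect = CGRectMake(0, 0, 300, 300); // crop region in the oriented video's coordinates

[self applyCropToVideoWithAsset:asset
                         AtRect:squareRect
                    OnTimeRange:fullRange
                    ExportToUrl:outputUrl
          ExistingExportSession:nil
                 WithCompletion:^(BOOL success, NSError *error, NSURL *videoUrl) {
                     if (success) {
                         NSLog(@"cropped video written to %@", videoUrl);
                     } else {
                         NSLog(@"crop failed: %@", error.localizedDescription);
                     }
                 }];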
Tested with all recorded video orientations (up, down, landscape right, landscape left), using both the back and front cameras, on an iPhone 5S (iOS 8.1) and an iPhone 6 Plus (iOS 8.1).
Hope it helps.