I am using the iOS AVFoundation framework and I am able to successfully merge video tracks, with image overlays and text overlays. However, my output file doesn't keep the audio intact from my original source video.
How can I make sure that the audio source from one of my videos stays with the new video I create?
EDIT
Use this code as a good example of how to accomplish this when creating a video (with the original audio). It was not obvious to me that I needed to include the audio track separately when processing a video with AVFoundation. Hope this helps somebody else.
// Pull the first video and first audio track out of the source asset.
// NOTE(review): despite its name, `url` must be an AVAsset (e.g. AVURLAsset) —
// NSURL does not respond to tracksWithMediaType: or duration. Confirm the
// variable's declared type and consider renaming it to `asset`.
AVAssetTrack *videoTrack = nil;
AVAssetTrack *audioTrack = nil;
// Both clips are inserted at the start of the composition.
CMTime insertionPoint = kCMTimeZero;
if([[url tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
videoTrack = [url tracksWithMediaType:AVMediaTypeVideo][0];
}
if([[url tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
audioTrack = [url tracksWithMediaType:AVMediaTypeAudio][0];
}
// Insert the video and audio tracks from AVAsset
// The audio track has to be added to the composition explicitly — copying
// only the video track is what silently drops the sound.
if (videoTrack != nil) {
AVMutableCompositionTrack *compositionVideoTrack = [videoComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [url duration]) ofTrack:videoTrack atTime:insertionPoint error:&error];
}
if (audioTrack != nil) {
AVMutableCompositionTrack *compositionAudioTrack = [videoComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
// NOTE(review): `error` is written by both inserts but never checked —
// check the BOOL return of insertTimeRange:... at the call site.
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [url duration]) ofTrack:audioTrack atTime:insertionPoint error:&error];
}
Here is the complete code which solved this; it combines two videos together with their audio:
// Merge two videos (path1, path2) back-to-back into a single movie, carrying
// each clip's audio along with it, then export to a temporary .mov file.
AVURLAsset *video1 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:path1] options:nil];
AVURLAsset *video2 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:path2] options:nil];
if (video1 != nil && video2 != nil) {
    NSError *editError = nil;
    // 1 - The composition holds ONE video track and ONE audio track; both
    // source clips are appended into the same pair of tracks.
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    // 2 - Video track
    AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *firstTrackAudio = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                             preferredTrackID:kCMPersistentTrackID_Invalid];
    // Append video1 at time zero, then video2 immediately after it. Check the
    // BOOL result instead of passing error:nil so failed edits are surfaced.
    if (![firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, video1.duration)
                             ofTrack:[[video1 tracksWithMediaType:AVMediaTypeVideo] firstObject]
                              atTime:kCMTimeZero
                               error:&editError]) {
        NSLog(@"Failed to insert video track 1: %@", editError);
    }
    if (![firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, video2.duration)
                             ofTrack:[[video2 tracksWithMediaType:AVMediaTypeVideo] firstObject]
                              atTime:video1.duration
                               error:&editError]) {
        NSLog(@"Failed to insert video track 2: %@", editError);
    }
    // Copy each clip's audio (if present) into the shared audio track at the
    // same offset as its video so sound stays in sync. firstObject is nil-safe,
    // unlike objectAtIndex:0 which throws on an empty array.
    AVAssetTrack *audio1 = [[video1 tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (audio1 != nil) {
        if (![firstTrackAudio insertTimeRange:CMTimeRangeMake(kCMTimeZero, video1.duration)
                                      ofTrack:audio1
                                       atTime:kCMTimeZero
                                        error:&editError]) {
            NSLog(@"Failed to insert audio track 1: %@", editError);
        }
    }
    AVAssetTrack *audio2 = [[video2 tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (audio2 != nil) {
        if (![firstTrackAudio insertTimeRange:CMTimeRangeMake(kCMTimeZero, video2.duration)
                                      ofTrack:audio2
                                       atTime:video1.duration
                                        error:&editError]) {
            NSLog(@"Failed to insert audio track 2: %@", editError);
        }
    }
    // Export session
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];
    // Export to the temporary directory under a randomized file name.
    NSString *filename = [NSString stringWithFormat:@"Video_%d.mov", arc4random() % 1000];
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
    // AVAssetExportSession fails if the output file already exists, so remove
    // any leftover file at the destination first.
    if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
        NSLog(@"Removing item at path: %@", path);
        [[NSFileManager defaultManager] removeItemAtPath:path error:nil];
    }
    exporter.outputURL = [NSURL fileURLWithPath:path];
    // Set the output file type
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    path3 = path;
    [arr_StoredDocumentoryUrls addObject:path3];
    // Export runs asynchronously; inspect `status` in the completion handler.
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        switch (exporter.status) {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Export Complete");
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Export Error: %@", [exporter.error description]);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export Cancelled");
                break;
            default:
                break;
        }
    }];
}
Swift 4 version based on @Ashish's answer
// Merge two videos back-to-back (video + audio) into one composition and
// export the result to Documents/mergeVideo.mov.
let video1 = AVURLAsset(url: videoURL1)
let video2 = AVURLAsset(url: videoURL2)
// The composition holds one video track and one audio track shared by both clips.
let mixComposition = AVMutableComposition()
guard let firstTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                                      preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
guard let firstAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
                                                           preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
do {
    try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, video1.duration),
                                   of: video1.tracks(withMediaType: AVMediaType.video)[0],
                                   at: kCMTimeZero)
} catch {
    print("error handling video1")
}
do {
    // FIX: the second clip must start where the first one ends (as in the
    // Objective-C answer), not at kCMTimeZero, or the clips overlap.
    try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, video2.duration),
                                   of: video2.tracks(withMediaType: AVMediaType.video)[0],
                                   at: video1.duration)
} catch {
    print("error handling video2")
}
// FIX: filter for audio tracks explicitly — `tracks[0]` can return the VIDEO
// track, which cannot be inserted into an audio composition track.
if let clipAudioTrack = video1.tracks(withMediaType: AVMediaType.audio).first {
    do {
        try firstAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, video1.duration), of: clipAudioTrack, at: kCMTimeZero)
    } catch {
        print("error inserting audio track 1")
    }
}
if let clipAudioTrack = video2.tracks(withMediaType: AVMediaType.audio).first {
    do {
        try firstAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, video2.duration), of: clipAudioTrack, at: video1.duration)
    } catch {
        print("error inserting audio track 2")
    }
}
// Create Exporter
guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
// Destination: Documents/mergeVideo.mov
guard let documentDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { return }
let documentUrl = documentDirectory.appendingPathComponent("mergeVideo.mov")
// FIX: the original checked the literal string "path". The export fails if the
// real destination file already exists, so remove it at the actual output URL.
if FileManager.default.fileExists(atPath: documentUrl.path) {
    try? FileManager.default.removeItem(at: documentUrl)
}
// Set the output file type
exporter.outputURL = documentUrl
exporter.outputFileType = AVFileType.mov
// Export runs asynchronously; inspect `status` in the completion handler.
exporter.exportAsynchronously {
    switch exporter.status {
    case .completed:
        print("export completed")
    case .failed:
        print("export failed")
    case .cancelled:
        print("export cancelled")
    default:
        break
    }
}
Try adding MobileCoreServices and execute.
Source: https://stackoverflow.com/questions/14026584/ios-avfoundation-export-session-is-missing-audio