Swift: Merge AVAsset Videos Array


Question


I want to merge the AVAsset videos in arrayVideos into one single video and save it to the camera roll. raywenderlich.com has a great tutorial where two videos are merged into one. I've written the following code; however, the video I get after exporting to the camera roll includes only the first and the last videos from the array (the videos in the middle of arrayVideos are missing). Am I missing something here?

import AVFoundation
import Photos
import UIKit

var arrayVideos = [AVAsset]() // Videos array
var atTimeM: CMTime = CMTimeMake(0, 1)
var lastAsset: AVAsset!
var layerInstructionsArray = [AVVideoCompositionLayerInstruction]()
var completeTrackDuration: CMTime = CMTimeMake(0, 1)
var videoSize: CGSize = CGSize(width: 0.0, height: 0.0)

func mergeVideoArray(){

    let mixComposition = AVMutableComposition()
    for videoAsset in arrayVideos{
        let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            if videoAsset == arrayVideos.first{
                atTimeM = kCMTimeZero
            } else{
                atTimeM = lastAsset!.duration
            }
            try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: atTimeM)  
            videoSize = videoTrack.naturalSize
        } catch let error as NSError {
            print("error: \(error)")
        }
        completeTrackDuration = CMTimeAdd(completeTrackDuration, videoAsset.duration)
        let videoInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
        if videoAsset != arrayVideos.last{
            videoInstruction.setOpacity(0.0, at: videoAsset.duration)
        }
        layerInstructionsArray.append(videoInstruction)
        lastAsset = videoAsset            
    }

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, completeTrackDuration)
    mainInstruction.layerInstructions = layerInstructionsArray        

    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(1, 30)
    mainComposition.renderSize = CGSize(width: videoSize.width, height: videoSize.height)

    let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
    let dateFormatter = DateFormatter()
    dateFormatter.dateStyle = .long
    dateFormatter.timeStyle = .short
    let date = dateFormatter.string(from: NSDate() as Date)
    let savePath = (documentDirectory as NSString).appendingPathComponent("mergeVideo-\(date).mov")
    let url = NSURL(fileURLWithPath: savePath)

    let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    exporter!.outputURL = url as URL
    exporter!.outputFileType = AVFileTypeQuickTimeMovie
    exporter!.shouldOptimizeForNetworkUse = true
    exporter!.videoComposition = mainComposition
    exporter!.exportAsynchronously {

        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exporter!.outputURL!)
        }) { saved, error in
            if saved {
                let alertController = UIAlertController(title: "Your video was successfully saved", message: nil, preferredStyle: .alert)
                let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil)
                alertController.addAction(defaultAction)
                self.present(alertController, animated: true, completion: nil)
            } else{
                print("video erro: \(error)")

            }
        }
    }
} 

Answer 1:


You need to track the total running time across all of the assets and use it as the insertion point for each video.

The code in your question set atTimeM to only the previous asset's duration rather than the running total, so every clip after the second was inserted at the wrong spot. That's why only the first and last videos ended up in the result.

It will look something like this:

...
var totalTime : CMTime = CMTimeMake(0, 1) // Timescale must be non-zero, or the time is invalid.

func mergeVideoArray() {

    let mixComposition = AVMutableComposition()
    for videoAsset in arrayVideos {
        let videoTrack = 
            mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, 
                                           preferredTrackID: Int32(kCMPersistentTrackID_Invalid))          
        do {
            if videoAsset == arrayVideos.first {
                atTimeM = kCMTimeZero
            } else {
                atTimeM = totalTime // <-- Use the total time for all the videos seen so far.
            }
            try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), 
                                           of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0], 
                                           at: atTimeM)  
            videoSize = videoTrack.naturalSize
        } catch let error as NSError {
            print("error: \(error)")
        }
        totalTime = CMTimeAdd(totalTime, videoAsset.duration) // <-- Update the total time for all videos.
...

You can remove the use of lastAsset.
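
Putting both changes together, here is a minimal sketch of the corrected loop, assuming the same properties (arrayVideos, completeTrackDuration, videoSize, layerInstructionsArray) declared in the question:

let mixComposition = AVMutableComposition()
var totalTime = kCMTimeZero

for videoAsset in arrayVideos {
    let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                    preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
    do {
        // Insert each clip at the end of everything inserted so far.
        try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                       of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                       at: totalTime)
        videoSize = videoTrack.naturalSize
    } catch let error as NSError {
        print("error: \(error)")
    }
    // Advance the running total past the clip just inserted.
    totalTime = CMTimeAdd(totalTime, videoAsset.duration)

    let videoInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    if videoAsset != arrayVideos.last {
        // Hide this track at its clip's end time within the composition.
        videoInstruction.setOpacity(0.0, at: totalTime)
    }
    layerInstructionsArray.append(videoInstruction)
}
completeTrackDuration = totalTime

Note that the setOpacity time also needs the running total: the original code hid each track at videoAsset.duration, which is the right moment only for the first clip.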




Answer 2:


Swift 4

Use it like this:

MeargeVide.mergeVideoArray(arrayVideos: arrayAsset) { (urlMeargeVide, error) in
    debugPrint("url", urlMeargeVide ?? "")
    debugPrint("error", error ?? "")
}

Here is the complete class; it handles orientation and merges multiple clips into a single video.

import AVFoundation
import UIKit

class MeargeVide {

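    /// Decodes a track's preferredTransform into an asset orientation
    /// and a flag indicating whether the clip is portrait.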
   static func orientationFromTransform(_ transform: CGAffineTransform)
        -> (orientation: UIImageOrientation, isPortrait: Bool) {
            var assetOrientation = UIImageOrientation.up
            var isPortrait = false
            if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
                assetOrientation = .right
                isPortrait = true
            } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
                assetOrientation = .left
                isPortrait = true
            } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
                assetOrientation = .up
            } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
                assetOrientation = .down
            }
            return (assetOrientation, isPortrait)
    }

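    /// Builds a layer instruction for the track, scaling and rotating the
    /// clip so it renders upright and fits the screen width.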
    static func videoCompositionInstruction(_ track: AVCompositionTrack, asset: AVAsset)
        -> AVMutableVideoCompositionLayerInstruction {
            let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
            let assetTrack = asset.tracks(withMediaType: .video)[0]

            let transform = assetTrack.preferredTransform
            let assetInfo = orientationFromTransform(transform)

            var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
            if assetInfo.isPortrait {
                scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
                let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
                instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor), at: kCMTimeZero)
            } else {
                let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
                var concat = assetTrack.preferredTransform.concatenating(scaleFactor)
                    .concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
                if assetInfo.orientation == .down {
                    let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                    let windowBounds = UIScreen.main.bounds
                    let yFix = assetTrack.naturalSize.height + windowBounds.height
                    let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
                    concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
                }
                instruction.setTransform(concat, at: kCMTimeZero)
            }

            return instruction
    }

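    /// Concatenates the given assets end to end and exports the result as an
    /// .mp4 file, calling back on the main queue with the output URL or error.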
    class func mergeVideoArray(arrayVideos: [AVAsset], callBack: @escaping (_ urlGet: URL?, _ errorGet: Error?) -> Void) {

        var layerInstructionsArray = [AVVideoCompositionLayerInstruction]()
        var completeTrackDuration: CMTime = CMTimeMake(0, 1)
        var videoSize: CGSize = CGSize(width: 0.0, height: 0.0)

        let mixComposition = AVMutableComposition()
        for videoAsset in arrayVideos {

            let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
            do {
                // Insert each clip at the end of everything inserted so far,
                // so atTimeM and lastAsset are no longer needed.
                try videoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                                of: videoAsset.tracks(withMediaType: AVMediaType.video)[0],
                                                at: completeTrackDuration)
                videoSize = (videoTrack?.naturalSize)!
            } catch let error as NSError {
                print("error: \(error)")
            }

            // Advance the running total past the clip just inserted.
            completeTrackDuration = CMTimeAdd(completeTrackDuration, videoAsset.duration)

            let layerInstruction = self.videoCompositionInstruction(videoTrack!, asset: videoAsset)
            if videoAsset != arrayVideos.last {
                // Hide this track once its clip has finished playing.
                layerInstruction.setOpacity(0.0, at: completeTrackDuration)
            }
            layerInstructionsArray.append(layerInstruction)
        }


        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.layerInstructions = layerInstructionsArray
        mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, completeTrackDuration)

        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [mainInstruction]
        mainComposition.frameDuration = CMTimeMake(1, 30)
        mainComposition.renderSize = CGSize(width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)

        let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
        let dateFormatter = DateFormatter()
        dateFormatter.dateStyle = .long
        dateFormatter.timeStyle = .short
        let date = dateFormatter.string(from: NSDate() as Date)
        let savePath = (documentDirectory as NSString).appendingPathComponent("mergeVideo-\(date).mp4") // Match the .mp4 output file type below.
        let url = NSURL(fileURLWithPath: savePath)

        let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
        exporter!.outputURL = url as URL
        exporter!.outputFileType = AVFileType.mp4
        exporter!.shouldOptimizeForNetworkUse = true
        exporter!.videoComposition = mainComposition
        exporter!.exportAsynchronously {
            DispatchQueue.main.async {
                callBack(exporter?.outputURL, exporter?.error)
            }

        }
    } 
}
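
This class only hands back the exported file URL; to also save it to the camera roll as in the original question, you can wrap the callback result in a PHPhotoLibrary change block, reusing the same Photos calls from the question's code. A minimal sketch, assuming photo-library permission has already been granted:

import Photos

MeargeVide.mergeVideoArray(arrayVideos: arrayAsset) { (urlMeargeVide, error) in
    guard let outputURL = urlMeargeVide, error == nil else {
        debugPrint("merge error", error ?? "")
        return
    }
    PHPhotoLibrary.shared().performChanges({
        // Create a new photo-library asset from the exported file.
        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
    }) { saved, saveError in
        debugPrint("saved to camera roll:", saved, saveError ?? "")
    }
}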



Answer 3:


You don't need atTimeM at all: completeTrackDuration is already marching along, and it is exactly where the next piece should be inserted. (Concretely, with three 4-second clips, lastAsset!.duration gives insertion points 0, 4, and 4, so the third clip lands on top of the second; completeTrackDuration gives 0, 4, and 8 as intended.) So replace

if videoAsset == arrayVideos.first {
    atTimeM = kCMTimeZero
} else {
    atTimeM = lastAsset!.duration
}
try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: atTimeM)

with

try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: completeTrackDuration)


Source: https://stackoverflow.com/questions/38972829/swift-merge-avasset-videos-array
