iOS - Reversing video file (.mov)


Here is my solution; maybe it can help you: https://github.com/KayWong/VideoReverse

Swift 5; credit to Andy Hin, as I based this on http://www.andyhin.com/post/5/reverse-video-avfoundation

    class func reverseVideo(inURL: URL, outURL: URL, queue: DispatchQueue, _ completionBlock: ((Bool)->Void)?) {
        let asset = AVAsset.init(url: inURL)
        guard
            let reader = try? AVAssetReader.init(asset: asset),
            let videoTrack = asset.tracks(withMediaType: .video).first
        else {
            assert(false)
            completionBlock?(false)
            return
        }

        let width = videoTrack.naturalSize.width
        let height = videoTrack.naturalSize.height

        let readerSettings: [String : Any] = [
            String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
        ]
        let readerOutput = AVAssetReaderTrackOutput.init(track: videoTrack, outputSettings: readerSettings)
        reader.add(readerOutput)
        reader.startReading()

        // Read every sample into memory so the frames can be replayed in reverse
        // order later. Note: this keeps the whole decoded track in RAM.
        var buffers = [CMSampleBuffer]()
        while let nextBuffer = readerOutput.copyNextSampleBuffer() {
            buffers.append(nextBuffer)
        }
        let status = reader.status
        reader.cancelReading()
        guard status == .completed, let firstBuffer = buffers.first else {
            assert(false)
            completionBlock?(false)
            return
        }
        let sessionStartTime = CMSampleBufferGetPresentationTimeStamp(firstBuffer)

        let writerSettings: [String:Any] = [
            AVVideoCodecKey : AVVideoCodecType.h264,
            AVVideoWidthKey : width,
            AVVideoHeightKey: height,
        ]
        let writerInput: AVAssetWriterInput
        if let formatDescription = videoTrack.formatDescriptions.last {
            writerInput = AVAssetWriterInput.init(mediaType: .video, outputSettings: writerSettings, sourceFormatHint: (formatDescription as! CMFormatDescription))
        } else {
            writerInput = AVAssetWriterInput.init(mediaType: .video, outputSettings: writerSettings)
        }
        writerInput.transform = videoTrack.preferredTransform
        writerInput.expectsMediaDataInRealTime = false

        guard
            let writer = try? AVAssetWriter.init(url: outURL, fileType: .mp4),
            writer.canAdd(writerInput)
        else {
            assert(false)
            completionBlock?(false)
            return
        }

        let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor.init(assetWriterInput: writerInput, sourcePixelBufferAttributes: nil)
        let group = DispatchGroup.init()

        group.enter()
        writer.add(writerInput)
        writer.startWriting()
        writer.startSession(atSourceTime: sessionStartTime)

        // requestMediaDataWhenReady may invoke its block several times;
        // currentSample remembers where the previous pass left off.
        var currentSample = 0
        writerInput.requestMediaDataWhenReady(on: queue) {
            for i in currentSample..<buffers.count {
                currentSample = i
                if !writerInput.isReadyForMoreMediaData {
                    return
                }
                // Keep the forward presentation timestamps, but take the image
                // buffers from the opposite end of the array to reverse playback.
                let presentationTime = CMSampleBufferGetPresentationTimeStamp(buffers[i])
                guard let imageBuffer = CMSampleBufferGetImageBuffer(buffers[buffers.count - i - 1]) else {
                    WLog("VideoWriter reverseVideo: warning, could not get imageBuffer from SampleBuffer...")
                    continue
                }
                if !pixelBufferAdaptor.append(imageBuffer, withPresentationTime: presentationTime) {
                    WLog("VideoWriter reverseVideo: warning, could not append imageBuffer...")
                }
            }

            // finish
            writerInput.markAsFinished()
            group.leave()
        }

        group.notify(queue: queue) {
            writer.finishWriting {
                if writer.status != .completed {
                    WLog("VideoWriter reverseVideo: error - \(String(describing: writer.error))")
                    completionBlock?(false)
                } else {
                    completionBlock?(true)
                }
            }
        }
    }
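
For reference, here is a minimal usage sketch of the method above. It assumes the method lives on a class called `VideoWriter` (as its log messages suggest) and uses placeholder file URLs and a placeholder queue. Note that `WLog` in the code above appears to be the author's own logging helper; `print` would work in its place.

    import AVFoundation

    // Placeholder URLs: AVAssetWriter fails if a file already exists at the output URL,
    // so remove any previous result first.
    let inputURL = FileManager.default.temporaryDirectory.appendingPathComponent("source.mov")
    let outputURL = FileManager.default.temporaryDirectory.appendingPathComponent("reversed.mp4")
    try? FileManager.default.removeItem(at: outputURL)

    let workQueue = DispatchQueue(label: "reverse.video.queue")
    VideoWriter.reverseVideo(inURL: inputURL, outURL: outputURL, queue: workQueue) { success in
        print(success ? "Reversed video written to \(outputURL.path)" : "Reversing failed")
    }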

You need to use the AVFoundation framework to achieve this task.

I have only done a 30-second video edit with AVAssetExportSession and AVMutableComposition; a minimal sketch of that approach follows.
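
For illustration, this sketch simply copies the first 30 seconds of a source video into an AVMutableComposition and exports it with AVAssetExportSession. The function name, preset, and time range are illustrative assumptions, not the exact code referred to above.

    import AVFoundation

    // Sketch: copy the first 30 seconds of an asset into a composition and export it.
    func exportFirst30Seconds(of sourceURL: URL, to outputURL: URL,
                              completion: @escaping (Bool) -> Void) {
        let asset = AVAsset(url: sourceURL)
        let composition = AVMutableComposition()
        guard
            let sourceTrack = asset.tracks(withMediaType: .video).first,
            let compositionTrack = composition.addMutableTrack(withMediaType: .video,
                                                               preferredTrackID: kCMPersistentTrackID_Invalid)
        else {
            completion(false)
            return
        }

        // Take at most the first 30 seconds of the source track.
        let duration = CMTimeMinimum(asset.duration, CMTime(seconds: 30, preferredTimescale: 600))
        do {
            try compositionTrack.insertTimeRange(CMTimeRange(start: .zero, duration: duration),
                                                 of: sourceTrack, at: .zero)
        } catch {
            completion(false)
            return
        }
        compositionTrack.preferredTransform = sourceTrack.preferredTransform

        // AVAssetExportSession does the heavy lifting; note that exporting can use a lot of memory.
        guard let exporter = AVAssetExportSession(asset: composition,
                                                  presetName: AVAssetExportPresetHighestQuality) else {
            completion(false)
            return
        }
        exporter.outputURL = outputURL
        exporter.outputFileType = .mp4
        exporter.exportAsynchronously {
            completion(exporter.status == .completed)
        }
    }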

This is the link you should refer to; it is very helpful:

http://www.subfurther.com/blog/category/avfoundation/

It would also help to look at the WWDC conference PDF on editing media.

The full set of sessions is here: https://developer.apple.com/videos/wwdc/2010/ and it covers Editing Media with AVFoundation.

Regarding memory: exporting also consumes a considerable amount of memory.
