How do I export UIImage array as a movie?

后端 未结 10 1994
臣服心动
臣服心动 2020-11-22 02:11

I have a serious problem: I have an NSArray with several UIImage objects. What I now want to do is to create a movie from those UIImage objects.

10条回答
  •  無奈伤痛
    2020-11-22 02:32

    Here's the Swift 3 version showing how to convert an array of images into a video

    import Foundation
    import AVFoundation
    import UIKit
    
    /// Invoked on the main queue with the URL of the finished movie file.
    typealias CXEMovieMakerCompletion = (URL) -> Void
    /// Maps one source object (a UIImage or a file URL, depending on the entry
    /// point used) to the frame image, or nil if no image can be produced.
    typealias CXEMovieMakerUIImageExtractor = (AnyObject) -> UIImage?
    
    
    public class ImagesToVideoUtils: NSObject {

        // Output file: Documents/exprotvideo.mp4, replaced on every init.
        // NOTE(review): the path ends in ".mp4" but the writer below is created
        // with AVFileTypeQuickTimeMovie (a QuickTime/.mov container) — the two
        // should probably be aligned.
        static let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        static let tempPath = paths[0] + "/exprotvideo.mp4"
        static let fileURL = URL(fileURLWithPath: tempPath)
    //    static let tempPath = NSTemporaryDirectory() + "/exprotvideo.mp4"
    //    static let fileURL = URL(fileURLWithPath: tempPath)


        var assetWriter:AVAssetWriter!
        var writeInput:AVAssetWriterInput!
        var bufferAdapter:AVAssetWriterInputPixelBufferAdaptor!
        var videoSettings:[String : Any]!
        // Duration of one frame; CMTimeMake(1, 5) means 5 frames per second.
        var frameTime:CMTime!

        var completionBlock: CXEMovieMakerCompletion?
        var movieMakerUIImageExtractor:CXEMovieMakerUIImageExtractor?


        /// Builds an output-settings dictionary for the asset writer input.
        /// - Parameters:
        ///   - codec: the AVVideoCodec* constant to encode with.
        ///     BUG FIX: the original accepted this parameter but always used
        ///     the hard-coded AVVideoCodecJPEG; the argument is now honored.
        ///   - width: output width in pixels; H.264 encoders expect a width
        ///     divisible by 16, hence the warning below.
        ///   - height: output height in pixels.
        public class func videoSettings(codec:String, width:Int, height:Int) -> [String: Any]{
            if width % 16 != 0 {
                print("warning: video settings width must be divisible by 16")
            }

            let videoSettings:[String: Any] = [AVVideoCodecKey: codec,
                                               AVVideoWidthKey: width,
                                               AVVideoHeightKey: height]

            return videoSettings
        }

        /// Prepares an AVAssetWriter that encodes 32ARGB pixel buffers into the
        /// file at `ImagesToVideoUtils.fileURL` using `videoSettings`.
        public init(videoSettings: [String: Any]) {
            super.init()

            // The writer cannot overwrite an existing file, so delete leftovers.
            if FileManager.default.fileExists(atPath: ImagesToVideoUtils.tempPath) {
                guard (try? FileManager.default.removeItem(atPath: ImagesToVideoUtils.tempPath)) != nil else {
                    // NOTE(review): returning here leaves assetWriter & friends
                    // nil, so any later use crashes on the implicitly unwrapped
                    // optionals. Consider a failable or throwing initializer.
                    print("remove path failed")
                    return
                }
            }

            self.assetWriter = try! AVAssetWriter(url: ImagesToVideoUtils.fileURL, fileType: AVFileTypeQuickTimeMovie)

            self.videoSettings = videoSettings
            self.writeInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
            assert(self.assetWriter.canAdd(self.writeInput), "add failed")
            self.assetWriter.add(self.writeInput)

            // The adaptor turns raw 32ARGB CVPixelBuffers into encoded frames.
            let bufferAttributes:[String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
            self.bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.writeInput, sourcePixelBufferAttributes: bufferAttributes)
            self.frameTime = CMTimeMake(1, 5) // 5 fps
        }

        /// Convenience: builds the movie from image file URLs.
        /// BUG FIX: the original used `try! Data(contentsOf:)`, which crashed on
        /// any unreadable URL; such frames are now skipped instead.
        func createMovieFrom(urls: [URL], withCompletion: @escaping CXEMovieMakerCompletion){
            self.createMovieFromSource(images: urls as [AnyObject], extractor:{(inputObject:AnyObject) -> UIImage? in
                guard let url = inputObject as? URL,
                      let data = try? Data(contentsOf: url) else { return nil }
                return UIImage(data: data)}, withCompletion: withCompletion)
        }

        /// Convenience: builds the movie from in-memory UIImages.
        func createMovieFrom(images: [UIImage], withCompletion: @escaping CXEMovieMakerCompletion){
            self.createMovieFromSource(images: images, extractor: {(inputObject:AnyObject) -> UIImage? in
                return inputObject as? UIImage}, withCompletion: withCompletion)
        }

        /// Core writing loop. Frame i is presented at time i * frameTime; when a
        /// frame cannot be extracted it is skipped, leaving a gap in the timeline.
        /// Calls `withCompletion` on the main queue once the file is finalized.
        func createMovieFromSource(images: [AnyObject], extractor: @escaping CXEMovieMakerUIImageExtractor, withCompletion: @escaping CXEMovieMakerCompletion){
            self.completionBlock = withCompletion

            self.assetWriter.startWriting()
            self.assetWriter.startSession(atSourceTime: kCMTimeZero)

            let mediaInputQueue = DispatchQueue(label: "mediaInputQueue")
            var i = 0
            let frameNumber = images.count

            self.writeInput.requestMediaDataWhenReady(on: mediaInputQueue){
                while i < frameNumber {
                    // Spin (as the original did) until the input accepts data.
                    guard self.writeInput.isReadyForMoreMediaData else { continue }

                    var sampleBuffer:CVPixelBuffer?
                    autoreleasepool{
                        // BUG FIX: the original force-unwrapped `img!.cgImage!`
                        // immediately after detecting that `img` was nil (the
                        // commented-out `continue` cannot work inside this
                        // closure), crashing on any bad frame. Bad frames are
                        // now skipped safely.
                        if let img = extractor(images[i]), let cgImage = img.cgImage {
                            sampleBuffer = self.newPixelBufferFrom(cgImage: cgImage)
                        } else {
                            print("Warning: could not extract one of the frames")
                        }
                    }

                    if let buffer = sampleBuffer {
                        if i == 0 {
                            self.bufferAdapter.append(buffer, withPresentationTime: kCMTimeZero)
                        } else {
                            // (i-1)/timescale + frameTime == i * frameTime,
                            // because frameTime.value is 1.
                            let lastTime = CMTimeMake(Int64(i - 1), self.frameTime.timescale)
                            let presentTime = CMTimeAdd(lastTime, self.frameTime)
                            self.bufferAdapter.append(buffer, withPresentationTime: presentTime)
                        }
                    }
                    i += 1
                }
                self.writeInput.markAsFinished()
                self.assetWriter.finishWriting {
                    DispatchQueue.main.sync {
                        self.completionBlock!(ImagesToVideoUtils.fileURL)
                    }
                }
            }
        }

        /// Renders `cgImage` into a newly allocated 32ARGB pixel buffer sized to
        /// the configured output width/height. Allocation failures are trapped
        /// by the asserts below.
        func newPixelBufferFrom(cgImage:CGImage) -> CVPixelBuffer?{
            let options:[String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
            var pxbuffer:CVPixelBuffer?
            let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
            let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int

            let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)
            assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

            // Draw straight into the buffer's backing memory via a CGContext.
            CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
            let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
            let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
            let context = CGContext(data: pxdata, width: frameWidth, height: frameHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
            assert(context != nil, "context is nil")

            // The image is drawn at its own size; it is NOT scaled to the frame.
            context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height))
            CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
            return pxbuffer
        }
    }
    

    I use it together with screen capturing to essentially create a video of the captured screen; here's the full story / complete example.

提交回复
热议问题