iOS PHLivePhotoView: Generating, Displaying, and Saving Live Photos


Displaying and saving

Import the headers:

import PhotosUI

// Paths to an existing, matched still-image / video pair in the caches directory.
let imagePath = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true).first! + "/live.jpg"
let videoPath = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true).first! + "/live.mov"

Displaying:

// The image and video must carry matching content identifiers; the result
// handler may be called more than once (e.g. first with a degraded version).
PHLivePhoto.request(withResourceFileURLs: [URL(fileURLWithPath: videoPath), URL(fileURLWithPath: imagePath)], placeholderImage: nil, targetSize: .zero, contentMode: .default) { (livePhoto, info) in
    
    self.livePhotoView.livePhoto = livePhoto
    self.livePhotoView.startPlayback(with: PHLivePhotoViewPlaybackStyle.full)
}
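
The snippet above assumes an existing livePhotoView property. A minimal setup sketch (the frame and superview are placeholders):

let livePhotoView = PHLivePhotoView(frame: view.bounds)
livePhotoView.contentMode = .scaleAspectFit
view.addSubview(livePhotoView)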

Saving:

PHPhotoLibrary.shared().performChanges({
    
    // A Live Photo is one asset built from two paired resources: a photo and a video.
    let request = PHAssetCreationRequest.forAsset()
    request.addResource(with: .photo, fileURL: URL(fileURLWithPath: self.imagePath), options: nil)
    request.addResource(with: .pairedVideo, fileURL: URL(fileURLWithPath: self.videoPath), options: nil)
}) { (success, error) in
    
}
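
Saving requires photo library authorization (and the NSPhotoLibraryUsageDescription key in Info.plist); otherwise the completion handler receives an error. A minimal pre-check sketch:

PHPhotoLibrary.requestAuthorization { status in
    guard status == .authorized else {
        print("photo library access denied")
        return
    }
    // ... run the performChanges block above
}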

Generating a Live Photo from a video

Import the headers and define the output paths:

import PhotosUI
import MobileCoreServices

let DOCUMENTS = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first!
let GENERATE_TEMP_FILE = DOCUMENTS + "/tmp.mov"        // raw passthrough export
let GENERATE_IMAGE_FILE = DOCUMENTS + "/generated.jpg" // still image tagged with the identifier
let GENERATE_MOVIE_FILE = DOCUMENTS + "/generated.mov" // movie tagged with the identifier

The generation method:

func generateLivePhotoWithVideoAsset(_ asset: AVAsset, completion: (() -> ())?) {
    
    let alert = UIAlertController(title: "", message: "generating...", preferredStyle: .alert)
    present(alert, animated: true, completion: nil)
    
    var ret = false
    
    // genUUID is assumed to be a String property on the containing type; the same
    // identifier must be written into both the still image and the movie.
    genUUID = UUID().uuidString
    
    repeat {
        // Generate image.
        ret = generateStillImage(asset as! AVURLAsset)
        if ret == false {
            break
        }
        
        // Generate movie.
        removeFile(GENERATE_TEMP_FILE)
        removeFile(GENERATE_MOVIE_FILE)

        let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetPassthrough)
        // QuickTimeMov parses QuickTime atoms, so request .mov output explicitly.
        exportSession?.outputFileType = .mov
        exportSession?.outputURL = URL(fileURLWithPath: GENERATE_TEMP_FILE)

        // Block the calling thread until the export finishes.
        let semaphore = DispatchSemaphore(value: 0)
        exportSession?.exportAsynchronously(completionHandler: {
            if exportSession?.status == .completed {
                print("export session completed")
            } else {
                print("export session error: \(String(describing: exportSession?.error))")
                ret = false
            }
            semaphore.signal()
        })

        semaphore.wait()

        if ret == false {
            break
        }
        let qtmov = QuickTimeMov(path: GENERATE_TEMP_FILE)
        qtmov.write(GENERATE_MOVIE_FILE, assetIdentifier: genUUID)

        ret = FileManager.default.fileExists(atPath: GENERATE_MOVIE_FILE)

        if ret == false {
            break
        }

    } while (false)

    completion?()

    alert.dismiss(animated: true, completion: nil)
}

func generateStillImage(_ asset: AVURLAsset) -> Bool {
    
    let imageGen = AVAssetImageGenerator(asset: asset)
    imageGen.appliesPreferredTrackTransform = true
    imageGen.requestedTimeToleranceBefore = .zero
    imageGen.requestedTimeToleranceAfter = .zero
    
    let cutPoint = CMTimeMakeWithSeconds(0, preferredTimescale: Int32(NSEC_PER_SEC))
    
    do {
        let ref: CGImage = try imageGen.copyCGImage(at: cutPoint, actualTime: nil)
        
        // Apple maker-note key "17" carries the content identifier that pairs this photo with its video.
        let metadata = ["{MakerApple}" : ["17" : genUUID]]
        
        let imageData = NSMutableData()
        
        if let dest = CGImageDestinationCreateWithData(imageData as CFMutableData, kUTTypeJPEG, 1, nil) {
            
            CGImageDestinationAddImage(dest, ref, metadata as CFDictionary)
            CGImageDestinationFinalize(dest)
        }
        
        do {
            try imageData.write(to: URL(fileURLWithPath: GENERATE_IMAGE_FILE))
        } catch {
            return false
        }
    } catch {
        return false
    }
    return true
}
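
To sanity-check the generated JPEG, the identifier can be read back through ImageIO. A small verification sketch (readContentIdentifier is a hypothetical helper, applied to the file written by generateStillImage above):

import ImageIO

func readContentIdentifier(fromJPEGAt path: String) -> String? {
    guard let source = CGImageSourceCreateWithURL(URL(fileURLWithPath: path) as CFURL, nil),
          let props = CGImageSourceCopyPropertiesAtIndex(source, 0, nil) as? [String: Any],
          let makerApple = props[kCGImagePropertyMakerAppleDictionary as String] as? [String: Any] else {
        return nil
    }
    return makerApple["17"] as? String   // key 17 holds the content identifier
}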

func removeFile(_ path: String) {
    
    guard FileManager.default.fileExists(atPath: path) else {
        return
    }
    
    do {
        try FileManager.default.removeItem(atPath: path)
    } catch { }
}

Usage:

let asset = AVAsset(url: URL(fileURLWithPath: NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true).first! + "/IMG_0671.MOV"))

generateLivePhotoWithVideoAsset(asset) {
    
}
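
For example, to display the result with the code from the first section once generation completes:

generateLivePhotoWithVideoAsset(asset) {
    PHLivePhoto.request(withResourceFileURLs: [URL(fileURLWithPath: GENERATE_MOVIE_FILE),
                                               URL(fileURLWithPath: GENERATE_IMAGE_FILE)],
                        placeholderImage: nil,
                        targetSize: .zero,
                        contentMode: .default) { livePhoto, _ in
        self.livePhotoView.livePhoto = livePhoto
    }
}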

You also need to add a QuickTimeMov.swift file:

import Foundation
import AVFoundation

public class QuickTimeMov : NSObject {
    fileprivate let kKeyContentIdentifier =  "com.apple.quicktime.content.identifier"
    fileprivate let kKeyStillImageTime = "com.apple.quicktime.still-image-time"
    fileprivate let kKeySpaceQuickTimeMetadata = "mdta"
    fileprivate let path : String
    fileprivate let dummyTimeRange = CMTimeRangeMake(start: CMTimeMake(value: 0, timescale: 1000), duration: CMTimeMake(value: 200, timescale: 3000))
    
    fileprivate lazy var asset : AVURLAsset = {
        let url = URL(fileURLWithPath: self.path)
        return AVURLAsset(url: url)
    }()
    
    @objc init(path : String) {
        self.path = path
    }
    
    func readAssetIdentifier() -> String? {
        for item in metadata() {
            if item.key as? String == kKeyContentIdentifier &&
                item.keySpace!.rawValue == kKeySpaceQuickTimeMetadata {
                return item.value as? String
            }
        }
        return nil
    }
    
    func readStillImageTime() -> NSNumber? {
        if let track = track(AVMediaType.metadata) {
            let (reader, output) = try! self.reader(track, settings: nil)
            reader.startReading()
            
            while true {
                guard let buffer = output.copyNextSampleBuffer() else { return nil }
                if CMSampleBufferGetNumSamples(buffer) != 0 {
                    let group = AVTimedMetadataGroup(sampleBuffer: buffer)
                    for item in group?.items ?? [] {
                        if item.key as? String == kKeyStillImageTime &&
                            item.keySpace!.rawValue == kKeySpaceQuickTimeMetadata {
                            return item.numberValue
                        }
                    }
                }
            }
        }
        return nil
    }
    
    @objc func write(_ dest : String, assetIdentifier : String) {
        do {
            // --------------------------------------------------
            // reader for source video
            // --------------------------------------------------
            guard let track = self.track(AVMediaType.video) else {
                print("not found video track")
                return
            }
            let (reader, output) = try self.reader(track,
                                                   settings: [kCVPixelBufferPixelFormatTypeKey as String:
                                                    NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)])
            
            // --------------------------------------------------
            // writer for mov
            // --------------------------------------------------
            let writer = try AVAssetWriter(outputURL: URL(fileURLWithPath: dest), fileType: AVFileType.mov)
            writer.metadata = [metadataFor(assetIdentifier)]
            
            // video track
            let input = AVAssetWriterInput(mediaType: AVMediaType.video,
                                           outputSettings: videoSettings(track.naturalSize))
            input.expectsMediaDataInRealTime = true
            input.transform = track.preferredTransform
            writer.add(input)
            
            // metadata track
            let adapter = metadataAdapter()
            writer.add(adapter.assetWriterInput)
            
            // --------------------------------------------------
            // creating video
            // --------------------------------------------------
            writer.startWriting()
            reader.startReading()
            writer.startSession(atSourceTime: CMTime.zero)
            
            // write metadata track
            adapter.append(AVTimedMetadataGroup(items: [metadataForStillImageTime()],
                                                timeRange: dummyTimeRange))
            
            // write video track
            input.requestMediaDataWhenReady(on: DispatchQueue(label: "assetVideoWriterQueue", attributes: [])) {
                while(input.isReadyForMoreMediaData) {
                    if reader.status == .reading {
                        if let buffer = output.copyNextSampleBuffer() {
                            if !input.append(buffer) {
                                print("cannot write: \(String(describing: writer.error))")
                                reader.cancelReading()
                            }
                        }
                    } else {
                        input.markAsFinished()
                        writer.finishWriting() {
                            if let e = writer.error {
                                print("cannot write: \(e)")
                            } else {
                                print("finish writing.")
                            }
                        }
                    }
                }
            }
            while writer.status == .writing {
                RunLoop.current.run(until: Date(timeIntervalSinceNow: 0.5))
            }
            if let e = writer.error {
                print("cannot write: \(e)")
            }
        } catch {
            print("write failed: \(error)")
        }
    }
    
    fileprivate func metadata() -> [AVMetadataItem] {
        return asset.metadata(forFormat: AVMetadataFormat.quickTimeMetadata)
    }
    
    fileprivate func track(_ mediaType : AVMediaType) -> AVAssetTrack? {
        return asset.tracks(withMediaType: mediaType).first
    }
    
    fileprivate func reader(_ track : AVAssetTrack, settings: [String:AnyObject]?) throws -> (AVAssetReader, AVAssetReaderOutput) {
        let output = AVAssetReaderTrackOutput(track: track, outputSettings: settings)
        let reader = try AVAssetReader(asset: asset)
        reader.add(output)
        return (reader, output)
    }
    
    fileprivate func metadataAdapter() -> AVAssetWriterInputMetadataAdaptor {
        let spec : NSDictionary = [
            kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString:
            "\(kKeySpaceQuickTimeMetadata)/\(kKeyStillImageTime)",
            kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString:
            "com.apple.metadata.datatype.int8"]
        
        var desc : CMFormatDescription? = nil
        CMMetadataFormatDescriptionCreateWithMetadataSpecifications(allocator: kCFAllocatorDefault, metadataType: kCMMetadataFormatType_Boxed, metadataSpecifications: [spec] as CFArray, formatDescriptionOut: &desc)
        let input = AVAssetWriterInput(mediaType: AVMediaType.metadata,
                                       outputSettings: nil, sourceFormatHint: desc)
        return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input)
    }
    
    fileprivate func videoSettings(_ size : CGSize) -> [String:AnyObject] {
        return [
            AVVideoCodecKey: AVVideoCodecType.h264 as AnyObject,
            AVVideoWidthKey: size.width as AnyObject,
            AVVideoHeightKey: size.height as AnyObject
        ]
    }
    
    fileprivate func metadataFor(_ assetIdentifier: String) -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        item.key = kKeyContentIdentifier as NSCopying & NSObjectProtocol
        item.keySpace = AVMetadataKeySpace(rawValue: kKeySpaceQuickTimeMetadata)
        item.value = assetIdentifier as NSCopying & NSObjectProtocol
        item.dataType = "com.apple.metadata.datatype.UTF-8"
        return item
    }
    
    fileprivate func metadataForStillImageTime() -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        item.key = kKeyStillImageTime as NSCopying & NSObjectProtocol
        item.keySpace = AVMetadataKeySpace(rawValue: kKeySpaceQuickTimeMetadata)
        item.value = 0 as NSCopying & NSObjectProtocol
        item.dataType = "com.apple.metadata.datatype.int8"
        return item
    }
}
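
QuickTimeMov also exposes readAssetIdentifier() and readStillImageTime(), which make it easy to verify a generated movie:

let mov = QuickTimeMov(path: GENERATE_MOVIE_FILE)
print(mov.readAssetIdentifier() ?? "no content identifier")
print(mov.readStillImageTime()?.stringValue ?? "no still-image time")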
