How to take UIImage of AVCaptureVideoPreviewLayer instead of AVCapturePhotoOutput capture

难免孤独 2020-12-16 05:04

I want to "stream" the preview layer to my server; however, I only want specific frames to be sent. Basically, I want to take a snapshot of the AVCaptureVideoPreviewLayer as a UIImage, rather than capturing a full photo through AVCapturePhotoOutput.

3 Answers
  •  Happy的楠姐
    2020-12-16 05:43

    @ninjaproger's answer was great! This is simply a Swift 4 version of that answer, written down for future reference.

    import UIKit
    import AVFoundation
    
    var customPreviewLayer: AVCaptureVideoPreviewLayer?   // not used in the code below
    
    class ViewController: UIViewController {
        @IBOutlet weak var imageView: UIImageView!
        override func viewDidLoad() {
            super.viewDidLoad()
            // Assign the delegate before starting the session so the first frames are not dropped.
            CaptureManager.shared.delegate = self
            CaptureManager.shared.startSession()
        }
    }
    
    extension ViewController: CaptureManagerDelegate {
        // Frames are delivered on the main queue (see setSampleBufferDelegate below), so the UI can be updated directly.
        func processCapturedImage(image: UIImage) {
            self.imageView.image = image
        }
    }
    
    protocol CaptureManagerDelegate: AnyObject {
        func processCapturedImage(image: UIImage)
    }
    
    class CaptureManager: NSObject {
        internal static let shared = CaptureManager()
        weak var delegate: CaptureManagerDelegate?
        var session: AVCaptureSession?
    
        override init() {
            super.init()
            session = AVCaptureSession()

            // Set up the camera input (the app also needs an NSCameraUsageDescription entry in Info.plist).
            guard let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back),
                let input = try? AVCaptureDeviceInput(device: device),
                session?.canAddInput(input) == true else {
                    return
            }
            session?.addInput(input)

            // Set up a video data output so every preview frame arrives as a sample buffer.
            let output = AVCaptureVideoDataOutput()
            output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
            // Delivering frames on the main queue keeps the example simple; a dedicated serial queue is preferable under load.
            output.setSampleBufferDelegate(self, queue: DispatchQueue.main)
            if session?.canAddOutput(output) == true {
                session?.addOutput(output)
            }
        }
    
        func startSession() {
            // startRunning() blocks while the session starts up, so production code should call this off the main thread.
            session?.startRunning()
        }
    
        func stopSession() {
            session?.stopRunning()
        }
    
        func getImageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
            guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
                return nil
            }
            CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
            // Make sure the buffer is unlocked on every exit path, including the early returns below.
            defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }
            let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer)
            let width = CVPixelBufferGetWidth(pixelBuffer)
            let height = CVPixelBufferGetHeight(pixelBuffer)
            let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
            let colorSpace = CGColorSpaceCreateDeviceRGB()
            // 32BGRA frames map to a little-endian, premultiplied-first bitmap layout.
            let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue)
            guard let context = CGContext(data: baseAddress, width: width, height: height, bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo.rawValue) else {
                return nil
            }
            guard let cgImage = context.makeImage() else {
                return nil
            }
            // Rotate to .right so portrait frames from the back camera display upright.
            return UIImage(cgImage: cgImage, scale: 1, orientation: .right)
        }
    }
    
    extension CaptureManager: AVCaptureVideoDataOutputSampleBufferDelegate {
        // Called once per frame delivered by the video data output.
        func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
            guard let outputImage = getImageFromSampleBuffer(sampleBuffer: sampleBuffer) else {
                return
            }
            delegate?.processCapturedImage(image: outputImage)
        }
    }
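
    Since the question is about sending only specific frames to a server, here is a minimal sketch of how the delegate callback above could forward a single frame on demand. It builds on the CaptureManager and CaptureManagerDelegate from this answer; the shouldSendNextFrame flag, the sendFrameTapped button action, and the https://example.com/frames endpoint are illustrative placeholders, not part of the original answer.

    import UIKit

    class FrameSendingViewController: UIViewController, CaptureManagerDelegate {
        @IBOutlet weak var imageView: UIImageView!

        // Set from a button tap when the next preview frame should be uploaded;
        // cleared again after one frame has been sent.
        private var shouldSendNextFrame = false

        override func viewDidLoad() {
            super.viewDidLoad()
            CaptureManager.shared.delegate = self
            CaptureManager.shared.startSession()
        }

        @IBAction func sendFrameTapped(_ sender: Any) {
            shouldSendNextFrame = true
        }

        func processCapturedImage(image: UIImage) {
            imageView.image = image
            guard shouldSendNextFrame,
                let jpegData = image.jpegData(compressionQuality: 0.8) else { return }
            shouldSendNextFrame = false
            upload(jpegData)
        }

        // Placeholder upload; swap in whatever networking layer you actually use.
        private func upload(_ data: Data) {
            var request = URLRequest(url: URL(string: "https://example.com/frames")!)
            request.httpMethod = "POST"
            URLSession.shared.uploadTask(with: request, from: data).resume()
        }
    }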
    
