AVCapture capturing and getting framebuffer at 60 fps in iOS 7

前端 未结 3 942
無奈伤痛
無奈伤痛 2020-12-23 02:43

I'm developing an app which requires capturing the framebuffer at as many fps as possible. I've already figured out how to force the iPhone to capture at 60 fps but



        
3条回答
  •  长情又很酷
    2020-12-23 02:53

    I have developed the same function for Swift 2.0. I'm posting the code here for anyone who might need it:

    // Configures the back camera for the highest-resolution format whose
    // maximum frame rate does not exceed `maxFpsDesired`, attaches a
    // BGRA sample-buffer output, and shows a rotated full-screen preview.
    // (Swift 2.0 / AVFoundation API.)
    //
    // - Parameter maxFpsDesired: Upper bound on the frame rate to select
    //   (default 120). The best format at or below this rate wins.
    func setupCamera(maxFpsDesired: Double = 120) {
        let captureSession = AVCaptureSession()
        captureSession.sessionPreset = AVCaptureSessionPreset1920x1080

        // Bail out early if no video device is available (e.g. Simulator).
        guard let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) else {
            print("Error: no video capture device available")
            return
        }
        do {
            let input = try AVCaptureDeviceInput(device: backCamera)
            captureSession.addInput(input)
        } catch {
            print("Error: can't access camera")
            return
        }
        do {
            // Track the best matching format; nil until one qualifies.
            var finalFormat: AVCaptureDeviceFormat?
            var maxFps: Double = 0
            for vFormat in backCamera.formats {
                let ranges     = vFormat.videoSupportedFrameRateRanges as! [AVFrameRateRange]
                let frameRates = ranges[0]
                /*
                     "frameRates.maxFrameRate >= maxFps" selects the video format
                     with the highest resolution available at the desired fps,
                     because the camera formats are ordered; using
                     "frameRates.maxFrameRate > maxFps" instead would select the
                     first format available with the desired fps.
                */
                if frameRates.maxFrameRate >= maxFps && frameRates.maxFrameRate <= maxFpsDesired {
                    maxFps = frameRates.maxFrameRate
                    finalFormat = vFormat as? AVCaptureDeviceFormat
                }
            }
            if let finalFormat = finalFormat where maxFps != 0 {
                // Express the frame duration as 1/maxFps using a 1200 timescale
                // (1200 is divisible by the common rates 24/30/60/120).
                let timeValue = Int64(1200.0 / maxFps)
                let timeScale: Int32 = 1200   // CMTimeMake takes an Int32 timescale
                try backCamera.lockForConfiguration()
                backCamera.activeFormat = finalFormat
                backCamera.activeVideoMinFrameDuration = CMTimeMake(timeValue, timeScale)
                backCamera.activeVideoMaxFrameDuration = CMTimeMake(timeValue, timeScale)
                backCamera.focusMode = AVCaptureFocusMode.AutoFocus
                backCamera.unlockForConfiguration()
            }
        } catch {
            print("Error: could not lock camera for configuration")
        }

        // Deliver BGRA frames on a serial queue; drop late frames so the
        // delegate never falls behind the capture rate.
        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.alwaysDiscardsLateVideoFrames = true
        videoOutput.videoSettings = NSDictionary(object: Int(kCVPixelFormatType_32BGRA),
            forKey: kCVPixelBufferPixelFormatTypeKey as String) as [NSObject : AnyObject]
        videoOutput.setSampleBufferDelegate(self, queue: dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL))
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }

        // Configure the preview layer fully before adding it exactly once
        // (the original added it twice).
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.transform = CATransform3DMakeRotation(-1.5708, 0, 0, 1)
        previewLayer.frame = self.view.bounds
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        self.view.layer.addSublayer(previewLayer)
        captureSession.startRunning()
    }
    

提交回复
热议问题