Method to find the device's camera resolution on iOS

北荒 2020-12-04 11:20

What's the best method to find the image resolution that will be captured when using the AVCaptureSessionPresetPhoto setting?
I am trying to find the resolution before actually capturing the image.

5 Answers
  •  忘掉有多难
    2020-12-04 12:11

    This can be done in the following way:

    /* Callback that is called when you activate the capture session. */
    - (void) avCaptureInputPortFormatDescriptionDidChangeNotification:(NSNotification *)notification {
            CMFormatDescriptionRef formatDescription = nil;
            NSArray *ports = [deviceInput ports];
            AVCaptureInputPort *usePort = nil;
    
            for (AVCaptureInputPort *port in ports) {
                    /* Prefer the video port; fall back to the first port found. */
                    if ([port.mediaType isEqualToString:AVMediaTypeVideo]) {
                            usePort = port;
                            break;
                    }
                    if (usePort == nil)
                            usePort = port;
            }
    
            if (usePort != nil)
                    formatDescription = usePort.formatDescription;
            if (formatDescription != nil) {
                    /*------------>>>>>>> THIS IS YOUR RESOLUTION */
                    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
                    self.frameSize = CGSizeMake(dimensions.width, dimensions.height);
                    [[NSNotificationCenter defaultCenter] removeObserver:self];
                    NSLog(@"Capturing with %dx%d", (int)self.frameSize.width, (int)self.frameSize.height);
            } else {
                    NSLog(@"Failed to detect resolution, using default one.");
                    self.frameSize = CGSizeMake(640.0f, 480.0f);
            }
    }
    
    - (id) init {
            if ((self = [super init])) {
                    /* Default framesize. */
                    self.frameSize = CGSizeMake(640.0f, 480.0f);
    
                    AVCaptureSession *capSession = [[AVCaptureSession alloc] init];
                    /* -------->>>>>>> REQUESTED RESOLUTION, CAN BE SOMETHING ELSE */
                    [capSession setSessionPreset:AVCaptureSessionPresetHigh];
    
    //                if (([modelName rangeOfString:@"iPhone 5"].length != 0) || ([modelName rangeOfString:@"iPhone 6"].length != 0)) {
    //                        [capSession setSessionPreset:AVCaptureSessionPresetHigh];
    //                } else {
    //                        [capSession setSessionPreset:AVCaptureSessionPreset640x480];
    //                }
    //                
                    self.captureSession = capSession;
    
                    [[NSNotificationCenter defaultCenter] addObserver:self
                                                             selector:@selector(avCaptureInputPortFormatDescriptionDidChangeNotification:)
                                                                 name:AVCaptureInputPortFormatDescriptionDidChangeNotification object:nil];
            }
            return self;
    }
    
    - (void) dealloc
    {
            [[NSNotificationCenter defaultCenter] removeObserver:self];
            if (self.captureSession.isRunning)
                    [self.captureSession stopRunning];
            self.captureSession = nil;
    }
    
    - (void) addRawViewOutput
    {
        /* We setup the output */
        AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    
            /* While a frame is being processed in the -captureOutput:didOutputSampleBuffer:fromConnection:
               delegate method, no other frames are added to the queue.
               If you don't want this behaviour, set the property to NO. */
        captureOutput.alwaysDiscardsLateVideoFrames = YES;
    
            /* We specify a minimum duration for each frame (play with this setting to avoid having too many
               frames waiting in the queue, which can cause memory issues). It is the inverse of the maximum
               frame rate. Here we set a minimum frame duration of 1/10 second, i.e. a maximum frame rate of
               10 fps, meaning we cannot process more than 10 frames per second. */
        //captureOutput.minFrameDuration = CMTimeMake(1, 10);
    
        /*We create a serial queue to handle the processing of our frames*/
        dispatch_queue_t queue;
        queue = dispatch_queue_create("com.YourApp.cameraQueue", NULL);
        [captureOutput setSampleBufferDelegate:self queue:queue];
    //  dispatch_release(queue);
    
        // Set the video output to store frames in BGRA (it is supposed to be faster)
        NSString *key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
        NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
        NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
        [captureOutput setVideoSettings:videoSettings];
    
            // Register an output
        [self.captureSession addOutput:captureOutput];
    }
    
    - (BOOL) startWithDevicePosition:(AVCaptureDevicePosition)devicePosition
    {
            AVCaptureDevice *videoDevice = [self cameraWithPosition:devicePosition];
    
            if (!videoDevice)
                    return NO;
    
            NSError *error;
    
            AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
            self.deviceInput = videoIn;
    
            if (!error)
            {
                    if ([[self captureSession] canAddInput:videoIn])
                    {
                            [[self captureSession] addInput:videoIn];
                    }
                    else
                    {
                            NSLog(@"Couldn't add video input");
                            return NO;
                    }
            }
            else
            {
                    NSLog(@"Couldn't create video input");
                    return NO;
            }
    
            [self addRawViewOutput];
            [self resume];
            return YES;
    }
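
    /* The -cameraWithPosition: helper used in -startWithDevicePosition: is not shown above.
       A minimal sketch, assuming a simple lookup of the device by position; it uses
       +[AVCaptureDevice devicesWithMediaType:], which matches the era of this code
       (deprecated in iOS 10 in favour of AVCaptureDeviceDiscoverySession). */
    - (AVCaptureDevice *) cameraWithPosition:(AVCaptureDevicePosition)position
    {
            for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
                    if (device.position == position)
                            return device;
            }
            return nil;
    }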
    
    -(void) pause
    {
            self.paused = YES;
            if (self.captureSession.isRunning)
                    [self.captureSession stopRunning];
    }
    
    -(void) resume
    {
            if (!self.captureSession.isRunning)
                    [self.captureSession startRunning];
            self.paused = NO;
    }
    
    #pragma mark -
    #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
    - (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection
    {
            CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    
            /* Lock the image buffer */
            CVPixelBufferLockBaseAddress(imageBuffer,0);
    
            /* Get information about the image */
            uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
            size_t width = CVPixelBufferGetWidth(imageBuffer);
            size_t height = CVPixelBufferGetHeight(imageBuffer);
            size_t stride = CVPixelBufferGetBytesPerRow(imageBuffer);
            size_t size = CVPixelBufferGetDataSize(imageBuffer);
    
            /* BGRAVideoFrame is a project-specific struct (not part of AVFoundation); it is assumed
               to hold the width, height, stride, size and a pointer to the pixel data. */
            BGRAVideoFrame frame = {(int)width, (int)height, (int)stride, (int)size, baseAddress};
            [self.delegate frameReady:frame];
    
            /* We unlock the image buffer */
            CVPixelBufferUnlockBaseAddress(imageBuffer,0);
    }
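
    If you only need the dimensions of the format the session ends up using, they can also
    be read directly from the capture device once the session is configured, instead of (or
    in addition to) the notification-driven approach above. A minimal sketch, assuming
    videoDevice is the AVCaptureDevice that was added to the session:

    /* activeFormat (iOS 7+) describes the format currently in use, so its dimensions
       reflect the resolution the chosen session preset resolved to. Read it after the
       preset has been applied and the session is running. */
    CMVideoDimensions dims =
            CMVideoFormatDescriptionGetDimensions(videoDevice.activeFormat.formatDescription);
    NSLog(@"Active capture format is %dx%d", dims.width, dims.height);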
    
