iOS AVFoundation - Converting video into images at 60 fps

Asked by 挽巷 on 2020-12-16 07:22

I'm trying to convert a whole video into a sequence of images at a rate of 60 fps, which means 60 images generated per second of video...

To do so, I'm making use of...

1 Answer
  • 2020-12-16 07:59

    Hey @Sooriah Joel, try the following code. It works fine for me.
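
    The code below uses cmTimeArray, framesArray, imageIndex and self.generator without showing their declarations. A minimal sketch of what it appears to assume (the class name and exact declarations are illustrative, not from the original answer):

    #import <AVFoundation/AVFoundation.h>
    #import <UIKit/UIKit.h>

    // Hypothetical owner of the methods below.
    @interface ThumbnailGenerator : NSObject {
        NSMutableArray *cmTimeArray;  // NSValue-wrapped CMTimes to request
        NSMutableArray *framesArray;  // extracted UIImage frames
        int imageIndex;               // index of the most recently delivered frame
    }
    @property (nonatomic, strong) AVAssetImageGenerator *generator;
    @end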

    - (void)generateCMTimesArrayOfAllFramesUsingAsset:(AVURLAsset *)asset
    {
        if (cmTimeArray.count>0) {
            [cmTimeArray removeAllObjects];
        }
        // Generate a CMTime for every tick of the asset's timescale.
        // Note: this produces asset.duration.value entries (e.g. 600 per second
        // for a timescale of 600), not one entry per actual video frame.
        for (int64_t t = 0; t < asset.duration.value; t++) {
            CMTime thumbTime = CMTimeMake(t, asset.duration.timescale);
            NSValue *v = [NSValue valueWithCMTime:thumbTime];
            [cmTimeArray addObject:v];
        }
        NSLog(@"Array of times %@ count = %lu", cmTimeArray, (unsigned long)cmTimeArray.count);
    }
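
    If you want one CMTime per actual video frame instead of one per timescale tick, a possible variant is sketched below; the method name and the use of the track's nominalFrameRate are my assumptions, not something the code above relies on:

    - (void)generateCMTimesArrayOfRealFramesUsingAsset:(AVURLAsset *)asset
    {
        [cmTimeArray removeAllObjects];
        AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        float fps = videoTrack.nominalFrameRate;           // e.g. 30 or 60
        Float64 seconds = CMTimeGetSeconds(asset.duration);
        int64_t frameCount = (int64_t)(seconds * fps);
        for (int64_t f = 0; f < frameCount; f++) {
            // Frame f starts at f / fps seconds; 600 is a common preferred timescale.
            CMTime frameTime = CMTimeMakeWithSeconds(f / (Float64)fps, 600);
            [cmTimeArray addObject:[NSValue valueWithCMTime:frameTime]];
        }
    }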
    
    
    - (void)generateCMTimesArrayOfFrames:(int)framesInterval UsingAsset:(AVURLAsset *)asset
    {
        int videoDuration = ceilf(((float)asset.duration.value/asset.duration.timescale));
        NSLog(@"Video duration %lld seconds timescale = %d",asset.duration.value,asset.duration.timescale);
        if (cmTimeArray.count>0) {
            [cmTimeArray removeAllObjects];
        }
        // Generate framesInterval CMTimes for every second of the video.
        for (int i = 0; i < videoDuration; i++)
        {
            int64_t tempInt = i;
            CMTime tempCMTime = CMTimeMake(tempInt, 1);
            int32_t interval = framesInterval;
            for (int j = 1; j < framesInterval + 1; j++)
            {
                CMTime newCMtime = CMTimeMake(j, interval);
                CMTime addition = CMTimeAdd(tempCMTime, newCMtime);
                [cmTimeArray addObject:[NSValue valueWithCMTime:addition]];
            }
        }
        NSLog(@"Array of times %@ count = %lu", cmTimeArray, (unsigned long)cmTimeArray.count);
    }
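
    For the 60 fps case from the question you would call this with 60: the times produced for second i are i + j/60 for j = 1…60, so each second of video contributes exactly 60 requested times (the first lands 1/60 s after the second boundary, the last on the next whole second). A quick illustrative check, assuming the ivars sketched earlier:

    [self generateCMTimesArrayOfFrames:60 UsingAsset:videoAsset];
    CMTime first = [cmTimeArray[0] CMTimeValue];
    NSLog(@"first requested time = %f s (expected 1/60 = %f)",
          CMTimeGetSeconds(first), 1.0 / 60.0);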
    
    
    - (void)generateThumbnailsFromVideoURL:(AVURLAsset *)videoAsset
    {
        // Generate the CMTimes array of required frames.
        // 1. Every timescale tick:
        //[self generateCMTimesArrayOfAllFramesUsingAsset:videoAsset];

        // 2. A fixed number of frames per second (pass 60 for the question's 60 fps case):
        [self generateCMTimesArrayOfFrames:30 UsingAsset:videoAsset];

        __block int i = 0;
        AVAssetImageGeneratorCompletionHandler handler = ^(CMTime requestedTime, CGImageRef im, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error) {
            if (result == AVAssetImageGeneratorSucceeded) {
                [framesArray addObject:[UIImage imageWithCGImage:im]];
            }
            if (result == AVAssetImageGeneratorFailed) {
                NSLog(@"Failed with error: %@ code %ld", [error localizedDescription], (long)error.code);
            }
            if (result == AVAssetImageGeneratorCancelled) {
                NSLog(@"Canceled");
            }

            // The handler is called once per requested time, on a background queue.
            i++;
            imageIndex = i;

            if (i == cmTimeArray.count) {
                //Thumbnail generation completed
            }
        };
    
        // Launching the process...
        self.generator = [[AVAssetImageGenerator alloc] initWithAsset:videoAsset];
        self.generator.apertureMode = AVAssetImageGeneratorApertureModeCleanAperture;
        self.generator.appliesPreferredTrackTransform = YES;
        self.generator.requestedTimeToleranceBefore = kCMTimeZero;
        self.generator.requestedTimeToleranceAfter = kCMTimeZero;
        self.generator.maximumSize = CGSizeMake(40, 40);
        [self.generator generateCGImagesAsynchronouslyForTimes:cmTimeArray completionHandler:handler];
    }
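
    A possible way to drive this from your own code (the video URL and setup here are illustrative assumptions, not part of the original answer):

    NSURL *videoURL = [[NSBundle mainBundle] URLForResource:@"sample" withExtension:@"mov"]; // any local video URL
    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
    cmTimeArray = [NSMutableArray array];
    framesArray = [NSMutableArray array];
    [self generateThumbnailsFromVideoURL:videoAsset];

    Keep in mind that 60 images per second of video adds up quickly even at 40×40, so for long clips it may be better to write each CGImage to disk inside the completion handler rather than keep every UIImage in framesArray.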
    