AVFoundation下的视频分帧处理

Posted

tags:

篇首语:本文由小常识网(cha138.com)小编为大家整理,主要介绍了AVFoundation下的视频分帧处理相关的知识,希望对你有一定的参考价值。

//
//  ViewController.m
//  VideoFrame
//
//  Created by wiseman on 16/1/27.
//  Copyright (c) 2016年 wiseman. All rights reserved.
//

#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>

// Private class extension: collection-view plumbing for displaying extracted video frames.
@interface ViewController ()<UICollectionViewDataSource,UICollectionViewDelegate>
// Grid that shows one extracted video frame per cell.
@property (weak, nonatomic) IBOutlet UICollectionView *myCollectionView;
// Layout outlet; item size is configured in -viewDidLoad (3 square items per row).
@property (weak, nonatomic) IBOutlet UICollectionViewFlowLayout *myFlowLayout;

// Lazily-created store of extracted UIImage frames (see the -Arr getter).
// NOTE(review): property name should be lowerCamelCase (e.g. frameImages);
// renaming requires touching every use in the implementation as well.
@property(nonatomic,strong) NSMutableArray *Arr;

@end

@implementation ViewController

// Single definition of the reuse identifier (was duplicated as a literal in two methods).
static NSString * const kCellReuseID = @"mycell";

#pragma mark - Lifecycle

- (void)viewDidLoad {
    [super viewDidLoad];
    // Register the cell class so dequeueReusableCellWithReuseIdentifier:forIndexPath:
    // can always vend a cell (it never returns nil for a registered identifier).
    [self.myCollectionView registerClass:[UICollectionViewCell class] forCellWithReuseIdentifier:kCellReuseID];

    // Three square items per row, 16pt of total horizontal spacing.
    CGFloat side = (self.view.bounds.size.width - 16) / 3;
    self.myFlowLayout.itemSize = CGSizeMake(side, side);

    [self getVideoFrame];
}

#pragma mark - Array

// Lazy accessor: backing array is created on first use.
- (NSMutableArray *)Arr {
    if (!_Arr) {
        _Arr = [NSMutableArray array];
    }
    return _Arr;
}

#pragma mark - GetVideoFrame

/// Extracts still frames from the bundled "ddd.mp4" at roughly 24 frames per
/// second of video and reloads the collection view once every requested time
/// has been handled (success, failure, or cancellation).
- (void)getVideoFrame {
    // 1. Locate the bundled mp4. Guard against a missing resource: passing nil
    //    to +fileURLWithPath: raises an exception.
    NSString *path = [[NSBundle mainBundle] pathForResource:@"ddd" ofType:@"mp4"];
    if (!path) {
        NSLog(@"ddd.mp4 not found in the main bundle");
        return;
    }
    NSURL *pathURL = [NSURL fileURLWithPath:path];

    // 2. Load the asset and derive the frame schedule.
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:pathURL options:nil];
    // Duration in whole seconds = value / timescale.
    long videoSumTime = videoAsset.duration.value / videoAsset.duration.timescale;
    // Target 24 frames for every second of video.
    long eyeSumValue = videoSumTime * 24;
    if (eyeSumValue <= 0) {
        // Also prevents a divide-by-zero below for sub-second or zero-length assets.
        NSLog(@"Video too short to extract frames");
        return;
    }
    // Timescale units between two consecutive requested frames.
    long kuai = videoAsset.duration.value / eyeSumValue;

    // 3. Configure the generator. Zero tolerance forces exact-time frames
    //    (slower, but each requested time maps to a distinct frame).
    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:videoAsset];
    generator.maximumSize = self.view.frame.size;
    generator.appliesPreferredTrackTransform = YES;
    generator.requestedTimeToleranceBefore = kCMTimeZero;
    generator.requestedTimeToleranceAfter = kCMTimeZero;

    // 4. Build the list of requested times.
    NSMutableArray<NSValue *> *times = [NSMutableArray arrayWithCapacity:(NSUInteger)eyeSumValue];
    for (long i = 0; i < eyeSumValue; i++) {
        CMTime time = CMTimeMake(i * kuai, videoAsset.duration.timescale);
        [times addObject:[NSValue valueWithCMTime:time]];
    }

    // Per-call counter. (The original used a `static` local, which is shared
    // across instances and across repeated calls — a latent bug.)
    __block NSInteger finishedCount = 0;
    NSUInteger totalCount = times.count;

    // The handler runs on a private background queue; avoid retaining self
    // for the whole generation if the controller goes away first.
    __weak typeof(self) weakSelf = self;
    [generator generateCGImagesAsynchronouslyForTimes:times
                                    completionHandler:^(CMTime requestedTime, CGImageRef im, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error) {
        // Convert to UIImage here, while the CGImageRef is still valid;
        // +imageWithCGImage: retains the underlying image.
        UIImage *image = nil;
        if (result == AVAssetImageGeneratorSucceeded && im != NULL) {
            image = [UIImage imageWithCGImage:im];
        } else if (result == AVAssetImageGeneratorFailed) {
            NSLog(@"Failed with error: %@", [error localizedDescription]);
        } else if (result == AVAssetImageGeneratorCancelled) {
            NSLog(@"AVAssetImageGeneratorCancelled");
        }

        // All model mutation and UIKit work belongs on the main queue.
        // (The original mutated self.Arr and called -reloadData from the
        // generator's background queue.)
        dispatch_async(dispatch_get_main_queue(), ^{
            __strong typeof(weakSelf) strongSelf = weakSelf;
            if (!strongSelf) return;
            if (image) {
                [strongSelf.Arr addObject:image];
            }
            finishedCount++;
            NSLog(@"%ld", (long)finishedCount);
            // Reload exactly once, after the final requested time resolved —
            // counting failures/cancellations too, so a single bad frame no
            // longer leaves the collection view permanently empty.
            if (finishedCount == (NSInteger)totalCount) {
                [strongSelf.myCollectionView reloadData];
            }
        });
    }];
}

#pragma mark - UICollectionViewDataSource

- (NSInteger)collectionView:(UICollectionView *)collectionView numberOfItemsInSection:(NSInteger)section {
    return self.Arr.count;
}

- (UICollectionViewCell *)collectionView:(UICollectionView *)collectionView cellForItemAtIndexPath:(NSIndexPath *)indexPath {
    // Never nil for a registered identifier, so the original's `if (!cell)`
    // branch was dead code.
    UICollectionViewCell *cell = [collectionView dequeueReusableCellWithReuseIdentifier:kCellReuseID forIndexPath:indexPath];

    // Reuse one image view per cell (looked up by tag) instead of stacking a
    // brand-new UIImageView on every reuse, which leaked subviews.
    static const NSInteger kImageViewTag = 1001;
    UIImageView *imgView = (UIImageView *)[cell.contentView viewWithTag:kImageViewTag];
    if (!imgView) {
        imgView = [[UIImageView alloc] initWithFrame:cell.contentView.bounds];
        imgView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
        imgView.tag = kImageViewTag;
        [cell.contentView addSubview:imgView];
    }
    imgView.image = self.Arr[indexPath.item];

    return cell;
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end

 

以上是关于AVFoundation下的视频分帧处理的主要内容,如果未能解决你的问题,请参考以下文章

语音处理加窗分帧

AVFoundation视频流处理

AVFoundation学习笔记: 媒体捕捉读取及写入

视频格式、AVFoundation 和 UTI

AVFoundation学习笔记:视频播放相关

使用 OpenCV 和 AVFoundation 框架的 iPhone 实时图像处理?