iOS AVFoundation reverse 视频倒序存储
2019-08-15 本文已影响0人
李田迎
AVFoundation reverse play 倒放实现
一.参考:
http://www.andyhin.com/post/5/reverse-video-avfoundation
二.几种实现方案思路
1.预览过程使用AVPlayer的倒放功能
设置AVPlayer的rate 为-1
检查AVPlayerItem的canPlayReverse是否是YES
由于我们采用的是GPUImage框架,在预览过程没有使用AVPlayer,此方案没有继续调研
2.使用AVAssetComposition把每帧位置翻转
速度快,不生成临时文件
必须每帧都是关键帧 否则严重卡顿 掉帧
无法精确控制insert单帧TimeRange
3.修改GPUImageMovie 倒序读取CVPixelBuffer (不可行)
基于AVPlayerItemVideoOutput copyPixelBufferForItemTime方法,尝试倒序copy.
实验发现该函数貌似不支持倒序读取,倒序读取前几秒返回正序的pixelBuffer,之后始终返回空.阅读函数文档也证明了这点.
4.使用AVAssetReader AVAssetWriter 读取出每个CMSampleBuffer反向写入文件 (可行)
播放非常流畅.
但需要生成临时文件,处理时间较长.
三.AVAssetReader AVAssetWriter倒序视频方案实现
AHSVVideoReverse.h
//
// AHSVVideoReverse.h
// AHVideoSDKFramework
//
// Created by 李田迎 on 2019/8/13.
// Copyright © 2019 Autohome. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
NS_ASSUME_NONNULL_BEGIN

@interface AHSVVideoReverse : NSObject

/// Writer output settings (has sensible defaults). `copy` guards against
/// callers handing in a mutable dictionary and mutating it afterwards.
@property (nonatomic, copy) NSDictionary *videoSettings;
/// Pixel format settings for frames decoded from the video track (has defaults).
@property (nonatomic, copy) NSDictionary *readerOutputSettings;

/**
 Generates a reversed copy of the given video.

 @param origAsset  The asset to reverse.
 @param outputPath Destination file path for the reversed video.
 @param handler    Completion callback. `error` is nil on success, hence the
                   explicit `_Nullable` (the file is inside an audited region).
 */
- (void)reverseVideo:(AVAsset *)origAsset
          outputPath:(NSString *)outputPath
       completeBlock:(void (^)(NSError * _Nullable error))handler;

@end

NS_ASSUME_NONNULL_END
AHSVVideoReverse.m
//
// AHSVVideoReverse.m
// AHVideoSDKFramework
//
// Created by 田迎 on 2019/8/13.
// Copyright © 2019. All rights reserved.
//
#import "AHSVVideoReverse.h"
#import "AVAsset+Addition.h"
#import "AHVideoRecordCustomConfig.h"
#define kClipMaxContainCount 10
@interface AHSVVideoReverse ()

@property (nonatomic, strong) AVAsset *origAsset;                   //!< Original source asset
@property (nonatomic, strong) AVAssetReader *assetReader;           //!< Reads samples from the asset
@property (nonatomic, strong) AVAssetWriter *assetWriter;           //!< Writes the reversed movie file
@property (nonatomic, strong) AVAssetWriterInput *videoWriterInput; //!< Video input appended to the writer
@property (nonatomic, strong) AVAssetReaderTrackOutput *videoTrackOutput; //!< Video track output
// The adaptor owns a CVPixelBufferPool for efficient appends of
// CVPixelBuffer + presentation timestamp pairs.
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *videoPixelBufferAdaptor;
@property (nonatomic, strong) NSURL *outputURL;          //!< Destination file URL
@property (nonatomic, strong) dispatch_queue_t inputQueue; //!< Serial queue for all read/write work
@property (nonatomic, assign) CGSize targetSize;         //!< Exported video size
@property (nonatomic, assign) float fps;                 //!< Source nominal frame rate
// `copy` (not `strong`): blocks must be copied off the stack; copy documents
// the ownership semantics. `error` is nil on success.
@property (nonatomic, copy) void (^completionHandler)(NSError * _Nullable);
// Internal bookkeeping
@property (nonatomic, strong) NSMutableArray *sampleTimeArray;    //!< Forward-order frame timestamps (pass 1)
@property (nonatomic, strong) NSMutableArray *clipTimeRangeArray; //!< Clip time ranges for chunked re-reads

@end
@implementation AHSVVideoReverse
#pragma mark -
#pragma mark LifeCycle Method
// Designated initializer; no extra setup — lazy getters build all state.
- (instancetype)init {
    self = [super init];
    return self;
}
// Intentionally empty: ARC releases all strong ivars automatically and this
// class registers no observers, timers, or KVO that would need teardown.
- (void)dealloc {
}
#pragma mark -
#pragma mark Public Method
// Public entry point: validates arguments, prepares the output file, then loads
// the asset's duration/tracks asynchronously before starting the reverse work
// on the serial input queue.
- (void)reverseVideo:(AVAsset *)origAsset
          outputPath:(NSString *)outputPath
       completeBlock:(void (^)(NSError *error))handler {
    self.completionHandler = handler;
    if (!origAsset) {
        [self callCompletionWithError:[NSError errorWithDomain:@"com.avvideo.videoReverse"
                                                          code:-100
                                                      userInfo:@{@"msg": @"参数origAsset 不能为空!"}]];
        return;
    }
    if (!origAsset.videoTrack) {
        [self callCompletionWithError:[NSError errorWithDomain:@"com.avvideo.videoReverse"
                                                          code:-101
                                                      userInfo:@{@"msg": @"origAsset中不含有视频轨道信息!"}]];
        return;
    }
    // nil-messaging makes `outputPath.length` 0 when outputPath is nil, so one
    // check covers both the nil and the empty-string case.
    if (outputPath.length == 0) {
        [self callCompletionWithError:[NSError errorWithDomain:@"com.avvideo.videoReverse"
                                                          code:-102
                                                      userInfo:@{@"msg": @"参数outputPath 不能为空!"}]];
        return;
    }
    self.outputURL = [NSURL fileURLWithPath:outputPath];
    // AVAssetWriter fails if the destination exists, so clear any stale file.
    // Log a failed removal instead of discarding the error silently.
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath]) {
        NSError *removeError = nil;
        if (![[NSFileManager defaultManager] removeItemAtPath:outputPath error:&removeError]) {
            NSLog(@"remove stale output file failed: %@", removeError);
        }
    }
    self.origAsset = origAsset;
    WEAKSELF;
    [self.origAsset loadValuesAsynchronouslyForKeys:@[@"duration", @"tracks"] completionHandler:^{
        dispatch_async(weakSelf.inputQueue, ^{
            [weakSelf startReverseProcess];
        });
    }];
}

// Invokes the stored completion block only if one was supplied —
// calling a nil block directly would crash.
- (void)callCompletionWithError:(NSError *)error {
    if (self.completionHandler) {
        self.completionHandler(error);
    }
}
#pragma mark -
#pragma mark Private Method
// Runs the two-pass reverse pipeline (invoked on the serial input queue).
- (void)startReverseProcess {
    // Drop any reader/writer state left over from a previous run.
    [self cancelRevese];

    self.targetSize = self.origAsset.videoTrackSize;
    self.fps = self.origAsset.videoTrack.nominalFrameRate;

    // Pass 1: record every frame timestamp and build the clip ranges that the
    // reversed frames will be re-stamped against.
    [self generateSampleTimesArray];
    // Pass 2: read clips back-to-front and write frames out in reverse order.
    [self processReadReverseWriter];
}
//生成每帧时间数组 用于获取倒序时每个CVPixelBuffer的精确时间戳 以及分段数组
// Pass 1: sequentially decode the whole track, recording each frame's
// presentation timestamp (used later to re-stamp reversed frames) and
// partitioning the timeline into clips of kClipMaxContainCount frames so
// pass 2 can re-read them via resetForReadingTimeRanges:.
- (void)generateSampleTimesArray {
    if ([self.assetReader canAddOutput:self.videoTrackOutput]) {
        [self.assetReader addOutput:self.videoTrackOutput];
    }
    // Check the BOOL result rather than assuming reading started.
    if (![self.assetReader startReading]) {
        NSLog(@"assetReader startReading failed: %@", self.assetReader.error);
    }

    CMSampleBufferRef sample;
    NSUInteger processIndex = 0;        // frame index within the current clip
    CMTime startTime = kCMTimeZero;     // first PTS of the clip being built
    CMTime endTime = kCMTimeZero;       // last PTS of a completed clip
    CMTime presentationTime = kCMTimeZero;

    while ((sample = [self.videoTrackOutput copyNextSampleBuffer])) {
        presentationTime = CMSampleBufferGetPresentationTimeStamp(sample);
        NSValue *presentationValue = [NSValue valueWithBytes:&presentationTime objCType:@encode(CMTime)];
        [self.sampleTimeArray addObject:presentationValue];
        CFRelease(sample);
        sample = NULL;

        if (processIndex == 0) {
            startTime = presentationTime;
            processIndex++;
        } else if (processIndex == kClipMaxContainCount - 1) {
            // Clip is full: store its range and start collecting the next one.
            endTime = presentationTime;
            CMTimeRange timeRange = CMTimeRangeMake(startTime, CMTimeSubtract(endTime, startTime));
            [self.clipTimeRangeArray addObject:[NSValue valueWithCMTimeRange:timeRange]];
            processIndex = 0;
            startTime = kCMTimeZero;
            endTime = kCMTimeZero;
        } else {
            processIndex++;
        }
    }

    // Flush a trailing partial clip (fewer than kClipMaxContainCount frames).
    // Detect it via processIndex rather than comparing startTime to kCMTimeZero:
    // the first frame of a track commonly has PTS 0, and the old time-based
    // check silently dropped the entire range of a short (<clip size) video
    // starting at t = 0.
    if (processIndex > 0) {
        endTime = presentationTime;
        // A single leftover frame would produce an empty range; widen it by one
        // frame duration so the reader can still return that frame.
        if (CMTIME_COMPARE_INLINE(endTime, ==, startTime) && processIndex == 1) {
            // NOTE(review): fps is a float and CMTimeMake truncates the
            // timescale to int32 — exact for integral frame rates only.
            startTime = CMTimeSubtract(startTime, CMTimeMake(1, self.fps));
        }
        CMTimeRange timeRange = CMTimeRangeMake(startTime, CMTimeSubtract(endTime, startTime));
        [self.clipTimeRangeArray addObject:[NSValue valueWithCMTimeRange:timeRange]];
    }
}
// Pass 2: walk the clip ranges back-to-front; within each clip, frames are
// decoded forward into a buffer, then appended to the writer in reverse order,
// re-stamped with the forward timestamps recorded in pass 1 so the output
// timeline stays monotonically increasing.
- (void)processReadReverseWriter {
    CMSampleBufferRef sampleBuffer;
    // Defensive: drain anything pass 1 may have left unread on the output.
    while ((sampleBuffer = [self.videoTrackOutput copyNextSampleBuffer])) {
        CFRelease(sampleBuffer);
    }

    if ([self.assetWriter canAddInput:self.videoWriterInput]) {
        [self.assetWriter addInput:self.videoWriterInput];
    }
    // The pixel-buffer adaptor must be created before startWriting.
    [self videoPixelBufferAdaptor];
    if (![self.assetWriter startWriting]) {
        NSLog(@"self.assetWriter error = %@", self.assetWriter.error);
    }
    [self.assetWriter startSessionAtSourceTime:kCMTimeZero];

    NSUInteger clipCount = self.clipTimeRangeArray.count;
    NSUInteger frameIndex = 0;  // index into sampleTimeArray (forward order)

    for (NSInteger i = clipCount - 1; i >= 0; i--) {
        NSValue *clipTimeRangeValue = [self.clipTimeRangeArray objectAtIndex:i];
        [self.videoTrackOutput resetForReadingTimeRanges:@[clipTimeRangeValue]];

        // Buffer every frame of this clip (bounded by kClipMaxContainCount).
        // The NSArray retains each buffer, so the immediate CFRelease just
        // balances the copyNextSampleBuffer +1.
        NSMutableArray *tempSampleArray = [[NSMutableArray alloc] init];
        while ((sampleBuffer = [self.videoTrackOutput copyNextSampleBuffer])) {
            [tempSampleArray addObject:(__bridge id)sampleBuffer];
            CFRelease(sampleBuffer);
        }

        // Append this clip's frames in reverse order.
        for (NSInteger j = 0; j < tempSampleArray.count; j++) {
            if (frameIndex >= self.sampleTimeArray.count) {
                // No timestamps left. frameIndex never decreases, so bail out
                // of the clip instead of uselessly continuing every iteration.
                break;
            }
            NSValue *timeValue = [self.sampleTimeArray objectAtIndex:frameIndex];
            CMTime frameTime = [timeValue CMTimeValue];
            CVPixelBufferRef pixelBuffer =
                CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)tempSampleArray[tempSampleArray.count - j - 1]);

            BOOL appendSuccess = NO;
            while (!appendSuccess) {
                // If the writer failed or was cancelled, retrying forever would
                // hang the serial queue — abandon this frame's append loop.
                if (self.assetWriter.status != AVAssetWriterStatusWriting) {
                    NSLog(@"assetWriter left writing state: %@", self.assetWriter.error);
                    break;
                }
                if (self.videoPixelBufferAdaptor.assetWriterInput.readyForMoreMediaData) {
                    appendSuccess = [self.videoPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                               withPresentationTime:frameTime];
                    if (!appendSuccess) {
                        NSLog(@"appendPixelBuffer error at time: %lld", frameTime.value);
                    }
                } else {
                    // Input not ready for more data: back off briefly, then poll.
                    [NSThread sleepForTimeInterval:0.05];
                }
            }
            frameIndex++;
        }
    }

    [self.videoWriterInput markAsFinished];
    WEAKSELF;
    [self.assetWriter finishWritingWithCompletionHandler:^{
        if (weakSelf.completionHandler) {
            // Surface a writer failure instead of unconditionally reporting
            // success; error is nil when the write completed normally.
            weakSelf.completionHandler(weakSelf.assetWriter.error);
        }
    }];
}
// Cancels any in-flight read/write session and resets all pipeline state.
// Assigning nil to an already-nil ivar is harmless, so no per-ivar guards;
// messaging a nil reader/writer returns status 0, which never matches the
// active-state checks, so the explicit non-nil tests were redundant too.
- (void)cancelRevese {
    if (!_inputQueue) {
        return;  // never started — nothing to tear down
    }
    if (_assetReader.status == AVAssetReaderStatusReading) {
        [_assetReader cancelReading];
    }
    if (_assetWriter.status == AVAssetWriterStatusWriting) {
        [_assetWriter cancelWriting];
    }
    _assetReader = nil;
    _assetWriter = nil;
    _videoTrackOutput = nil;
    _videoWriterInput = nil;
    _videoPixelBufferAdaptor = nil;
    _clipTimeRangeArray = nil;
    _sampleTimeArray = nil;
}
#pragma mark -
#pragma mark Get Method
// Lazily creates the AVAssetReader for the source asset.
- (AVAssetReader *)assetReader {
    if (!_assetReader) {
        NSError *error = nil;
        _assetReader = [[AVAssetReader alloc] initWithAsset:self.origAsset error:&error];
        // Cocoa convention: check the returned object, not the error pointer —
        // the error is only guaranteed meaningful when creation failed.
        if (!_assetReader) {
            NSLog(@"assetReader 创建失败!! %@", error);
        }
    }
    return _assetReader;
}
// Lazily creates the AVAssetWriter targeting the QuickTime output file.
- (AVAssetWriter *)assetWriter {
    if (!_assetWriter) {
        NSError *writerError = nil;
        _assetWriter = [AVAssetWriter assetWriterWithURL:self.outputURL
                                                fileType:AVFileTypeQuickTimeMovie
                                                   error:&writerError];
        // Check the returned object, not the error pointer (see assetReader).
        if (!_assetWriter) {
            NSLog(@"assetWriter 创建失败 %@", writerError);
        }
        // nil-messaging makes this a no-op if creation failed.
        _assetWriter.shouldOptimizeForNetworkUse = YES;
    }
    return _assetWriter;
}
// Lazily creates the track output used both for the timestamp scan (pass 1)
// and the per-clip random-access re-reads (pass 2).
- (AVAssetReaderTrackOutput *)videoTrackOutput {
    if (_videoTrackOutput) {
        return _videoTrackOutput;
    }
    AVAssetReaderTrackOutput *output =
        [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:self.origAsset.videoTrack
                                                   outputSettings:self.readerOutputSettings];
    // Required so resetForReadingTimeRanges: (out-of-order reads) is available.
    output.supportsRandomAccess = YES;
    // Frames are only read, never mutated, so the defensive copy is unnecessary.
    output.alwaysCopiesSampleData = NO;
    _videoTrackOutput = output;
    return _videoTrackOutput;
}
// Lazily creates the writer input. This is an offline export, so the input
// does not expect media data in real time.
- (AVAssetWriterInput *)videoWriterInput {
    if (_videoWriterInput) {
        return _videoWriterInput;
    }
    AVAssetWriterInput *input =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:self.videoSettings];
    input.expectsMediaDataInRealTime = NO;
    // Carry over the source orientation so the reversed video displays upright.
    input.transform = self.origAsset.videoTrack.preferredTransform;
    _videoWriterInput = input;
    return _videoWriterInput;
}
// Lazily creates the pixel-buffer adaptor bound to videoWriterInput; its
// internal CVPixelBufferPool makes appends efficient. Must be created before
// -[AVAssetWriter startWriting] is called (see processReadReverseWriter).
// NOTE(review): the attributes request 32BGRA, but the buffers actually
// appended come from the reader, which decodes 420YpCbCr8BiPlanarVideoRange
// (see readerOutputSettings). sourcePixelBufferAttributes only describes the
// adaptor's own pool — confirm the writer accepts the reader's format.
// The IOSurface OpenGLES keys are raw strings; verify they are still honored
// on current OS versions.
- (AVAssetWriterInputPixelBufferAdaptor *)videoPixelBufferAdaptor {
if (!_videoPixelBufferAdaptor) {
NSDictionary *pixelBufferAttributes = @{
(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
(id)kCVPixelBufferWidthKey: @(self.targetSize.width),
(id)kCVPixelBufferHeightKey: @(self.targetSize.height),
@"IOSurfaceOpenGLESTextureCompatibility": @YES,
@"IOSurfaceOpenGLESFBOCompatibility": @YES,
};
_videoPixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.videoWriterInput sourcePixelBufferAttributes:pixelBufferAttributes];
}
return _videoPixelBufferAdaptor;
}
// Default reader output settings: decode frames as bi-planar 4:2:0
// (video range). Uses a modern dictionary literal instead of the legacy
// dictionaryWithObjectsAndKeys: variadic API.
- (NSDictionary *)readerOutputSettings {
    if (!_readerOutputSettings) {
        _readerOutputSettings = @{
            (id)kCVPixelBufferPixelFormatTypeKey:
                @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
        };
    }
    return _readerOutputSettings;
}
// Serial queue on which every read/write step of the pipeline runs, acting as
// a lightweight lock around the reader/writer state.
- (dispatch_queue_t)inputQueue {
    if (_inputQueue == nil) {
        _inputQueue = dispatch_queue_create("com.ahvideo.reverseInputQueue", DISPATCH_QUEUE_SERIAL);
    }
    return _inputQueue;
}
// Default H.264 writer settings. Reads self.targetSize, which is populated by
// startReverseProcess before the writer input (and thus this getter) is used.
- (NSDictionary *)videoSettings {
    if (_videoSettings == nil) {
        NSDictionary *compressionProperties = @{
            AVVideoAverageBitRateKey: @(kDefaultVideoBitRate * 1000),
            AVVideoExpectedSourceFrameRateKey: @(kDefaultVideoFrameRate),
            AVVideoMaxKeyFrameIntervalKey: @(kDefaultVideoKeyFrameInterval),
            AVVideoProfileLevelKey: kDefaultVideoProfileLevel,
        };
        _videoSettings = @{
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoWidthKey: @(self.targetSize.width),
            AVVideoHeightKey: @(self.targetSize.height),
            AVVideoCompressionPropertiesKey: compressionProperties,
        };
    }
    return _videoSettings;
}
// Forward-order frame timestamps collected during pass 1.
- (NSMutableArray *)sampleTimeArray {
    if (_sampleTimeArray == nil) {
        _sampleTimeArray = [NSMutableArray arrayWithCapacity:100];
    }
    return _sampleTimeArray;
}
// Clip time ranges (boxed CMTimeRange values) built during pass 1 and replayed
// back-to-front in pass 2.
- (NSMutableArray *)clipTimeRangeArray {
    if (_clipTimeRangeArray == nil) {
        _clipTimeRangeArray = [NSMutableArray arrayWithCapacity:20];
    }
    return _clipTimeRangeArray;
}
#pragma mark -
#pragma mark Set Method
@end