iOS: Simple AVFoundation Video Processing

2017-03-31 · 西山薄凉

I've been buried in overtime lately (TAT), so there's no time to write up the reasoning yet — I'm posting the working code first 😂 and will flesh this out when I get a chance.


My company is building a live-streaming feature: a voice broadcast with guest call-ins, plus PPT courseware containing both images and videos. We need to record the host's actions on the slides together with the host's and guests' audio, and once the broadcast ends, compose the audio and the courseware into a single video. The audio/video requirements are summarized below.

Requirement breakdown:

  1. Static image to video
  2. Image with watermark to video
  3. Video trimming
  4. Changing video resolution
  5. Video concatenation
  6. Merging audio and video tracks

The implementations below are ready to Ctrl+C/Ctrl+V straight into a project — a gift for the copy-paste crowd.
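Before the individual snippets, here is a rough, hypothetical sketch of how the pieces might chain together for the use case above: render a slide to a clip, splice it with another clip, then mix in the recorded voice track. The LTVideoTools class matches the code below; the asset names and file URLs are placeholder assumptions.

NSURL *docs = [[[NSFileManager defaultManager] URLsForDirectory:NSDocumentDirectory
                                                      inDomains:NSUserDomainMask] firstObject];
UIImage *slideImage  = [UIImage imageNamed:@"slide_01"];                  // placeholder asset name
NSURL *cameraClipURL = [docs URLByAppendingPathComponent:@"cam.mp4"];     // placeholder: pre-recorded clip
NSURL *voiceURL      = [docs URLByAppendingPathComponent:@"voice.m4a"];   // placeholder: recorded audio
NSURL *slideClipURL  = [docs URLByAppendingPathComponent:@"slide.mp4"];
NSURL *mergedURL     = [docs URLByAppendingPathComponent:@"merged.mp4"];
NSURL *finalURL      = [docs URLByAppendingPathComponent:@"final.mp4"];

// 1. Slide image -> short video clip.
[LTVideoTools writeImageAsMovie:slideImage
                      watermark:nil
                         toPath:slideClipURL.path
                           size:CGSizeMake(1280, 720)
                       duration:5
                            fps:24
              withCallbackBlock:^(BOOL success) {
    if (!success) return;
    // 2. Splice the slide clip with the recorded camera clip.
    [LTVideoTools mergeVideoWithAssetURLs:@[slideClipURL, cameraClipURL]
                                outputURL:mergedURL
                              doneHandler:^(NSURL *outputURL, NSError *error) {
        if (error) return;
        // 3. Mix the recorded voice track into the spliced video.
        [LTVideoTools mergeVideoAssetAndAudioAssetWithVideoAssetURL:outputURL
                                                      audioAssetURL:voiceURL
                                                          outputURL:finalURL
                                                        doneHandler:^(NSURL *outputURL, NSError *error) {
            NSLog(@"pipeline finished: %@ (error: %@)", outputURL, error);
        }];
    }];
}];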

Static image to video (with watermark)

+ (void)writeImageAsMovie:(UIImage *)image
                watermark:(UIImage *)watermark
                   toPath:(NSString *)path
                     size:(CGSize)size
                 duration:(double)duration
                      fps:(int)fps
        withCallbackBlock:(void (^)(BOOL success))callbackBlock
{
    [[NSFileManager defaultManager] removeItemAtPath:path error:NULL];
    
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeMPEG4
                                                              error:&error];
    if (error) {
        if (callbackBlock) {
            callbackBlock(NO);
        }
        return;
    }
    NSParameterAssert(videoWriter);
    
    NSDictionary *videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
                                    AVVideoWidthKey: @((int)size.width),
                                    AVVideoHeightKey: @((int)size.height)};
    
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                         outputSettings:videoSettings];
    
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];
    
    // Start the session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    
    // Render the image (plus optional watermark) once, then append the same
    // frame at t = 0 and t = duration; two frames are all a still clip needs.
    CVPixelBufferRef buffer = [LTVideoTools pixelBufferFromCGImage:[image CGImage]
                                                         watermark:[watermark CGImage]
                                                              size:size];
    
    BOOL appendSuccess = [LTVideoTools appendToAdapter:adaptor
                                           pixelBuffer:buffer
                                                atTime:CMTimeMake(0, fps)
                                             withInput:writerInput];
    NSAssert(appendSuccess, @"Failed to append first frame");
    
    CMTime endTime = CMTimeMakeWithSeconds(duration, fps);
    BOOL appendSuccess2 = [LTVideoTools appendToAdapter:adaptor
                                            pixelBuffer:buffer
                                                 atTime:endTime
                                              withInput:writerInput];
    NSAssert(appendSuccess2, @"Failed to append last frame");
    
    // pixelBufferFromCGImage returns a +1 retained buffer; release it once appended.
    CVPixelBufferRelease(buffer);
    
    // Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWritingWithCompletionHandler:^{
        if (callbackBlock) {
            callbackBlock(videoWriter.status == AVAssetWriterStatusCompleted);
        }
    }];
}

// Renders the image (and optional watermark) into a new pixel buffer.
// Returns a +1 retained CVPixelBufferRef; the caller must CVPixelBufferRelease() it.
+ (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
                                 watermark:(CGImageRef)watermark
                                      size:(CGSize)imageSize
{
    NSDictionary *options = @{(id)kCVPixelBufferCGImageCompatibilityKey: @YES,
                              (id)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES};
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, imageSize.width,
                                          imageSize.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    
    // Use the buffer's actual bytes-per-row: CoreVideo may pad each row,
    // so hard-coding 4 * width can skew the rendered image.
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, imageSize.width,
                                                 imageSize.height, 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    
    // Draw the image centered in the target frame.
    CGContextDrawImage(context, CGRectMake((imageSize.width - CGImageGetWidth(image)) / 2,
                                           (imageSize.height - CGImageGetHeight(image)) / 2,
                                           CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    // Draw the watermark centered on top of the image, if one was supplied.
    if (watermark) {
        CGContextDrawImage(context, CGRectMake((imageSize.width - CGImageGetWidth(watermark)) / 2,
                                               (imageSize.height - CGImageGetHeight(watermark)) / 2,
                                               CGImageGetWidth(watermark),
                                               CGImageGetHeight(watermark)), watermark);
    }
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    
    return pxbuffer;
}

+ (BOOL)appendToAdapter:(AVAssetWriterInputPixelBufferAdaptor *)adaptor
            pixelBuffer:(CVPixelBufferRef)buffer
                 atTime:(CMTime)presentTime
              withInput:(AVAssetWriterInput *)writerInput
{
    // Wait until the writer input can accept more data; sleep ~10 ms per
    // iteration instead of busy-spinning.
    while (!writerInput.readyForMoreMediaData) {
        usleep(10000);
    }
    
    return [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
}
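A minimal call covering requirements 1 and 2, assuming slide_01 and watermark_logo are images in the app bundle (placeholder names; pass nil as the watermark to skip it):

UIImage *slide = [UIImage imageNamed:@"slide_01"];       // placeholder asset name
UIImage *logo  = [UIImage imageNamed:@"watermark_logo"]; // placeholder asset name
NSString *outPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"slide_01.mp4"];

[LTVideoTools writeImageAsMovie:slide
                      watermark:logo
                         toPath:outPath
                           size:CGSizeMake(1280, 720)
                       duration:3
                            fps:24
              withCallbackBlock:^(BOOL success) {
    NSLog(@"image -> video: %@", success ? @"OK" : @"failed");
}];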


Video trimming

+ (void)trimVideoWithVideoUrlStr:(NSURL *)videoUrl captureVideoWithStartTime:(double)start endTime:(double)end outputPath:(NSURL *)outputURL completion:(void (^)(NSURL *outputURL, NSError *error))completionHandle {
    // Use a 600 timescale so sub-second start/end values are not truncated
    // (a timescale of 1 rounds everything to whole seconds).
    CMTime startTime = CMTimeMakeWithSeconds(start, 600);
    CMTime videoDuration = CMTimeMakeWithSeconds(end - start, 600);
    CMTimeRange videoTimeRange = CMTimeRangeMake(startTime, videoDuration);
    
    AVAssetExportSession *session = [AVAssetExportSession exportSessionWithAsset:[AVAsset assetWithURL:videoUrl] presetName:AVAssetExportPresetMediumQuality];
    session.outputURL = outputURL;
    session.outputFileType = AVFileTypeMPEG4;
    session.timeRange = videoTimeRange;
    session.shouldOptimizeForNetworkUse = YES;
    [session exportAsynchronouslyWithCompletionHandler:^{
        if (completionHandle) {
            if (session.error) {
                completionHandle(nil, session.error);
            } else {
                completionHandle(outputURL, nil);
            }
        }
    }];
}
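For example, keeping seconds 10 through 25 of a source clip might look like this (file URLs are placeholders):

NSURL *srcURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"source.mp4"]];
NSURL *dstURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"trimmed.mp4"]];

[LTVideoTools trimVideoWithVideoUrlStr:srcURL
            captureVideoWithStartTime:10.0
                               endTime:25.0
                            outputPath:dstURL
                            completion:^(NSURL *outputURL, NSError *error) {
    if (error) {
        NSLog(@"trim failed: %@", error);
    } else {
        NSLog(@"trimmed clip written to %@", outputURL);
    }
}];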

Changing video resolution

+ (void)resizeVideoWithAssetURL:(NSURL *)assetURL outputURL:(NSURL *)outputURL preferSize:(CGSize)preferSize doneHandler:(void(^)(NSURL *outputURL,NSError *error))doneHandler {
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetURL options:nil];
    
    AVAssetTrack *assetVideoTrack = nil;
    AVAssetTrack *assetAudioTrack = nil;
    
    if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
        assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
    }
    if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
        assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
    }
    
    NSError *error = nil;
    
    AVMutableComposition* mixComposition = [AVMutableComposition composition];
    if (assetVideoTrack) {
        AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:assetVideoTrack atTime:kCMTimeZero error:&error];
    }
    if (assetAudioTrack) {
        AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:assetAudioTrack atTime:kCMTimeZero error:&error];
    }
    
    AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
    mutableVideoComposition.renderSize = preferSize;
    mutableVideoComposition.frameDuration = CMTimeMake(1, 24);
    
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    // tracks[0] is the video track, since it was added to the composition first.
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:(mixComposition.tracks)[0]];
    // Portrait sources carry a 90° preferredTransform, so rotate the frame and
    // translate it back into the render rect, then scale from naturalSize
    // (the track's pre-rotation dimensions) to the target size.
    BOOL isPortrait_ = [LTVideoTools isVideoPortrait:asset];
    CGAffineTransform t = CGAffineTransformIdentity;
    if (isPortrait_) {
        t = CGAffineTransformRotate(t, M_PI_2);
        t = CGAffineTransformTranslate(t, 0, -preferSize.width);
    }
    preferSize = isPortrait_ ? CGSizeMake(preferSize.height, preferSize.width) : preferSize;
    t = CGAffineTransformScale(t, preferSize.width / assetVideoTrack.naturalSize.width, preferSize.height / assetVideoTrack.naturalSize.height);
    [layerInstruction setTransform:t atTime:kCMTimeZero];
    
    instruction.layerInstructions = @[layerInstruction];
    mutableVideoComposition.instructions = @[instruction];
    
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputURL.path]) {
        [[NSFileManager defaultManager] removeItemAtPath:outputURL.path error:&error];
    }
    
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    exportSession.videoComposition = mutableVideoComposition;
    exportSession.outputURL = outputURL;
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = YES;
    
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (exportSession.status == AVAssetExportSessionStatusCompleted) {
            doneHandler(outputURL,nil);
        }else {
            doneHandler(nil,exportSession.error);
        }
    }];
}

+ (BOOL)isVideoPortrait:(AVAsset *)asset {
    BOOL isPortrait = NO;
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if (tracks.count > 0) {
        AVAssetTrack *videoTrack = tracks[0];
        CGAffineTransform t = videoTrack.preferredTransform;
        // Portrait
        if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
            isPortrait = YES;
        }
        // PortraitUpsideDown
        if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
            isPortrait = YES;
        }
        // LandscapeRight
        if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
            isPortrait = NO;
        }
        // LandscapeLeft
        if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
            isPortrait = NO;
        }
    }
    return isPortrait;
}
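A sample call that downscales a clip to 960x540 (file URLs are placeholders); thanks to the orientation handling above, portrait sources come out upright:

NSURL *srcURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"source.mp4"]];
NSURL *dstURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"resized.mp4"]];

[LTVideoTools resizeVideoWithAssetURL:srcURL
                            outputURL:dstURL
                           preferSize:CGSizeMake(960, 540)
                          doneHandler:^(NSURL *outputURL, NSError *error) {
    NSLog(@"resize finished: %@ (error: %@)", outputURL, error);
}];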

Merging audio and video tracks

+ (void)mergeVideoAssetAndAudioAssetWithVideoAssetURL:(NSURL *)videoAssetURL audioAssetURL:(NSURL *)audioAssetURL outputURL:(NSURL *)outputURL doneHandler:(void(^)(NSURL *outputURL,NSError *error))doneHandler {
    AVAsset *videoAsset = [AVAsset assetWithURL:videoAssetURL];
    AVAsset *audioAsset = [AVAsset assetWithURL:audioAssetURL];
    
    AVMutableComposition *mainComposition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *videoCompositionTrack = [mainComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *soundCompositionTrack = [mainComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *soundCompositionTrack2 = [mainComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    
    // Use firstObject so a missing track yields nil instead of an out-of-bounds exception.
    AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (videoTrack) {
        [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];
        [videoCompositionTrack setPreferredTransform:videoTrack.preferredTransform];
    }
    AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (audioTrack) {
        [soundCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:audioTrack atTime:kCMTimeZero error:nil];
    }
    
    AVAssetTrack *audioTrack2 = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (audioTrack2) {
        // Clamp to the shorter of the two assets so insertTimeRange: cannot
        // reach past the end of the audio asset.
        CMTime mixDuration = CMTimeMinimum(videoAsset.duration, audioAsset.duration);
        [soundCompositionTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, mixDuration) ofTrack:audioTrack2 atTime:kCMTimeZero error:nil];
    }
    
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputURL.path]) {
        [[NSFileManager defaultManager] removeItemAtPath:outputURL.path error:nil];
    }
    
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mainComposition presetName:AVAssetExportPresetMediumQuality];
    exportSession.outputURL = outputURL;
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = YES;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (exportSession.status == AVAssetExportSessionStatusCompleted) {
            doneHandler(outputURL,nil);
        }else {
            doneHandler(nil,exportSession.error);
        }
    }];
}
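Typical use for the live-course case is mixing the recorded voice file into the composed slide video (file names are placeholders):

NSURL *videoURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"slides.mp4"]];
NSURL *voiceURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"voice.m4a"]];
NSURL *dstURL   = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"lesson.mp4"]];

[LTVideoTools mergeVideoAssetAndAudioAssetWithVideoAssetURL:videoURL
                                              audioAssetURL:voiceURL
                                                  outputURL:dstURL
                                                doneHandler:^(NSURL *outputURL, NSError *error) {
    NSLog(@"merge finished: %@ (error: %@)", outputURL, error);
}];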

Video concatenation

+ (void)mergeVideoWithAssetURLs:(NSArray <NSURL *>*)assetURLs outputURL:(NSURL *)outputURL doneHandler:(void(^)(NSURL *outputURL,NSError *error))doneHandler {
    NSMutableArray *assets = [NSMutableArray array];
    for (NSURL *url in assetURLs) {
        [assets addObject:[AVAsset assetWithURL:url]];
    }
    
    AVMutableComposition *mainComposition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *videoCompositionTrack = [mainComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *soundCompositionTrack = [mainComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    
    CMTime insertTime = kCMTimeZero;
    
    for (AVAsset *videoAsset in assets) {
        AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        if (videoTrack) {
            [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:videoTrack atTime:insertTime error:nil];
            [videoCompositionTrack setPreferredTransform:videoTrack.preferredTransform];
        }
        AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        if (audioTrack) {
            [soundCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:audioTrack atTime:insertTime error:nil];
        }
        
        insertTime = CMTimeAdd(insertTime, videoAsset.duration);
    }
    
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputURL.path]) {
        [[NSFileManager defaultManager] removeItemAtPath:outputURL.path error:nil];
    }
    
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mainComposition presetName:AVAssetExportPresetMediumQuality];
    exportSession.outputURL = outputURL;
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = YES;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        // Clean up the source clips once the export finishes. Iterate the
        // original URLs: the assets array holds AVAsset objects, not NSURLs.
        for (NSURL *url in assetURLs) {
            if ([[NSFileManager defaultManager] fileExistsAtPath:url.path]) {
                [[NSFileManager defaultManager] removeItemAtURL:url error:nil];
            }
        }
        
        if (exportSession.status == AVAssetExportSessionStatusCompleted) {
            doneHandler(outputURL,nil);
        }else {
            doneHandler(nil,exportSession.error);
        }
    }];
}
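Called like this (placeholder URLs); note that the method deletes the source clips after the export completes, so pass copies if you need to keep the originals:

NSURL *clip1URL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"clip1.mp4"]]; // placeholder
NSURL *clip2URL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"clip2.mp4"]]; // placeholder
NSURL *dstURL   = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"joined.mp4"]];

[LTVideoTools mergeVideoWithAssetURLs:@[clip1URL, clip2URL]
                            outputURL:dstURL
                          doneHandler:^(NSURL *outputURL, NSError *error) {
    NSLog(@"concat finished: %@ (error: %@)", outputURL, error);
}];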