
Short Video from Scratch (11): Custom Region Video Cropping

2018-08-03  卢叁

The project has a requirement for cropping a custom region of a video: the user selects the region to keep, and everything outside it is discarded. The result looks like this:

IMG_5337.PNG
As shown above, a draggable crop frame sits on top of the video. Gestures let the user resize it freely or by fixed aspect ratio, and the video is then cropped to the selected region. I won't go into the UI layer any further. Cropping the video relies on two classes that many developers may be unfamiliar with: AVMutableVideoCompositionInstruction and AVMutableVideoCompositionLayerInstruction. An AVMutableVideoCompositionInstruction describes how the composition should be rendered over a given time range and holds an array of layer instructions. An AVMutableVideoCompositionLayerInstruction applies to a single video track and lets you transform it (scale, rotate, translate) within that time range. Let's go straight to the code (I know you don't want to listen to me ramble):
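Before diving in, here is a rough sketch of the method signature I'm assuming wraps the code below. The parameter names are my own: videoPath, videoPoint, videoSize, shouldScale and the export-related parameters used later all belong to this hypothetical method, with videoPoint and videoSize being the origin and size of the crop rectangle in the video's pixel coordinate space:

    // Hypothetical wrapper for the cropping code that follows (declared in some tool class
    // or view controller; requires <AVFoundation/AVFoundation.h> and <UIKit/UIKit.h>).
    - (void)cropVideoAtPath:(NSString *)videoPath          // source video file path
                  cropPoint:(CGPoint)videoPoint            // top-left of the crop rect, in video pixels
                   cropSize:(CGSize)videoSize              // size of the crop rect, in video pixels
                shouldScale:(BOOL)shouldScale              // aspect-fill the source into the crop rect first
                 presetName:(NSString *)presetName         // e.g. AVAssetExportPresetHighestQuality
             outputFileType:(NSString *)outputFileType     // e.g. AVFileTypeMPEG4
                  outputURL:(NSURL *)compressionFileURL    // destination file URL
                   complete:(void (^)(NSError *error, NSURL *fileURL))completeBlock;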
    BOOL Assetvertical = NO;
    // Create an AVAsset instance; AVAsset carries all of the video's information
    NSDictionary *option = [NSDictionary dictionaryWithObject:@(YES) forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    AVAsset *videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoPath] options:option];
  
    
    // Create the AVMutableComposition instance.
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    
    // Start time
    CMTime startTime = CMTimeMakeWithSeconds(0, videoAsset.duration.timescale);
    // End time. Note that CMTimeRangeMake(start, duration) actually takes a duration, not an
    // end time; it works below only because startTime is zero, so the end time equals the
    // total duration. CMTimeRangeFromTimeToTime(startTime, endTime) would express the intent directly.
    CMTime endTime = CMTimeMakeWithSeconds(videoAsset.duration.value/videoAsset.duration.timescale, videoAsset.duration.timescale);
  //  CMTimeRange timeRange =CMTimeRangeMake(startTime, endTime);
    NSLog(@"Video duration: value=%lld timescale=%d", videoAsset.duration.value, videoAsset.duration.timescale);
    
    // 3. Video track: a composition contains tracks (video tracks, audio tracks, etc.)
    //    into which the corresponding media segments are inserted
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *error;
    // Insert the source video track into the mutable track; time trimming could also be done here via the TimeRange
    [videoTrack insertTimeRange:CMTimeRangeMake(startTime, endTime)
                        ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject]
                         atTime:kCMTimeZero error:&error];
    
    
   
    // The asset has an audio track
    if ([[videoAsset tracksWithMediaType:AVMediaTypeAudio] count] > 0) {
        // Audio source
        AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:videoPath] options:option];
        
        // Audio track of the composition
        AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        // Audio track of the source asset
        AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] lastObject];
        [audioTrack insertTimeRange:CMTimeRangeMake(startTime, endTime) ofTrack:audioAssetTrack atTime:kCMTimeZero error:nil];
    }
    
   

    
    // 3.1 AVMutableVideoCompositionInstruction: describes how the composition is rendered over a time range
    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(startTime, endTime);
    
    // 3.2 AVMutableVideoCompositionLayerInstruction: transforms (scale/rotate/translate) a single video track
    AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject];
    UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
    // Was the video shot in portrait?
    BOOL isVideoAssetvertical = NO;
    CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
    if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
        isVideoAssetvertical = YES;
        videoAssetOrientation_ = UIImageOrientationUp;    // portrait
    }
    if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
        isVideoAssetvertical = YES;
        videoAssetOrientation_ = UIImageOrientationDown;  // portrait, upside down
    }
    if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
        isVideoAssetvertical = NO;
        videoAssetOrientation_ = UIImageOrientationLeft;  // landscape left
    }
    if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
        isVideoAssetvertical = NO;
        videoAssetOrientation_ = UIImageOrientationRight; // landscape right
    }
    
    float scaleX = 1.0, scaleY = 1.0, scale = 1.0;
    // naturalSize ignores preferredTransform, so swap width/height for portrait footage
    CGSize trackNaturalSize = [[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize];
    CGSize originVideoSize;
    if (isVideoAssetvertical || Assetvertical) {
        originVideoSize = CGSizeMake(trackNaturalSize.height, trackNaturalSize.width);
    }
    else {
        originVideoSize = CGSizeMake(trackNaturalSize.width, trackNaturalSize.height);
    }
    float x = videoPoint.x;
    float y = videoPoint.y;
    if (shouldScale) {
        // Scale the source so it aspect-fills the crop rect
        scaleX = videoSize.width / originVideoSize.width;
        scaleY = videoSize.height / originVideoSize.height;
        scale  = MAX(scaleX, scaleY);
        if (scaleX > scaleY) {
            NSLog(@"portrait");
        }
        else {
            NSLog(@"landscape");
        }
    }
    else {
        scaleX = 1.0;
        scaleY = 1.0;
        scale = 1.0;
    }
    if (Assetvertical) {
        // Apply the scale and shift the content by (-x, -y) so the crop region lands at the origin.
        // The extra 720 added to tx is a hard-coded offset from the original code, presumably tied
        // to the source's pixel width, needed because of the additional 90° rotation below.
        CGAffineTransform trans = CGAffineTransformMake(videoAssetTrack.preferredTransform.a*scale, videoAssetTrack.preferredTransform.b*scale, videoAssetTrack.preferredTransform.c*scale, videoAssetTrack.preferredTransform.d*scale, videoAssetTrack.preferredTransform.tx*scale - x + 720, videoAssetTrack.preferredTransform.ty*scale - y);
        
        CGAffineTransform trans2 = CGAffineTransformRotate(trans, M_PI_2);
        [videolayerInstruction setTransform:trans2 atTime:kCMTimeZero];
    }
    else {
        // Apply the scale and shift the content by (-x, -y) so the crop region lands at the origin
        CGAffineTransform trans = CGAffineTransformMake(videoAssetTrack.preferredTransform.a*scale, videoAssetTrack.preferredTransform.b*scale, videoAssetTrack.preferredTransform.c*scale, videoAssetTrack.preferredTransform.d*scale, videoAssetTrack.preferredTransform.tx*scale - x, videoAssetTrack.preferredTransform.ty*scale - y);
        
        [videolayerInstruction setTransform:trans atTime:kCMTimeZero];
    }
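    
    // Net effect of the crop: the source is scaled (if requested) and translated so that the
    // selected region's top-left corner sits at the render origin; the renderSize set below
    // then clips away everything outside the renderWidth x renderHeight rectangle.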
    
      
    
    // Crop region
    mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction, nil];
    // AVMutableVideoComposition manages how the video tracks are composited and determines the
    // final render size; this is where the actual cropping takes effect
    AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
    
    
    CGSize naturalSize = originVideoSize;
    int64_t renderWidth = 0, renderHeight = 0;
    if (videoSize.height == 0.0 || videoSize.width == 0.0) {
        // No crop rect supplied: render at the full video size
        renderWidth = naturalSize.width;
        renderHeight = naturalSize.height;
    }
    else {
        renderWidth = ceil(videoSize.width);
        renderHeight = ceil(videoSize.height);
    }
    
    mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
    mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
    mainCompositionInst.frameDuration = CMTimeMake(1, 30); // 30 fps

With the steps above we now have an AVMutableVideoComposition object, which is exactly what we need; all that's left is to export the video with an AVAssetExportSession, as follows:

    // Export the video file
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:presetName];
    exporter.outputURL = compressionFileURL;
    exporter.outputFileType = outputFileType;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = mainCompositionInst;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        
        dispatch_async(dispatch_get_main_queue(), ^{
            
            switch (exporter.status) {
                case AVAssetExportSessionStatusFailed:{
                    
                    if (completeBlock) {
                        
                        completeBlock(exporter.error,compressionFileURL);
                    }
                    
                    break;
                }
                case AVAssetExportSessionStatusCancelled:{
                    
                    NSLog(@"Export Status: Cancell");
                    
                    break;
                }
                case AVAssetExportSessionStatusCompleted: {
                    
                    if (completeBlock) {
                        
                        completeBlock(nil,compressionFileURL);
                    }
                    
                    
                    break;
                }
                case AVAssetExportSessionStatusUnknown: {
                    
                    NSLog(@"Export Status: Unknown");
                    break;
                }
                case AVAssetExportSessionStatusExporting : {
                    
                    NSLog(@"Export Status: Exporting");
                    break;
                }
                case AVAssetExportSessionStatusWaiting: {
                    
                    NSLog(@"Export Status: Wating");
                    break;
                }
                    
            }
            
        });
   }];
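
Note that presetName, outputFileType, compressionFileURL and completeBlock above are parameters of the surrounding method (see the hypothetical signature near the top of this post). Purely as an illustration, a call site might pass values like these:

    // Hypothetical values for the export parameters used above
    NSString *presetName = AVAssetExportPresetHighestQuality;
    NSString *outputFileType = AVFileTypeMPEG4;
    NSURL *compressionFileURL = [NSURL fileURLWithPath:
        [NSTemporaryDirectory() stringByAppendingPathComponent:@"cropped.mp4"]];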
    

Note: we can also set exporter.timeRange at the same time, so that the video's duration is trimmed while its region is cropped. If that is unclear, see the previous article, Short Video from Scratch (10): Custom Video Time Trimming.
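
As a quick illustration (the 2 s and 8 s below are arbitrary example values), trimming to a segment while cropping just means setting the time range before calling exportAsynchronouslyWithCompletionHandler:

    // Hypothetical example: export only the 2s–8s segment of the composition
    CMTime trimStart = CMTimeMakeWithSeconds(2.0, videoAsset.duration.timescale);
    CMTime trimEnd   = CMTimeMakeWithSeconds(8.0, videoAsset.duration.timescale);
    exporter.timeRange = CMTimeRangeFromTimeToTime(trimStart, trimEnd);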
If you have any questions, feel free to leave a comment.
