Trimming Video with AVFoundation

2017-09-13  蝼蚁撼树
#pragma mark -- Load the asset --
-(void)loadAsset{
    
    //1. Load the source asset
    NSString *videoStr = [[NSBundle mainBundle] pathForResource:@"abc" ofType:@"mp4"];

    NSLog(@"videoStr == %@",videoStr);
    
    AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videoStr]];
    
    NSLog(@"asset.duration == %f",CMTimeGetSeconds(asset.duration));
    
   
    
    NSLog(@"AssetTrack.count == %zd",asset.tracks.count);
    //Read the track information
    
    AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo]
                                     objectAtIndex:0];
    //Get the video's natural size
     NSLog(@"asset.naturalSize == %@",NSStringFromCGSize(videoAssetTrack.naturalSize));
    
    NSLog(@"audioAssetTrack.count == %zd",[asset tracksWithMediaType:AVMediaTypeAudio].count);
    
    AVAssetTrack *audioAssetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    
    //2. Composition ("project file"): all editing operations are done here
    AVMutableComposition *composition = [AVMutableComposition composition];
    //Video track of the composition
    AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [videoCompositionTrack setPreferredTransform:videoAssetTrack.preferredTransform];
    //Insert the video segment
    NSError *videoError;
    [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAssetTrack.timeRange.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:&videoError];
    if (videoError) {
        NSLog(@"videoError == %@",videoError);
    }
    //Audio track of the composition
    AVMutableCompositionTrack *audioCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    //Insert the audio segment
    NSError *audioError;
    [audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAssetTrack.timeRange.duration) ofTrack:audioAssetTrack atTime:kCMTimeZero error:&audioError];
    if (audioError) {
        NSLog(@"audioError == %@",audioError);
    }
    
    //3. Trim / compose the video
    //A layer instruction describes the state of one track within an instruction's time range;
    AVMutableVideoCompositionLayerInstruction *videoCompositionLayerIns = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoAssetTrack];
    //Transform (affects how the frame is rendered)
    [videoCompositionLayerIns setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
    
    
    //One instruction determines the state of every track within its timeRange; it can contain multiple layerInstructions;
    AVMutableVideoCompositionInstruction *videoCompositionIns = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    
    [videoCompositionIns setTimeRange:CMTimeRangeMake(kCMTimeZero, videoAssetTrack.timeRange.duration)];
    
    videoCompositionIns.backgroundColor = [UIColor redColor].CGColor;
    videoCompositionIns.layerInstructions = @[videoCompositionLayerIns];
    
    //The collection of instructions
    
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    
    videoComposition.instructions = @[videoCompositionIns];
    
    videoComposition.renderSize = CGSizeMake(1920, 1080);
    videoComposition.renderScale = 1;

    videoComposition.frameDuration = CMTimeMake(1, 30);
    
    //Add a watermark (optional; left commented out)
    
    /** Watermark */
//    CGSize videoSize = CGSizeMake(videoAssetTrack.naturalSize.width, videoAssetTrack.naturalSize.height);
//    CATextLayer *textLayer = [CATextLayer layer];
//    textLayer.backgroundColor = [UIColor redColor].CGColor;
//    textLayer.string = @"123456";
//    textLayer.bounds = CGRectMake(0, 0, videoSize.width * 0.5, videoSize.height * 0.5);
    //Add the watermark layer and the animation layers
//    CALayer *baseLayer = [CALayer layer];
//    [baseLayer addSublayer:textLayer];
//    baseLayer.position = CGPointMake(videoComposition.renderSize.width/2, videoComposition.renderSize.height/2);
    
//    CALayer *videoLayer = [CALayer layer];
//    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
//    CALayer *parentLayer = [CALayer layer];
//    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    
//    [parentLayer addSublayer:videoLayer];
//    [parentLayer addSublayer:baseLayer];
//    AVVideoCompositionCoreAnimationTool *animalTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
//    videoComposition.animationTool = animalTool;
    
    //Animate the watermark
//    CABasicAnimation *baseAnimation = [CABasicAnimation animationWithKeyPath:@"position"];
//    baseAnimation.fromValue = [NSValue valueWithCGPoint:CGPointMake(100, 100)];
//    baseAnimation.toValue = [NSValue valueWithCGPoint:CGPointMake(200, 200)];
//    baseAnimation.repeatCount = 5;
//    baseAnimation.beginTime = AVCoreAnimationBeginTimeAtZero;
//    baseAnimation.duration = 1;
//    baseAnimation.removedOnCompletion = NO;
//    [textLayer addAnimation:baseAnimation forKey:@"hehe"];
    
    
    //4. Export the video
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPreset1920x1080];
    
    exporter.videoComposition = videoComposition;
    
    exporter.outputFileType = AVFileTypeMPEG4;
    
    exporter.outputURL = [NSURL fileURLWithPath:[self getFilePath:YES]];
    
    exporter.shouldOptimizeForNetworkUse = YES;
    
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        
        if (exporter.status == AVAssetExportSessionStatusCompleted) {
            
            NSLog(@"剪切成功");
            
            dispatch_async(dispatch_get_main_queue(), ^{
                [self play:[self getFilePath:NO]];
            });
        }
        else
        {
            NSLog(@"合成失败 %@",exporter.error);
        }
        
    }];
    
    

}
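In loadAsset above, insertTimeRange:ofTrack:atTime: copies the full duration of each source track, so the export re-encodes the whole clip. The actual cut is just a matter of passing a shorter CMTimeRange. A minimal sketch, reusing the variable names from the method above and an arbitrary cut from second 2 to second 7 (these values are not from the original code):

    //Keep only seconds 2 through 7 of the source instead of the whole clip
    CMTimeRange trimRange = CMTimeRangeMake(CMTimeMakeWithSeconds(2, 600),   // start at 2 s
                                            CMTimeMakeWithSeconds(5, 600));  // keep 5 s
    
    //Option 1: insert only the sub-range when building the composition tracks
    //(the AVMutableVideoCompositionInstruction's timeRange must then cover the shortened duration)
    [videoCompositionTrack insertTimeRange:trimRange ofTrack:videoAssetTrack atTime:kCMTimeZero error:&videoError];
    [audioCompositionTrack insertTimeRange:trimRange ofTrack:audioAssetTrack atTime:kCMTimeZero error:&audioError];
    
    //Option 2: build the composition from the full clip as above and trim at export time
    exporter.timeRange = trimRange;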

-(NSString *)getFilePath:(BOOL)isNew{

    NSString *url = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
    NSString *str = [url stringByAppendingPathComponent:@"test.mp4"];
    
    if (isNew) unlink([str UTF8String]);
    
    NSLog(@"str == %@",str);
    
    return str;

}
//Play the video
-(void)play:(NSString *)url{
    
    AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:url]];
    
    AVAssetTrack *videoAssetTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    
    NSLog(@"asset.size == %@",NSStringFromCGSize(videoAssetTrack.naturalSize));
    
    AVAudioSession* session = [AVAudioSession sharedInstance];
    [session setCategory:AVAudioSessionCategoryPlayback error:nil];
    AVPlayer* player = [AVPlayer playerWithURL:[NSURL fileURLWithPath:url]];
    AVPlayerViewController* playerController = [[AVPlayerViewController alloc] init];
    playerController.player = player;
    playerController.videoGravity = AVLayerVideoGravityResizeAspect;
    playerController.showsPlaybackControls = YES;
    playerController.view.translatesAutoresizingMaskIntoConstraints = YES;
    playerController.view.frame = self.view.bounds;
    [playerController.player play];
    [self presentViewController:playerController animated:YES completion:nil];
    
}

One thing to watch out for here is the difference between the fileURLWithPath: and URLWithString: methods:


NSString *fileURL = @"file:///Users/username/Desktop/test.mp4";
NSURL *url1 = [NSURL URLWithString:fileURL];
NSLog(@"url1 = %@", url1);

NSString *filePath = @"/Users/username/Desktop/test.mp4";
NSURL *url2 = [NSURL fileURLWithPath:filePath];
NSLog(@"url2 = %@", url2);

NSURL *url3 = [NSURL URLWithString:filePath];
NSLog(@"url3 = %@", url3);

Console output:

url1 = file:///Users/username/Desktop/test.mp4

url2 = file:///Users/username/Desktop/test.mp4

url3 = /Users/username/Desktop/test.mp4

If all you have is a plain file-system path filePath (no URL scheme), calling [AVAsset assetWithURL:[NSURL URLWithString:filePath]] cannot load the AVAsset properly. [NSURL fileURLWithPath:filePath], on the other hand, prepends the file:// scheme to filePath automatically, so the asset loads normally.
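The difference shows up immediately when you try to read the tracks. Below is a minimal sketch, reusing the Documents/test.mp4 path produced by getFilePath: above; the synchronous tracks.count check only mirrors the logging style used earlier and is not production-grade asset loading:

NSString *documentsPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
NSString *filePath = [documentsPath stringByAppendingPathComponent:@"test.mp4"];

//No scheme: AVFoundation cannot resolve the resource, so no tracks are loaded
AVURLAsset *badAsset = [AVURLAsset assetWithURL:[NSURL URLWithString:filePath]];
NSLog(@"badAsset.tracks.count == %zd", badAsset.tracks.count);   // expected to stay 0

//file:// scheme added automatically: the asset loads and exposes its tracks
AVURLAsset *goodAsset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:filePath]];
NSLog(@"goodAsset.tracks.count == %zd", goodAsset.tracks.count); // > 0 if the file exists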
