
Audio/Video Composition and Compression

2017-05-31  爆裂少女小鸭子
This post covers compressing a video and mixing the original video's audio track with another audio track at adjustable volume ratios.
1 Create a temporary output path for the compressed video

```
    self.tmpVideoPath = [tempDir stringByAppendingPathComponent:@"myMovie.mp4"];
```
2 Check whether that path already exists; if it does, delete the old file

```
    NSURL *url = [NSURL fileURLWithPath:self.tmpVideoPath];
    NSFileManager *fm = [NSFileManager defaultManager];
    NSError *err = nil;
    BOOL exist = [fm fileExistsAtPath:url.path];
    if (exist) {
        [fm removeItemAtURL:url error:&err];
        NSLog(@"file deleted");
        if (err) {
            NSLog(@"file remove error, %@", err.localizedDescription);
        }
    } else {
        NSLog(@"no file by that name");
    }
```
3 Compress the video

> Before compressing, a quick note: AVAsset is an abstract, immutable class that models a media resource and how its tracks are presented together. Creating one with assetWithURL: actually returns an instance of its subclass AVURLAsset, because AVAsset itself cannot be instantiated directly.
With AVURLAsset you can create an asset with (optional) options, for example to get more precise duration and timing information.
You can call exportPresetsCompatibleWithAsset: to check whether a given preset can be used with an asset.
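For example, a minimal sketch of both ideas (here `videoUrl` stands in for the same local file URL that the code below builds from `videoUrlString`):

```
// Sketch: create an AVURLAsset that prefers precise duration/timing,
// then list the export presets that are compatible with it.
NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };
AVURLAsset *preciseAsset = [AVURLAsset URLAssetWithURL:videoUrl options:options];
NSArray *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:preciseAsset];
NSLog(@"duration: %.2f s, presets: %@", CMTimeGetSeconds(preciseAsset.duration), presets);
```

The post's own compression code follows.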

```
    NSURL *videoUrl = [NSURL fileURLWithPath:videoUrlString];
    self.asset = [AVAsset assetWithURL:videoUrl];
    AVAsset *anAsset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
    NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:anAsset];
    // Check that the preset we want is among the presets supported for this asset
    if ([compatiblePresets containsObject:AVAssetExportPresetMediumQuality]) {

        // Use the preset we just checked for, so the video is actually re-encoded and compressed
        self.exportSession = [[AVAssetExportSession alloc] initWithAsset:self.asset presetName:AVAssetExportPresetMediumQuality];
        NSURL *furl = [NSURL fileURLWithPath:self.tmpVideoPath];
        self.exportSession.outputURL = furl;
        // The output file is identified by the .mp4 extension
        self.exportSession.outputFileType = AVFileTypeMPEG4;
        // Set the trim range
        CMTime start = CMTimeMakeWithSeconds(self.startTime, self.asset.duration.timescale);
        CMTime duration = CMTimeMakeWithSeconds(self.stopTime - self.startTime, self.asset.duration.timescale);
        CMTimeRange range = CMTimeRangeMake(start, duration);
        self.exportSession.timeRange = range;
        
        [self.exportSession exportAsynchronouslyWithCompletionHandler:^{
            
            switch ([self.exportSession status]) {
                case AVAssetExportSessionStatusFailed:
                    
                    NSLog(@"Export failed: %@", [[self.exportSession error] localizedDescription]);
                    break;
                case AVAssetExportSessionStatusCancelled:
                    
                    break;
                default:
                    // Compression succeeded
                    dispatch_async(dispatch_get_main_queue(), ^{
                        // Mixing ratio read from the slider (0–100), normalized to 0–1
                        CGFloat progress = _smarkSlider.value / 100;
                        // 1 - progress is the original video audio's share; progress is the inserted audio's share
                        [self addVideoVolumn:1 - progress andAudioVolumn:progress];
                        // // Create a notification object
                        // NSNotification *notice = [NSNotification notificationWithName:@"PUSH" object:nil userInfo:nil];
                        // // Post the notification
                        // [[NSNotificationCenter defaultCenter] postNotification:notice];
                        
                    });
                    
                    break;
            }
        }];
    }
```

4 Mix the original video's audio and the inserted audio into a new audio track at the given percentages
```
/*
 *  Extract the original video's audio and mix it with the desired music
 */
-(void)addVideoVolumn:(CGFloat)volumnVideo andAudioVolumn:(CGFloat)volumnAudio
{
    
    AVMutableComposition *composition =[AVMutableComposition composition];
    // _audioMixParams (NSMutableArray) holds the audio mix parameters
    _audioMixParams =[[NSMutableArray alloc]initWithCapacity:0];
    // The recorded video (the one we just compressed)
    NSURL *video_inputFileUrl =[[NSURL alloc]  initFileURLWithPath:self.tmpVideoPath];
    AVURLAsset *songAsset =[AVURLAsset URLAssetWithURL:video_inputFileUrl options:nil];
    CMTime startTime = CMTimeMakeWithSeconds(0, songAsset.duration.timescale);
    CMTime trackDuration = songAsset.duration;
    
    // Pull the audio material out of the video
    [self setUpAndAddAudioAtPath:video_inputFileUrl toComposition:composition start:startTime dura:trackDuration offset:CMTimeMake(0,44100) andVolume:volumnVideo];
    
    // The local music to insert
    NSURL *url = [[NSURL alloc]initFileURLWithPath:[[AudioDao getCurrentWorkDataByWorkdate:_model.audioTime].newfile getFilePathOfDocuments]];// Look up the audio file path by its recording time (app-specific)
    
    // Get the prepared audio material
    AVURLAsset *songAsset1 =[AVURLAsset URLAssetWithURL:url options:nil];
    CMTime startTime1 = CMTimeMakeWithSeconds(0.06 + self.startTime, songAsset1.duration.timescale);// Start 0.06 s after self.startTime
    CMTime trackDuration1 = songAsset1.duration;
    
    [self setUpAndAddAudioAtPath:url toComposition:composition start:startTime1 dura:trackDuration1 offset:CMTimeMake(0,44100) andVolume:volumnAudio];
    
    // Create a mutable audio mix
    AVMutableAudioMix *audioMix =[AVMutableAudioMix audioMix];
    audioMix.inputParameters =[NSArray arrayWithArray:_audioMixParams];// Take the prepared per-track parameters out of the array
    
    // Create an export session for the mixed audio
    AVAssetExportSession *exporter =[[AVAssetExportSession alloc]
                                     initWithAsset:composition
                                     presetName:AVAssetExportPresetAppleM4A];
    exporter.audioMix = audioMix;
    exporter.outputFileType= AVFileTypeAppleM4A;// @"com.apple.m4a-audio"
    NSString* fileName =[NSString stringWithFormat:@"%@.mov",@"overMix"];// do not change this extension
    // Output path
    NSString *exportFile =[NSString stringWithFormat:@"%@/%@",[self getLibarayPath], fileName];
    
    if([[NSFileManager defaultManager]fileExistsAtPath:exportFile]) {
        [[NSFileManager defaultManager]removeItemAtPath:exportFile error:nil];
    }
    
    
    NSURL *exportURL =[NSURL fileURLWithPath:exportFile];
    exporter.outputURL = exportURL;
    self.mixURL = exportURL;
    
    __weak typeof(self) weakSelf = self;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        int exportStatus =(int)exporter.status;
        switch (exportStatus){
            case AVAssetExportSessionStatusFailed:{
                NSError *exportError = exporter.error;
                NSLog(@"错误,信息: %@", exportError);
                
                break;
            }
            case AVAssetExportSessionStatusCompleted:{
                NSLog(@"成功 是否在主线程2%d",(int)[NSThread isMainThread]);
                // 最终混合
                [weakSelf theVideoWithMixMusic];
                
                break;
            }
            case AVAssetExportSessionStatusExporting:{
                // Note: this completion handler only fires on terminal states,
                // so this case is effectively never reached here.
                NSLog(@"Current export progress: %f", exporter.progress);
                break;
            }
            default:
                break;
        }
    }];
}
```

```
/*
 *  Build an audio track from a file path and add it to the composition
 */
- (void)setUpAndAddAudioAtPath:(NSURL*)assetURL toComposition:(AVMutableComposition*)composition start:(CMTime)start dura:(CMTime)dura offset:(CMTime)offset andVolume:(float)volumn{
    
    AVURLAsset *songAsset =[AVURLAsset URLAssetWithURL:assetURL options:nil];
    
    AVMutableCompositionTrack *track =[composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *sourceAudioTrack =[[songAsset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0];
    NSError *error =nil;
    BOOL ok = NO;
    CMTime startTime = start;
    CMTime trackDuration = dura;
    CMTimeRange tRange = CMTimeRangeMake(startTime,trackDuration);
    
    // Set the volume of this track, starting at the given time.
    // AVMutableAudioMixInputParameters: mutable input parameters for an audio mix
    // audioMixInputParametersWithTrack: creates the parameters for a specific track
    AVMutableAudioMixInputParameters *trackMix =[AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
    [trackMix setVolume:volumn atTime:startTime];
    
    // Add this track's mix parameters to the array
    [_audioMixParams addObject:trackMix];
    
    // Insert the source audio into the composition track at the given offset (e.g. CMTimeMake(0, 44100))
    ok = [track insertTimeRange:tRange ofTrack:sourceAudioTrack atTime:offset error:&error];
    if (!ok) {
        NSLog(@"insertTimeRange failed: %@", error);
    }
}
```
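Besides the fixed volume set with setVolume:atTime: above, AVMutableAudioMixInputParameters also supports volume ramps. A minimal sketch (not from the original post; it reuses the method's `track` and the same `_audioMixParams` array) that fades a track in over its first two seconds:

```
// Sketch: fade the track in from silence to full volume over two seconds
// using a volume ramp instead of a single setVolume:atTime: call.
AVMutableAudioMixInputParameters *fadeMix =
    [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
CMTimeRange fadeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(2.0, 44100));
[fadeMix setVolumeRampFromStartVolume:0.0 toEndVolume:1.0 timeRange:fadeRange];
[_audioMixParams addObject:fadeMix];
```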
5 Mux the video with the mixed audio

```
-(void)theVideoWithMixMusic
{
// Audio source path (the mixed audio from step 4)
NSURL *audio_inputFileUrl = self.mixURL;

// Video source path
NSURL   *video_inputFileUrl = [NSURL fileURLWithPath:self.tmpVideoPath];

// Final output path
NSString *documentsDirectory = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject; // assumed: the original snippet did not show where documentsDirectory comes from
_outputFilePath =[documentsDirectory stringByAppendingPathComponent:@"finalvideo.mp4"];
NSURL *outputFileUrl = [NSURL fileURLWithPath:_outputFilePath];

if([[NSFileManager defaultManager]fileExistsAtPath:_outputFilePath])
    [[NSFileManager defaultManager]removeItemAtPath:_outputFilePath error:nil];

CMTime nextClipStartTime =kCMTimeZero;

// Create a mutable composition to hold the video and audio
AVMutableComposition* mixComposition =[AVMutableComposition composition];

// Video track

AVURLAsset* videoAsset =[[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo]objectAtIndex:0] atTime:nextClipStartTime error:nil];

// Audio track
AVURLAsset* audioAsset =[[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];


CMTimeRange audio_timeRange =CMTimeRangeMake(kCMTimeZero,videoAsset.duration);// clip the audio to the video's duration
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0]atTime:nextClipStartTime error:nil];

AVMutableVideoComposition *mainComposition = [AVMutableVideoComposition videoComposition];
mainComposition.frameDuration = CMTimeMake(1, 30);

// Rotation of the source video in degrees; degressFromVideoFileWithURL: is a helper not shown in this post (a sketch appears after this method)
NSInteger videoRoate = [self.class degressFromVideoFileWithURL:video_inputFileUrl];
CGAffineTransform translateToCenter;
//    if(videoRoate!=0){
CGAffineTransform mixedTransform;
if(videoRoate == 0){
    
    translateToCenter = CGAffineTransformMakeTranslation(0.0,0.0);
    mixedTransform = CGAffineTransformRotate(translateToCenter,0);
    mainComposition.renderSize = CGSizeMake(a_compositionVideoTrack.naturalSize.width,a_compositionVideoTrack.naturalSize.height);
}
else if(videoRoate == 90){
    // Rotated 90° clockwise
    NSLog(@"Video rotated 90°, home button on the left");
    translateToCenter = CGAffineTransformMakeTranslation(a_compositionVideoTrack.naturalSize.height,0.0);
    mixedTransform = CGAffineTransformRotate(translateToCenter,M_PI_2);
    mainComposition.renderSize = CGSizeMake(a_compositionVideoTrack.naturalSize.height,a_compositionVideoTrack.naturalSize.width);
}else if(videoRoate == 180){
    // Rotated 180° clockwise
    NSLog(@"Video rotated 180°, home button on top");
    translateToCenter = CGAffineTransformMakeTranslation(a_compositionVideoTrack.naturalSize.width, a_compositionVideoTrack.naturalSize.height);
    mixedTransform = CGAffineTransformRotate(translateToCenter,M_PI);
    mainComposition.renderSize = CGSizeMake(a_compositionVideoTrack.naturalSize.width,a_compositionVideoTrack.naturalSize.height);
}else if(videoRoate == 270){
    // Rotated 270° clockwise
    NSLog(@"Video rotated 270°, home button on the right");
    translateToCenter = CGAffineTransformMakeTranslation(0.0, a_compositionVideoTrack.naturalSize.width);
    mixedTransform = CGAffineTransformRotate(translateToCenter,M_PI_2*3.0);
    mainComposition.renderSize = CGSizeMake(a_compositionVideoTrack.naturalSize.height,a_compositionVideoTrack.naturalSize.width);
}
AVMutableVideoCompositionInstruction *roateInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
roateInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVMutableVideoCompositionLayerInstruction *roateLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:a_compositionVideoTrack];

[roateLayerInstruction setTransform:mixedTransform atTime:kCMTimeZero];
roateInstruction.layerInstructions = @[roateLayerInstruction];
// Apply the rotation instruction to the video composition
mainComposition.instructions = @[roateInstruction];
//    }
// Create the final export session
_assetExport =[[AVAssetExportSession alloc]initWithAsset:mixComposition presetName:_exportSet];
_assetExport.outputFileType =AVFileTypeMPEG4;
_assetExport.outputURL = outputFileUrl;
_assetExport.shouldOptimizeForNetworkUse=YES;
_assetExport.videoComposition = mainComposition;

__weak typeof(self) weakSelf = self;

// NSTimer *processTimer = [NSTimer scheduledTimerWithTimeInterval: 0.1 target: self selector: @selector(exportingProgressDicChanged) userInfo: nil repeats: YES];
//
// [processTimer fire];

// Get a dispatch queue
//    dispatch_queue_t queue = dispatch_get_global_queue(0, 0);
dispatch_queue_t queue =dispatch_get_main_queue();

// Create a GCD timer to poll export progress (dispatch_source_t is still an Objective-C object under ARC)
[self setProgressView];
_timer=dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER,0, 0, queue);
dispatch_time_t start =dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC));
uint64_t interval = (uint64_t)(0.1 *NSEC_PER_SEC);
dispatch_source_set_timer(_timer, start, interval,0);
// Set the timer callback
dispatch_source_set_event_handler(_timer, ^{
    [self exportingProgressDicChanged];
});
dispatch_resume(_timer);

// dispatch_async(queue, ^{
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    // if ([_assetExport status] == AVAssetExportSessionStatusCompleted) {
    dispatch_async(dispatch_get_main_queue(), ^{
        [LCProgressHUD showMessage:@"Composition finished"];
    });
}];
// });
// Note: this log runs as soon as the export is kicked off, not when it finishes
NSLog(@"Done! Output path == %@", _outputFilePath);
}
```
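The rotation helper degressFromVideoFileWithURL: used in step 5 is not shown in this post. One common way to implement it is to read the first video track's preferredTransform and map it to 0/90/180/270 degrees; a sketch, assuming that is what the author's helper does:

```
// A sketch of the rotation helper referenced in step 5 (its implementation is not
// shown in this post): map the first video track's preferredTransform to degrees.
+ (NSInteger)degressFromVideoFileWithURL:(NSURL *)url {
    AVAsset *asset = [AVAsset assetWithURL:url];
    AVAssetTrack *videoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    if (videoTrack == nil) {
        return 0;
    }
    CGAffineTransform t = videoTrack.preferredTransform;
    if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
        return 90;   // portrait
    } else if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
        return 270;  // portrait, upside down
    } else if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
        return 180;  // landscape, rotated
    }
    return 0;        // landscape, no rotation
}
```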

To be honest, there are quite a few concepts and properties here that I'm not entirely sure about, so I've pulled them out and explained them a bit along the way.

I haven't had time to put together a demo yet, but I will definitely update this post with one. If you have questions, let's discuss them together, and please point out anything I've gotten wrong.