Extracting the audio from a video, and grabbing a video's first frame

2018-09-13  泥孩儿0107

// Get the first frame of a video

- (UIImage *)getVideoPreViewImage:(AVURLAsset *)asset
{
    AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];

    // Apply the track's preferred transform so the thumbnail has the correct orientation.
    // Without this, videos recorded at 90/180/270° come back rotated instead of upright.
    gen.appliesPreferredTrackTransform = YES;
    gen.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;

    // Cap the maximum size (resolution) of the generated image
    gen.maximumSize = CGSizeMake(300, 169);

    CMTime time = CMTimeMakeWithSeconds(0.0, 600); // frame at second 0; 600 is the CMTime timescale
    NSError *error = nil;
    CMTime actualTime;
    CGImageRef image = [gen copyCGImageAtTime:time actualTime:&actualTime error:&error];
    if (error) {
        UIImage *placeHoldImg = [UIImage imageNamed:@"付费页"]; // fall back to a placeholder image
        return placeHoldImg;
    }

    UIImage *thumb = [[UIImage alloc] initWithCGImage:image];
    CGImageRelease(image);
    return thumb;
}
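For context, a call site might look like the sketch below; the file path and image view are hypothetical placeholders, not part of the original post.

// Hypothetical usage: build an AVURLAsset from a local file and show its first frame
NSURL *videoURL = [NSURL fileURLWithPath:@"/path/to/video.mp4"]; // placeholder path
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
self.coverImageView.image = [self getVideoPreViewImage:asset];   // coverImageView is assumed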

The method above sometimes crashes.
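One safer variant, sketched below under my own assumptions (the method name and completion block are not from the original post), generates the thumbnail asynchronously and checks the result before building a UIImage, so a NULL CGImage does not bring the app down.

// Sketch: asynchronous thumbnail generation that tolerates failure (assumed helper, not the original code)
- (void)getVideoPreViewImageAsync:(AVURLAsset *)asset completion:(void (^)(UIImage *thumb))completion
{
    AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    gen.appliesPreferredTrackTransform = YES;
    gen.maximumSize = CGSizeMake(300, 169);
    NSValue *time = [NSValue valueWithCMTime:CMTimeMakeWithSeconds(0.0, 600)];
    [gen generateCGImagesAsynchronouslyForTimes:@[time]
                              completionHandler:^(CMTime requestedTime, CGImageRef image, CMTime actualTime,
                                                  AVAssetImageGeneratorResult result, NSError *error) {
        UIImage *thumb = (result == AVAssetImageGeneratorSucceeded && image != NULL)
                             ? [UIImage imageWithCGImage:image]
                             : nil; // hand back nil instead of crashing on a bad frame
        dispatch_async(dispatch_get_main_queue(), ^{
            if (completion) completion(thumb);
        });
    }];
}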

// Grab a frame from an m3u8 (HLS) video

- (UIImage *)getPixelBufferForItem:(AVPlayerItem *)playerItem {
    AVPlayerItemVideoOutput *output = [[AVPlayerItemVideoOutput alloc] init];
    [playerItem addOutput:output];
    // 1000/60 ≈ 16.7 seconds into the item
    CVPixelBufferRef ref = [output copyPixelBufferForItemTime:CMTimeMake(1000, 60) itemTimeForDisplay:nil];
    if (ref == NULL) {
        return nil; // no frame has been rendered for that time yet
    }
    UIImage *image = [self CVImageToUIImage:ref];
    CVPixelBufferRelease(ref); // the buffer is a copy, so release it to avoid leaking
    return image;
}
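Two caveats worth noting: copyPixelBufferForItemTime: only returns a buffer once the item has actually rendered a frame, and the converter below assumes a 32-bit BGRA layout. A sketch of how the output could be configured to guarantee that format (my assumption, not part of the original post):

// Sketch: request 32BGRA buffers so they match the CGImageCreate settings used in CVImageToUIImage
NSDictionary *attrs = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
AVPlayerItemVideoOutput *output = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attrs];
[playerItem addOutput:output]; // playerItem is assumed to be playing, or at least to have rendered a frame

CMTime itemTime = [output itemTimeForHostTime:CACurrentMediaTime()];
if ([output hasNewPixelBufferForItemTime:itemTime]) {
    CVPixelBufferRef buffer = [output copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil];
    UIImage *frame = [self CVImageToUIImage:buffer];
    CVPixelBufferRelease(buffer);
    // use frame here
}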

// Convert a CVPixelBufferRef to a UIImage (assumes a 32-bit BGRA, non-planar buffer)

- (UIImage *)CVImageToUIImage:(CVPixelBufferRef)imageBuffer {
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t bufferSize = CVPixelBufferGetDataSize(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, baseAddress, bufferSize, NULL);
    CGImageRef cgImage = CGImageCreate(width, height, 8, 32, bytesPerRow, rgbColorSpace,
                                       kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little,
                                       provider, NULL, true, kCGRenderingIntentDefault);
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(rgbColorSpace);

    // Re-encode through JPEG so the returned image no longer references the pixel buffer's memory
    NSData *imageData = UIImageJPEGRepresentation(image, 1.0);
    image = [UIImage imageWithData:imageData];
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    return image;
}

// Extract the audio from a video
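The fragment below comes from a larger method: urlAsset, composition, startTime, trackDuration and audioMixParams are assumed to exist already. A minimal sketch of that surrounding setup, reconstructed by me rather than taken from the original post, might look like:

// Sketch of the state the fragment below relies on (names reconstructed, videoURL is a placeholder)
AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
AVMutableComposition *composition = [AVMutableComposition composition];
NSMutableArray *audioMixParams = [NSMutableArray array];
CMTime startTime = kCMTimeZero;              // where to start taking audio from the source
CMTime trackDuration = urlAsset.duration;    // how much audio to take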

    [self setUpAndAddAudioAtPath:urlAsset toComposition:composition start:startTime dura:trackDuration offset:CMTimeMake(14 * 44100, 44100) andArray:audioMixParams];

    // Create a mutable audio mix
    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    audioMix.inputParameters = [NSArray arrayWithArray:audioMixParams]; // the per-track parameters collected in the array

    // Create an export session
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc]
                                      initWithAsset:composition
                                      presetName:AVAssetExportPresetAppleM4A];
    exporter.audioMix = audioMix;
    exporter.outputFileType = @"com.apple.m4a-audio"; // i.e. AVFileTypeAppleM4A

    // The video's audio is written to a file after export
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES);
    NSString *path = [paths objectAtIndex:0];
    //NSString *movDirectory = [path stringByAppendingPathComponent:@"/%@.m4a"];
    NSString *movDirectory = [path stringByAppendingPathComponent:
                              [NSString stringWithFormat:@"/%@.m4a", [self getCurrentTimes]]];
    if ([[NSFileManager defaultManager] fileExistsAtPath:movDirectory]) {
        [[NSFileManager defaultManager] removeItemAtPath:movDirectory error:nil];
    }
    NSURL *exportURL = [NSURL fileURLWithPath:movDirectory];
    exporter.outputURL = exportURL;

    MAKAudioObject *object = [[MAKAudioObject alloc] init];
    object.url = movDirectory;
    object.name = [movDirectory substringFromIndex:movDirectory.length - 19];    // "yyyyMMdd_HHmmss.m4a" is 19 characters
    object.localam = [movDirectory substringFromIndex:movDirectory.length - 19];

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"Export finished with status %ld, error: %@", (long)exporter.status, exporter.error);
            if (exporter.status == AVAssetExportSessionStatusCompleted) {
                NSURL *outputURL = exporter.outputURL;
                AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:outputURL options:nil];
                NSData *data = [NSData dataWithContentsOfFile:movDirectory];
                NSLog(@"data:%@", data);

                object.time = [NSString stringWithFormat:@"%.2f", CMTimeGetSeconds(songAsset.duration)];
                NSNumber *size;
                [songAsset.URL getResourceValue:&size forKey:NSURLFileSizeKey error:nil];
                NSLog(@"size is %.2f", [size floatValue] / (1024.0 * 1024.0)); // e.g. size is 43.703005
                object.cunchu = [NSString stringWithFormat:@"%.2fMb", [size floatValue] / (1024.0 * 1024.0)];

                if ([[MAKAudioMusicDBManager sharedManager] insertAudioWith:object]) {
                    [self backButtonclick];
                }
            }
        });
    }];

- (void)setUpAndAddAudioAtPath:(AVURLAsset *)songAsset toComposition:(AVMutableComposition *)composition start:(CMTime)start dura:(CMTime)dura offset:(CMTime)offset andArray:(NSMutableArray *)audioMixParams {

    AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *sourceAudioTrack = [[songAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    NSError *error = nil;
    BOOL ok = NO;
    CMTime startTime = start;
    CMTime trackDuration = dura;
    CMTimeRange tRange = CMTimeRangeMake(startTime, trackDuration);

    // Set the volume
    // AVMutableAudioMixInputParameters: mutable audio-mix input parameters
    // audioMixInputParametersWithTrack: creates the parameters for a given track
    //AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
    //[trackMix setVolume:0.8f atTime:startTime];
    // Add the parameters to the array
    //[audioMixParams addObject:trackMix];

    // Insert the audio into the track; passing kCMTimeInvalid appends the range at the end of the track.
    // Note that the offset parameter is not used here.  // offset CMTimeMake(0, 44100)
    ok = [track insertTimeRange:tRange ofTrack:sourceAudioTrack atTime:kCMTimeInvalid error:&error];
}
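Because the volume lines above are commented out, audioMixParams stays empty and the AVMutableAudioMix built from it has no effect on the export. If per-track volume is actually wanted, re-enabling them would look roughly like this sketch (the 0.8 volume is only an example):

// Sketch: populate audioMixParams so the AVMutableAudioMix applied to the exporter does something
AVMutableAudioMixInputParameters *trackMix =
    [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
[trackMix setVolume:0.8f atTime:startTime]; // example volume; adjust as needed
[audioMixParams addObject:trackMix];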

- (NSString *)getCurrentTimes {
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    // Set whatever format you want; note hh vs HH is 12-hour vs 24-hour,
    // and yyyy (calendar year) should be used rather than YYYY (week-based year)
    [formatter setDateFormat:@"yyyyMMdd_HHmmss"];
    // The current date; log it to see the resulting format
    NSDate *datenow = [NSDate date];
    // Convert the NSDate to an NSString with the formatter
    NSString *currentTimeString = [formatter stringFromDate:datenow];
    NSLog(@"currentTimeString = %@", currentTimeString);
    return currentTimeString;
}
