2023-08-23

2023-08-22  本文已影响0人  大佬papp

解决视频转webp方案

分为两步:1. 视频剪辑后按帧抽取图片;2. 将图片数组编码为 webp。

#pragma mark 裁剪视频
/// Trim a video asset to the range [startTime, endTime] and export it.
/// @param videoAsset Source asset; must contain at least one video track.
/// @param startTime  Trim start, in seconds.
/// @param endTime    Trim end, in seconds; must be greater than startTime.
/// @param completion Called with the exported file URL on success, or a
///                   non-nil error plus a failure state otherwise.
+ (void)cutVideoAndExportVideoWithVideoAsset:(AVAsset *)videoAsset startTime:(CGFloat)startTime endTime:(CGFloat)endTime completion:(void (^)(NSURL *outputPath, NSError *error, ST_VideoState state))completion
{
    NSError *error;
    // 1. A mutable composition receives the trimmed slice of the source track.
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

    // 2. Build the slice to extract: start + duration, expressed in the
    //    asset's own timescale to avoid rounding drift.
    CMTime startT = CMTimeMakeWithSeconds(startTime, videoAsset.duration.timescale);
    CMTime videoDuration = CMTimeMakeWithSeconds(endTime - startTime, videoAsset.duration.timescale);
    CMTimeRange timeRange = CMTimeRangeMake(startT, videoDuration);

    // 3. Video track. Fail fast instead of passing a nil source track to
    //    -insertTimeRange:ofTrack:atTime:error:, which raises an exception
    //    inside AVFoundation.
    AVAssetTrack *sourceVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    if (!sourceVideoTrack)
    {
        if (completion)
        {
            completion(nil, [NSError ST_PhotoSDKVideoActionDescription:@"视频中没有可用的视频轨道"], ST_ExportSessionStatusFailed);
        }
        return;
    }
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    /* timeRange: slice to copy; ofTrack: source track; atTime: insertion point in the composition */
    BOOL inserted = [videoTrack insertTimeRange:timeRange
                                        ofTrack:sourceVideoTrack
                                         atTime:kCMTimeZero
                                          error:&error];
    if (!inserted)
    {
        // Propagate the failure instead of silently exporting an empty composition.
        if (completion)
        {
            completion(nil, error, ST_ExportSessionStatusFailed);
        }
        return;
    }

    // 4. Bake the source rotation into a video composition, then hand off to
    //    the shared export helper. cut:YES — the trim was already applied by
    //    insertTimeRange above, so the exporter must not re-apply timeRange.
    AVMutableVideoComposition *videoComposition = [self fixedCompositionWithAsset:videoAsset];

    [self _getExportVideoWithAvAssset:mixComposition videoComposition:videoComposition audioMix:nil timeRange:timeRange completion:completion cut:YES];
}
/// Build a video composition that bakes the source rotation (90/180/270°)
/// into a render transform so exported frames come out upright.
/// When the source needs no rotation the renderSize is deliberately left at
/// zero — callers use renderSize.width as a "no composition needed" sentinel.
+ (AVMutableVideoComposition *)fixedCompositionWithAsset:(AVAsset *)videoAsset
{
    // Container for the per-track rotation instructions.
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    // Source rotation in degrees (0/90/180/270), resolved by a sibling helper.
    int degrees = [self degressFromVideoFileWithAsset:videoAsset];
    CGAffineTransform translateToCenter;
    CGAffineTransform mixedTransform;
    videoComposition.frameDuration = CMTimeMake(1, 30); // render at 30 fps

    // Guard: an asset without a video track has nothing to rotate; returning
    // the untouched composition (renderSize zero) is safe because callers
    // check renderSize.width before applying it. The previous
    // objectAtIndex:0 crashed here on audio-only assets.
    AVAssetTrack *videoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    if (!videoTrack)
    {
        return videoComposition;
    }
    // One instruction spans the whole asset and carries the layer instruction.
    AVMutableVideoCompositionInstruction *roateInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    roateInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, [videoAsset duration]);
    // Per-track state (the transform) within that instruction's time range.
    AVMutableVideoCompositionLayerInstruction *roateLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

    if (degrees == 90) // UIImageOrientationRight
    {
        // Rotate 90° clockwise: translate by the (rotated) width first so the
        // frame stays inside the render rect, then rotate. Render size swaps
        // width/height.
        translateToCenter = CGAffineTransformMakeTranslation(videoTrack.naturalSize.height, 0.0);
        mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI_2);
        videoComposition.renderSize = CGSizeMake(videoTrack.naturalSize.height, videoTrack.naturalSize.width);
        [roateLayerInstruction setTransform:mixedTransform atTime:kCMTimeZero];
    }
    else if (degrees == 180) // UIImageOrientationDown
    {
        // Rotate 180° clockwise; render size unchanged.
        translateToCenter = CGAffineTransformMakeTranslation(videoTrack.naturalSize.width, videoTrack.naturalSize.height);
        mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI);
        videoComposition.renderSize = CGSizeMake(videoTrack.naturalSize.width, videoTrack.naturalSize.height);
        [roateLayerInstruction setTransform:mixedTransform atTime:kCMTimeZero];
    }
    else if (degrees == 270) // UIImageOrientationLeft
    {
        // Rotate 270° clockwise; render size swaps width/height.
        translateToCenter = CGAffineTransformMakeTranslation(0.0, videoTrack.naturalSize.width);
        mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI_2 * 3.0);
        videoComposition.renderSize = CGSizeMake(videoTrack.naturalSize.height, videoTrack.naturalSize.width);
        [roateLayerInstruction setTransform:mixedTransform atTime:kCMTimeZero];
    }
    // degrees == 0: no transform, renderSize stays zero (sentinel, see above).

    roateInstruction.layerInstructions = @[roateLayerInstruction];
    videoComposition.instructions = @[roateInstruction];

    return videoComposition;
}

/// Configure and run an AVAssetExportSession for the given asset.
/// @param videoAsset       Asset (usually a composition) to export.
/// @param videoComposition Orientation-fixing composition; only applied when
///                         its renderSize.width is non-zero (zero means "no
///                         rotation needed" — applying it then fails export).
/// @param audioMix         Optional audio mix (currently passed as nil by callers).
/// @param timeRange        Range to export; only applied when isCut == NO,
///                         because cut flows already trimmed the composition.
/// @param completion       Invoked on the main queue with every status change;
///                         only Completed carries a non-nil output URL.
/// @param isCut            YES when the asset was pre-trimmed via insertTimeRange.
+ (void)_getExportVideoWithAvAssset:(AVAsset *)videoAsset videoComposition:(AVVideoComposition *)videoComposition audioMix:(AVAudioMix *)audioMix timeRange:(CMTimeRange)timeRange completion:(void (^)(NSURL *outputPath, NSError *error, ST_VideoState state))completion cut:(BOOL)isCut
{
    NSURL *outputURL = [self _getExportVideoPathForType:@"mp4"]; // destination file
    NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:videoAsset];
    if (![compatiblePresets containsObject:AVAssetExportPresetHighestQuality])
    {
        // Previously this case returned silently and the completion block was
        // never called, leaving callers waiting forever. Report it explicitly.
        if (completion)
        {
            NSError *error = [NSError ST_PhotoSDKVideoActionDescription:@"视频类型暂不支持导出"];
            completion(nil, error, ST_ExportSessionStatusFailed);
        }
        return;
    }

    // AVAssetExportPresetPassthrough could return unprocessed video, so the
    // highest-quality preset is used deliberately.
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]
                                           initWithAsset:videoAsset
                                           presetName:AVAssetExportPresetHighestQuality];
    if (!isCut)
    {
        // Direct export starts at 0 by default; apply the trim here only when
        // the composition was not already trimmed upstream.
        exportSession.timeRange = timeRange;
    }
    if (videoComposition.renderSize.width)
    {
        // A zero renderSize means "no rotation"; applying it would fail export.
        exportSession.videoComposition = videoComposition;
    }
    exportSession.outputURL = outputURL;
    exportSession.shouldOptimizeForNetworkUse = YES; // streamable moov placement
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    NSArray *supportedTypeArray = exportSession.supportedFileTypes;
    if ([supportedTypeArray containsObject:AVFileTypeMPEG4]) // prefer MP4
    {
        exportSession.outputFileType = AVFileTypeMPEG4;
    }
    else if (supportedTypeArray.count == 0)
    {
        NSError *error = [NSError ST_PhotoSDKVideoActionDescription:@"视频类型暂不支持导出"];
        if (completion)
        {
            completion(nil, error, ST_ExportSessionStatusFailed);
        }
        return;
    }
    else
    {
        // Fall back to whatever the session supports first.
        exportSession.outputFileType = [supportedTypeArray objectAtIndex:0];
    }

    // Export asynchronously; all callbacks are funneled onto the main queue.
    // NOTE(review): intermediate states (Unknown/Waiting/Exporting) are also
    // reported through the completion block with a descriptive NSError —
    // existing callers rely on receiving these, so the behavior is preserved.
    __block NSError *error;

    [exportSession exportAsynchronouslyWithCompletionHandler:^(void) {
        dispatch_async(dispatch_get_main_queue(), ^{
            switch (exportSession.status)
            {
                case AVAssetExportSessionStatusUnknown:
                {
                    error = [NSError ST_PhotoSDKVideoActionDescription:@"AVAssetExportSessionStatusUnknown"];
                    if (completion)
                    {
                        completion(nil, error, ST_ExportSessionStatusUnknown);
                    }
                    break;
                }
                case AVAssetExportSessionStatusWaiting:
                {
                    error = [NSError ST_PhotoSDKVideoActionDescription:@"AVAssetExportSessionStatusWaiting"];
                    if (completion)
                    {
                        completion(nil, error, ST_ExportSessionStatusWaiting);
                    }
                    break;
                }
                case AVAssetExportSessionStatusExporting:
                {
                    error = [NSError ST_PhotoSDKVideoActionDescription:@"AVAssetExportSessionStatusExporting"];
                    if (completion)
                    {
                        completion(nil, error, ST_ExportSessionStatusExporting);
                    }
                    break;
                }
                case AVAssetExportSessionStatusCompleted:
                {
                    // Success: hand back the exported file URL.
                    if (completion)
                    {
                        completion(outputURL, nil, ST_ExportSessionStatusCompleted);
                    }
                    break;
                }
                case AVAssetExportSessionStatusFailed:
                {
                    error = [NSError ST_PhotoSDKVideoActionDescription:[NSString stringWithFormat:@"导出失败:%@", exportSession.error]];
                    if (completion)
                    {
                        completion(nil, error, ST_ExportSessionStatusFailed);
                    }
                    break;
                }
                default:
                    break;
            }
        });
    }];
}

// Frame extraction + re-encoding; the sampling and per-frame compression here
// exist to keep the generated WebP file small.
/// Sample the video into at most 20 frames and encode them as an animated WebP.
/// @param videoUrl      Local file URL of the source video.
/// @param progressBlock Optional; invoked on the worker queue with progress in [0, 1].
/// @param pathBlock     Optional; invoked on the worker queue with the output file path.
-(void)saveToWebpByVideoPath:(NSURL *)videoUrl andProgressBlock:(void (^)(float))progressBlock andPathBlock:(void (^)(NSString *))pathBlock{
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSString *filePath = [self createWebpFilePath];

        YYImageEncoder *encoder = [[YYImageEncoder alloc] initWithType:YYImageTypeWebP];
        encoder.loopCount = 0;  // 0 = loop forever
        encoder.quality = 0.4;  // lossy quality 0.4 to shrink the file
        encoder.lossless = NO;  // lossy compression mode

        AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
        AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
        generator.appliesPreferredTrackTransform = YES; // honor source rotation
        generator.requestedTimeToleranceAfter = kCMTimeZero;  // exact-frame snapshots
        generator.requestedTimeToleranceBefore = kCMTimeZero;
        generator.maximumSize = CGSizeMake(256, 256); // cap decoded frames at 256x256
        generator.apertureMode = AVAssetImageGeneratorApertureModeCleanAperture;

        NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];

        if ([tracks count] > 0) {
            AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
            float frameRate = [videoTrack nominalFrameRate];
            float duration = CMTimeGetSeconds([videoTrack timeRange].duration);
            int frameCount = (int)(frameRate * duration); // total source frames
            int maxCount = 20; // hard cap on sampled frames; keep every minTime-th frame

            int minTime = frameCount / maxCount;
            // Display duration per encoded frame. The previous integer
            // expression `1.0 / frameRate * (minTime / 2)` collapsed to 0
            // whenever minTime < 2, producing zero-duration WebP frames;
            // clamp the multiplier to at least 1.
            NSTimeInterval frameDisplayDuration = (1.0 / frameRate) * MAX(minTime / 2, 1);
            // Index of the last frame actually encoded (-1 = none yet).
            int lastValidFrameIndex = -1;
            int count = 0;
            for (int i = 0; i < frameCount; i++) {
                // Down-sample long clips: only keep every minTime-th frame.
                // (Short-circuit keeps minTime==0 clips safe from i % 0.)
                if(frameCount > maxCount && i % minTime != 0){
                    continue;
                }
                CMTime time = CMTimeMakeWithSeconds(i / frameRate, asset.duration.timescale);
                NSError *error = nil;
                CGImageRef image = [generator copyCGImageAtTime:time actualTime:nil error:&error];

                if (image) { // skip frames the generator could not decode
                    UIImage *img = [UIImage imageWithCGImage:image];
                    img = [self resizeImage:img toSize:CGSizeMake(512, 512)];
                    // Re-compress each frame towards ~10 KB before encoding.
                    NSData *data = [GIF2MP4 compressImageQualityWithImage:img toByte:10 * 1024];
                    [encoder addImageWithData:data duration:frameDisplayDuration];

                    lastValidFrameIndex = i;
                    CGImageRelease(image);
                }
                count++;
                if (progressBlock) { // nil-guard: callers may pass no progress block
                    progressBlock(count * 0.04);
                }
                if (count >= 19) {
                    break; // never exceed the frame cap
                }
            }

            // If the loop stopped before the final source frame, repeat the
            // last decoded frame so the animation does not end abruptly.
            if (lastValidFrameIndex >= 0 && lastValidFrameIndex < frameCount - 1) {
                CMTime time = CMTimeMakeWithSeconds(lastValidFrameIndex / frameRate, asset.duration.timescale);
                CGImageRef image = [generator copyCGImageAtTime:time actualTime:nil error:nil];
                if (image) {
                    UIImage *img = [UIImage imageWithCGImage:image];
                    img = [self resizeImage:img toSize:CGSizeMake(512, 512)];
                    NSData *data = [GIF2MP4 compressImageQualityWithImage:img toByte:10 * 1024];
                    [encoder addImageWithData:data duration:frameDisplayDuration];
                    CGImageRelease(image);
                }
                count += 1;
                [SVProgressHUD showProgress:count * 0.025 status:@"loadIng"];
                if (progressBlock) {
                    progressBlock(count * 0.04);
                }
            }
            DLog(@"data 大小为%ld",[encoder encode].length);
            [encoder encodeToFile:filePath];
            if (progressBlock) {
                progressBlock(1);
            }
            if (pathBlock) {
                pathBlock(filePath);
            }
        }
    });
}

记录一下

上一篇 下一篇

猜你喜欢

热点阅读