iOS开发攻城狮的集散地我是程序员;您好程先生;叫我序员就好了程序员

iOS 不同尺寸、比例、方向的视频拼接播放

2018-04-11  本文已影响55人  朱亦鸣

需求是:把不同尺寸、比例、方向的视频拼接成一个固定尺寸的视频并播放。注意不是导出后再播放,而是直接播放(composition)。这个需求让我填了几天的坑,下面先说说碰到的问题。
在这之前呢如果你知道AVMutableComposition AVMutableVideoComposition AVMutableVideoCompositionInstruction AVMutableVideoCompositionLayerInstruction 就继续往下看,不然看不懂不要怪我。

问题

        // Create two alternating video tracks on the composition; clips are
        // assigned to tracks A/B in turn so adjacent clips can overlap in time
        // for the transition animation.
        var comVideoTracks = [AVMutableCompositionTrack]()
        for _ in 0...1 {
            // NOTE(review): force-unwrap assumes addMutableTrack never returns nil — TODO confirm
            comVideoTracks.append(muComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)!)
        }
        
        // passThroughTimeRanges[i]: the span where clip i plays alone.
        // transitionTimeRanges[i]: the overlap between clip i and clip i+1.
        var passThroughTimeRanges: [CMTimeRange] = [CMTimeRange]()
        var transitionTimeRanges: [CMTimeRange] = [CMTimeRange]()
        
        var startTime = kCMTimeZero
        for (index,asset) in clips.enumerated() {
            let oriVideoTrack = (asset.tracks(withMediaType: .video).first)
            if oriVideoTrack == nil{
                // NOTE(review): skipping a track-less clip leaves passThroughTimeRanges
                // with fewer entries than clips.count, yet later code indexes it by
                // `index` — likely an out-of-range / misaligned-range bug. Verify.
                continue
            }
            let comVideoTrack = comVideoTracks[index % 2]
            let clipRange = clipRanges[index]
            // NOTE(review): try! crashes on insertion failure — consider do/catch.
            try! comVideoTrack.insertTimeRange(clipRange, of: oriVideoTrack!, at: startTime)
            
            passThroughTimeRanges.append(CMTimeRangeMake(startTime, clipRange.duration))
            if index > 0 {
                // Not the first clip: the previous transition consumes its head.
                passThroughTimeRanges[index].start = CMTimeAdd(passThroughTimeRanges[index].start, trasitionTime)
                passThroughTimeRanges[index].duration = CMTimeSubtract(passThroughTimeRanges[index].duration, trasitionTime);
            }
            
            if (index+1 < clips.count) {
                // Not the last clip: the next transition consumes its tail.
                passThroughTimeRanges[index].duration = CMTimeSubtract(passThroughTimeRanges[index].duration, trasitionTime);
            }
            
            // Advance by the clip length, then back up by one transition length:
            // the next clip starts before this one ends, creating the overlap
            // the transition animates over.
            startTime = CMTimeAdd(startTime, clipRange.duration)
            startTime = CMTimeSubtract(startTime, trasitionTime)
            
            if index + 1 < clips.count{
                transitionTimeRanges.append(CMTimeRangeMake(startTime, trasitionTime))
            }
        }

我们把所有的视频交替添加到 composition 的两个轨道上面。passThroughTimeRanges 和 transitionTimeRanges 这两个数组里放的就是计算出来的视频过渡的时间。视频如果要加过渡动画,就必须要有重合,所以可以看到我们计算的时间是有一段重合的。我们对视频方向、比例的处理都是在 AVMutableVideoCompositionLayerInstruction 里进行的。

        // Build the per-segment instruction list: one pass-through instruction
        // per clip, plus one transition instruction per adjacent pair of clips.
        // A typed array removes the original unsafe `as!` cast at the end —
        // AVMutableVideoCompositionInstruction already conforms to
        // AVVideoCompositionInstructionProtocol.
        var instructions = [AVMutableVideoCompositionInstruction]()
        
        for (index,asset) in clips.enumerated() {
            // Clips alternate between the two composition tracks.
            let comVideoTrack = comVideoTracks[index % 2]

            // Segment where this clip plays alone.
            let passThroughInstruction = AVMutableVideoCompositionInstruction()
            passThroughInstruction.timeRange = passThroughTimeRanges[index]

            let passThroughLayer = AVMutableVideoCompositionLayerInstruction(assetTrack: comVideoTrack)
            // Apply the orientation/aspect transform for this clip.
            changeVideoSize(asset: asset,passThroughLayer: passThroughLayer)

            passThroughInstruction.layerInstructions = [passThroughLayer]
            instructions.append(passThroughInstruction)
            
            if index + 1 < clips.count{
                // Overlap segment: animate from this clip's track to the other
                // track, which carries the next clip.
                let transitionInstruction = AVMutableVideoCompositionInstruction()
                transitionInstruction.timeRange = transitionTimeRanges[index]
                let fromLayer =
                    AVMutableVideoCompositionLayerInstruction(assetTrack: comVideoTrack)
                let toLayer =
                    AVMutableVideoCompositionLayerInstruction(assetTrack:comVideoTracks[1 - index % 2])
              
                // Both layers need their own transform during the overlap.
                changeVideoSize(asset: asset,passThroughLayer: fromLayer)
                changeVideoSize(asset: clips[index + 1],passThroughLayer: toLayer)
                
                videoTransition(fromLayer: fromLayer,toLayer: toLayer,asset: asset, timeRange: transitionTimeRanges[index])
                
                transitionInstruction.layerInstructions = [fromLayer, toLayer]
                instructions.append(transitionInstruction)
            }
        }
        muVideoComposition.instructions = instructions

上面代码中我们调用了 changeVideoSize 和 videoTransition,它们分别处理了视频的比例、方向和过渡动画。

/// Applies the selected transition animation to the outgoing (`fromLayer`)
/// and incoming (`toLayer`) layer instructions over `timeRange`.
/// - Parameters:
///   - fromLayer: Layer instruction of the clip that is ending.
///   - toLayer: Layer instruction of the clip that is starting.
///   - asset: The outgoing clip; its natural size drives the crop math.
///   - timeRange: The overlap range the ramp animates across.
func videoTransition(fromLayer:AVMutableVideoCompositionLayerInstruction,toLayer:AVMutableVideoCompositionLayerInstruction,asset:AVAsset,timeRange:CMTimeRange) {
        // Fix: the original force-unwrapped naturalSize and crashed on an
        // asset with no video track; bail out instead.
        guard let oriVideoTrack = asset.tracks(withMediaType: .video).first else { return }
        let natureSize = oriVideoTrack.naturalSize
        switch transitionType {
        case .Opacity:
            // Cross-fade: outgoing fades out while incoming fades in.
            fromLayer.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: timeRange)
            toLayer.setOpacityRamp(fromStartOpacity: 0.0, toEndOpacity: 1.0, timeRange: timeRange)
        case .SwipeLeft:
            // Shrink the outgoing clip's crop rectangle to zero width,
            // revealing the incoming clip from the right.
            fromLayer.setCropRectangleRamp(fromStartCropRectangle: CGRect.init(origin: .zero, size: videoSize), toEndCropRectangle: CGRect.init(origin: .zero, size: CGSize.init(width: 0, height: videoSize.height)), timeRange: timeRange)
        default:
            // Swipe-up. A 90°-rotated (portrait-recorded) clip is cropped in
            // the track's pre-rotation coordinate space, so its "up" swipe is
            // expressed as a width ramp.
            if degressFromVideo(asset: asset) == 90{
                fromLayer.setCropRectangleRamp(fromStartCropRectangle: CGRect.init(origin: .zero, size: videoSize), toEndCropRectangle: CGRect.init(origin: .zero, size: CGSize.init(width: 0, height: videoSize.height)), timeRange: timeRange)
            }else{
                // Use the wider of natural/output width so the crop always
                // covers the full frame while the height collapses to zero.
                let width = natureSize.width > videoSize.width ? natureSize.width : videoSize.width
                fromLayer.setCropRectangleRamp(fromStartCropRectangle: CGRect.init(origin: .zero, size:CGSize.init(width: width, height: videoSize.height)), toEndCropRectangle: CGRect.init(origin: .zero, size:  CGSize.init(width: width, height: 0)), timeRange: timeRange)
            }
        }
    }

暂时就做了三个过渡动画:淡入、左扫、上扫。下面就是视频的比例问题了,代码有点多。

   /// Computes and applies the affine transform that fits `asset`'s video
   /// into the fixed output frame (`videoSize`), handling both the 9:16 and
   /// 16:9 output ratios and 90°-rotated (portrait-recorded) sources.
   /// The clip is aspect-fit on one axis and centered on the other.
   /// - Parameters:
   ///   - asset: Source clip; its first video track supplies naturalSize.
   ///   - passThroughLayer: Layer instruction that receives the transform.
   func changeVideoSize(asset:AVAsset,passThroughLayer:AVMutableVideoCompositionLayerInstruction)  {
        
        // Fix: the original force-unwrapped naturalSize and crashed on an
        // asset with no video track; bail out instead.
        guard let oriVideoTrack = asset.tracks(withMediaType: .video).first else { return }
        var natureSize = oriVideoTrack.naturalSize
        if degressFromVideo(asset: asset) == 90 {
            // Portrait-recorded video reports a landscape naturalSize plus a
            // 90° preferred transform; swap so width/height match what is seen.
            natureSize = CGSize.init(width: natureSize.height, height: natureSize.width)
        }
        
        // Live Photo videos come in odd 1440x1080 / 1080x1440 sizes; the
        // tweaks below were the author's workaround, kept for reference.
//        if natureSize.width == 1440 && natureSize.height == 1080 {
//            if videoRatio == .Ratio9_16{
//                natureSize.width = 1308
//            }else{
//                natureSize.height = 980
//            }
//        }
//
//        if natureSize.width == 1080 && natureSize.height == 1440 {
//            if videoRatio == .Ratio9_16 {
//                natureSize.width = 980
//            }else{
//                natureSize.height = 1308
//            }
//        }
        
        // Round an odd pixel width up to even (video encoders/compositors
        // generally require even dimensions).
        if Int(natureSize.width) % 2 != 0 {
            natureSize.width += 1.0
        }

        if videoRatio == .Ratio9_16{
            // Portrait output: fit to the output width, center vertically.
            if degressFromVideo(asset: asset) == 90{
                let height = videoSize.width * natureSize.height / natureSize.width
                // Translate before rotating so the rotated frame lands
                // centered inside the output rect.
                let translateToCenter = CGAffineTransform.init(translationX: videoSize.width, y: videoSize.height/2 - natureSize.height/2)
                
                let t = translateToCenter.scaledBy(x:videoSize.width/natureSize.width, y: height/natureSize.height)
                
                let mixedTransform = t.rotated(by: .pi/2)
                passThroughLayer.setTransform(mixedTransform, at: kCMTimeZero)
                
            }else{
                let height = videoSize.width * natureSize.height / natureSize.width
                let translateToCenter = CGAffineTransform.init(translationX: 0, y: videoSize.height/2 - height/2)
                let t = translateToCenter.scaledBy(x:videoSize.width/natureSize.width, y: height/natureSize.height)
                passThroughLayer.setTransform(t, at: kCMTimeZero)
            }
        }else{
            // Landscape output: fit to the output height, center horizontally.
            if degressFromVideo(asset: asset) == 90{
                let width = videoSize.height * natureSize.width/natureSize.height
                let translateToCenter = CGAffineTransform.init(translationX: videoSize.width/2 + width/2, y: 0)
                let t = translateToCenter.scaledBy(x:width/natureSize.width, y: videoSize.height/natureSize.height)
                
                let mixedTransform = t.rotated(by: .pi/2)
                passThroughLayer.setTransform(mixedTransform, at: kCMTimeZero)
                
            }else{
                let width = videoSize.height * natureSize.width/natureSize.height
                let translateToCenter = CGAffineTransform.init(translationX: videoSize.width/2 - width/2, y: 0)
                let t = translateToCenter.scaledBy(x:width/natureSize.width, y: videoSize.height/natureSize.height)
                passThroughLayer.setTransform(t, at: kCMTimeZero)
            }
        }
    }

我根据了16:9和9:16两种比例来判断,其它比例可按照这种方法来判断。我注释掉的是livephoto里的视频处理,这个也是比较坑的地方,也许只有碰到的人才能体会到了。碰到这种问题的人应该也都是做视频编辑类项目的人。最后就是播放

// Play the composition directly (no export): attaching the video composition
// to the player item makes AVPlayer apply the transforms/transitions live.
// NOTE(review): `compostion` looks like a typo for `composition` in the
// editor's property name — confirm against the editor class.
let playItem = AVPlayerItem.init(asset: editor.compostion)
    playItem.videoComposition = editor.videoComposition
    player = AVPlayer.init(playerItem: playItem)

就到这了,大家碰到了什么稀奇古怪的问题可以来交流交流。代码地址

上一篇下一篇

猜你喜欢

热点阅读