AVAssetExportSession is extremely slow when exporting video

Problem description

I'm simply trying to speed up the export of a merged video.

Here is the code. (From extensive research online and on SO, I gather that the AVAssetExportPresetPassthrough preset would make the export very fast, but as I wrote in the code comments, the merge code doesn't seem to export with that preset :/ A minimal sketch of what I mean by a passthrough export is at the end of this question.)

            static func videoCompositionInstruction(_ track: AVCompositionTrack,asset: AVAsset)
            -> AVMutableVideoCompositionLayerInstruction {
                let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
                let assetTrack = asset.tracks(withMediaType: .video)[0]

                let transform = assetTrack.preferredTransform
                let assetInfo = orientationFromTransform(transform)

                var scaleToFitRatio = 1080 / assetTrack.naturalSize.width
                if assetInfo.isPortrait {
                    scaleToFitRatio = 1080 / assetTrack.naturalSize.height
                    let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio,y: scaleToFitRatio)
                    var finalTransform = assetTrack.preferredTransform.concatenating(scaleFactor)
                    //was needed in my case (when the video doesn't fill the whole frame and leaves black bars; I don't know exactly when it's required, so try with and without it)
                    if assetInfo.orientation == .rightMirrored || assetInfo.orientation == .leftMirrored {
                        finalTransform = finalTransform.translatedBy(x: -transform.ty,y: 0)
                    }
                    instruction.setTransform(finalTransform,at: CMTime.zero)
                } else {
                    let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio,y: scaleToFitRatio)
                    var concat = assetTrack.preferredTransform.concatenating(scaleFactor)
                        .concatenating(CGAffineTransform(translationX: 0,y: UIScreen.main.bounds.width / 2))
                    if assetInfo.orientation == .down {
                        let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                        let windowBounds = UIScreen.main.bounds
                        let yFix = assetTrack.naturalSize.height + windowBounds.height
                        let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width,y: yFix)
                        concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
                    }
                    instruction.setTransform(concat,at: CMTime.zero)
                }

                return instruction
        }

        static func orientationFromTransform(_ transform: CGAffineTransform)
            -> (orientation: UIImage.Orientation,isPortrait: Bool) {
                var assetOrientation = UIImage.Orientation.up
                var isPortrait = false
                if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
                    assetOrientation = .right
                    isPortrait = true
                } else if transform.a == 0 && transform.b == 1.0 && transform.c == 1.0 && transform.d == 0 {
                    assetOrientation = .rightMirrored
                    isPortrait = true
                } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
                    assetOrientation = .left
                    isPortrait = true
                } else if transform.a == 0 && transform.b == -1.0 && transform.c == -1.0 && transform.d == 0 {
                    assetOrientation = .leftMirrored
                    isPortrait = true
                } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
                    assetOrientation = .up
                } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
                    assetOrientation = .down
                }
                return (assetOrientation,isPortrait)
        }
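
To make the transform-to-orientation mapping concrete: a clip recorded in portrait typically carries a 90° preferredTransform, which the helper above maps to .right / portrait. A minimal illustration with a hand-built transform (not one read from a real asset):

        // hand-built 90° rotation matrix: a = 0, b = 1, c = -1, d = 0
        let portraitTransform = CGAffineTransform(a: 0, b: 1, c: -1, d: 0, tx: 0, ty: 0)
        let info = MainCamVC.orientationFromTransform(portraitTransform)
        print(info.orientation == .right, info.isPortrait) // true true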

    func mergeVideosTestSQ(arrayVideos:[AVAsset],completion:@escaping (URL?,Error?) -> ()) {
            
            let mixComposition = AVMutableComposition()

            
            var instructions: [AVMutableVideoCompositionLayerInstruction] = []
            var insertTime = CMTime(seconds: 0,preferredTimescale: 1)

            /// for each URL add the video and audio tracks and their duration to the composition
            for sourceAsset in arrayVideos {
                    
                let frameRange = CMTimeRange(start: CMTime(seconds: 0,preferredTimescale: 1),duration: sourceAsset.duration)

                guard
                    let nthVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)),
                    let nthAudioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)), //0 used to be kCMPersistentTrackID_Invalid
                    let assetVideoTrack = sourceAsset.tracks(withMediaType: .video).first
                else {
                    print("couldn't create composition tracks or find a video track")
                    return
                }

                // the audio track is optional; some clips may not have one
                let assetAudioTrack = sourceAsset.tracks(withMediaType: .audio).first
                print(assetAudioTrack as Any, "-- assetAudioTrack of", sourceAsset)
                
                do {
                    
                    try nthVideoTrack.insertTimeRange(frameRange,of: assetVideoTrack,at: insertTime)
                    if let assetAudioTrack = assetAudioTrack {
                        try nthAudioTrack.insertTimeRange(frameRange, of: assetAudioTrack, at: insertTime)
                    }
    
                    //instructions:
                    let nthInstruction = MainCamVC.videoCompositionInstruction(nthVideoTrack,asset: sourceAsset)
                    nthInstruction.setOpacity(0.0,at: CMTimeAdd(insertTime,sourceAsset.duration)) //sourceasset.duration

                    instructions.append(nthInstruction)
                    insertTime = insertTime + sourceAsset.duration //sourceAsset.duration

                    
                    
                } catch {
                    DispatchQueue.main.async {
                        print("insertTimeRange failed:", error)
                    }
                }

            }
        

            let mainInstruction = AVMutableVideoCompositionInstruction()
            mainInstruction.timeRange = CMTimeRange(start: CMTime(seconds: 0, preferredTimescale: 1), duration: insertTime)

            mainInstruction.layerInstructions = instructions

            let mainComposition = AVMutableVideoComposition()
            mainComposition.instructions = [mainInstruction]
            mainComposition.frameDuration = CMTimeMake(value: 1,timescale: 30)
            mainComposition.renderSize = CGSize(width: 1080,height: 1920)

            let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory() + "merge.mp4")

            //below: remove any previous output file so the export doesn't fail when writing the new one
            let fileManager = FileManager()
            try? fileManager.removeItem(at: outputFileURL)
            
        
            /// try to start an export session and set the path and file type
            if let exportSession = AVAssetExportSession(asset: mixComposition,presetName: AVAssetExportPresetHighestQuality) { //DOES NOT WORK WITH AVAssetExportPresetPassthrough
                exportSession.outputFileType = .mov
                exportSession.outputURL = outputFileURL
                exportSession.videoComposition = mainComposition
                exportSession.shouldOptimizeForNetworkUse = true

                /// try to export the file and handle the status cases
                exportSession.exportAsynchronously {
                    switch exportSession.status {
                    case .completed:
                        completion(exportSession.outputURL, nil)
                    case .failed, .cancelled:
                        completion(nil, exportSession.error)
                    default:
                        break
                    }
                }
                
            }

        }
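
For context, this is roughly how the merge gets called; the clip URLs below are placeholders, not my real file names:

    // hypothetical call site; clip1.mov and clip2.mov stand in for the recorded clips
    let clipURLs = [
        URL(fileURLWithPath: NSTemporaryDirectory() + "clip1.mov"),
        URL(fileURLWithPath: NSTemporaryDirectory() + "clip2.mov")
    ]
    let clips = clipURLs.map { AVAsset(url: $0) }
    mergeVideosTestSQ(arrayVideos: clips) { url, error in
        if let url = url {
            print("merged video written to", url)
        } else {
            print("merge failed:", error?.localizedDescription ?? "unknown error")
        }
    }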

Note that I have the layer instructions there to keep the correct orientation.

Thanks for any help! I just need it to be faster; right now it takes roughly videoDuration / 2 seconds to export...
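
For comparison, below is roughly what a passthrough export would look like when no AVMutableVideoComposition (and therefore none of my orientation/scale instructions) is attached. This is only a sketch of the idea from my research, and it assumes all clips already share the same codec, dimensions and orientation; as far as I understand, that limitation is exactly why I can't just switch the preset in the merge code above.

    // sketch only (requires AVFoundation); passthrough copies samples instead of re-encoding,
    // which is what makes it fast, but it also means no videoComposition can be applied
    func mergeWithPassthrough(arrayVideos: [AVAsset], outputURL: URL, completion: @escaping (URL?, Error?) -> ()) {
        let composition = AVMutableComposition()
        guard
            let videoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
            let audioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
        else {
            completion(nil, nil)
            return
        }

        var insertTime = CMTime.zero
        for asset in arrayVideos {
            let range = CMTimeRange(start: .zero, duration: asset.duration)
            if let v = asset.tracks(withMediaType: .video).first {
                try? videoTrack.insertTimeRange(range, of: v, at: insertTime)
            }
            if let a = asset.tracks(withMediaType: .audio).first {
                try? audioTrack.insertTimeRange(range, of: a, at: insertTime)
            }
            insertTime = insertTime + asset.duration
        }

        guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough) else {
            completion(nil, nil)
            return
        }
        exporter.outputFileType = .mov
        exporter.outputURL = outputURL
        exporter.exportAsynchronously {
            completion(exporter.status == .completed ? exporter.outputURL : nil, exporter.error)
        }
    }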

Solution

No effective solution to this problem has been found yet.
