iOS Swift:将两个视频并排合并为一个视频,但一个视频的高度不同

问题描述

我正在尝试在快速ios中并排合并两个视频。 我可以使用AVMutableVideoComposition合并两个视频,并使用AVAssetExportSession导出视频。

合并两个视频时出现问题,两个视频的高度都不同。

查看屏幕截图 https://i.stack.imgur.com/xHcP0.png

我希望这两个高度都应相同。

这是我合并视频的代码

func mergeTwoVideos(videoSize: CGSize,completion: @escaping StichVideoCompletion) {
    if videoUrl1 == nil || videoUrl2 == nil {
        return
    }
    let asset1: AVAsset = AVAsset(url: videoUrl1!) 
    let asset2: AVAsset = AVAsset(url: videoUrl2!)
    var layerInstructionArray:[AVMutableVideoCompositionLayerInstruction] = []
    var prevIoUsAssetDurationInFloats:Float32 = 0.0
    var prevIoUsAssest: AVAsset?
    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = CGSize(width: videoSize.width*2,height: 1000) //videoSize.height*2)
    let composition = AVMutableComposition.init()
    let mainInstruction = AVMutableVideoCompositionInstruction()
    
var xPos: CGFloat = 0
var yPos: CGFloat = 0
let arrayVideos: [AVAsset] = [asset1,asset2]

for (index,videoAsset) in arrayVideos.enumerated() {
    let assetDurationInFloats = Float32(videoAsset.duration.value) / Float32(videoAsset.duration.timescale)
    if (assetDurationInFloats >= prevIoUsAssetDurationInFloats) {
        videoComposition.frameDuration = CMTimeMake(value: 1,timescale: 60)
        mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero,duration: videoAsset.duration);
        
        prevIoUsAssetDurationInFloats = assetDurationInFloats;
        prevIoUsAssest = videoAsset;
    }
    else {
        videoComposition.frameDuration = CMTimeMake(value: 1,duration: prevIoUsAssest?.duration ?? CMTime.zero);
    }
    let firstTrack = composition.addMutableTrack(withMediaType: .video,preferredTrackID: kCMPersistentTrackID_Invalid)
    guard let videoAssetTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else { continue }
    do {
        if videoAsset.tracks(withMediaType: AVMediaType.audio).count > 0 {
            try firstTrack?.insertTimeRange(CMTimeRange(start: CMTime.zero,duration: videoAsset.duration),of: videoAssetTrack,at: CMTime.zero)
        }
    }
    catch (let error) {
        print(error.localizedDescription)
    }
    
    if index == 0 { //applying audio for video not camera
        let firstTrackAudio = composition.addMutableTrack(withMediaType: .audio,preferredTrackID: kCMPersistentTrackID_Invalid)
        do {
            if videoAsset.tracks(withMediaType: AVMediaType.audio).count > 0 {
                try firstTrackAudio?.insertTimeRange(CMTimeRange(start: CMTime.zero,of: videoAsset.tracks(withMediaType: .audio)[0],at: CMTime.zero)
            }
        }
        catch (let error) {
            print(error.localizedDescription)
        }
    }
    
    let firstlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstTrack!)
    var firstAssetTrack: AVAssetTrack?
    if videoAsset.tracks(withMediaType: AVMediaType.video).count > 0 {
        firstAssetTrack = videoAsset.tracks(withMediaType: .video)[0]
        let firstAssetScaletoFitRatio = videoSize.width / ((firstAssetTrack?.naturalSize.width)!)
        if index == 0 {
            xPos = 0
            yPos = 10 
        }else {
            xPos = videoSize.width
            yPos = 10 
        }
        if(arrayVideos.count > 1) {
            let firstAssetTransform = CGAffineTransform(scaleX: firstAssetScaletoFitRatio,y: firstAssetScaletoFitRatio)
            let t1 = firstAssetTrack?.preferredTransform
            let t2 = t1?.concatenating(firstAssetTransform)
            let t3 = CGAffineTransform(translationX: xPos,y: yPos)
            guard let t4 = t2?.concatenating(t3) else { return  }
            firstlayerInstruction.setTransform(t4,at: CMTime.zero)
            firstlayerInstruction.setopacity(0.0,at: videoAsset.duration)
            layerInstructionArray.append(firstlayerInstruction)
        }
    }
} //for end

// Main video composition instruction
mainInstruction.layerInstructions = layerInstructionArray

// Main video composition
videoComposition.instructions = [mainInstruction]

let path = NstemporaryDirectory().appending("mergeVideo.mp4")
let exportURL = URL.init(fileURLWithPath: path)

// Check exist and remove old file
FileManager.default.removeItemIfExisted(exportURL)

// Init exporter
let exporter = AVAssetExportSession.init(asset: composition,presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = exportURL
exporter?.outputFileType = AVFileType.mp4
exporter?.shouldOptimizeforNetworkUse = true
exporter?.videoComposition = videoComposition

// Do export
exporter?.exportAsynchronously(completionHandler: {
    dispatchQueue.main.async {
        self.exportDidFinish(exporter: exporter,videoURL: exportURL,completion: {url,error in
            completion(url,error)
        })
    }
})
}

请指导我。 谢谢

解决方法

暂无找到可以解决该程序问题的有效方法,小编努力寻找整理中!

如果你已经找到好的解决方法,欢迎将解决方案带上本链接一起发送给小编。

小编邮箱:dio#foxmail.com (将#修改为@)