AVAssetWriter first frame is blank or black

Problem description

Problem: I record video by appending the audio and video CMSampleBuffers delivered by the capture outputs. Once the AssetWriter finishes writing, the resulting video has a black or blank first frame (presumably because only audio samples exist at the very beginning). Occasionally, though, a recording comes out perfectly fine with no black frame.

My attempt: I tried waiting until I receive a video frame before starting to record, but I get the same inconsistent behavior.

What I want: a proper video with no blank frames.

Here is the code that may help:

Capture Session

 func configureSession() {
    sessionQueue.async {
        print("SFC - Session Configuring")
        if self.setupResult != .success { return }
        
        self.session.beginConfiguration()
        self.session.sessionPreset = .high
        
        do {
            var defaultVideoDevice: AVCaptureDevice?

            if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) {
                defaultVideoDevice = frontCameraDevice
            } else if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) {
                defaultVideoDevice = backCameraDevice
            } else if let dualCameraDevice = AVCaptureDevice.default(.builtInDualWideCamera, for: .video, position: .back) {
                defaultVideoDevice = dualCameraDevice
            }

            guard let videoDevice = defaultVideoDevice else {
                print("CAM - Camera unavailable")
                self.setupResult = .configurationFailed
                self.session.commitConfiguration()
                return
            }

            let videoInputDevice = try AVCaptureDeviceInput(device: videoDevice)

            if self.session.canAddInput(videoInputDevice) {
                self.session.addInput(videoInputDevice)
                self.videoDeviceInput = videoInputDevice
            } else {
                print("CAM - Couldn't add input to the session")
                self.setupResult = .configurationFailed
                self.session.commitConfiguration()
                return
            }
        } catch {
            print("CAM - Couldn't create device input. Error - ",error.localizedDescription)
            self.setupResult = .configurationFailed
            self.session.commitConfiguration()
            return
        }
        
        
        self.videoOutput.setSampleBufferDelegate(self, queue: self.videoQueue)
        if self.session.canAddOutput(self.videoOutput) {
            self.session.addOutput(self.videoOutput)
            self.photoQualityPrioritizationMode = .balanced
        } else {
            print("Could not add video output to the session")
            self.setupResult = .configurationFailed
            self.session.commitConfiguration()
            return
        }

        self.videoOutput.connections.first?.videoOrientation = .portrait
        self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
        self.videoOutput.alwaysDiscardsLateVideoFrames = true
        
        
        // Configure for photos
        if self.session.canAddOutput(self.photoOutput) {
            self.session.addOutput(self.photoOutput)
        }
        
        do {
            let audioDevice = AVCaptureDevice.default(for: .audio)
            let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice!)
            if self.session.canAddInput(audioDeviceInput) {
                self.session.addInput(audioDeviceInput)
            } else { print("CAM - Couldn't add audio input device to session.") }
        } catch { print("Couldn't create audio input device. Error - ",error.localizedDescription) }
    
        
        self.audioOutput.setSampleBufferDelegate(self, queue: self.videoQueue)
        if self.session.canAddOutput(self.audioOutput) {
            print("SFC - registered audio output with type")
            self.session.addOutput(self.audioOutput)
        } else {
            print("Couldn't add audio output")
            self.setupResult = .configurationFailed
            self.session.commitConfiguration()
            return
        }
    
        self.session.commitConfiguration()

    }
}

func startSession() {
    if SMConstants.currentDevice.isSimulator { return }
    sessionQueue.async {
        self.configureSession()
        print("SFC - Frame Buffers Session Starting")
        self.session.startRunning()
        self.isSessionRunning = self.session.isRunning
        self.sessionQueue.asyncAfter(deadline: .now() + 1) {
            self.addObservers()
        }
    }
}

Buffer Writer

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    if !frameCaptureRunning { return }
    write(output: output, buffer: sampleBuffer)
}

public func write(output: AVCaptureOutput, buffer: CMSampleBuffer) {
    writerQueue.sync {
        
        if assetWriter == nil { self.setupWriter() }

        if self.assetWriter?.status == .unknown {
            self.assetWriter?.startWriting()
            self.assetWriter?.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(buffer))
            printDone(item: "Started AssetWriter")
        }
        if self.assetWriter?.status == .failed {
            printError(item: "Asset Writer failed with error: \(String(describing: self.assetWriter?.error))")
            return
        }

        if CMSampleBufferDataIsReady(buffer) {
            if output == videoOutput {
                if let videoInput = self.videoInput, videoInput.isReadyForMoreMediaData {
                    videoInput.append(buffer)
                    printLog(item: "Pulling video only \(output)")
                    video_frames_written = true
                }
            }

            if output == audioOutput {
                if !video_frames_written { return }
                if let audioInput = self.audioInput, audioInput.isReadyForMoreMediaData {
                    audioInput.append(buffer)
                    printLog(item: "Pulling audio only \(output)")
                }
            }
        }
    }
}

private func setupWriter() {
    
    clearAndResetFilesInDirectory()

    self.assetWriter = try? AVAssetWriter(outputURL: self.url, fileType: AVFileType.mp4)

    let videoOutputSettings: [String: Any] = [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoHeightKey: 1920,
        AVVideoWidthKey: 1080
    ]

    self.videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
    self.videoInput?.expectsMediaDataInRealTime = true
    if let videoInput = self.videoInput, self.assetWriter?.canAdd(videoInput) == true {
        self.assetWriter?.add(videoInput)
    }

    let audioOutputSettings: [String: Any] = [
        AVFormatIDKey: kAudioFormatMPEG4AAC,
        AVNumberOfChannelsKey: 1,
        AVSampleRateKey: 44100,
        AVEncoderBitRateKey: 64000
    ]

    self.audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
    self.audioInput?.expectsMediaDataInRealTime = true
    if let audioInput = self.audioInput, self.assetWriter?.canAdd(audioInput) == true {
        printDone(item: "Added Input")
        self.assetWriter?.add(audioInput)
    } else { printError(item: "No audio input") }
    
    
}

viewWillAppear in CameraController

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    frameCapture = FrameCapture(filename: "test.mp4", delegate: self)
    frameCapture.startSession()
    previewView.session = frameCapture.session
}

That's everything I can think of that might be relevant. If you think I need to provide more information, let me know and I'll add it promptly. Thanks for taking the time.

Solution

You probably want to call startSession on a video buffer: if an audio buffer arrives first and carries an earlier timestamp than the first video buffer, the movie starts before any video exists and you get a blank or black frame at the beginning.
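
A minimal sketch of that idea, reworking the question's write(output:buffer:) so that startSession(atSourceTime:) is only ever called with a video buffer's timestamp. The property and helper names (writerQueue, assetWriter, videoInput, audioInput, videoOutput, audioOutput, setupWriter, printError) are taken from the question; the gating logic itself is the suggested change, not tested code:

public func write(output: AVCaptureOutput, buffer: CMSampleBuffer) {
    writerQueue.sync {
        if assetWriter == nil { setupWriter() }
        guard let writer = assetWriter, CMSampleBufferDataIsReady(buffer) else { return }

        if writer.status == .unknown {
            // Only start the session from a *video* buffer. An audio buffer can
            // carry an earlier timestamp than the first video frame, and starting
            // the session there is what produces the blank/black opening frame.
            guard output == videoOutput else { return }
            guard writer.startWriting() else {
                printError(item: "Could not start writing: \(String(describing: writer.error))")
                return
            }
            writer.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(buffer))
        }

        if writer.status == .failed {
            printError(item: "Asset Writer failed: \(String(describing: writer.error))")
            return
        }

        if output == videoOutput, let videoInput = videoInput, videoInput.isReadyForMoreMediaData {
            videoInput.append(buffer)
        } else if output == audioOutput, let audioInput = audioInput, audioInput.isReadyForMoreMediaData {
            // Audio only reaches this point after the session has been started on a
            // video timestamp, so nothing gets written ahead of the first video frame.
            audioInput.append(buffer)
        }
    }
}

With this gating the video_frames_written flag from the question is arguably no longer needed, since no audio buffer can be appended before the session has been started on a video frame.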