问题描述
我正在开发适用于iOS和macOS的视频滤镜,该滤镜捕获来自默认相机的视频输入,应用滤镜(MPSImageGaussianBlur),然后使用MTKView进行渲染。
它在iOS 13上运行良好(在iPhone 6s和iPhone 11上测试)，但在macOS 10.15上(MacBook Pro)只有红屏，我不知道为什么。该代码按预期的方式反复调用captureOutput()和draw()。
这是VS2CameraSession,它执行大部分工作。 (请注意,我已按照CVMetalTextureCacheCreateTextureFromImage returns -6660 on macOS 10.13的建议将kCVPixelBufferMetalCompatibilityKey标志添加到videoSettings中)
import AVFoundation
import MetalPerformanceShaders
/// Captures frames from the default camera, wraps each frame's pixel buffer in a
/// Metal texture, and renders it through an `MPSImageGaussianBlur` filter.
class VS2CameraSession: NSObject {
    let gpu = MTLCreateSystemDefaultDevice()!
    private let session = AVCaptureSession()
    private let camera = AVCaptureDevice.default(for: .video)
    private var textureCache: CVMetalTextureCache?
    // Command queues are expensive; create one and reuse it for every frame
    // instead of allocating a new queue per draw() call.
    private lazy var commandQueue: MTLCommandQueue? = gpu.makeCommandQueue()
    private var texture: MTLTexture?
    // FIX for the macOS red screen: a texture made by
    // CVMetalTextureCacheCreateTextureFromImage does not retain the underlying
    // CVPixelBuffer. Keep the sample buffer alive until the frame is drawn,
    // otherwise the texture's backing memory may be recycled before rendering.
    private var sampleBuffer: CMSampleBuffer?

    /// Configures the capture pipeline and starts the session.
    func startRunning() {
        CVMetalTextureCacheCreate(nil, nil, gpu, &textureCache)
        guard let camera = camera,
              let input = try? AVCaptureDeviceInput(device: camera) else {
            return
        }
        guard session.canAddInput(input) else {
            return
        }
        session.addInput(input)

        let output = AVCaptureVideoDataOutput()
        output.alwaysDiscardsLateVideoFrames = true
        #if os(macOS)
        // On macOS the Metal-compatibility flag is required, otherwise
        // CVMetalTextureCacheCreateTextureFromImage returns -6660.
        // https://stackoverflow.com/questions/46549906/cvMetaltexturecachecreatetexturefromimage-returns-6660-on-macos-10-13
        output.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
            kCVPixelBufferMetalCompatibilityKey as String: true
        ]
        #else
        output.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
        ]
        #endif
        output.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        session.addOutput(output)
        session.startRunning()
    }

    /// Encodes a Gaussian blur from the latest captured texture into the
    /// drawable's texture, then presents it. No-op when no new frame exists.
    func draw(drawable: CAMetalDrawable?) {
        guard let texture = self.texture,
              let drawable = drawable,
              let commandQueue = self.commandQueue,
              let commandBuffer = commandQueue.makeCommandBuffer() else {
            return
        }
        // Apply filter(s)
        let filter = MPSImageGaussianBlur(device: gpu, sigma: 10.0)
        filter.encode(commandBuffer: commandBuffer, sourceTexture: texture, destinationTexture: drawable.texture)
        commandBuffer.present(drawable)
        commandBuffer.commit()
        self.texture = nil // no need to draw it again
        self.sampleBuffer = nil // safe to release the frame once it is encoded
    }
}

extension VS2CameraSession: AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Converts each captured frame's pixel buffer into a Metal texture.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
              let textureCache = self.textureCache else {
            return
        }
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)
        var textureRef: CVMetalTexture?
        // Full argument list: allocator, cache, image, textureAttributes,
        // pixelFormat, width, height, planeIndex, textureOut.
        CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, nil, .bgra8Unorm, width, height, 0, &textureRef)
        // Guard instead of force-unwrapping: creation can fail (e.g. -6660).
        if let textureRef = textureRef {
            self.texture = CVMetalTextureGetTexture(textureRef)
            // Retain the sample buffer so the pixel buffer backing the
            // texture stays alive until draw() has consumed it.
            self.sampleBuffer = sampleBuffer
        }
    }
}
这是VS2CameraviewController,它使用VS2CameraSession呈现其视图。
import UIKit
import SwiftUI
import MetalKit
/// Hosts an `MTKView` whose drawables are filled by `VS2CameraSession`.
final class VS2CameraviewController: UIViewController {
    let cameraSession = VS2CameraSession()

    override func loadView() {
        // Local variables use lowerCamelCase; `MetalView` shadowed a type-like name.
        let metalView = MTKView()
        metalView.device = self.cameraSession.gpu
        metalView.delegate = self
        // MTLClearColorMake takes four components (r, g, b, a); the original
        // three-argument call does not compile.
        metalView.clearColor = MTLClearColorMake(1, 1, 1, 1)
        metalView.colorPixelFormat = MTLPixelFormat.bgra8Unorm
        // framebufferOnly must be false so the drawable's texture can be the
        // destination of an MPS filter (a write by a compute pass).
        metalView.framebufferOnly = false
        self.view = metalView
    }

    override func viewDidLoad() {
        super.viewDidLoad() // always forward lifecycle calls to UIKit
        cameraSession.startRunning()
    }
}
// MARK: - MTKViewDelegate
extension VS2CameraviewController: MTKViewDelegate {
    /// Drawable size changes require no action; the session renders
    /// whatever drawable the view hands over each frame.
    func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
        // Intentionally empty.
    }

    /// Forwards each render callback to the camera session.
    func draw(in view: MTKView) {
        let drawable = view.currentDrawable
        cameraSession.draw(drawable: drawable)
    }
}
// MARK: - SwiftUI bridge
extension VS2CameraviewController: UIViewControllerRepresentable {
    typealias UIViewControllerType = VS2CameraviewController

    /// Creates a fresh controller instance for SwiftUI to manage.
    public func makeUIViewController(context: UIViewControllerRepresentableContext<VS2CameraviewController>) -> VS2CameraviewController {
        VS2CameraviewController()
    }

    /// No state flows from SwiftUI into the controller, so updates are a no-op.
    public func updateUIViewController(_ uiViewController: VS2CameraviewController, context: UIViewControllerRepresentableContext<VS2CameraviewController>) {
        // Intentionally empty.
    }
}
完整的源代码可在https://github.com/snakajima/VideoShader2/tree/stack_overflow上获得。
解决方法
我在这里找到了答案。 Very slow framerate with AVFoundation and Metal in MacOS
我只需要保留对sampleBuffer的引用以及纹理。