Problem description
Please tell me what might be wrong with my code. I am trying to record the screen broadcast to a file (newFile.mov) and then send it to a server together with some additional data, but the file never gets saved: after the capture stops, the file is empty.
Here is my code from BExtensionUpload, which captures the broadcast and is supposed to save it to a file in the app group container so that I can pick it up and send it to the server:
import ReplayKit
import AVFoundation

class SampleHandler: RPBroadcastSampleHandler {

    let appIdentifier = "com.group.CY"

    var videoWriterInput: AVAssetWriterInput!
    var audioWriterInput: AVAssetWriterInput!
    var videoWriter: AVAssetWriter!
    var sessionAtSourceTime: CMTime!
    var outputFileLocation: URL?

    override func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) {
        do {
            outputFileLocation = videoFileLocation()
            videoWriter = try AVAssetWriter(outputURL: outputFileLocation!, fileType: AVFileType.mov)

            videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: [
                AVVideoCodecKey: AVVideoCodecType.h264,
                AVVideoWidthKey: 720,
                AVVideoHeightKey: 1280,
                AVVideoCompressionPropertiesKey: [
                    AVVideoAverageBitRateKey: 2300000,
                ],
            ])
            videoWriterInput.expectsMediaDataInRealTime = true
            if videoWriter.canAdd(videoWriterInput) {
                videoWriter.add(videoWriterInput)
            } else {
                print("no input added")
            }

            audioWriterInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: nil)
            audioWriterInput.expectsMediaDataInRealTime = true
            if videoWriter.canAdd(audioWriterInput!) {
                videoWriter.add(audioWriterInput!)
            }

            videoWriter.startWriting()
        } catch let error {
            debugPrint(error.localizedDescription)
        }
    }

    override func broadcastFinished() {
        super.broadcastFinished()
        videoWriterInput.markAsFinished()
        videoWriter.finishWriting {
            ()
        }
    }

    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
        super.processSampleBuffer(sampleBuffer, with: sampleBufferType)

        if writable, sessionAtSourceTime == nil {
            sessionAtSourceTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            videoWriter.startSession(atSourceTime: sessionAtSourceTime!)
        }

        switch sampleBufferType {
        case .video:
            if videoWriterInput.isReadyForMoreMediaData {
                videoWriterInput.append(sampleBuffer)
            }
        case .audioApp:
            if audioWriterInput.isReadyForMoreMediaData {
                audioWriterInput?.append(sampleBuffer)
            }
        case .audioMic:
            print("mic")
        @unknown default:
            print("unknown")
        }
    }

    func videoFileLocation() -> URL? {
        let fileManager = FileManager.default
        do {
            if let container = fileManager.containerURL(forSecurityApplicationGroupIdentifier: appIdentifier) {
                let videoContainer = container.appendingPathComponent("Video")
                try? fileManager.createDirectory(at: videoContainer, withIntermediateDirectories: false, attributes: nil)
                let videoOutputUrl = videoContainer.appendingPathComponent("newFile").appendingPathExtension("mov")
                if fileManager.fileExists(atPath: videoOutputUrl.path) {
                    try fileManager.removeItem(at: videoOutputUrl)
                }
                fileManager.createFile(atPath: videoOutputUrl.path, contents: nil, attributes: nil)
                return videoOutputUrl
            }
        } catch {
            print(error)
        }
        return nil
    }
}
Solution
This code works for me, although in addition to writing into my app group directory I also had to save the recording through PHPhotoLibrary. The app group directory is not the only place you can create the file; any directory you have access to will do. I hope it helps someone.
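Since the handler saves the finished recording into the photo library, it presumably also needs photo-library "add" permission (an NSPhotoLibraryAddUsageDescription entry in the Info.plist plus runtime authorization). Below is a minimal pre-flight sketch, assuming iOS 14+; whether the system prompt can actually be presented from a broadcast extension is not guaranteed, so in practice the permission may have to be obtained from the host app first. The full handler follows after it.

import Photos

// Hypothetical pre-flight check before calling PHPhotoLibrary.performChanges.
// .addOnly access (iOS 14+) is enough for creationRequestForAssetFromVideo.
func ensurePhotoAddAccess(_ completion: @escaping (Bool) -> Void) {
    switch PHPhotoLibrary.authorizationStatus(for: .addOnly) {
    case .authorized, .limited:
        completion(true)
    case .notDetermined:
        PHPhotoLibrary.requestAuthorization(for: .addOnly) { status in
            completion(status == .authorized || status == .limited)
        }
    default:
        completion(false)
    }
}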
import ReplayKit
import Photos
import AVFoundation
import UIKit

class SampleHandler: RPBroadcastSampleHandler {

    let appIdentifier = "group.CY"
    let fileManager = FileManager.default

    var videoWriterInput: AVAssetWriterInput!
    var microphoneWriterInput: AVAssetWriterInput!
    var videoWriter: AVAssetWriter!
    var sessionBeginAtSourceTime: CMTime!
    var isRecording = false
    var outputFileLocation: URL!

    override func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) {
        guard !isRecording else { return }
        isRecording = true
        sessionBeginAtSourceTime = nil
        setUpWriter()
    }

    func setUpWriter() {
        let width = UIScreen.main.bounds.width * 2
        let height = UIScreen.main.bounds.height * 2
        self.outputFileLocation = videoFileLocation()

        videoWriter = try? AVAssetWriter(outputURL: self.outputFileLocation, fileType: AVFileType.mp4)

        // Add the video input
        let videoCompressionProperties = [
            AVVideoAverageBitRateKey: width * height * 10.1
        ]
        let videoSettings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: width,
            AVVideoHeightKey: height,
            AVVideoCompressionPropertiesKey: videoCompressionProperties
        ]
        videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
        videoWriterInput.expectsMediaDataInRealTime = true

        // Add the microphone input
        var acl = AudioChannelLayout()
        memset(&acl, 0, MemoryLayout<AudioChannelLayout>.size)
        acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono
        let audioOutputSettings: [String: Any] = [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVSampleRateKey: 44100,
            AVNumberOfChannelsKey: 1,
            AVEncoderBitRateKey: 64000,
            AVChannelLayoutKey: Data(bytes: &acl, count: MemoryLayout<AudioChannelLayout>.size)
        ]
        microphoneWriterInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: audioOutputSettings)
        microphoneWriterInput.expectsMediaDataInRealTime = true

        if videoWriter.canAdd(videoWriterInput) {
            videoWriter.add(videoWriterInput)
        }
        if videoWriter.canAdd(microphoneWriterInput) {
            videoWriter.add(microphoneWriterInput)
        }

        videoWriter.startWriting()
    }

    override func broadcastFinished() {
        guard isRecording else { return }
        isRecording = false
        sessionBeginAtSourceTime = nil

        let dispatchGroup = DispatchGroup()
        dispatchGroup.enter()

        videoWriterInput.markAsFinished()
        microphoneWriterInput.markAsFinished()

        var finishedWriting = false
        videoWriter.finishWriting {
            // Once the file is finalized, copy it into the photo library as well.
            PHPhotoLibrary.shared().performChanges({
                PHAssetCollectionChangeRequest.creationRequestForAssetCollection(withTitle: "xxx")
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.outputFileLocation)
            }) { completed, error in
                if completed {
                    NSLog("Video \(self.outputFileLocation.path) has been moved to camera roll")
                }
                if error != nil {
                    NSLog("ERROR:::Cannot move the video \(self.outputFileLocation.path) to camera roll, error: \(error!.localizedDescription)")
                }
                finishedWriting = true
            }
            while finishedWriting == false {
                // NSLog("DEBUG:::Waiting to finish writing...")
            }
            dispatchGroup.leave()
        }
        dispatchGroup.wait() // <= blocks the thread here until writing and saving are done
    }

    override func finishBroadcastWithError(_ error: Error) {
        print(error)
    }

    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
        let writable = canWrite()

        // Start the writer session from the first sample's presentation timestamp.
        if writable, sessionBeginAtSourceTime == nil {
            sessionBeginAtSourceTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            videoWriter.startSession(atSourceTime: sessionBeginAtSourceTime!)
        }

        if writable {
            switch sampleBufferType {
            case .video:
                if videoWriterInput.isReadyForMoreMediaData {
                    videoWriterInput.append(sampleBuffer)
                }
            case .audioApp:
                print("audio")
            case .audioMic:
                if microphoneWriterInput.isReadyForMoreMediaData {
                    microphoneWriterInput.append(sampleBuffer)
                }
            @unknown default:
                print("unknown")
            }
        }
    }

    func videoFileLocation() -> URL {
        let documentsPath = fileManager.containerURL(forSecurityApplicationGroupIdentifier: appIdentifier)!
        let videoOutputUrl = documentsPath
            .appendingPathComponent("Library")
            .appendingPathComponent("Caches")
            .appendingPathComponent("mobile")
            .appendingPathExtension("mp4")
        do {
            if fileManager.fileExists(atPath: videoOutputUrl.path) {
                try fileManager.removeItem(at: videoOutputUrl)
            }
        } catch { print(error) }
        return videoOutputUrl
    }

    // The writer is only usable while the broadcast is running and it is in the .writing state.
    func canWrite() -> Bool {
        return videoWriter != nil && isRecording && videoWriter?.status == .writing
    }
}
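Since the original goal was to send the recording to a server together with some data, the container app still has to pick the file up from the shared app group and upload it. This is only a minimal sketch using the same group identifier and file path as above; the upload URL, HTTP method, and body format are placeholders that depend entirely on your server's API.

import Foundation

// Hypothetical helper for the host app. It locates the file the broadcast
// extension wrote (Library/Caches/mobile.mp4 inside the shared container)
// and uploads it with URLSession.
final class RecordingUploader {

    let appIdentifier = "group.CY"                              // must match the extension
    let uploadURL = URL(string: "https://example.com/upload")!  // placeholder endpoint

    // Returns the recording's URL if the extension has produced one.
    func recordedFileURL() -> URL? {
        guard let container = FileManager.default
            .containerURL(forSecurityApplicationGroupIdentifier: appIdentifier) else { return nil }
        let fileURL = container
            .appendingPathComponent("Library")
            .appendingPathComponent("Caches")
            .appendingPathComponent("mobile")
            .appendingPathExtension("mp4")
        return FileManager.default.fileExists(atPath: fileURL.path) ? fileURL : nil
    }

    func upload(completion: @escaping (Error?) -> Void) {
        guard let fileURL = recordedFileURL() else {
            completion(NSError(domain: "RecordingUploader", code: -1,
                               userInfo: [NSLocalizedDescriptionKey: "No recording found"]))
            return
        }
        var request = URLRequest(url: uploadURL)
        request.httpMethod = "POST"
        request.setValue("video/mp4", forHTTPHeaderField: "Content-Type")
        // Any extra metadata would go in headers or a multipart body instead.
        URLSession.shared.uploadTask(with: request, fromFile: fileURL) { _, _, error in
            completion(error)
        }.resume()
    }
}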