Swift 4 - avfoundation screen and audio recording using AVAssetWriter on mac os - video frozen

Posted: 2018-02-01 18:45:21

【Question】:

I am recording the screen with audio on macOS using Aperture. We need to lower the video bitrate, so I am trying to rewrite the recording to use AVAssetWriter. My implementation is based on the CustomCamera project and it almost works. The problem is the video: after a few seconds it freezes, although the audio keeps recording correctly. Could you please help me? I don't know where the problem is; it may be an issue with the sample buffers or with some object being released too early. Thanks.

Here is the code:

//
//  ViewController.swift
//  CustomCamera
//
//  Created by Taras Chernyshenko on 6/27/17.
//  Copyright © 2017 Taras Chernyshenko. All rights reserved.
//
import AVFoundation
import Photos

class NewRecorder: NSObject,
  AVCaptureAudioDataOutputSampleBufferDelegate,
  AVCaptureVideoDataOutputSampleBufferDelegate {

  private var session: AVCaptureSession = AVCaptureSession()
  private var deviceInput: AVCaptureScreenInput?
  private var previewLayer: AVCaptureVideoPreviewLayer?
  private var videoOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
  private var audioOutput: AVCaptureAudioDataOutput = AVCaptureAudioDataOutput()

  //private var videoDevice: AVCaptureDevice = AVCaptureScreenInput(displayID: 69731840) //AVCaptureDevice.default(for: AVMediaType.video)!
  private var audioConnection: AVCaptureConnection?
  private var videoConnection: AVCaptureConnection?

  private var assetWriter: AVAssetWriter?
  private var audioInput: AVAssetWriterInput?
  private var videoInput: AVAssetWriterInput?

  private var fileManager: FileManager = FileManager()
  private var recordingURL: URL?

  private var isCameraRecording: Bool = false
  private var isRecordingSessionStarted: Bool = false

  private var recordingQueue = DispatchQueue(label: "recording.queue")


  func setup() {
    self.session.sessionPreset = AVCaptureSession.Preset.high

    self.recordingURL = URL(fileURLWithPath: "\(NSTemporaryDirectory() as String)/file.mp4")
    if self.fileManager.isDeletableFile(atPath: self.recordingURL!.path) {
      _ = try? self.fileManager.removeItem(atPath: self.recordingURL!.path)
    }

    self.assetWriter = try? AVAssetWriter(outputURL: self.recordingURL!,
                                          fileType: AVFileType.mp4)
    self.assetWriter!.movieFragmentInterval = kCMTimeInvalid
    self.assetWriter!.shouldOptimizeForNetworkUse = true

    let audioSettings = [
      AVFormatIDKey : kAudioFormatMPEG4AAC,
      AVNumberOfChannelsKey : 2,
      AVSampleRateKey : 44100.0,
      AVEncoderBitRateKey: 192000
      ] as [String : Any]

    let videoSettings = [
      AVVideoCodecKey : AVVideoCodecType.h264,
      AVVideoWidthKey : 1920,
      AVVideoHeightKey : 1080
      /*AVVideoCompressionPropertiesKey: [
        AVVideoAverageBitRateKey:  NSNumber(value: 5000000)
      ]*/
      ] as [String : Any]

    self.videoInput = AVAssetWriterInput(mediaType: AVMediaType.video,
                                         outputSettings: videoSettings)
    self.audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio,
                                         outputSettings: audioSettings)

    self.videoInput?.expectsMediaDataInRealTime = true
    self.audioInput?.expectsMediaDataInRealTime = true

    if self.assetWriter!.canAdd(self.videoInput!) {
      self.assetWriter?.add(self.videoInput!)
    }

    if self.assetWriter!.canAdd(self.audioInput!) {
      self.assetWriter?.add(self.audioInput!)
    }

    //self.deviceInput = try? AVCaptureDeviceInput(device: self.videoDevice)
    self.deviceInput = AVCaptureScreenInput(displayID: 724042646)
    self.deviceInput!.minFrameDuration = CMTimeMake(1, Int32(30))
    self.deviceInput!.capturesCursor = true
    self.deviceInput!.capturesMouseClicks = true

    if self.session.canAddInput(self.deviceInput!) {
      self.session.addInput(self.deviceInput!)
    }

    self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)

    // important line of code that does the trick
    //self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill

    //let rootLayer = self.view.layer
    //rootLayer.masksToBounds = true
    //self.previewLayer?.frame = CGRect(x: 0, y: 0, width: 1920, height: 1080)

    //rootLayer.insertSublayer(self.previewLayer!, at: 0)

    self.session.startRunning()

    DispatchQueue.main.async {
      self.session.beginConfiguration()

      if self.session.canAddOutput(self.videoOutput) {
        self.session.addOutput(self.videoOutput)
      }

      self.videoConnection = self.videoOutput.connection(with: AVMediaType.video)
      /*if self.videoConnection?.isVideoStabilizationSupported == true {
        self.videoConnection?.preferredVideoStabilizationMode = .auto
      }*/
      self.session.commitConfiguration()

      let audioDevice = AVCaptureDevice.default(for: AVMediaType.audio)
      let audioIn = try? AVCaptureDeviceInput(device: audioDevice!)

      if self.session.canAddInput(audioIn!) {
        self.session.addInput(audioIn!)
      }

      if self.session.canAddOutput(self.audioOutput) {
        self.session.addOutput(self.audioOutput)
      }

      self.audioConnection = self.audioOutput.connection(with: AVMediaType.audio)
    }
  }

  func startRecording() {
    if self.assetWriter?.startWriting() != true {
      print("error: \(self.assetWriter?.error.debugDescription ?? "")")
    }

    self.videoOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
    self.audioOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
  }

  func stopRecording() {
    self.videoOutput.setSampleBufferDelegate(nil, queue: nil)
    self.audioOutput.setSampleBufferDelegate(nil, queue: nil)

    self.assetWriter?.finishWriting {
      print("Saved in folder \(self.recordingURL!)")
      exit(0)
    }
  }
  func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

    if !self.isRecordingSessionStarted {
      let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
      self.assetWriter?.startSession(atSourceTime: presentationTime)
      self.isRecordingSessionStarted = true
    }

    let description = CMSampleBufferGetFormatDescription(sampleBuffer)!

    if CMFormatDescriptionGetMediaType(description) == kCMMediaType_Audio {
      if self.audioInput!.isReadyForMoreMediaData {
        //print("appendSampleBuffer audio");
        self.audioInput?.append(sampleBuffer)
      }
    } else {
      if self.videoInput!.isReadyForMoreMediaData {
        //print("appendSampleBuffer video");
        if !self.videoInput!.append(sampleBuffer) {
          print("Error writing video buffer");
        }
      }
    }
  }
}
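For the bitrate reduction mentioned at the top, the commented-out AVVideoCompressionPropertiesKey block would be the place to set it. A minimal sketch, where the 5 Mbit/s average bitrate and the profile level are example assumptions rather than values verified against this recorder:

// Sketch only (assumes import AVFoundation, as in the code above).
// The 5_000_000 bps value and the H.264 profile level are example
// assumptions, not settings confirmed for this project.
let videoSettings: [String : Any] = [
  AVVideoCodecKey : AVVideoCodecType.h264,
  AVVideoWidthKey : 1920,
  AVVideoHeightKey : 1080,
  AVVideoCompressionPropertiesKey : [
    AVVideoAverageBitRateKey : NSNumber(value: 5_000_000),
    AVVideoProfileLevelKey : AVVideoProfileLevelH264HighAutoLevel
  ]
]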

【Comments】:

【Answer 1】:

So I fixed it by moving this block of code

if self.session.canAddInput(self.deviceInput!) {
  self.session.addInput(self.deviceInput!)
}
so that it runs after the call to self.session.beginConfiguration():

self.session.beginConfiguration()

if self.session.canAddInput(self.deviceInput!) {
  self.session.addInput(self.deviceInput!)
}

if self.session.canAddOutput(self.videoOutput) {
  self.session.addOutput(self.videoOutput)
}

self.videoConnection = self.videoOutput.connection(with: AVMediaType.video)

self.session.commitConfiguration()
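As a side note beyond the accepted fix: the usual pattern is to keep all of the input/output wiring inside a single beginConfiguration()/commitConfiguration() pair and to call startRunning() only after the configuration has been committed. A rough sketch under that assumption (not taken verbatim from the answer above):

// Sketch only: full session wiring in one configuration block,
// based on common AVCaptureSession usage, not on the original answer.
self.session.beginConfiguration()

if self.session.canAddInput(self.deviceInput!) {
  self.session.addInput(self.deviceInput!)
}
if let audioDevice = AVCaptureDevice.default(for: AVMediaType.audio),
   let audioIn = try? AVCaptureDeviceInput(device: audioDevice),
   self.session.canAddInput(audioIn) {
  self.session.addInput(audioIn)
}
if self.session.canAddOutput(self.videoOutput) {
  self.session.addOutput(self.videoOutput)
}
if self.session.canAddOutput(self.audioOutput) {
  self.session.addOutput(self.audioOutput)
}

self.videoConnection = self.videoOutput.connection(with: AVMediaType.video)
self.audioConnection = self.audioOutput.connection(with: AVMediaType.audio)

self.session.commitConfiguration()
self.session.startRunning() // start only after committing the configuration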

【Discussion】:
