Tags: swift, xcode, macos, avfoundation, avkit

Memory leak when writing from AVCaptureSession to AVAssetWriter via AVAssetWriterDelegate for HLS


We have found that AVAssetWriter leaks memory when using the AVAssetWriterDelegate that is required for creating HLS fMP4 video, even before the delivered segment data is ever processed or stored.

When we release the memory by hand (which feels like the wrong thing to do), the leak seems to disappear.

Even in a minimal setup, memory usage grows rapidly.

import Cocoa
import AVFoundation

class ViewController: NSViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
        AVCaptureDevice.requestAccess(for: .video, completionHandler: {
            _ in
        })
    }

    private var fileWriter: AVAssetWriter!
    private var videoInput: AVAssetWriterInput!
    private var bufferAdaptor: AVAssetWriterInputPixelBufferAdaptor!
    private var captureSession: AVCaptureSession!
    internal let recordingQueue = DispatchQueue(label: "RecordingQueue", qos: .userInitiated)
    internal let writerQueue = DispatchQueue(label: "WriterQueue", qos: .userInitiated)

    @IBAction func startCapture(_ sender: NSButton) {
        
        self.writerQueue.async {
            let device = AVCaptureDevice.default(for: .video)!
            
            try! device.lockForConfiguration()
            
            device.activeFormat = device.formats.last!
            device.activeVideoMaxFrameDuration = CMTime(value: 1, timescale: 25)
            device.activeVideoMinFrameDuration = CMTime(value: 1, timescale: 25)
            device.unlockForConfiguration()

            // Configure the asset writer to emit fragmented MP4 segments for Apple HLS
            // through the delegate instead of writing to a file.
            self.fileWriter = AVAssetWriter(contentType: .mpeg4Movie)
            self.fileWriter.preferredOutputSegmentInterval = CMTime(seconds: 0.2, preferredTimescale: 60000)
            self.fileWriter.outputFileTypeProfile = .mpeg4AppleHLS
            self.fileWriter.initialSegmentStartTime = .zero
            
            let videoOutputSettings: [String: Any] = [
                AVVideoWidthKey: 1920,
                AVVideoHeightKey: 1080,
                AVVideoCodecKey: AVVideoCodecType.h264,
                AVVideoCompressionPropertiesKey: [
                    AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
                    AVVideoAverageBitRateKey: 6000 * 1024
                ]
            ]

            self.videoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoOutputSettings)
            self.fileWriter.movieTimeScale = CMTimeScale(exactly: 25)!
            self.videoInput.mediaTimeScale = CMTimeScale(exactly: 25)!
            
            self.videoInput.expectsMediaDataInRealTime = true
            self.videoInput.performsMultiPassEncodingIfSupported = false
            
            let sourcePixelBufferAttributes:[String:Any] = [
                kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32ARGB,
                kCVPixelBufferMetalCompatibilityKey as String: true,
            ]
            self.bufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.videoInput, sourcePixelBufferAttributes: sourcePixelBufferAttributes)
            
            if self.fileWriter.canAdd(self.videoInput) {
                self.fileWriter.add(self.videoInput)
            } else {
                return
            }
            
            self.fileWriter.delegate = self

            // Set up the capture session that will feed frames to the writer input.
            self.captureSession = AVCaptureSession()
            self.captureSession?.beginConfiguration()

            let videoInput = try! AVCaptureDeviceInput(device: device)
            if self.captureSession?.canAddInput(videoInput) ?? false {
                self.captureSession?.addInput(videoInput)
            } else {
                return
            }
        
            self.captureSession?.sessionPreset = AVCaptureSession.Preset.high

            let videoDataOutput = AVCaptureVideoDataOutput()
            videoDataOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
            videoDataOutput.alwaysDiscardsLateVideoFrames = true
            videoDataOutput.videoSettings = [
                kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
                kCVPixelBufferMetalCompatibilityKey as String: true
            ]
            
            if self.captureSession?.canAddOutput(videoDataOutput) ?? false {
                self.captureSession?.addOutput(videoDataOutput)
            } else {
                return
            }
            self.captureSession?.commitConfiguration()

            try! device.lockForConfiguration()
            self.captureSession?.startRunning()
            device.unlockForConfiguration()

            // Start the writer; segments are delivered via assetWriter(_:didOutputSegmentData:...).
            self.fileWriter.startWriting()
            self.fileWriter.startSession(atSourceTime: CMTime.zero)
        }
    }
    
    func write(sample: CMSampleBuffer) {
        if self.videoInput.isReadyForMoreMediaData {
            self.videoInput.append(sample)
        }
    }
}

extension ViewController: AVAssetWriterDelegate {
    func assetWriter(_ writer: AVAssetWriter, didOutputSegmentData segmentData: Data, segmentType: AVAssetSegmentType, segmentReport: AVAssetSegmentReport?) {
        print(segmentData.count)
        
        // Workaround: manually freeing the segment data stops the memory growth,
        // even though this should never be necessary (and feels wrong).
//      let _ = segmentData.withUnsafeBytes {
//          raw in
//          raw.baseAddress?.deallocate()
//      }
    }
}

extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        self.write(sample: sampleBuffer)
    }
}

Run this small sample in a new project and you will see the memory grow. Uncomment the lines in the delegate and memory usage behaves as expected.
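
For context, in a real implementation the delegate would persist each delivered segment for the HLS stream instead of only printing its size. The following is just a sketch of that step; the output directory, file names, and segment counter are our own assumptions, not part of the repro:

import AVFoundation

// Hypothetical helper: write one delivered segment to disk for an HLS stream.
// The directory and naming scheme are illustrative assumptions only.
func storeSegment(_ segmentData: Data, type segmentType: AVAssetSegmentType, index: Int, in directory: URL) {
    // The initialization segment carries the fMP4 header; .separable segments carry the media samples.
    let fileName = segmentType == .initialization ? "init.mp4" : "segment\(index).m4s"
    do {
        try segmentData.write(to: directory.appendingPathComponent(fileName))
    } catch {
        print("Failed to write \(fileName): \(error)")
    }
}

The delegate callback above would then call this with a running segment index; the leak shows up either way, which is why it is omitted from the minimal repro.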

What are we missing? Or did we discover a bug? (We have already reported it to Apple.)

Any ideas for getting this leak closed are welcome...


Solution

  • Apple has resolved the memory leak in the macOS 13.3 beta. After double-checking, it does indeed seem fixed: the sample code no longer leaks. (For apps that must also run on older systems, see the version-check sketch below.)
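
Since the fix is tied to the OS version, a quick runtime check can tell whether the affected code path may still leak. This is only a sketch under the assumption above; what, if anything, to do on older systems is left open:

import Foundation

// Hypothetical helper: Apple reports the leak fixed in macOS 13.3, so only earlier
// systems would still be affected. Any mitigation on those systems is up to the app.
func assetWriterSegmentLeakIsFixed() -> Bool {
    if #available(macOS 13.3, *) {
        return true
    } else {
        return false
    }
}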