We have found that AVAssetWriter leaks memory when used with the required AVAssetWriterDelegate
to create HLS fMP4 video — even before the delivered segment data is ever processed or stored.
When we release the memory by hand (which feels wrong), the leak seems to disappear.
Even in a minimal setup, memory usage grows rapidly.
import Cocoa
import AVFoundation
/// Minimal reproduction of an `AVAssetWriter` memory leak when producing
/// HLS fMP4 segments via `AVAssetWriterDelegate`.
///
/// Captures frames from the default video device and feeds them into an
/// `AVAssetWriter` configured for fragmented-MP4 (Apple HLS) output.
class ViewController: NSViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
        // Request camera permission up front; the completion result is
        // intentionally ignored in this minimal sample.
        AVCaptureDevice.requestAccess(for: .video, completionHandler: { _ in })
    }

    // MARK: - Writer / capture state (created in startCapture)

    private var fileWriter: AVAssetWriter!
    private var videoInput: AVAssetWriterInput!
    private var bufferAdaptor: AVAssetWriterInputPixelBufferAdaptor!
    private var captureSession: AVCaptureSession!

    /// Queue on which capture callbacks (sample buffers) are delivered.
    internal let recordingQueue = DispatchQueue(label: "RecordingQueue", qos: .userInitiated)
    /// Queue on which the writer pipeline is configured and started.
    internal let writerQueue = DispatchQueue(label: "WriterQueue", qos: .userInitiated)

    /// Configures the capture device, the asset writer, and the capture
    /// session, then starts both. All failure paths log and abort instead
    /// of crashing (the original used force-unwraps and `try!`).
    @IBAction func startCapture(_ sender: NSButton) {
        self.writerQueue.async {
            // -- Capture device ------------------------------------------
            guard let device = AVCaptureDevice.default(for: .video) else {
                print("startCapture: no default video device")
                return
            }
            guard let format = device.formats.last else {
                print("startCapture: device exposes no formats")
                return
            }
            do {
                try device.lockForConfiguration()
            } catch {
                print("startCapture: lockForConfiguration failed: \(error)")
                return
            }
            device.activeFormat = format
            // Pin the device to 25 fps (min frame duration == max).
            device.activeVideoMaxFrameDuration = CMTime(value: 1, timescale: 25)
            device.activeVideoMinFrameDuration = CMTime(value: 1, timescale: 25)
            device.unlockForConfiguration()

            // -- Asset writer (fMP4 / HLS segment output) -----------------
            self.fileWriter = AVAssetWriter(contentType: .mpeg4Movie)
            // Emit a segment roughly every 0.2 s through the delegate callback.
            self.fileWriter.preferredOutputSegmentInterval = CMTime(seconds: 0.2, preferredTimescale: 60000)
            self.fileWriter.outputFileTypeProfile = .mpeg4AppleHLS
            self.fileWriter.initialSegmentStartTime = .zero

            let videoOutputSettings: [String: Any] = [
                AVVideoWidthKey: 1920,
                AVVideoHeightKey: 1080,
                AVVideoCodecKey: AVVideoCodecType.h264,
                AVVideoCompressionPropertiesKey: [
                    AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
                    AVVideoAverageBitRateKey: 6000 * 1024
                ]
            ]
            self.videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
            // CMTimeScale is Int32; a literal suffices (original used
            // CMTimeScale(exactly: 25)! — a needless force-unwrap).
            self.fileWriter.movieTimeScale = 25
            self.videoInput.mediaTimeScale = 25
            self.videoInput.expectsMediaDataInRealTime = true
            self.videoInput.performsMultiPassEncodingIfSupported = false

            let sourcePixelBufferAttributes: [String: Any] = [
                kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32ARGB,
                kCVPixelBufferMetalCompatibilityKey as String: true,
            ]
            self.bufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(
                assetWriterInput: self.videoInput,
                sourcePixelBufferAttributes: sourcePixelBufferAttributes)

            guard self.fileWriter.canAdd(self.videoInput) else {
                print("startCapture: writer cannot accept the video input")
                return
            }
            self.fileWriter.add(self.videoInput)
            // Segment data arrives via AVAssetWriterDelegate (see extension).
            self.fileWriter.delegate = self

            // -- Capture session -------------------------------------------
            let session = AVCaptureSession()
            self.captureSession = session
            session.beginConfiguration()

            let deviceInput: AVCaptureDeviceInput
            do {
                deviceInput = try AVCaptureDeviceInput(device: device)
            } catch {
                print("startCapture: cannot create device input: \(error)")
                return
            }
            guard session.canAddInput(deviceInput) else {
                print("startCapture: session cannot add the device input")
                return
            }
            session.addInput(deviceInput)
            session.sessionPreset = .high

            let videoDataOutput = AVCaptureVideoDataOutput()
            videoDataOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
            videoDataOutput.alwaysDiscardsLateVideoFrames = true
            videoDataOutput.videoSettings = [
                kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
                kCVPixelBufferMetalCompatibilityKey as String: true
            ]
            guard session.canAddOutput(videoDataOutput) else {
                print("startCapture: session cannot add the video data output")
                return
            }
            session.addOutput(videoDataOutput)
            session.commitConfiguration()

            // NOTE(review): the original locked the device around
            // startRunning(); the lock does not appear to be required for
            // startRunning() — TODO confirm, order preserved here.
            do {
                try device.lockForConfiguration()
            } catch {
                print("startCapture: second lockForConfiguration failed: \(error)")
                return
            }
            session.startRunning()
            device.unlockForConfiguration()

            // startWriting() returns false on failure; the original ignored it.
            guard self.fileWriter.startWriting() else {
                print("startCapture: startWriting failed: \(String(describing: self.fileWriter.error))")
                return
            }
            self.fileWriter.startSession(atSourceTime: .zero)
        }
    }

    /// Appends one captured sample buffer to the writer input, silently
    /// dropping the frame if the input is not ready — real-time capture
    /// must never block waiting for the encoder.
    func write(sample: CMSampleBuffer) {
        if self.videoInput.isReadyForMoreMediaData {
            self.videoInput.append(sample)
        }
    }
}
// MARK: - AVAssetWriterDelegate
extension ViewController: AVAssetWriterDelegate {
    /// Receives each finished fMP4 segment from the asset writer.
    ///
    /// On macOS versions before 13.3, AVFoundation leaks the buffer backing
    /// `segmentData` when it is bridged to Swift (reported to Apple; fixed
    /// in macOS 13.3). On those versions the buffer must be freed manually;
    /// on 13.3+ doing so would double-free and crash, hence the guard.
    func assetWriter(_ writer: AVAssetWriter, didOutputSegmentData segmentData: Data, segmentType: AVAssetSegmentType, segmentReport: AVAssetSegmentReport?) {
        print(segmentData.count)

        if #available(macOS 13.3, *) {
            // Leak is fixed by the OS — do NOT deallocate manually here.
        } else {
            // Work around the OS leak by freeing the backing buffer by hand.
            segmentData.withUnsafeBytes { raw in raw.baseAddress?.deallocate() }
        }
    }
}
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Forwards every captured frame to the asset-writer input.
    /// Delivered on `recordingQueue`.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        write(sample: sampleBuffer)
    }
}
Run this small sample in a new project and you will see the memory grow. Uncomment the lines in the delegate and memory usage stays flat, as expected.
What are we missing? Or did we discover a bug? (Already reported to Apple.)
Any ideas that help get this leak closed are welcome...
This issue is indeed fixed in macOS 13.3 beta, but for anyone who needs to support older macOS/iOS versions where the leak is still present:
You can deallocate the memory manually, as Jory shows in the question itself. Make sure to guard this with an `if #available` check
so you don't introduce a crash on OS versions that contain the fix:
if #available(macOS 13.3, *) {
// Leak is fixed, DO NOT deallocate manually as this will result in a crash
print("Skipping manual deallocation, we're on a too recent version of macOS.")
} else {
// Deallocate manually when on leaking OS versions
segmentData.withUnsafeBytes { raw in raw.baseAddress?.deallocate() }
}
A less risky alternative is to implement the delegate in Objective-C, since the leak occurs only when the segment data is bridged to Swift. If you still need to process the segment data in Swift, you can box it in an Objective-C class to avoid the bridging.
A sample of this can be found here: https://github.com/nonstrict-hq/avassetwriter-segment-leak-sample
Source: https://nonstrict.eu/blog/2023/avassetwriter-leaks-segment-data/