ios swift uiimage avcapturesession cmsamplebufferref

Convert a CMSampleBuffer into a UIImage


Here's a function (code from Apple documentation) that converts a CMSampleBuffer into a UIImage

/// Converts a `CMSampleBuffer` delivered by an `AVCaptureSession` video
/// output into a `UIImage`.
///
/// - Parameter sampleBuffer: A sample buffer containing a CVPixelBuffer
///   (assumes the capture output is configured for `kCVPixelFormatType_32BGRA`
///   — TODO confirm against the session setup).
/// - Returns: The rendered image. Crashes if the buffer has no image data.
func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage {
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    // Lock the base address so the CPU can safely read the pixel data
    CVPixelBufferLockBaseAddress(imageBuffer, 0)

    // Base address of the pixel data
    let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
    // Number of bytes per row for the pixel buffer
    let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
    // Pixel buffer dimensions
    let width = CVPixelBufferGetWidth(imageBuffer)
    let height = CVPixelBufferGetHeight(imageBuffer)

    // Create a device-dependent RGB color space
    let colorSpace = CGColorSpaceCreateDeviceRGB()

    // Camera frames arrive as 32BGRA, i.e. little-endian with alpha first.
    // The original `NoneSkipLast` (big-endian RGBX) mismatches that layout,
    // which is why the resulting UIImage appears blank/garbled.
    let bitmapInfo = CGBitmapInfo(CGBitmapInfo.ByteOrder32Little.rawValue | CGImageAlphaInfo.NoneSkipFirst.rawValue)
    // Create a bitmap graphics context backed directly by the buffer's memory
    let context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo)
    // Create a Quartz image from the pixel data (this copies the pixels,
    // so it is safe to unlock the buffer afterwards)
    let quartzImage = CGBitmapContextCreateImage(context)
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0)

    // Wrap the Quartz image in a UIImage
    let image = UIImage(CGImage: quartzImage)!

    return image
}

When I try to visualize the UIImage using a UIImageView, I get nothing.
Any ideas?


Solution

  • This is a solution for Swift 3.0, which extends CMSampleBuffer with a computed property that gives you an optional UIImage.

    import AVFoundation
    
    extension CMSampleBuffer {
        /// The sample buffer's pixel data rendered as a `UIImage`,
        /// or `nil` if the buffer has no image data or the bitmap
        /// context cannot be created.
        ///
        /// Assumes the capture output delivers BGRA frames
        /// (`kCVPixelFormatType_32BGRA`) — the context is configured
        /// little-endian with alpha first to match. TODO confirm against
        /// the session's `videoSettings`.
        var uiImage: UIImage? {
            guard let imageBuffer = CMSampleBufferGetImageBuffer(self) else { return nil }
    
            CVPixelBufferLockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))
            // Unlock on every exit path. The original code returned early on
            // the guards below without unlocking, leaking the buffer lock.
            // makeImage() copies the pixels, so unlocking afterwards is safe.
            defer { CVPixelBufferUnlockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0)) }
    
            let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
            let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
            let width = CVPixelBufferGetWidth(imageBuffer)
            let height = CVPixelBufferGetHeight(imageBuffer)
            let colorSpace = CGColorSpaceCreateDeviceRGB()
            // BGRA layout: little-endian, alpha (skipped) first.
            let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.noneSkipFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue)
            guard let context = CGContext(data: baseAddress,
                                          width: width,
                                          height: height,
                                          bitsPerComponent: 8,
                                          bytesPerRow: bytesPerRow,
                                          space: colorSpace,
                                          bitmapInfo: bitmapInfo.rawValue) else { return nil }
            guard let cgImage = context.makeImage() else { return nil }
    
            return UIImage(cgImage: cgImage)
        }
    }