Tags: swift, macos, cocoa, avfoundation, core-image

Can't write JPEG image to HDD using AVFoundation


I'm using AVFoundation module for accessing a FaceTime camera on macOS 10.13.3.

captureSession.startRunning() method is running, camera's indicator is green. But I can't capture and write JPEG image on my hard drive. What's wrong with my code?

I need an output JPG image 167x188 pixels (width x height).

I'm working in Xcode 9, Swift 4.

import Cocoa
import AVFoundation
import CoreImage

// NOTE(review): this class declares conformance to
// AVCaptureVideoDataOutputSampleBufferDelegate but never implements
// captureOutput(_:didOutput:from:) and never adds an AVCaptureVideoDataOutput
// to the session, so no sample buffers are ever delivered to it.
class ViewController: NSViewController, 
                      AVCaptureVideoDataOutputSampleBufferDelegate {

    @IBOutlet weak var camera: NSView!
    @IBOutlet weak var captureButton: NSButton!
    @IBOutlet weak var saveButton: NSButton!

    let captureSession = AVCaptureSession()
    var captureDevice: AVCaptureDevice?
    var previewLayer: AVCaptureVideoPreviewLayer?
    var imageOutput: AVCaptureStillImageOutput?

    // Sets up the preview layer and starts the session.
    // The loop below keeps overwriting captureDevice, so the LAST video
    // device enumerated is the one used.
    override func viewDidLoad() {
        super.viewDidLoad()
    
        self.view.layer?.backgroundColor = NSColor.black.cgColor
    
        camera.layer = CALayer()
        captureSession.sessionPreset = AVCaptureSession.Preset.low
        let devices = AVCaptureDevice.devices()
   
        for device in devices {
            print(device)
         
            if ((device as AnyObject).hasMediaType(AVMediaType.video)) {
                print(device)
                captureDevice = device 
            }
        }
    
        if captureDevice != nil {
            do {
                try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
                previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                previewLayer?.frame = (self.camera.layer?.frame)!
                self.camera.layer?.addSublayer(previewLayer!)

                captureSession.startRunning()
            
            } catch {
                // NOTE(review): this prints the constant key string, not the
                // thrown error; print(error) would be far more informative.
                print(AVCaptureSessionErrorKey.description)
            }
        }
    }

    var assetWriter: AVAssetWriter!
    var imageWriterInput: AVAssetWriterInput!
    // NOTE(review): empty settings dictionary — the AVAssetWriterInput below
    // is never actually configured for JPEG output.
    let outputSettings = [String: Any]()
    let url = URL(fileURLWithPath: "/Users/catalyst/Desktop/image.jpg")

    // NOTE(review): this only creates and attaches the still-image output;
    // it never calls captureStillImageAsynchronously(from:completionHandler:),
    // so no frame is ever captured when the button is pressed.
    @IBAction func captureImage(_ sender: NSButton) {
        print("Taking photo...")
    
        imageOutput = AVCaptureStillImageOutput()
        imageOutput?.outputSettings = [
                       AVVideoCodecKey: AVVideoCodecType.jpeg,
                       AVVideoWidthKey: NSNumber(value: Float(167)),
                      AVVideoHeightKey: NSNumber(value: Float(188))
        ] as [String : Any]
    
        if captureSession.canAddOutput(imageOutput!) {
            captureSession.addOutput(imageOutput!)
        }
    }

    // NOTE(review): the writer is created and the input attached, but
    // startWriting()/startSession(atSourceTime:) are never called and no
    // sample buffers are ever appended — so nothing is written to disk.
    // AVAssetWriter is a movie-file writer; it is not the right tool for
    // saving a single still JPEG in the first place.
    @IBAction func saveImage(_ sender: NSButton) {
        print("Saving photo...")
    
        let writerFileName = url
    
        assetWriter = try? AVAssetWriter(outputURL: writerFileName, 
                                          fileType: AVFileType.jpg)
        imageWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, 
                                         outputSettings: outputSettings)
    
        assetWriter.add(imageWriterInput)
    }
}

There are no compiler errors, but no captured image appears on my Desktop.

[screenshot of the app's interface — omitted]


Solution

  • An edit has changed this question's character completely, so here is the

    NEW ANSWER

    This captures an image for me. Make sure sandboxing isn't stopping you from writing to the path you indicate. AVCaptureStillImageOutput doesn't seem to resize the output for you, so you can resize it yourself using something like the approach linked in the code below.

    let captureSession = AVCaptureSession()
    // NOTE(review): AVCaptureStillImageOutput is deprecated on newer SDKs in
    // favor of AVCapturePhotoOutput — fine for this 10.13-era example, though.
    let stillImageOutput = AVCaptureStillImageOutput()
    // Implicitly unwrapped: assigned once in viewDidLoad before any use.
    var previewLayer: AVCaptureVideoPreviewLayer! = nil
    
    /// Configures the capture session: default video device as input, a
    /// preview layer on the `camera` view, and the still-image output.
    override func viewDidLoad() {
        super.viewDidLoad()

        // Use the system default video device instead of enumerating all devices.
        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
            print("No video capture device available")
            return
        }

        do {
            let input = try AVCaptureDeviceInput(device: captureDevice)
            guard captureSession.canAddInput(input) else {
                print("Cannot add camera input to the session")
                return
            }
            captureSession.addInput(input)

            // AVCaptureVideoPreviewLayer code
            // assumes you have a `camera` NSView with a backing layer.
            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            if let cameraLayer = self.camera.layer {
                previewLayer.frame = cameraLayer.frame
                cameraLayer.addSublayer(previewLayer)
            }

            if captureSession.canAddOutput(stillImageOutput) {
                captureSession.addOutput(stillImageOutput)
            }
            captureSession.startRunning()

        } catch {
            // Print the actual thrown error, not a constant key string.
            print("Failed to configure capture session: \(error)")
        }
    }
    
    /// Asynchronously captures one still frame as JPEG and writes it to disk.
    /// Errors (capture failure, missing buffer, write failure — e.g. due to
    /// sandboxing) are reported instead of crashing.
    @IBAction func captureImage(_ sender: NSButton) {
        print("Taking photo...")
        guard let videoConnection = stillImageOutput.connection(with: .video) else {
            print("No video connection available")
            return
        }
        stillImageOutput.captureStillImageAsynchronously(from: videoConnection) { jpegBuffer, error in
            if let error = error {
                print("Capture failed: \(error)")
                return
            }
            guard let jpegBuffer = jpegBuffer,
                  let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(jpegBuffer) else {
                print("Could not get JPEG data from the sample buffer")
                return
            }
            // make sure sandboxing isn't blocking this path
            let url = URL(fileURLWithPath: "/Users/swift/Desktop/picture.jpg")

            // resize here using something like
            // https://stackoverflow.com/a/32991610/22147

            do {
                try data.write(to: url)
            } catch {
                // try! here would crash on a permissions/sandbox failure.
                print("Failed to write image: \(error)")
            }
        }
    }
    

    OLD ANSWER

    Your url references a directory. Try referencing a file:

    let url = URL(fileURLWithPath: "/Users/swift/Desktop/movie.mov")
    

    You also need to implement the AVCaptureVideoDataOutputSampleBufferDelegate protocol.

    AVCaptureVideoDataOutput is not a file, it's a way of getting a callback with video and audio buffers in real-ish time and that's where you should append samples to videoWriterInput.

    AVCaptureVideoDataOutput and AVAssetWriter are quite low level. If you just want to record to a file, you can replace them with the simpler AVCaptureMovieFileOutput.