I need to take photos programmatically from a macOS app, and I am using AVCapturePhotoOutput to achieve this.
First, I initialize the camera:
import AVFoundation
import Cocoa

class ViewController: NSViewController, AVCapturePhotoCaptureDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {
    fileprivate var previewLayer: AVCaptureVideoPreviewLayer!
    fileprivate var captureSession: AVCaptureSession!
    fileprivate var captureConnection: AVCaptureConnection!
    fileprivate var cameraDevice: AVCaptureDevice!
    fileprivate var photoOutput: AVCapturePhotoOutput!

    override func viewDidLoad() {
        super.viewDidLoad()
        self.prepareCamera()
        self.startSession()
    }

    @IBAction func button(_ sender: Any) {
        self.capturePhoto()
    }
}

extension ViewController {
    func startSession() {
        if let videoSession = captureSession, !videoSession.isRunning {
            videoSession.startRunning()
        }
    }

    func stopSession() {
        if let videoSession = captureSession, videoSession.isRunning {
            videoSession.stopRunning()
        }
    }

    func capturePhoto() {
        let photoSettings = AVCapturePhotoSettings()
        self.photoOutput?.capturePhoto(with: photoSettings, delegate: self)
    }

    fileprivate func prepareCamera() {
        self.photoOutput = AVCapturePhotoOutput()
        self.captureSession = AVCaptureSession()
        self.captureSession.sessionPreset = .photo
        self.previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.previewLayer.videoGravity = .resizeAspectFill

        // Pick the first device that provides video and wire it up as the session input.
        for device in AVCaptureDevice.devices() where device.hasMediaType(.video) {
            cameraDevice = device
            do {
                let input = try AVCaptureDeviceInput(device: cameraDevice)
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                }
                if let previewLayer = self.previewLayer {
                    if previewLayer.connection?.isVideoMirroringSupported == true {
                        previewLayer.connection?.automaticallyAdjustsVideoMirroring = false
                        previewLayer.connection?.isVideoMirrored = true
                    }
                    previewLayer.frame = self.view.bounds
                    view.layer = previewLayer
                    view.wantsLayer = true
                }
            } catch {
                print(error.localizedDescription)
            }
        }

        // Add a video data output whose sample buffers go to a background queue.
        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sample buffer delegate"))
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }
    }
}
However, after triggering self.capturePhoto() with a button, I get:
2021-06-02 14:15:55.673352+0300 LenzDataUtil[29276:1484981] [General] *** -[AVCapturePhotoOutput capturePhotoWithSettings:delegate:] No active and enabled video connection
2021-06-02 14:15:55.675900+0300 LenzDataUtil[29276:1484981] [General] (
0 CoreFoundation 0x00007fff204ec5bf __exceptionPreprocess + 242
1 libobjc.A.dylib 0x00007fff20224bbb objc_exception_throw + 48
2 AVFCapture 0x00007fff378c4c74 -[AVCapturePhotoOutput_Tundra preparedPhotoSettingsArray] + 0
3 LenzDataUtil 0x0000000108e1b645 $s12LenzDataUtil14ViewControllerC12capturePhotoyyF + 245
4 LenzDataUtil 0x0000000108e1b4be $s12LenzDataUtil14ViewControllerC6buttonyyypF + 222
5 LenzDataUtil 0x0000000108e1b6d0 $s12LenzDataUtil14ViewControllerC6buttonyyypFTo + 80
6 AppKit 0x00007fff22db7b0b -[NSApplication(NSResponder) sendAction:to:from:] + 288
7 AppKit 0x00007fff22db79af -[NSControl sendAction:to:] + 86
8 AppKit 0x00007fff22db78e1 __26-[NSCell _sendActionFrom:]_block_invoke + 131
9 AppKit 0x00007fff22db77e8 -[NSCell _sendActionFrom:] + 171
10 AppKit 0x00007fff22db772e -[NSButtonCell _sendActionFrom:] + 96
11 AppKit 0x00007fff22db4813 NSControlTrackMouse + 1820
12 AppKit 0x00007fff22db40cf -[NSCell trackMouse:inRect:ofView:untilMouseUp:] + 130
13 AppKit 0x00007fff22db3f96 -[NSButtonCell trackMouse:inRect:ofView:untilMouseUp:] + 697
14 AppKit 0x00007fff22db32cd -[NSControl mouseDown:] + 722
15 AppKit 0x00007fff22db16ce -[NSWindow(NSEventRouting) _handleMouseDownEvent:isDelayedEvent:] + 4961
16 AppKit 0x00007fff22d20fb8 -[NSWindow(NSEventRouting) _reallySendEvent:isDelayedEvent:] + 2594
17 AppKit 0x00007fff22d20376 -[NSWindow(NSEventRouting) sendEvent:] + 347
18 AppKit 0x00007fff22d1e784 -[NSApplication(NSEvent) sendEvent:] + 352
19 AppKit 0x00007fff22ff7979 -[NSApplication _handleEvent:] + 65
20 AppKit 0x00007fff22b8769e -[NSApplication run] + 623
21 AppKit 0x00007fff22b5b85c NSApplicationMain + 816
22 LenzDataUtil 0x0000000108e1d154 $sSo21NSApplicationDelegateP6AppKitE4mainyyFZ + 36
23 LenzDataUtil 0x0000000108e1d11e $s12LenzDataUtil11AppDelegateC5$mainyyFZ + 46
24 LenzDataUtil 0x0000000108e1d1d9 main + 41
25 libdyld.dylib 0x00007fff20394f3d start + 1
26 ??? 0x0000000000000003 0x0 + 3
)
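In hindsight, the exception message points at the actual problem: prepareCamera() above never adds photoOutput to the captureSession, so there is no video connection feeding the photo output when capturePhoto(with:delegate:) runs. A minimal sketch of the missing step, reusing the property names from my code above:

captureSession.beginConfiguration()
// Attaching the output is what creates the video connection capturePhoto() needs.
if captureSession.canAddOutput(photoOutput) {
    captureSession.addOutput(photoOutput)
} else {
    print("Unable to add photoOutput to captureSession")
}
captureSession.commitConfiguration()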
I was eventually able to find a working example of how to use AVCapturePhotoOutput from Swift.
The following code was taken from Dan Clipca's GitHub project, AVCapturePhotoOutput:
//
// ViewController.swift
//
import AVFoundation
import Cocoa
import AppKit
class ViewController: NSViewController, AVCapturePhotoCaptureDelegate {
    // MARK: - Properties
    var previewLayer: AVCaptureVideoPreviewLayer?
    var captureSession: AVCaptureSession?
    var captureConnection: AVCaptureConnection?
    var cameraDevice: AVCaptureDevice?
    var photoOutput: AVCapturePhotoOutput?
    var mouseLocation: NSPoint { NSEvent.mouseLocation }

    // MARK: - Lifecycle
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        prepareCamera()
        startSession()
    }

    // MARK: - UtilityFunctions
    override var representedObject: Any? {
        didSet {
            // Update the view, if already loaded.
        }
    }

    @IBAction func button(_ sender: Any) {
        moveMouseToRandomScreenPoint() // Defined elsewhere in Dan Clipca's project.
        capturePhoto()
    }

    func startSession() {
        if let videoSession = captureSession, !videoSession.isRunning {
            videoSession.startRunning()
        }
    }

    func stopSession() {
        if let videoSession = captureSession, videoSession.isRunning {
            videoSession.stopRunning()
        }
    }

    // MARK: - AVCapturePhotoCaptureDelegate
    func photoOutput(_ output: AVCapturePhotoOutput, willBeginCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
        print("willBeginCaptureFor")
    }

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        print("didFinishProcessingPhoto")
        print(photo)
    }

    func capturePhoto() {
        print(captureConnection?.isActive as Any)
        let photoSettings = AVCapturePhotoSettings()
        photoOutput?.capturePhoto(with: photoSettings, delegate: self)
    }

    func prepareCamera() {
        photoOutput = AVCapturePhotoOutput()
        captureSession = AVCaptureSession()
        captureSession!.sessionPreset = .photo
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
        previewLayer!.videoGravity = .resizeAspectFill
        do {
            let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(
                deviceTypes: [.builtInWideAngleCamera],
                mediaType: .video,
                position: .front)
            let cameraDevice = deviceDiscoverySession.devices[0]
            let videoInput = try AVCaptureDeviceInput(device: cameraDevice)
            captureSession!.beginConfiguration()
            if captureSession!.canAddInput(videoInput) {
                print("Adding videoInput to captureSession")
                captureSession!.addInput(videoInput)
            } else {
                print("Unable to add videoInput to captureSession")
            }
            // This is the step my original code was missing: the photo output
            // must be attached to the session before capturePhoto() is called.
            if captureSession!.canAddOutput(photoOutput!) {
                captureSession!.addOutput(photoOutput!)
                print("Adding photoOutput to captureSession")
            } else {
                print("Unable to add photoOutput to captureSession")
            }
            captureConnection = AVCaptureConnection(inputPorts: videoInput.ports, output: photoOutput!)
            captureSession!.commitConfiguration()
            if let previewLayer = previewLayer {
                if previewLayer.connection?.isVideoMirroringSupported == true {
                    previewLayer.connection?.automaticallyAdjustsVideoMirroring = false
                    previewLayer.connection?.isVideoMirrored = true
                }
                previewLayer.frame = view.bounds
                view.layer = previewLayer
                view.wantsLayer = true
            }
            captureSession!.startRunning()
        } catch {
            print(error.localizedDescription)
        }
    }
}
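To do something useful with the captured image, the didFinishProcessingPhoto callback can hand over the encoded bytes. A minimal sketch, assuming the photo should simply be written to disk (the destination path here is hypothetical, pick any writable location):

func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    if let error = error {
        print("Capture failed: \(error.localizedDescription)")
        return
    }
    // fileDataRepresentation() returns the photo in its container format (e.g. JPEG).
    guard let data = photo.fileDataRepresentation() else {
        print("No data representation available")
        return
    }
    // Hypothetical destination file.
    let url = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("capture.jpg")
    do {
        try data.write(to: url)
        print("Saved photo to \(url.path)")
    } catch {
        print("Failed to save photo: \(error.localizedDescription)")
    }
}

Also note that on macOS 10.14 and later none of this delivers frames unless the app has camera permission: Info.plist needs an NSCameraUsageDescription entry, a sandboxed app needs the Camera entitlement, and AVCaptureDevice.requestAccess(for: .video) can be used to prompt the user before starting the session.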