Tags: ios, swift, camera, avfoundation, snapchat

How can I capture photo and video from the same AVCaptureSession?


I'm trying to make a Snapchat-like app. From the same button, I want to let the user take pictures (touch up inside) and record video (long press).

I'm using AVFoundation for this. The tricky part is that I can't get both to work properly in the same AVCaptureSession. Since I have only one preview layer for both captures, how can I start the right capture depending on how the user interacts with the record button? Has anyone worked on something similar?
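
For context, the button itself is wired with a tap action plus a long-press recognizer, roughly like this (the view controller and handler names here are illustrative; `camera` is the RecordCamera instance shown below):

import UIKit

class RecordViewController: UIViewController {

    @IBOutlet weak var recordButton: UIButton!
    var camera: RecordCamera!   // the class shown below

    override func viewDidLoad() {
        super.viewDidLoad()
        camera = RecordCamera(view: view)

        // Tap (touch up inside) -> take a picture
        recordButton.addTarget(self, action: #selector(didTapRecord), for: .touchUpInside)

        // Long press -> start/stop video recording
        let longPress = UILongPressGestureRecognizer(target: self, action: #selector(didLongPressRecord(_:)))
        recordButton.addGestureRecognizer(longPress)
    }

    func didTapRecord() {
        camera.takePicture()
    }

    func didLongPressRecord(_ recognizer: UILongPressGestureRecognizer) {
        switch recognizer.state {
        case .began:             camera.startRecording()
        case .ended, .cancelled: camera.stopRecording()
        default:                 break
        }
    }
}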

Here's a piece of my code:

import UIKit
import AVFoundation

protocol RecordCameraDelegate {
    func didSavedOutputFile(url: URL!, error: Error?)
    func didSavedImage(image: UIImage?)
}

// MARK: - Camera
class RecordCamera : NSObject {

    var videoLayer : AVCaptureVideoPreviewLayer!
    var delegate : RecordCameraDelegate!
    var capturedPhoto : UIImage?

    fileprivate var captureSession = AVCaptureSession()
    fileprivate var photoSession = AVCaptureSession()

    fileprivate var movieOutput = AVCaptureMovieFileOutput()
    fileprivate var cameraDevice : AVCaptureDevicePosition!
    fileprivate let stillImageOutput = AVCaptureStillImageOutput()

    // Devices
    fileprivate lazy var frontCameraDevice: AVCaptureDevice? = {
        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
        return devices.filter { $0.position == .front }.first
    }()

    fileprivate lazy var backCameraDevice: AVCaptureDevice? = {
        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
        return devices.filter { $0.position == .back }.first
    }()

    fileprivate lazy var micDevice: AVCaptureDevice? = {
        return AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
    }()

    fileprivate var tempFilePath: URL = {
        let tempURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("bighug").appendingPathExtension("mp4")
        // Remove any leftover file from a previous recording
        if FileManager.default.fileExists(atPath: tempURL.path) {
            do {
                try FileManager.default.removeItem(at: tempURL)
            } catch { print("Can't remove temp file: \(error)") }
        }
        return tempURL
    }()

    // MARK: - Initialization
    init(view: UIView, cameraPosition: AVCaptureDevicePosition = .front) {
        super.init()

        cameraDevice = cameraPosition

        // Video
        self.configureToRecord(view: view)
        // Photo
        self.configureToCapturePhoto()
    }

    func configureToRecord(view: UIView? = nil) {

        captureSession.beginConfiguration()
        defer {
            // commit configuration & start the session if needed
            captureSession.commitConfiguration()
            if  !captureSession.isRunning { captureSession.startRunning() }
        }

        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        // One-time setup (preview layer + camera input), only before the session first runs
        if !captureSession.isRunning {

            // layer
            if  let validView = view {
                videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                videoLayer.frame = validView.bounds
                validView.layer.addSublayer(videoLayer)
            }

            // add camera input (front or back)
            let cameraInput = cameraDevice == .front
                ? deviceInputFrom(device: frontCameraDevice)
                : deviceInputFrom(device: backCameraDevice)
            if  let validCameraInput = cameraInput, captureSession.canAddInput(validCameraInput) {
                captureSession.addInput(validCameraInput)
            }
        }

        // add mic input (guarded, so repeated calls don't add it twice)
        if  let micInput = deviceInputFrom(device: micDevice), captureSession.canAddInput(micInput) {
            captureSession.addInput(micInput)
        }

        // Output
        movieOutput.movieFragmentInterval = kCMTimeInvalid

        // Remove previous output
        if  let existingOutput = captureSession.outputs.first as? AVCaptureOutput {
            captureSession.removeOutput(existingOutput)
        }
        // Add Movie Output
        if  captureSession.canAddOutput(movieOutput) {
            captureSession.addOutput(movieOutput)
        }
    }

    func configureToCapturePhoto() {

        photoSession.beginConfiguration()
        defer { photoSession.commitConfiguration() }

        photoSession.sessionPreset = AVCaptureSessionPresetPhoto
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

        if #available(iOS 10.0, *) {
            let cameraOutput = AVCapturePhotoOutput()
            // Add Photo Output
            if  photoSession.canAddOutput(cameraOutput) {
                photoSession.addOutput(cameraOutput)
            }
        }
        else {
            // Add Photo Output
            if  photoSession.canAddOutput(stillImageOutput) {
                photoSession.addOutput(stillImageOutput)
            }
        }
    }

    func takePicture() {
        if #available(iOS 10.0, *) {
            let cameraOutput = photoSession.outputs.first as! AVCapturePhotoOutput
            // Capture Picture
            let settings = AVCapturePhotoSettings()
            let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
            let previewFormat = [
                kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                kCVPixelBufferWidthKey as String: 828,
                kCVPixelBufferHeightKey as String: 828
            ]
            settings.previewPhotoFormat = previewFormat
            cameraOutput.capturePhoto(with: settings, delegate: self)
        }
        else {
            if  let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
                stillImageOutput.captureStillImageAsynchronously(from: videoConnection) { (imageDataSampleBuffer, error) -> Void in
                    let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer)
                    //UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData!)!, nil, nil, nil)
                    guard let validData = imageData else { self.delegate?.didSavedImage(image: nil); return }
                    self.capturedPhoto = UIImage(data: validData)
                }
            }
        }
    }

    // MARK: - Record Methods
    func startRecording() {
        // Take picture
        print("Camera started recording")
        self.takePicture()
        // Start recording
        movieOutput.startRecording(
            toOutputFileURL: tempFilePath,
            recordingDelegate: self
        )
    }

    func stopRecording() {
        print("Camera stopped recording")
        movieOutput.stopRecording()
    }

    // MARK: - Modes
    func cameraMode() {

        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }

        let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? []

        // From
        if  cameraDevice == .front {
            if  let validFrontDevice = deviceInputFrom(device: frontCameraDevice) {
                if !inputs.contains(validFrontDevice) {
                    captureSession.addInput(validFrontDevice)
                }
            }
        }
        // Back
        if  cameraDevice == .back {
            if  let validBackDevice = deviceInputFrom(device: backCameraDevice) {
                if !inputs.contains(validBackDevice) {
                    captureSession.addInput(validBackDevice)
                }
            }
        }

        print("Record Camera --> Set VIDEO Mode")
    }

    func audioMode() {

        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }

        let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? []

        // Remove..
        for input in inputs {
            if  let deviceInput = input as? AVCaptureDeviceInput {
                if  deviceInput.device == backCameraDevice
                ||  deviceInput.device == frontCameraDevice {
                    captureSession.removeInput(deviceInput)
                }
            }
        }

        print("Record Camera --> Set AUDIO Mode")
    }

    // MARK: - Util methods
    fileprivate func deviceInputFrom(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
        guard let validDevice = device else { return nil }
        do {
            return try AVCaptureDeviceInput(device: validDevice)
        } catch let outError {
            print("Device setup error occured: \(String(describing: outError))")
            return nil
        }
    }

    func swipeCamera() {

        cameraDevice = cameraDevice == .front ? .back : .front

        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }

        let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? []

        // Remove...
        for input in inputs {
            if  let deviceInput = input as? AVCaptureDeviceInput {
                if  deviceInput.device == backCameraDevice && cameraDevice == .front {
                    captureSession.removeInput(deviceInput)
                    photoSession.removeInput(deviceInput)
                    break
                } else if deviceInput.device == frontCameraDevice && cameraDevice == .back {
                    captureSession.removeInput(deviceInput)
                    photoSession.removeInput(deviceInput)
                    break
                }
            }
        }

        // From
        if  cameraDevice == .front {
            if  let validFrontDevice = deviceInputFrom(device: frontCameraDevice) {
                if !inputs.contains(validFrontDevice) {
                    captureSession.addInput(validFrontDevice)
                    photoSession.addInput(validFrontDevice)
                    print("Record Camera --> Swipe to Front Camera")
                }
            }
        }
        // Back
        if  cameraDevice == .back {
            if  let validBackDevice = deviceInputFrom(device: backCameraDevice) {
                if !inputs.contains(validBackDevice) {
                    captureSession.addInput(validBackDevice)
                    photoSession.addInput(validBackDevice)
                    print("Record Camera --> Swipe to Back Camera")
                }
            }
        }
    }
}

// MARK: - Capture Output
extension RecordCamera : AVCaptureFileOutputRecordingDelegate {

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
        // Not implemented
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        guard error == nil else {
            if  let photo = capturedPhoto {
                delegate?.didSavedImage(image: photo)
            }
            return
        }
        delegate?.didSavedOutputFile(url: outputFileURL, error: error)
    }
}

@available(iOS 10.0, *)
extension RecordCamera : AVCapturePhotoCaptureDelegate {

    func capture(_ captureOutput: AVCapturePhotoOutput, didCapturePhotoForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings) {
        print("Picture taken")
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {

        guard error == nil else {
            print("Failed Capturing Picture: \(String(describing: error!.localizedDescription))")
            capturedPhoto = nil
            //self.delegate.didSavedImage(image: nil)
            return
        }

        if  let sampleBuffer = photoSampleBuffer, let previewBuffer = previewPhotoSampleBuffer,
            let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print("Photo Saved!")
            capturedPhoto = UIImage(data: imageData)
            //self.delegate.didSavedImage(image: image)
        }

    }
}

Solution

  • I implemented almost the same functionality you need. I created and configured one capture session, using the AVCaptureVideoDataOutput class for video output, AVCaptureAudioDataOutput for audio, and AVCaptureStillImageOutput for photos.
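
    A minimal sketch of that single-session setup (assuming the Swift 3 era API used in your code; `CaptureController` and `configure()` are illustrative names, not from my actual project):

    import AVFoundation

    // One capture session feeding three outputs
    final class CaptureController: NSObject {

        let session = AVCaptureSession()
        let videoOutput = AVCaptureVideoDataOutput()
        let audioOutput = AVCaptureAudioDataOutput()
        let stillImageOutput = AVCaptureStillImageOutput()

        fileprivate let outputQueue = DispatchQueue(label: "capture.output")

        func configure() {
            session.beginConfiguration()
            defer { session.commitConfiguration() }

            session.sessionPreset = AVCaptureSessionPresetHigh

            // Inputs: default camera and microphone
            for mediaType in [AVMediaTypeVideo, AVMediaTypeAudio] {
                if  let device = AVCaptureDevice.defaultDevice(withMediaType: mediaType),
                    let input = try? AVCaptureDeviceInput(device: device),
                    session.canAddInput(input) {
                    session.addInput(input)
                }
            }

            // One session, three outputs: video frames, audio samples, stills
            videoOutput.setSampleBufferDelegate(self, queue: outputQueue)
            audioOutput.setSampleBufferDelegate(self, queue: outputQueue)
            for output in [videoOutput, audioOutput, stillImageOutput] as [AVCaptureOutput] {
                if session.canAddOutput(output) { session.addOutput(output) }
            }
        }
    }

    extension CaptureController: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

        // Shared by both data outputs; compare `captureOutput` to tell them apart
        func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
            // Route buffers to the asset writer, as in the delegate method below
        }
    }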

    I used AVAssetWriter to record video and audio because I needed to perform custom video manipulations. The recording itself happens in the AVCaptureVideoDataOutputSampleBufferDelegate method, which looks like this:

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        if !isRecordingVideo {
            return
        }
    
        if captureOutput == self.videoOutput {
            assetVideoWriterQueue.async {
                if self.shouldStartWritingSession {
                    self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
                    self.shouldStartWritingSession = false
                }
    
                if self.assetWriterInputCamera.isReadyForMoreMediaData {
                    self.assetWriterInputCamera.append(sampleBuffer)
                }
            }
        }
    
        if captureOutput == self.audioOutput {
            assetAudioWriterQueue.async {
                let shouldStartWritingSession = self.shouldStartWritingSession
                if self.assetWriterInputMicrofone.isReadyForMoreMediaData && shouldStartWritingSession == false {
                    self.assetWriterInputMicrofone.append(sampleBuffer)
                }
    
                if shouldStartWritingSession {
                    print("In audioOutput and CANNOT Record")
                }
            }
        }
    }
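
    The snippet above assumes the AVAssetWriter and its inputs already exist. Setting them up looks roughly like this (a sketch: the property names follow the delegate method above, but the output settings are my assumption):

    func prepareAssetWriter(outputURL: URL) throws {
        // Writer that produces the movie file
        assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileTypeQuickTimeMovie)

        // Video input: H.264, 720p here purely as an example
        assetWriterInputCamera = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: [
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoWidthKey: 1280,
            AVVideoHeightKey: 720
        ])
        assetWriterInputCamera.expectsMediaDataInRealTime = true

        // Audio input: mono AAC, again as an example
        assetWriterInputMicrofone = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVNumberOfChannelsKey: 1,
            AVSampleRateKey: 44_100
        ])
        assetWriterInputMicrofone.expectsMediaDataInRealTime = true

        for input in [assetWriterInputCamera, assetWriterInputMicrofone] where assetWriter.canAdd(input) {
            assetWriter.add(input)
        }

        // startSession(atSourceTime:) is deferred to the first video buffer,
        // which is what the shouldStartWritingSession flag controls above
        assetWriter.startWriting()
        shouldStartWritingSession = true
    }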
    

    My still image capturing looks like this:

    func captureStillImage(_ completion: @escaping ((Bool, UIImage?) -> Void)) {
        guard self.state == .running else {
            completion(false, nil)
            return
        }
    
        backgroundQueue.async {
            let connection = self.stillImageOutpup.connection(withMediaType: AVMediaTypeVideo)
    
            self.stillImageOutpup.captureStillImageAsynchronously(from: connection, completionHandler: { (buffer, error) in
                defer {
                    self.state = .running
                }
    
                guard let buffer = buffer, let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) else {
                    DispatchQueue.main.async {
                        completion(false, nil)
                    }
    
                    return
                }
    
                let image = UIImage(data: imageData)
    
                DispatchQueue.main.async {
                    completion(true, image)
                }
            })
        }
    }
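
    Calling it from the photo path is then just:

    captureStillImage { success, image in
        guard success, let image = image else { return }
        // hand the UIImage to the delegate or a preview view
    }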
    

    You can find examples of how to use asset writers on Stack Overflow. For example, you may get familiar with this one.
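
    For completeness, stopping a recording with the asset writer approach looks roughly like this (same assumed property names as in the delegate method above):

    func stopRecording(_ completion: @escaping (URL) -> Void) {
        isRecordingVideo = false

        assetVideoWriterQueue.async {
            // No more buffers will be appended; finalize the movie file
            self.assetWriterInputCamera.markAsFinished()
            self.assetWriterInputMicrofone.markAsFinished()
            self.assetWriter.finishWriting {
                DispatchQueue.main.async { completion(self.assetWriter.outputURL) }
            }
        }
    }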