Tags: swift, camera, avcaptureoutput

Capture only the camera preview in AVCapture (Swift)


Here is my code:

import UIKit
import AVFoundation

class ViewController: UIViewController {

    @IBOutlet weak var cameraView: UIView!

    var image: UIImage!

    var captureSession = AVCaptureSession()
    var backCamera: AVCaptureDevice?
    var frontCamera: AVCaptureDevice?
    var currentCamera: AVCaptureDevice?

    var photoOutput: AVCapturePhotoOutput?

    var cameraPreviewLayer: AVCaptureVideoPreviewLayer?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.


    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        setupCaptureSession()
        setupDevice()
        setupInputOutput()
        setupPreviewLayer()
        startRunningCaptureSession()

    }

    @IBAction func cameraButton_Tab(_ sender: Any) {

        let settings = AVCapturePhotoSettings()

//        performSegue(withIdentifier: "showPhoto_Segue", sender: nil)
        photoOutput?.capturePhoto(with: settings, delegate: self)

    }

    func setupCaptureSession() {

        captureSession.sessionPreset = AVCaptureSession.Preset.photo

    }

    func setupDevice() {

        let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)

        let devices = deviceDiscoverySession.devices

        for device in devices {

            if device.position == AVCaptureDevice.Position.back {

                backCamera = device

            } else if device.position == AVCaptureDevice.Position.front {

                frontCamera = device

            }

        }

        currentCamera = backCamera

    }

    func setupInputOutput() {

        do {

            let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
            captureSession.addInput(captureDeviceInput)
            photoOutput = AVCapturePhotoOutput()
            photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey : AVVideoCodecType.jpeg])], completionHandler: nil)
            captureSession.addOutput(photoOutput!)

        } catch {
            print(error)
        }

    }

    func setupPreviewLayer() {

        cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
        cameraPreviewLayer!.frame = self.cameraView.bounds
        self.cameraView.layer.insertSublayer(cameraPreviewLayer!, at: 0)

    }

    func startRunningCaptureSession() {

        captureSession.startRunning()

    }

}

extension ViewController: AVCapturePhotoCaptureDelegate {

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        if let imageData = photo.fileDataRepresentation(){
            image = UIImage(data: imageData)

        }
    }

}

[Screenshot: the camera preview shown through a square view with a yellow background]

See the image: I want to save only what is visible through the yellow-background view, i.e. the camera preview shown inside it.

But when I save the image, it saves the whole camera frame, not the square.

I made the UIImageView the same size as the yellow UIView and saved the output, but it still captures the whole frame and then resizes it.

It is like squeezing a rectangle into a square.

How can I capture and save just the area covered by the yellow background?


Solution

  • This didFinishProcessingPhoto callback returns the complete image, i.e. everything the camera sees. You won't directly get the image that is shown in your preview layer. So, in order to get a UIImage that matches the shown preview layer, you can resize the captured image.

    Resizing can be done in two ways: one keeps the aspect ratio and the other takes an exact size. I would recommend going with the aspect ratio, because it ensures that your image won't be squeezed or stretched, whereas passing a wrong exact size won't fulfil your requirement.

    Resize a UIImage by passing a new CGSize:

    extension UIImage {
        /// Resizes the image to an exact size (this may change the aspect ratio).
        func scaleImage(toSize newSize: CGSize) -> UIImage? {
            var newImage: UIImage?
            let newRect = CGRect(x: 0, y: 0, width: newSize.width, height: newSize.height).integral
            UIGraphicsBeginImageContextWithOptions(newSize, false, 0)
            // Make sure the context is always ended, even if drawing fails.
            defer { UIGraphicsEndImageContext() }
            if let context = UIGraphicsGetCurrentContext(), let cgImage = self.cgImage {
                context.interpolationQuality = .high
                // Core Graphics draws CGImages upside down, so flip the context vertically.
                let flipVertical = CGAffineTransform(a: 1, b: 0, c: 0, d: -1, tx: 0, ty: newSize.height)
                context.concatenate(flipVertical)
                context.draw(cgImage, in: newRect)
                if let img = context.makeImage() {
                    newImage = UIImage(cgImage: img)
                }
            }
            return newImage
        }
    }
    

    Usage: capturedImage.scaleImage(toSize: CGSize(width: 300, height: 300))

    Resize a UIImage keeping the aspect ratio:

    extension UIImage {
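        /// Resizes the image to the given width, keeping the original aspect ratio.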
        func scaleImage(toWidth newWidth: CGFloat) -> UIImage {
            let scale = newWidth / self.size.width
            let newHeight = self.size.height * scale
            let newSize = CGSize(width: newWidth, height: newHeight)
    
            let renderer = UIGraphicsImageRenderer(size: newSize)
    
            let image = renderer.image { (context) in
                self.draw(in: CGRect(origin: CGPoint(x: 0, y: 0), size: newSize))
            }
            return image
        }
    }
    

    Usage: capturedImage.scaleImage(toWidth: 300)

    Reference: Resize UIImage to 200x200pt/px

    Update:

    Keep the below method as it is in your code:

    @IBAction func cameraButton_Tab(_ sender: Any) {
        let settings = AVCapturePhotoSettings()
        photoOutput?.capturePhoto(with: settings, delegate: self)
    }
    
    extension ViewController: AVCapturePhotoCaptureDelegate {

        func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
            if let imageData = photo.fileDataRepresentation(),
               let capturedImage = UIImage(data: imageData) {
                // Returns an image scaled to the width of the camera preview layer.
                let cropImage = capturedImage.scaleImage(toWidth: cameraPreviewLayer!.frame.size.width)
                image = cropImage
            }
        }
    }
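
    If you want the saved photo to contain only what is actually visible inside the yellow square (rather than the full frame scaled down), one possible approach is to crop the captured image to the preview layer's visible region. The following is a minimal sketch, assuming the preview layer uses resizeAspectFill as in the question and the back camera is used; cropToPreview(_:) is just an illustrative helper name, not an existing API:

    extension ViewController {

        /// Crops a captured photo to the region currently visible in cameraPreviewLayer.
        func cropToPreview(_ capturedImage: UIImage) -> UIImage? {
            guard let previewLayer = cameraPreviewLayer,
                  let cgImage = capturedImage.cgImage else { return nil }

            // Normalized (0...1) rect of the preview layer's visible area,
            // expressed in the capture device's coordinate space.
            let outputRect = previewLayer.metadataOutputRectConverted(fromLayerRect: previewLayer.bounds)

            // Scale the normalized rect up to the pixel size of the captured CGImage.
            let width = CGFloat(cgImage.width)
            let height = CGFloat(cgImage.height)
            let cropRect = CGRect(x: outputRect.origin.x * width,
                                  y: outputRect.origin.y * height,
                                  width: outputRect.size.width * width,
                                  height: outputRect.size.height * height)

            guard let croppedCGImage = cgImage.cropping(to: cropRect) else { return nil }

            // Keep the original scale and orientation so the result displays upright.
            return UIImage(cgImage: croppedCGImage,
                           scale: capturedImage.scale,
                           orientation: capturedImage.imageOrientation)
        }
    }

    Usage (inside didFinishProcessingPhoto): if let squareImage = cropToPreview(capturedImage) { UIImageWriteToSavedPhotosAlbum(squareImage, nil, nil, nil) }. Note that saving to the photo library requires the appropriate usage description in Info.plist.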