barcode-scanner, swift5, xcode11, camera-overlay

How to make the camera background darker outside of rectOfInterest in Swift 5, Xcode 11


I'm building a barcode scanner app. As I'm new to Swift and Xcode, I've used other Stack Overflow posts (this and this one) to create a page where I can scan a barcode inside rectOfInterestArea (marked with corners) instead of the full camera view. However, I'm having a hard time darkening everything outside rectOfInterestArea: the whole view ends up darkened instead. I'm not sure what I'm doing wrong. This is the result:


I'm basically looking to achieve something like this:

Below is my code:

import AVFoundation
import UIKit

class TestViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {


    var captureSession: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!
    var rectOfInterestArea = UIView()
    var darkView = UIView()

    var scanRect:CGRect = CGRect(x: 0, y: 0, width: 0, height: 0)
    let metadataOutput = AVCaptureMetadataOutput()


    override func viewDidLoad() {
        super.viewDidLoad()


        captureSession = AVCaptureSession()

        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
        let videoInput: AVCaptureDeviceInput

        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            return
        }

        if (captureSession.canAddInput(videoInput)) {
            captureSession.addInput(videoInput)
        } else {
            failed()
            return
        }

        if (captureSession.canAddOutput(metadataOutput)) {
            captureSession.addOutput(metadataOutput)

            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = [.qr]
        } else {
            failed()
            return
        }

        let size = 300
        let screenWidth = self.view.frame.size.width
        let xPos = (CGFloat(screenWidth) / CGFloat(2)) - (CGFloat(size) / CGFloat(2))
        scanRect = CGRect(x: Int(xPos), y: 150, width: size, height: size)

        rectOfInterestArea.frame = scanRect

        view.addSubview(rectOfInterestArea)

        print(rectOfInterestArea.frame.size.height, " ", rectOfInterestArea.frame.size.width )

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill

        view.layer.addSublayer(previewLayer)

        print(previewLayer.frame.size.height, " ", previewLayer.frame.size.width )

        view.addSubview(rectOfInterestArea)


        captureSession.startRunning()
        metadataOutput.rectOfInterest = previewLayer.metadataOutputRectConverted(fromLayerRect: scanRect)


    }


    func failed() {
        let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
        ac.addAction(UIAlertAction(title: "OK", style: .default))
        present(ac, animated: true)
        captureSession = nil
    }



    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        self.rectOfInterestArea.layer.addSublayer(self.createFrame())
        if (captureSession?.isRunning == false) {
            captureSession.startRunning()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        if (captureSession?.isRunning == true) {
            captureSession.stopRunning()
        }
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        captureSession.stopRunning()

        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }

            found(code: stringValue)
        }

        dismiss(animated: true)
    }

    func found(code: String) {
        print(code)
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }

    func createFrame() -> CAShapeLayer {
        let height: CGFloat = self.rectOfInterestArea.frame.size.height
        let width: CGFloat = self.rectOfInterestArea.frame.size.width
        print(height, " " , width)
        //let h = previewLayer.frame.size.height
        let path = UIBezierPath()
        path.move(to: CGPoint(x: 5, y: 50))
        path.addLine(to: CGPoint(x: 5, y: 5))
        path.addLine(to: CGPoint(x: 50, y: 5))
        path.move(to: CGPoint(x: height - 55, y: 5))
        path.addLine(to: CGPoint(x: height - 5, y: 5))
        path.addLine(to: CGPoint(x: height - 5, y: 55))
        path.move(to: CGPoint(x: 5, y: width - 55))
        path.addLine(to: CGPoint(x: 5, y: width - 5))
        path.addLine(to: CGPoint(x: 55, y: width - 5))
        path.move(to: CGPoint(x: width - 55, y: height - 5))
        path.addLine(to: CGPoint(x: width - 5, y: height - 5))
        path.addLine(to: CGPoint(x: width - 5, y: height - 55))
        let shape = CAShapeLayer()
        shape.path = path.cgPath
        shape.strokeColor = UIColor.white.cgColor
        shape.lineWidth = 5
        shape.fillColor = UIColor.clear.cgColor
        return shape
    }

}

Any help is appreciated!


Solution

Build one path that contains both the full bounds and the rectOfInterest, and fill it with the even-odd rule; the inner rect is left clear while everything around it gets the semi-transparent fill:

    let path = CGMutablePath()
    path.addRect(bounds)
    path.addRect(rectOfInterest)

    let maskLayer = CAShapeLayer()
    maskLayer.path = path
    maskLayer.fillColor = UIColor.black.withAlphaComponent(0.6).cgColor
    maskLayer.fillRule = .evenOdd

    addSublayer(maskLayer)

Add the mask layer before adding the layer with the corner edges, so the corners stay on top of the dimmed background.
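For context, here is a minimal sketch of how this could be wired into the question's TestViewController, reusing its previewLayer and scanRect. The helper name addDimmedBackground and the 0.6 alpha are my own choices for illustration, not part of the answer:

    // Call from viewDidLoad after previewLayer has been added and scanRect is set,
    // and before rectOfInterestArea (the corner frame) is added on top.
    // NOTE: addDimmedBackground is a hypothetical helper, not from the answer.
    func addDimmedBackground(around holeRect: CGRect) {
        // One path containing both the full preview bounds and the scan rect.
        let path = CGMutablePath()
        path.addRect(previewLayer.bounds)
        path.addRect(holeRect)

        let dimLayer = CAShapeLayer()
        dimLayer.path = path
        dimLayer.fillColor = UIColor.black.withAlphaComponent(0.6).cgColor
        // The even-odd rule leaves the overlapping inner rect unfilled,
        // so only the area outside the scan rect is dimmed.
        dimLayer.fillRule = .evenOdd

        previewLayer.addSublayer(dimLayer)
    }

And in viewDidLoad, keep the ordering the answer describes:

    view.layer.addSublayer(previewLayer)
    addDimmedBackground(around: scanRect)  // dim everything outside the scan rect
    view.addSubview(rectOfInterestArea)    // corner frame goes on top of the dim layer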

Have a look at this repo for my implementation.